From 52f3ef599271e47e3856151104649711d505c4aa Mon Sep 17 00:00:00 2001
From: Tadgh
Date: Tue, 11 Jul 2023 17:36:44 -0700
Subject: [PATCH] New formatting rules and pre-commit hooks (#5027)

* pre-apply spotless
* New pre-format checkpoint
* Once-over the repository
* Add pre-commit hook
* New github action
* excludes
* Revert "Once-over the repository"

This reverts commit 89334ba8d107ce92c101ad11b0de2586d434e7c5.

* Once-over the repo
* do not inherit
* Revert "Once-over the repo"

This reverts commit 31ed8731ad7430d31388e2d942730c150f9edd91.

* Add basic license support to spotless
* Move license
* Fix inclusions
* Fix inclusions
* Fix licensing
* license fixes
* Remove dead file that is only a license
* Remove dead class
* remove dead file
* Tighten licensing
* Tighten licensing
* Tighten licensing
* Tighten licensing
* Remove specific package
* Make the precommit hook executable
* Fix ordering
* rollback license
* wip
* Add to deployable pom
* remove file
* Fix location
* Remove dead comment
* wipP
* Fix ratchet
* Fix up error
* Add pre-commit
* precommit fixes
* wip
* wip
* Autoformat
* remove dead pom parts
* replace internal hook with pre-commit
* Fix fetch-depth for checker action
* Remove ratchet and filter
* fix up
* rename
* Add comment check
* rework message
* First once-over of the repo
* wip
* format
---
 .github/workflows/spotless.yml | 33 + .pre-commit-config.yaml | 10 + HELPWANTED.md | 1 - README.md | 4 +- hapi-deployable-pom/pom.xml | 6 + .../ca/uhn/fhir/android/AndroidMarker.java | 4 +- .../main/java/ca/uhn/fhir/IHapiBootOrder.java | 1 - .../BaseRuntimeChildDatatypeDefinition.java | 31 +- .../context/BaseRuntimeChildDefinition.java | 12 +- .../BaseRuntimeDeclaredChildDefinition.java | 23 +- ...BaseRuntimeElementCompositeDefinition.java | 205 +- .../context/BaseRuntimeElementDefinition.java | 40 +- .../fhir/context/ComboSearchParamType.java | 2 - .../fhir/context/ConfigurationException.java | 3 +- .../java/ca/uhn/fhir/context/FhirContext.java | 196 +- .../ca/uhn/fhir/context/FhirVersionEnum.java | 17 +- .../context/IRuntimeDatatypeDefinition.java | 5 +- .../ca/uhn/fhir/context/ModelScanner.java | 142 +- .../ca/uhn/fhir/context/ParserOptions.java | 4 +- .../fhir/context/PerformanceOptionsEnum.java | 5 +- .../ca/uhn/fhir/context/RuntimeChildAny.java | 55 +- .../context/RuntimeChildChoiceDefinition.java | 29 +- ...ChildCompositeBoundDatatypeDefinition.java | 21 +- ...ntimeChildCompositeDatatypeDefinition.java | 18 +- .../RuntimeChildContainedResources.java | 12 +- ...ntimeChildDeclaredExtensionDefinition.java | 60 +- .../context/RuntimeChildDirectResource.java | 22 +- .../ca/uhn/fhir/context/RuntimeChildExt.java | 4 +- .../fhir/context/RuntimeChildExtension.java | 44 +- .../RuntimeChildNarrativeDefinition.java | 18 +- ...dPrimitiveBoundCodeDatatypeDefinition.java | 17 +- ...ntimeChildPrimitiveDatatypeDefinition.java | 15 +- ...rimitiveEnumerationDatatypeDefinition.java | 9 +- .../RuntimeChildResourceBlockDefinition.java | 27 +- .../RuntimeChildResourceDefinition.java | 53 +- ...imeChildUndeclaredExtensionDefinition.java | 22 +- .../RuntimeCompositeDatatypeDefinition.java | 26 +- .../RuntimeElemContainedResourceList.java | 3 +- .../RuntimeElemContainedResources.java | 1 - .../context/RuntimeElementDirectResource.java | 1 - .../context/RuntimeExtensionDtDefinition.java | 29 +- .../context/RuntimeIdDatatypeDefinition.java | 10 +- .../RuntimePrimitiveDatatypeDefinition.java | 17 +- ...ePrimitiveDatatypeNarrativeDefinition.java | 15 +- ...rimitiveDatatypeXhtmlHl7OrgDefinition.java | 14 
+- .../RuntimeResourceBlockDefinition.java | 12 +- .../context/RuntimeResourceDefinition.java | 49 +- .../uhn/fhir/context/RuntimeSearchParam.java | 102 +- .../fhir/context/api/AddProfileTagEnum.java | 4 +- .../fhir/context/api/BundleInclusionRule.java | 15 +- .../fhir/context/phonetic/ApacheEncoder.java | 3 +- .../fhir/context/phonetic/NumericEncoder.java | 3 +- .../support/ConceptValidationOptions.java | 4 +- .../DefaultProfileValidationSupport.java | 28 +- ...rofileValidationSupportBundleStrategy.java | 59 +- .../context/support/IValidationSupport.java | 146 +- .../support/TranslateConceptResult.java | 16 +- .../support/ValueSetExpansionOptions.java | 4 +- .../fhirpath/FhirPathExecutionException.java | 1 - .../java/ca/uhn/fhir/fhirpath/IFhirPath.java | 6 +- .../java/ca/uhn/fhir/i18n/HapiErrorCode.java | 5 +- .../java/ca/uhn/fhir/i18n/HapiLocalizer.java | 19 +- .../src/main/java/ca/uhn/fhir/i18n/Msg.java | 1 - .../ca/uhn/fhir/interceptor/api/Hook.java | 1 - .../uhn/fhir/interceptor/api/HookParams.java | 31 +- .../api/IAnonymousInterceptor.java | 1 - .../api/IBaseInterceptorService.java | 2 +- .../api/IInterceptorBroadcaster.java | 4 +- .../interceptor/api/IInterceptorService.java | 1 - .../uhn/fhir/interceptor/api/IPointcut.java | 2 +- .../uhn/fhir/interceptor/api/Interceptor.java | 2 +- .../ca/uhn/fhir/interceptor/api/Pointcut.java | 755 ++- .../executor/BaseInterceptorService.java | 107 +- .../executor/InterceptorService.java | 7 +- .../interceptor/model/RequestPartitionId.java | 59 +- .../TransactionWriteOperationsDetails.java | 1 - .../ca/uhn/fhir/model/api/BaseElement.java | 43 +- .../model/api/BaseIdentifiableElement.java | 11 +- .../ca/uhn/fhir/model/api/BasePrimitive.java | 21 +- .../ca/uhn/fhir/model/api/ExtensionDt.java | 27 +- .../fhir/model/api/IBoundCodeableConcept.java | 4 +- .../ca/uhn/fhir/model/api/ICodingEnum.java | 5 +- .../fhir/model/api/ICompositeDatatype.java | 6 +- .../uhn/fhir/model/api/ICompositeElement.java | 10 +- .../java/ca/uhn/fhir/model/api/IDatatype.java | 4 +- .../java/ca/uhn/fhir/model/api/IElement.java | 7 +- .../ca/uhn/fhir/model/api/IFhirVersion.java | 12 +- .../fhir/model/api/IIdentifiableElement.java | 1 - .../ca/uhn/fhir/model/api/IModelJson.java | 10 +- .../fhir/model/api/IPrimitiveDatatype.java | 5 +- .../fhir/model/api/IQueryParameterAnd.java | 20 +- .../uhn/fhir/model/api/IQueryParameterOr.java | 8 +- .../fhir/model/api/IQueryParameterType.java | 27 +- .../java/ca/uhn/fhir/model/api/IResource.java | 16 +- .../fhir/model/api/IStreamingDatatype.java | 1 - .../api/ISupportsUndeclaredExtensions.java | 19 +- .../fhir/model/api/IValueSetEnumBinder.java | 5 +- .../java/ca/uhn/fhir/model/api/Include.java | 22 +- .../model/api/ResourceMetadataKeyEnum.java | 65 +- .../model/api/StorageResponseCodeEnum.java | 19 +- .../main/java/ca/uhn/fhir/model/api/Tag.java | 43 +- .../java/ca/uhn/fhir/model/api/TagList.java | 22 +- .../fhir/model/api/TemporalPrecisionEnum.java | 35 +- .../fhir/model/api/annotation/Binding.java | 2 +- .../uhn/fhir/model/api/annotation/Block.java | 3 +- .../uhn/fhir/model/api/annotation/Child.java | 35 +- .../fhir/model/api/annotation/ChildOrder.java | 6 +- .../model/api/annotation/Compartment.java | 5 +- .../model/api/annotation/DatatypeDef.java | 18 +- .../model/api/annotation/ExampleSupplier.java | 1 - .../fhir/model/api/annotation/Extension.java | 7 +- .../model/api/annotation/PasswordField.java | 4 +- .../model/api/annotation/ResourceDef.java | 7 +- .../api/annotation/SearchParamDefinition.java | 19 +- 
.../model/api/annotation/SimpleSetter.java | 16 +- .../model/base/composite/BaseCodingDt.java | 42 +- .../model/base/composite/BaseContainedDt.java | 1 - .../model/base/composite/BaseHumanNameDt.java | 24 +- .../base/composite/BaseIdentifierDt.java | 23 +- .../model/base/composite/BaseNarrativeDt.java | 11 +- .../model/base/composite/BaseQuantityDt.java | 144 +- .../composite/BaseResourceReferenceDt.java | 17 +- .../model/base/resource/BaseConformance.java | 8 +- .../base/resource/BaseOperationOutcome.java | 16 +- .../base/resource/BaseSecurityEvent.java | 4 +- .../base/resource/ResourceMetadataMap.java | 5 +- .../fhir/model/primitive/Base64BinaryDt.java | 4 +- .../fhir/model/primitive/BaseDateTimeDt.java | 81 +- .../uhn/fhir/model/primitive/BooleanDt.java | 1 - .../uhn/fhir/model/primitive/BoundCodeDt.java | 19 +- .../ca/uhn/fhir/model/primitive/CodeDt.java | 9 +- .../ca/uhn/fhir/model/primitive/DateDt.java | 40 +- .../uhn/fhir/model/primitive/DateTimeDt.java | 33 +- .../uhn/fhir/model/primitive/DecimalDt.java | 16 +- .../ca/uhn/fhir/model/primitive/IdDt.java | 37 +- .../uhn/fhir/model/primitive/InstantDt.java | 38 +- .../uhn/fhir/model/primitive/IntegerDt.java | 3 +- .../uhn/fhir/model/primitive/MarkdownDt.java | 6 +- .../ca/uhn/fhir/model/primitive/OidDt.java | 4 +- .../fhir/model/primitive/PositiveIntDt.java | 5 +- .../ca/uhn/fhir/model/primitive/StringDt.java | 27 +- .../ca/uhn/fhir/model/primitive/TimeDt.java | 7 +- .../fhir/model/primitive/UnsignedIntDt.java | 5 +- .../ca/uhn/fhir/model/primitive/UriDt.java | 21 +- .../ca/uhn/fhir/model/primitive/XhtmlDt.java | 2 - .../valueset/BundleEntrySearchModeEnum.java | 88 +- .../BundleEntryTransactionMethodEnum.java | 88 +- .../fhir/model/valueset/BundleTypeEnum.java | 96 +- .../ca/uhn/fhir/model/view/ViewGenerator.java | 22 +- .../BaseThymeleafNarrativeGenerator.java | 82 +- .../CustomThymeleafNarrativeGenerator.java | 16 +- .../DefaultThymeleafNarrativeGenerator.java | 4 +- .../narrative2/BaseNarrativeGenerator.java | 48 +- .../fhir/narrative2/INarrativeTemplate.java | 1 - .../INarrativeTemplateManifest.java | 21 +- .../fhir/narrative2/NarrativeTemplate.java | 7 +- .../narrative2/NarrativeTemplateManifest.java | 86 +- .../uhn/fhir/narrative2/TemplateTypeEnum.java | 2 - .../java/ca/uhn/fhir/parser/BaseParser.java | 301 +- .../uhn/fhir/parser/DataFormatException.java | 1 - .../uhn/fhir/parser/ErrorHandlerAdapter.java | 8 +- .../ca/uhn/fhir/parser/IJsonLikeParser.java | 11 +- .../main/java/ca/uhn/fhir/parser/IParser.java | 4 +- .../uhn/fhir/parser/IParserErrorHandler.java | 10 +- .../java/ca/uhn/fhir/parser/JsonParser.java | 704 ++- .../uhn/fhir/parser/LenientErrorHandler.java | 36 +- .../java/ca/uhn/fhir/parser/NDJsonParser.java | 125 +- .../ca/uhn/fhir/parser/ParseErrorHandler.java | 2 - .../java/ca/uhn/fhir/parser/ParserState.java | 291 +- .../java/ca/uhn/fhir/parser/RDFParser.java | 424 +- .../uhn/fhir/parser/StrictErrorHandler.java | 32 +- .../java/ca/uhn/fhir/parser/XmlParser.java | 333 +- .../fhir/parser/json/BaseJsonLikeArray.java | 8 +- .../fhir/parser/json/BaseJsonLikeObject.java | 1 - .../fhir/parser/json/BaseJsonLikeValue.java | 273 +- .../fhir/parser/json/BaseJsonLikeWriter.java | 1 - .../parser/json/jackson/JacksonStructure.java | 27 +- .../parser/json/jackson/JacksonWriter.java | 4 +- .../fhir/parser/path/EncodeContextPath.java | 2 - .../parser/path/EncodeContextPathElement.java | 126 +- .../ca/uhn/fhir/rest/annotation/AddTags.java | 19 +- .../java/ca/uhn/fhir/rest/annotation/At.java | 16 +- 
.../rest/annotation/ConditionalUrlParam.java | 5 +- .../ca/uhn/fhir/rest/annotation/Count.java | 10 +- .../ca/uhn/fhir/rest/annotation/Create.java | 9 +- .../ca/uhn/fhir/rest/annotation/Delete.java | 13 +- .../uhn/fhir/rest/annotation/DeleteTags.java | 20 +- .../ca/uhn/fhir/rest/annotation/Destroy.java | 3 +- .../ca/uhn/fhir/rest/annotation/Elements.java | 2 +- .../ca/uhn/fhir/rest/annotation/GetPage.java | 6 +- .../ca/uhn/fhir/rest/annotation/GraphQL.java | 3 +- .../ca/uhn/fhir/rest/annotation/History.java | 33 +- .../ca/uhn/fhir/rest/annotation/IdParam.java | 3 +- .../fhir/rest/annotation/IncludeParam.java | 29 +- .../uhn/fhir/rest/annotation/Initialize.java | 3 +- .../ca/uhn/fhir/rest/annotation/Metadata.java | 6 +- .../ca/uhn/fhir/rest/annotation/Offset.java | 6 +- .../uhn/fhir/rest/annotation/Operation.java | 1 - .../fhir/rest/annotation/OperationParam.java | 26 +- .../fhir/rest/annotation/OptionalParam.java | 18 +- .../uhn/fhir/rest/annotation/PageIdParam.java | 2 +- .../ca/uhn/fhir/rest/annotation/Patch.java | 9 +- .../ca/uhn/fhir/rest/annotation/RawParam.java | 6 +- .../ca/uhn/fhir/rest/annotation/Read.java | 9 +- .../fhir/rest/annotation/RequiredParam.java | 19 +- .../fhir/rest/annotation/ResourceParam.java | 4 +- .../ca/uhn/fhir/rest/annotation/Search.java | 37 +- .../uhn/fhir/rest/annotation/ServerBase.java | 2 +- .../ca/uhn/fhir/rest/annotation/Since.java | 18 +- .../ca/uhn/fhir/rest/annotation/Sort.java | 14 +- .../uhn/fhir/rest/annotation/Transaction.java | 2 +- .../rest/annotation/TransactionParam.java | 8 +- .../ca/uhn/fhir/rest/annotation/Update.java | 11 +- .../ca/uhn/fhir/rest/annotation/Validate.java | 28 +- .../ca/uhn/fhir/rest/api/BundleLinks.java | 3 +- .../fhir/rest/api/CacheControlDirective.java | 1 - .../java/ca/uhn/fhir/rest/api/Constants.java | 41 +- .../fhir/rest/api/DeleteCascadeModeEnum.java | 2 - .../ca/uhn/fhir/rest/api/EncodingEnum.java | 21 +- .../api/IVersionSpecificBundleFactory.java | 25 +- .../ca/uhn/fhir/rest/api/MethodOutcome.java | 1 - .../ca/uhn/fhir/rest/api/PatchTypeEnum.java | 11 +- .../uhn/fhir/rest/api/PreferHandlingEnum.java | 5 +- .../uhn/fhir/rest/api/PreferReturnEnum.java | 4 +- .../uhn/fhir/rest/api/QualifiedParamList.java | 2 - .../rest/api/RequestFormatParamStyleEnum.java | 1 - .../ca/uhn/fhir/rest/api/RequestTypeEnum.java | 11 +- .../fhir/rest/api/RestOperationTypeEnum.java | 8 +- .../rest/api/RestSearchParameterTypeEnum.java | 106 +- .../rest/api/SearchContainedModeEnum.java | 4 +- .../ca/uhn/fhir/rest/api/SearchStyleEnum.java | 5 +- .../fhir/rest/api/SearchTotalModeEnum.java | 1 - .../ca/uhn/fhir/rest/api/SortOrderEnum.java | 2 - .../java/ca/uhn/fhir/rest/api/SortSpec.java | 9 +- .../ca/uhn/fhir/rest/api/SummaryEnum.java | 3 +- .../uhn/fhir/rest/api/ValidationModeEnum.java | 22 +- .../fhir/rest/client/api/BaseHttpRequest.java | 1 - .../ca/uhn/fhir/rest/client/api/Header.java | 35 +- .../fhir/rest/client/api/HttpClientUtil.java | 1 - .../fhir/rest/client/api/IBasicClient.java | 6 +- .../rest/client/api/IClientInterceptor.java | 5 +- .../fhir/rest/client/api/IGenericClient.java | 23 +- .../uhn/fhir/rest/client/api/IHttpClient.java | 74 +- .../fhir/rest/client/api/IHttpRequest.java | 1 - .../client/api/IRestfulClientFactory.java | 49 +- .../client/api/ServerValidationModeEnum.java | 5 +- .../fhir/rest/client/api/UrlSourceEnum.java | 1 - .../FhirClientConnectionException.java | 1 - ...ClientInappropriateForServerException.java | 1 - .../exceptions/InvalidResponseException.java | 1 - .../exceptions/NonFhirResponseException.java | 11 +- 
.../fhir/rest/gclient/BaseClientParam.java | 9 +- .../rest/gclient/CompositeClientParam.java | 9 +- .../fhir/rest/gclient/CompositeCriterion.java | 12 +- .../fhir/rest/gclient/DateClientParam.java | 13 +- .../ca/uhn/fhir/rest/gclient/IBaseOn.java | 7 +- .../ca/uhn/fhir/rest/gclient/IBaseQuery.java | 1 - .../fhir/rest/gclient/IClientExecutable.java | 1 - .../fhir/rest/gclient/ICompositeWithLeft.java | 1 - .../ca/uhn/fhir/rest/gclient/ICreate.java | 4 +- .../uhn/fhir/rest/gclient/ICreateTyped.java | 7 +- .../fhir/rest/gclient/ICreateWithQuery.java | 5 +- .../rest/gclient/ICreateWithQueryTyped.java | 4 +- .../ca/uhn/fhir/rest/gclient/ICriterion.java | 4 +- .../fhir/rest/gclient/ICriterionInternal.java | 1 - .../ca/uhn/fhir/rest/gclient/IDelete.java | 5 +- .../uhn/fhir/rest/gclient/IDeleteTyped.java | 2 - .../fhir/rest/gclient/IDeleteWithQuery.java | 4 +- .../rest/gclient/IDeleteWithQueryTyped.java | 4 +- .../rest/gclient/IFetchConformanceTyped.java | 5 +- .../gclient/IFetchConformanceUntyped.java | 1 - .../ca/uhn/fhir/rest/gclient/IGetPage.java | 5 +- .../uhn/fhir/rest/gclient/IGetPageTyped.java | 2 - .../fhir/rest/gclient/IGetPageUntyped.java | 4 +- .../ca/uhn/fhir/rest/gclient/IHistory.java | 4 +- .../uhn/fhir/rest/gclient/IHistoryTyped.java | 2 - .../fhir/rest/gclient/IHistoryUntyped.java | 1 - .../java/ca/uhn/fhir/rest/gclient/IMeta.java | 7 +- .../gclient/IMetaAddOrDeleteUnsourced.java | 1 - .../fhir/rest/gclient/IMetaGetUnsourced.java | 3 +- .../ca/uhn/fhir/rest/gclient/IOperation.java | 1 - .../uhn/fhir/rest/gclient/IOperationOn.java | 6 +- .../rest/gclient/IOperationProcessMsg.java | 34 +- .../gclient/IOperationProcessMsgMode.java | 11 +- .../fhir/rest/gclient/IOperationTyped.java | 6 +- .../fhir/rest/gclient/IOperationUnnamed.java | 1 - .../fhir/rest/gclient/IOperationUntyped.java | 21 +- .../gclient/IOperationUntypedWithInput.java | 2 +- ...ationUntypedWithInputAndPartialOutput.java | 11 +- .../java/ca/uhn/fhir/rest/gclient/IParam.java | 1 - .../fhir/rest/gclient/IPatchExecutable.java | 7 +- .../uhn/fhir/rest/gclient/IPatchWithBody.java | 4 +- .../fhir/rest/gclient/IPatchWithQuery.java | 5 +- .../rest/gclient/IPatchWithQueryTyped.java | 4 +- .../java/ca/uhn/fhir/rest/gclient/IQuery.java | 5 +- .../fhir/rest/gclient/IReadExecutable.java | 3 +- .../fhir/rest/gclient/IReadIfNoneMatch.java | 8 +- .../ca/uhn/fhir/rest/gclient/IReadTyped.java | 11 +- .../java/ca/uhn/fhir/rest/gclient/ISort.java | 1 - .../uhn/fhir/rest/gclient/ITransaction.java | 7 +- .../fhir/rest/gclient/ITransactionTyped.java | 3 +- .../uhn/fhir/rest/gclient/IUntypedQuery.java | 3 +- .../ca/uhn/fhir/rest/gclient/IUpdate.java | 1 - .../fhir/rest/gclient/IUpdateExecutable.java | 7 +- .../fhir/rest/gclient/IUpdateWithQuery.java | 5 +- .../rest/gclient/IUpdateWithQueryTyped.java | 4 +- .../ca/uhn/fhir/rest/gclient/IValidate.java | 3 +- .../fhir/rest/gclient/IValidateUntyped.java | 3 +- .../fhir/rest/gclient/NumberClientParam.java | 13 +- .../rest/gclient/QuantityClientParam.java | 13 +- .../fhir/rest/gclient/QuantityCriterion.java | 8 +- .../rest/gclient/ReferenceClientParam.java | 5 +- .../fhir/rest/gclient/SpecialClientParam.java | 10 +- .../fhir/rest/gclient/StringClientParam.java | 17 +- .../fhir/rest/gclient/StringCriterion.java | 18 +- .../fhir/rest/gclient/TokenClientParam.java | 40 +- .../uhn/fhir/rest/gclient/TokenCriterion.java | 45 +- .../uhn/fhir/rest/gclient/UriClientParam.java | 17 +- .../uhn/fhir/rest/param/BaseAndListParam.java | 4 +- .../uhn/fhir/rest/param/BaseOrListParam.java | 3 +- 
.../ca/uhn/fhir/rest/param/BaseParam.java | 14 +- .../fhir/rest/param/BaseParamWithPrefix.java | 52 +- .../rest/param/CompositeAndListParam.java | 10 +- .../fhir/rest/param/CompositeOrListParam.java | 10 +- .../uhn/fhir/rest/param/CompositeParam.java | 8 +- .../uhn/fhir/rest/param/DateAndListParam.java | 4 +- .../uhn/fhir/rest/param/DateOrListParam.java | 6 +- .../ca/uhn/fhir/rest/param/DateParam.java | 25 +- .../uhn/fhir/rest/param/DateRangeParam.java | 90 +- .../uhn/fhir/rest/param/HasAndListParam.java | 3 - .../uhn/fhir/rest/param/HasOrListParam.java | 2 - .../java/ca/uhn/fhir/rest/param/HasParam.java | 14 +- .../param/HistorySearchDateRangeParam.java | 15 +- .../rest/param/HistorySearchStyleEnum.java | 8 +- .../uhn/fhir/rest/param/InternalCodingDt.java | 38 +- .../fhir/rest/param/NumberAndListParam.java | 5 +- .../fhir/rest/param/NumberOrListParam.java | 7 +- .../ca/uhn/fhir/rest/param/NumberParam.java | 25 +- .../uhn/fhir/rest/param/ParamPrefixEnum.java | 12 +- .../ca/uhn/fhir/rest/param/ParameterUtil.java | 32 +- .../uhn/fhir/rest/param/QualifierDetails.java | 4 - .../fhir/rest/param/QuantityAndListParam.java | 4 +- .../fhir/rest/param/QuantityOrListParam.java | 4 - .../ca/uhn/fhir/rest/param/QuantityParam.java | 54 +- .../rest/param/ReferenceAndListParam.java | 5 +- .../fhir/rest/param/ReferenceOrListParam.java | 4 +- .../uhn/fhir/rest/param/ReferenceParam.java | 4 +- .../fhir/rest/param/SpecialAndListParam.java | 4 +- .../fhir/rest/param/SpecialOrListParam.java | 2 - .../ca/uhn/fhir/rest/param/SpecialParam.java | 4 +- .../fhir/rest/param/StringAndListParam.java | 6 +- .../fhir/rest/param/StringOrListParam.java | 4 +- .../ca/uhn/fhir/rest/param/StringParam.java | 31 +- .../fhir/rest/param/TokenAndListParam.java | 3 +- .../uhn/fhir/rest/param/TokenOrListParam.java | 16 +- .../ca/uhn/fhir/rest/param/TokenParam.java | 18 +- .../fhir/rest/param/TokenParamModifier.java | 25 +- .../uhn/fhir/rest/param/UriAndListParam.java | 5 +- .../uhn/fhir/rest/param/UriOrListParam.java | 7 +- .../java/ca/uhn/fhir/rest/param/UriParam.java | 16 +- .../rest/param/UriParamQualifierEnum.java | 26 +- .../fhir/rest/param/binder/BaseBinder.java | 19 +- .../param/binder/BaseJavaPrimitiveBinder.java | 11 +- .../rest/param/binder/CalendarBinder.java | 9 +- .../rest/param/binder/CollectionBinder.java | 15 +- .../fhir/rest/param/binder/DateBinder.java | 9 +- .../param/binder/FhirPrimitiveBinder.java | 7 +- .../fhir/rest/param/binder/IParamBinder.java | 10 +- .../param/binder/QueryParameterAndBinder.java | 16 +- .../param/binder/QueryParameterOrBinder.java | 21 +- .../binder/QueryParameterTypeBinder.java | 20 +- .../fhir/rest/param/binder/StringBinder.java | 5 +- .../exceptions/AuthenticationException.java | 19 +- .../BaseServerResponseException.java | 28 +- .../ForbiddenOperationException.java | 14 +- .../exceptions/InternalErrorException.java | 12 +- .../exceptions/InvalidRequestException.java | 13 +- .../exceptions/MethodNotAllowedException.java | 25 +- .../NotImplementedOperationException.java | 8 +- .../exceptions/NotModifiedException.java | 10 +- .../exceptions/PayloadTooLargeException.java | 3 +- .../PreconditionFailedException.java | 11 +- .../exceptions/ResourceGoneException.java | 9 +- .../exceptions/ResourceNotFoundException.java | 16 +- .../ResourceVersionConflictException.java | 17 +- .../ResourceVersionNotSpecifiedException.java | 15 +- .../UnclassifiedServerFailureException.java | 11 +- .../UnprocessableEntityException.java | 2 - .../ca/uhn/fhir/store/IAuditDataStore.java | 4 +- 
.../uhn/fhir/system/HapiSystemProperties.java | 11 +- .../java/ca/uhn/fhir/tls/BaseStoreInfo.java | 13 +- .../java/ca/uhn/fhir/tls/KeyStoreType.java | 9 +- .../main/java/ca/uhn/fhir/tls/PathType.java | 3 +- .../java/ca/uhn/fhir/tls/TrustStoreInfo.java | 1 - .../main/java/ca/uhn/fhir/util/ArrayUtil.java | 7 +- .../main/java/ca/uhn/fhir/util/AsyncUtil.java | 9 +- .../java/ca/uhn/fhir/util/AttachmentUtil.java | 24 +- .../java/ca/uhn/fhir/util/BinaryUtil.java | 33 +- .../java/ca/uhn/fhir/util/BundleBuilder.java | 78 +- .../java/ca/uhn/fhir/util/BundleUtil.java | 233 +- .../java/ca/uhn/fhir/util/ClasspathUtil.java | 5 +- .../java/ca/uhn/fhir/util/CollectionUtil.java | 1 - .../ca/uhn/fhir/util/CompositionBuilder.java | 6 +- .../util/CountingAndLimitingInputStream.java | 1 - .../java/ca/uhn/fhir/util/DateRangeUtil.java | 15 +- .../main/java/ca/uhn/fhir/util/DateUtils.java | 62 +- .../java/ca/uhn/fhir/util/ElementUtil.java | 20 +- .../ca/uhn/fhir/util/ExtensionConstants.java | 34 +- .../java/ca/uhn/fhir/util/ExtensionUtil.java | 89 +- .../java/ca/uhn/fhir/util/FhirTerser.java | 440 +- .../util/FhirVersionIndependentConcept.java | 11 +- .../main/java/ca/uhn/fhir/util/FileUtil.java | 3 +- .../java/ca/uhn/fhir/util/HapiExtensions.java | 65 +- .../main/java/ca/uhn/fhir/util/ICallable.java | 1 - .../main/java/ca/uhn/fhir/util/ILockable.java | 1 - .../java/ca/uhn/fhir/util/IModelVisitor.java | 15 +- .../java/ca/uhn/fhir/util/IModelVisitor2.java | 17 +- .../main/java/ca/uhn/fhir/util/IoUtil.java | 1 - .../main/java/ca/uhn/fhir/util/JsonUtil.java | 2 +- .../main/java/ca/uhn/fhir/util/LogUtil.java | 1 - .../src/main/java/ca/uhn/fhir/util/Logs.java | 12 +- .../ca/uhn/fhir/util/MessageSupplier.java | 22 +- .../main/java/ca/uhn/fhir/util/MetaUtil.java | 29 +- .../ca/uhn/fhir/util/MultimapCollector.java | 6 +- .../util/NonPrettyPrintWriterWrapper.java | 14 +- .../uhn/fhir/util/NumericParamRangeUtil.java | 4 +- .../java/ca/uhn/fhir/util/ObjectUtil.java | 10 +- .../uhn/fhir/util/OperationOutcomeUtil.java | 105 +- .../java/ca/uhn/fhir/util/ParametersUtil.java | 297 +- .../ca/uhn/fhir/util/PhoneticEncoderUtil.java | 12 +- .../fhir/util/PrettyPrintWriterWrapper.java | 24 +- .../fhir/util/PropertyModifyingHelper.java | 26 +- .../main/java/ca/uhn/fhir/util/ProxyUtil.java | 2 +- .../java/ca/uhn/fhir/util/ReflectionUtil.java | 32 +- .../uhn/fhir/util/ResourceReferenceInfo.java | 40 +- .../ca/uhn/fhir/util/SearchParameterUtil.java | 67 +- .../main/java/ca/uhn/fhir/util/StopWatch.java | 19 +- .../java/ca/uhn/fhir/util/StringUtil.java | 8 +- .../ca/uhn/fhir/util/SubscriptionUtil.java | 6 +- .../java/ca/uhn/fhir/util/TaskChunker.java | 1 - .../java/ca/uhn/fhir/util/TerserUtil.java | 167 +- .../ca/uhn/fhir/util/TerserUtilHelper.java | 1 - .../main/java/ca/uhn/fhir/util/TestUtil.java | 4 +- .../java/ca/uhn/fhir/util/TimeoutManager.java | 3 +- .../ca/uhn/fhir/util/UrlPathTokenizer.java | 1 - .../main/java/ca/uhn/fhir/util/UrlUtil.java | 59 +- .../java/ca/uhn/fhir/util/ValidateUtil.java | 1 - .../java/ca/uhn/fhir/util/VersionEnum.java | 3 +- .../java/ca/uhn/fhir/util/VersionUtil.java | 1 - .../ca/uhn/fhir/util/XmlDetectionUtil.java | 2 - .../main/java/ca/uhn/fhir/util/XmlUtil.java | 82 +- .../fhir/util/bundle/BundleEntryMutator.java | 10 +- .../fhir/util/bundle/BundleEntryParts.java | 7 +- .../util/bundle/SearchBundleEntryParts.java | 2 - .../fhir/util/jar/DependencyLogFactory.java | 4 +- .../uhn/fhir/util/jar/DependencyLogImpl.java | 18 +- .../ca/uhn/fhir/util/jar/IDependencyLog.java | 1 - 
.../validation/BaseValidationContext.java | 7 +- .../ca/uhn/fhir/validation/FhirValidator.java | 123 +- .../uhn/fhir/validation/IResourceLoader.java | 6 +- .../fhir/validation/IValidationContext.java | 6 +- .../uhn/fhir/validation/IValidatorModule.java | 6 +- .../ca/uhn/fhir/validation/LSInputImpl.java | 21 +- .../fhir/validation/ResultSeverityEnum.java | 4 +- .../fhir/validation/SchemaBaseValidator.java | 32 +- .../validation/SingleValidationMessage.java | 1 - .../fhir/validation/ValidationContext.java | 54 +- .../ValidationFailureException.java | 30 +- .../fhir/validation/ValidationOptions.java | 4 +- .../uhn/fhir/validation/ValidationResult.java | 42 +- .../schematron/SchematronBaseValidator.java | 44 +- .../schematron/SchematronProvider.java | 10 +- .../fhir/instance/model/api/IAnyResource.java | 3 +- .../instance/model/api/IBackboneElement.java | 5 +- .../hl7/fhir/instance/model/api/IBase.java | 13 +- .../model/api/IBaseBackboneElement.java | 5 +- .../fhir/instance/model/api/IBaseBinary.java | 7 +- .../model/api/IBaseBooleanDatatype.java | 5 +- .../fhir/instance/model/api/IBaseBundle.java | 10 +- .../fhir/instance/model/api/IBaseCoding.java | 2 - .../instance/model/api/IBaseConformance.java | 5 +- .../instance/model/api/IBaseDatatype.java | 5 +- .../model/api/IBaseDatatypeElement.java | 5 +- .../model/api/IBaseDecimalDatatype.java | 4 +- .../fhir/instance/model/api/IBaseElement.java | 1 - .../instance/model/api/IBaseEnumFactory.java | 5 +- .../instance/model/api/IBaseEnumeration.java | 6 +- .../instance/model/api/IBaseExtension.java | 3 +- .../instance/model/api/IBaseFhirEnum.java | 51 +- .../model/api/IBaseHasExtensions.java | 1 - .../model/api/IBaseHasModifierExtensions.java | 1 - .../model/api/IBaseIntegerDatatype.java | 5 +- .../instance/model/api/IBaseLongDatatype.java | 5 +- .../instance/model/api/IBaseMetaType.java | 12 +- .../model/api/IBaseOperationOutcome.java | 5 +- .../instance/model/api/IBaseParameters.java | 5 +- .../instance/model/api/IBaseReference.java | 2 - .../instance/model/api/IBaseResource.java | 8 +- .../fhir/instance/model/api/IBaseXhtml.java | 5 +- .../instance/model/api/ICompositeType.java | 5 +- .../instance/model/api/IDomainResource.java | 1 - .../hl7/fhir/instance/model/api/IIdType.java | 36 +- .../fhir/instance/model/api/INarrative.java | 2 - .../instance/model/api/IPrimitiveType.java | 3 - .../uhn/fhir/checks/HapiErrorCodeCheck.java | 22 +- ...tractImportExportCsvConceptMapCommand.java | 103 +- .../main/java/ca/uhn/fhir/cli/BaseApp.java | 39 +- .../cli/BaseClearMigrationLockCommand.java | 15 +- .../java/ca/uhn/fhir/cli/BaseCommand.java | 218 +- .../cli/BaseFlywayMigrateDatabaseCommand.java | 46 +- .../cli/BaseRequestGeneratingCommand.java | 52 +- .../ca/uhn/fhir/cli/BulkImportCommand.java | 115 +- .../uhn/fhir/cli/CommandFailureException.java | 1 - .../ca/uhn/fhir/cli/CreatePackageCommand.java | 45 +- .../ca/uhn/fhir/cli/ExampleDataUploader.java | 204 +- .../cli/ExportConceptMapToCsvCommand.java | 43 +- .../cli/ImportCsvToConceptMapCommand.java | 144 +- .../cli/LoadingValidationSupportDstu2.java | 13 +- .../cli/LoadingValidationSupportDstu3.java | 13 +- .../fhir/cli/LoadingValidationSupportR4.java | 7 +- .../fhir/cli/ReindexTerminologyCommand.java | 32 +- .../ca/uhn/fhir/cli/RunServerCommand.java | 1 - .../cli/ToggleSearchParametersCommand.java | 12 +- .../fhir/cli/UploadTerminologyCommand.java | 112 +- .../java/ca/uhn/fhir/cli/ValidateCommand.java | 44 +- .../uhn/fhir/cli/ValidationDataUploader.java | 176 +- .../cli/ValidationSupportChainCreator.java | 
5 +- .../fhir/cli/WebsocketSubscribeCommand.java | 5 +- .../HapiFhirCliRestfulClientFactory.java | 44 +- .../okhttp/client/OkHttpRestfulClient.java | 213 +- .../client/OkHttpRestfulClientFactory.java | 105 +- .../okhttp/client/OkHttpRestfulRequest.java | 87 +- .../okhttp/client/OkHttpRestfulResponse.java | 21 +- .../uhn/fhir/okhttp/utils/UrlStringUtils.java | 31 +- .../rest/client/apache/ApacheHttpClient.java | 68 +- .../rest/client/apache/ApacheHttpRequest.java | 5 +- .../client/apache/ApacheHttpResponse.java | 30 +- .../apache/ApacheRestfulClientFactory.java | 34 +- .../rest/client/apache/BaseHttpClient.java | 22 +- .../client/apache/GZipContentInterceptor.java | 23 +- .../rest/client/apache/ResourceEntity.java | 4 +- .../uhn/fhir/rest/client/impl/BaseClient.java | 132 +- .../client/impl/BaseHttpClientInvocation.java | 21 +- .../client/impl/ClientInvocationHandler.java | 24 +- .../impl/ClientInvocationHandlerFactory.java | 52 +- .../fhir/rest/client/impl/GenericClient.java | 500 +- .../client/impl/HttpBasicAuthInterceptor.java | 32 +- .../client/impl/RestfulClientFactory.java | 80 +- .../AdditionalRequestHeadersInterceptor.java | 1 - .../interceptor/BasicAuthInterceptor.java | 15 +- .../BearerTokenAuthInterceptor.java | 6 +- .../interceptor/CapturingInterceptor.java | 3 +- .../client/interceptor/CookieInterceptor.java | 3 +- .../interceptor/LoggingInterceptor.java | 4 +- .../SimpleRequestHeaderInterceptor.java | 14 +- .../ThreadLocalCapturingInterceptor.java | 3 - .../UrlTenantSelectionInterceptor.java | 7 +- .../interceptor/UserInfoInterceptor.java | 27 +- .../fhir/rest/client/method/AtParameter.java | 1 - .../BaseHttpClientInvocationWithContents.java | 29 +- .../rest/client/method/BaseMethodBinding.java | 145 +- .../BaseOutcomeReturningMethodBinding.java | 45 +- ...indingWithResourceIdButNoResourceBody.java | 33 +- ...turningMethodBindingWithResourceParam.java | 29 +- .../client/method/BaseQueryParameter.java | 27 +- .../BaseResourceReturningMethodBinding.java | 171 +- .../client/method/ConditionalParamBinder.java | 38 +- .../method/ConformanceMethodBinding.java | 19 +- .../rest/client/method/CountParameter.java | 39 +- .../client/method/CreateMethodBinding.java | 37 +- .../client/method/DeleteMethodBinding.java | 31 +- .../rest/client/method/ElementsParameter.java | 31 +- .../client/method/HistoryMethodBinding.java | 39 +- .../method/HttpDeleteClientInvocation.java | 22 +- .../method/HttpGetClientInvocation.java | 17 +- .../method/HttpPatchClientInvocation.java | 25 +- .../method/HttpPostClientInvocation.java | 20 +- .../method/HttpPutClientInvocation.java | 7 +- .../method/HttpSimpleClientInvocation.java | 9 +- .../client/method/IClientResponseHandler.java | 13 +- .../IClientResponseHandlerHandlesBinary.java | 12 +- .../fhir/rest/client/method/IParameter.java | 22 +- .../rest/client/method/IRestfulHeader.java | 4 +- .../rest/client/method/IncludeParameter.java | 16 +- .../fhir/rest/client/method/MethodUtil.java | 144 +- .../rest/client/method/NullParameter.java | 29 +- .../rest/client/method/OffsetParameter.java | 47 +- .../client/method/OperationMethodBinding.java | 69 +- .../client/method/OperationParameter.java | 93 +- .../rest/client/method/PageMethodBinding.java | 10 +- .../client/method/PatchMethodBinding.java | 58 +- .../client/method/PatchTypeParameter.java | 22 +- .../rest/client/method/RawParamsParmeter.java | 28 +- .../rest/client/method/ReadMethodBinding.java | 74 +- .../rest/client/method/ResourceParameter.java | 23 +- .../client/method/SearchMethodBinding.java | 116 +- 
.../rest/client/method/SearchParameter.java | 74 +- .../client/method/SinceOrAtParameter.java | 94 +- .../rest/client/method/SinceParameter.java | 1 - .../rest/client/method/SortParameter.java | 47 +- .../client/method/SummaryEnumParameter.java | 31 +- .../method/TransactionMethodBinding.java | 33 +- .../client/method/TransactionParameter.java | 41 +- .../client/method/UpdateMethodBinding.java | 43 +- .../ValidateMethodBindingDstu2Plus.java | 50 +- .../rest/client/tls/TlsAuthenticationSvc.java | 71 +- .../canonical/VersionCanonicalizer.java | 240 +- .../VersionedApiConverterInterceptor.java | 14 +- .../NullVersionConverterAdvisor10_30.java | 4 +- .../NullVersionConverterAdvisor10_40.java | 4 +- .../src/main/java/ChangelogMigrator.java | 27 +- .../fhir/docs/AuthorizationInterceptors.java | 300 +- .../AuthorizingTesterUiClientFactory.java | 20 +- .../ca/uhn/hapi/fhir/docs/BalpExample.java | 37 +- .../hapi/fhir/docs/BundleBuilderExamples.java | 55 +- .../ca/uhn/hapi/fhir/docs/BundleFetcher.java | 18 +- .../ca/uhn/hapi/fhir/docs/ClientExamples.java | 336 +- .../fhir/docs/ClientTransactionExamples.java | 147 +- .../hapi/fhir/docs/CompleteExampleClient.java | 75 +- .../hapi/fhir/docs/ConsentInterceptors.java | 34 +- .../uhn/hapi/fhir/docs/ConverterExamples.java | 15 +- .../java/ca/uhn/hapi/fhir/docs/Copier.java | 72 +- .../CreateCompositionAndGenerateDocument.java | 58 +- .../uhn/hapi/fhir/docs/CustomObservation.java | 4 +- .../ca/uhn/hapi/fhir/docs/Dstu2Examples.java | 88 +- .../uhn/hapi/fhir/docs/ExampleProviders.java | 103 +- .../hapi/fhir/docs/ExampleRestfulClient.java | 27 +- .../hapi/fhir/docs/ExampleRestfulServlet.java | 40 +- .../uhn/hapi/fhir/docs/ExtensionsDstu2.java | 143 +- .../uhn/hapi/fhir/docs/ExtensionsDstu3.java | 254 +- .../uhn/hapi/fhir/docs/FhirContextIntro.java | 141 +- .../ca/uhn/hapi/fhir/docs/FhirDataModel.java | 278 +- .../uhn/hapi/fhir/docs/FhirTesterConfig.java | 22 +- .../hapi/fhir/docs/GenericClientExample.java | 353 +- .../uhn/hapi/fhir/docs/GenomicsUploader.java | 10 +- .../java/ca/uhn/hapi/fhir/docs/HttpProxy.java | 57 +- .../ca/uhn/hapi/fhir/docs/IRestfulClient.java | 17 +- .../uhn/hapi/fhir/docs/IncludesExamples.java | 60 +- .../ca/uhn/hapi/fhir/docs/Interceptors.java | 4 - .../ca/uhn/hapi/fhir/docs/JaxRsClient.java | 32 +- .../fhir/docs/JaxRsConformanceProvider.java | 33 +- .../fhir/docs/JaxRsPatientRestProvider.java | 92 +- .../ca/uhn/hapi/fhir/docs/Multitenancy.java | 48 +- .../java/ca/uhn/hapi/fhir/docs/MyPatient.java | 39 +- .../ca/uhn/hapi/fhir/docs/MyPatientUse.java | 108 +- .../java/ca/uhn/hapi/fhir/docs/Narrative.java | 48 +- .../hapi/fhir/docs/NarrativeGenerator.java | 19 +- .../hapi/fhir/docs/PagingPatientProvider.java | 105 +- .../ca/uhn/hapi/fhir/docs/PagingServer.java | 16 +- .../java/ca/uhn/hapi/fhir/docs/Parser.java | 46 +- .../uhn/hapi/fhir/docs/PartitionExamples.java | 35 +- .../ca/uhn/hapi/fhir/docs/PatchExamples.java | 11 +- .../ca/uhn/hapi/fhir/docs/QuickUsage.java | 56 +- ...positoryValidatingInterceptorExamples.java | 108 +- .../fhir/docs/RequestCounterInterceptor.java | 33 +- .../docs/RequestExceptionInterceptor.java | 16 +- .../ca/uhn/hapi/fhir/docs/ResourceRefs.java | 43 +- .../RestfulObservationResourceProvider.java | 17 +- .../docs/RestfulPatientResourceProvider.java | 19 +- .../RestfulPatientResourceProviderMore.java | 2012 ++++--- .../hapi/fhir/docs/SecurityInterceptors.java | 12 +- .../hapi/fhir/docs/ServerETagExamples.java | 31 +- .../fhir/docs/ServerExceptionsExample.java | 29 +- .../hapi/fhir/docs/ServerInterceptors.java | 105 
+- .../fhir/docs/ServerMetadataExamples.java | 55 +- .../uhn/hapi/fhir/docs/ServerOperations.java | 128 +- .../uhn/hapi/fhir/docs/ServletExamples.java | 284 +- .../ca/uhn/hapi/fhir/docs/TagsExamples.java | 78 +- .../uhn/hapi/fhir/docs/ValidateDirectory.java | 134 +- .../uhn/hapi/fhir/docs/ValidatorExamples.java | 442 +- .../fhir/docs/ValidatorExamplesDstu3.java | 72 +- .../customtype/CustomCompositeExtension.java | 8 +- .../fhir/docs/customtype/CustomDatatype.java | 33 +- .../fhir/docs/customtype/CustomResource.java | 31 +- .../fhir/docs/customtype/CustomUsage.java | 61 +- ...BasedBinarySecurityContextInterceptor.java | 4 +- .../docs/interceptor/MyTestInterceptor.java | 4 +- ...meModifierMdmPreProcessingInterceptor.java | 5 +- .../interceptor/TagTrimmingInterceptor.java | 1 - .../fhir/jaxrs/client/JaxRsHttpClient.java | 24 +- .../fhir/jaxrs/client/JaxRsHttpRequest.java | 7 +- .../fhir/jaxrs/client/JaxRsHttpResponse.java | 33 +- .../client/JaxRsRestfulClientFactory.java | 74 +- .../server/AbstractJaxRsBundleProvider.java | 273 +- .../AbstractJaxRsConformanceProvider.java | 58 +- .../server/AbstractJaxRsPageProvider.java | 40 +- .../jaxrs/server/AbstractJaxRsProvider.java | 75 +- .../server/AbstractJaxRsResourceProvider.java | 631 +-- .../JaxRsExceptionInterceptor.java | 171 +- .../interceptor/JaxRsResponseException.java | 11 +- .../server/util/JaxRsMethodBindings.java | 119 +- .../fhir/jaxrs/server/util/JaxRsRequest.java | 69 +- .../fhir/jaxrs/server/util/JaxRsResponse.java | 17 +- ...ocalContainerEntityManagerFactoryBean.java | 15 +- .../ca/uhn/fhir/jpa/model/sched/HapiJob.java | 3 +- .../jpa/model/sched/ISchedulerService.java | 5 +- .../model/sched/ScheduledJobDefinition.java | 8 +- .../ca/uhn/fhir/jpa/nickname/NicknameMap.java | 2 +- .../ca/uhn/fhir/jpa/nickname/NicknameSvc.java | 5 +- .../sched/AutowiringSpringBeanJobFactory.java | 8 +- .../uhn/fhir/jpa/sched/BaseHapiScheduler.java | 28 +- .../jpa/sched/BaseSchedulerServiceImpl.java | 19 +- .../uhn/fhir/jpa/sched/HapiNullScheduler.java | 16 +- .../util/DerbyTenSevenHapiFhirDialect.java | 6 +- .../java/ca/uhn/fhir/jpa/util/TestUtil.java | 3 - .../uhn/fhir/jpa/batch2/JobInstanceUtil.java | 6 +- .../uhn/fhir/jpa/batch2/JpaBatch2Config.java | 33 +- .../jpa/batch2/JpaJobPersistenceImpl.java | 187 +- .../DatabaseBlobBinaryStorageSvcImpl.java | 56 +- .../bulk/export/job/BulkExportJobConfig.java | 1 - ...BulkDataExportJobSchedulingHelperImpl.java | 54 +- .../export/svc/JpaBulkExportProcessor.java | 293 +- .../bulk/imprt/svc/BulkDataImportSvcImpl.java | 73 +- .../jpa/cache/ResourceVersionSvcDaoImpl.java | 41 +- .../fhir/jpa/config/Batch2SupportConfig.java | 14 +- .../jpa/config/BeanPostProcessorConfig.java | 1 - .../fhir/jpa/config/EnversAuditConfig.java | 1 - .../config/HapiFhirHibernateJpaDialect.java | 29 +- .../config/HibernatePropertiesProvider.java | 13 +- .../ca/uhn/fhir/jpa/config/JpaConfig.java | 122 +- .../uhn/fhir/jpa/config/JpaDstu2Config.java | 7 +- .../fhir/jpa/config/PackageLoaderConfig.java | 1 - .../ca/uhn/fhir/jpa/config/SearchConfig.java | 130 +- .../jpa/config/ValidationSupportConfig.java | 17 +- .../config/dstu3/FhirContextDstu3Config.java | 1 - .../fhir/jpa/config/dstu3/JpaDstu3Config.java | 24 +- .../uhn/fhir/jpa/config/r4/JpaR4Config.java | 44 +- .../jpa/config/r4b/FhirContextR4BConfig.java | 1 - .../uhn/fhir/jpa/config/r4b/JpaR4BConfig.java | 24 +- .../jpa/config/r5/FhirContextR5Config.java | 1 - .../uhn/fhir/jpa/config/r5/JpaR5Config.java | 25 +- ...cDataSourceConnectionPoolInfoProvider.java | 7 +- 
.../util/ConnectionPoolInfoProvider.java | 14 +- .../util/HapiEntityManagerFactoryUtil.java | 10 +- .../util/IConnectionPoolInfoProvider.java | 1 - .../config/util/ResourceCountCacheUtil.java | 1 + .../util/ValidationSupportConfigUtil.java | 14 +- .../ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java | 568 +- .../fhir/jpa/dao/BaseHapiFhirResourceDao.java | 1096 ++-- .../fhir/jpa/dao/BaseHapiFhirSystemDao.java | 147 +- .../java/ca/uhn/fhir/jpa/dao/CodingSpy.java | 5 +- .../dao/FhirResourceDaoSubscriptionDstu2.java | 36 +- .../uhn/fhir/jpa/dao/FhirSystemDaoDstu2.java | 14 +- .../fhir/jpa/dao/FulltextSearchSvcImpl.java | 202 +- .../ca/uhn/fhir/jpa/dao/HistoryBuilder.java | 101 +- .../fhir/jpa/dao/HistoryBuilderFactory.java | 16 +- .../uhn/fhir/jpa/dao/IFulltextSearchSvc.java | 25 +- .../jpa/dao/IJpaStorageResourceParser.java | 15 +- .../ca/uhn/fhir/jpa/dao/IndexedParam.java | 1 - ...JpaPersistedResourceValidationSupport.java | 49 +- .../ca/uhn/fhir/jpa/dao/JpaResourceDao.java | 4 +- .../fhir/jpa/dao/JpaResourceDaoBundle.java | 11 +- .../jpa/dao/JpaResourceDaoCodeSystem.java | 138 +- .../jpa/dao/JpaResourceDaoComposition.java | 15 +- .../jpa/dao/JpaResourceDaoConceptMap.java | 31 +- .../fhir/jpa/dao/JpaResourceDaoEncounter.java | 29 +- .../jpa/dao/JpaResourceDaoObservation.java | 122 +- .../fhir/jpa/dao/JpaResourceDaoPatient.java | 87 +- .../dao/JpaResourceDaoSearchParameter.java | 19 +- .../JpaResourceDaoStructureDefinition.java | 7 +- .../fhir/jpa/dao/JpaResourceDaoValueSet.java | 137 +- .../jpa/dao/JpaStorageResourceParser.java | 80 +- .../dao/ObservationLastNIndexPersistSvc.java | 95 +- .../uhn/fhir/jpa/dao/TolerantJsonParser.java | 44 +- .../fhir/jpa/dao/TransactionProcessor.java | 257 +- ...ansactionProcessorVersionAdapterDstu2.java | 13 +- .../data/IBatch2JobInstanceRepository.java | 60 +- .../dao/data/IBatch2WorkChunkRepository.java | 78 +- .../jpa/dao/data/IBinaryStorageEntityDao.java | 3 +- .../jpa/dao/data/IBulkImportJobFileDao.java | 10 +- .../jpa/dao/data/IHapiFhirJpaRepository.java | 3 +- .../jpa/dao/data/IMdmLinkJpaRepository.java | 74 +- .../uhn/fhir/jpa/dao/data/INpmPackageDao.java | 1 - .../jpa/dao/data/INpmPackageVersionDao.java | 9 +- .../data/INpmPackageVersionResourceDao.java | 28 +- .../uhn/fhir/jpa/dao/data/IPartitionDao.java | 1 - .../data/IResourceHistoryProvenanceDao.java | 4 +- .../dao/data/IResourceHistoryTableDao.java | 50 +- .../jpa/dao/data/IResourceHistoryTagDao.java | 3 - ...esourceIndexedComboTokensNonUniqueDao.java | 3 +- .../IResourceIndexedSearchParamCoordsDao.java | 6 +- .../IResourceIndexedSearchParamDateDao.java | 3 +- .../IResourceIndexedSearchParamNumberDao.java | 6 +- ...ResourceIndexedSearchParamQuantityDao.java | 6 +- ...dexedSearchParamQuantityNormalizedDao.java | 5 +- .../IResourceIndexedSearchParamStringDao.java | 3 +- .../IResourceIndexedSearchParamTokenDao.java | 3 +- .../IResourceIndexedSearchParamUriDao.java | 5 +- .../fhir/jpa/dao/data/IResourceLinkDao.java | 5 +- .../jpa/dao/data/IResourceReindexJobDao.java | 1 - .../jpa/dao/data/IResourceSearchUrlDao.java | 4 +- .../jpa/dao/data/IResourceSearchViewDao.java | 5 +- .../fhir/jpa/dao/data/IResourceTableDao.java | 96 +- .../fhir/jpa/dao/data/IResourceTagDao.java | 13 +- .../ca/uhn/fhir/jpa/dao/data/ISearchDao.java | 12 +- .../fhir/jpa/dao/data/ISearchIncludeDao.java | 4 +- .../jpa/dao/data/ISearchParamPresentDao.java | 1 - .../fhir/jpa/dao/data/ISearchResultDao.java | 9 +- .../jpa/dao/data/ISubscriptionTableDao.java | 3 +- .../fhir/jpa/dao/data/ITagDefinitionDao.java | 3 +- 
.../fhir/jpa/dao/data/ITermCodeSystemDao.java | 6 +- .../dao/data/ITermCodeSystemVersionDao.java | 26 +- .../fhir/jpa/dao/data/ITermConceptDao.java | 16 +- .../dao/data/ITermConceptDesignationDao.java | 4 +- .../fhir/jpa/dao/data/ITermConceptMapDao.java | 19 +- .../data/ITermConceptMapGroupElementDao.java | 3 +- .../ITermConceptMapGroupElementTargetDao.java | 3 +- .../data/ITermConceptParentChildLinkDao.java | 4 +- .../jpa/dao/data/ITermConceptPropertyDao.java | 1 - .../jpa/dao/data/ITermValueSetConceptDao.java | 41 +- .../ITermValueSetConceptDesignationDao.java | 4 +- .../dao/data/ITermValueSetConceptViewDao.java | 18 +- .../ITermValueSetConceptViewOracleDao.java | 16 +- .../fhir/jpa/dao/data/ITermValueSetDao.java | 16 +- .../jpa/dao/data/custom/IForcedIdDaoImpl.java | 136 +- .../jpa/dao/data/custom/IForcedIdQueries.java | 22 +- .../FhirResourceDaoSubscriptionDstu3.java | 36 +- .../jpa/dao/dstu3/FhirSystemDaoDstu3.java | 14 +- .../dao/expunge/ExpungeEverythingService.java | 190 +- .../expunge/JpaResourceExpungeService.java | 100 +- .../jpa/dao/expunge/ResourceForeignKey.java | 17 +- .../dao/expunge/ResourceTableFKProvider.java | 41 +- .../dao/index/DaoSearchParamSynchronizer.java | 21 +- .../fhir/jpa/dao/index/IdHelperService.java | 285 +- ...rchParamWithInlineReferencesExtractor.java | 88 +- .../jpa/dao/mdm/MdmExpansionCacheSvc.java | 12 +- .../fhir/jpa/dao/mdm/MdmLinkDaoJpaImpl.java | 215 +- .../jpa/dao/predicate/SearchFilterParser.java | 194 +- .../dao/r4/FhirResourceDaoSubscriptionR4.java | 38 +- .../uhn/fhir/jpa/dao/r4/FhirSystemDaoR4.java | 14 +- .../r4b/FhirResourceDaoSubscriptionR4B.java | 7 +- .../fhir/jpa/dao/r4b/FhirSystemDaoR4B.java | 15 +- ...TransactionProcessorVersionAdapterR4B.java | 14 +- .../dao/r5/FhirResourceDaoSubscriptionR5.java | 38 +- .../uhn/fhir/jpa/dao/r5/FhirSystemDaoR5.java | 15 +- .../TransactionProcessorVersionAdapterR5.java | 16 +- .../search/ExtendedHSearchClauseBuilder.java | 374 +- .../search/ExtendedHSearchIndexExtractor.java | 111 +- .../ExtendedHSearchResourceProjection.java | 1 - .../search/ExtendedHSearchSearchBuilder.java | 106 +- .../HSearchCompositeSearchIndexDataImpl.java | 125 +- .../jpa/dao/search/HSearchSortHelperImpl.java | 50 +- .../jpa/dao/search/IHSearchSortHelper.java | 1 - .../fhir/jpa/dao/search/LastNAggregation.java | 100 +- .../fhir/jpa/dao/search/LastNOperation.java | 33 +- .../uhn/fhir/jpa/dao/search/PathContext.java | 27 +- .../ResourceNotFoundInIndexException.java | 1 - .../SearchScrollQueryExecutorAdaptor.java | 1 - .../uhn/fhir/jpa/dao/search/TermHelper.java | 27 +- .../uhn/fhir/jpa/dao/search/package-info.java | 1 - .../delete/DeleteConflictFinderService.java | 5 +- .../jpa/delete/DeleteConflictOutcome.java | 1 - .../jpa/delete/DeleteConflictService.java | 75 +- .../delete/ThreadSafeResourceDeleterSvc.java | 54 +- .../batch2/DeleteExpungeSqlBuilder.java | 67 +- .../delete/batch2/DeleteExpungeSvcImpl.java | 17 +- .../jpa/entity/Batch2JobInstanceEntity.java | 63 +- .../jpa/entity/Batch2WorkChunkEntity.java | 91 +- .../entity/BulkExportCollectionEntity.java | 17 +- .../BulkExportCollectionFileEntity.java | 9 +- .../fhir/jpa/entity/BulkExportJobEntity.java | 29 +- .../fhir/jpa/entity/BulkImportJobEntity.java | 25 +- .../jpa/entity/BulkImportJobFileEntity.java | 25 +- .../jpa/entity/HapiFhirEnversRevision.java | 10 +- .../java/ca/uhn/fhir/jpa/entity/MdmLink.java | 99 +- .../uhn/fhir/jpa/entity/PartitionEntity.java | 12 +- .../jpa/entity/ResourceReindexJobEntity.java | 16 +- .../fhir/jpa/entity/ResourceSearchView.java | 67 +- 
.../java/ca/uhn/fhir/jpa/entity/Search.java | 74 +- .../ca/uhn/fhir/jpa/entity/SearchInclude.java | 22 +- .../ca/uhn/fhir/jpa/entity/SearchResult.java | 23 +- .../uhn/fhir/jpa/entity/SearchTypeEnum.java | 4 +- .../fhir/jpa/entity/SubscriptionTable.java | 23 +- .../uhn/fhir/jpa/entity/TermCodeSystem.java | 51 +- .../jpa/entity/TermCodeSystemVersion.java | 57 +- .../ca/uhn/fhir/jpa/entity/TermConcept.java | 115 +- .../jpa/entity/TermConceptDesignation.java | 70 +- .../uhn/fhir/jpa/entity/TermConceptMap.java | 76 +- .../fhir/jpa/entity/TermConceptMapGroup.java | 74 +- .../entity/TermConceptMapGroupElement.java | 68 +- .../TermConceptMapGroupElementTarget.java | 72 +- .../entity/TermConceptParentChildLink.java | 63 +- .../fhir/jpa/entity/TermConceptProperty.java | 71 +- .../jpa/entity/TermConceptPropertyBinder.java | 29 +- .../ca/uhn/fhir/jpa/entity/TermValueSet.java | 70 +- .../fhir/jpa/entity/TermValueSetConcept.java | 85 +- .../TermValueSetConceptDesignation.java | 102 +- .../jpa/entity/TermValueSetConceptView.java | 55 +- .../entity/TermValueSetConceptViewOracle.java | 55 +- .../TermValueSetPreExpansionStatusEnum.java | 10 +- ...nallyStoredResourceAddressMetadataKey.java | 7 +- ...ternallyStoredResourceServiceRegistry.java | 7 +- .../ca/uhn/fhir/jpa/esr/package-info.java | 1 - .../GraphQLProviderWithIntrospection.java | 115 +- .../CascadingDeleteInterceptor.java | 33 +- .../ForceOffsetSearchModeInterceptor.java | 2 - .../JpaConsentContextServices.java | 3 +- .../JpaPreResourceAccessDetails.java | 9 +- ...rentialIntegrityForDeletesInterceptor.java | 22 +- .../PerformanceTracingLoggingInterceptor.java | 37 +- ...actionConcurrencySemaphoreInterceptor.java | 46 +- .../tasks/HapiFhirJpaMigrationTasks.java | 3028 +++++++---- .../jpa/model/cross/JpaResourceLookup.java | 30 +- .../packages/IHapiPackageCacheManager.java | 2 - .../jpa/packages/IPackageInstallerSvc.java | 1 - .../fhir/jpa/packages/JpaPackageCache.java | 230 +- .../jpa/packages/NpmJpaValidationSupport.java | 7 +- .../jpa/packages/NpmPackageMetadataJson.java | 25 +- .../packages/NpmPackageSearchResultJson.java | 32 +- .../packages/PackageDeleteOutcomeJson.java | 8 +- .../packages/PackageInstallOutcomeJson.java | 15 +- .../jpa/packages/PackageInstallationSpec.java | 63 +- .../jpa/packages/PackageInstallerSvcImpl.java | 163 +- .../packages/PackageVersionComparator.java | 2 +- .../jpa/packages/loader/NpmPackageData.java | 15 +- .../jpa/packages/loader/PackageLoaderSvc.java | 98 +- .../loader/PackageResourceParsingSvc.java | 8 +- .../fhir/jpa/packages/util/PackageUtils.java | 15 +- .../jpa/partition/IPartitionLookupSvc.java | 2 +- .../jpa/partition/PartitionLookupSvcImpl.java | 72 +- .../PartitionManagementProvider.java | 132 +- .../partition/RequestPartitionHelperSvc.java | 53 +- .../BaseJpaResourceProviderCodeSystem.java | 137 +- .../BaseJpaResourceProviderComposition.java | 53 +- .../BaseJpaResourceProviderConceptMap.java | 85 +- .../BaseJpaResourceProviderEncounter.java | 83 +- ...BaseJpaResourceProviderEncounterDstu2.java | 108 +- .../BaseJpaResourceProviderObservation.java | 66 +- .../BaseJpaResourceProviderPatient.java | 222 +- ...paResourceProviderStructureDefinition.java | 22 +- .../jpa/provider/BaseJpaSystemProvider.java | 35 +- .../uhn/fhir/jpa/provider/DiffProvider.java | 70 +- .../jpa/provider/InstanceReindexProvider.java | 21 +- .../JpaCapabilityStatementProvider.java | 29 +- .../provider/JpaConformanceProviderDstu2.java | 21 +- .../fhir/jpa/provider/JpaSystemProvider.java | 45 +- .../jpa/provider/ProcessMessageProvider.java 
| 16 +- .../provider/TerminologyUploaderProvider.java | 160 +- .../provider/ValueSetOperationProvider.java | 193 +- .../ValueSetOperationProviderDstu2.java | 98 +- ...aseJpaResourceProviderConceptMapDstu3.java | 19 - .../dstu3/JpaConformanceProviderDstu3.java | 76 +- .../r4/IConsentExtensionProvider.java | 1 - .../provider/r4/IMemberMatchConsentHook.java | 3 +- .../r4/MemberMatchR4ResourceProvider.java | 95 +- .../provider/r4/MemberMatcherR4Helper.java | 109 +- .../fhir/jpa/reindex/Batch2DaoSvcImpl.java | 63 +- .../search/DatabaseBackedPagingProvider.java | 4 +- .../DeferConceptIndexingRoutingBinder.java | 12 +- .../uhn/fhir/jpa/search/ExceptionService.java | 18 +- .../HapiHSearchAnalysisConfigurers.java | 206 +- .../jpa/search/IStaleSearchDeletingSvc.java | 1 - .../jpa/search/ISynchronousSearchSvc.java | 9 +- .../search/PersistedJpaBundleProvider.java | 160 +- .../PersistedJpaBundleProviderFactory.java | 49 +- ...istedJpaSearchFirstPageBundleProvider.java | 33 +- .../fhir/jpa/search/ResourceSearchUrlSvc.java | 25 +- .../jpa/search/SearchCoordinatorSvcImpl.java | 338 +- .../jpa/search/SearchStrategyFactory.java | 46 +- .../SearchUrlJobMaintenanceSvcImpl.java | 4 +- .../search/StaleSearchDeletingSvcImpl.java | 2 + .../jpa/search/SynchronousSearchSvcImpl.java | 251 +- .../fhir/jpa/search/WarmSearchDefinition.java | 1 - .../TokenAutocompleteAggregation.java | 132 +- .../autocomplete/TokenAutocompleteHit.java | 7 +- .../autocomplete/TokenAutocompleteSearch.java | 43 +- .../ValueSetAutocompleteOptions.java | 33 +- .../ValueSetAutocompleteSearch.java | 16 +- .../jpa/search/autocomplete/package-info.java | 1 - .../fhir/jpa/search/builder/QueryStack.java | 1779 ++++-- .../jpa/search/builder/SearchBuilder.java | 703 ++- .../search/builder/SearchQueryExecutors.java | 10 +- .../StorageInterceptorHooksFacade.java | 22 +- .../models/MissingParameterQueryParams.java | 18 +- .../MissingQueryParameterPredicateParams.java | 9 +- .../models/PredicateBuilderCacheKey.java | 14 +- .../models/PredicateBuilderTypeEnum.java | 10 +- .../BaseJoiningPredicateBuilder.java | 32 +- .../predicate/BasePredicateBuilder.java | 8 +- .../BaseQuantityPredicateBuilder.java | 32 +- .../BaseSearchParamPredicateBuilder.java | 47 +- ...UniqueSearchParameterPredicateBuilder.java | 1 - ...UniqueSearchParameterPredicateBuilder.java | 1 - .../predicate/CoordsPredicateBuilder.java | 46 +- .../predicate/DatePredicateBuilder.java | 82 +- .../predicate/ForcedIdPredicateBuilder.java | 4 - .../predicate/NumberPredicateBuilder.java | 33 +- .../predicate/ParsedLocationParam.java | 19 +- .../QuantityNormalizedPredicateBuilder.java | 3 +- .../predicate/QuantityPredicateBuilder.java | 4 +- .../predicate/ResourceIdPredicateBuilder.java | 26 +- .../ResourceLinkPredicateBuilder.java | 251 +- .../ResourceTablePredicateBuilder.java | 14 +- .../SearchParamPresentPredicateBuilder.java | 8 +- .../predicate/SourcePredicateBuilder.java | 2 - .../predicate/StringPredicateBuilder.java | 126 +- .../predicate/TagPredicateBuilder.java | 18 +- .../predicate/TokenPredicateBuilder.java | 161 +- .../predicate/UriPredicateBuilder.java | 62 +- .../builder/sql/PredicateBuilderFactory.java | 12 +- .../builder/sql/SearchQueryBuilder.java | 230 +- .../builder/sql/SearchQueryExecutor.java | 22 +- .../search/builder/sql/SqlObjectFactory.java | 21 +- .../builder/tasks/SearchContinuationTask.java | 68 +- .../jpa/search/builder/tasks/SearchTask.java | 302 +- .../builder/tasks/SearchTaskParameters.java | 18 +- .../cache/DatabaseSearchCacheSvcImpl.java | 122 +- 
.../DatabaseSearchResultCacheSvcImpl.java | 93 +- .../jpa/search/cache/ISearchCacheSvc.java | 6 +- .../search/cache/ISearchResultCacheSvc.java | 20 +- .../search/cache/SearchCacheStatusEnum.java | 2 - ...asticsearchHibernatePropertiesBuilder.java | 71 +- .../IndexNamePrefixLayoutStrategy.java | 9 +- .../lastn/ElasticsearchRestClientFactory.java | 45 +- .../search/lastn/ElasticsearchSvcImpl.java | 275 +- .../jpa/search/lastn/IElasticsearchSvc.java | 4 +- .../fhir/jpa/search/lastn/json/CodeJson.java | 68 +- .../search/lastn/json/ObservationJson.java | 12 +- .../reindex/IInstanceReindexService.java | 5 +- .../reindex/InstanceReindexServiceImpl.java | 567 +- .../jpa/search/reindex/ResourceReindexer.java | 23 +- .../reindex/ResourceReindexingSvcImpl.java | 98 +- .../jpa/search/warm/CacheWarmingSvcImpl.java | 10 +- .../fhir/jpa/sp/ISearchParamPresenceSvc.java | 1 - .../jpa/sp/SearchParamPresenceSvcImpl.java | 4 +- .../term/BaseTermVersionAdapterSvcImpl.java | 5 +- .../ca/uhn/fhir/jpa/term/ExpansionFilter.java | 24 +- .../jpa/term/IValueSetConceptAccumulator.java | 20 +- .../fhir/jpa/term/IZipContentsHandler.java | 1 - .../fhir/jpa/term/IZipContentsHandlerCsv.java | 2 +- .../fhir/jpa/term/LoadedFileDescriptors.java | 17 +- .../term/TermCodeSystemStorageSvcImpl.java | 294 +- .../uhn/fhir/jpa/term/TermConceptDaoSvc.java | 2 - .../jpa/term/TermConceptMappingSvcImpl.java | 208 +- .../jpa/term/TermDeferredStorageSvcImpl.java | 111 +- .../uhn/fhir/jpa/term/TermLoaderSvcImpl.java | 594 ++- .../ca/uhn/fhir/jpa/term/TermReadSvcImpl.java | 1446 +++-- .../ca/uhn/fhir/jpa/term/TermReadSvcUtil.java | 13 +- .../fhir/jpa/term/TermReindexingSvcImpl.java | 36 +- .../jpa/term/TermVersionAdapterSvcDstu2.java | 1 - .../jpa/term/TermVersionAdapterSvcDstu3.java | 22 +- .../jpa/term/TermVersionAdapterSvcR4.java | 12 +- .../jpa/term/TermVersionAdapterSvcR4B.java | 30 +- .../jpa/term/TermVersionAdapterSvcR5.java | 21 +- .../jpa/term/ValueSetConceptAccumulator.java | 106 +- ...ansionComponentWithConceptAccumulator.java | 75 +- .../term/api/ITermCodeSystemStorageSvc.java | 53 +- .../jpa/term/api/ITermConceptMappingSvc.java | 6 +- .../uhn/fhir/jpa/term/api/ITermReadSvc.java | 49 +- .../fhir/jpa/term/api/ITermReindexingSvc.java | 1 - .../jpa/term/api/ITermVersionAdapterSvc.java | 1 - .../term/api/TermCodeSystemDeleteJobSvc.java | 8 +- .../jpa/term/custom/CustomTerminologySet.java | 39 +- .../fhir/jpa/term/custom/PropertyHandler.java | 12 +- .../term/ex/ExpansionTooCostlyException.java | 1 - .../uhn/fhir/jpa/term/icd10/Icd10Loader.java | 9 +- .../fhir/jpa/term/icd10cm/Icd10CmLoader.java | 16 +- .../fhir/jpa/term/loinc/BaseLoincHandler.java | 70 +- .../BaseLoincTop2000LabResultsHandler.java | 15 +- .../term/loinc/LoincAnswerListHandler.java | 35 +- .../loinc/LoincAnswerListLinkHandler.java | 2 - .../loinc/LoincCodingPropertiesHandler.java | 37 +- .../term/loinc/LoincConsumerNameHandler.java | 11 +- .../loinc/LoincDocumentOntologyHandler.java | 14 +- .../jpa/term/loinc/LoincGroupFileHandler.java | 17 +- .../loinc/LoincGroupTermsFileHandler.java | 12 +- .../uhn/fhir/jpa/term/loinc/LoincHandler.java | 23 +- .../jpa/term/loinc/LoincHierarchyHandler.java | 71 +- .../LoincIeeeMedicalDeviceCodeHandler.java | 46 +- .../LoincImagingDocumentCodeHandler.java | 12 +- .../loinc/LoincLinguisticVariantHandler.java | 66 +- .../loinc/LoincLinguisticVariantsHandler.java | 19 +- .../jpa/term/loinc/LoincMapToHandler.java | 9 +- .../loinc/LoincParentGroupFileHandler.java | 8 +- .../fhir/jpa/term/loinc/LoincPartHandler.java | 17 +- 
.../jpa/term/loinc/LoincPartLinkHandler.java | 19 +- .../LoincPartRelatedCodeMappingHandler.java | 54 +- .../term/loinc/LoincRsnaPlaybookHandler.java | 97 +- .../LoincTop2000LabResultsSiHandler.java | 21 +- .../LoincTop2000LabResultsUsHandler.java | 21 +- .../loinc/LoincUniversalOrderSetHandler.java | 10 +- .../term/loinc/LoincUploadPropertiesEnum.java | 7 +- .../jpa/term/loinc/PartTypeAndPartName.java | 17 +- .../jpa/term/snomedct/SctHandlerConcept.java | 40 +- .../term/snomedct/SctHandlerDescription.java | 60 +- .../term/snomedct/SctHandlerRelationship.java | 120 +- .../ca/uhn/fhir/jpa/util/AddRemoveCount.java | 1 - .../ca/uhn/fhir/jpa/util/BaseIterator.java | 2 +- .../ca/uhn/fhir/jpa/util/CoordCalculator.java | 25 +- .../java/ca/uhn/fhir/jpa/util/Counter.java | 1 - .../jpa/util/JpaHapiTransactionService.java | 1 - .../java/ca/uhn/fhir/jpa/util/LogicUtil.java | 1 - .../ca/uhn/fhir/jpa/util/MethodRequest.java | 8 +- .../ca/uhn/fhir/jpa/util/QueryChunker.java | 2 - .../fhir/jpa/util/QueryParameterUtils.java | 201 +- .../fhir/jpa/util/RegexpGsonBuilderUtil.java | 5 +- .../uhn/fhir/jpa/util/ResourceCountCache.java | 2 - .../jpa/util/ScrollableResultsIterator.java | 2 - .../util/SearchParameterMapCalculator.java | 10 +- .../uhn/fhir/jpa/util/SpringObjectCaster.java | 29 +- ...quireManualActivationInterceptorDstu2.java | 32 +- ...quireManualActivationInterceptorDstu3.java | 29 +- ...sRequireManualActivationInterceptorR4.java | 29 +- .../jpa/util/jsonpatch/JsonPatchUtils.java | 18 +- .../fhir/jpa/util/xmlpatch/XmlPatchUtils.java | 19 +- .../validation/JpaValidationSupportChain.java | 5 +- .../jpa/ips/api/IIpsGenerationStrategy.java | 8 +- .../ca/uhn/fhir/jpa/ips/api/IpsContext.java | 4 +- .../uhn/fhir/jpa/ips/api/IpsSectionEnum.java | 30 +- .../uhn/fhir/jpa/ips/api/SectionRegistry.java | 253 +- .../ips/generator/IpsGeneratorSvcImpl.java | 374 +- .../ips/provider/IpsOperationProvider.java | 35 +- .../DefaultIpsGenerationStrategy.java | 264 +- .../jpa/mdm/broker/MdmMessageHandler.java | 38 +- .../fhir/jpa/mdm/broker/MdmMessageKeySvc.java | 2 +- .../mdm/broker/MdmQueueConsumerLoader.java | 19 +- .../fhir/jpa/mdm/config/MdmCommonConfig.java | 17 +- .../jpa/mdm/config/MdmConsumerConfig.java | 39 +- .../ca/uhn/fhir/jpa/mdm/config/MdmLoader.java | 2 + .../jpa/mdm/config/MdmSubscriptionLoader.java | 52 +- .../uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java | 80 +- .../mdm/svc/GoldenResourceMergerSvcImpl.java | 114 +- .../mdm/svc/GoldenResourceSearchSvcImpl.java | 29 +- .../jpa/mdm/svc/IMdmModelConverterSvc.java | 2 +- .../jpa/mdm/svc/MdmControllerSvcImpl.java | 193 +- .../fhir/jpa/mdm/svc/MdmEidUpdateService.java | 164 +- .../jpa/mdm/svc/MdmLinkCreateSvcImpl.java | 56 +- .../jpa/mdm/svc/MdmLinkQuerySvcImplSvc.java | 64 +- .../uhn/fhir/jpa/mdm/svc/MdmLinkSvcImpl.java | 89 +- .../jpa/mdm/svc/MdmLinkUpdaterSvcImpl.java | 117 +- .../jpa/mdm/svc/MdmMatchFinderSvcImpl.java | 17 +- .../uhn/fhir/jpa/mdm/svc/MdmMatchLinkSvc.java | 127 +- .../jpa/mdm/svc/MdmModelConverterSvcImpl.java | 17 +- .../fhir/jpa/mdm/svc/MdmResourceDaoSvc.java | 43 +- .../jpa/mdm/svc/MdmResourceFilteringSvc.java | 14 +- .../jpa/mdm/svc/MdmSurvivorshipSvcImpl.java | 15 +- .../svc/candidate/BaseCandidateFinder.java | 1 + .../jpa/mdm/svc/candidate/CandidateList.java | 14 +- .../mdm/svc/candidate/CandidateSearcher.java | 9 +- .../svc/candidate/CandidateStrategyEnum.java | 6 +- .../svc/candidate/FindCandidateByEidSvc.java | 25 +- .../candidate/FindCandidateByExampleSvc.java | 47 +- .../svc/candidate/FindCandidateByLinkSvc.java | 2 +- 
.../MatchedGoldenResourceCandidate.java | 3 +- .../MdmCandidateSearchCriteriaBuilderSvc.java | 25 +- .../svc/candidate/MdmCandidateSearchSvc.java | 81 +- .../MdmGoldenResourceFindingSvc.java | 7 +- .../jpa/model/config/PartitionSettings.java | 8 +- .../fhir/jpa/model/cross/IResourceLookup.java | 1 - .../dialect/HapiFhirPostgres94Dialect.java | 3 +- .../dialect/HapiSequenceStyleGenerator.java | 12 +- .../model/dialect/ISequenceValueMassager.java | 4 - .../entity/AuditableBasePartitionable.java | 3 +- .../jpa/model/entity/BaseHasResource.java | 18 +- .../jpa/model/entity/BasePartitionable.java | 10 +- .../jpa/model/entity/BaseResourceIndex.java | 3 +- .../BaseResourceIndexedSearchParam.java | 35 +- ...aseResourceIndexedSearchParamQuantity.java | 45 +- .../ca/uhn/fhir/jpa/model/entity/BaseTag.java | 3 +- .../jpa/model/entity/BinaryStorageEntity.java | 12 +- .../fhir/jpa/model/entity/EnversRevision.java | 12 +- .../uhn/fhir/jpa/model/entity/ForcedId.java | 55 +- .../jpa/model/entity/IBaseResourceEntity.java | 3 +- .../IResourceIndexComboSearchParameter.java | 1 - .../entity/NormalizedQuantitySearchLevel.java | 27 +- .../jpa/model/entity/NpmPackageEntity.java | 23 +- .../model/entity/NpmPackageVersionEntity.java | 49 +- .../NpmPackageVersionResourceEntity.java | 60 +- .../entity/PartitionablePartitionId.java | 28 +- .../model/entity/ResourceEncodingEnum.java | 1 - .../ResourceHistoryProvenanceEntity.java | 34 +- .../model/entity/ResourceHistoryTable.java | 52 +- .../jpa/model/entity/ResourceHistoryTag.java | 30 +- .../ResourceIndexedComboStringUnique.java | 49 +- .../ResourceIndexedComboTokenNonUnique.java | 41 +- .../ResourceIndexedSearchParamCoords.java | 44 +- .../ResourceIndexedSearchParamDate.java | 125 +- .../ResourceIndexedSearchParamNumber.java | 35 +- .../ResourceIndexedSearchParamQuantity.java | 79 +- ...eIndexedSearchParamQuantityNormalized.java | 73 +- .../ResourceIndexedSearchParamString.java | 99 +- .../ResourceIndexedSearchParamToken.java | 128 +- .../entity/ResourceIndexedSearchParamUri.java | 56 +- .../fhir/jpa/model/entity/ResourceLink.java | 68 +- .../model/entity/ResourceSearchUrlEntity.java | 21 +- .../fhir/jpa/model/entity/ResourceTable.java | 206 +- .../fhir/jpa/model/entity/ResourceTag.java | 25 +- .../entity/SearchParamPresentEntity.java | 50 +- .../jpa/model/entity/StorageSettings.java | 27 +- .../fhir/jpa/model/entity/TagDefinition.java | 27 +- .../fhir/jpa/model/entity/TagTypeEnum.java | 11 +- .../jpa/model/search/DateSearchIndexData.java | 3 +- .../search/ExtendedHSearchIndexData.java | 34 +- .../jpa/model/search/HSearchElementCache.java | 2 +- .../jpa/model/search/HSearchIndexWriter.java | 26 +- .../model/search/QuantitySearchIndexData.java | 14 +- .../search/ResourceTableRoutingBinder.java | 14 +- .../search/SearchParamTextPropertyBinder.java | 260 +- .../fhir/jpa/model/util/CodeSystemHash.java | 38 +- .../uhn/fhir/jpa/model/util/JpaConstants.java | 39 +- .../fhir/jpa/model/util/UcumServiceUtil.java | 84 +- .../model/ReadPartitionIdRequestDetails.java | 37 +- .../fhir/jpa/cache/IResourceChangeEvent.java | 2 + .../IResourceChangeListenerRegistry.java | 7 +- .../fhir/jpa/cache/IResourceVersionSvc.java | 5 +- .../fhir/jpa/cache/ResourceChangeEvent.java | 18 +- .../cache/ResourceChangeListenerCache.java | 33 +- .../ResourceChangeListenerCacheFactory.java | 13 +- ...ourceChangeListenerCacheRefresherImpl.java | 37 +- .../ResourceChangeListenerRegistryImpl.java | 33 +- ...urceChangeListenerRegistryInterceptor.java | 2 +- .../fhir/jpa/cache/ResourceChangeResult.java | 16 
+- .../jpa/cache/ResourcePersistentIdMap.java | 3 +- .../fhir/jpa/cache/ResourceVersionMap.java | 1 + .../partition/IRequestPartitionHelperSvc.java | 31 +- .../fhir/jpa/searchparam/MatchUrlService.java | 75 +- .../fhir/jpa/searchparam/ResourceSearch.java | 5 +- .../jpa/searchparam/SearchParameterMap.java | 55 +- .../searchparam/config/SearchParamConfig.java | 22 +- .../extractor/BaseSearchParamExtractor.java | 978 +++- .../CrossPartitionReferenceDetails.java | 12 +- .../extractor/GeopointNormalizer.java | 36 +- .../extractor/IResourceLinkResolver.java | 17 +- .../extractor/ISearchParamExtractor.java | 51 +- .../extractor/LogicalReferenceHelper.java | 3 - .../ResourceIndexedSearchParamComposite.java | 18 +- .../ResourceIndexedSearchParams.java | 217 +- .../extractor/SearchParamExtractorDstu2.java | 12 +- .../extractor/SearchParamExtractorDstu3.java | 10 +- .../extractor/SearchParamExtractorR4.java | 26 +- .../extractor/SearchParamExtractorR4B.java | 25 +- .../extractor/SearchParamExtractorR5.java | 29 +- .../SearchParamExtractorService.java | 558 +- .../matcher/InMemoryMatchResult.java | 12 +- .../matcher/InMemoryResourceMatcher.java | 244 +- .../matcher/IndexedSearchParamExtractor.java | 18 +- .../matcher/SearchParamMatcher.java | 8 +- .../nickname/NicknameInterceptor.java | 6 +- .../SearchableHashMapResourceProvider.java | 17 +- .../ISearchParamRegistryController.java | 3 +- .../registry/JpaSearchParamCache.java | 69 +- .../registry/ReadOnlySearchParamCache.java | 67 +- .../registry/RuntimeSearchParamCache.java | 15 +- .../registry/SearchParamRegistryImpl.java | 84 +- .../SearchParameterCanonicalizer.java | 173 +- .../fhir/jpa/searchparam/retry/Retrier.java | 19 +- .../searchparam/util/Dstu3DistanceHelper.java | 11 +- .../jpa/searchparam/util/JpaParamUtil.java | 75 +- .../util/LastNParameterHelper.java | 43 +- .../util/SearchParameterHelper.java | 7 +- .../models/ProducingChannelParameters.java | 1 - .../SubscriptionChannelRegistry.java | 21 +- .../SubscriptionChannelWithHandlers.java | 6 +- .../SubscriptionDeliveryChannelNamer.java | 6 +- .../SubscriptionDeliveryHandlerFactory.java | 4 +- .../config/SubscriptionProcessorConfig.java | 32 +- .../config/WebsocketDispatcherConfig.java | 7 +- .../BaseSubscriptionDeliverySubscriber.java | 33 +- .../match/deliver/email/EmailDetails.java | 15 +- .../match/deliver/email/EmailSenderImpl.java | 32 +- .../match/deliver/email/IEmailSender.java | 1 - ...SubscriptionDeliveringEmailSubscriber.java | 13 +- ...bscriptionDeliveringMessageSubscriber.java | 49 +- ...scriptionDeliveringRestHookSubscriber.java | 70 +- .../SubscriptionWebsocketHandler.java | 16 +- .../WebsocketConnectionValidator.java | 10 +- ...mpositeInMemoryDaoSubscriptionMatcher.java | 12 +- .../matching/DaoSubscriptionMatcher.java | 13 +- .../matching/InMemorySubscriptionMatcher.java | 10 +- .../MatchingQueueSubscriberLoader.java | 22 +- .../SubscriptionActivatingSubscriber.java | 25 +- .../SubscriptionCriteriaParser.java | 5 +- .../SubscriptionDeliveryRequest.java | 17 +- .../SubscriptionMatchDeliverer.java | 50 +- .../SubscriptionMatchingSubscriber.java | 56 +- .../SubscriptionRegisteringSubscriber.java | 9 +- .../registry/ActiveSubscriptionCache.java | 10 +- .../match/registry/SubscriptionLoader.java | 36 +- .../match/registry/SubscriptionRegistry.java | 45 +- .../fhir/jpa/subscription/package-info.java | 1 - .../config/SubscriptionSubmitterConfig.java | 5 +- .../SubscriptionMatcherInterceptor.java | 36 +- .../SubscriptionQueryValidator.java | 12 +- 
.../SubscriptionSubmitInterceptorLoader.java | 13 +- .../SubscriptionValidatingInterceptor.java | 125 +- .../SubscriptionTriggeringSvcImpl.java | 183 +- .../util/SubscriptionDebugLogInterceptor.java | 58 +- .../subscription/util/SubscriptionUtil.java | 5 +- .../topic/ActiveSubscriptionTopicCache.java | 4 +- .../topic/SubscriptionTopicCanonicalizer.java | 10 +- .../jpa/topic/SubscriptionTopicConfig.java | 6 +- .../SubscriptionTopicDispatchRequest.java | 131 +- .../topic/SubscriptionTopicDispatcher.java | 50 +- .../jpa/topic/SubscriptionTopicLoader.java | 8 +- .../jpa/topic/SubscriptionTopicMatcher.java | 3 +- .../SubscriptionTopicMatchingSubscriber.java | 36 +- .../SubscriptionTopicPayloadBuilder.java | 18 +- ...ubscriptionTopicRegisteringSubscriber.java | 10 +- .../jpa/topic/SubscriptionTopicRegistry.java | 3 +- .../jpa/topic/SubscriptionTopicSupport.java | 3 +- .../fhir/jpa/topic/SubscriptionTopicUtil.java | 13 +- ...ubscriptionTopicValidatingInterceptor.java | 56 +- .../jpa/topic/SubscriptionTriggerMatcher.java | 30 +- .../ISubscriptionTopicFilterMatcher.java | 3 +- .../filter/InMemoryTopicFilterMatcher.java | 6 +- .../filter/SubscriptionTopicFilterUtil.java | 38 +- .../status/INotificationStatusBuilder.java | 3 +- .../status/R4BNotificationStatusBuilder.java | 6 +- .../status/R4NotificationStatusBuilder.java | 18 +- .../status/R5NotificationStatusBuilder.java | 9 +- .../ca/uhn/fhir/jpa/dao/DaoTestUtils.java | 20 +- .../jpa/dao/SimplePartitionTestHelper.java | 8 +- .../ca/uhn/fhir/jpa/dao/TestDaoSearch.java | 58 +- .../embedded/DatabaseInitializerHelper.java | 9 +- .../fhir/jpa/embedded/H2EmbeddedDatabase.java | 7 +- .../HapiEmbeddedDatabasesExtension.java | 18 +- .../embedded/HapiForeignKeyIndexHelper.java | 52 +- .../jpa/embedded/JpaEmbeddedDatabase.java | 3 +- .../jpa/embedded/MsSqlEmbeddedDatabase.java | 12 +- .../jpa/embedded/OracleEmbeddedDatabase.java | 21 +- .../embedded/PostgresEmbeddedDatabase.java | 13 +- ...PartitionInterceptorReadAllPartitions.java | 1 - ...nterceptorReadPartitionsBasedOnScopes.java | 18 +- .../uhn/fhir/jpa/packages/FakeNpmServlet.java | 7 +- .../provider/BaseResourceProviderR4Test.java | 113 +- .../jpa/provider/ServerConfiguration.java | 11 +- .../r4/BaseResourceProviderR4Test.java | 37 +- .../CompositeSearchParameterTestCases.java | 180 +- .../search/MockHapiTransactionService.java | 1 - .../QuantitySearchParameterTestCases.java | 261 +- .../jpa/subscription/CountingInterceptor.java | 6 +- .../jpa/subscription/NotificationServlet.java | 9 +- ...SystemDeleteJobSvcWithUniTestFailures.java | 9 +- .../ca/uhn/fhir/jpa/term/TermTestUtil.java | 79 +- .../fhir/jpa/term/ZipCollectionBuilder.java | 4 +- .../ForceSynchronousSearchInterceptor.java | 1 - .../uhn/fhir/jpa/util/ValueSetTestUtil.java | 6 +- .../jpa/util/WebsocketSubscriptionClient.java | 6 +- .../ca/uhn/fhirtest/FhirContextFactory.java | 7 +- .../ca/uhn/fhirtest/RequestInterceptor.java | 5 +- .../ScheduledSubscriptionDeleter.java | 7 +- .../ca/uhn/fhirtest/TestRestfulServer.java | 65 +- .../ca/uhn/fhirtest/config/CommonConfig.java | 21 +- .../CommonJpaStorageSettingsConfigurer.java | 1 - .../uhn/fhirtest/config/DbServerConfig.java | 4 +- .../FhirTestBalpAuditContextServices.java | 2 +- .../uhn/fhirtest/config/FhirTesterConfig.java | 146 +- .../config/OldAuditEventPurgeService.java | 3 +- .../config/SqlCaptureInterceptor.java | 10 +- .../uhn/fhirtest/config/TestAuditConfig.java | 27 +- .../uhn/fhirtest/config/TestDstu2Config.java | 39 +- .../uhn/fhirtest/config/TestDstu3Config.java | 40 +- 
.../ca/uhn/fhirtest/config/TestR4BConfig.java | 33 +- .../ca/uhn/fhirtest/config/TestR4Config.java | 36 +- .../ca/uhn/fhirtest/config/TestR5Config.java | 33 +- .../interceptor/AnalyticsInterceptor.java | 19 +- .../PublicSecurityInterceptor.java | 71 +- .../interceptor/TdlSecurityInterceptor.java | 40 +- .../fhirtest/joke/HolyFooCowInterceptor.java | 14 +- .../mvc/SubscriptionPlaygroundController.java | 48 +- .../rp/FhirtestBaseResourceProviderDstu2.java | 4 +- .../mdm/api/IGoldenResourceMergerSvc.java | 6 +- .../uhn/fhir/mdm/api/IMdmControllerSvc.java | 79 +- .../java/ca/uhn/fhir/mdm/api/IMdmLink.java | 16 +- .../uhn/fhir/mdm/api/IMdmLinkCreateSvc.java | 8 +- .../ca/uhn/fhir/mdm/api/IMdmLinkQuerySvc.java | 36 +- .../java/ca/uhn/fhir/mdm/api/IMdmLinkSvc.java | 12 +- .../uhn/fhir/mdm/api/IMdmLinkUpdaterSvc.java | 10 +- .../uhn/fhir/mdm/api/IMdmMatchFinderSvc.java | 7 +- .../ca/uhn/fhir/mdm/api/IMdmSubmitSvc.java | 4 +- .../fhir/mdm/api/IMdmSurvivorshipService.java | 3 +- .../ca/uhn/fhir/mdm/api/MdmConstants.java | 14 +- .../mdm/api/MdmHistorySearchParameters.java | 20 +- .../ca/uhn/fhir/mdm/api/MdmLinkEvent.java | 9 +- .../java/ca/uhn/fhir/mdm/api/MdmLinkJson.java | 76 +- .../uhn/fhir/mdm/api/MdmLinkWithRevision.java | 7 +- .../fhir/mdm/api/MdmLinkWithRevisionJson.java | 12 +- .../ca/uhn/fhir/mdm/api/MdmMatchOutcome.java | 39 +- .../mdm/api/MdmQuerySearchParameters.java | 65 +- .../mdm/api/paging/MdmPageLinkBuilder.java | 23 +- .../fhir/mdm/api/paging/MdmPageRequest.java | 15 +- .../java/ca/uhn/fhir/mdm/dao/IMdmLinkDao.java | 19 +- .../interceptor/IMdmStorageInterceptor.java | 3 +- .../MdmSearchExpandingInterceptor.java | 36 +- .../interceptor/MdmStorageInterceptor.java | 51 +- .../ca/uhn/fhir/mdm/model/CanonicalEID.java | 44 +- .../ca/uhn/fhir/mdm/model/MdmPidTuple.java | 4 +- .../fhir/mdm/model/MdmTransactionContext.java | 7 +- .../fhir/mdm/provider/BaseMdmProvider.java | 167 +- .../mdm/provider/MdmControllerHelper.java | 57 +- .../fhir/mdm/provider/MdmControllerUtil.java | 7 +- .../MdmLinkHistoryProviderDstu3Plus.java | 27 +- .../mdm/provider/MdmProviderDstu3Plus.java | 372 +- .../fhir/mdm/provider/MdmProviderLoader.java | 21 +- .../mdm/rules/config/MdmRuleValidator.java | 109 +- .../rules/json/MdmFilterSearchParamJson.java | 8 +- .../json/MdmResourceSearchParamJson.java | 1 + .../uhn/fhir/mdm/rules/json/MdmRulesJson.java | 30 +- .../mdm/rules/json/VectorMatchResultMap.java | 11 +- .../mdm/rules/matcher/MdmMatcherFactory.java | 6 +- .../fieldmatchers/DateTimeWrapper.java | 4 +- .../fieldmatchers/EmptyFieldMatcher.java | 3 +- .../fieldmatchers/HapiStringMatcher.java | 6 +- .../matcher/fieldmatchers/NameMatcher.java | 8 +- .../fieldmatchers/NicknameMatcher.java | 3 +- .../matcher/fieldmatchers/NumericMatcher.java | 9 +- .../fieldmatchers/PhoneticEncoderMatcher.java | 1 - .../rules/matcher/models/MatchTypeEnum.java | 2 - .../rules/similarity/MdmSimilarityEnum.java | 19 +- .../rules/svc/MdmResourceFieldMatcher.java | 27 +- .../mdm/rules/svc/MdmResourceMatcherSvc.java | 53 +- .../mdm/svc/MdmChannelSubmitterSvcImpl.java | 10 +- .../ca/uhn/fhir/mdm/svc/MdmLinkDeleteSvc.java | 17 +- .../ca/uhn/fhir/mdm/svc/MdmLinkExpandSvc.java | 25 +- .../uhn/fhir/mdm/svc/MdmSearchParamSvc.java | 7 +- .../ca/uhn/fhir/mdm/svc/MdmSubmitSvcImpl.java | 46 +- .../java/ca/uhn/fhir/mdm/util/EIDHelper.java | 28 +- .../fhir/mdm/util/GoldenResourceHelper.java | 152 +- .../ca/uhn/fhir/mdm/util/IdentifierUtil.java | 13 +- .../uhn/fhir/mdm/util/MdmPartitionHelper.java | 36 +- .../ca/uhn/fhir/mdm/util/MdmResourceUtil.java | 
42 +- .../ca/uhn/fhir/mdm/util/MessageHelper.java | 51 +- .../java/ca/uhn/fhir/mdm/util/NameUtil.java | 33 +- .../fhir/rest/openapi/OpenApiInterceptor.java | 303 +- .../fhir/rest/api/server/BaseParseAction.java | 1 - .../fhir/rest/api/server/IBundleProvider.java | 14 +- .../rest/api/server/IFhirVersionServer.java | 7 +- .../api/server/IPreResourceAccessDetails.java | 1 - .../api/server/IPreResourceShowDetails.java | 1 - .../rest/api/server/IRestfulResponse.java | 12 +- .../rest/api/server/IServerMethodBinding.java | 4 +- .../fhir/rest/api/server/RequestDetails.java | 18 +- .../fhir/rest/api/server/ResponseDetails.java | 1 - .../server/SimplePreResourceShowDetails.java | 18 +- .../rest/api/server/SystemRequestDetails.java | 9 +- .../server/bulk/BulkExportJobParameters.java | 8 +- .../storage/BaseResourcePersistentId.java | 2 - .../storage/IDeleteExpungeJobSubmitter.java | 7 +- .../server/storage/IResourcePersistentId.java | 1 - .../rest/api/server/storage/NotFoundPid.java | 6 +- .../server/storage/TransactionDetails.java | 23 +- .../server/ApacheProxyAddressStrategy.java | 31 +- .../fhir/rest/server/BasePagingProvider.java | 1 - .../fhir/rest/server/BaseRestfulResponse.java | 1 - .../ca/uhn/fhir/rest/server/Bindings.java | 6 +- .../server/BundleProviderWithNamedPages.java | 6 +- .../uhn/fhir/rest/server/BundleProviders.java | 10 +- .../CommonResourceSupertypeScanner.java | 80 +- .../uhn/fhir/rest/server/ETagSupportEnum.java | 4 +- .../fhir/rest/server/ElementsSupportEnum.java | 1 - .../rest/server/FifoMemoryPagingProvider.java | 1 - .../HardcodedServerAddressStrategy.java | 5 +- .../IDynamicSearchResourceProvider.java | 5 +- .../uhn/fhir/rest/server/IPagingProvider.java | 4 +- .../fhir/rest/server/IResourceProvider.java | 3 +- .../fhir/rest/server/IRestfulServerUtil.java | 25 +- .../rest/server/IServerAddressStrategy.java | 1 - .../server/IServerConformanceProvider.java | 8 +- .../IncomingRequestAddressStrategy.java | 19 +- .../ca/uhn/fhir/rest/server/PageProvider.java | 3 +- .../uhn/fhir/rest/server/ResourceBinding.java | 5 +- .../uhn/fhir/rest/server/RestfulServer.java | 287 +- .../server/RestfulServerConfiguration.java | 100 +- .../fhir/rest/server/RestfulServerUtils.java | 237 +- .../rest/server/ServletRequestTracing.java | 8 +- .../rest/server/SimpleBundleProvider.java | 9 +- .../BanUnsupportedHttpMethodsInterceptor.java | 20 +- .../BaseResponseTerminologyInterceptor.java | 7 +- .../BaseValidatingInterceptor.java | 50 +- ...reResourceSourceFromHeaderInterceptor.java | 1 - .../rest/server/interceptor/ConfigLoader.java | 9 +- .../server/interceptor/CorsInterceptor.java | 15 +- .../ExceptionHandlingInterceptor.java | 119 +- .../FhirPathFilterInterceptor.java | 6 +- .../interceptor/IServerInterceptor.java | 61 +- .../IServerOperationInterceptor.java | 1 - .../InteractionBlockingInterceptor.java | 19 +- .../interceptor/InterceptorAdapter.java | 48 +- .../server/interceptor/InterceptorOrders.java | 1 - .../interceptor/LoggingInterceptor.java | 44 +- .../RequestValidatingInterceptor.java | 14 +- .../ResponseHighlighterInterceptor.java | 199 +- .../ResponseSizeCapturingInterceptor.java | 19 +- ...rminologyDisplayPopulationInterceptor.java | 39 +- ...onseTerminologyTranslationInterceptor.java | 4 +- .../ResponseTerminologyTranslationSvc.java | 53 +- .../ResponseValidatingInterceptor.java | 10 +- .../SearchPreferHandlingInterceptor.java | 28 +- .../ServeMediaResourceRawInterceptor.java | 35 +- .../interceptor/ServerInterceptorUtil.java | 17 +- .../ServerOperationInterceptorAdapter.java | 7 +- 
.../StaticCapabilityStatementInterceptor.java | 8 +- .../ValidationResultEnrichingInterceptor.java | 21 +- .../VerboseLoggingInterceptor.java | 40 +- ...AdditionalCompartmentSearchParameters.java | 18 +- .../auth/AllowedCodeInValueSet.java | 6 +- .../interceptor/auth/AppliesTypeEnum.java | 4 +- .../auth/AuthorizationConstants.java | 1 - .../auth/AuthorizationInterceptor.java | 136 +- .../interceptor/auth/AuthorizedList.java | 16 +- .../server/interceptor/auth/BaseRule.java | 40 +- .../interceptor/auth/ClassifierTypeEnum.java | 4 +- .../interceptor/auth/FhirQueryRuleTester.java | 20 +- .../server/interceptor/auth/IAuthRule.java | 11 +- .../interceptor/auth/IAuthRuleBuilder.java | 7 +- .../auth/IAuthRuleBuilderAppliesTo.java | 1 - .../auth/IAuthRuleBuilderOperation.java | 3 +- .../auth/IAuthRuleBuilderOperationNamed.java | 1 - ...uthRuleBuilderOperationNamedAndScoped.java | 2 - .../auth/IAuthRuleBuilderPatch.java | 1 - .../auth/IAuthRuleBuilderRuleBulkExport.java | 2 - ...thRuleBuilderRuleBulkExportWithTarget.java | 1 - .../auth/IAuthRuleBuilderRuleConditional.java | 5 +- .../auth/IAuthRuleBuilderRuleOp.java | 3 +- .../IAuthRuleBuilderRuleOpClassifier.java | 32 +- ...rRuleOpClassifierFinishedWithTenantId.java | 4 +- .../IAuthRuleBuilderRuleTransactionOp.java | 1 - .../interceptor/auth/IAuthRuleFinished.java | 3 +- .../interceptor/auth/IAuthRuleTester.java | 45 +- .../IAuthorizationSearchParamMatcher.java | 7 +- .../server/interceptor/auth/IRuleApplier.java | 16 +- .../interceptor/auth/OperationRule.java | 23 +- .../server/interceptor/auth/PolicyEnum.java | 2 - .../server/interceptor/auth/RuleBuilder.java | 125 +- .../interceptor/auth/RuleBulkExportImpl.java | 78 +- .../interceptor/auth/RuleImplConditional.java | 23 +- .../server/interceptor/auth/RuleImplOp.java | 449 +- .../interceptor/auth/RuleImplPatch.java | 18 +- .../auth/RuleImplUpdateHistoryRewrite.java | 23 +- .../server/interceptor/auth/RuleOpEnum.java | 6 +- .../auth/SearchNarrowingConsentService.java | 37 +- .../auth/SearchNarrowingInterceptor.java | 147 +- .../SearchParameterAndValueSetRuleImpl.java | 132 +- .../auth/TransactionAppliesToEnum.java | 1 - .../BinarySecurityContextInterceptor.java | 19 +- .../consent/ConsentInterceptor.java | 78 +- .../consent/ConsentOperationStatusEnum.java | 1 - .../interceptor/consent/ConsentOutcome.java | 10 +- .../consent/DelegatingConsentService.java | 13 +- .../consent/IConsentContextServices.java | 1 - .../interceptor/consent/IConsentService.java | 22 +- .../consent/NullConsentContextServices.java | 3 +- .../consent/RuleFilteringConsentService.java | 7 +- .../RequestTenantPartitionInterceptor.java | 2 - .../s13n/StandardizingInterceptor.java | 26 +- .../standardizers/FirstNameStandardizer.java | 13 +- .../s13n/standardizers/IStandardizer.java | 1 - .../standardizers/LastNameStandardizer.java | 1 - .../s13n/standardizers/NoiseCharacters.java | 4 +- .../s13n/standardizers/PhoneStandardizer.java | 1 - .../s13n/standardizers/TextStandardizer.java | 15 +- .../s13n/standardizers/TitleStandardizer.java | 30 +- ...lidationMessageSuppressingInterceptor.java | 5 +- .../address/AddressValidatingInterceptor.java | 38 +- .../address/AddressValidationException.java | 1 - .../address/AddressValidationResult.java | 11 +- .../validation/address/IAddressValidator.java | 13 +- .../address/impl/BaseRestfulValidator.java | 22 +- .../address/impl/LoquateAddressValidator.java | 43 +- .../validation/fields/EmailValidator.java | 4 +- .../fields/FieldValidatingInterceptor.java | 15 +- 
.../validation/fields/IValidator.java | 4 +- .../validation/helpers/AddressHelper.java | 12 +- .../uhn/fhir/rest/server/mail/IMailSvc.java | 10 +- .../uhn/fhir/rest/server/mail/MailConfig.java | 3 +- .../ca/uhn/fhir/rest/server/mail/MailSvc.java | 33 +- .../server/messaging/BaseResourceMessage.java | 7 +- .../BaseResourceModifiedMessage.java | 32 +- .../server/messaging/IResourceMessage.java | 2 - .../messaging/ResourceOperationMessage.java | 27 +- .../messaging/json/BaseJsonMessage.java | 9 +- .../messaging/json/HapiMessageHeaders.java | 5 +- .../json/ResourceOperationJsonMessage.java | 9 +- .../fhir/rest/server/method/AtParameter.java | 1 - .../rest/server/method/BaseMethodBinding.java | 119 +- .../BaseOutcomeReturningMethodBinding.java | 48 +- ...indingWithResourceIdButNoResourceBody.java | 36 +- ...turningMethodBindingWithResourceParam.java | 40 +- .../server/method/BaseQueryParameter.java | 29 +- .../BaseResourceReturningMethodBinding.java | 96 +- .../server/method/ConditionalParamBinder.java | 30 +- .../method/ConformanceMethodBinding.java | 44 +- .../rest/server/method/CountParameter.java | 24 +- .../server/method/CreateMethodBinding.java | 34 +- .../server/method/DeleteMethodBinding.java | 15 +- .../rest/server/method/ElementsParameter.java | 21 +- .../server/method/GraphQLMethodBinding.java | 62 +- .../method/GraphQLQueryBodyParameter.java | 19 +- .../method/GraphQLQueryUrlParameter.java | 21 +- .../server/method/HistoryMethodBinding.java | 22 +- .../fhir/rest/server/method/IParameter.java | 18 +- .../rest/server/method/IRestfulHeader.java | 4 +- .../rest/server/method/IncludeParameter.java | 30 +- .../InterceptorBroadcasterParameter.java | 17 +- .../rest/server/method/MethodMatchEnum.java | 3 +- .../fhir/rest/server/method/MethodUtil.java | 193 +- .../rest/server/method/NullParameter.java | 18 +- .../rest/server/method/OffsetCalculator.java | 1 - .../rest/server/method/OffsetParameter.java | 34 +- .../server/method/OperationMethodBinding.java | 120 +- .../server/method/OperationParameter.java | 110 +- .../rest/server/method/PageMethodBinding.java | 42 +- .../server/method/PatchMethodBinding.java | 39 +- .../server/method/PatchTypeParameter.java | 23 +- .../server/method/RawParamsParameter.java | 47 +- .../rest/server/method/ReadMethodBinding.java | 60 +- .../method/RequestDetailsParameter.java | 18 +- .../rest/server/method/RequestedPage.java | 1 - .../rest/server/method/ResourceParameter.java | 59 +- .../server/method/ResponseBundleBuilder.java | 131 +- .../server/method/ResponseBundleRequest.java | 21 +- .../fhir/rest/server/method/ResponsePage.java | 7 +- .../method/SearchContainedModeParameter.java | 14 +- .../server/method/SearchMethodBinding.java | 83 +- .../rest/server/method/SearchParameter.java | 89 +- .../method/SearchTotalModeParameter.java | 11 +- .../server/method/ServerBaseParamBinder.java | 17 +- .../method/ServletRequestParameter.java | 19 +- .../method/ServletResponseParameter.java | 17 +- .../server/method/SinceOrAtParameter.java | 95 +- .../rest/server/method/SinceParameter.java | 1 - .../rest/server/method/SortParameter.java | 35 +- .../server/method/SummaryEnumParameter.java | 47 +- .../method/TransactionMethodBinding.java | 25 +- .../server/method/TransactionParameter.java | 30 +- .../server/method/UpdateMethodBinding.java | 41 +- .../ValidateMethodBindingDstu2Plus.java | 33 +- .../server/provider/BaseLastNProvider.java | 21 +- .../provider/HashMapResourceProvider.java | 161 +- .../IResourceProviderFactoryObserver.java | 3 +- 
.../server/provider/ProviderConstants.java | 3 + .../provider/ResourceProviderFactory.java | 2 +- .../ServerCapabilityStatementProvider.java | 170 +- .../server/servlet/ServletRequestDetails.java | 20 +- .../servlet/ServletRestfulResponse.java | 19 +- .../servlet/ServletSubRequestDetails.java | 5 +- .../UrlBaseTenantIdentificationStrategy.java | 9 +- ...BaseServerCapabilityStatementProvider.java | 38 +- .../util/CompositeInterceptorBroadcaster.java | 37 +- .../util/FhirContextSearchParamRegistry.java | 16 +- .../server/util/ICachedSearchDetails.java | 1 - .../server/util/ISearchParamRegistry.java | 19 +- .../server/util/ITestingUiClientFactory.java | 7 +- .../server/util/JsonDateDeserializer.java | 1 - .../rest/server/util/JsonDateSerializer.java | 2 - .../fhir/rest/server/util/NarrativeUtil.java | 16 +- .../server/util/ResourceSearchParams.java | 8 +- .../rest/server/util/ServletRequestUtil.java | 13 +- .../subscription/SubscriptionConstants.java | 29 +- .../main/java/ca/uhn/fhir/sl/cache/Cache.java | 21 +- .../ca/uhn/fhir/sl/cache/CacheFactory.java | 11 +- .../ca/uhn/fhir/sl/cache/CacheLoader.java | 2 +- .../ca/uhn/fhir/sl/cache/CacheProvider.java | 10 +- .../ca/uhn/fhir/sl/cache/LoadingCache.java | 7 +- .../sl/cache/caffeine/CacheDelegator.java | 12 +- .../fhir/sl/cache/caffeine/CacheProvider.java | 47 +- .../cache/caffeine/LoadingCacheDelegator.java | 10 +- .../fhir/sl/cache/guava/CacheDelegator.java | 22 +- .../fhir/sl/cache/guava/CacheProvider.java | 43 +- .../sl/cache/guava/LoadingCacheDelegator.java | 24 +- .../autoconfigure/FhirAutoConfiguration.java | 56 +- .../boot/autoconfigure/FhirProperties.java | 108 +- .../FhirRestfulServerCustomizer.java | 10 +- .../SampleApacheRestfulClientApplication.java | 40 +- .../SampleOkHttpRestfulClientApplication.java | 40 +- .../SampleJerseyRestfulServerApplication.java | 19 +- .../provider/PatientResourceProvider.java | 92 +- .../uhn/fhir/jpa/migrate/DriverTypeEnum.java | 18 +- .../jpa/migrate/HapiMigrationException.java | 5 +- .../fhir/jpa/migrate/HapiMigrationLock.java | 21 +- .../jpa/migrate/HapiMigrationStorageSvc.java | 20 +- .../ca/uhn/fhir/jpa/migrate/HapiMigrator.java | 27 +- .../jpa/migrate/IHapiMigrationCallback.java | 1 + .../ca/uhn/fhir/jpa/migrate/JdbcUtils.java | 142 +- .../uhn/fhir/jpa/migrate/MigrationResult.java | 11 +- .../fhir/jpa/migrate/MigrationTaskList.java | 19 +- .../jpa/migrate/MigrationTaskSkipper.java | 12 +- .../uhn/fhir/jpa/migrate/SchemaMigrator.java | 25 +- .../jpa/migrate/dao/HapiMigrationDao.java | 22 +- .../migrate/dao/MigrationQueryBuilder.java | 83 +- .../migrate/entity/HapiMigrationEntity.java | 13 +- .../jpa/migrate/taskdef/AddColumnTask.java | 7 +- .../migrate/taskdef/AddForeignKeyTask.java | 11 +- .../migrate/taskdef/AddIdGeneratorTask.java | 6 +- .../jpa/migrate/taskdef/AddIndexTask.java | 22 +- .../migrate/taskdef/AddTableByColumnTask.java | 6 +- .../jpa/migrate/taskdef/ArbitrarySqlTask.java | 11 +- .../taskdef/BaseColumnCalculatorTask.java | 45 +- .../migrate/taskdef/BaseTableColumnTask.java | 9 +- .../taskdef/BaseTableColumnTypeTask.java | 4 +- .../jpa/migrate/taskdef/BaseTableTask.java | 5 +- .../fhir/jpa/migrate/taskdef/BaseTask.java | 9 +- .../migrate/taskdef/CalculateHashesTask.java | 3 +- .../taskdef/CalculateOrdinalDatesTask.java | 6 +- .../jpa/migrate/taskdef/ColumnTypeEnum.java | 1 - .../ColumnTypeToDriverTypeToSqlType.java | 12 +- .../jpa/migrate/taskdef/DropColumnTask.java | 28 +- .../migrate/taskdef/DropForeignKeyTask.java | 4 +- .../migrate/taskdef/DropIdGeneratorTask.java | 6 +- 
.../jpa/migrate/taskdef/DropIndexTask.java | 52 +- .../jpa/migrate/taskdef/DropTableTask.java | 15 +- .../migrate/taskdef/ExecuteRawSqlTask.java | 1 - .../migrate/taskdef/ForeignKeyContainer.java | 36 +- .../migrate/taskdef/InitializeSchemaTask.java | 23 +- .../jpa/migrate/taskdef/MetadataSource.java | 8 +- ...gratePostgresTextClobToBinaryClobTask.java | 10 +- .../jpa/migrate/taskdef/ModifyColumnTask.java | 66 +- .../jpa/migrate/taskdef/RenameColumnTask.java | 45 +- .../jpa/migrate/taskdef/RenameIndexTask.java | 13 +- .../tasks/SchemaInitializationProvider.java | 21 +- .../migrate/tasks/api/BaseMigrationTasks.java | 18 +- .../fhir/jpa/migrate/tasks/api/Builder.java | 85 +- .../batch2/jobs/config/Batch2JobsConfig.java | 6 +- .../batch2/jobs/config/BatchCommonCtx.java | 4 +- .../jobs/export/BulkDataExportProvider.java | 388 +- .../batch2/jobs/export/BulkExportAppCtx.java | 61 +- .../export/BulkExportCreateReportStep.java | 28 +- .../BulkExportJobParametersValidator.java | 23 +- .../jobs/export/ExpandResourcesStep.java | 86 +- .../jobs/export/FetchResourceIdsStep.java | 31 +- .../batch2/jobs/export/WriteBinaryStep.java | 69 +- .../export/models/BulkExportBinaryFileId.java | 3 +- .../jobs/export/models/BulkExportJobBase.java | 4 +- .../jobs/expunge/DeleteExpungeAppCtx.java | 75 +- .../expunge/DeleteExpungeJobParameters.java | 1 + .../DeleteExpungeJobParametersValidator.java | 17 +- .../DeleteExpungeJobSubmitterImpl.java | 37 +- .../jobs/expunge/DeleteExpungeProvider.java | 41 +- .../jobs/expunge/DeleteExpungeStep.java | 74 +- .../BulkImportParameterValidator.java | 7 +- .../jobs/importpull/BulkImportPullConfig.java | 48 +- .../importpull/FetchPartitionedFilesStep.java | 14 +- .../ReadInResourcesFromFileStep.java | 17 +- .../importpull/WriteBundleForImportStep.java | 25 +- .../jobs/imprt/BulkDataImportProvider.java | 111 +- .../batch2/jobs/imprt/BulkImportAppCtx.java | 24 +- .../jobs/imprt/BulkImportFileServlet.java | 16 +- .../jobs/imprt/BulkImportJobParameters.java | 4 +- .../batch2/jobs/imprt/ConsumeFilesStep.java | 27 +- .../batch2/jobs/imprt/FetchFilesStep.java | 61 +- .../batch2/jobs/imprt/NdJsonFileJson.java | 1 + .../batch2/jobs/imprt/ResourceOrderUtil.java | 123 +- .../batch2/jobs/models/BatchResourceId.java | 6 +- .../batch2/jobs/reindex/ReindexAppCtx.java | 54 +- .../batch2/jobs/reindex/ReindexChunkIds.java | 19 - .../ReindexGenerateRangeChunksStep.java | 22 +- .../jobs/reindex/ReindexJobParameters.java | 23 +- .../ReindexJobParametersValidator.java | 4 +- .../batch2/jobs/reindex/ReindexProvider.java | 71 +- .../fhir/batch2/jobs/reindex/ReindexStep.java | 80 +- .../jobs/reindex/ReindexWarningProcessor.java | 12 +- .../TermCodeSystemJobConfig.java | 114 +- .../DeleteCodeSystemCompletionHandler.java | 3 +- ...DeleteCodeSystemConceptsByVersionStep.java | 14 +- .../DeleteCodeSystemStep.java | 14 +- .../DeleteCodeSystemVersionStep.java | 12 +- .../ReadTermConceptVersionsStep.java | 11 +- ...odeSystemDeleteJobParametersValidator.java | 10 +- ...eteCodeSystemVersionCompletionHandler.java | 3 +- .../DeleteCodeSystemVersionFinalStep.java | 11 +- .../DeleteCodeSystemVersionFirstStep.java | 9 +- ...teCodeSystemVersionParameterValidator.java | 10 +- .../batch2/api/ChunkExecutionDetails.java | 5 +- .../fhir/batch2/api/IFirstJobStepWorker.java | 4 +- .../batch2/api/IJobCompletionHandler.java | 1 - .../uhn/fhir/batch2/api/IJobCoordinator.java | 16 +- .../ca/uhn/fhir/batch2/api/IJobDataSink.java | 1 - .../batch2/api/IJobMaintenanceService.java | 2 +- .../batch2/api/IJobParametersValidator.java | 3 
+- .../uhn/fhir/batch2/api/IJobPersistence.java | 31 +- .../uhn/fhir/batch2/api/IJobStepWorker.java | 3 +- .../fhir/batch2/api/ILastJobStepWorker.java | 4 +- .../fhir/batch2/api/IReductionStepWorker.java | 2 +- .../batch2/api/IWorkChunkPersistence.java | 10 +- .../fhir/batch2/api/JobCompletionDetails.java | 1 - .../batch2/api/JobOperationResultJson.java | 2 + .../api/ReductionStepExecutionDetails.java | 10 +- .../fhir/batch2/api/StepExecutionDetails.java | 9 +- .../ca/uhn/fhir/batch2/api/VoidModel.java | 3 +- .../ca/uhn/fhir/batch2/api/package-info.java | 1 - .../fhir/batch2/config/BaseBatch2Config.java | 71 +- .../batch2/config/Batch2JobRegisterer.java | 13 +- .../fhir/batch2/coordinator/BaseDataSink.java | 6 +- .../batch2/coordinator/FinalStepDataSink.java | 9 +- .../coordinator/JobCoordinatorImpl.java | 79 +- .../fhir/batch2/coordinator/JobDataSink.java | 28 +- .../coordinator/JobDefinitionRegistry.java | 41 +- .../JobParameterJsonValidator.java | 21 +- .../fhir/batch2/coordinator/JobQuerySvc.java | 53 +- .../batch2/coordinator/JobStepExecutor.java | 48 +- .../coordinator/JobStepExecutorFactory.java | 26 +- .../ReductionStepChunkProcessingResponse.java | 14 +- .../coordinator/ReductionStepDataSink.java | 29 +- .../ReductionStepExecutorServiceImpl.java | 130 +- .../fhir/batch2/coordinator/StepExecutor.java | 37 +- .../WorkChannelMessageHandler.java | 170 +- .../coordinator/WorkChunkProcessor.java | 43 +- .../batch2/jobs/chunk/ChunkRangeJson.java | 4 +- .../chunk/ResourceIdListWorkChunkJson.java | 22 +- .../fhir/batch2/jobs/chunk/TypedPidJson.java | 9 +- .../jobs/parameters/IUrlListValidator.java | 2 +- .../parameters/PartitionedJobParameters.java | 1 + .../jobs/parameters/PartitionedUrl.java | 5 +- .../PartitionedUrlListJobParameters.java | 5 +- .../jobs/parameters/UrlListValidator.java | 17 +- .../jobs/parameters/UrlPartitioner.java | 8 +- .../jobs/step/GenerateRangeChunksStep.java | 11 +- .../batch2/jobs/step/IIdChunkProducer.java | 9 +- .../fhir/batch2/jobs/step/LoadIdsStep.java | 19 +- .../PartitionedUrlListIdChunkProducer.java | 18 +- .../batch2/jobs/step/ResourceIdListStep.java | 21 +- .../JobChunkProgressAccumulator.java | 29 +- .../maintenance/JobInstanceProcessor.java | 69 +- .../JobMaintenanceServiceImpl.java | 66 +- .../model/FetchJobInstancesRequest.java | 14 +- .../uhn/fhir/batch2/model/JobDefinition.java | 138 +- .../model/JobDefinitionReductionStep.java | 21 +- .../fhir/batch2/model/JobDefinitionStep.java | 11 +- .../ca/uhn/fhir/batch2/model/JobInstance.java | 52 +- .../batch2/model/JobInstanceStartRequest.java | 11 +- .../uhn/fhir/batch2/model/JobWorkCursor.java | 26 +- .../batch2/model/JobWorkNotification.java | 27 +- .../model/JobWorkNotificationJsonMessage.java | 1 - .../ca/uhn/fhir/batch2/model/StatusEnum.java | 20 +- .../ca/uhn/fhir/batch2/model/WorkChunk.java | 6 +- .../model/WorkChunkCompletionEvent.java | 17 +- .../batch2/model/WorkChunkCreateEvent.java | 38 +- .../uhn/fhir/batch2/model/WorkChunkData.java | 1 - .../batch2/model/WorkChunkErrorEvent.java | 17 +- .../batch2/model/WorkChunkStatusEnum.java | 16 +- .../batch2/progress/InstanceProgress.java | 37 +- .../JobInstanceProgressCalculator.java | 27 +- .../progress/JobInstanceStatusUpdater.java | 20 +- .../ca/uhn/fhir/cr/common/BundleIterable.java | 9 +- .../CodeCacheResourceChangeListener.java | 16 +- .../CqlExceptionHandlingInterceptor.java | 17 +- .../ElmCacheResourceChangeListener.java | 19 +- .../ca/uhn/fhir/cr/common/HapiFhirDal.java | 13 +- .../cr/common/HapiFhirRetrieveProvider.java | 18 +- 
.../cr/common/HapiLibrarySourceProvider.java | 13 +- .../cr/common/HapiTerminologyProvider.java | 47 +- .../uhn/fhir/cr/common/IDaoRegistryUser.java | 20 +- .../common/PreExpandedValidationSupport.java | 20 +- .../java/ca/uhn/fhir/cr/common/Searches.java | 3 +- .../config/BaseClinicalReasoningConfig.java | 116 +- .../fhir/cr/config/BaseRepositoryConfig.java | 2 - .../ca/uhn/fhir/cr/config/CrDstu3Config.java | 4 +- .../ca/uhn/fhir/cr/config/CrProperties.java | 21 +- .../uhn/fhir/cr/config/CrProviderFactory.java | 3 +- .../uhn/fhir/cr/config/CrProviderLoader.java | 8 +- .../ca/uhn/fhir/cr/config/CrR4Config.java | 16 +- .../PreExpandedValidationSupportLoader.java | 6 +- .../fhir/cr/constant/CareCapsConstants.java | 20 +- .../uhn/fhir/cr/constant/HtmlConstants.java | 3 +- .../cr/constant/MeasureReportConstants.java | 19 +- .../ActivityDefinitionOperationsProvider.java | 81 +- .../measure/MeasureOperationsProvider.java | 48 +- .../fhir/cr/dstu3/measure/MeasureService.java | 114 +- .../PlanDefinitionOperationsProvider.java | 89 +- .../QuestionnaireOperationsProvider.java | 99 +- ...estionnaireResponseOperationsProvider.java | 13 +- .../cr/enumeration/CareGapsStatusCode.java | 4 +- .../ActivityDefinitionOperationsProvider.java | 141 +- .../r4/measure/CareGapsOperationProvider.java | 36 +- .../fhir/cr/r4/measure/CareGapsService.java | 380 +- .../cr/r4/measure/ISubmitDataService.java | 3 +- .../r4/measure/MeasureOperationsProvider.java | 49 +- .../fhir/cr/r4/measure/MeasureService.java | 158 +- .../cr/r4/measure/SubmitDataProvider.java | 20 +- .../fhir/cr/r4/measure/SubmitDataService.java | 22 +- .../PlanDefinitionOperationsProvider.java | 345 +- .../QuestionnaireOperationsProvider.java | 194 +- ...estionnaireResponseOperationsProvider.java | 21 +- .../uhn/fhir/cr/repo/BundleProviderUtil.java | 123 +- .../uhn/fhir/cr/repo/HapiFhirRepository.java | 209 +- .../fhir/cr/repo/RequestDetailsCloner.java | 14 +- .../ca/uhn/fhir/cr/repo/SearchConverter.java | 39 +- .../MdmSubmitterInterceptorLoader.java | 8 +- .../fhir/mdm/batch2/LoadGoldenIdsStep.java | 8 +- .../uhn/fhir/mdm/batch2/MdmBatch2Config.java | 12 +- .../batch2/MdmGenerateRangeChunksStep.java | 8 +- .../fhir/mdm/batch2/MdmIdChunkProducer.java | 18 +- .../mdm/batch2/MdmJobDefinitionLoader.java | 7 +- .../fhir/mdm/batch2/clear/MdmClearAppCtx.java | 52 +- .../batch2/clear/MdmClearJobParameters.java | 7 +- .../clear/MdmClearJobParametersValidator.java | 8 +- .../fhir/mdm/batch2/clear/MdmClearStep.java | 57 +- .../MdmInflateAndSubmitResourcesStep.java | 29 +- .../mdm/batch2/submit/MdmSubmitAppCtx.java | 51 +- .../batch2/submit/MdmSubmitJobParameters.java | 4 +- .../MdmSubmitJobParametersValidator.java | 16 +- .../models/JobInstanceFetchRequest.java | 8 +- .../cache/BaseResourceCacheSynchronizer.java | 34 +- ...kenParamFormatInvalidRequestException.java | 6 +- .../ca/uhn/fhir/jpa/api/IDaoRegistry.java | 1 - .../jpa/api/config/JpaStorageSettings.java | 51 +- .../ca/uhn/fhir/jpa/api/dao/DaoRegistry.java | 34 +- .../java/ca/uhn/fhir/jpa/api/dao/IDao.java | 2 - .../fhir/jpa/api/dao/IFhirResourceDao.java | 110 +- .../api/dao/IFhirResourceDaoCodeSystem.java | 38 +- .../api/dao/IFhirResourceDaoComposition.java | 10 +- .../api/dao/IFhirResourceDaoEncounter.java | 16 +- .../api/dao/IFhirResourceDaoObservation.java | 4 +- .../jpa/api/dao/IFhirResourceDaoPatient.java | 20 +- .../dao/IFhirResourceDaoSearchParameter.java | 4 +- .../IFhirResourceDaoStructureDefinition.java | 1 - .../api/dao/IFhirResourceDaoSubscription.java | 4 +- 
.../jpa/api/dao/IFhirResourceDaoValueSet.java | 24 +- .../uhn/fhir/jpa/api/dao/IFhirSystemDao.java | 6 +- .../java/ca/uhn/fhir/jpa/api/dao/IJpaDao.java | 38 +- .../dao/MetadataKeyCurrentlyReindexing.java | 1 - .../jpa/api/dao/MetadataKeyResourcePid.java | 1 - .../api/dao/PatientEverythingParameters.java | 28 +- .../uhn/fhir/jpa/api/dao/ReindexOutcome.java | 1 - .../fhir/jpa/api/dao/ReindexParameters.java | 2 - .../jpa/api/model/BulkExportJobResults.java | 3 +- .../fhir/jpa/api/model/DaoMethodOutcome.java | 1 - .../fhir/jpa/api/model/DeleteConflict.java | 9 +- .../jpa/api/model/DeleteConflictList.java | 7 +- .../jpa/api/model/DeleteMethodOutcome.java | 5 +- .../fhir/jpa/api/model/ExpungeOptions.java | 10 +- .../jpa/api/model/HistoryCountModeEnum.java | 1 - .../jpa/api/model/LazyDaoMethodOutcome.java | 2 - .../api/model/PersistentIdToForcedIdMap.java | 15 +- .../fhir/jpa/api/model/TranslationQuery.java | 40 +- .../jpa/api/model/TranslationRequest.java | 26 +- .../fhir/jpa/api/model/WarmCacheEntry.java | 12 +- .../fhir/jpa/api/pid/BaseResourcePidList.java | 9 +- .../jpa/api/pid/EmptyResourcePidList.java | 6 +- .../api/pid/HomogeneousResourcePidList.java | 10 +- .../fhir/jpa/api/pid/IResourcePidList.java | 2 +- .../jpa/api/pid/MixedResourcePidList.java | 8 +- .../jpa/api/pid/ResourcePidListBuilder.java | 2 +- .../uhn/fhir/jpa/api/svc/IBatch2DaoSvc.java | 10 +- .../fhir/jpa/api/svc/IDeleteExpungeSvc.java | 2 - .../jpa/api/svc/IGoldenResourceSearchSvc.java | 10 +- .../fhir/jpa/api/svc/IIdHelperService.java | 43 +- .../jpa/api/svc/ISearchCoordinatorSvc.java | 21 +- .../ca/uhn/fhir/jpa/api/svc/ISearchSvc.java | 4 +- .../jpa/binary/api/IBinaryStorageSvc.java | 22 +- .../fhir/jpa/binary/api/IBinaryTarget.java | 16 +- .../fhir/jpa/binary/api/StoredDetails.java | 28 +- .../interceptor/BinaryStorageInterceptor.java | 154 +- .../binary/provider/BinaryAccessProvider.java | 106 +- .../binary/svc/BaseBinaryStorageSvcImpl.java | 49 +- .../binary/svc/NullBinaryStorageSvcImpl.java | 10 +- .../FilesystemBinaryStorageSvcImpl.java | 24 +- .../binstore/MemoryBinaryStorageSvcImpl.java | 14 +- .../export/job/BaseResourceToFileWriter.java | 19 - .../export/model/BulkExportResponseJson.java | 15 +- .../model/ExportPIDIteratorParameters.java | 2 +- .../export/svc/BulkExportHelperService.java | 19 +- .../bulk/imprt/api/IBulkDataImportSvc.java | 62 +- .../bulk/imprt/model/ActivateJobResult.java | 3 +- .../imprt/model/BulkImportJobFileJson.java | 3 +- .../bulk/imprt/model/BulkImportJobJson.java | 3 + .../imprt/model/BulkImportJobStatusEnum.java | 2 - .../model/JobFileRowProcessingModeEnum.java | 2 - .../ca/uhn/fhir/jpa/dao/BaseStorageDao.java | 252 +- .../fhir/jpa/dao/BaseStorageResourceDao.java | 146 +- .../jpa/dao/BaseTransactionProcessor.java | 795 ++- .../java/ca/uhn/fhir/jpa/dao/GZipUtil.java | 1 - .../ca/uhn/fhir/jpa/dao/IResultIterator.java | 1 - .../ca/uhn/fhir/jpa/dao/ISearchBuilder.java | 55 +- .../fhir/jpa/dao/IStorageResourceParser.java | 1 - .../ITransactionProcessorVersionAdapter.java | 1 - .../uhn/fhir/jpa/dao/IdSubstitutionMap.java | 15 +- .../fhir/jpa/dao/MatchResourceUrlService.java | 83 +- .../fhir/jpa/dao/SearchBuilderFactory.java | 7 +- .../uhn/fhir/jpa/dao/ThreadPoolFactory.java | 3 +- ...ansactionProcessorVersionAdapterDstu3.java | 15 +- .../jpa/dao/expunge/ExpungeOperation.java | 44 +- .../fhir/jpa/dao/expunge/ExpungeService.java | 34 +- .../dao/expunge/IResourceExpungeService.java | 9 +- .../fhir/jpa/dao/expunge/PartitionRunner.java | 69 +- .../dao/index/DaoResourceLinkResolver.java | 150 +- 
.../TransactionProcessorVersionAdapterR4.java | 16 +- .../jpa/dao/tx/HapiTransactionService.java | 163 +- .../jpa/dao/tx/IHapiTransactionService.java | 13 +- .../SearchParameterDaoValidator.java | 81 +- .../fhir/jpa/delete/DeleteConflictUtil.java | 23 +- .../DaoRegistryGraphQLStorageServices.java | 100 +- .../uhn/fhir/jpa/graphql/GraphQLProvider.java | 66 +- .../PatientIdPartitionInterceptor.java | 86 +- ...questRetryVersionConflictsInterceptor.java | 1 - .../interceptor/validation/BaseTypedRule.java | 1 - .../validation/IRepositoryValidatingRule.java | 7 +- .../jpa/interceptor/validation/IRuleRoot.java | 1 - .../RepositoryValidatingInterceptor.java | 26 +- .../RepositoryValidatingRuleBuilder.java | 31 +- .../validation/RequireValidationRule.java | 38 +- .../validation/RuleDisallowProfile.java | 14 +- .../RuleRequireProfileDeclaration.java | 30 +- .../SearchBuilderLoadIncludesParameters.java | 2 +- .../BaseRequestPartitionHelperSvc.java | 133 +- .../java/ca/uhn/fhir/jpa/patch/FhirPatch.java | 166 +- .../ca/uhn/fhir/jpa/patch/JsonPatchUtils.java | 16 +- .../ca/uhn/fhir/jpa/patch/XmlPatchUtils.java | 17 +- .../fhir/jpa/provider/BaseJpaProvider.java | 19 +- .../jpa/provider/BaseJpaResourceProvider.java | 190 +- .../provider/BaseStorageSystemProvider.java | 41 +- .../SubscriptionTriggeringProvider.java | 40 +- .../uhn/fhir/jpa/search/SearchConstants.java | 3 +- ...rchParamWithInlineReferencesExtractor.java | 47 +- ...rchParamWithInlineReferencesExtractor.java | 3 +- .../config/SearchParamSubmitterConfig.java | 7 +- .../SearchParamSubmitInterceptorLoader.java | 3 +- .../SearchParamValidatingInterceptor.java | 52 +- .../channel/api/BaseChannelSettings.java | 1 - .../channel/api/IChannelFactory.java | 6 +- .../channel/api/IChannelProducer.java | 3 +- .../channel/impl/LinkedBlockingChannel.java | 18 +- .../impl/LinkedBlockingChannelFactory.java | 26 +- .../impl/RetryingMessageHandlerWrapper.java | 11 +- .../SubscriptionChannelFactory.java | 19 +- .../matching/IResourceModifiedConsumer.java | 6 +- .../SubscriptionMatchingStrategy.java | 3 +- .../registry/SubscriptionCanonicalizer.java | 320 +- .../model/CanonicalSubscription.java | 95 +- .../CanonicalSubscriptionChannelType.java | 18 +- .../model/CanonicalTopicSubscription.java | 4 +- .../CanonicalTopicSubscriptionFilter.java | 3 +- .../model/ResourceDeliveryJsonMessage.java | 7 +- .../model/ResourceDeliveryMessage.java | 20 +- .../model/ResourceModifiedJsonMessage.java | 4 +- .../model/ResourceModifiedMessage.java | 29 +- .../ISubscriptionTriggeringSvc.java | 8 +- .../uhn/fhir/jpa/term/UploadStatistics.java | 1 - .../uhn/fhir/jpa/term/api/ITermLoaderSvc.java | 2 - .../DeleteCodeSystemBaseParameters.java | 3 +- .../jpa/util/BaseCaptureQueriesListener.java | 23 +- .../CircularQueueCaptureQueriesListener.java | 113 +- .../CurrentThreadCaptureQueriesListener.java | 8 +- .../uhn/fhir/jpa/util/MemoryCacheService.java | 46 +- .../java/ca/uhn/fhir/jpa/util/SqlQuery.java | 33 +- .../jpa/validation/ValidationSettings.java | 6 +- .../validation/ValidatorPolicyAdvisor.java | 23 +- .../validation/ValidatorResourceFetcher.java | 18 +- .../main/java/ca/uhn/fhir/mdm/log/Logs.java | 3 +- ...ncMemoryQueueBackedFhirClientBalpSink.java | 25 +- .../balp/BalpAuditCaptureInterceptor.java | 227 +- .../interceptor/balp/BalpConstants.java | 9 +- .../interceptor/balp/BalpProfileEnum.java | 99 +- .../interceptor/balp/FhirClientBalpSink.java | 17 +- .../balp/IBalpAuditContextServices.java | 9 +- .../interceptor/balp/IBalpAuditEventSink.java | 1 - 
.../ca/uhn/fhir/util/CanonicalIdentifier.java | 11 +- .../java/ca/uhn/fhir/util/ThreadPoolUtil.java | 21 +- .../dstu2016may/hapi/ctx/FhirDstu2_1.java | 30 +- .../hapi/ctx/FhirServerDstu2_1.java | 1 - .../rest/server/Dstu2_1BundleFactory.java | 34 +- .../server/ServerConformanceProvider.java | 129 +- .../hapi/validation/BaseValidatorBridge.java | 9 +- .../hapi/validation/HapiWorkerContext.java | 84 +- .../ca/uhn/fhir/model/dstu2/FhirDstu2.java | 24 +- .../uhn/fhir/model/dstu2/FhirServerDstu2.java | 1 - .../uhn/fhir/model/dstu2/composite/AgeDt.java | 8 +- .../composite/BoundCodeableConceptDt.java | 34 +- .../model/dstu2/composite/ContainedDt.java | 8 +- .../fhir/model/dstu2/composite/CountDt.java | 7 +- .../model/dstu2/composite/DistanceDt.java | 8 +- .../model/dstu2/composite/DurationDt.java | 7 +- .../fhir/model/dstu2/composite/MoneyDt.java | 45 +- .../model/dstu2/composite/NarrativeDt.java | 92 +- .../dstu2/composite/ResourceReferenceDt.java | 133 +- .../dstu2/composite/SimpleQuantityDt.java | 29 +- .../model/dstu2/resource/BaseResource.java | 38 +- .../provider/dstu2/Dstu2BundleFactory.java | 31 +- .../dstu2/ServerConformanceProvider.java | 102 +- .../hl7/fhir/dstu3/hapi/ctx/FhirDstu3.java | 136 +- .../fhir/dstu3/hapi/ctx/FhirServerDstu3.java | 9 +- .../dstu3/hapi/ctx/HapiWorkerContext.java | 529 +- .../dstu3/hapi/fluentpath/FhirPathDstu3.java | 101 +- .../hapi/rest/server/Dstu3BundleFactory.java | 402 +- .../ServerCapabilityStatementProvider.java | 1097 ++-- .../dstu2hl7org/Dstu2Hl7OrgBundleFactory.java | 410 +- .../fhir/dstu2/hapi/ctx/FhirDstu2Hl7Org.java | 138 +- .../hapi/ctx/FhirServerDstu2Hl7Org2.java | 9 +- .../server/ServerConformanceProvider.java | 823 +-- .../java/org/hl7/fhir/r4/hapi/ctx/FhirR4.java | 24 +- .../hl7/fhir/r4/hapi/ctx/FhirServerR4.java | 9 +- .../fhir/r4/hapi/ctx/HapiWorkerContext.java | 627 ++- .../fhir/r4/hapi/fluentpath/FhirPathR4.java | 159 +- .../r4/hapi/rest/server/R4BundleFactory.java | 416 +- .../rest/server/helper/BatchHelperR4.java | 22 +- .../org/hl7/fhir/r4b/hapi/ctx/FhirR4B.java | 5 +- .../hl7/fhir/r4b/hapi/ctx/FhirServerR4B.java | 1 - .../fhir/r4b/hapi/ctx/HapiWorkerContext.java | 91 +- .../fhir/r4b/hapi/fhirpath/FhirPathR4B.java | 67 +- .../hapi/rest/server/R4BBundleFactory.java | 35 +- .../java/org/hl7/fhir/r5/hapi/ctx/FhirR5.java | 7 +- .../hl7/fhir/r5/hapi/ctx/FhirServerR5.java | 1 - .../fhir/r5/hapi/ctx/HapiWorkerContext.java | 106 +- .../hl7/fhir/r5/hapi/fhirpath/FhirPathR5.java | 15 +- .../r5/hapi/rest/server/R5BundleFactory.java | 35 +- .../jpa/conformance/DateSearchTestCase.java | 69 +- .../fhir/jpa/conformance/package-info.java | 1 - .../AbstractJsonParserErrorHandlerTest.java | 9 +- .../AbstractParserErrorHandlerTest.java | 17 +- .../AbstractXmlParserErrorHandlerTest.java | 3 +- .../auth/OperationRuleTestUtil.java | 18 +- .../SubscriptionTestDataHelper.java | 29 +- .../java/ca/uhn/fhir/to/BaseController.java | 191 +- .../main/java/ca/uhn/fhir/to/Controller.java | 386 +- .../ca/uhn/fhir/to/FhirTesterMvcConfig.java | 4 +- .../java/ca/uhn/fhir/to/TesterConfig.java | 51 +- .../to/client/BearerTokenClientFactory.java | 12 +- .../to/model/BufferResponseInterceptor.java | 5 +- .../ca/uhn/fhir/to/model/HomeRequest.java | 33 +- .../ca/uhn/fhir/to/model/ResourceRequest.java | 2 - .../uhn/fhir/to/model/TransactionRequest.java | 1 - .../uhn/fhir/to/mvc/ToBindingInitializer.java | 1 - .../java/ca/uhn/fhir/to/util/WebUtil.java | 10 +- .../BaseStaticResourceValidationSupport.java | 1 - .../support/BaseValidationSupportWrapper.java | 43 +- 
.../support/CachingValidationSupport.java | 129 +- .../CommonCodeSystemsTerminologyService.java | 134 +- ...ltProfileValidationSupportNpmStrategy.java | 7 +- ...oryTerminologyServerValidationSupport.java | 714 ++- .../support/LocalFileValidationSupport.java | 3 +- .../support/NpmPackageValidationSupport.java | 2 +- .../PrePopulatedValidationSupport.java | 55 +- ...teTerminologyServiceValidationSupport.java | 264 +- .../support/RemoteTerminologyUtil.java | 33 +- .../SnapshotGeneratingValidationSupport.java | 87 +- ...ownCodeSystemWarningValidationSupport.java | 40 +- .../support/ValidationConstants.java | 2 +- .../support/ValidationSupportChain.java | 77 +- .../validator/BaseValidatorBridge.java | 4 +- .../FHIRPathResourceGeneratorR4.java | 1186 ++-- .../validator/FhirDefaultPolicyAdvisor.java | 26 +- .../validator/FhirInstanceValidator.java | 39 +- ...ToHl7OrgDstu2ValidatingSupportWrapper.java | 11 +- .../validator/ProfileKnowledgeWorkerR5.java | 95 +- .../validator/ValidatorWrapper.java | 35 +- .../VersionSpecificWorkerContextWrapper.java | 225 +- .../ca/uhn/fhir/model/dstu2/FhirDstu2.java | 28 +- .../uhn/fhir/model/dstu2/composite/AgeDt.java | 12 +- .../fhir/model/dstu2/composite/CountDt.java | 11 +- .../model/dstu2/composite/DistanceDt.java | 12 +- .../model/dstu2/composite/DurationDt.java | 11 +- .../dstu2/composite/ElementDefinitionDt.java | 4748 ++++++++--------- .../fhir/model/dstu2/composite/MoneyDt.java | 11 +- .../dstu2/composite/SimpleQuantityDt.java | 33 +- .../valueset/StructureDefinitionKindEnum.java | 87 +- .../uhn/fhir/model/dstu3/composite/AgeDt.java | 12 +- .../composite/BoundCodeableConceptDt.java | 40 +- .../dstu3/composite/CodeableConceptDt.java | 153 +- .../fhir/model/dstu3/composite/CodingDt.java | 316 +- .../model/dstu3/composite/ContainedDt.java | 1 - .../fhir/model/dstu3/composite/CountDt.java | 11 +- .../model/dstu3/composite/DistanceDt.java | 12 +- .../model/dstu3/composite/DurationDt.java | 11 +- .../fhir/model/dstu3/composite/MoneyDt.java | 11 +- .../model/dstu3/composite/NarrativeDt.java | 72 +- .../model/dstu3/composite/QuantityDt.java | 369 +- .../dstu3/composite/ResourceReferenceDt.java | 137 +- .../dstu3/composite/SimpleQuantityDt.java | 32 +- .../ca/uhn/fhir/tinder/AbstractGenerator.java | 58 +- .../fhir/tinder/AbstractGeneratorMojo.java | 12 +- .../ca/uhn/fhir/tinder/Configuration.java | 14 +- .../ca/uhn/fhir/tinder/ExamineTestTrace.java | 7 +- .../ca/uhn/fhir/tinder/GeneratorContext.java | 36 +- .../fhir/tinder/ResourceMinimizerMojo.java | 222 +- .../tinder/TinderGenericMultiFileMojo.java | 110 +- .../tinder/TinderGenericSingleFileMojo.java | 126 +- .../fhir/tinder/TinderJpaRestServerMojo.java | 34 +- .../tinder/TinderResourceGeneratorMojo.java | 12 +- .../tinder/TinderSourcesGeneratorMojo.java | 3 +- .../uhn/fhir/tinder/TinderStructuresMojo.java | 47 +- .../ca/uhn/fhir/tinder/ValueSetGenerator.java | 54 +- .../ca/uhn/fhir/tinder/VelocityHelper.java | 38 +- .../VersionPropertyFileGeneratorMojo.java | 85 +- .../fhir/tinder/ant/TinderGeneratorTask.java | 183 +- .../ca/uhn/fhir/tinder/model/AnyChild.java | 2 - .../ca/uhn/fhir/tinder/model/BaseElement.java | 10 +- .../uhn/fhir/tinder/model/BaseRootType.java | 11 +- .../java/ca/uhn/fhir/tinder/model/Child.java | 23 +- .../uhn/fhir/tinder/model/CompartmentDef.java | 1 - .../ca/uhn/fhir/tinder/model/Composite.java | 2 +- .../ca/uhn/fhir/tinder/model/Extension.java | 16 +- .../ca/uhn/fhir/tinder/model/Resource.java | 4 +- .../uhn/fhir/tinder/model/ResourceBlock.java | 18 +- 
.../fhir/tinder/model/ResourceBlockCopy.java | 7 +- .../fhir/tinder/model/SearchParameter.java | 44 +- .../ca/uhn/fhir/tinder/model/SimpleChild.java | 4 +- .../uhn/fhir/tinder/model/SimpleSetter.java | 4 +- .../ca/uhn/fhir/tinder/model/Slicing.java | 1 - .../model/UndeclaredExtensionChild.java | 8 +- .../ca/uhn/fhir/tinder/model/ValueSetTm.java | 26 +- .../tinder/parser/BaseStructureParser.java | 64 +- .../BaseStructureSpreadsheetParser.java | 58 +- .../fhir/tinder/parser/CompartmentParser.java | 61 +- .../DatatypeGeneratorUsingSpreadsheet.java | 32 +- .../parser/ResourceGeneratorUsingModel.java | 40 +- .../ResourceGeneratorUsingSpreadsheet.java | 19 +- .../ca/uhn/fhir/tinder/parser/TargetType.java | 4 +- .../ca/uhn/fhir/tinder/util/SyncUtil.java | 24 +- .../ca/uhn/fhir/tinder/util/XMLUtils.java | 201 +- .../org/hl7/fhir/instance/model/IBase.java | 7 +- .../fhir/instance/model/IBaseResource.java | 14 +- .../org/hl7/fhir/instance/model/IIdType.java | 13 +- pom.xml | 52 +
 2203 files changed, 63312 insertions(+), 43299 deletions(-)
 create mode 100644 .github/workflows/spotless.yml
 create mode 100644 .pre-commit-config.yaml
 delete mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/BaseJpaResourceProviderConceptMapDstu3.java
 delete mode 100644 hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexChunkIds.java
 delete mode 100644 hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BaseResourceToFileWriter.java
diff --git a/.github/workflows/spotless.yml b/.github/workflows/spotless.yml
new file mode 100644
index 00000000000..578df2f90fd
--- /dev/null
+++ b/.github/workflows/spotless.yml
@@ -0,0 +1,33 @@
+name: mvn spotless:check (Formatting)
+
+
+on:
+  pull_request:
+    types: [opened, reopened, synchronize]
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    permissions:
+      pull-requests: write
+    steps:
+      - uses: actions/checkout@v3
+      - name: Set up JDK 17
+        uses: actions/setup-java@v3
+        with:
+          java-version: '17'
+          distribution: 'temurin'
+      - name: spotless:check
+        run: mvn spotless:check
+      - uses: mshick/add-pr-comment@v2
+        if: always()
+        with:
+          message-success: |
+            Formatting check succeeded!
+          message-failure: |
+            **This Pull Request has failed the formatting check**
+
+            Please run `mvn spotless:apply` or `mvn clean install -DskipTests` to fix the formatting issues.
+
+            You can automate this formatting step on the git pre-push hook by installing [pre-commit](https://pre-commit.com/) and then running `pre-commit install --hook-type pre-push`. Formatting will then run automatically whenever you push.
+
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 00000000000..4823ea9b222
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,10 @@
+# See https://pre-commit.com for more information
+# See https://pre-commit.com/hooks.html for more hooks
+# If you wish to automatically format your code before every push, install this hook with the pre-commit framework by running `pre-commit install --hook-type pre-push`
+
+repos:
+- repo: https://github.com/ejba/pre-commit-maven
+  rev: v0.3.3
+  hooks:
+    - id: maven-spotless-apply
+      stages: [pre-push]
diff --git a/HELPWANTED.md b/HELPWANTED.md
index 5ef933eda90..de535c8b0e7 100644
--- a/HELPWANTED.md
+++ b/HELPWANTED.md
@@ -5,4 +5,3 @@ This page is a work in progress! It serves as a place to list potential help a new volunteer could offer. 
* Investigate adding support for FHIR's RDF (Turtle) encoding to HAPI - diff --git a/README.md b/README.md index 5422bc7841b..02050e8db82 100644 --- a/README.md +++ b/README.md @@ -25,7 +25,7 @@ http://hapi.fhir.org/ This project is Open Source, licensed under the Apache Software License 2.0. -Please see [this wiki page][Link-wiki] for information on where to get help with HAPI FHIR. +Please see [this wiki page][Link-wiki] for information on where to get help with HAPI FHIR. Please see [Smile CDR][Link-SmileCDR] for information on commercial support. @@ -43,5 +43,3 @@ Please see [Smile CDR][Link-SmileCDR] for information on commercial support. [Badge-MavenCentral]: https://maven-badges.herokuapp.com/maven-central/ca.uhn.hapi.fhir/hapi-fhir-base/badge.svg [Badge-CodeCov]: https://codecov.io/gh/hapifhir/hapi-fhir/branch/master/graph/badge.svg?token=zHfnKfQB9X [Badge-License]: https://img.shields.io/badge/license-apache%202.0-60C060.svg - - diff --git a/hapi-deployable-pom/pom.xml b/hapi-deployable-pom/pom.xml index d70c31ee289..8c50b645fe9 100644 --- a/hapi-deployable-pom/pom.xml +++ b/hapi-deployable-pom/pom.xml @@ -17,6 +17,12 @@ + + + com.diffplug.spotless + spotless-maven-plugin + true + org.basepom.maven duplicate-finder-maven-plugin diff --git a/hapi-fhir-android/src/main/java/ca/uhn/fhir/android/AndroidMarker.java b/hapi-fhir-android/src/main/java/ca/uhn/fhir/android/AndroidMarker.java index 0d74e1638a2..92d67230b1f 100644 --- a/hapi-fhir-android/src/main/java/ca/uhn/fhir/android/AndroidMarker.java +++ b/hapi-fhir-android/src/main/java/ca/uhn/fhir/android/AndroidMarker.java @@ -23,13 +23,11 @@ import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.okhttp.client.OkHttpRestfulClientFactory; /** - * This class exists in order to ensure that + * This class exists in order to ensure that */ public class AndroidMarker { public static void configureContext(FhirContext theContext) { theContext.setRestfulClientFactory(new OkHttpRestfulClientFactory(theContext)); } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/IHapiBootOrder.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/IHapiBootOrder.java index 179d57656f5..4c518e84c90 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/IHapiBootOrder.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/IHapiBootOrder.java @@ -28,5 +28,4 @@ public interface IHapiBootOrder { int SUBSCRIPTION_MATCHING_CHANNEL_HANDLER = 300; int AFTER_SUBSCRIPTION_INITIALIZED = 310; - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeChildDatatypeDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeChildDatatypeDefinition.java index 824f409127d..3050c5acf5f 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeChildDatatypeDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeChildDatatypeDefinition.java @@ -19,19 +19,18 @@ */ package ca.uhn.fhir.context; +import ca.uhn.fhir.model.api.annotation.Child; +import ca.uhn.fhir.model.api.annotation.Description; +import org.hl7.fhir.instance.model.api.IBase; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.util.Collections; import java.util.Map; import java.util.Set; -import org.hl7.fhir.instance.model.api.IBase; - -import ca.uhn.fhir.model.api.annotation.Child; -import ca.uhn.fhir.model.api.annotation.Description; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - public abstract class BaseRuntimeChildDatatypeDefinition 
extends BaseRuntimeDeclaredChildDefinition { Logger ourLog = LoggerFactory.getLogger(BaseRuntimeChildDatatypeDefinition.class); @@ -39,11 +38,18 @@ public abstract class BaseRuntimeChildDatatypeDefinition extends BaseRuntimeDecl private BaseRuntimeElementDefinition myElementDefinition; - public BaseRuntimeChildDatatypeDefinition(Field theField, String theElementName, Child theChildAnnotation, Description theDescriptionAnnotation, Class theDatatype) { + public BaseRuntimeChildDatatypeDefinition( + Field theField, + String theElementName, + Child theChildAnnotation, + Description theDescriptionAnnotation, + Class theDatatype) { super(theField, theChildAnnotation, theDescriptionAnnotation, theElementName); // should use RuntimeChildAny - assert Modifier.isInterface(theDatatype.getModifiers()) == false : "Type of " + theDatatype + " shouldn't be here"; - assert Modifier.isAbstract(theDatatype.getModifiers()) == false : "Type of " + theDatatype + " shouldn't be here"; + assert Modifier.isInterface(theDatatype.getModifiers()) == false + : "Type of " + theDatatype + " shouldn't be here"; + assert Modifier.isAbstract(theDatatype.getModifiers()) == false + : "Type of " + theDatatype + " shouldn't be here"; myDatatype = theDatatype; } @@ -97,7 +103,9 @@ public abstract class BaseRuntimeChildDatatypeDefinition extends BaseRuntimeDecl } @Override - void sealAndInitialize(FhirContext theContext, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { + void sealAndInitialize( + FhirContext theContext, + Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { myElementDefinition = theClassToElementDefinitions.get(getDatatype()); if (myElementDefinition == null) { myElementDefinition = theContext.getElementDefinition(getDatatype()); @@ -109,5 +117,4 @@ public abstract class BaseRuntimeChildDatatypeDefinition extends BaseRuntimeDecl public String toString() { return getClass().getSimpleName() + "[" + getElementName() + "]"; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeChildDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeChildDefinition.java index d82cf803395..bafc0cce4ce 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeChildDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeChildDefinition.java @@ -61,7 +61,9 @@ public abstract class BaseRuntimeChildDefinition { public abstract boolean isSummary(); - abstract void sealAndInitialize(FhirContext theContext, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions); + abstract void sealAndInitialize( + FhirContext theContext, + Map, BaseRuntimeElementDefinition> theClassToElementDefinitions); @Override public String toString() { @@ -99,13 +101,15 @@ public abstract class BaseRuntimeChildDefinition { } } - BaseRuntimeElementDefinition findResourceReferenceDefinition(Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { - for (Entry, BaseRuntimeElementDefinition> next : theClassToElementDefinitions.entrySet()) { + BaseRuntimeElementDefinition findResourceReferenceDefinition( + Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { + for (Entry, BaseRuntimeElementDefinition> next : + theClassToElementDefinitions.entrySet()) { if (IBaseReference.class.isAssignableFrom(next.getKey())) { return next.getValue(); } } - + // Shouldn't happen throw new IllegalStateException(Msg.code(1692) + "Unable to find reference type"); } diff --git 
a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeDeclaredChildDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeDeclaredChildDefinition.java index d9087fbe965..76059bcc72d 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeDeclaredChildDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeDeclaredChildDefinition.java @@ -46,11 +46,15 @@ public abstract class BaseRuntimeDeclaredChildDefinition extends BaseRuntimeChil private boolean myModifier; private boolean mySummary; - BaseRuntimeDeclaredChildDefinition(Field theField, Child theChildAnnotation, Description theDescriptionAnnotation, String theElementName) throws ConfigurationException { + BaseRuntimeDeclaredChildDefinition( + Field theField, Child theChildAnnotation, Description theDescriptionAnnotation, String theElementName) + throws ConfigurationException { super(); Validate.notNull(theField, "No field specified"); ValidateUtil.isGreaterThanOrEqualTo(theChildAnnotation.min(), 0, "Min must be >= 0"); - Validate.isTrue(theChildAnnotation.max() == -1 || theChildAnnotation.max() >= theChildAnnotation.min(), "Max must be >= Min (unless it is -1 / unlimited)"); + Validate.isTrue( + theChildAnnotation.max() == -1 || theChildAnnotation.max() >= theChildAnnotation.min(), + "Max must be >= Min (unless it is -1 / unlimited)"); Validate.notBlank(theElementName, "Element name must not be blank"); myField = theField; @@ -76,7 +80,6 @@ public abstract class BaseRuntimeDeclaredChildDefinition extends BaseRuntimeChil myAccessor = new FieldPlainAccessor(); myMutator = new FieldPlainMutator(); } - } @Override @@ -147,7 +150,6 @@ public abstract class BaseRuntimeDeclaredChildDefinition extends BaseRuntimeChil } return retVal; } - } protected final class FieldListMutator implements IMutator { @@ -181,10 +183,12 @@ public abstract class BaseRuntimeDeclaredChildDefinition extends BaseRuntimeChil public void remove(IBase theTarget, int theIndex) { List existingList = (List) getFieldValue(theTarget, myField); if (existingList == null) { - throw new IndexOutOfBoundsException(Msg.code(2143) + "Can not remove element at index " + theIndex + " from list - List is null"); + throw new IndexOutOfBoundsException( + Msg.code(2143) + "Can not remove element at index " + theIndex + " from list - List is null"); } if (theIndex >= existingList.size()) { - throw new IndexOutOfBoundsException(Msg.code(2144) + "Can not remove element at index " + theIndex + " from list - List size is " + existingList.size()); + throw new IndexOutOfBoundsException(Msg.code(2144) + "Can not remove element at index " + theIndex + + " from list - List size is " + existingList.size()); } existingList.remove(theIndex); } @@ -202,7 +206,7 @@ public abstract class BaseRuntimeDeclaredChildDefinition extends BaseRuntimeChil @Override public Optional getFirstValueOrNull(IBase theTarget) { - return Optional.ofNullable(((T)getFieldValue(theTarget, myField))); + return Optional.ofNullable(((T) getFieldValue(theTarget, myField))); } } @@ -219,7 +223,9 @@ public abstract class BaseRuntimeDeclaredChildDefinition extends BaseRuntimeChil @Override public void remove(IBase theTarget, int theIndex) { - throw new UnsupportedOperationException(Msg.code(2142) + "Remove by index can only be called on a list-valued field. '" + myField.getName() + "' is a single-valued field."); + throw new UnsupportedOperationException( + Msg.code(2142) + "Remove by index can only be called on a list-valued field. 
'" + myField.getName() + + "' is a single-valued field."); } } @@ -238,5 +244,4 @@ public abstract class BaseRuntimeDeclaredChildDefinition extends BaseRuntimeChil throw new ConfigurationException(Msg.code(1737) + "Failed to get value", e); } } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeElementCompositeDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeElementCompositeDefinition.java index 2063773c4c2..69ed56928e3 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeElementCompositeDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeElementCompositeDefinition.java @@ -69,7 +69,8 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; public abstract class BaseRuntimeElementCompositeDefinition extends BaseRuntimeElementDefinition { - private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseRuntimeElementCompositeDefinition.class); + private static final org.slf4j.Logger ourLog = + org.slf4j.LoggerFactory.getLogger(BaseRuntimeElementCompositeDefinition.class); private final FhirContext myContext; private Map forcedOrder = null; private List myChildren = new ArrayList<>(); @@ -80,7 +81,12 @@ public abstract class BaseRuntimeElementCompositeDefinition ext private volatile SealingStateEnum mySealed = SealingStateEnum.NOT_SEALED; @SuppressWarnings("unchecked") - public BaseRuntimeElementCompositeDefinition(String theName, Class theImplementingClass, boolean theStandardType, FhirContext theContext, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { + public BaseRuntimeElementCompositeDefinition( + String theName, + Class theImplementingClass, + boolean theStandardType, + FhirContext theContext, + Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { super(theName, theImplementingClass, theStandardType); myContext = theContext; @@ -126,7 +132,6 @@ public abstract class BaseRuntimeElementCompositeDefinition ext } } } - } void addChild(BaseRuntimeChildDefinition theNext) { @@ -134,7 +139,8 @@ public abstract class BaseRuntimeElementCompositeDefinition ext throw new NullPointerException(Msg.code(1698)); } if (theNext.getExtensionUrl() != null) { - throw new IllegalArgumentException(Msg.code(1699) + "Shouldn't haven an extension URL, use addExtension instead"); + throw new IllegalArgumentException( + Msg.code(1699) + "Shouldn't haven an extension URL, use addExtension instead"); } myChildren.add(theNext); } @@ -145,11 +151,13 @@ public abstract class BaseRuntimeElementCompositeDefinition ext return myNameToChild.get(theName); } - public BaseRuntimeChildDefinition getChildByNameOrThrowDataFormatException(String theName) throws DataFormatException { + public BaseRuntimeChildDefinition getChildByNameOrThrowDataFormatException(String theName) + throws DataFormatException { validateSealed(); BaseRuntimeChildDefinition retVal = myNameToChild.get(theName); if (retVal == null) { - throw new DataFormatException(Msg.code(1700) + "Unknown child name '" + theName + "' in element " + getName() + " - Valid names are: " + new TreeSet(myNameToChild.keySet())); + throw new DataFormatException(Msg.code(1700) + "Unknown child name '" + theName + "' in element " + + getName() + " - Valid names are: " + new TreeSet(myNameToChild.keySet())); } return retVal; } @@ -160,13 +168,11 @@ public abstract class BaseRuntimeElementCompositeDefinition ext return myChildren; } - public List getChildrenAndExtension() { validateSealed(); return 
myChildrenAndExtensions; } - /** * Has this class been sealed */ @@ -178,12 +184,14 @@ public abstract class BaseRuntimeElementCompositeDefinition ext void populateScanAlso(Set> theScanAlso) { for (ScannedField next : myScannedFields) { if (IBase.class.isAssignableFrom(next.getElementType())) { - if (next.getElementType().isInterface() == false && Modifier.isAbstract(next.getElementType().getModifiers()) == false) { + if (next.getElementType().isInterface() == false + && Modifier.isAbstract(next.getElementType().getModifiers()) == false) { theScanAlso.add((Class) next.getElementType()); } } for (Class nextChildType : next.getChoiceTypes()) { - if (nextChildType.isInterface() == false && Modifier.isAbstract(nextChildType.getModifiers()) == false) { + if (nextChildType.isInterface() == false + && Modifier.isAbstract(nextChildType.getModifiers()) == false) { theScanAlso.add(nextChildType); } } @@ -233,17 +241,20 @@ public abstract class BaseRuntimeElementCompositeDefinition ext this.addExtension((RuntimeChildDeclaredExtensionDefinition) nextExt); } } - } @SuppressWarnings("unchecked") - private void scanCompositeElementForChildren(Set elementNames, TreeMap theOrderToElementDef, - TreeMap theOrderToExtensionDef) { + private void scanCompositeElementForChildren( + Set elementNames, + TreeMap theOrderToElementDef, + TreeMap theOrderToExtensionDef) { int baseElementOrder = 0; for (ScannedField next : myScannedFields) { if (next.isFirstFieldInNewClass()) { - baseElementOrder = theOrderToElementDef.isEmpty() ? 0 : theOrderToElementDef.lastEntry().getKey() + 1; + baseElementOrder = theOrderToElementDef.isEmpty() + ? 0 + : theOrderToElementDef.lastEntry().getKey() + 1; } Class declaringClass = next.getField().getDeclaringClass(); @@ -281,8 +292,12 @@ public abstract class BaseRuntimeElementCompositeDefinition ext } } if (order == Child.REPLACE_PARENT) { - throw new ConfigurationException(Msg.code(1701) + "Field " + nextField.getName() + "' on target type " + declaringClass.getSimpleName() + " has order() of REPLACE_PARENT (" + Child.REPLACE_PARENT - + ") but no parent element with extension URL " + extensionAttr.url() + " could be found on type " + nextField.getDeclaringClass().getSimpleName()); + throw new ConfigurationException( + Msg.code(1701) + "Field " + nextField.getName() + "' on target type " + + declaringClass.getSimpleName() + " has order() of REPLACE_PARENT (" + + Child.REPLACE_PARENT + ") but no parent element with extension URL " + + extensionAttr.url() + " could be found on type " + + nextField.getDeclaringClass().getSimpleName()); } } else { @@ -308,16 +323,18 @@ public abstract class BaseRuntimeElementCompositeDefinition ext } } if (order == Child.REPLACE_PARENT) { - throw new ConfigurationException(Msg.code(1702) + "Field " + nextField.getName() + "' on target type " + declaringClass.getSimpleName() + " has order() of REPLACE_PARENT (" + Child.REPLACE_PARENT - + ") but no parent element with name " + elementName + " could be found on type " + nextField.getDeclaringClass().getSimpleName()); + throw new ConfigurationException(Msg.code(1702) + "Field " + nextField.getName() + + "' on target type " + declaringClass.getSimpleName() + + " has order() of REPLACE_PARENT (" + Child.REPLACE_PARENT + + ") but no parent element with name " + elementName + " could be found on type " + + nextField.getDeclaringClass().getSimpleName()); } - } - } if (order < 0 && order != Child.ORDER_UNKNOWN) { - throw new ConfigurationException(Msg.code(1703) + "Invalid order '" + order + "' on @Child for field '" 
+ nextField.getName() + "' on target type: " + declaringClass); + throw new ConfigurationException(Msg.code(1703) + "Invalid order '" + order + "' on @Child for field '" + + nextField.getName() + "' on target type: " + declaringClass); } if (order != Child.ORDER_UNKNOWN && !orderIsReplaceParent) { @@ -339,34 +356,49 @@ public abstract class BaseRuntimeElementCompositeDefinition ext List> choiceTypes = next.getChoiceTypes(); if (orderMap.containsKey(order)) { - throw new ConfigurationException(Msg.code(1704) + "Detected duplicate field order '" + childAnnotation.order() + "' for element named '" + elementName + "' in type '" + declaringClass.getCanonicalName() + "' - Already had: " + orderMap.get(order).getElementName()); + throw new ConfigurationException(Msg.code(1704) + "Detected duplicate field order '" + + childAnnotation.order() + "' for element named '" + elementName + "' in type '" + + declaringClass.getCanonicalName() + "' - Already had: " + + orderMap.get(order).getElementName()); } if (elementNames.contains(elementName)) { - throw new ConfigurationException(Msg.code(1705) + "Detected duplicate field name '" + elementName + "' in type '" + declaringClass.getCanonicalName() + "'"); + throw new ConfigurationException(Msg.code(1705) + "Detected duplicate field name '" + elementName + + "' in type '" + declaringClass.getCanonicalName() + "'"); } Class nextElementType = next.getElementType(); BaseRuntimeDeclaredChildDefinition def; if (childAnnotation.name().equals("extension") && IBaseExtension.class.isAssignableFrom(nextElementType)) { - def = new RuntimeChildExtension(nextField, childAnnotation.name(), childAnnotation, descriptionAnnotation); - } else if (childAnnotation.name().equals("modifierExtension") && IBaseExtension.class.isAssignableFrom(nextElementType)) { - def = new RuntimeChildExtension(nextField, childAnnotation.name(), childAnnotation, descriptionAnnotation); - } else if (BaseContainedDt.class.isAssignableFrom(nextElementType) || (childAnnotation.name().equals("contained") && IBaseResource.class.isAssignableFrom(nextElementType))) { + def = new RuntimeChildExtension( + nextField, childAnnotation.name(), childAnnotation, descriptionAnnotation); + } else if (childAnnotation.name().equals("modifierExtension") + && IBaseExtension.class.isAssignableFrom(nextElementType)) { + def = new RuntimeChildExtension( + nextField, childAnnotation.name(), childAnnotation, descriptionAnnotation); + } else if (BaseContainedDt.class.isAssignableFrom(nextElementType) + || (childAnnotation.name().equals("contained") + && IBaseResource.class.isAssignableFrom(nextElementType))) { /* * Child is contained resources */ - def = new RuntimeChildContainedResources(nextField, childAnnotation, descriptionAnnotation, elementName); - } else if (IAnyResource.class.isAssignableFrom(nextElementType) || IResource.class.equals(nextElementType)) { + def = new RuntimeChildContainedResources( + nextField, childAnnotation, descriptionAnnotation, elementName); + } else if (IAnyResource.class.isAssignableFrom(nextElementType) + || IResource.class.equals(nextElementType)) { /* * Child is a resource as a direct child, as in Bundle.entry.resource */ def = new RuntimeChildDirectResource(nextField, childAnnotation, descriptionAnnotation, elementName); } else { childIsChoiceType |= choiceTypes.size() > 1; - if (extensionAttr == null && childIsChoiceType && !BaseResourceReferenceDt.class.isAssignableFrom(nextElementType) && !IBaseReference.class.isAssignableFrom(nextElementType)) { - def = new 
RuntimeChildChoiceDefinition(nextField, elementName, childAnnotation, descriptionAnnotation, choiceTypes); + if (extensionAttr == null + && childIsChoiceType + && !BaseResourceReferenceDt.class.isAssignableFrom(nextElementType) + && !IBaseReference.class.isAssignableFrom(nextElementType)) { + def = new RuntimeChildChoiceDefinition( + nextField, elementName, childAnnotation, descriptionAnnotation, choiceTypes); } else if (extensionAttr != null) { /* * Child is an extension @@ -374,72 +406,124 @@ public abstract class BaseRuntimeElementCompositeDefinition ext Class et = (Class) nextElementType; Object binder = null; - if (BoundCodeDt.class.isAssignableFrom(nextElementType) || IBoundCodeableConcept.class.isAssignableFrom(nextElementType)) { + if (BoundCodeDt.class.isAssignableFrom(nextElementType) + || IBoundCodeableConcept.class.isAssignableFrom(nextElementType)) { binder = ModelScanner.getBoundCodeBinder(nextField); } - def = new RuntimeChildDeclaredExtensionDefinition(nextField, childAnnotation, descriptionAnnotation, extensionAttr, elementName, extensionAttr.url(), et, binder); + def = new RuntimeChildDeclaredExtensionDefinition( + nextField, + childAnnotation, + descriptionAnnotation, + extensionAttr, + elementName, + extensionAttr.url(), + et, + binder); if (IBaseEnumeration.class.isAssignableFrom(nextElementType)) { - ((RuntimeChildDeclaredExtensionDefinition) def).setEnumerationType(ReflectionUtil.getGenericCollectionTypeOfFieldWithSecondOrderForList(nextField)); + ((RuntimeChildDeclaredExtensionDefinition) def) + .setEnumerationType( + ReflectionUtil.getGenericCollectionTypeOfFieldWithSecondOrderForList( + nextField)); } - } else if (BaseResourceReferenceDt.class.isAssignableFrom(nextElementType) || IBaseReference.class.isAssignableFrom(nextElementType)) { + } else if (BaseResourceReferenceDt.class.isAssignableFrom(nextElementType) + || IBaseReference.class.isAssignableFrom(nextElementType)) { /* * Child is a resource reference */ List> refTypesList = new ArrayList<>(); for (Class nextType : childAnnotation.type()) { if (IBaseReference.class.isAssignableFrom(nextType)) { - refTypesList.add(myContext.getVersion().getVersion().isRi() ? IAnyResource.class : IResource.class); + refTypesList.add( + myContext.getVersion().getVersion().isRi() ? 
IAnyResource.class : IResource.class); continue; } else if (IBaseResource.class.isAssignableFrom(nextType) == false) { - throw new ConfigurationException(Msg.code(1706) + "Field '" + nextField.getName() + "' in class '" + nextField.getDeclaringClass().getCanonicalName() + "' is of type " + BaseResourceReferenceDt.class + " but contains a non-resource type: " + nextType.getCanonicalName()); + throw new ConfigurationException( + Msg.code(1706) + "Field '" + nextField.getName() + "' in class '" + + nextField.getDeclaringClass().getCanonicalName() + "' is of type " + + BaseResourceReferenceDt.class + " but contains a non-resource type: " + + nextType.getCanonicalName()); } refTypesList.add((Class) nextType); } - def = new RuntimeChildResourceDefinition(nextField, elementName, childAnnotation, descriptionAnnotation, refTypesList); + def = new RuntimeChildResourceDefinition( + nextField, elementName, childAnnotation, descriptionAnnotation, refTypesList); - } else if (IResourceBlock.class.isAssignableFrom(nextElementType) || IBaseBackboneElement.class.isAssignableFrom(nextElementType) - || IBaseDatatypeElement.class.isAssignableFrom(nextElementType)) { + } else if (IResourceBlock.class.isAssignableFrom(nextElementType) + || IBaseBackboneElement.class.isAssignableFrom(nextElementType) + || IBaseDatatypeElement.class.isAssignableFrom(nextElementType)) { /* * Child is a resource block (i.e. a sub-tag within a resource) TODO: do these have a better name according to HL7? */ Class blockDef = (Class) nextElementType; - def = new RuntimeChildResourceBlockDefinition(myContext, nextField, childAnnotation, descriptionAnnotation, elementName, blockDef); - } else if (IDatatype.class.equals(nextElementType) || IElement.class.equals(nextElementType) || "Type".equals(nextElementType.getSimpleName()) - || IBaseDatatype.class.equals(nextElementType)) { + def = new RuntimeChildResourceBlockDefinition( + myContext, nextField, childAnnotation, descriptionAnnotation, elementName, blockDef); + } else if (IDatatype.class.equals(nextElementType) + || IElement.class.equals(nextElementType) + || "Type".equals(nextElementType.getSimpleName()) + || IBaseDatatype.class.equals(nextElementType)) { def = new RuntimeChildAny(nextField, elementName, childAnnotation, descriptionAnnotation); - } else if (IDatatype.class.isAssignableFrom(nextElementType) || IPrimitiveType.class.isAssignableFrom(nextElementType) || ICompositeType.class.isAssignableFrom(nextElementType) - || IBaseDatatype.class.isAssignableFrom(nextElementType) || IBaseExtension.class.isAssignableFrom(nextElementType)) { + } else if (IDatatype.class.isAssignableFrom(nextElementType) + || IPrimitiveType.class.isAssignableFrom(nextElementType) + || ICompositeType.class.isAssignableFrom(nextElementType) + || IBaseDatatype.class.isAssignableFrom(nextElementType) + || IBaseExtension.class.isAssignableFrom(nextElementType)) { Class nextDatatype = (Class) nextElementType; if (IPrimitiveType.class.isAssignableFrom(nextElementType)) { if (nextElementType.equals(BoundCodeDt.class)) { IValueSetEnumBinder> binder = ModelScanner.getBoundCodeBinder(nextField); Class> enumType = ModelScanner.determineEnumTypeForBoundField(nextField); - def = new RuntimeChildPrimitiveBoundCodeDatatypeDefinition(nextField, elementName, childAnnotation, descriptionAnnotation, nextDatatype, binder, enumType); + def = new RuntimeChildPrimitiveBoundCodeDatatypeDefinition( + nextField, + elementName, + childAnnotation, + descriptionAnnotation, + nextDatatype, + binder, + enumType); } else if 
(IBaseEnumeration.class.isAssignableFrom(nextElementType)) { - Class> binderType = ModelScanner.determineEnumTypeForBoundField(nextField); - def = new RuntimeChildPrimitiveEnumerationDatatypeDefinition(nextField, elementName, childAnnotation, descriptionAnnotation, nextDatatype, binderType); + Class> binderType = + ModelScanner.determineEnumTypeForBoundField(nextField); + def = new RuntimeChildPrimitiveEnumerationDatatypeDefinition( + nextField, + elementName, + childAnnotation, + descriptionAnnotation, + nextDatatype, + binderType); } else { - def = new RuntimeChildPrimitiveDatatypeDefinition(nextField, elementName, descriptionAnnotation, childAnnotation, nextDatatype); + def = new RuntimeChildPrimitiveDatatypeDefinition( + nextField, elementName, descriptionAnnotation, childAnnotation, nextDatatype); } } else { if (IBoundCodeableConcept.class.isAssignableFrom(nextElementType)) { IValueSetEnumBinder> binder = ModelScanner.getBoundCodeBinder(nextField); Class> enumType = ModelScanner.determineEnumTypeForBoundField(nextField); - def = new RuntimeChildCompositeBoundDatatypeDefinition(nextField, elementName, childAnnotation, descriptionAnnotation, nextDatatype, binder, enumType); - } else if (BaseNarrativeDt.class.isAssignableFrom(nextElementType) || INarrative.class.isAssignableFrom(nextElementType)) { - def = new RuntimeChildNarrativeDefinition(nextField, elementName, childAnnotation, descriptionAnnotation, nextDatatype); + def = new RuntimeChildCompositeBoundDatatypeDefinition( + nextField, + elementName, + childAnnotation, + descriptionAnnotation, + nextDatatype, + binder, + enumType); + } else if (BaseNarrativeDt.class.isAssignableFrom(nextElementType) + || INarrative.class.isAssignableFrom(nextElementType)) { + def = new RuntimeChildNarrativeDefinition( + nextField, elementName, childAnnotation, descriptionAnnotation, nextDatatype); } else { - def = new RuntimeChildCompositeDatatypeDefinition(nextField, elementName, childAnnotation, descriptionAnnotation, nextDatatype); + def = new RuntimeChildCompositeDatatypeDefinition( + nextField, elementName, childAnnotation, descriptionAnnotation, nextDatatype); } } } else { - throw new ConfigurationException(Msg.code(1707) + "Field '" + elementName + "' in type '" + declaringClass.getCanonicalName() + "' is not a valid child type: " + nextElementType); + throw new ConfigurationException(Msg.code(1707) + "Field '" + elementName + "' in type '" + + declaringClass.getCanonicalName() + "' is not a valid child type: " + nextElementType); } Binding bindingAnnotation = ModelScanner.pullAnnotation(nextField, Binding.class); @@ -448,7 +532,6 @@ public abstract class BaseRuntimeElementCompositeDefinition ext def.setBindingValueSet(bindingAnnotation.valueSet()); } } - } def.setReplacedParentDefinition(replacedParent); @@ -458,7 +541,9 @@ public abstract class BaseRuntimeElementCompositeDefinition ext } @Override - void sealAndInitialize(FhirContext theContext, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { + void sealAndInitialize( + FhirContext theContext, + Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { if (mySealed == SealingStateEnum.SEALED) { return; } @@ -485,7 +570,8 @@ public abstract class BaseRuntimeElementCompositeDefinition ext } for (String nextName : next.getValidChildNames()) { if (myNameToChild.containsKey(nextName)) { - throw new ConfigurationException(Msg.code(1708) + "Duplicate child name[" + nextName + "] in Element[" + getName() + "]"); + throw new ConfigurationException(Msg.code(1708) + 
"Duplicate child name[" + nextName + + "] in Element[" + getName() + "]"); } myNameToChild.put(nextName, next); } @@ -538,7 +624,6 @@ public abstract class BaseRuntimeElementCompositeDefinition ext } } - @Override protected void validateSealed() { if (mySealed != SealingStateEnum.SEALED) { @@ -610,7 +695,8 @@ public abstract class BaseRuntimeElementCompositeDefinition ext } } - private static int findIndex(List theChildren, String theName, boolean theDefaultAtEnd) { + private static int findIndex( + List theChildren, String theName, boolean theDefaultAtEnd) { int index = theDefaultAtEnd ? theChildren.size() : -1; for (ListIterator iter = theChildren.listIterator(); iter.hasNext(); ) { if (iter.next().getElementName().equals(theName)) { @@ -620,5 +706,4 @@ public abstract class BaseRuntimeElementCompositeDefinition ext } return index; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeElementDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeElementDefinition.java index 9fedc06e060..76ece1131bf 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeElementDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeElementDefinition.java @@ -25,14 +25,14 @@ import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBase; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.lang.reflect.Constructor; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; public abstract class BaseRuntimeElementDefinition { @@ -47,7 +47,8 @@ public abstract class BaseRuntimeElementDefinition { private Map myUrlToExtension = new HashMap<>(); private BaseRuntimeElementDefinition myRootParentDefinition; - public BaseRuntimeElementDefinition(String theName, Class theImplementingClass, boolean theStandardType) { + public BaseRuntimeElementDefinition( + String theName, Class theImplementingClass, boolean theStandardType) { assert StringUtils.isNotBlank(theName); assert theImplementingClass != null; @@ -57,7 +58,6 @@ public abstract class BaseRuntimeElementDefinition { name = name.substring(0, name.length() - 2); } - myName = name; myStandardType = theStandardType; myImplementingClass = theImplementingClass; @@ -100,7 +100,8 @@ public abstract class BaseRuntimeElementDefinition { } } if (retVal == null) { - throw new ConfigurationException(Msg.code(1695) + "Class " + getImplementingClass() + " has no constructor with a single argument of type " + argumentType); + throw new ConfigurationException(Msg.code(1695) + "Class " + getImplementingClass() + + " has no constructor with a single argument of type " + argumentType); } myConstructors.put(argumentType, retVal); } @@ -110,12 +111,15 @@ public abstract class BaseRuntimeElementDefinition { /** * @return Returns null if none */ - public RuntimeChildDeclaredExtensionDefinition getDeclaredExtension(String theExtensionUrl, final String serverBaseUrl) { + public RuntimeChildDeclaredExtensionDefinition getDeclaredExtension( + String theExtensionUrl, final String serverBaseUrl) { validateSealed(); RuntimeChildDeclaredExtensionDefinition definition = myUrlToExtension.get(theExtensionUrl); if (definition == null && StringUtils.isNotBlank(serverBaseUrl)) { for (final Map.Entry entry : myUrlToExtension.entrySet()) { - final String key = 
(!UrlUtil.isValid(entry.getKey()) && StringUtils.isNotBlank(serverBaseUrl)) ? serverBaseUrl + entry.getKey() : entry.getKey(); + final String key = (!UrlUtil.isValid(entry.getKey()) && StringUtils.isNotBlank(serverBaseUrl)) + ? serverBaseUrl + entry.getKey() + : entry.getKey(); if (key.equals(theExtensionUrl)) { definition = entry.getValue(); break; @@ -173,7 +177,10 @@ public abstract class BaseRuntimeElementDefinition { return getConstructor(theArgument).newInstance(theArgument); } catch (Exception e) { - throw new ConfigurationException(Msg.code(1696) + "Failed to instantiate type:" + getImplementingClass().getName(), e); + throw new ConfigurationException( + Msg.code(1696) + "Failed to instantiate type:" + + getImplementingClass().getName(), + e); } } @@ -187,7 +194,9 @@ public abstract class BaseRuntimeElementDefinition { * * @param theContext TODO */ - void sealAndInitialize(FhirContext theContext, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { + void sealAndInitialize( + FhirContext theContext, + Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { for (BaseRuntimeChildDefinition next : myExtensions) { next.sealAndInitialize(theContext, theClassToElementDefinitions); } @@ -195,7 +204,8 @@ public abstract class BaseRuntimeElementDefinition { for (RuntimeChildDeclaredExtensionDefinition next : myExtensions) { String extUrl = next.getExtensionUrl(); if (myUrlToExtension.containsKey(extUrl)) { - throw new ConfigurationException(Msg.code(1697) + "Duplicate extension URL[" + extUrl + "] in Element[" + getName() + "]"); + throw new ConfigurationException( + Msg.code(1697) + "Duplicate extension URL[" + extUrl + "] in Element[" + getName() + "]"); } myUrlToExtension.put(extUrl, next); if (next.isModifier()) { @@ -203,7 +213,6 @@ public abstract class BaseRuntimeElementDefinition { } else { myExtensionsNonModifier.add(next); } - } myExtensions = Collections.unmodifiableList(myExtensions); @@ -216,12 +225,12 @@ public abstract class BaseRuntimeElementDefinition { } parent = parent.getSuperclass(); } while (!parent.equals(Object.class)); - } @Override public String toString() { - return getClass().getSimpleName() + "[" + getName() + ", " + getImplementingClass().getSimpleName() + "]"; + return getClass().getSimpleName() + "[" + getName() + ", " + + getImplementingClass().getSimpleName() + "]"; } protected void validateSealed() { @@ -245,7 +254,8 @@ public abstract class BaseRuntimeElementDefinition { /** * HAPI structure style. 
*/ - CONTAINED_RESOURCES, EXTENSION_DECLARED, + CONTAINED_RESOURCES, + EXTENSION_DECLARED, ID_DATATYPE, PRIMITIVE_DATATYPE, /** @@ -260,7 +270,5 @@ public abstract class BaseRuntimeElementDefinition { RESOURCE_BLOCK, UNDECL_EXT, - } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/ComboSearchParamType.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/ComboSearchParamType.java index 19aaafbd6bb..26ef17413dc 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/ComboSearchParamType.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/ComboSearchParamType.java @@ -20,8 +20,6 @@ package ca.uhn.fhir.context; public enum ComboSearchParamType { - UNIQUE, NON_UNIQUE - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/ConfigurationException.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/ConfigurationException.java index 886ac415da6..31b344ca271 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/ConfigurationException.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/ConfigurationException.java @@ -20,7 +20,7 @@ package ca.uhn.fhir.context; /** - * Non-checked exception indicating that HAPI was unable to initialize due to + * Non-checked exception indicating that HAPI was unable to initialize due to * a detected configuration problem. */ public class ConfigurationException extends RuntimeException { @@ -42,5 +42,4 @@ public class ConfigurationException extends RuntimeException { public ConfigurationException(Throwable theCause) { super(theCause); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/FhirContext.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/FhirContext.java index 5529225d76d..76b90ee29ee 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/FhirContext.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/FhirContext.java @@ -56,8 +56,6 @@ import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.io.IOException; import java.io.InputStream; import java.lang.reflect.Method; @@ -75,6 +73,8 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Properties; import java.util.Set; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * The FHIR context is the central starting point for the use of the HAPI FHIR API. 
It should be created once, and then @@ -100,14 +100,16 @@ import java.util.Set; public class FhirContext { private static final List> EMPTY_LIST = Collections.emptyList(); - private static final Map ourStaticContexts = Collections.synchronizedMap(new EnumMap<>(FhirVersionEnum.class)); + private static final Map ourStaticContexts = + Collections.synchronizedMap(new EnumMap<>(FhirVersionEnum.class)); private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirContext.class); private final IFhirVersion myVersion; private final Map> myDefaultTypeForProfile = new HashMap<>(); private final Set myPerformanceOptions = new HashSet<>(); private final Collection> myResourceTypesToScan; private AddProfileTagEnum myAddProfileTagWhenEncoding = AddProfileTagEnum.ONLY_FOR_CUSTOM; - private volatile Map, BaseRuntimeElementDefinition> myClassToElementDefinition = Collections.emptyMap(); + private volatile Map, BaseRuntimeElementDefinition> myClassToElementDefinition = + Collections.emptyMap(); private ArrayList> myCustomTypes; private volatile Map myIdToResourceDefinition = Collections.emptyMap(); private volatile boolean myInitialized; @@ -122,7 +124,8 @@ public class FhirContext { private volatile IRestfulClientFactory myRestfulClientFactory; private volatile RuntimeChildUndeclaredExtensionDefinition myRuntimeChildUndeclaredExtensionDefinition; private IValidationSupport myValidationSupport; - private Map>> myVersionToNameToResourceType = Collections.emptyMap(); + private Map>> myVersionToNameToResourceType = + Collections.emptyMap(); private volatile Set myResourceNames; private volatile Boolean myFormatXmlSupported; private volatile Boolean myFormatJsonSupported; @@ -175,12 +178,15 @@ public class FhirContext { this(theVersion, null); } - private FhirContext(final FhirVersionEnum theVersion, final Collection> theResourceTypes) { + private FhirContext( + final FhirVersionEnum theVersion, final Collection> theResourceTypes) { VersionUtil.getVersion(); if (theVersion != null) { if (!theVersion.isPresentOnClasspath()) { - throw new IllegalStateException(Msg.code(1680) + getLocalizer().getMessage(FhirContext.class, "noStructuresForSpecifiedVersion", theVersion.name())); + throw new IllegalStateException(Msg.code(1680) + + getLocalizer() + .getMessage(FhirContext.class, "noStructuresForSpecifiedVersion", theVersion.name())); } myVersion = theVersion.getVersionImplementation(); } else if (FhirVersionEnum.DSTU2.isPresentOnClasspath()) { @@ -196,18 +202,25 @@ public class FhirContext { } else if (FhirVersionEnum.R4B.isPresentOnClasspath()) { myVersion = FhirVersionEnum.R4B.getVersionImplementation(); } else { - throw new IllegalStateException(Msg.code(1681) + getLocalizer().getMessage(FhirContext.class, "noStructures")); + throw new IllegalStateException( + Msg.code(1681) + getLocalizer().getMessage(FhirContext.class, "noStructures")); } if (theVersion == null) { - ourLog.info("Creating new FhirContext with auto-detected version [{}]. It is recommended to explicitly select a version for future compatibility by invoking FhirContext.forDstuX()", - myVersion.getVersion().name()); + ourLog.info( + "Creating new FhirContext with auto-detected version [{}]. 
It is recommended to explicitly select a version for future compatibility by invoking FhirContext.forDstuX()", + myVersion.getVersion().name()); } else { if (HapiSystemProperties.isUnitTestModeEnabled()) { String calledAt = ExceptionUtils.getStackFrames(new Throwable())[4]; - ourLog.info("Creating new FHIR context for FHIR version [{}]{}", myVersion.getVersion().name(), calledAt); + ourLog.info( + "Creating new FHIR context for FHIR version [{}]{}", + myVersion.getVersion().name(), + calledAt); } else { - ourLog.info("Creating new FHIR context for FHIR version [{}]", myVersion.getVersion().name()); + ourLog.info( + "Creating new FHIR context for FHIR version [{}]", + myVersion.getVersion().name()); } } @@ -228,10 +241,8 @@ public class FhirContext { } catch (ClassNotFoundException e) { ourLog.trace("Android mode not detected"); } - } - /** * @since 5.6.0 */ @@ -246,7 +257,6 @@ public class FhirContext { return forCached(FhirVersionEnum.DSTU2_HL7ORG); } - /** * @since 5.5.0 */ @@ -475,7 +485,9 @@ public class FhirContext { Validate.notNull(theResourceType, "theResourceType can not be null"); if (Modifier.isAbstract(theResourceType.getModifiers())) { - throw new IllegalArgumentException(Msg.code(1682) + "Can not scan abstract or interface class (resource definitions must be concrete classes): " + theResourceType.getName()); + throw new IllegalArgumentException(Msg.code(1682) + + "Can not scan abstract or interface class (resource definitions must be concrete classes): " + + theResourceType.getName()); } RuntimeResourceDefinition retVal = (RuntimeResourceDefinition) myClassToElementDefinition.get(theResourceType); @@ -486,7 +498,8 @@ public class FhirContext { return retVal; } - public RuntimeResourceDefinition getResourceDefinition(final FhirVersionEnum theVersion, final String theResourceName) { + public RuntimeResourceDefinition getResourceDefinition( + final FhirVersionEnum theVersion, final String theResourceName) { Validate.notNull(theVersion, "theVersion can not be null"); validateInitialized(); @@ -500,7 +513,8 @@ public class FhirContext { Map, BaseRuntimeElementDefinition> existing = new HashMap<>(); ModelScanner.scanVersionPropertyFile(null, nameToType, theVersion, existing); - Map>> newVersionToNameToResourceType = new HashMap<>(); + Map>> newVersionToNameToResourceType = + new HashMap<>(); newVersionToNameToResourceType.putAll(myVersionToNameToResourceType); newVersionToNameToResourceType.put(theVersion, nameToType); myVersionToNameToResourceType = newVersionToNameToResourceType; @@ -575,7 +589,8 @@ public class FhirContext { // Multiple spots in HAPI FHIR and Smile CDR depend on DataFormatException // being thrown by this method, don't change that. 
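Editorial aside: the class Javadoc above introduces FhirContext as the central starting point of the API, created once and then reused, and the startup log message recommends selecting a FHIR version explicitly rather than relying on auto-detection; the comment just above likewise records that getResourceDefinition(String) reports unknown resource names by throwing DataFormatException. A minimal usage sketch follows; it assumes hapi-fhir-structures-r4 is on the classpath, and the class and resource names are illustrative only, not part of this patch:

    import ca.uhn.fhir.context.FhirContext;
    import ca.uhn.fhir.parser.DataFormatException;
    import ca.uhn.fhir.parser.IParser;
    import org.hl7.fhir.r4.model.Patient;

    public class FhirContextUsageSketch {
        // Create the context once, for an explicit FHIR version, and reuse it.
        private static final FhirContext CTX = FhirContext.forR4();

        public static void main(String[] args) {
            // Parsers are cheap to create; use a new one per thread or per message.
            IParser parser = CTX.newJsonParser().setPrettyPrint(true);
            Patient patient = new Patient();
            patient.addName().setFamily("Example");
            System.out.println(parser.encodeResourceToString(patient));

            // Unknown resource names surface as DataFormatException, the contract
            // called out in the comment above.
            try {
                CTX.getResourceDefinition("NotARealResource");
            } catch (DataFormatException e) {
                System.out.println("Unknown resource name: " + e.getMessage());
            }
        }
    }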
// *********************************************************************** - throw new DataFormatException(Msg.code(1684) + createUnknownResourceNameError(theResourceName, myVersion.getVersion())); + throw new DataFormatException( + Msg.code(1684) + createUnknownResourceNameError(theResourceName, myVersion.getVersion())); } if (IBaseResource.class.isAssignableFrom(clazz)) { retVal = scanResourceType(clazz); @@ -645,9 +660,13 @@ public class FhirContext { public IRestfulClientFactory getRestfulClientFactory() { if (myRestfulClientFactory == null) { try { - myRestfulClientFactory = (IRestfulClientFactory) ReflectionUtil.newInstance(Class.forName("ca.uhn.fhir.rest.client.apache.ApacheRestfulClientFactory"), FhirContext.class, this); + myRestfulClientFactory = (IRestfulClientFactory) ReflectionUtil.newInstance( + Class.forName("ca.uhn.fhir.rest.client.apache.ApacheRestfulClientFactory"), + FhirContext.class, + this); } catch (ClassNotFoundException e) { - throw new ConfigurationException(Msg.code(1686) + "hapi-fhir-client does not appear to be on the classpath"); + throw new ConfigurationException( + Msg.code(1686) + "hapi-fhir-client does not appear to be on the classpath"); } } return myRestfulClientFactory; @@ -684,20 +703,31 @@ public class FhirContext { * If hapi-fhir-validation is on the classpath, we can create a much more robust * validation chain using the classes found in that package */ - String inMemoryTermSvcType = "org.hl7.fhir.common.hapi.validation.support.InMemoryTerminologyServerValidationSupport"; - String commonCodeSystemsSupportType = "org.hl7.fhir.common.hapi.validation.support.CommonCodeSystemsTerminologyService"; + String inMemoryTermSvcType = + "org.hl7.fhir.common.hapi.validation.support.InMemoryTerminologyServerValidationSupport"; + String commonCodeSystemsSupportType = + "org.hl7.fhir.common.hapi.validation.support.CommonCodeSystemsTerminologyService"; if (ReflectionUtil.typeExists(inMemoryTermSvcType)) { - IValidationSupport inMemoryTermSvc = ReflectionUtil.newInstanceOrReturnNull(inMemoryTermSvcType, IValidationSupport.class, new Class[]{FhirContext.class}, new Object[]{this}); - IValidationSupport commonCodeSystemsSupport = ReflectionUtil.newInstanceOrReturnNull(commonCodeSystemsSupportType, IValidationSupport.class, new Class[]{FhirContext.class}, new Object[]{this}); - retVal = ReflectionUtil.newInstanceOrReturnNull("org.hl7.fhir.common.hapi.validation.support.ValidationSupportChain", IValidationSupport.class, new Class[]{IValidationSupport[].class}, new Object[]{new IValidationSupport[]{ - retVal, - inMemoryTermSvc, - commonCodeSystemsSupport - }}); - assert retVal != null : "Failed to instantiate " + "org.hl7.fhir.common.hapi.validation.support.ValidationSupportChain"; + IValidationSupport inMemoryTermSvc = ReflectionUtil.newInstanceOrReturnNull( + inMemoryTermSvcType, + IValidationSupport.class, + new Class[] {FhirContext.class}, + new Object[] {this}); + IValidationSupport commonCodeSystemsSupport = ReflectionUtil.newInstanceOrReturnNull( + commonCodeSystemsSupportType, + IValidationSupport.class, + new Class[] {FhirContext.class}, + new Object[] {this}); + retVal = ReflectionUtil.newInstanceOrReturnNull( + "org.hl7.fhir.common.hapi.validation.support.ValidationSupportChain", + IValidationSupport.class, + new Class[] {IValidationSupport[].class}, + new Object[] {new IValidationSupport[] {retVal, inMemoryTermSvc, commonCodeSystemsSupport}}); + assert retVal != null + : "Failed to instantiate " + + 
"org.hl7.fhir.common.hapi.validation.support.ValidationSupportChain"; } - myValidationSupport = retVal; } return retVal; @@ -758,20 +788,20 @@ public class FhirContext { return retVal; } - /** - * @return Returns true if the NDJSON serialization format is supported, based on the - * available libraries on the classpath. - * - * @since 5.6.0 - */ - public boolean isFormatNDJsonSupported() { - Boolean retVal = myFormatNDJsonSupported; - if (retVal == null) { - retVal = tryToInitParser(() -> newNDJsonParser()); - myFormatNDJsonSupported = retVal; - } - return retVal; - } + /** + * @return Returns true if the NDJSON serialization format is supported, based on the + * available libraries on the classpath. + * + * @since 5.6.0 + */ + public boolean isFormatNDJsonSupported() { + Boolean retVal = myFormatNDJsonSupported; + if (retVal == null) { + retVal = tryToInitParser(() -> newNDJsonParser()); + myFormatNDJsonSupported = retVal; + } + return retVal; + } /** * @return Returns true if the RDF serialization format is supported, based on the @@ -839,28 +869,28 @@ public class FhirContext { return new JsonParser(this, myParserErrorHandler); } - /** - * Create and return a new NDJSON parser. - * - *
- * Thread safety: Parsers are not guaranteed to be thread safe. Create a new parser instance for every thread - * or every message being parsed/encoded. - *
- *
- * Performance Note: This method is cheap to call, and may be called once for every message being processed - * without incurring any performance penalty - *
- *
- * The NDJsonParser provided here is expected to translate between legal NDJson and FHIR Bundles. - * In particular, it is able to encode the resources in a FHIR Bundle to NDJson, as well as decode - * NDJson into a FHIR "collection"-type Bundle populated with the resources described in the NDJson. - * It will throw an exception in the event where it is asked to encode to anything other than a FHIR Bundle - * or where it is asked to decode into anything other than a FHIR Bundle. - *
- */ - public IParser newNDJsonParser() { - return new NDJsonParser(this, myParserErrorHandler); - } + /** + * Create and return a new NDJSON parser. + * + *
+ * Thread safety: Parsers are not guaranteed to be thread safe. Create a new parser instance for every thread + * or every message being parsed/encoded. + *
+ *
+ * Performance Note: This method is cheap to call, and may be called once for every message being processed + * without incurring any performance penalty + *
+ *
+ * The NDJsonParser provided here is expected to translate between legal NDJson and FHIR Bundles. + * In particular, it is able to encode the resources in a FHIR Bundle to NDJson, as well as decode + * NDJson into a FHIR "collection"-type Bundle populated with the resources described in the NDJson. + * It will throw an exception in the event where it is asked to encode to anything other than a FHIR Bundle + * or where it is asked to decode into anything other than a FHIR Bundle. + *
+ */ + public IParser newNDJsonParser() { + return new NDJsonParser(this, myParserErrorHandler); + } /** * Create and return a new RDF parser. @@ -1005,7 +1035,8 @@ public class FhirContext { return (RuntimeResourceDefinition) defs.get(theResourceType); } - private synchronized Map, BaseRuntimeElementDefinition> scanResourceTypes(final Collection> theResourceTypes) { + private synchronized Map, BaseRuntimeElementDefinition> scanResourceTypes( + final Collection> theResourceTypes) { List> typesToScan = new ArrayList<>(); if (theResourceTypes != null) { typesToScan.addAll(theResourceTypes); @@ -1022,7 +1053,8 @@ public class FhirContext { Map> nameToElementDefinition = new HashMap<>(); nameToElementDefinition.putAll(myNameToElementDefinition); - for (Entry> next : scanner.getNameToElementDefinitions().entrySet()) { + for (Entry> next : + scanner.getNameToElementDefinitions().entrySet()) { if (!nameToElementDefinition.containsKey(next.getKey())) { nameToElementDefinition.put(next.getKey().toLowerCase(), next.getValue()); } @@ -1030,7 +1062,8 @@ public class FhirContext { Map nameToResourceDefinition = new HashMap<>(); nameToResourceDefinition.putAll(myNameToResourceDefinition); - for (Entry next : scanner.getNameToResourceDefinition().entrySet()) { + for (Entry next : + scanner.getNameToResourceDefinition().entrySet()) { if (!nameToResourceDefinition.containsKey(next.getKey())) { nameToResourceDefinition.put(next.getKey(), next.getValue()); } @@ -1043,7 +1076,8 @@ public class FhirContext { if (next instanceof RuntimeResourceDefinition) { if ("Bundle".equals(next.getName())) { if (!IBaseBundle.class.isAssignableFrom(next.getImplementingClass())) { - throw new ConfigurationException(Msg.code(1687) + "Resource type declares resource name Bundle but does not implement IBaseBundle"); + throw new ConfigurationException(Msg.code(1687) + + "Resource type declares resource name Bundle but does not implement IBaseBundle"); } } } @@ -1111,7 +1145,8 @@ public class FhirContext { } @SuppressWarnings({"cast"}) - private List> toElementList(final Collection> theResourceTypes) { + private List> toElementList( + final Collection> theResourceTypes) { if (theResourceTypes == null) { return null; } @@ -1146,13 +1181,15 @@ public class FhirContext { } public IPrimitiveType newPrimitiveBoolean(Boolean theValue) { - IPrimitiveType retval = (IPrimitiveType) getElementDefinition("boolean").newInstance(); + IPrimitiveType retval = + (IPrimitiveType) getElementDefinition("boolean").newInstance(); retval.setValue(theValue); return retval; } - public IPrimitiveType newPrimitiveString(String theValue) { - IPrimitiveType retval = (IPrimitiveType) getElementDefinition("string").newInstance(); + public IPrimitiveType newPrimitiveString(String theValue) { + IPrimitiveType retval = + (IPrimitiveType) getElementDefinition("string").newInstance(); retval.setValue(theValue); return retval; } @@ -1237,7 +1274,8 @@ public class FhirContext { return ourStaticContexts.computeIfAbsent(theFhirVersionEnum, v -> new FhirContext(v)); } - private static Collection> toCollection(Class theResourceType) { + private static Collection> toCollection( + Class theResourceType) { ArrayList> retVal = new ArrayList<>(1); retVal.add(theResourceType); return retVal; @@ -1248,11 +1286,11 @@ public class FhirContext { ArrayList> retVal = new ArrayList>(1); for (Class clazz : theResourceTypes) { if (!IResource.class.isAssignableFrom(clazz)) { - throw new IllegalArgumentException(Msg.code(1688) + clazz.getCanonicalName() + " is not an instance of " + 
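Returning to the newNDJsonParser() Javadoc reformatted above (its wording is unchanged by this pass): the parser only translates between NDJSON and FHIR Bundles, and it throws if asked to encode or decode anything else. A small round-trip sketch under the same assumptions as before (R4 structures on the classpath; names illustrative):

    import ca.uhn.fhir.context.FhirContext;
    import ca.uhn.fhir.parser.IParser;
    import org.hl7.fhir.r4.model.Bundle;
    import org.hl7.fhir.r4.model.Patient;

    public class NdJsonRoundTripSketch {
        public static void main(String[] args) {
            FhirContext ctx = FhirContext.forR4();

            // isFormatNDJsonSupported() probes whether an NDJSON parser can be created at all.
            if (!ctx.isFormatNDJsonSupported()) {
                throw new IllegalStateException("NDJSON support is not available on this classpath");
            }

            Patient patient = new Patient();
            patient.setId("Patient/1");

            Bundle bundle = new Bundle();
            bundle.setType(Bundle.BundleType.COLLECTION);
            bundle.addEntry().setResource(patient);

            // Encoding anything other than a Bundle would throw, per the Javadoc above.
            IParser ndJsonParser = ctx.newNDJsonParser();
            String ndjson = ndJsonParser.encodeResourceToString(bundle); // one resource per line
            Bundle roundTripped = (Bundle) ndJsonParser.parseResource(ndjson); // "collection"-type Bundle
            System.out.println(roundTripped.getEntry().size() + " entry/entries after round trip");
        }
    }

As with the JSON parser, the Javadoc asks that a new NDJSON parser instance be created per thread or per message rather than shared.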
IResource.class.getSimpleName()); + throw new IllegalArgumentException(Msg.code(1688) + clazz.getCanonicalName() + " is not an instance of " + + IResource.class.getSimpleName()); } retVal.add((Class) clazz); } return retVal; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/FhirVersionEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/FhirVersionEnum.java index 15beeb61185..399fe3163f6 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/FhirVersionEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/FhirVersionEnum.java @@ -57,7 +57,11 @@ public enum FhirVersionEnum { private volatile IFhirVersion myVersionImplementation; private String myFhirVersionString; - FhirVersionEnum(String theVersionClass, FhirVersionEnum theEquivalent, boolean theIsRi, IVersionProvider theVersionExtractor) { + FhirVersionEnum( + String theVersionClass, + FhirVersionEnum theEquivalent, + boolean theIsRi, + IVersionProvider theVersionExtractor) { myVersionClass = theVersionClass; myEquivalent = theEquivalent; myFhirVersionString = theVersionExtractor.provideVersion(); @@ -74,7 +78,8 @@ public enum FhirVersionEnum { } if (myVersionImplementation == null) { try { - myVersionImplementation = (IFhirVersion) Class.forName(myVersionClass).newInstance(); + myVersionImplementation = + (IFhirVersion) Class.forName(myVersionClass).newInstance(); } catch (Exception e) { throw new InternalErrorException(Msg.code(1710) + "Failed to instantiate FHIR version " + name(), e); } @@ -143,7 +148,6 @@ public enum FhirVersionEnum { return FhirContext.forCached(this); } - private interface IVersionProvider { String provideVersion(); } @@ -168,7 +172,6 @@ public enum FhirVersionEnum { default: return determineVersionForType(theFhirType.getSuperclass()); } - } private static class Version implements IVersionProvider { @@ -184,7 +187,6 @@ public enum FhirVersionEnum { public String provideVersion() { return myVersion; } - } /** @@ -208,7 +210,6 @@ public enum FhirVersionEnum { public String provideVersion() { return myVersion; } - } private static class R4Version implements IVersionProvider { @@ -228,7 +229,6 @@ public enum FhirVersionEnum { public String provideVersion() { return myVersion; } - } private static class R4BVersion implements IVersionProvider { @@ -248,7 +248,6 @@ public enum FhirVersionEnum { public String provideVersion() { return myVersion; } - } private static class R5Version implements IVersionProvider { @@ -268,7 +267,6 @@ public enum FhirVersionEnum { public String provideVersion() { return myVersion; } - } /** @@ -312,5 +310,4 @@ public enum FhirVersionEnum { return null; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/IRuntimeDatatypeDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/IRuntimeDatatypeDefinition.java index 2c385c2bab5..c943792ee17 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/IRuntimeDatatypeDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/IRuntimeDatatypeDefinition.java @@ -25,11 +25,10 @@ import org.hl7.fhir.instance.model.api.IBaseDatatype; public interface IRuntimeDatatypeDefinition { boolean isSpecialization(); - + public Class getProfileOf(); boolean isProfileOf(Class theType); - - public Class getImplementingClass(); + public Class getImplementingClass(); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/ModelScanner.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/ModelScanner.java index b424d208bfb..002c34edb7e 100644 --- 
a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/ModelScanner.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/ModelScanner.java @@ -48,7 +48,6 @@ import org.hl7.fhir.instance.model.api.ICompositeType; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nonnull; import java.io.IOException; import java.io.InputStream; import java.lang.annotation.Annotation; @@ -68,6 +67,7 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Properties; import java.util.Set; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -85,8 +85,12 @@ class ModelScanner { private Set> myVersionTypes; - ModelScanner(FhirContext theContext, FhirVersionEnum theVersion, Map, BaseRuntimeElementDefinition> theExistingDefinitions, - @Nonnull Collection> theResourceTypes) throws ConfigurationException { + ModelScanner( + FhirContext theContext, + FhirVersionEnum theVersion, + Map, BaseRuntimeElementDefinition> theExistingDefinitions, + @Nonnull Collection> theResourceTypes) + throws ConfigurationException { myContext = theContext; myVersion = theVersion; @@ -118,7 +122,9 @@ class ModelScanner { return myRuntimeChildUndeclaredExtensionDefinition; } - private void init(Map, BaseRuntimeElementDefinition> theExistingDefinitions, Set> theTypesToScan) { + private void init( + Map, BaseRuntimeElementDefinition> theExistingDefinitions, + Set> theTypesToScan) { if (theExistingDefinitions != null) { myClassToElementDefinitions.putAll(theExistingDefinitions); } @@ -140,7 +146,8 @@ class ModelScanner { myScanAlso.clear(); } while (!typesToScan.isEmpty()); - for (Entry, BaseRuntimeElementDefinition> nextEntry : myClassToElementDefinitions.entrySet()) { + for (Entry, BaseRuntimeElementDefinition> nextEntry : + myClassToElementDefinitions.entrySet()) { if (theExistingDefinitions != null && theExistingDefinitions.containsKey(nextEntry.getKey())) { continue; } @@ -178,7 +185,9 @@ class ModelScanner { ResourceDef resourceDefinition = pullAnnotation(theClass, ResourceDef.class); if (resourceDefinition != null) { if (!IBaseResource.class.isAssignableFrom(theClass)) { - throw new ConfigurationException(Msg.code(1714) + "Resource type contains a @" + ResourceDef.class.getSimpleName() + " annotation but does not implement " + IResource.class.getCanonicalName() + ": " + theClass.getCanonicalName()); + throw new ConfigurationException(Msg.code(1714) + "Resource type contains a @" + + ResourceDef.class.getSimpleName() + " annotation but does not implement " + + IResource.class.getCanonicalName() + ": " + theClass.getCanonicalName()); } @SuppressWarnings("unchecked") Class resClass = (Class) theClass; @@ -204,10 +213,14 @@ class ModelScanner { Block blockDefinition = pullAnnotation(theClass, Block.class); if (blockDefinition != null) { - if (IResourceBlock.class.isAssignableFrom(theClass) || IBaseBackboneElement.class.isAssignableFrom(theClass) || IBaseDatatypeElement.class.isAssignableFrom(theClass)) { + if (IResourceBlock.class.isAssignableFrom(theClass) + || IBaseBackboneElement.class.isAssignableFrom(theClass) + || IBaseDatatypeElement.class.isAssignableFrom(theClass)) { scanBlock(theClass); } else { - throw new ConfigurationException(Msg.code(1715) + "Type contains a @" + Block.class.getSimpleName() + " annotation but does not implement " + IResourceBlock.class.getCanonicalName() + ": " + theClass.getCanonicalName()); + throw new ConfigurationException(Msg.code(1715) + "Type contains a @" + 
Block.class.getSimpleName() + + " annotation but does not implement " + IResourceBlock.class.getCanonicalName() + ": " + + theClass.getCanonicalName()); } } @@ -216,7 +229,8 @@ class ModelScanner { return; } - throw new ConfigurationException(Msg.code(1716) + "Resource class[" + theClass.getName() + "] does not contain any valid HAPI-FHIR annotations"); + throw new ConfigurationException(Msg.code(1716) + "Resource class[" + theClass.getName() + + "] does not contain any valid HAPI-FHIR annotations"); } } @@ -228,11 +242,14 @@ class ModelScanner { // Just in case someone messes up when upgrading from DSTU2 if (myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3)) { if (BaseIdentifiableElement.class.isAssignableFrom(theClass)) { - throw new ConfigurationException(Msg.code(1717) + "@Block class for version " + myContext.getVersion().getVersion().name() + " should not extend " + BaseIdentifiableElement.class.getSimpleName() + ": " + theClass.getName()); + throw new ConfigurationException(Msg.code(1717) + "@Block class for version " + + myContext.getVersion().getVersion().name() + " should not extend " + + BaseIdentifiableElement.class.getSimpleName() + ": " + theClass.getName()); } } - RuntimeResourceBlockDefinition blockDef = new RuntimeResourceBlockDefinition(blockName, theClass, isStandardType(theClass), myContext, myClassToElementDefinitions); + RuntimeResourceBlockDefinition blockDef = new RuntimeResourceBlockDefinition( + blockName, theClass, isStandardType(theClass), myContext, myClassToElementDefinitions); blockDef.populateScanAlso(myScanAlso); myClassToElementDefinitions.put(theClass, blockDef); @@ -243,11 +260,13 @@ class ModelScanner { RuntimeCompositeDatatypeDefinition elementDef; if (theClass.equals(ExtensionDt.class)) { - elementDef = new RuntimeExtensionDtDefinition(theDatatypeDefinition, theClass, true, myContext, myClassToElementDefinitions); + elementDef = new RuntimeExtensionDtDefinition( + theDatatypeDefinition, theClass, true, myContext, myClassToElementDefinitions); // } else if (IBaseMetaType.class.isAssignableFrom(theClass)) { // resourceDef = new RuntimeMetaDefinition(theDatatypeDefinition, theClass, isStandardType(theClass)); } else { - elementDef = new RuntimeCompositeDatatypeDefinition(theDatatypeDefinition, theClass, isStandardType(theClass), myContext, myClassToElementDefinitions); + elementDef = new RuntimeCompositeDatatypeDefinition( + theDatatypeDefinition, theClass, isStandardType(theClass), myContext, myClassToElementDefinitions); } myClassToElementDefinitions.put(theClass, elementDef); myNameToElementDefinitions.put(elementDef.getName().toLowerCase(), elementDef); @@ -260,12 +279,14 @@ class ModelScanner { elementDef.populateScanAlso(myScanAlso); } - private String scanPrimitiveDatatype(Class> theClass, DatatypeDef theDatatypeDefinition) { + private String scanPrimitiveDatatype( + Class> theClass, DatatypeDef theDatatypeDefinition) { ourLog.debug("Scanning resource class: {}", theClass.getName()); String resourceName = theDatatypeDefinition.name(); if (isBlank(resourceName)) { - throw new ConfigurationException(Msg.code(1718) + "Resource type @" + ResourceDef.class.getSimpleName() + " annotation contains no resource name: " + theClass.getCanonicalName()); + throw new ConfigurationException(Msg.code(1718) + "Resource type @" + ResourceDef.class.getSimpleName() + + " annotation contains no resource name: " + theClass.getCanonicalName()); } BaseRuntimeElementDefinition elementDef; @@ -280,7 +301,8 @@ class ModelScanner { } else if 
(IIdType.class.isAssignableFrom(theClass)) { elementDef = new RuntimeIdDatatypeDefinition(theDatatypeDefinition, theClass, isStandardType(theClass)); } else { - elementDef = new RuntimePrimitiveDatatypeDefinition(theDatatypeDefinition, theClass, isStandardType(theClass)); + elementDef = + new RuntimePrimitiveDatatypeDefinition(theDatatypeDefinition, theClass, isStandardType(theClass)); } myClassToElementDefinitions.put(theClass, elementDef); if (!theDatatypeDefinition.isSpecialization()) { @@ -312,8 +334,9 @@ class ModelScanner { parent = parent.getSuperclass(); } if (isBlank(resourceName)) { - throw new ConfigurationException(Msg.code(1719) + "Resource type @" + ResourceDef.class.getSimpleName() + " annotation contains no resource name(): " + theClass.getCanonicalName() - + " - This is only allowed for types that extend other resource types "); + throw new ConfigurationException(Msg.code(1719) + "Resource type @" + ResourceDef.class.getSimpleName() + + " annotation contains no resource name(): " + theClass.getCanonicalName() + + " - This is only allowed for types that extend other resource types "); } } @@ -329,12 +352,17 @@ class ModelScanner { String resourceId = resourceDefinition.id(); if (!isBlank(resourceId)) { if (myIdToResourceDefinition.containsKey(resourceId)) { - throw new ConfigurationException(Msg.code(1720) + "The following resource types have the same ID of '" + resourceId + "' - " + theClass.getCanonicalName() + " and " - + myIdToResourceDefinition.get(resourceId).getImplementingClass().getCanonicalName()); + throw new ConfigurationException(Msg.code(1720) + "The following resource types have the same ID of '" + + resourceId + "' - " + theClass.getCanonicalName() + " and " + + myIdToResourceDefinition + .get(resourceId) + .getImplementingClass() + .getCanonicalName()); } } - RuntimeResourceDefinition resourceDef = new RuntimeResourceDefinition(myContext, resourceName, theClass, resourceDefinition, standardType, myClassToElementDefinitions); + RuntimeResourceDefinition resourceDef = new RuntimeResourceDefinition( + myContext, resourceName, theClass, resourceDefinition, standardType, myClassToElementDefinitions); myClassToElementDefinitions.put(theClass, resourceDef); if (primaryNameProvider) { if (resourceDef.getStructureVersion() == myVersion) { @@ -360,7 +388,8 @@ class ModelScanner { return resourceName; } - private void scanResourceForSearchParams(Class theClass, RuntimeResourceDefinition theResourceDef) { + private void scanResourceForSearchParams( + Class theClass, RuntimeResourceDefinition theResourceDef) { Map nameToParam = new HashMap<>(); Map compositeFields = new LinkedHashMap<>(); @@ -384,9 +413,11 @@ class ModelScanner { for (Field nextField : fields) { SearchParamDefinition searchParam = pullAnnotation(nextField, SearchParamDefinition.class); if (searchParam != null) { - RestSearchParameterTypeEnum paramType = RestSearchParameterTypeEnum.forCode(searchParam.type().toLowerCase()); + RestSearchParameterTypeEnum paramType = + RestSearchParameterTypeEnum.forCode(searchParam.type().toLowerCase()); if (paramType == null) { - throw new ConfigurationException(Msg.code(1721) + "Search param " + searchParam.name() + " has an invalid type: " + searchParam.type()); + throw new ConfigurationException(Msg.code(1721) + "Search param " + searchParam.name() + + " has an invalid type: " + searchParam.type()); } Set providesMembershipInCompartments; providesMembershipInCompartments = new HashSet<>(); @@ -425,12 +456,23 @@ class ModelScanner { String name = searchParam.name(); 
url = toCanonicalSearchParameterUri(theResourceDef, name); } - RuntimeSearchParam param = new RuntimeSearchParam(null, url, searchParam.name(), searchParam.description(), searchParam.path(), paramType, providesMembershipInCompartments, toTargetList(searchParam.target()), RuntimeSearchParamStatusEnum.ACTIVE, null, components, base); + RuntimeSearchParam param = new RuntimeSearchParam( + null, + url, + searchParam.name(), + searchParam.description(), + searchParam.path(), + paramType, + providesMembershipInCompartments, + toTargetList(searchParam.target()), + RuntimeSearchParamStatusEnum.ACTIVE, + null, + components, + base); theResourceDef.addSearchParam(param); nameToParam.put(param.getName(), param); } } - } private String toCanonicalSearchParameterUri(RuntimeResourceDefinition theResourceDef, String theName) { @@ -455,7 +497,9 @@ class ModelScanner { if (List.class.equals(nextElementType)) { nextElementType = ReflectionUtil.getGenericCollectionTypeOfField(next); } else if (Collection.class.isAssignableFrom(nextElementType)) { - throw new ConfigurationException(Msg.code(1722) + "Field '" + next.getName() + "' in type '" + next.getClass().getCanonicalName() + "' is a Collection - Only java.util.List curently supported"); + throw new ConfigurationException(Msg.code(1722) + "Field '" + next.getName() + "' in type '" + + next.getClass().getCanonicalName() + + "' is a Collection - Only java.util.List curently supported"); } return nextElementType; } @@ -464,7 +508,8 @@ class ModelScanner { static IValueSetEnumBinder> getBoundCodeBinder(Field theNext) { Class bound = getGenericCollectionTypeOfCodedField(theNext); if (bound == null) { - throw new ConfigurationException(Msg.code(1723) + "Field '" + theNext + "' has no parameter for " + BoundCodeDt.class.getSimpleName() + " to determine enum type"); + throw new ConfigurationException(Msg.code(1723) + "Field '" + theNext + "' has no parameter for " + + BoundCodeDt.class.getSimpleName() + " to determine enum type"); } String fieldName = "VALUESET_BINDER"; @@ -472,7 +517,11 @@ class ModelScanner { Field bindingField = bound.getField(fieldName); return (IValueSetEnumBinder>) bindingField.get(null); } catch (Exception e) { - throw new ConfigurationException(Msg.code(1724) + "Field '" + theNext + "' has type parameter " + bound.getCanonicalName() + " but this class has no valueset binding field (must have a field called " + fieldName + ")", e); + throw new ConfigurationException( + Msg.code(1724) + "Field '" + theNext + "' has type parameter " + bound.getCanonicalName() + + " but this class has no valueset binding field (must have a field called " + fieldName + + ")", + e); } } @@ -488,7 +537,8 @@ class ModelScanner { static Class> determineEnumTypeForBoundField(Field next) { @SuppressWarnings("unchecked") - Class> enumType = (Class>) ReflectionUtil.getGenericCollectionTypeOfFieldWithSecondOrderForList(next); + Class> enumType = + (Class>) ReflectionUtil.getGenericCollectionTypeOfFieldWithSecondOrderForList(next); return enumType; } @@ -506,7 +556,11 @@ class ModelScanner { return type; } - static Set> scanVersionPropertyFile(Set> theDatatypes, Map> theResourceTypes, FhirVersionEnum theVersion, Map, BaseRuntimeElementDefinition> theExistingElementDefinitions) { + static Set> scanVersionPropertyFile( + Set> theDatatypes, + Map> theResourceTypes, + FhirVersionEnum theVersion, + Map, BaseRuntimeElementDefinition> theExistingElementDefinitions) { Set> retVal = new HashSet<>(); try (InputStream str = 
theVersion.getVersionImplementation().getFhirVersionPropertiesFile()) { @@ -537,12 +591,16 @@ class ModelScanner { Class nextClass = (Class) dtType; theDatatypes.add(nextClass); } else { - ourLog.warn("Class is not assignable from " + IElement.class.getSimpleName() + " or " + IBaseDatatype.class.getSimpleName() + ": " + nextValue); + ourLog.warn("Class is not assignable from " + IElement.class.getSimpleName() + " or " + + IBaseDatatype.class.getSimpleName() + ": " + nextValue); continue; } } catch (ClassNotFoundException e) { - throw new ConfigurationException(Msg.code(1725) + "Unknown class[" + nextValue + "] for data type definition: " + nextKey.substring("datatype.".length()), e); + throw new ConfigurationException( + Msg.code(1725) + "Unknown class[" + nextValue + "] for data type definition: " + + nextKey.substring("datatype.".length()), + e); } } } else if (nextKey.startsWith("resource.")) { @@ -550,27 +608,33 @@ class ModelScanner { String resName = nextKey.substring("resource.".length()).toLowerCase(); try { @SuppressWarnings("unchecked") - Class nextClass = (Class) Class.forName(nextValue); + Class nextClass = + (Class) Class.forName(nextValue); if (theExistingElementDefinitions.containsKey(nextClass)) { continue; } if (!IBaseResource.class.isAssignableFrom(nextClass)) { - throw new ConfigurationException(Msg.code(1726) + "Class is not assignable from " + IBaseResource.class.getSimpleName() + ": " + nextValue); + throw new ConfigurationException(Msg.code(1726) + "Class is not assignable from " + + IBaseResource.class.getSimpleName() + ": " + nextValue); } theResourceTypes.put(resName, nextClass); } catch (ClassNotFoundException e) { - throw new ConfigurationException(Msg.code(1727) + "Unknown class[" + nextValue + "] for resource definition: " + nextKey.substring("resource.".length()), e); + throw new ConfigurationException( + Msg.code(1727) + "Unknown class[" + nextValue + "] for resource definition: " + + nextKey.substring("resource.".length()), + e); } } else { - throw new ConfigurationException(Msg.code(1728) + "Unexpected property in version property file: " + nextKey + "=" + nextValue); + throw new ConfigurationException(Msg.code(1728) + "Unexpected property in version property file: " + + nextKey + "=" + nextValue); } } } catch (IOException e) { - throw new ConfigurationException(Msg.code(1729) + "Failed to load model property file from classpath: " + "/ca/uhn/fhir/model/dstu/model.properties"); + throw new ConfigurationException(Msg.code(1729) + "Failed to load model property file from classpath: " + + "/ca/uhn/fhir/model/dstu/model.properties"); } return retVal; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/ParserOptions.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/ParserOptions.java index 351e02d00e4..065a5d6523d 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/ParserOptions.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/ParserOptions.java @@ -200,9 +200,9 @@ public class ParserOptions { * Bundle.entry.fullUrl * @return Returns a reference to this parser so that method calls can be chained together */ - public ParserOptions setOverrideResourceIdWithBundleEntryFullUrl(boolean theOverrideResourceIdWithBundleEntryFullUrl) { + public ParserOptions setOverrideResourceIdWithBundleEntryFullUrl( + boolean theOverrideResourceIdWithBundleEntryFullUrl) { myOverrideResourceIdWithBundleEntryFullUrl = theOverrideResourceIdWithBundleEntryFullUrl; return this; } - } diff --git 
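Usage note for the ParserOptions setter reformatted above (a sketch, not part of the patch): the options object is reached through FhirContext, and the setter returns the options instance so calls can be chained.

    FhirContext ctx = FhirContext.forR4();
    // keep resource IDs as-is instead of overriding them with Bundle.entry.fullUrl
    ctx.getParserOptions().setOverrideResourceIdWithBundleEntryFullUrl(false);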
a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/PerformanceOptionsEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/PerformanceOptionsEnum.java index 856d554dafb..5721bfd9dc8 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/PerformanceOptionsEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/PerformanceOptionsEnum.java @@ -25,15 +25,14 @@ package ca.uhn.fhir.context; public enum PerformanceOptionsEnum { /** - * When this option is set, model classes will not be scanned for children until the + * When this option is set, model classes will not be scanned for children until the * child list for the given type is actually accessed. *

<p> * The effect of this option is that reflection operations to scan children will be * deferred, and some may never happen if specific model types aren't actually used. * This option is useful on environments where reflection is particularly slow, e.g. * Android or low powered devices. - * </p>
+ * </p>

*/ DEFERRED_MODEL_SCANNING - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildAny.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildAny.java index 241076babe8..9119d86331c 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildAny.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildAny.java @@ -19,6 +19,15 @@ */ package ca.uhn.fhir.context; +import ca.uhn.fhir.model.api.IDatatype; +import ca.uhn.fhir.model.api.IResource; +import ca.uhn.fhir.model.api.annotation.Child; +import ca.uhn.fhir.model.api.annotation.Description; +import ca.uhn.fhir.model.primitive.XhtmlDt; +import org.hl7.fhir.instance.model.api.IBase; +import org.hl7.fhir.instance.model.api.IBaseDatatype; +import org.hl7.fhir.instance.model.api.IBaseReference; + import java.lang.reflect.Field; import java.util.ArrayList; import java.util.Collections; @@ -26,47 +35,43 @@ import java.util.Comparator; import java.util.List; import java.util.Map; -import org.hl7.fhir.instance.model.api.IBase; -import org.hl7.fhir.instance.model.api.IBaseDatatype; -import org.hl7.fhir.instance.model.api.IBaseReference; - -import ca.uhn.fhir.model.api.IDatatype; -import ca.uhn.fhir.model.api.IResource; -import ca.uhn.fhir.model.api.annotation.Child; -import ca.uhn.fhir.model.api.annotation.Description; -import ca.uhn.fhir.model.primitive.XhtmlDt; - public class RuntimeChildAny extends RuntimeChildChoiceDefinition { - public RuntimeChildAny(Field theField, String theElementName, Child theChildAnnotation, Description theDescriptionAnnotation) { + public RuntimeChildAny( + Field theField, String theElementName, Child theChildAnnotation, Description theDescriptionAnnotation) { super(theField, theElementName, theChildAnnotation, theDescriptionAnnotation); } @Override - void sealAndInitialize(FhirContext theContext, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { + void sealAndInitialize( + FhirContext theContext, + Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { List> choiceTypes = new ArrayList>(); - + for (Class next : theClassToElementDefinitions.keySet()) { if (next.equals(XhtmlDt.class)) { continue; } - + BaseRuntimeElementDefinition nextDef = theClassToElementDefinitions.get(next); if (nextDef instanceof IRuntimeDatatypeDefinition) { if (((IRuntimeDatatypeDefinition) nextDef).isSpecialization()) { /* - * Things like BoundCodeDt shoudn't be considered as valid options for an "any" choice, since - * we'll already have CodeDt as an option + * Things like BoundCodeDt shoudn't be considered as valid options for an "any" choice, since + * we'll already have CodeDt as an option */ continue; } } - - if (IResource.class.isAssignableFrom(next) || IDatatype.class.isAssignableFrom(next) || IBaseDatatype.class.isAssignableFrom(next) || IBaseReference.class.isAssignableFrom(next)) { + + if (IResource.class.isAssignableFrom(next) + || IDatatype.class.isAssignableFrom(next) + || IBaseDatatype.class.isAssignableFrom(next) + || IBaseReference.class.isAssignableFrom(next)) { choiceTypes.add(next); } } - Collections.sort(choiceTypes,new Comparator>(){ + Collections.sort(choiceTypes, new Comparator>() { @Override public int compare(Class theO1, Class theO2) { boolean o1res = IResource.class.isAssignableFrom(theO1); @@ -77,16 +82,14 @@ public class RuntimeChildAny extends RuntimeChildChoiceDefinition { return -1; } else if (o1res == false && o2res == false) { return 0; - }else { + } else { return 1; } - }}); - + } + }); + setChoiceTypes(choiceTypes); - 
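The DEFERRED_MODEL_SCANNING javadoc touched above describes behaviour that callers opt into through FhirContext; a minimal sketch (illustrative only, not part of the patch):

    FhirContext ctx = FhirContext.forR4();
    // defer reflection-based child scanning until a child list is first accessed
    ctx.setPerformanceOptions(PerformanceOptionsEnum.DEFERRED_MODEL_SCANNING);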
+ super.sealAndInitialize(theContext, theClassToElementDefinitions); } - - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildChoiceDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildChoiceDefinition.java index 2a6f357805a..cc0a21c49c7 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildChoiceDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildChoiceDefinition.java @@ -49,7 +49,12 @@ public class RuntimeChildChoiceDefinition extends BaseRuntimeDeclaredChildDefini /** * Constructor */ - public RuntimeChildChoiceDefinition(Field theField, String theElementName, Child theChildAnnotation, Description theDescriptionAnnotation, List> theChoiceTypes) { + public RuntimeChildChoiceDefinition( + Field theField, + String theElementName, + Child theChildAnnotation, + Description theDescriptionAnnotation, + List> theChoiceTypes) { super(theField, theChildAnnotation, theDescriptionAnnotation, theElementName); myChoiceTypes = Collections.unmodifiableList(theChoiceTypes); @@ -57,10 +62,11 @@ public class RuntimeChildChoiceDefinition extends BaseRuntimeDeclaredChildDefini /** * Constructor - * + * * For extension, if myChoiceTypes will be set some other way */ - RuntimeChildChoiceDefinition(Field theField, String theElementName, Child theChildAnnotation, Description theDescriptionAnnotation) { + RuntimeChildChoiceDefinition( + Field theField, String theElementName, Child theChildAnnotation, Description theDescriptionAnnotation) { super(theField, theChildAnnotation, theDescriptionAnnotation, theElementName); } @@ -79,14 +85,17 @@ public class RuntimeChildChoiceDefinition extends BaseRuntimeDeclaredChildDefini @Override public BaseRuntimeElementDefinition getChildByName(String theName) { - assert myNameToChildDefinition.containsKey(theName) : "Can't find child '" + theName + "' in names: " + myNameToChildDefinition.keySet(); + assert myNameToChildDefinition.containsKey(theName) + : "Can't find child '" + theName + "' in names: " + myNameToChildDefinition.keySet(); return myNameToChildDefinition.get(theName); } @SuppressWarnings("unchecked") @Override - void sealAndInitialize(FhirContext theContext, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { + void sealAndInitialize( + FhirContext theContext, + Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { myNameToChildDefinition = new HashMap>(); myDatatypeToElementName = new HashMap, String>(); myDatatypeToElementDefinition = new HashMap, BaseRuntimeElementDefinition>(); @@ -105,7 +114,7 @@ public class RuntimeChildChoiceDefinition extends BaseRuntimeDeclaredChildDefini myNameToChildDefinition.put(getElementName() + "Reference", nextDef); myNameToChildDefinition.put(getElementName() + "Resource", nextDef); - + myResourceTypes.add((Class) next); } else { @@ -118,7 +127,7 @@ public class RuntimeChildChoiceDefinition extends BaseRuntimeDeclaredChildDefini * unprofiled datatype as the element name. E.g. if foo[x] allows markdown as a datatype, it calls the * element fooString when encoded, because markdown is a profile of string. This is according to the * FHIR spec - * + * * Note that as of HAPI 1.4 this applies only to non-primitive datatypes after discussion * with Grahame. 
*/ @@ -158,7 +167,9 @@ public class RuntimeChildChoiceDefinition extends BaseRuntimeDeclaredChildDefini if (myDatatypeToElementName.containsKey(next)) { String existing = myDatatypeToElementName.get(next); if (!existing.equals(elementName)) { - throw new ConfigurationException(Msg.code(1693) + "Already have element name " + existing + " for datatype " + next.getSimpleName() + " in " + getElementName() + ", cannot add " + elementName); + throw new ConfigurationException( + Msg.code(1693) + "Already have element name " + existing + " for datatype " + + next.getSimpleName() + " in " + getElementName() + ", cannot add " + elementName); } } else { myDatatypeToElementName.put(next, elementName); @@ -171,7 +182,6 @@ public class RuntimeChildChoiceDefinition extends BaseRuntimeDeclaredChildDefini myResourceTypes = Collections.unmodifiableList(myResourceTypes); } - public List> getResourceTypes() { return myResourceTypes; } @@ -190,5 +200,4 @@ public class RuntimeChildChoiceDefinition extends BaseRuntimeDeclaredChildDefini public Set> getValidChildTypes() { return Collections.unmodifiableSet((myDatatypeToElementDefinition.keySet())); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildCompositeBoundDatatypeDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildCompositeBoundDatatypeDefinition.java index 3dc29e327ab..00eaa70ad1b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildCompositeBoundDatatypeDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildCompositeBoundDatatypeDefinition.java @@ -19,25 +19,31 @@ */ package ca.uhn.fhir.context; -import java.lang.reflect.Field; - -import org.apache.commons.lang3.Validate; -import org.hl7.fhir.instance.model.api.IBase; - import ca.uhn.fhir.model.api.IValueSetEnumBinder; import ca.uhn.fhir.model.api.annotation.Child; import ca.uhn.fhir.model.api.annotation.Description; +import org.apache.commons.lang3.Validate; +import org.hl7.fhir.instance.model.api.IBase; + +import java.lang.reflect.Field; public class RuntimeChildCompositeBoundDatatypeDefinition extends RuntimeChildCompositeDatatypeDefinition { private IValueSetEnumBinder> myBinder; private Class> myEnumType; - public RuntimeChildCompositeBoundDatatypeDefinition(Field theField, String theElementName, Child theChildAnnotation, Description theDescriptionAnnotation, Class theDatatype, IValueSetEnumBinder> theBinder, Class> theEnumType) { + public RuntimeChildCompositeBoundDatatypeDefinition( + Field theField, + String theElementName, + Child theChildAnnotation, + Description theDescriptionAnnotation, + Class theDatatype, + IValueSetEnumBinder> theBinder, + Class> theEnumType) { super(theField, theElementName, theChildAnnotation, theDescriptionAnnotation, theDatatype); Validate.notNull(theBinder, "theBinder must not be null"); Validate.notNull(theEnumType, "theEnumType must not be null"); - + myBinder = theBinder; myEnumType = theEnumType; } @@ -51,5 +57,4 @@ public class RuntimeChildCompositeBoundDatatypeDefinition extends RuntimeChildCo public Class> getBoundEnumType() { return myEnumType; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildCompositeDatatypeDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildCompositeDatatypeDefinition.java index 55666d5de3b..75eb6bc3f39 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildCompositeDatatypeDefinition.java +++ 
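The choice-element comment in the RuntimeChildChoiceDefinition hunk above (foo[x] encoding with a datatype suffix such as fooString) is observable from the public parser API; a short sketch, assuming the R4 structures module is on the classpath (not part of the patch):

    FhirContext ctx = FhirContext.forR4Cached();
    Patient patient = new Patient();
    patient.setDeceased(new BooleanType(true)); // deceased[x] choice element
    String json = ctx.newJsonParser().encodeResourceToString(patient);
    // json contains "deceasedBoolean": true, i.e. the datatype-suffixed element name chosen by the child definition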
b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildCompositeDatatypeDefinition.java @@ -19,18 +19,20 @@ */ package ca.uhn.fhir.context; -import java.lang.reflect.Field; - -import org.hl7.fhir.instance.model.api.IBase; - import ca.uhn.fhir.model.api.annotation.Child; import ca.uhn.fhir.model.api.annotation.Description; +import org.hl7.fhir.instance.model.api.IBase; + +import java.lang.reflect.Field; public class RuntimeChildCompositeDatatypeDefinition extends BaseRuntimeChildDatatypeDefinition { - public RuntimeChildCompositeDatatypeDefinition(Field theField, String theElementName, Child theChildAnnotation, Description theDescriptionAnnotation, Class theDatatype) { - super(theField, theElementName, theChildAnnotation,theDescriptionAnnotation, theDatatype); + public RuntimeChildCompositeDatatypeDefinition( + Field theField, + String theElementName, + Child theChildAnnotation, + Description theDescriptionAnnotation, + Class theDatatype) { + super(theField, theElementName, theChildAnnotation, theDescriptionAnnotation, theDatatype); } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildContainedResources.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildContainedResources.java index 3c9821fdf02..d8c635f61ab 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildContainedResources.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildContainedResources.java @@ -36,7 +36,9 @@ public class RuntimeChildContainedResources extends BaseRuntimeDeclaredChildDefi private BaseRuntimeElementDefinition myElem; - RuntimeChildContainedResources(Field theField, Child theChildAnnotation, Description theDescriptionAnnotation, String theElementName) throws ConfigurationException { + RuntimeChildContainedResources( + Field theField, Child theChildAnnotation, Description theDescriptionAnnotation, String theElementName) + throws ConfigurationException { super(theField, theChildAnnotation, theDescriptionAnnotation, theElementName); } @@ -72,7 +74,9 @@ public class RuntimeChildContainedResources extends BaseRuntimeDeclaredChildDefi } @Override - void sealAndInitialize(FhirContext theContext, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { + void sealAndInitialize( + FhirContext theContext, + Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { Class actualType = theContext.getVersion().getContainedType(); if (BaseContainedDt.class.isAssignableFrom(actualType)) { @SuppressWarnings("unchecked") @@ -81,8 +85,8 @@ public class RuntimeChildContainedResources extends BaseRuntimeDeclaredChildDefi } else if (List.class.isAssignableFrom(actualType)) { myElem = new RuntimeElemContainedResourceList(IBaseResource.class, false); } else { - throw new ConfigurationException(Msg.code(1735) + "Fhir Version definition returned invalid contained type: " + actualType); + throw new ConfigurationException( + Msg.code(1735) + "Fhir Version definition returned invalid contained type: " + actualType); } } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildDeclaredExtensionDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildDeclaredExtensionDefinition.java index 81f81a83cad..4e54d193921 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildDeclaredExtensionDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildDeclaredExtensionDefinition.java @@ -19,7 +19,12 @@ */ package ca.uhn.fhir.context; -import static 
org.apache.commons.lang3.StringUtils.isNotBlank; +import ca.uhn.fhir.model.api.IElement; +import ca.uhn.fhir.model.api.annotation.Child; +import ca.uhn.fhir.model.api.annotation.Description; +import ca.uhn.fhir.model.api.annotation.Extension; +import ca.uhn.fhir.util.ReflectionUtil; +import org.hl7.fhir.instance.model.api.IBase; import java.lang.reflect.Field; import java.lang.reflect.Modifier; @@ -29,13 +34,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.hl7.fhir.instance.model.api.IBase; - -import ca.uhn.fhir.model.api.IElement; -import ca.uhn.fhir.model.api.annotation.Child; -import ca.uhn.fhir.model.api.annotation.Description; -import ca.uhn.fhir.model.api.annotation.Extension; -import ca.uhn.fhir.util.ReflectionUtil; +import static org.apache.commons.lang3.StringUtils.isNotBlank; public class RuntimeChildDeclaredExtensionDefinition extends RuntimeChildChoiceDefinition { @@ -56,8 +55,15 @@ public class RuntimeChildDeclaredExtensionDefinition extends RuntimeChildChoiceD * @param theDefinedLocally * See {@link Extension#definedLocally()} */ - RuntimeChildDeclaredExtensionDefinition(Field theField, Child theChild, Description theDescriptionAnnotation, Extension theExtension, String theElementName, String theExtensionUrl, - Class theChildType, Object theBoundTypeBinder) + RuntimeChildDeclaredExtensionDefinition( + Field theField, + Child theChild, + Description theDescriptionAnnotation, + Extension theExtension, + String theElementName, + String theExtensionUrl, + Class theChildType, + Object theBoundTypeBinder) throws ConfigurationException { super(theField, theElementName, theChild, theDescriptionAnnotation); assert isNotBlank(theExtensionUrl); @@ -116,7 +122,6 @@ public class RuntimeChildDeclaredExtensionDefinition extends RuntimeChildChoiceD return "modifierExtension"; } return "extension"; - } return retVal; } @@ -124,7 +129,7 @@ public class RuntimeChildDeclaredExtensionDefinition extends RuntimeChildChoiceD @Override public BaseRuntimeElementDefinition getChildByName(String theName) { String name = theName; - if ("extension".equals(name)||"modifierExtension".equals(name)) { + if ("extension".equals(name) || "modifierExtension".equals(name)) { if (myChildResourceBlock != null) { return myChildResourceBlock; } @@ -136,7 +141,7 @@ public class RuntimeChildDeclaredExtensionDefinition extends RuntimeChildChoiceD if (getValidChildNames().contains(name) == false) { return null; } - + return super.getChildByName(name); } @@ -169,7 +174,9 @@ public class RuntimeChildDeclaredExtensionDefinition extends RuntimeChildChoiceD } @Override - void sealAndInitialize(FhirContext theContext, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { + void sealAndInitialize( + FhirContext theContext, + Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { myUrlToChildExtension = new HashMap(); BaseRuntimeElementDefinition elementDef = theClassToElementDefinitions.get(myChildType); @@ -184,17 +191,19 @@ public class RuntimeChildDeclaredExtensionDefinition extends RuntimeChildChoiceD } } - if (elementDef instanceof RuntimePrimitiveDatatypeDefinition || elementDef instanceof RuntimeCompositeDatatypeDefinition) { -// myDatatypeChildName = "value" + elementDef.getName().substring(0, 1).toUpperCase() + elementDef.getName().substring(1); -// if ("valueResourceReference".equals(myDatatypeChildName)) { - // Per one of the examples here: http://hl7.org/implement/standards/fhir/extensibility.html#extension -// myDatatypeChildName = "valueResource"; -// 
List> types = new ArrayList>(); -// types.add(IBaseResource.class); -// myChildDef = findResourceReferenceDefinition(theClassToElementDefinitions); -// } else { - myChildDef = elementDef; -// } + if (elementDef instanceof RuntimePrimitiveDatatypeDefinition + || elementDef instanceof RuntimeCompositeDatatypeDefinition) { + // myDatatypeChildName = "value" + elementDef.getName().substring(0, 1).toUpperCase() + + // elementDef.getName().substring(1); + // if ("valueResourceReference".equals(myDatatypeChildName)) { + // Per one of the examples here: http://hl7.org/implement/standards/fhir/extensibility.html#extension + // myDatatypeChildName = "valueResource"; + // List> types = new ArrayList>(); + // types.add(IBaseResource.class); + // myChildDef = findResourceReferenceDefinition(theClassToElementDefinitions); + // } else { + myChildDef = elementDef; + // } } else if (elementDef instanceof RuntimeResourceBlockDefinition) { RuntimeResourceBlockDefinition extDef = ((RuntimeResourceBlockDefinition) elementDef); for (RuntimeChildDeclaredExtensionDefinition next : extDef.getExtensions()) { @@ -215,5 +224,4 @@ public class RuntimeChildDeclaredExtensionDefinition extends RuntimeChildChoiceD public Class getChildType() { return myChildType; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildDirectResource.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildDirectResource.java index c7aa3e384b7..8454fbad54c 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildDirectResource.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildDirectResource.java @@ -19,23 +19,24 @@ */ package ca.uhn.fhir.context; +import ca.uhn.fhir.model.api.annotation.Child; +import ca.uhn.fhir.model.api.annotation.Description; +import org.hl7.fhir.instance.model.api.IBase; +import org.hl7.fhir.instance.model.api.IBaseResource; + import java.lang.reflect.Field; import java.util.Collections; import java.util.Map; import java.util.Set; -import org.hl7.fhir.instance.model.api.IBase; -import org.hl7.fhir.instance.model.api.IBaseResource; - -import ca.uhn.fhir.model.api.annotation.Child; -import ca.uhn.fhir.model.api.annotation.Description; - public class RuntimeChildDirectResource extends BaseRuntimeDeclaredChildDefinition { -// private RuntimeElemContainedResources myElem; + // private RuntimeElemContainedResources myElem; private FhirContext myContext; - RuntimeChildDirectResource(Field theField, Child theChildAnnotation, Description theDescriptionAnnotation, String theElementName) throws ConfigurationException { + RuntimeChildDirectResource( + Field theField, Child theChildAnnotation, Description theDescriptionAnnotation, String theElementName) + throws ConfigurationException { super(theField, theChildAnnotation, theDescriptionAnnotation, theElementName); } @@ -61,8 +62,9 @@ public class RuntimeChildDirectResource extends BaseRuntimeDeclaredChildDefiniti } @Override - void sealAndInitialize(FhirContext theContext, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { + void sealAndInitialize( + FhirContext theContext, + Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { myContext = theContext; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildExt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildExt.java index 3a38c6e2930..c6af9951a0c 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildExt.java +++ 
b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildExt.java @@ -112,7 +112,9 @@ public class RuntimeChildExt extends BaseRuntimeChildDefinition { } @Override - void sealAndInitialize(FhirContext theContext, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { + void sealAndInitialize( + FhirContext theContext, + Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { myNameToChild = new HashMap<>(); myDatatypeToChild = new HashMap<>(); myDatatypeToChildName = new HashMap<>(); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildExtension.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildExtension.java index 902a0ed35d3..1bf1de8d56d 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildExtension.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildExtension.java @@ -19,20 +19,20 @@ */ package ca.uhn.fhir.context; +import ca.uhn.fhir.model.api.annotation.Child; +import ca.uhn.fhir.model.api.annotation.Description; +import org.hl7.fhir.instance.model.api.IBase; + import java.lang.reflect.Field; import java.util.Collections; import java.util.Set; -import org.hl7.fhir.instance.model.api.IBase; - -import ca.uhn.fhir.model.api.annotation.Child; -import ca.uhn.fhir.model.api.annotation.Description; - public class RuntimeChildExtension extends RuntimeChildAny { -// private RuntimeChildUndeclaredExtensionDefinition myExtensionElement; + // private RuntimeChildUndeclaredExtensionDefinition myExtensionElement; - public RuntimeChildExtension(Field theField, String theElementName, Child theChildAnnotation, Description theDescriptionAnnotation) { + public RuntimeChildExtension( + Field theField, String theElementName, Child theChildAnnotation, Description theDescriptionAnnotation) { super(theField, theElementName, theChildAnnotation, theDescriptionAnnotation); } @@ -53,21 +53,21 @@ public class RuntimeChildExtension extends RuntimeChildAny { } return super.getChildByName(theName); } - -// @Override -// public BaseRuntimeElementDefinition getChildElementDefinitionByDatatype(Class theDatatype) { -// if (IBaseExtension.class.isAssignableFrom(theDatatype)) { -// return myExtensionElement; -// } -// return super.getChildElementDefinitionByDatatype(theDatatype); -// } -// -// @Override -// void sealAndInitialize(FhirContext theContext, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { -// super.sealAndInitialize(theContext, theClassToElementDefinitions); -// -// myExtensionElement = theContext.getRuntimeChildUndeclaredExtensionDefinition(); -// } + // @Override + // public BaseRuntimeElementDefinition getChildElementDefinitionByDatatype(Class theDatatype) { + // if (IBaseExtension.class.isAssignableFrom(theDatatype)) { + // return myExtensionElement; + // } + // return super.getChildElementDefinitionByDatatype(theDatatype); + // } + // + // @Override + // void sealAndInitialize(FhirContext theContext, Map, BaseRuntimeElementDefinition> + // theClassToElementDefinitions) { + // super.sealAndInitialize(theContext, theClassToElementDefinitions); + // + // myExtensionElement = theContext.getRuntimeChildUndeclaredExtensionDefinition(); + // } } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildNarrativeDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildNarrativeDefinition.java index a0889b5cfe4..b9e8162d1b5 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildNarrativeDefinition.java +++ 
b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildNarrativeDefinition.java @@ -19,18 +19,20 @@ */ package ca.uhn.fhir.context; -import java.lang.reflect.Field; - -import org.hl7.fhir.instance.model.api.IBase; - import ca.uhn.fhir.model.api.annotation.Child; import ca.uhn.fhir.model.api.annotation.Description; +import org.hl7.fhir.instance.model.api.IBase; + +import java.lang.reflect.Field; public class RuntimeChildNarrativeDefinition extends RuntimeChildCompositeDatatypeDefinition { - public RuntimeChildNarrativeDefinition(Field theField, String theElementName, Child theChildAnnotation, Description theDescriptionAnnotation, Class theDatatype) { - super(theField, theElementName, theChildAnnotation,theDescriptionAnnotation, theDatatype); + public RuntimeChildNarrativeDefinition( + Field theField, + String theElementName, + Child theChildAnnotation, + Description theDescriptionAnnotation, + Class theDatatype) { + super(theField, theElementName, theChildAnnotation, theDescriptionAnnotation, theDatatype); } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildPrimitiveBoundCodeDatatypeDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildPrimitiveBoundCodeDatatypeDefinition.java index b56696c2dfb..ecbfb1d6bd3 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildPrimitiveBoundCodeDatatypeDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildPrimitiveBoundCodeDatatypeDefinition.java @@ -19,19 +19,25 @@ */ package ca.uhn.fhir.context; -import java.lang.reflect.Field; - -import org.hl7.fhir.instance.model.api.IBase; - import ca.uhn.fhir.model.api.annotation.Child; import ca.uhn.fhir.model.api.annotation.Description; +import org.hl7.fhir.instance.model.api.IBase; + +import java.lang.reflect.Field; public class RuntimeChildPrimitiveBoundCodeDatatypeDefinition extends RuntimeChildPrimitiveDatatypeDefinition { private Object myBinder; private Class> myEnumType; - public RuntimeChildPrimitiveBoundCodeDatatypeDefinition(Field theField, String theElementName, Child theChildAnnotation, Description theDescriptionAnnotation, Class theDatatype, Object theBinder, Class> theEnumType) { + public RuntimeChildPrimitiveBoundCodeDatatypeDefinition( + Field theField, + String theElementName, + Child theChildAnnotation, + Description theDescriptionAnnotation, + Class theDatatype, + Object theBinder, + Class> theEnumType) { super(theField, theElementName, theDescriptionAnnotation, theChildAnnotation, theDatatype); myBinder = theBinder; @@ -47,5 +53,4 @@ public class RuntimeChildPrimitiveBoundCodeDatatypeDefinition extends RuntimeChi public Object getInstanceConstructorArguments() { return myBinder; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildPrimitiveDatatypeDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildPrimitiveDatatypeDefinition.java index df760077a49..b913d51ed1e 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildPrimitiveDatatypeDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildPrimitiveDatatypeDefinition.java @@ -19,17 +19,20 @@ */ package ca.uhn.fhir.context; -import java.lang.reflect.Field; - -import org.hl7.fhir.instance.model.api.IBase; - import ca.uhn.fhir.model.api.annotation.Child; import ca.uhn.fhir.model.api.annotation.Description; +import org.hl7.fhir.instance.model.api.IBase; + +import java.lang.reflect.Field; public class RuntimeChildPrimitiveDatatypeDefinition 
extends BaseRuntimeChildDatatypeDefinition { - public RuntimeChildPrimitiveDatatypeDefinition(Field theField, String theElementName, Description theDescriptionAnnotation, Child theChildAnnotation, Class theDatatype) { + public RuntimeChildPrimitiveDatatypeDefinition( + Field theField, + String theElementName, + Description theDescriptionAnnotation, + Child theChildAnnotation, + Class theDatatype) { super(theField, theElementName, theChildAnnotation, theDescriptionAnnotation, theDatatype); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildPrimitiveEnumerationDatatypeDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildPrimitiveEnumerationDatatypeDefinition.java index 16c665cf585..4734bf812a6 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildPrimitiveEnumerationDatatypeDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildPrimitiveEnumerationDatatypeDefinition.java @@ -31,7 +31,13 @@ public class RuntimeChildPrimitiveEnumerationDatatypeDefinition extends RuntimeC private Object myBinder; private Class> myEnumType; - public RuntimeChildPrimitiveEnumerationDatatypeDefinition(Field theField, String theElementName, Child theChildAnnotation, Description theDescriptionAnnotation, Class theDatatype, Class> theBinderType) { + public RuntimeChildPrimitiveEnumerationDatatypeDefinition( + Field theField, + String theElementName, + Child theChildAnnotation, + Description theDescriptionAnnotation, + Class theDatatype, + Class> theBinderType) { super(theField, theElementName, theDescriptionAnnotation, theChildAnnotation, theDatatype); myEnumType = theBinderType; @@ -64,5 +70,4 @@ public class RuntimeChildPrimitiveEnumerationDatatypeDefinition extends RuntimeC } return retVal; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildResourceBlockDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildResourceBlockDefinition.java index 54c543c0f94..a3862bfeeff 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildResourceBlockDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildResourceBlockDefinition.java @@ -19,23 +19,29 @@ */ package ca.uhn.fhir.context; +import ca.uhn.fhir.model.api.annotation.Child; +import ca.uhn.fhir.model.api.annotation.Description; +import org.hl7.fhir.instance.model.api.IBase; + import java.lang.reflect.Field; import java.util.Collections; import java.util.Map; import java.util.Set; -import org.hl7.fhir.instance.model.api.IBase; - -import ca.uhn.fhir.model.api.annotation.Child; -import ca.uhn.fhir.model.api.annotation.Description; - public class RuntimeChildResourceBlockDefinition extends BaseRuntimeDeclaredChildDefinition { -// private RuntimeResourceBlockDefinition myElementDef; + // private RuntimeResourceBlockDefinition myElementDef; private Class myResourceBlockType; private FhirContext myContext; - public RuntimeChildResourceBlockDefinition(FhirContext theContext, Field theField, Child theChildAnnotation, Description theDescriptionAnnotation, String theElementName, Class theResourceBlockType) throws ConfigurationException { + public RuntimeChildResourceBlockDefinition( + FhirContext theContext, + Field theField, + Child theChildAnnotation, + Description theDescriptionAnnotation, + String theElementName, + Class theResourceBlockType) + throws ConfigurationException { super(theField, theChildAnnotation, theDescriptionAnnotation, theElementName); myContext = theContext; 
myResourceBlockType = theResourceBlockType; @@ -75,8 +81,9 @@ public class RuntimeChildResourceBlockDefinition extends BaseRuntimeDeclaredChil } @Override - void sealAndInitialize(FhirContext theContext, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { -// myElementDef = (RuntimeResourceBlockDefinition) theClassToElementDefinitions.get(myResourceBlockType); + void sealAndInitialize( + FhirContext theContext, + Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { + // myElementDef = (RuntimeResourceBlockDefinition) theClassToElementDefinitions.get(myResourceBlockType); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildResourceDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildResourceDefinition.java index 47e0f738561..6fc8987c6cd 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildResourceDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildResourceDefinition.java @@ -42,7 +42,12 @@ public class RuntimeChildResourceDefinition extends BaseRuntimeDeclaredChildDefi /** * Constructor */ - public RuntimeChildResourceDefinition(Field theField, String theElementName, Child theChildAnnotation, Description theDescriptionAnnotation, List> theResourceTypes) { + public RuntimeChildResourceDefinition( + Field theField, + String theElementName, + Child theChildAnnotation, + Description theDescriptionAnnotation, + List> theResourceTypes) { super(theField, theChildAnnotation, theDescriptionAnnotation, theElementName); myResourceTypes = theResourceTypes; @@ -79,12 +84,14 @@ public class RuntimeChildResourceDefinition extends BaseRuntimeDeclaredChildDefi } @Override - void sealAndInitialize(FhirContext theContext, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { + void sealAndInitialize( + FhirContext theContext, + Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { myRuntimeDef = findResourceReferenceDefinition(theClassToElementDefinitions); myValidChildNames = new HashSet(); myValidChildNames.add(getElementName()); - + /* * [elementName]Resource is not actually valid FHIR but we've encountered it in the wild * so we'll accept it just to be nice @@ -94,26 +101,28 @@ public class RuntimeChildResourceDefinition extends BaseRuntimeDeclaredChildDefi /* * Below has been disabled- We used to allow field names to contain the name of the resource * that they accepted. This wasn't valid but we accepted it just to be flexible because there - * were some bad examples containing this. This causes conflicts with actual field names in - * recent definitions though, so it has been disabled as of HAPI 0.9 + * were some bad examples containing this. 
This causes conflicts with actual field names in + * recent definitions though, so it has been disabled as of HAPI 0.9 */ -// for (Class next : myResourceTypes) { -// if (next == IResource.class) { -// for (Entry, BaseRuntimeElementDefinition> nextEntry : theClassToElementDefinitions.entrySet()) { -// if (IResource.class.isAssignableFrom(nextEntry.getKey())) { -// RuntimeResourceDefinition nextDef = (RuntimeResourceDefinition) nextEntry.getValue(); -// myValidChildNames.add(getElementName() + nextDef.getName()); -// } -// } -// } -// else { -// RuntimeResourceDefinition nextDef = (RuntimeResourceDefinition) theClassToElementDefinitions.get(next); -// if (nextDef == null) { -// throw new ConfigurationException(Msg.code(1691) + "Can't find child of type: " + next.getCanonicalName() + " in " + getField().getDeclaringClass()); -// } -// myValidChildNames.add(getElementName() + nextDef.getName()); -// } -// } + // for (Class next : myResourceTypes) { + // if (next == IResource.class) { + // for (Entry, BaseRuntimeElementDefinition> nextEntry : + // theClassToElementDefinitions.entrySet()) { + // if (IResource.class.isAssignableFrom(nextEntry.getKey())) { + // RuntimeResourceDefinition nextDef = (RuntimeResourceDefinition) nextEntry.getValue(); + // myValidChildNames.add(getElementName() + nextDef.getName()); + // } + // } + // } + // else { + // RuntimeResourceDefinition nextDef = (RuntimeResourceDefinition) theClassToElementDefinitions.get(next); + // if (nextDef == null) { + // throw new ConfigurationException(Msg.code(1691) + "Can't find child of type: " + next.getCanonicalName() + // + " in " + getField().getDeclaringClass()); + // } + // myValidChildNames.add(getElementName() + nextDef.getName()); + // } + // } myResourceTypes = Collections.unmodifiableList(myResourceTypes); myValidChildNames = Collections.unmodifiableSet(myValidChildNames); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildUndeclaredExtensionDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildUndeclaredExtensionDefinition.java index 7a69e18c45d..d76ee7ebfa2 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildUndeclaredExtensionDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeChildUndeclaredExtensionDefinition.java @@ -37,7 +37,8 @@ public class RuntimeChildUndeclaredExtensionDefinition extends BaseRuntimeChildD private static final String VALUE_REFERENCE = "valueReference"; private static final String VALUE_RESOURCE = "valueResource"; - private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(RuntimeChildUndeclaredExtensionDefinition.class); + private static final org.slf4j.Logger ourLog = + org.slf4j.LoggerFactory.getLogger(RuntimeChildUndeclaredExtensionDefinition.class); private Map> myAttributeNameToDefinition; private Map, String> myDatatypeToAttributeName; private Map, BaseRuntimeElementDefinition> myDatatypeToDefinition; @@ -46,7 +47,10 @@ public class RuntimeChildUndeclaredExtensionDefinition extends BaseRuntimeChildD // nothing } - private void addReferenceBinding(FhirContext theContext, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions, String value) { + private void addReferenceBinding( + FhirContext theContext, + Map, BaseRuntimeElementDefinition> theClassToElementDefinitions, + String value) { BaseRuntimeElementDefinition def = findResourceReferenceDefinition(theClassToElementDefinitions); myAttributeNameToDefinition.put(value, def); @@ -58,7 +62,6 @@ public class 
RuntimeChildUndeclaredExtensionDefinition extends BaseRuntimeChildD myDatatypeToDefinition.put(BaseResourceReferenceDt.class, def); myDatatypeToDefinition.put(theContext.getVersion().getResourceReferenceType(), def); } - } @Override @@ -72,7 +75,6 @@ public class RuntimeChildUndeclaredExtensionDefinition extends BaseRuntimeChildD } return new ArrayList<>(target.getUndeclaredExtensions()); } - }; } @@ -134,7 +136,9 @@ public class RuntimeChildUndeclaredExtensionDefinition extends BaseRuntimeChildD } @Override - void sealAndInitialize(FhirContext theContext, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { + void sealAndInitialize( + FhirContext theContext, + Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { Map> datatypeAttributeNameToDefinition = new HashMap<>(); myDatatypeToAttributeName = new HashMap<>(); myDatatypeToDefinition = new HashMap<>(); @@ -149,7 +153,6 @@ public class RuntimeChildUndeclaredExtensionDefinition extends BaseRuntimeChildD ourLog.trace("Not adding specialization: {}", next.getImplementingClass()); } - if (!next.isStandardType()) { continue; } @@ -180,7 +183,10 @@ public class RuntimeChildUndeclaredExtensionDefinition extends BaseRuntimeChildD // CodeType should win. If we aren't in a situation like that, there is a problem with the // model so we should bail. if (!existing.isStandardType()) { - throw new ConfigurationException(Msg.code(1734) + "More than one child of " + getElementName() + " matches attribute name " + attrName + ". Found [" + existing.getImplementingClass().getName() + "] and [" + next.getImplementingClass().getName() + "]"); + throw new ConfigurationException(Msg.code(1734) + "More than one child of " + getElementName() + + " matches attribute name " + attrName + ". Found [" + + existing.getImplementingClass().getName() + "] and [" + + next.getImplementingClass().getName() + "]"); } } @@ -192,7 +198,6 @@ public class RuntimeChildUndeclaredExtensionDefinition extends BaseRuntimeChildD myAttributeNameToDefinition = datatypeAttributeNameToDefinition; - /* * Resource reference - The correct name is 'valueReference' in DSTU2 and 'valueResource' in DSTU1 */ @@ -203,5 +208,4 @@ public class RuntimeChildUndeclaredExtensionDefinition extends BaseRuntimeChildD public static String createExtensionChildName(BaseRuntimeElementDefinition next) { return "value" + WordUtils.capitalize(next.getName()); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeCompositeDatatypeDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeCompositeDatatypeDefinition.java index 7309d73fa60..8df563ddada 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeCompositeDatatypeDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeCompositeDatatypeDefinition.java @@ -30,32 +30,40 @@ import java.util.Map; import static org.apache.commons.lang3.StringUtils.isBlank; -public class RuntimeCompositeDatatypeDefinition extends BaseRuntimeElementCompositeDefinition implements IRuntimeDatatypeDefinition { +public class RuntimeCompositeDatatypeDefinition extends BaseRuntimeElementCompositeDefinition + implements IRuntimeDatatypeDefinition { private boolean mySpecialization; private Class myProfileOfType; private BaseRuntimeElementDefinition myProfileOf; - public RuntimeCompositeDatatypeDefinition(DatatypeDef theDef, Class theImplementingClass, boolean theStandardType, FhirContext theContext, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { + public 
RuntimeCompositeDatatypeDefinition( + DatatypeDef theDef, + Class theImplementingClass, + boolean theStandardType, + FhirContext theContext, + Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { super(theDef.name(), theImplementingClass, theStandardType, theContext, theClassToElementDefinitions); - + String resourceName = theDef.name(); if (isBlank(resourceName)) { - throw new ConfigurationException(Msg.code(1712) + "Resource type @" + ResourceDef.class.getSimpleName() + " annotation contains no resource name: " + theImplementingClass.getCanonicalName()); + throw new ConfigurationException(Msg.code(1712) + "Resource type @" + ResourceDef.class.getSimpleName() + + " annotation contains no resource name: " + theImplementingClass.getCanonicalName()); } - + mySpecialization = theDef.isSpecialization(); myProfileOfType = theDef.profileOf(); if (myProfileOfType.equals(IBaseDatatype.class)) { myProfileOfType = null; } - } @Override - public void sealAndInitialize(FhirContext theContext, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { + public void sealAndInitialize( + FhirContext theContext, + Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { super.sealAndInitialize(theContext, theClassToElementDefinitions); - + if (myProfileOfType != null) { myProfileOf = theClassToElementDefinitions.get(myProfileOfType); if (myProfileOf == null) { @@ -91,6 +99,4 @@ public class RuntimeCompositeDatatypeDefinition extends BaseRuntimeElementCompos } return false; } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeElemContainedResourceList.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeElemContainedResourceList.java index e8ad9eefc26..7538a6c3d1b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeElemContainedResourceList.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeElemContainedResourceList.java @@ -22,7 +22,7 @@ package ca.uhn.fhir.context; import org.hl7.fhir.instance.model.api.IBaseResource; /** - * HL7org structures use a List for contained instead of a distinct datatype + * HL7org structures use a List for contained instead of a distinct datatype */ public class RuntimeElemContainedResourceList extends BaseRuntimeElementDefinition { @@ -34,5 +34,4 @@ public class RuntimeElemContainedResourceList extends BaseRuntimeElementDefiniti public ca.uhn.fhir.context.BaseRuntimeElementDefinition.ChildTypeEnum getChildType() { return ChildTypeEnum.CONTAINED_RESOURCE_LIST; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeElemContainedResources.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeElemContainedResources.java index 9ef2d9117bd..66a623823f6 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeElemContainedResources.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeElemContainedResources.java @@ -32,5 +32,4 @@ public class RuntimeElemContainedResources extends BaseRuntimeElementDefinition< public ca.uhn.fhir.context.BaseRuntimeElementDefinition.ChildTypeEnum getChildType() { return ChildTypeEnum.CONTAINED_RESOURCES; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeElementDirectResource.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeElementDirectResource.java index 4fadae7243e..4857a566328 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeElementDirectResource.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeElementDirectResource.java 
@@ -31,5 +31,4 @@ public class RuntimeElementDirectResource extends BaseRuntimeElementDefinition myChildren; - public RuntimeExtensionDtDefinition(DatatypeDef theDef, Class theImplementingClass, boolean theStandardType, FhirContext theContext, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { + public RuntimeExtensionDtDefinition( + DatatypeDef theDef, + Class theImplementingClass, + boolean theStandardType, + FhirContext theContext, + Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { super(theDef, theImplementingClass, theStandardType, theContext, theClassToElementDefinitions); } @@ -47,14 +51,16 @@ public class RuntimeExtensionDtDefinition extends RuntimeCompositeDatatypeDefini } @Override - public void sealAndInitialize(FhirContext theContext, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { + public void sealAndInitialize( + FhirContext theContext, + Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { super.sealAndInitialize(theContext, theClassToElementDefinitions); - + /* * The "url" child is a weird child because it is not parsed and encoded in the normal way, - * so we exclude it here + * so we exclude it here */ - + List superChildren = super.getChildren(); ArrayList children = new ArrayList(); for (BaseRuntimeChildDefinition baseRuntimeChildDefinition : superChildren) { @@ -63,8 +69,7 @@ public class RuntimeExtensionDtDefinition extends RuntimeCompositeDatatypeDefini } children.add(baseRuntimeChildDefinition); } - + myChildren = Collections.unmodifiableList(children); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeIdDatatypeDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeIdDatatypeDefinition.java index dce07479870..8feaee1e2c8 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeIdDatatypeDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeIdDatatypeDefinition.java @@ -19,13 +19,14 @@ */ package ca.uhn.fhir.context; +import ca.uhn.fhir.model.api.annotation.DatatypeDef; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import ca.uhn.fhir.model.api.annotation.DatatypeDef; +public class RuntimeIdDatatypeDefinition extends RuntimePrimitiveDatatypeDefinition + implements IRuntimeDatatypeDefinition { -public class RuntimeIdDatatypeDefinition extends RuntimePrimitiveDatatypeDefinition implements IRuntimeDatatypeDefinition { - - public RuntimeIdDatatypeDefinition(DatatypeDef theDef, Class> theImplementingClass, boolean theStandardType) { + public RuntimeIdDatatypeDefinition( + DatatypeDef theDef, Class> theImplementingClass, boolean theStandardType) { super(theDef, theImplementingClass, theStandardType); } @@ -33,5 +34,4 @@ public class RuntimeIdDatatypeDefinition extends RuntimePrimitiveDatatypeDefinit public ca.uhn.fhir.context.BaseRuntimeElementDefinition.ChildTypeEnum getChildType() { return ChildTypeEnum.ID_DATATYPE; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimePrimitiveDatatypeDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimePrimitiveDatatypeDefinition.java index 664e56d3b70..cedc7ec918b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimePrimitiveDatatypeDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimePrimitiveDatatypeDefinition.java @@ -35,7 +35,8 @@ import java.util.Map; import static org.apache.commons.lang3.StringUtils.isBlank; -public class RuntimePrimitiveDatatypeDefinition extends 
BaseRuntimeElementDefinition> implements IRuntimeDatatypeDefinition { +public class RuntimePrimitiveDatatypeDefinition extends BaseRuntimeElementDefinition> + implements IRuntimeDatatypeDefinition { private Class myNativeType; private BaseRuntimeElementDefinition myProfileOf; @@ -44,12 +45,14 @@ public class RuntimePrimitiveDatatypeDefinition extends BaseRuntimeElementDefini private List myChildren; private RuntimeChildExt myRuntimeChildExt; - public RuntimePrimitiveDatatypeDefinition(DatatypeDef theDef, Class> theImplementingClass, boolean theStandardType) { + public RuntimePrimitiveDatatypeDefinition( + DatatypeDef theDef, Class> theImplementingClass, boolean theStandardType) { super(theDef.name(), theImplementingClass, theStandardType); String resourceName = theDef.name(); if (isBlank(resourceName)) { - throw new ConfigurationException(Msg.code(1689) + "Resource type @" + ResourceDef.class.getSimpleName() + " annotation contains no resource name: " + theImplementingClass.getCanonicalName()); + throw new ConfigurationException(Msg.code(1689) + "Resource type @" + ResourceDef.class.getSimpleName() + + " annotation contains no resource name: " + theImplementingClass.getCanonicalName()); } mySpecialization = theDef.isSpecialization(); @@ -83,7 +86,8 @@ public class RuntimePrimitiveDatatypeDefinition extends BaseRuntimeElementDefini Type rawType = superPt.getRawType(); if (rawType instanceof Class) { Class rawClass = (Class) rawType; - if (rawClass.getName().endsWith(".BasePrimitive") || rawClass.getName().endsWith(".PrimitiveType")) { + if (rawClass.getName().endsWith(".BasePrimitive") + || rawClass.getName().endsWith(".PrimitiveType")) { Type typeVariable = superPt.getActualTypeArguments()[0]; if (typeVariable instanceof Class) { myNativeType = (Class) typeVariable; @@ -128,7 +132,9 @@ public class RuntimePrimitiveDatatypeDefinition extends BaseRuntimeElementDefini } @Override - void sealAndInitialize(FhirContext theContext, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { + void sealAndInitialize( + FhirContext theContext, + Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { super.sealAndInitialize(theContext, theClassToElementDefinitions); if (myProfileOfType != null) { @@ -153,5 +159,4 @@ public class RuntimePrimitiveDatatypeDefinition extends BaseRuntimeElementDefini myChildren.add(myRuntimeChildExt); myChildren = Collections.unmodifiableList(myChildren); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimePrimitiveDatatypeNarrativeDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimePrimitiveDatatypeNarrativeDefinition.java index 5c29d6a716e..f4e12118b7f 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimePrimitiveDatatypeNarrativeDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimePrimitiveDatatypeNarrativeDefinition.java @@ -19,15 +19,15 @@ */ package ca.uhn.fhir.context; -import java.util.Map; - +import ca.uhn.fhir.model.primitive.XhtmlDt; import org.hl7.fhir.instance.model.api.IBase; -import ca.uhn.fhir.model.primitive.XhtmlDt; +import java.util.Map; -public class RuntimePrimitiveDatatypeNarrativeDefinition extends BaseRuntimeElementDefinition { +public class RuntimePrimitiveDatatypeNarrativeDefinition extends BaseRuntimeElementDefinition { - public RuntimePrimitiveDatatypeNarrativeDefinition(String theName, Class theImplementingClass, boolean theStandardType) { + public RuntimePrimitiveDatatypeNarrativeDefinition( + String theName, Class theImplementingClass, 
boolean theStandardType) { super(theName, theImplementingClass, theStandardType); } @@ -37,8 +37,9 @@ public class RuntimePrimitiveDatatypeNarrativeDefinition extends BaseRuntimeEle } @Override - void sealAndInitialize(FhirContext theContext, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { + void sealAndInitialize( + FhirContext theContext, + Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { // nothing } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimePrimitiveDatatypeXhtmlHl7OrgDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimePrimitiveDatatypeXhtmlHl7OrgDefinition.java index dbc81d960ca..adc80ca73ec 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimePrimitiveDatatypeXhtmlHl7OrgDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimePrimitiveDatatypeXhtmlHl7OrgDefinition.java @@ -19,14 +19,15 @@ */ package ca.uhn.fhir.context; -import java.util.Map; - import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseXhtml; -public class RuntimePrimitiveDatatypeXhtmlHl7OrgDefinition extends BaseRuntimeElementDefinition { +import java.util.Map; - public RuntimePrimitiveDatatypeXhtmlHl7OrgDefinition(String theName, Class theImplementingClass, boolean theStandardType) { +public class RuntimePrimitiveDatatypeXhtmlHl7OrgDefinition extends BaseRuntimeElementDefinition { + + public RuntimePrimitiveDatatypeXhtmlHl7OrgDefinition( + String theName, Class theImplementingClass, boolean theStandardType) { super(theName, theImplementingClass, theStandardType); } @@ -36,8 +37,9 @@ public class RuntimePrimitiveDatatypeXhtmlHl7OrgDefinition extends BaseRuntimeE } @Override - void sealAndInitialize(FhirContext theContext, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { + void sealAndInitialize( + FhirContext theContext, + Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { // nothing } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeResourceBlockDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeResourceBlockDefinition.java index 693000f8dd8..2477d38b6b0 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeResourceBlockDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeResourceBlockDefinition.java @@ -19,13 +19,18 @@ */ package ca.uhn.fhir.context; -import java.util.Map; - import org.hl7.fhir.instance.model.api.IBase; +import java.util.Map; + public class RuntimeResourceBlockDefinition extends BaseRuntimeElementCompositeDefinition { - public RuntimeResourceBlockDefinition(String theName, Class theImplementingClass, boolean theStandardType, FhirContext theContext, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { + public RuntimeResourceBlockDefinition( + String theName, + Class theImplementingClass, + boolean theStandardType, + FhirContext theContext, + Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { super(theName, theImplementingClass, theStandardType, theContext, theClassToElementDefinitions); } @@ -33,5 +38,4 @@ public class RuntimeResourceBlockDefinition extends BaseRuntimeElementCompositeD public ca.uhn.fhir.context.BaseRuntimeElementDefinition.ChildTypeEnum getChildType() { return ChildTypeEnum.RESOURCE_BLOCK; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeResourceDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeResourceDefinition.java index 
5e520ad95e2..758fd51ac9c 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeResourceDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeResourceDefinition.java @@ -48,9 +48,13 @@ public class RuntimeResourceDefinition extends BaseRuntimeElementCompositeDefini private final FhirVersionEnum myStructureVersion; private volatile RuntimeResourceDefinition myBaseDefinition; - - - public RuntimeResourceDefinition(FhirContext theContext, String theResourceName, Class theClass, ResourceDef theResourceAnnotation, boolean theStandardType, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { + public RuntimeResourceDefinition( + FhirContext theContext, + String theResourceName, + Class theClass, + ResourceDef theResourceAnnotation, + boolean theStandardType, + Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { super(theResourceName, theClass, theStandardType, theContext, theClassToElementDefinitions); myContext = theContext; myResourceProfile = theResourceAnnotation.profile(); @@ -60,23 +64,35 @@ public class RuntimeResourceDefinition extends BaseRuntimeElementCompositeDefini try { instance = theClass.getConstructor().newInstance(); } catch (Exception e) { - throw new ConfigurationException(Msg.code(1730) + myContext.getLocalizer().getMessage(getClass(), "nonInstantiableType", theClass.getName(), e.toString()), e); + throw new ConfigurationException( + Msg.code(1730) + + myContext + .getLocalizer() + .getMessage(getClass(), "nonInstantiableType", theClass.getName(), e.toString()), + e); } myStructureVersion = instance.getStructureFhirVersionEnum(); if (myStructureVersion != theContext.getVersion().getVersion()) { - if (myStructureVersion == FhirVersionEnum.R5 && theContext.getVersion().getVersion() == FhirVersionEnum.R4B) { + if (myStructureVersion == FhirVersionEnum.R5 + && theContext.getVersion().getVersion() == FhirVersionEnum.R4B) { // TODO: remove this exception once we've bumped FHIR core to a new version // TODO: also fix the TODO in ModelScanner // TODO: also fix the TODO in RestfulServerUtils // TODO: also fix the TODO in BaseParser } else { - throw new ConfigurationException(Msg.code(1731) + myContext.getLocalizer().getMessage(getClass(), "typeWrongVersion", theContext.getVersion().getVersion(), theClass.getName(), myStructureVersion)); + throw new ConfigurationException(Msg.code(1731) + + myContext + .getLocalizer() + .getMessage( + getClass(), + "typeWrongVersion", + theContext.getVersion().getVersion(), + theClass.getName(), + myStructureVersion)); } } - } - public void addSearchParam(RuntimeSearchParam theParam) { myNameToSearchParam.put(theParam.getName(), theParam); } @@ -113,7 +129,8 @@ public class RuntimeResourceDefinition extends BaseRuntimeElementCompositeDefini @SuppressWarnings("unchecked") public Class getImplementingClass(Class theClass) { if (!theClass.isAssignableFrom(getImplementingClass())) { - throw new ConfigurationException(Msg.code(1732) + "Unable to convert " + getImplementingClass() + " to " + theClass); + throw new ConfigurationException( + Msg.code(1732) + "Unable to convert " + getImplementingClass() + " to " + theClass); } return (Class) getImplementingClass(); } @@ -176,7 +193,9 @@ public class RuntimeResourceDefinition extends BaseRuntimeElementCompositeDefini @SuppressWarnings("unchecked") @Override - public void sealAndInitialize(FhirContext theContext, Map, BaseRuntimeElementDefinition> theClassToElementDefinitions) { + public void sealAndInitialize( + FhirContext theContext, + Map, 
BaseRuntimeElementDefinition> theClassToElementDefinitions) { super.sealAndInitialize(theContext, theClassToElementDefinitions); myNameToSearchParam = Collections.unmodifiableMap(myNameToSearchParam); @@ -202,7 +221,8 @@ public class RuntimeResourceDefinition extends BaseRuntimeElementCompositeDefini if (!compartmentNameToSearchParams.containsKey(nextCompartment)) { compartmentNameToSearchParams.put(nextCompartment, new ArrayList<>()); } - List searchParamsForCompartment = compartmentNameToSearchParams.get(nextCompartment); + List searchParamsForCompartment = + compartmentNameToSearchParams.get(nextCompartment); searchParamsForCompartment.add(next); /* @@ -239,7 +259,7 @@ public class RuntimeResourceDefinition extends BaseRuntimeElementCompositeDefini myBaseType = (Class) target; } } while (target.equals(Object.class) == false); - + /* * See #504: * Bundle types may not have extensions @@ -247,11 +267,11 @@ public class RuntimeResourceDefinition extends BaseRuntimeElementCompositeDefini if (hasExtensions()) { if (IAnyResource.class.isAssignableFrom(getImplementingClass())) { if (!IDomainResource.class.isAssignableFrom(getImplementingClass())) { - throw new ConfigurationException(Msg.code(1733) + "Class \"" + getImplementingClass() + "\" is invalid. This resource type is not a DomainResource, it must not have extensions"); + throw new ConfigurationException(Msg.code(1733) + "Class \"" + getImplementingClass() + + "\" is invalid. This resource type is not a DomainResource, it must not have extensions"); } } } - } private String massagePathForCompartmentSimilarity(String thePath) { @@ -286,5 +306,4 @@ public class RuntimeResourceDefinition extends BaseRuntimeElementCompositeDefini return retVal; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeSearchParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeSearchParam.java index 2dc1220b4c9..cba53cb118c 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeSearchParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeSearchParam.java @@ -20,7 +20,6 @@ package ca.uhn.fhir.context; import ca.uhn.fhir.context.phonetic.IPhoneticEncoder; -import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; @@ -28,10 +27,7 @@ import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; import org.hl7.fhir.instance.model.api.IBaseExtension; import org.hl7.fhir.instance.model.api.IIdType; -import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -39,9 +35,10 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Optional; import java.util.Set; import java.util.StringTokenizer; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.isNotBlank; import static org.apache.commons.lang3.StringUtils.trim; @@ -66,22 +63,67 @@ public class RuntimeSearchParam { /** * Constructor */ - public RuntimeSearchParam(IIdType theId, String theUri, String theName, String theDescription, String thePath, RestSearchParameterTypeEnum theParamType, - Set theProvidesMembershipInCompartments, Set theTargets, RuntimeSearchParamStatusEnum 
theStatus, Collection theBase) { - this(theId, theUri, theName, theDescription, thePath, theParamType, theProvidesMembershipInCompartments, theTargets, theStatus, null, Collections.emptyList(), theBase); + public RuntimeSearchParam( + IIdType theId, + String theUri, + String theName, + String theDescription, + String thePath, + RestSearchParameterTypeEnum theParamType, + Set theProvidesMembershipInCompartments, + Set theTargets, + RuntimeSearchParamStatusEnum theStatus, + Collection theBase) { + this( + theId, + theUri, + theName, + theDescription, + thePath, + theParamType, + theProvidesMembershipInCompartments, + theTargets, + theStatus, + null, + Collections.emptyList(), + theBase); } /** * Copy constructor */ public RuntimeSearchParam(RuntimeSearchParam theSp) { - this(theSp.getId(), theSp.getUri(), theSp.getName(), theSp.getDescription(), theSp.getPath(), theSp.getParamType(), theSp.getProvidesMembershipInCompartments(), theSp.getTargets(), theSp.getStatus(), theSp.getComboSearchParamType(), theSp.getComponents(), theSp.getBase()); + this( + theSp.getId(), + theSp.getUri(), + theSp.getName(), + theSp.getDescription(), + theSp.getPath(), + theSp.getParamType(), + theSp.getProvidesMembershipInCompartments(), + theSp.getTargets(), + theSp.getStatus(), + theSp.getComboSearchParamType(), + theSp.getComponents(), + theSp.getBase()); } /** * Constructor */ - public RuntimeSearchParam(IIdType theId, String theUri, String theName, String theDescription, String thePath, RestSearchParameterTypeEnum theParamType, Set theProvidesMembershipInCompartments, Set theTargets, RuntimeSearchParamStatusEnum theStatus, ComboSearchParamType theComboSearchParamType, List theComponents, Collection theBase) { + public RuntimeSearchParam( + IIdType theId, + String theUri, + String theName, + String theDescription, + String thePath, + RestSearchParameterTypeEnum theParamType, + Set theProvidesMembershipInCompartments, + Set theTargets, + RuntimeSearchParamStatusEnum theStatus, + ComboSearchParamType theComboSearchParamType, + List theComponents, + Collection theBase) { super(); myId = theId; @@ -160,12 +202,12 @@ public class RuntimeSearchParam { @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("base", myBase) - .append("name", myName) - .append("path", myPath) - .append("id", myId) - .append("uri", myUri) - .toString(); + .append("base", myBase) + .append("name", myName) + .append("path", myPath) + .append("id", myId) + .append("uri", myUri) + .toString(); } public IIdType getId() { @@ -185,21 +227,21 @@ public class RuntimeSearchParam { RuntimeSearchParam that = (RuntimeSearchParam) theO; return new EqualsBuilder() - .append(getId(), that.getId()) - .append(getName(), that.getName()) - .append(getPath(), that.getPath()) - .append(getUri(), that.getUri()) - .isEquals(); + .append(getId(), that.getId()) + .append(getName(), that.getName()) + .append(getPath(), that.getPath()) + .append(getUri(), that.getUri()) + .isEquals(); } @Override public int hashCode() { return new HashCodeBuilder(17, 37) - .append(getId()) - .append(getName()) - .append(getPath()) - .append(getUri()) - .toHashCode(); + .append(getId()) + .append(getName()) + .append(getPath()) + .append(getUri()) + .toHashCode(); } public Set getBase() { @@ -375,15 +417,14 @@ public class RuntimeSearchParam { public Component(String theExpression, String theReference) { myExpression = theExpression; myReference = theReference; - } @Override public String toString() { return new 
ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("expression", myExpression) - .append("reference", myReference) - .toString(); + .append("expression", myExpression) + .append("reference", myReference) + .toString(); } public String getExpression() { @@ -394,5 +435,4 @@ public class RuntimeSearchParam { return myReference; } } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/api/AddProfileTagEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/api/AddProfileTagEnum.java index ed3b50af11e..b383746547b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/api/AddProfileTagEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/api/AddProfileTagEnum.java @@ -23,7 +23,7 @@ import ca.uhn.fhir.context.FhirContext; /** * RESTful server behaviour for automatically adding profile tags when serializing resources - * + * * @see FhirContext#setAddProfileTagWhenEncoding(AddProfileTagEnum) */ public enum AddProfileTagEnum { @@ -34,7 +34,7 @@ public enum AddProfileTagEnum { /** * Add any profile tags that returned resources appear to conform to - * + * * @deprecated This mode causes even FHIR's default profiles to be exported in the * resource metadata section. This is not generally expected behaviour from other * systems and it offers no real benefit, so it will be removed at some point. This diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/api/BundleInclusionRule.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/api/BundleInclusionRule.java index e7e951996e6..4488a821045 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/api/BundleInclusionRule.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/api/BundleInclusionRule.java @@ -19,18 +19,18 @@ */ package ca.uhn.fhir.context.api; -import java.util.Set; - import ca.uhn.fhir.model.api.Include; import ca.uhn.fhir.util.ResourceReferenceInfo; +import java.util.Set; + /** * Created by Bill de Beaubien on 3/4/2015. * * Controls how bundles decide whether referenced resources should be included */ public enum BundleInclusionRule { - + /** * Decision is based on whether the resource's Include is in the IncludeSet (e.g. DiagnosticReport.result). Note that the resource has to be populated to be included. 
* @@ -38,7 +38,8 @@ public enum BundleInclusionRule { */ BASED_ON_INCLUDES { @Override - public boolean shouldIncludeReferencedResource(ResourceReferenceInfo theReferenceInfo, Set theIncludes) { + public boolean shouldIncludeReferencedResource( + ResourceReferenceInfo theReferenceInfo, Set theIncludes) { return theReferenceInfo.matchesIncludeSet(theIncludes); } }, @@ -50,10 +51,12 @@ public enum BundleInclusionRule { */ BASED_ON_RESOURCE_PRESENCE { @Override - public boolean shouldIncludeReferencedResource(ResourceReferenceInfo theReferenceInfo, Set theIncludes) { + public boolean shouldIncludeReferencedResource( + ResourceReferenceInfo theReferenceInfo, Set theIncludes) { return true; } }; - public abstract boolean shouldIncludeReferencedResource(ResourceReferenceInfo theReferenceInfo, Set theIncludes); + public abstract boolean shouldIncludeReferencedResource( + ResourceReferenceInfo theReferenceInfo, Set theIncludes); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/phonetic/ApacheEncoder.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/phonetic/ApacheEncoder.java index a7517afb9ac..0257e4db986 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/phonetic/ApacheEncoder.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/phonetic/ApacheEncoder.java @@ -46,7 +46,8 @@ public class ApacheEncoder implements IPhoneticEncoder { @Override public String encode(String theString) { try { - // If the string contains a space, encode alpha parts separately so, for example, numbers are preserved in address lines. + // If the string contains a space, encode alpha parts separately so, for example, numbers are preserved in + // address lines. if (theString.contains(" ")) { return encodeStringWithSpaces(theString); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/phonetic/NumericEncoder.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/phonetic/NumericEncoder.java index 2976bf30aa5..a97bff3ef26 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/phonetic/NumericEncoder.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/phonetic/NumericEncoder.java @@ -22,7 +22,8 @@ package ca.uhn.fhir.context.phonetic; import com.google.common.base.CharMatcher; // Useful for numerical identifiers like phone numbers, address parts etc. -// This should not be used where decimals are important. A new "quantity encoder" should be added to handle cases like that. +// This should not be used where decimals are important. A new "quantity encoder" should be added to handle cases like +// that. 
public class NumericEncoder implements IPhoneticEncoder { @Override public String name() { diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/ConceptValidationOptions.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/ConceptValidationOptions.java index ee52d348bb0..d8223c6ba4f 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/ConceptValidationOptions.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/ConceptValidationOptions.java @@ -39,8 +39,8 @@ public class ConceptValidationOptions { @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("inferSystem", myInferSystem) - .toString(); + .append("inferSystem", myInferSystem) + .toString(); } public boolean isValidateDisplay() { diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/DefaultProfileValidationSupport.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/DefaultProfileValidationSupport.java index 77c8bab0e08..4a103500ebf 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/DefaultProfileValidationSupport.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/DefaultProfileValidationSupport.java @@ -27,12 +27,12 @@ import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nullable; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; +import javax.annotation.Nullable; /** * This class returns the vocabulary that is shipped with the base FHIR @@ -46,13 +46,15 @@ import java.util.Optional; */ public class DefaultProfileValidationSupport implements IValidationSupport { - private static final Map ourImplementations = Collections.synchronizedMap(new HashMap<>()); + private static final Map ourImplementations = + Collections.synchronizedMap(new HashMap<>()); private final FhirContext myCtx; /** * This module just delegates all calls to a concrete implementation which will * be in this field. Which implementation gets used depends on the FHIR version. */ private final IValidationSupport myDelegate; + private final Runnable myFlush; /** @@ -76,8 +78,12 @@ public class DefaultProfileValidationSupport implements IValidationSupport { * make this hard to clean up. At some point it'd be nice to figure out * a cleaner solution though. 
*/ - strategy = ReflectionUtil.newInstance("org.hl7.fhir.common.hapi.validation.support.DefaultProfileValidationSupportNpmStrategy", IValidationSupport.class, new Class[]{FhirContext.class}, new Object[]{theFhirContext}); - ((ILockable)strategy).lock(); + strategy = ReflectionUtil.newInstance( + "org.hl7.fhir.common.hapi.validation.support.DefaultProfileValidationSupportNpmStrategy", + IValidationSupport.class, + new Class[] {FhirContext.class}, + new Object[] {theFhirContext}); + ((ILockable) strategy).lock(); } else { strategy = new DefaultProfileValidationSupportBundleStrategy(theFhirContext); } @@ -87,9 +93,9 @@ public class DefaultProfileValidationSupport implements IValidationSupport { myDelegate = strategy; if (myDelegate instanceof DefaultProfileValidationSupportBundleStrategy) { - myFlush = ()->((DefaultProfileValidationSupportBundleStrategy) myDelegate).flush(); + myFlush = () -> ((DefaultProfileValidationSupportBundleStrategy) myDelegate).flush(); } else { - myFlush = ()->{}; + myFlush = () -> {}; } } @@ -109,7 +115,6 @@ public class DefaultProfileValidationSupport implements IValidationSupport { return myDelegate.fetchAllNonBaseStructureDefinitions(); } - @Override public IBaseResource fetchCodeSystem(String theSystem) { return myDelegate.fetchCodeSystem(theSystem); @@ -134,17 +139,18 @@ public class DefaultProfileValidationSupport implements IValidationSupport { return myCtx; } - @Nullable public static String getConformanceResourceUrl(FhirContext theFhirContext, IBaseResource theResource) { String urlValueString = null; - Optional urlValue = theFhirContext.getResourceDefinition(theResource).getChildByName("url").getAccessor().getFirstValueOrNull(theResource); + Optional urlValue = theFhirContext + .getResourceDefinition(theResource) + .getChildByName("url") + .getAccessor() + .getFirstValueOrNull(theResource); if (urlValue.isPresent()) { IPrimitiveType urlValueType = (IPrimitiveType) urlValue.get(); urlValueString = urlValueType.getValueAsString(); } return urlValueString; } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/DefaultProfileValidationSupportBundleStrategy.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/DefaultProfileValidationSupportBundleStrategy.java index 3d15a742b7e..22a130a98fe 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/DefaultProfileValidationSupportBundleStrategy.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/DefaultProfileValidationSupportBundleStrategy.java @@ -23,20 +23,16 @@ import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.context.RuntimeResourceDefinition; -import ca.uhn.fhir.context.support.DefaultProfileValidationSupport; -import ca.uhn.fhir.context.support.IValidationSupport; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.parser.LenientErrorHandler; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.util.BundleUtil; import ca.uhn.fhir.util.ClasspathUtil; import org.apache.commons.lang3.StringUtils; -import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nullable; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; @@ -46,8 +42,8 @@ import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; -import 
java.util.Optional; import java.util.Properties; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -55,7 +51,8 @@ class DefaultProfileValidationSupportBundleStrategy implements IValidationSuppor private static final String URL_PREFIX_STRUCTURE_DEFINITION = "http://hl7.org/fhir/StructureDefinition/"; private static final String URL_PREFIX_STRUCTURE_DEFINITION_BASE = "http://hl7.org/fhir/"; - private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(DefaultProfileValidationSupport.class); + private static final org.slf4j.Logger ourLog = + org.slf4j.LoggerFactory.getLogger(DefaultProfileValidationSupport.class); private final FhirContext myCtx; private Map myCodeSystems; @@ -73,7 +70,6 @@ class DefaultProfileValidationSupportBundleStrategy implements IValidationSuppor myCtx = theFhirContext; } - private void initializeResourceLists() { if (myTerminologyResources != null && myStructureDefinitionResources != null) { @@ -90,7 +86,8 @@ class DefaultProfileValidationSupportBundleStrategy implements IValidationSuppor terminologyResources.add("/org/hl7/fhir/instance/model/valueset/v3-codesystems.xml"); Properties profileNameProperties = new Properties(); try { - profileNameProperties.load(DefaultProfileValidationSupport.class.getResourceAsStream("/org/hl7/fhir/instance/model/profile/profiles.properties")); + profileNameProperties.load(DefaultProfileValidationSupport.class.getResourceAsStream( + "/org/hl7/fhir/instance/model/profile/profiles.properties")); for (Object nextKey : profileNameProperties.keySet()) { structureDefinitionResources.add("/org/hl7/fhir/instance/model/profile/" + nextKey); } @@ -148,7 +145,6 @@ class DefaultProfileValidationSupportBundleStrategy implements IValidationSuppor myStructureDefinitionResources = structureDefinitionResources; } - @Override public List fetchAllConformanceResources() { ArrayList retVal = new ArrayList<>(); @@ -169,7 +165,6 @@ class DefaultProfileValidationSupportBundleStrategy implements IValidationSuppor return null; } - @Override public IBaseResource fetchCodeSystem(String theSystem) { return fetchCodeSystemOrValueSet(theSystem, true); @@ -208,8 +203,12 @@ class DefaultProfileValidationSupportBundleStrategy implements IValidationSuppor candidate = valueSets.get(system); } - if (candidate != null && isNotBlank(version) && !system.startsWith("http://hl7.org") && !system.startsWith("http://terminology.hl7.org")) { - if (!StringUtils.equals(version, myCtx.newTerser().getSinglePrimitiveValueOrNull(candidate, "version"))) { + if (candidate != null + && isNotBlank(version) + && !system.startsWith("http://hl7.org") + && !system.startsWith("http://terminology.hl7.org")) { + if (!StringUtils.equals( + version, myCtx.newTerser().getSinglePrimitiveValueOrNull(candidate, "version"))) { candidate = null; } } @@ -239,7 +238,9 @@ class DefaultProfileValidationSupportBundleStrategy implements IValidationSuppor * search parameters eg "value.as(String)" when it should be * "value.as(string)". This lets us be a bit lenient about this. 
*/ - if (myCtx.getVersion().getVersion() == FhirVersionEnum.R4 || myCtx.getVersion().getVersion() == FhirVersionEnum.R4B || myCtx.getVersion().getVersion() == FhirVersionEnum.R5) { + if (myCtx.getVersion().getVersion() == FhirVersionEnum.R4 + || myCtx.getVersion().getVersion() == FhirVersionEnum.R4B + || myCtx.getVersion().getVersion() == FhirVersionEnum.R5) { String end = url.substring(URL_PREFIX_STRUCTURE_DEFINITION.length()); if (Character.isUpperCase(end.charAt(0))) { String newEnd = Character.toLowerCase(end.charAt(0)) + end.substring(1); @@ -251,9 +252,7 @@ class DefaultProfileValidationSupportBundleStrategy implements IValidationSuppor } } } - } - } return retVal; } @@ -289,7 +288,8 @@ class DefaultProfileValidationSupportBundleStrategy implements IValidationSuppor return structureDefinitions; } - private void loadCodeSystems(Map theCodeSystems, Map theValueSets, String theClasspath) { + private void loadCodeSystems( + Map theCodeSystems, Map theValueSets, String theClasspath) { ourLog.info("Loading CodeSystem/ValueSet from classpath: {}", theClasspath); InputStream inputStream = DefaultProfileValidationSupport.class.getResourceAsStream(theClasspath); InputStreamReader reader = null; @@ -319,8 +319,8 @@ class DefaultProfileValidationSupportBundleStrategy implements IValidationSuppor switch (myCtx.getVersion().getVersion()) { case DSTU2: case DSTU2_HL7ORG: - - IPrimitiveType codeSystem = myCtx.newTerser().getSingleValueOrNull(next, "ValueSet.codeSystem.system", IPrimitiveType.class); + IPrimitiveType codeSystem = myCtx.newTerser() + .getSingleValueOrNull(next, "ValueSet.codeSystem.system", IPrimitiveType.class); if (codeSystem != null && isNotBlank(codeSystem.getValueAsString())) { theCodeSystems.put(codeSystem.getValueAsString(), next); } @@ -336,8 +336,6 @@ class DefaultProfileValidationSupportBundleStrategy implements IValidationSuppor break; } } - - } } finally { try { @@ -356,15 +354,14 @@ class DefaultProfileValidationSupportBundleStrategy implements IValidationSuppor // Load built-in system if (myCtx.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3)) { - String storageCodeEnum = ClasspathUtil.loadResource("ca/uhn/fhir/context/support/HapiFhirStorageResponseCode.json"); - IBaseResource storageCodeCodeSystem = myCtx.newJsonParser().setParserErrorHandler(new LenientErrorHandler()).parseResource(storageCodeEnum); + String storageCodeEnum = + ClasspathUtil.loadResource("ca/uhn/fhir/context/support/HapiFhirStorageResponseCode.json"); + IBaseResource storageCodeCodeSystem = myCtx.newJsonParser() + .setParserErrorHandler(new LenientErrorHandler()) + .parseResource(storageCodeEnum); String url = myCtx.newTerser().getSinglePrimitiveValueOrNull(storageCodeCodeSystem, "url"); theCodeSystems.put(url, storageCodeCodeSystem); } - - - - } private void loadStructureDefinitions(Map theCodeSystems, String theClasspath) { @@ -372,7 +369,12 @@ class DefaultProfileValidationSupportBundleStrategy implements IValidationSuppor String packageUserData = null; if (myCtx.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3)) { - packageUserData = "hl7.fhir." + myCtx.getVersion().getVersion().name().replace("DSTU", "R").toLowerCase(Locale.US); + packageUserData = "hl7.fhir." 
+ + myCtx.getVersion() + .getVersion() + .name() + .replace("DSTU", "R") + .toLowerCase(Locale.US); } try (InputStream valueSetText = DefaultProfileValidationSupport.class.getResourceAsStream(theClasspath)) { @@ -389,7 +391,6 @@ class DefaultProfileValidationSupportBundleStrategy implements IValidationSuppor if (isNotBlank(url)) { theCodeSystems.put(url, next); } - } // This is used by the validator to determine which package a given SD came from. @@ -397,7 +398,6 @@ class DefaultProfileValidationSupportBundleStrategy implements IValidationSuppor if (packageUserData != null) { next.setUserData("package", packageUserData); } - } } } else { @@ -426,5 +426,4 @@ class DefaultProfileValidationSupportBundleStrategy implements IValidationSuppor ArrayList retVal = new ArrayList<>(theMap.values()); return (List) Collections.unmodifiableList(retVal); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/IValidationSupport.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/IValidationSupport.java index c0f181ef7ed..07b383687a5 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/IValidationSupport.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/IValidationSupport.java @@ -34,8 +34,6 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -43,6 +41,8 @@ import java.util.List; import java.util.Set; import java.util.function.Supplier; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.defaultString; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -80,7 +80,6 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; public interface IValidationSupport { String URL_PREFIX_VALUE_SET = "http://hl7.org/fhir/ValueSet/"; - /** * Expands the given portion of a ValueSet * @@ -91,7 +90,10 @@ public interface IValidationSupport { * @return The expansion, or null */ @Nullable - default ValueSetExpansionOutcome expandValueSet(ValidationSupportContext theValidationSupportContext, @Nullable ValueSetExpansionOptions theExpansionOptions, @Nonnull IBaseResource theValueSetToExpand) { + default ValueSetExpansionOutcome expandValueSet( + ValidationSupportContext theValidationSupportContext, + @Nullable ValueSetExpansionOptions theExpansionOptions, + @Nonnull IBaseResource theValueSetToExpand) { return null; } @@ -107,11 +109,16 @@ public interface IValidationSupport { * @since 6.0.0 */ @Nullable - default ValueSetExpansionOutcome expandValueSet(ValidationSupportContext theValidationSupportContext, @Nullable ValueSetExpansionOptions theExpansionOptions, @Nonnull String theValueSetUrlToExpand) throws ResourceNotFoundException { + default ValueSetExpansionOutcome expandValueSet( + ValidationSupportContext theValidationSupportContext, + @Nullable ValueSetExpansionOptions theExpansionOptions, + @Nonnull String theValueSetUrlToExpand) + throws ResourceNotFoundException { Validate.notBlank(theValueSetUrlToExpand, "theValueSetUrlToExpand must not be null or blank"); IBaseResource valueSet = fetchValueSet(theValueSetUrlToExpand); if (valueSet == null) { - throw new ResourceNotFoundException(Msg.code(2024) + "Unknown ValueSet: " + UrlUtil.escapeUrlParam(theValueSetUrlToExpand)); 
+ throw new ResourceNotFoundException( + Msg.code(2024) + "Unknown ValueSet: " + UrlUtil.escapeUrlParam(theValueSetUrlToExpand)); } return expandValueSet(theValidationSupportContext, theExpansionOptions, valueSet); } @@ -200,17 +207,14 @@ public interface IValidationSupport { Validate.notBlank(theUri, "theUri must not be null or blank"); if (theClass == null) { - Supplier[] sources = new Supplier[]{ - () -> fetchStructureDefinition(theUri), - () -> fetchValueSet(theUri), - () -> fetchCodeSystem(theUri) + Supplier[] sources = new Supplier[] { + () -> fetchStructureDefinition(theUri), () -> fetchValueSet(theUri), () -> fetchCodeSystem(theUri) }; - return (T) Arrays - .stream(sources) - .map(t -> t.get()) - .filter(t -> t != null) - .findFirst() - .orElse(null); + return (T) Arrays.stream(sources) + .map(t -> t.get()) + .filter(t -> t != null) + .findFirst() + .orElse(null); } switch (getFhirContext().getResourceType(theClass)) { @@ -289,7 +293,13 @@ public interface IValidationSupport { * @return Returns a validation result object */ @Nullable - default CodeValidationResult validateCode(@Nonnull ValidationSupportContext theValidationSupportContext, @Nonnull ConceptValidationOptions theOptions, String theCodeSystem, String theCode, String theDisplay, String theValueSetUrl) { + default CodeValidationResult validateCode( + @Nonnull ValidationSupportContext theValidationSupportContext, + @Nonnull ConceptValidationOptions theOptions, + String theCodeSystem, + String theCode, + String theDisplay, + String theValueSetUrl) { return null; } @@ -307,7 +317,13 @@ public interface IValidationSupport { * @return Returns a validation result object, or null if this validation support module can not handle this kind of request */ @Nullable - default CodeValidationResult validateCodeInValueSet(ValidationSupportContext theValidationSupportContext, ConceptValidationOptions theOptions, String theCodeSystem, String theCode, String theDisplay, @Nonnull IBaseResource theValueSet) { + default CodeValidationResult validateCodeInValueSet( + ValidationSupportContext theValidationSupportContext, + ConceptValidationOptions theOptions, + String theCodeSystem, + String theCode, + String theDisplay, + @Nonnull IBaseResource theValueSet) { return null; } @@ -321,7 +337,11 @@ public interface IValidationSupport { * @param theDisplayLanguage to filter out the designation by the display language. To return all designation, set this value to null. 
*/ @Nullable - default LookupCodeResult lookupCode(ValidationSupportContext theValidationSupportContext, String theSystem, String theCode, String theDisplayLanguage) { + default LookupCodeResult lookupCode( + ValidationSupportContext theValidationSupportContext, + String theSystem, + String theCode, + String theDisplayLanguage) { return null; } @@ -334,7 +354,8 @@ public interface IValidationSupport { * @param theCode The code */ @Nullable - default LookupCodeResult lookupCode(ValidationSupportContext theValidationSupportContext, String theSystem, String theCode) { + default LookupCodeResult lookupCode( + ValidationSupportContext theValidationSupportContext, String theSystem, String theCode) { return lookupCode(theValidationSupportContext, theSystem, theCode, null); } @@ -358,7 +379,12 @@ public interface IValidationSupport { * @return Returns null if this module does not know how to handle this request */ @Nullable - default IBaseResource generateSnapshot(ValidationSupportContext theValidationSupportContext, IBaseResource theInput, String theUrl, String theWebUrl, String theProfileName) { + default IBaseResource generateSnapshot( + ValidationSupportContext theValidationSupportContext, + IBaseResource theInput, + String theUrl, + String theWebUrl, + String theProfileName) { return null; } @@ -755,11 +781,13 @@ public interface IValidationSupport { public void throwNotFoundIfAppropriate() { if (isFound() == false) { - throw new ResourceNotFoundException(Msg.code(1738) + "Unable to find code[" + getSearchedForCode() + "] in system[" + getSearchedForSystem() + "]"); + throw new ResourceNotFoundException(Msg.code(1738) + "Unable to find code[" + getSearchedForCode() + + "] in system[" + getSearchedForSystem() + "]"); } } - public IBaseParameters toParameters(FhirContext theContext, List> theProperties) { + public IBaseParameters toParameters( + FhirContext theContext, List> theProperties) { IBaseParameters retVal = ParametersUtil.newInstance(theContext); if (isNotBlank(getCodeSystemDisplayName())) { @@ -775,10 +803,9 @@ public interface IValidationSupport { Set properties = Collections.emptySet(); if (theProperties != null) { - properties = theProperties - .stream() - .map(IPrimitiveType::getValueAsString) - .collect(Collectors.toSet()); + properties = theProperties.stream() + .map(IPrimitiveType::getValueAsString) + .collect(Collectors.toSet()); } for (IValidationSupport.BaseConceptProperty next : myProperties) { @@ -797,7 +824,8 @@ public interface IValidationSupport { ParametersUtil.addPartString(theContext, property, "value", prop.getValue()); } else if (next instanceof IValidationSupport.CodingConceptProperty) { IValidationSupport.CodingConceptProperty prop = (IValidationSupport.CodingConceptProperty) next; - ParametersUtil.addPartCoding(theContext, property, "value", prop.getCodeSystem(), prop.getCode(), prop.getDisplay()); + ParametersUtil.addPartCoding( + theContext, property, "value", prop.getCodeSystem(), prop.getCode(), prop.getDisplay()); } else { throw new IllegalStateException(Msg.code(1739) + "Don't know how to handle " + next.getClass()); } @@ -809,7 +837,8 @@ public interface IValidationSupport { IBase property = ParametersUtil.addParameterToParameters(theContext, retVal, "designation"); ParametersUtil.addPartCode(theContext, property, "language", next.getLanguage()); - ParametersUtil.addPartCoding(theContext, property, "use", next.getUseSystem(), next.getUseCode(), next.getUseDisplay()); + ParametersUtil.addPartCoding( + theContext, property, "use", next.getUseSystem(), 
next.getUseCode(), next.getUseDisplay()); ParametersUtil.addPartString(theContext, property, "value", next.getValue()); } } @@ -819,13 +848,12 @@ public interface IValidationSupport { public static LookupCodeResult notFound(String theSearchedForSystem, String theSearchedForCode) { return new LookupCodeResult() - .setFound(false) - .setSearchedForSystem(theSearchedForSystem) - .setSearchedForCode(theSearchedForCode); + .setFound(false) + .setSearchedForSystem(theSearchedForSystem) + .setSearchedForCode(theSearchedForCode); } } - class TranslateCodeRequest { private final String myTargetSystemUrl; private final String myConceptMapUrl; @@ -848,14 +876,14 @@ public interface IValidationSupport { } public TranslateCodeRequest( - List theCodings, - String theTargetSystemUrl, - String theConceptMapUrl, - String theConceptMapVersion, - String theSourceValueSetUrl, - String theTargetValueSetUrl, - IIdType theResourceId, - boolean theReverse) { + List theCodings, + String theTargetSystemUrl, + String theConceptMapUrl, + String theConceptMapVersion, + String theSourceValueSetUrl, + String theTargetValueSetUrl, + IIdType theResourceId, + boolean theReverse) { myCodings = theCodings; myTargetSystemUrl = theTargetSystemUrl; myConceptMapUrl = theConceptMapUrl; @@ -879,29 +907,29 @@ public interface IValidationSupport { TranslateCodeRequest that = (TranslateCodeRequest) theO; return new EqualsBuilder() - .append(myCodings, that.myCodings) - .append(myTargetSystemUrl, that.myTargetSystemUrl) - .append(myConceptMapUrl, that.myConceptMapUrl) - .append(myConceptMapVersion, that.myConceptMapVersion) - .append(mySourceValueSetUrl, that.mySourceValueSetUrl) - .append(myTargetValueSetUrl, that.myTargetValueSetUrl) - .append(myResourceId, that.myResourceId) - .append(myReverse, that.myReverse) - .isEquals(); + .append(myCodings, that.myCodings) + .append(myTargetSystemUrl, that.myTargetSystemUrl) + .append(myConceptMapUrl, that.myConceptMapUrl) + .append(myConceptMapVersion, that.myConceptMapVersion) + .append(mySourceValueSetUrl, that.mySourceValueSetUrl) + .append(myTargetValueSetUrl, that.myTargetValueSetUrl) + .append(myResourceId, that.myResourceId) + .append(myReverse, that.myReverse) + .isEquals(); } @Override public int hashCode() { return new HashCodeBuilder(17, 37) - .append(myCodings) - .append(myTargetSystemUrl) - .append(myConceptMapUrl) - .append(myConceptMapVersion) - .append(mySourceValueSetUrl) - .append(myTargetValueSetUrl) - .append(myResourceId) - .append(myReverse) - .toHashCode(); + .append(myCodings) + .append(myTargetSystemUrl) + .append(myConceptMapUrl) + .append(myConceptMapVersion) + .append(mySourceValueSetUrl) + .append(myTargetValueSetUrl) + .append(myResourceId) + .append(myReverse) + .toHashCode(); } public List getCodings() { diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/TranslateConceptResult.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/TranslateConceptResult.java index 54aae576eda..6ec2631dfed 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/TranslateConceptResult.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/TranslateConceptResult.java @@ -52,14 +52,14 @@ public class TranslateConceptResult { @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("system", mySystem) - .append("code", myCode) - .append("display", myDisplay) - .append("equivalence", myEquivalence) - .append("conceptMapUrl", myConceptMapUrl) - .append("valueSet", 
myValueSet) - .append("systemVersion", mySystemVersion) - .toString(); + .append("system", mySystem) + .append("code", myCode) + .append("display", myDisplay) + .append("equivalence", myEquivalence) + .append("conceptMapUrl", myConceptMapUrl) + .append("valueSet", myValueSet) + .append("systemVersion", mySystemVersion) + .toString(); } public String getCode() { diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/ValueSetExpansionOptions.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/ValueSetExpansionOptions.java index c0bd0cb04c8..5c2c0fdd57b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/ValueSetExpansionOptions.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/ValueSetExpansionOptions.java @@ -115,9 +115,7 @@ public class ValueSetExpansionOptions { } public static ValueSetExpansionOptions forOffsetAndCount(int theOffset, int theCount) { - return new ValueSetExpansionOptions() - .setOffset(theOffset) - .setCount(theCount); + return new ValueSetExpansionOptions().setOffset(theOffset).setCount(theCount); } public String getTheDisplayLanguage() { diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/fhirpath/FhirPathExecutionException.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/fhirpath/FhirPathExecutionException.java index ffdbb1334ba..65d2a889333 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/fhirpath/FhirPathExecutionException.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/fhirpath/FhirPathExecutionException.java @@ -36,5 +36,4 @@ public class FhirPathExecutionException extends InternalErrorException { public FhirPathExecutionException(String theMessage) { super(theMessage); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/fhirpath/IFhirPath.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/fhirpath/IFhirPath.java index f3f5dfe64bc..59d3920b1cc 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/fhirpath/IFhirPath.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/fhirpath/IFhirPath.java @@ -21,16 +21,16 @@ package ca.uhn.fhir.fhirpath; import org.hl7.fhir.instance.model.api.IBase; -import javax.annotation.Nonnull; import java.util.List; import java.util.Optional; +import javax.annotation.Nonnull; public interface IFhirPath { /** * Apply the given FhirPath expression against the given input and return * all results in a list - * + * * @param theInput The input object (generally a resource or datatype) * @param thePath The fluent path expression * @param theReturnType The type to return (in order to avoid casting) @@ -47,13 +47,11 @@ public interface IFhirPath { */ Optional evaluateFirst(IBase theInput, String thePath, Class theReturnType); - /** * Parses the expression and throws an exception if it can not parse correctly */ void parse(String theExpression) throws Exception; - /** * This method can be used optionally to supply an evaluation context for the * FHIRPath evaluator instance. 
The context can be used to supply data needed by diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/i18n/HapiErrorCode.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/i18n/HapiErrorCode.java index ba58777a4bb..c09fba15b6d 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/i18n/HapiErrorCode.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/i18n/HapiErrorCode.java @@ -19,7 +19,4 @@ */ package ca.uhn.fhir.i18n; -public final class HapiErrorCode { - -} - +public final class HapiErrorCode {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/i18n/HapiLocalizer.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/i18n/HapiLocalizer.java index f3e39d32298..b94dde62821 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/i18n/HapiLocalizer.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/i18n/HapiLocalizer.java @@ -39,8 +39,6 @@ import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; import static org.apache.commons.lang3.StringUtils.trim; - - /** * This feature is not yet in its final state and should be considered an internal part of HAPI for now - use with caution */ @@ -48,6 +46,7 @@ public class HapiLocalizer { @SuppressWarnings("WeakerAccess") public static final String UNKNOWN_I18N_KEY_MESSAGE = "!MESSAGE!"; + private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(HapiLocalizer.class); private static boolean ourFailOnMissingMessage; private final Map myKeyToMessageFormat = new ConcurrentHashMap<>(); @@ -148,19 +147,18 @@ public class HapiLocalizer { MessageFormat newMessageFormat(String theFormatString) { StringBuilder pattern = new StringBuilder(theFormatString.trim()); - - for (int i = 0; i < (pattern.length()-1); i++) { + for (int i = 0; i < (pattern.length() - 1); i++) { if (pattern.charAt(i) == '{') { - char nextChar = pattern.charAt(i+1); + char nextChar = pattern.charAt(i + 1); if (nextChar >= '0' && nextChar <= '9') { continue; } - pattern.replace(i, i+1, "'{'"); + pattern.replace(i, i + 1, "'{'"); int closeBraceIndex = pattern.indexOf("}", i); if (closeBraceIndex > 0) { i = closeBraceIndex; - pattern.replace(i, i+1, "'}'"); + pattern.replace(i, i + 1, "'}'"); } } } @@ -175,11 +173,11 @@ public class HapiLocalizer { } } - public Locale getLocale() { + public Locale getLocale() { return myLocale; - } + } - /** + /** * This global setting causes the localizer to fail if any attempts * are made to retrieve a key that does not exist. This method is primarily for * unit tests. @@ -191,5 +189,4 @@ public class HapiLocalizer { public static String toKey(Class theType, String theKey) { return theType.getName() + '.' 
+ theKey; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/i18n/Msg.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/i18n/Msg.java index e5d22645b0f..4e8bc672a78 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/i18n/Msg.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/i18n/Msg.java @@ -26,7 +26,6 @@ public final class Msg { * IMPORTANT: Please update the table in the following link after you add a new code: * https://github.com/hapifhir/hapi-fhir/wiki/Bumping-Error-Code */ - private Msg() {} public static String code(int theCode) { diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Hook.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Hook.java index d0f611b155e..a7974dd0c22 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Hook.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Hook.java @@ -20,7 +20,6 @@ package ca.uhn.fhir.interceptor.api; import java.lang.annotation.ElementType; -import java.lang.annotation.Repeatable; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/HookParams.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/HookParams.java index 48df3e72343..f9f96ccb4c1 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/HookParams.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/HookParams.java @@ -24,12 +24,12 @@ import com.google.common.collect.ListMultimap; import com.google.common.collect.Multimaps; import org.apache.commons.lang3.Validate; -import javax.annotation.Nonnull; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.function.Supplier; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class HookParams { @@ -38,8 +38,7 @@ public class HookParams { /** * Constructor */ - public HookParams() { - } + public HookParams() {} /** * Constructor @@ -61,13 +60,13 @@ public class HookParams { return doAdd(theType, theParam); } -// /** -// * This is useful for providing a lazy-loaded (generally expensive to create) -// * parameters -// */ -// public HookParams addSupplier(Class theType, Supplier theParam) { -// return doAdd(theType, theParam); -// } + // /** + // * This is useful for providing a lazy-loaded (generally expensive to create) + // * parameters + // */ + // public HookParams addSupplier(Class theType, Supplier theParam) { + // return doAdd(theType, theParam); + // } private HookParams doAdd(Class theType, Object theParam) { Validate.isTrue(theType.equals(Supplier.class) == false, "Can not add parameters of type Supplier"); @@ -110,9 +109,7 @@ public class HookParams { } public Collection values() { - return - Collections.unmodifiableCollection(myParams.values()) - .stream() + return Collections.unmodifiableCollection(myParams.values()).stream() .map(t -> unwrapValue(t)) .collect(Collectors.toList()); } @@ -137,10 +134,10 @@ public class HookParams { StringBuilder b = new StringBuilder(); myParams.forEach((key, value) -> { b.append(" ") - .append(key.getSimpleName()) - .append(": ") - .append(value) - .append("\n"); + .append(key.getSimpleName()) + .append(": ") + .append(value) + .append("\n"); }); return b.toString(); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/IAnonymousInterceptor.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/IAnonymousInterceptor.java index 
b46b176c95d..c660138b8c2 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/IAnonymousInterceptor.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/IAnonymousInterceptor.java @@ -31,5 +31,4 @@ import com.google.common.annotations.VisibleForTesting; public interface IAnonymousInterceptor { void invoke(IPointcut thePointcut, HookParams theArgs); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/IBaseInterceptorService.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/IBaseInterceptorService.java index 9457f9cdea7..d0fdcb61c94 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/IBaseInterceptorService.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/IBaseInterceptorService.java @@ -19,10 +19,10 @@ */ package ca.uhn.fhir.interceptor.api; -import javax.annotation.Nullable; import java.util.Collection; import java.util.List; import java.util.function.Predicate; +import javax.annotation.Nullable; public interface IBaseInterceptorService extends IBaseInterceptorBroadcaster { diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/IInterceptorBroadcaster.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/IInterceptorBroadcaster.java index 3fa8794a01e..2017b3117f8 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/IInterceptorBroadcaster.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/IInterceptorBroadcaster.java @@ -19,6 +19,4 @@ */ package ca.uhn.fhir.interceptor.api; -public interface IInterceptorBroadcaster extends IBaseInterceptorBroadcaster { - -} +public interface IInterceptorBroadcaster extends IBaseInterceptorBroadcaster {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/IInterceptorService.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/IInterceptorService.java index 2187cd47fe5..d7d9f3ad823 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/IInterceptorService.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/IInterceptorService.java @@ -24,5 +24,4 @@ public interface IInterceptorService extends IBaseInterceptorService, void registerAnonymousInterceptor(Pointcut thePointcut, IAnonymousInterceptor theInterceptor); void registerAnonymousInterceptor(Pointcut thePointcut, int theOrder, IAnonymousInterceptor theInterceptor); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/IPointcut.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/IPointcut.java index 908b2897ad8..8ebd00bee4c 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/IPointcut.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/IPointcut.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.interceptor.api; -import javax.annotation.Nonnull; import java.util.List; +import javax.annotation.Nonnull; public interface IPointcut { @Nonnull diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Interceptor.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Interceptor.java index 269cecb5c9f..c438ff5eda6 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Interceptor.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Interceptor.java @@ -35,7 +35,7 @@ import java.lang.annotation.Target; public @interface Interceptor { /** - * @see #order() + * @see #order() */ int DEFAULT_ORDER = 0; diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java 
b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java index f8aff921214..0ad2f6844fd 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java @@ -27,13 +27,13 @@ import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; import ca.uhn.fhir.validation.ValidationResult; import org.hl7.fhir.instance.model.api.IBaseConformance; -import javax.annotation.Nonnull; import java.io.Writer; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; +import javax.annotation.Nonnull; /** * Value for {@link Hook#value()} @@ -74,10 +74,8 @@ public enum Pointcut implements IPointcut { *
* Hook methods must return void. */ - CLIENT_REQUEST(void.class, - "ca.uhn.fhir.rest.client.api.IHttpRequest", - "ca.uhn.fhir.rest.client.api.IRestfulClient" - ), + CLIENT_REQUEST( + void.class, "ca.uhn.fhir.rest.client.api.IHttpRequest", "ca.uhn.fhir.rest.client.api.IRestfulClient"), /** * Client Hook: @@ -99,11 +97,11 @@ public enum Pointcut implements IPointcut { *
* Hook methods must return void. */ - CLIENT_RESPONSE(void.class, - "ca.uhn.fhir.rest.client.api.IHttpRequest", - "ca.uhn.fhir.rest.client.api.IHttpResponse", - "ca.uhn.fhir.rest.client.api.IRestfulClient" - ), + CLIENT_RESPONSE( + void.class, + "ca.uhn.fhir.rest.client.api.IHttpRequest", + "ca.uhn.fhir.rest.client.api.IHttpResponse", + "ca.uhn.fhir.rest.client.api.IRestfulClient"), /** * Server Hook: @@ -138,11 +136,11 @@ public enum Pointcut implements IPointcut { * chooses to modify the CapabilityStatement that was supplied to the interceptor, it is fine * for your hook method to return void or null. */ - SERVER_CAPABILITY_STATEMENT_GENERATED(IBaseConformance.class, - "org.hl7.fhir.instance.model.api.IBaseConformance", - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails" - ), + SERVER_CAPABILITY_STATEMENT_GENERATED( + IBaseConformance.class, + "org.hl7.fhir.instance.model.api.IBaseConformance", + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"), /** * Server Hook: @@ -167,10 +165,8 @@ public enum Pointcut implements IPointcut { * letting HAPI handle the response normally, you must return false. In this case, * no further processing will occur and no further interceptors will be called. */ - SERVER_INCOMING_REQUEST_PRE_PROCESSED(boolean.class, - "javax.servlet.http.HttpServletRequest", - "javax.servlet.http.HttpServletResponse" - ), + SERVER_INCOMING_REQUEST_PRE_PROCESSED( + boolean.class, "javax.servlet.http.HttpServletRequest", "javax.servlet.http.HttpServletResponse"), /** * Server Hook: @@ -215,13 +211,13 @@ public enum Pointcut implements IPointcut { * should return false, to indicate that they have handled the request and processing should stop. *
*/ - SERVER_HANDLE_EXCEPTION(boolean.class, - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "javax.servlet.http.HttpServletRequest", - "javax.servlet.http.HttpServletResponse", - "ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException" - ), + SERVER_HANDLE_EXCEPTION( + boolean.class, + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "javax.servlet.http.HttpServletRequest", + "javax.servlet.http.HttpServletResponse", + "ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException"), /** * Server Hook: @@ -263,12 +259,12 @@ public enum Pointcut implements IPointcut { * * @since 5.4.0 */ - SERVER_INCOMING_REQUEST_PRE_HANDLER_SELECTED(boolean.class, - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "javax.servlet.http.HttpServletRequest", - "javax.servlet.http.HttpServletResponse" - ), + SERVER_INCOMING_REQUEST_PRE_HANDLER_SELECTED( + boolean.class, + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "javax.servlet.http.HttpServletRequest", + "javax.servlet.http.HttpServletResponse"), /** * Server Hook: @@ -308,13 +304,12 @@ public enum Pointcut implements IPointcut { * to indicate that the interceptor has detected an unauthorized access * attempt. If thrown, processing will stop and an HTTP 401 will be returned to the client. */ - SERVER_INCOMING_REQUEST_POST_PROCESSED(boolean.class, - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "javax.servlet.http.HttpServletRequest", - "javax.servlet.http.HttpServletResponse" - ), - + SERVER_INCOMING_REQUEST_POST_PROCESSED( + boolean.class, + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "javax.servlet.http.HttpServletRequest", + "javax.servlet.http.HttpServletResponse"), /** * Server Hook: @@ -353,12 +348,11 @@ public enum Pointcut implements IPointcut { * will be aborted with an appropriate error returned to the client. *
*/ - SERVER_INCOMING_REQUEST_PRE_HANDLED(void.class, - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "ca.uhn.fhir.rest.api.RestOperationTypeEnum" - ), - + SERVER_INCOMING_REQUEST_PRE_HANDLED( + void.class, + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "ca.uhn.fhir.rest.api.RestOperationTypeEnum"), /** * Server Hook: @@ -375,9 +369,8 @@ public enum Pointcut implements IPointcut { * Hook methods may modify the method binding, replace it, or return null to cancel the binding. *
*/ - SERVER_PROVIDER_METHOD_BOUND("ca.uhn.fhir.rest.server.method.BaseMethodBinding", - "ca.uhn.fhir.rest.server.method.BaseMethodBinding"), - + SERVER_PROVIDER_METHOD_BOUND( + "ca.uhn.fhir.rest.server.method.BaseMethodBinding", "ca.uhn.fhir.rest.server.method.BaseMethodBinding"), /** * Server Hook: @@ -431,13 +424,13 @@ public enum Pointcut implements IPointcut { * should return an exception. *
*/ - SERVER_PRE_PROCESS_OUTGOING_EXCEPTION(BaseServerResponseException.class, - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "java.lang.Throwable", - "javax.servlet.http.HttpServletRequest", - "javax.servlet.http.HttpServletResponse" - ), + SERVER_PRE_PROCESS_OUTGOING_EXCEPTION( + BaseServerResponseException.class, + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "java.lang.Throwable", + "javax.servlet.http.HttpServletRequest", + "javax.servlet.http.HttpServletResponse"), /** * Server Hook: @@ -483,15 +476,14 @@ public enum Pointcut implements IPointcut { * has detected an unauthorized access attempt. If thrown, processing will stop and an HTTP 401 * will be returned to the client. */ - SERVER_OUTGOING_RESPONSE(boolean.class, - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "org.hl7.fhir.instance.model.api.IBaseResource", - "ca.uhn.fhir.rest.api.server.ResponseDetails", - "javax.servlet.http.HttpServletRequest", - "javax.servlet.http.HttpServletResponse" - ), - + SERVER_OUTGOING_RESPONSE( + boolean.class, + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "org.hl7.fhir.instance.model.api.IBaseResource", + "ca.uhn.fhir.rest.api.server.ResponseDetails", + "javax.servlet.http.HttpServletRequest", + "javax.servlet.http.HttpServletResponse"), /** * Server Hook: @@ -525,12 +517,11 @@ public enum Pointcut implements IPointcut { * * @since 5.0.0 */ - SERVER_OUTGOING_WRITER_CREATED(Writer.class, - "java.io.Writer", - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails" - ), - + SERVER_OUTGOING_WRITER_CREATED( + Writer.class, + "java.io.Writer", + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"), /** * Server Hook: @@ -576,15 +567,14 @@ public enum Pointcut implements IPointcut { * has detected an unauthorized access attempt. If thrown, processing will stop and an HTTP 401 * will be returned to the client. */ - SERVER_OUTGOING_GRAPHQL_RESPONSE(boolean.class, - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "java.lang.String", - "java.lang.String", - "javax.servlet.http.HttpServletRequest", - "javax.servlet.http.HttpServletResponse" - ), - + SERVER_OUTGOING_GRAPHQL_RESPONSE( + boolean.class, + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "java.lang.String", + "java.lang.String", + "javax.servlet.http.HttpServletRequest", + "javax.servlet.http.HttpServletResponse"), /** * Server Hook: @@ -615,12 +605,10 @@ public enum Pointcut implements IPointcut { *
*/ SERVER_OUTGOING_FAILURE_OPERATIONOUTCOME( - void.class, - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "org.hl7.fhir.instance.model.api.IBaseOperationOutcome" - ), - + void.class, + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "org.hl7.fhir.instance.model.api.IBaseOperationOutcome"), /** * Server Hook: @@ -660,12 +648,10 @@ public enum Pointcut implements IPointcut { *
*/ SERVER_PROCESSING_COMPLETED_NORMALLY( - void.class, - new ExceptionHandlingSpec() - .addLogAndSwallow(Throwable.class), - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails" - ), + void.class, + new ExceptionHandlingSpec().addLogAndSwallow(Throwable.class), + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"), /** * Server Hook: @@ -699,12 +685,10 @@ public enum Pointcut implements IPointcut { *
*/ SERVER_PROCESSING_COMPLETED( - void.class, - new ExceptionHandlingSpec() - .addLogAndSwallow(Throwable.class), - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails" - ), + void.class, + new ExceptionHandlingSpec().addLogAndSwallow(Throwable.class), + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"), /** * Subscription Hook: @@ -726,7 +710,6 @@ public enum Pointcut implements IPointcut { */ SUBSCRIPTION_RESOURCE_MODIFIED(boolean.class, "ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage"), - /** * Subscription Hook: * Invoked any time that a resource is matched by an individual subscription, and @@ -748,7 +731,11 @@ public enum Pointcut implements IPointcut { * returns false, delivery will be aborted. *
*/ - SUBSCRIPTION_RESOURCE_MATCHED(boolean.class, "ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription", "ca.uhn.fhir.jpa.subscription.model.ResourceDeliveryMessage", "ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult"), + SUBSCRIPTION_RESOURCE_MATCHED( + boolean.class, + "ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription", + "ca.uhn.fhir.jpa.subscription.model.ResourceDeliveryMessage", + "ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult"), /** * Subscription Hook: @@ -764,7 +751,8 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - SUBSCRIPTION_RESOURCE_DID_NOT_MATCH_ANY_SUBSCRIPTIONS(void.class, "ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage"), + SUBSCRIPTION_RESOURCE_DID_NOT_MATCH_ANY_SUBSCRIPTIONS( + void.class, "ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage"), /** * Subscription Hook: @@ -786,7 +774,10 @@ public enum Pointcut implements IPointcut { * returns false, processing will be aborted. *
*/ - SUBSCRIPTION_BEFORE_DELIVERY(boolean.class, "ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription", "ca.uhn.fhir.jpa.subscription.model.ResourceDeliveryMessage"), + SUBSCRIPTION_BEFORE_DELIVERY( + boolean.class, + "ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription", + "ca.uhn.fhir.jpa.subscription.model.ResourceDeliveryMessage"), /** * Subscription Hook: @@ -803,8 +794,10 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - SUBSCRIPTION_AFTER_DELIVERY(void.class, "ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription", "ca.uhn.fhir.jpa.subscription.model.ResourceDeliveryMessage"), - + SUBSCRIPTION_AFTER_DELIVERY( + void.class, + "ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription", + "ca.uhn.fhir.jpa.subscription.model.ResourceDeliveryMessage"), /** * Subscription Hook: @@ -827,7 +820,8 @@ public enum Pointcut implements IPointcut { * taken for the delivery. *
*/ - SUBSCRIPTION_AFTER_DELIVERY_FAILED(boolean.class, "ca.uhn.fhir.jpa.subscription.model.ResourceDeliveryMessage", "java.lang.Exception"), + SUBSCRIPTION_AFTER_DELIVERY_FAILED( + boolean.class, "ca.uhn.fhir.jpa.subscription.model.ResourceDeliveryMessage", "java.lang.Exception"), /** * Subscription Hook: @@ -845,7 +839,10 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - SUBSCRIPTION_AFTER_REST_HOOK_DELIVERY(void.class, "ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription", "ca.uhn.fhir.jpa.subscription.model.ResourceDeliveryMessage"), + SUBSCRIPTION_AFTER_REST_HOOK_DELIVERY( + void.class, + "ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription", + "ca.uhn.fhir.jpa.subscription.model.ResourceDeliveryMessage"), /** * Subscription Hook: @@ -866,7 +863,10 @@ public enum Pointcut implements IPointcut { * returns false, processing will be aborted. *
*/ - SUBSCRIPTION_BEFORE_REST_HOOK_DELIVERY(boolean.class, "ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription", "ca.uhn.fhir.jpa.subscription.model.ResourceDeliveryMessage"), + SUBSCRIPTION_BEFORE_REST_HOOK_DELIVERY( + boolean.class, + "ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription", + "ca.uhn.fhir.jpa.subscription.model.ResourceDeliveryMessage"), /** * Subscription Hook: @@ -884,7 +884,10 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - SUBSCRIPTION_AFTER_MESSAGE_DELIVERY(void.class, "ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription", "ca.uhn.fhir.jpa.subscription.model.ResourceDeliveryMessage"), + SUBSCRIPTION_AFTER_MESSAGE_DELIVERY( + void.class, + "ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription", + "ca.uhn.fhir.jpa.subscription.model.ResourceDeliveryMessage"), /** * Subscription Hook: @@ -909,8 +912,11 @@ public enum Pointcut implements IPointcut { * returns false, processing will be aborted. *
*/ - SUBSCRIPTION_BEFORE_MESSAGE_DELIVERY(boolean.class, "ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription", "ca.uhn.fhir.jpa.subscription.model.ResourceDeliveryMessage", "ca.uhn.fhir.jpa.subscription.model.ResourceModifiedJsonMessage"), - + SUBSCRIPTION_BEFORE_MESSAGE_DELIVERY( + boolean.class, + "ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription", + "ca.uhn.fhir.jpa.subscription.model.ResourceDeliveryMessage", + "ca.uhn.fhir.jpa.subscription.model.ResourceModifiedJsonMessage"), /** * Subscription Hook: @@ -929,8 +935,8 @@ public enum Pointcut implements IPointcut { * returns false, processing will be aborted. *
*/ - SUBSCRIPTION_BEFORE_PERSISTED_RESOURCE_CHECKED(boolean.class, "ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage"), - + SUBSCRIPTION_BEFORE_PERSISTED_RESOURCE_CHECKED( + boolean.class, "ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage"), /** * Subscription Hook: @@ -947,8 +953,8 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - SUBSCRIPTION_AFTER_PERSISTED_RESOURCE_CHECKED(void.class, "ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage"), - + SUBSCRIPTION_AFTER_PERSISTED_RESOURCE_CHECKED( + void.class, "ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage"), /** * Subscription Hook: @@ -967,7 +973,8 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - SUBSCRIPTION_AFTER_ACTIVE_SUBSCRIPTION_REGISTERED(void.class, "ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription"), + SUBSCRIPTION_AFTER_ACTIVE_SUBSCRIPTION_REGISTERED( + void.class, "ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription"), /** * Subscription Hook: @@ -1024,12 +1031,11 @@ public enum Pointcut implements IPointcut { *
*/ STORAGE_CASCADE_DELETE( - void.class, - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "ca.uhn.fhir.jpa.api.model.DeleteConflictList", - "org.hl7.fhir.instance.model.api.IBaseResource" - ), + void.class, + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "ca.uhn.fhir.jpa.api.model.DeleteConflictList", + "org.hl7.fhir.instance.model.api.IBaseResource"), /** * Subscription Topic Hook: @@ -1048,8 +1054,8 @@ public enum Pointcut implements IPointcut { * returns false, processing will be aborted. *
*/ - SUBSCRIPTION_TOPIC_BEFORE_PERSISTED_RESOURCE_CHECKED(boolean.class, "ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage"), - + SUBSCRIPTION_TOPIC_BEFORE_PERSISTED_RESOURCE_CHECKED( + boolean.class, "ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage"), /** * Subscription Topic Hook: @@ -1066,8 +1072,8 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - SUBSCRIPTION_TOPIC_AFTER_PERSISTED_RESOURCE_CHECKED(void.class, "ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage"), - + SUBSCRIPTION_TOPIC_AFTER_PERSISTED_RESOURCE_CHECKED( + void.class, "ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage"), /** * Storage Hook: @@ -1100,12 +1106,10 @@ public enum Pointcut implements IPointcut { *
*/ STORAGE_INITIATE_BULK_EXPORT( - void.class, - "ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters", - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails" - ), - + void.class, + "ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters", + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"), /** * Storage Hook: @@ -1135,12 +1139,9 @@ public enum Pointcut implements IPointcut { * @since 6.8.0 */ STORAGE_BULK_EXPORT_RESOURCE_INCLUSION( - boolean.class, - "ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters", - "org.hl7.fhir.instance.model.api.IBaseResource" - ), - - + boolean.class, + "ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters", + "org.hl7.fhir.instance.model.api.IBaseResource"), /** * Storage Hook: @@ -1172,13 +1173,11 @@ public enum Pointcut implements IPointcut { * which case the delete expunge will not occur. *
*/ - STORAGE_PRE_DELETE_EXPUNGE( - void.class, - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "java.lang.String" - ), + void.class, + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "java.lang.String"), /** * Storage Hook: @@ -1221,15 +1220,13 @@ public enum Pointcut implements IPointcut { * which case the delete expunge will not occur. *
*/ - STORAGE_PRE_DELETE_EXPUNGE_PID_LIST( - void.class, - "java.lang.String", - "java.util.List", - "java.util.concurrent.atomic.AtomicLong", - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails" - ), + void.class, + "java.lang.String", + "java.util.List", + "java.util.concurrent.atomic.AtomicLong", + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"), /** * Storage Hook: @@ -1272,11 +1269,11 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - STORAGE_PREACCESS_RESOURCES(void.class, - "ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails", - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails" - ), + STORAGE_PREACCESS_RESOURCES( + void.class, + "ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails", + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"), /** * Storage Hook: @@ -1310,11 +1307,11 @@ public enum Pointcut implements IPointcut { * search no matter what. *
*/ - STORAGE_PRECHECK_FOR_CACHED_SEARCH(boolean.class, - "ca.uhn.fhir.jpa.searchparam.SearchParameterMap", - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails" - ), + STORAGE_PRECHECK_FOR_CACHED_SEARCH( + boolean.class, + "ca.uhn.fhir.jpa.searchparam.SearchParameterMap", + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"), /** * Storage Hook: @@ -1353,13 +1350,13 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - STORAGE_PRESEARCH_REGISTERED(void.class, - "ca.uhn.fhir.rest.server.util.ICachedSearchDetails", - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "ca.uhn.fhir.jpa.searchparam.SearchParameterMap", - "ca.uhn.fhir.interceptor.model.RequestPartitionId" - ), + STORAGE_PRESEARCH_REGISTERED( + void.class, + "ca.uhn.fhir.rest.server.util.ICachedSearchDetails", + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "ca.uhn.fhir.jpa.searchparam.SearchParameterMap", + "ca.uhn.fhir.interceptor.model.RequestPartitionId"), /** * Storage Hook: @@ -1404,11 +1401,11 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - STORAGE_PRESHOW_RESOURCES(void.class, - "ca.uhn.fhir.rest.api.server.IPreResourceShowDetails", - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails" - ), + STORAGE_PRESHOW_RESOURCES( + void.class, + "ca.uhn.fhir.rest.api.server.IPreResourceShowDetails", + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"), /** * Storage Hook: @@ -1443,13 +1440,13 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - STORAGE_PRESTORAGE_RESOURCE_CREATED(void.class, - "org.hl7.fhir.instance.model.api.IBaseResource", - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "ca.uhn.fhir.rest.api.server.storage.TransactionDetails", - "ca.uhn.fhir.interceptor.model.RequestPartitionId" - ), + STORAGE_PRESTORAGE_RESOURCE_CREATED( + void.class, + "org.hl7.fhir.instance.model.api.IBaseResource", + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "ca.uhn.fhir.rest.api.server.storage.TransactionDetails", + "ca.uhn.fhir.interceptor.model.RequestPartitionId"), /** * Storage Hook: @@ -1474,10 +1471,8 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - STORAGE_PRESTORAGE_CLIENT_ASSIGNED_ID(void.class, - "org.hl7.fhir.instance.model.api.IBaseResource", - "ca.uhn.fhir.rest.api.server.RequestDetails" - ), + STORAGE_PRESTORAGE_CLIENT_ASSIGNED_ID( + void.class, "org.hl7.fhir.instance.model.api.IBaseResource", "ca.uhn.fhir.rest.api.server.RequestDetails"), /** * Storage Hook: @@ -1514,13 +1509,13 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - STORAGE_PRESTORAGE_RESOURCE_UPDATED(void.class, - "org.hl7.fhir.instance.model.api.IBaseResource", - "org.hl7.fhir.instance.model.api.IBaseResource", - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "ca.uhn.fhir.rest.api.server.storage.TransactionDetails" - ), + STORAGE_PRESTORAGE_RESOURCE_UPDATED( + void.class, + "org.hl7.fhir.instance.model.api.IBaseResource", + "org.hl7.fhir.instance.model.api.IBaseResource", + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "ca.uhn.fhir.rest.api.server.storage.TransactionDetails"), /** * Storage Hook: @@ -1555,13 +1550,12 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - STORAGE_PRESTORAGE_RESOURCE_DELETED(void.class, - "org.hl7.fhir.instance.model.api.IBaseResource", - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "ca.uhn.fhir.rest.api.server.storage.TransactionDetails" - ), - + STORAGE_PRESTORAGE_RESOURCE_DELETED( + void.class, + "org.hl7.fhir.instance.model.api.IBaseResource", + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "ca.uhn.fhir.rest.api.server.storage.TransactionDetails"), /** * Storage Hook: @@ -1604,13 +1598,13 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - STORAGE_PRECOMMIT_RESOURCE_CREATED(void.class, - "org.hl7.fhir.instance.model.api.IBaseResource", - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "ca.uhn.fhir.rest.api.server.storage.TransactionDetails", - "ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum" - ), + STORAGE_PRECOMMIT_RESOURCE_CREATED( + void.class, + "org.hl7.fhir.instance.model.api.IBaseResource", + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "ca.uhn.fhir.rest.api.server.storage.TransactionDetails", + "ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum"), /** * Storage Hook: @@ -1651,15 +1645,14 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - STORAGE_PRECOMMIT_RESOURCE_UPDATED(void.class, - "org.hl7.fhir.instance.model.api.IBaseResource", - "org.hl7.fhir.instance.model.api.IBaseResource", - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "ca.uhn.fhir.rest.api.server.storage.TransactionDetails", - "ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum" - ), - + STORAGE_PRECOMMIT_RESOURCE_UPDATED( + void.class, + "org.hl7.fhir.instance.model.api.IBaseResource", + "org.hl7.fhir.instance.model.api.IBaseResource", + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "ca.uhn.fhir.rest.api.server.storage.TransactionDetails", + "ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum"), /** * Storage Hook: @@ -1695,13 +1688,13 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - STORAGE_PRECOMMIT_RESOURCE_DELETED(void.class, - "org.hl7.fhir.instance.model.api.IBaseResource", - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "ca.uhn.fhir.rest.api.server.storage.TransactionDetails", - "ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum" - ), + STORAGE_PRECOMMIT_RESOURCE_DELETED( + void.class, + "org.hl7.fhir.instance.model.api.IBaseResource", + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "ca.uhn.fhir.rest.api.server.storage.TransactionDetails", + "ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum"), /** * Storage Hook: @@ -1733,11 +1726,11 @@ public enum Pointcut implements IPointcut { * @see #STORAGE_TRANSACTION_PROCESSED * @since 6.2.0 */ - STORAGE_TRANSACTION_PROCESSING(void.class, - "org.hl7.fhir.instance.model.api.IBaseBundle", - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails" - ), + STORAGE_TRANSACTION_PROCESSING( + void.class, + "org.hl7.fhir.instance.model.api.IBaseBundle", + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"), /** * Storage Hook: @@ -1773,14 +1766,13 @@ public enum Pointcut implements IPointcut { * * @see #STORAGE_TRANSACTION_PROCESSING */ - STORAGE_TRANSACTION_PROCESSED(void.class, - "org.hl7.fhir.instance.model.api.IBaseBundle", - "ca.uhn.fhir.rest.api.server.storage.DeferredInterceptorBroadcasts", - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "ca.uhn.fhir.rest.api.server.storage.TransactionDetails" - ), - + STORAGE_TRANSACTION_PROCESSED( + void.class, + "org.hl7.fhir.instance.model.api.IBaseBundle", + "ca.uhn.fhir.rest.api.server.storage.DeferredInterceptorBroadcasts", + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "ca.uhn.fhir.rest.api.server.storage.TransactionDetails"), /** * Storage Hook: @@ -1801,10 +1793,10 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - STORAGE_TRANSACTION_WRITE_OPERATIONS_PRE(void.class, - "ca.uhn.fhir.interceptor.model.TransactionWriteOperationsDetails", - "ca.uhn.fhir.rest.api.server.storage.TransactionDetails" - ), + STORAGE_TRANSACTION_WRITE_OPERATIONS_PRE( + void.class, + "ca.uhn.fhir.interceptor.model.TransactionWriteOperationsDetails", + "ca.uhn.fhir.rest.api.server.storage.TransactionDetails"), /** * Storage Hook: @@ -1827,10 +1819,10 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - STORAGE_TRANSACTION_WRITE_OPERATIONS_POST(void.class, - "ca.uhn.fhir.interceptor.model.TransactionWriteOperationsDetails", - "ca.uhn.fhir.rest.api.server.storage.TransactionDetails" - ), + STORAGE_TRANSACTION_WRITE_OPERATIONS_POST( + void.class, + "ca.uhn.fhir.interceptor.model.TransactionWriteOperationsDetails", + "ca.uhn.fhir.rest.api.server.storage.TransactionDetails"), /** * Storage Hook: @@ -1865,14 +1857,13 @@ public enum Pointcut implements IPointcut { *
*/ STORAGE_PRESTORAGE_DELETE_CONFLICTS( - // Return type - "ca.uhn.fhir.jpa.delete.DeleteConflictOutcome", - // Params - "ca.uhn.fhir.jpa.api.model.DeleteConflictList", - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "ca.uhn.fhir.rest.api.server.storage.TransactionDetails" - ), + // Return type + "ca.uhn.fhir.jpa.delete.DeleteConflictOutcome", + // Params + "ca.uhn.fhir.jpa.api.model.DeleteConflictList", + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "ca.uhn.fhir.rest.api.server.storage.TransactionDetails"), /** * Storage Hook: @@ -1907,15 +1898,14 @@ public enum Pointcut implements IPointcut { *
*/ STORAGE_PRESTORAGE_EXPUNGE_RESOURCE( - // Return type - void.class, - // Params - "java.util.concurrent.atomic.AtomicInteger", - "org.hl7.fhir.instance.model.api.IIdType", - "org.hl7.fhir.instance.model.api.IBaseResource", - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails" - ), + // Return type + void.class, + // Params + "java.util.concurrent.atomic.AtomicInteger", + "org.hl7.fhir.instance.model.api.IIdType", + "org.hl7.fhir.instance.model.api.IBaseResource", + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"), /** * Storage Hook: @@ -1946,13 +1936,12 @@ public enum Pointcut implements IPointcut { *
*/ STORAGE_PRESTORAGE_EXPUNGE_EVERYTHING( - // Return type - void.class, - // Params - "java.util.concurrent.atomic.AtomicInteger", - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails" - ), + // Return type + void.class, + // Params + "java.util.concurrent.atomic.AtomicInteger", + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"), /** * Storage Hook: @@ -1987,13 +1976,12 @@ public enum Pointcut implements IPointcut { * @see #STORAGE_PARTITION_IDENTIFY_ANY For an alternative that is not read/write specific */ STORAGE_PARTITION_IDENTIFY_CREATE( - // Return type - "ca.uhn.fhir.interceptor.model.RequestPartitionId", - // Params - "org.hl7.fhir.instance.model.api.IBaseResource", - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails" - ), + // Return type + "ca.uhn.fhir.interceptor.model.RequestPartitionId", + // Params + "org.hl7.fhir.instance.model.api.IBaseResource", + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"), /** * Storage Hook: @@ -2032,13 +2020,12 @@ public enum Pointcut implements IPointcut { * @see #STORAGE_PARTITION_IDENTIFY_ANY For an alternative that is not read/write specific */ STORAGE_PARTITION_IDENTIFY_READ( - // Return type - "ca.uhn.fhir.interceptor.model.RequestPartitionId", - // Params - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails" - ), + // Return type + "ca.uhn.fhir.interceptor.model.RequestPartitionId", + // Params + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails"), /** * Storage Hook: @@ -2079,12 +2066,11 @@ public enum Pointcut implements IPointcut { * @see #STORAGE_PARTITION_IDENTIFY_CREATE */ STORAGE_PARTITION_IDENTIFY_ANY( - // Return type - "ca.uhn.fhir.interceptor.model.RequestPartitionId", - // Params - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails" - ), + // Return type + "ca.uhn.fhir.interceptor.model.RequestPartitionId", + // Params + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"), /** * Storage Hook: @@ -2120,14 +2106,12 @@ public enum Pointcut implements IPointcut { *
*/ STORAGE_PARTITION_CREATED( - // Return type - void.class, - // Params - "ca.uhn.fhir.interceptor.model.RequestPartitionId", - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails" - ), - + // Return type + void.class, + // Params + "ca.uhn.fhir.interceptor.model.RequestPartitionId", + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"), /** * Storage Hook: @@ -2167,14 +2151,13 @@ public enum Pointcut implements IPointcut { *
*/ STORAGE_PARTITION_SELECTED( - // Return type - void.class, - // Params - "ca.uhn.fhir.interceptor.model.RequestPartitionId", - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "ca.uhn.fhir.context.RuntimeResourceDefinition" - ), + // Return type + void.class, + // Params + "ca.uhn.fhir.interceptor.model.RequestPartitionId", + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "ca.uhn.fhir.context.RuntimeResourceDefinition"), /** * Storage Hook: @@ -2207,10 +2190,9 @@ public enum Pointcut implements IPointcut { *
*/ STORAGE_VERSION_CONFLICT( - "ca.uhn.fhir.jpa.api.model.ResourceVersionConflictResolutionStrategy", - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails" - ), + "ca.uhn.fhir.jpa.api.model.ResourceVersionConflictResolutionStrategy", + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"), /** * Validation Hook: @@ -2236,11 +2218,11 @@ public enum Pointcut implements IPointcut { *
* Hook methods may return an instance of {@link ca.uhn.fhir.validation.ValidationResult} if they wish to override the validation results, or they may return null or void otherwise. */ - VALIDATION_COMPLETED(ValidationResult.class, - "org.hl7.fhir.instance.model.api.IBaseResource", - "java.lang.String", - "ca.uhn.fhir.validation.ValidationResult" - ), + VALIDATION_COMPLETED( + ValidationResult.class, + "org.hl7.fhir.instance.model.api.IBaseResource", + "java.lang.String", + "ca.uhn.fhir.validation.ValidationResult"), /** * MDM(EMPI) Hook: @@ -2257,8 +2239,7 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - MDM_BEFORE_PERSISTED_RESOURCE_CHECKED(void.class, - "org.hl7.fhir.instance.model.api.IBaseResource"), + MDM_BEFORE_PERSISTED_RESOURCE_CHECKED(void.class, "org.hl7.fhir.instance.model.api.IBaseResource"), /** * MDM(EMPI) Hook: @@ -2276,11 +2257,11 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - MDM_AFTER_PERSISTED_RESOURCE_CHECKED(void.class, - "ca.uhn.fhir.rest.server.messaging.ResourceOperationMessage", - "ca.uhn.fhir.rest.server.TransactionLogMessages", - "ca.uhn.fhir.mdm.api.MdmLinkEvent"), - + MDM_AFTER_PERSISTED_RESOURCE_CHECKED( + void.class, + "ca.uhn.fhir.rest.server.messaging.ResourceOperationMessage", + "ca.uhn.fhir.rest.server.TransactionLogMessages", + "ca.uhn.fhir.mdm.api.MdmLinkEvent"), /** * JPA Hook: @@ -2302,8 +2283,9 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - JPA_RESOLVE_CROSS_PARTITION_REFERENCE("ca.uhn.fhir.jpa.model.cross.IResourceLookup", - "ca.uhn.fhir.jpa.searchparam.extractor.CrossPartitionReferenceDetails"), + JPA_RESOLVE_CROSS_PARTITION_REFERENCE( + "ca.uhn.fhir.jpa.model.cross.IResourceLookup", + "ca.uhn.fhir.jpa.searchparam.extractor.CrossPartitionReferenceDetails"), /** * Performance Tracing Hook: @@ -2337,11 +2319,11 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - JPA_PERFTRACE_INFO(void.class, - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "ca.uhn.fhir.jpa.model.search.StorageProcessingMessage" - ), + JPA_PERFTRACE_INFO( + void.class, + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "ca.uhn.fhir.jpa.model.search.StorageProcessingMessage"), /** * Performance Tracing Hook: @@ -2375,11 +2357,11 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - JPA_PERFTRACE_WARNING(void.class, - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "ca.uhn.fhir.jpa.model.search.StorageProcessingMessage" - ), + JPA_PERFTRACE_WARNING( + void.class, + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "ca.uhn.fhir.jpa.model.search.StorageProcessingMessage"), /** * Performance Tracing Hook: @@ -2414,11 +2396,11 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - JPA_PERFTRACE_SEARCH_FIRST_RESULT_LOADED(void.class, - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails" - ), + JPA_PERFTRACE_SEARCH_FIRST_RESULT_LOADED( + void.class, + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails"), /** * Performance Tracing Hook: @@ -2455,12 +2437,11 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - JPA_PERFTRACE_SEARCH_SELECT_COMPLETE(void.class, - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails" - ), - + JPA_PERFTRACE_SEARCH_SELECT_COMPLETE( + void.class, + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails"), /** * Performance Tracing Hook: @@ -2494,11 +2475,11 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - JPA_PERFTRACE_SEARCH_FAILED(void.class, - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails" - ), + JPA_PERFTRACE_SEARCH_FAILED( + void.class, + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails"), /** * Performance Tracing Hook: @@ -2534,11 +2515,11 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - JPA_PERFTRACE_SEARCH_PASS_COMPLETE(void.class, - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails" - ), + JPA_PERFTRACE_SEARCH_PASS_COMPLETE( + void.class, + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails"), /** * Performance Tracing Hook: @@ -2572,11 +2553,11 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE(void.class, - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails" - ), + JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE( + void.class, + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails"), /** * Performance Tracing Hook: @@ -2612,11 +2593,11 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - JPA_PERFTRACE_SEARCH_REUSING_CACHED(boolean.class, - "ca.uhn.fhir.jpa.searchparam.SearchParameterMap", - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails" - ), + JPA_PERFTRACE_SEARCH_REUSING_CACHED( + boolean.class, + "ca.uhn.fhir.jpa.searchparam.SearchParameterMap", + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"), /** * Performance Tracing Hook: @@ -2651,12 +2632,11 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - JPA_PERFTRACE_SEARCH_COMPLETE(void.class, - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails" - ), - + JPA_PERFTRACE_SEARCH_COMPLETE( + void.class, + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails"), /** * Performance Tracing Hook: @@ -2685,11 +2665,7 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - JPA_PERFTRACE_SEARCH_FOUND_ID(void.class, - "java.lang.Integer", - "java.lang.Object" - ), - + JPA_PERFTRACE_SEARCH_FOUND_ID(void.class, "java.lang.Integer", "java.lang.Object"), /** * Performance Tracing Hook: @@ -2724,11 +2700,11 @@ public enum Pointcut implements IPointcut { * Hooks should return void. *
*/ - JPA_PERFTRACE_RAW_SQL(void.class, - "ca.uhn.fhir.rest.api.server.RequestDetails", - "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", - "ca.uhn.fhir.jpa.util.SqlQueryList" - ), + JPA_PERFTRACE_RAW_SQL( + void.class, + "ca.uhn.fhir.rest.api.server.RequestDetails", + "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails", + "ca.uhn.fhir.jpa.util.SqlQueryList"), /** * Binary Blob Prefix Assigning Hook: @@ -2751,21 +2727,20 @@ public enum Pointcut implements IPointcut { * Hooks should return String, which represents the full prefix to be applied to the blob. *
*/ - STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX(String.class, - "ca.uhn.fhir.rest.api.server.RequestDetails", - "org.hl7.fhir.instance.model.api.IBaseResource" - ), - + STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX( + String.class, + "ca.uhn.fhir.rest.api.server.RequestDetails", + "org.hl7.fhir.instance.model.api.IBaseResource"), /** * This pointcut is used only for unit tests. Do not use in production code as it may be changed or * removed at any time. */ TEST_RB( - boolean.class, - new ExceptionHandlingSpec().addLogAndSwallow(IllegalStateException.class), - String.class.getName(), - String.class.getName()), + boolean.class, + new ExceptionHandlingSpec().addLogAndSwallow(IllegalStateException.class), + String.class.getName(), + String.class.getName()), /** * This pointcut is used only for unit tests. Do not use in production code as it may be changed or @@ -2781,7 +2756,10 @@ public enum Pointcut implements IPointcut { this(toReturnTypeClass(theReturnType), new ExceptionHandlingSpec(), theParameterTypes); } - Pointcut(@Nonnull Class theReturnType, @Nonnull ExceptionHandlingSpec theExceptionHandlingSpec, String... theParameterTypes) { + Pointcut( + @Nonnull Class theReturnType, + @Nonnull ExceptionHandlingSpec theExceptionHandlingSpec, + String... theParameterTypes) { myReturnType = theReturnType; myExceptionHandlingSpec = theExceptionHandlingSpec; myParameterTypes = Collections.unmodifiableList(Arrays.asList(theParameterTypes)); @@ -2813,8 +2791,7 @@ public enum Pointcut implements IPointcut { return myParameterTypes; } - private static class UnknownType { - } + private static class UnknownType {} private static class ExceptionHandlingSpec { @@ -2824,7 +2801,6 @@ public enum Pointcut implements IPointcut { myTypesToLogAndSwallow.add(theType); return this; } - } private static Class toReturnTypeClass(String theReturnType) { @@ -2834,5 +2810,4 @@ public enum Pointcut implements IPointcut { return UnknownType.class; } } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/executor/BaseInterceptorService.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/executor/BaseInterceptorService.java index af3ac144c8d..07ac7df658b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/executor/BaseInterceptorService.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/executor/BaseInterceptorService.java @@ -38,8 +38,6 @@ import org.apache.commons.lang3.reflect.MethodUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.lang.annotation.Annotation; import java.lang.reflect.AnnotatedElement; import java.lang.reflect.InvocationTargetException; @@ -59,8 +57,11 @@ import java.util.Optional; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Predicate; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; -public abstract class BaseInterceptorService & IPointcut> implements IBaseInterceptorService, IBaseInterceptorBroadcaster { +public abstract class BaseInterceptorService & IPointcut> + implements IBaseInterceptorService, IBaseInterceptorBroadcaster { private static final Logger ourLog = LoggerFactory.getLogger(BaseInterceptorService.class); private final List myInterceptors = new ArrayList<>(); private final ListMultimap myGlobalInvokers = ArrayListMultimap.create(); @@ -110,7 +111,6 @@ public abstract class BaseInterceptorService & I Validate.notNull(thePointcut); Validate.notNull(theInterceptor); 
synchronized (myRegistryMutex) { - myAnonymousInvokers.put(thePointcut, theInvoker); if (!isInterceptorAlreadyRegistered(theInterceptor)) { myInterceptors.add(theInterceptor); @@ -168,7 +168,8 @@ public abstract class BaseInterceptorService & I unregisterInterceptorsIf(theShouldUnregisterFunction, myAnonymousInvokers); } - private void unregisterInterceptorsIf(Predicate theShouldUnregisterFunction, ListMultimap theGlobalInvokers) { + private void unregisterInterceptorsIf( + Predicate theShouldUnregisterFunction, ListMultimap theGlobalInvokers) { synchronized (myRegistryMutex) { for (Map.Entry nextInvoker : new ArrayList<>(theGlobalInvokers.entries())) { if (theShouldUnregisterFunction.test(nextInvoker.getValue().getInterceptor())) { @@ -183,7 +184,6 @@ public abstract class BaseInterceptorService & I @Override public boolean registerInterceptor(Object theInterceptor) { synchronized (myRegistryMutex) { - if (isInterceptorAlreadyRegistered(theInterceptor)) { return false; } @@ -191,7 +191,9 @@ public abstract class BaseInterceptorService & I List addedInvokers = scanInterceptorAndAddToInvokerMultimap(theInterceptor, myGlobalInvokers); if (addedInvokers.isEmpty()) { if (myWarnOnInterceptorWithNoHooks) { - ourLog.warn("Interceptor registered with no valid hooks - Type was: {}", theInterceptor.getClass().getName()); + ourLog.warn( + "Interceptor registered with no valid hooks - Type was: {}", + theInterceptor.getClass().getName()); } return false; } @@ -300,10 +302,9 @@ public abstract class BaseInterceptorService & I @VisibleForTesting List getInterceptorsWithInvokersForPointcut(POINTCUT thePointcut) { - return getInvokersForPointcut(thePointcut) - .stream() - .map(BaseInvoker::getInterceptor) - .collect(Collectors.toList()); + return getInvokersForPointcut(thePointcut).stream() + .map(BaseInvoker::getInterceptor) + .collect(Collectors.toList()); } /** @@ -361,13 +362,11 @@ public abstract class BaseInterceptorService & I } else { - retVal = Arrays - .stream(theInvokersLists) - .filter(Objects::nonNull) - .flatMap(Collection::stream) - .sorted() - .collect(Collectors.toList()); - + retVal = Arrays.stream(theInvokersLists) + .filter(Objects::nonNull) + .flatMap(Collection::stream) + .sorted() + .collect(Collectors.toList()); } return retVal; @@ -377,8 +376,17 @@ public abstract class BaseInterceptorService & I * Only call this when assertions are enabled, it's expensive */ final boolean haveAppropriateParams(POINTCUT thePointcut, HookParams theParams) { - if (theParams.getParamsForType().values().size() != thePointcut.getParameterTypes().size()) { - throw new IllegalArgumentException(Msg.code(1909) + String.format("Wrong number of params for pointcut %s - Wanted %s but found %s", thePointcut.name(), toErrorString(thePointcut.getParameterTypes()), theParams.getParamsForType().values().stream().map(t -> t != null ? t.getClass().getSimpleName() : "null").sorted().collect(Collectors.toList()))); + if (theParams.getParamsForType().values().size() + != thePointcut.getParameterTypes().size()) { + throw new IllegalArgumentException(Msg.code(1909) + + String.format( + "Wrong number of params for pointcut %s - Wanted %s but found %s", + thePointcut.name(), + toErrorString(thePointcut.getParameterTypes()), + theParams.getParamsForType().values().stream() + .map(t -> t != null ? 
t.getClass().getSimpleName() : "null") + .sorted() + .collect(Collectors.toList()))); } List wantedTypes = new ArrayList<>(thePointcut.getParameterTypes()); @@ -387,15 +395,26 @@ public abstract class BaseInterceptorService & I for (Class nextTypeClass : givenTypes.keySet()) { String nextTypeName = nextTypeClass.getName(); for (Object nextParamValue : givenTypes.get(nextTypeClass)) { - Validate.isTrue(nextParamValue == null || nextTypeClass.isAssignableFrom(nextParamValue.getClass()), "Invalid params for pointcut %s - %s is not of type %s", thePointcut.name(), nextParamValue != null ? nextParamValue.getClass() : "null", nextTypeClass); - Validate.isTrue(wantedTypes.remove(nextTypeName), "Invalid params for pointcut %s - Wanted %s but found %s", thePointcut.name(), toErrorString(thePointcut.getParameterTypes()), nextTypeName); + Validate.isTrue( + nextParamValue == null || nextTypeClass.isAssignableFrom(nextParamValue.getClass()), + "Invalid params for pointcut %s - %s is not of type %s", + thePointcut.name(), + nextParamValue != null ? nextParamValue.getClass() : "null", + nextTypeClass); + Validate.isTrue( + wantedTypes.remove(nextTypeName), + "Invalid params for pointcut %s - Wanted %s but found %s", + thePointcut.name(), + toErrorString(thePointcut.getParameterTypes()), + nextTypeName); } } return true; } - private List scanInterceptorAndAddToInvokerMultimap(Object theInterceptor, ListMultimap theInvokers) { + private List scanInterceptorAndAddToInvokerMultimap( + Object theInterceptor, ListMultimap theInvokers) { Class interceptorClass = theInterceptor.getClass(); int typeOrder = determineOrder(interceptorClass); @@ -403,8 +422,8 @@ public abstract class BaseInterceptorService & I // Invoke the REGISTERED pointcut for any added hooks addedInvokers.stream() - .filter(t -> Pointcut.INTERCEPTOR_REGISTERED.equals(t.getPointcut())) - .forEach(t -> t.invoke(new HookParams())); + .filter(t -> Pointcut.INTERCEPTOR_REGISTERED.equals(t.getPointcut())) + .forEach(t -> t.invoke(new HookParams())); // Register the interceptor and its various hooks for (HookInvoker nextAddedHook : addedInvokers) { @@ -458,7 +477,8 @@ public abstract class BaseInterceptorService & I /** * Constructor */ - private HookInvoker(HookDescriptor theHook, @Nonnull Object theInterceptor, @Nonnull Method theHookMethod, int theOrder) { + private HookInvoker( + HookDescriptor theHook, @Nonnull Object theInterceptor, @Nonnull Method theHookMethod, int theOrder) { super(theInterceptor, theOrder); myPointcut = theHook.getPointcut(); myParameterTypes = theHookMethod.getParameterTypes(); @@ -466,11 +486,18 @@ public abstract class BaseInterceptorService & I Class returnType = theHookMethod.getReturnType(); if (myPointcut.getReturnType().equals(boolean.class)) { - Validate.isTrue(boolean.class.equals(returnType) || void.class.equals(returnType), "Method does not return boolean or void: %s", theHookMethod); + Validate.isTrue( + boolean.class.equals(returnType) || void.class.equals(returnType), + "Method does not return boolean or void: %s", + theHookMethod); } else if (myPointcut.getReturnType().equals(void.class)) { Validate.isTrue(void.class.equals(returnType), "Method does not return void: %s", theHookMethod); } else { - Validate.isTrue(myPointcut.getReturnType().isAssignableFrom(returnType) || void.class.equals(returnType), "Method does not return %s or void: %s", myPointcut.getReturnType(), theHookMethod); + Validate.isTrue( + myPointcut.getReturnType().isAssignableFrom(returnType) || void.class.equals(returnType), + "Method does 
not return %s or void: %s", + myPointcut.getReturnType(), + theHookMethod); } myParameterIndexes = new int[myParameterTypes.length]; @@ -486,8 +513,8 @@ public abstract class BaseInterceptorService & I @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("method", myMethod) - .toString(); + .append("method", myMethod) + .toString(); } public POINTCUT getPointcut() { @@ -525,14 +552,14 @@ public abstract class BaseInterceptorService & I if (targetException instanceof RuntimeException) { throw ((RuntimeException) targetException); } else { - throw new InternalErrorException(Msg.code(1910) + "Failure invoking interceptor for pointcut(s) " + getPointcut(), targetException); + throw new InternalErrorException( + Msg.code(1910) + "Failure invoking interceptor for pointcut(s) " + getPointcut(), + targetException); } } catch (Exception e) { throw new InternalErrorException(Msg.code(1911) + e); } - } - } protected class HookDescriptor { @@ -552,7 +579,6 @@ public abstract class BaseInterceptorService & I int getOrder() { return myOrder; } - } protected abstract static class BaseInvoker implements Comparable { @@ -577,7 +603,8 @@ public abstract class BaseInterceptorService & I } } - protected static Optional findAnnotation(AnnotatedElement theObject, Class theHookClass) { + protected static Optional findAnnotation( + AnnotatedElement theObject, Class theHookClass) { T annotation; if (theObject instanceof Method) { annotation = MethodUtils.getAnnotation((Method) theObject, theHookClass, true, true); @@ -589,15 +616,11 @@ public abstract class BaseInterceptorService & I private static int determineOrder(Class theInterceptorClass) { return findAnnotation(theInterceptorClass, Interceptor.class) - .map(Interceptor::order) - .orElse(Interceptor.DEFAULT_ORDER); + .map(Interceptor::order) + .orElse(Interceptor.DEFAULT_ORDER); } private static String toErrorString(List theParameterTypes) { - return theParameterTypes - .stream() - .sorted() - .collect(Collectors.joining(",")); + return theParameterTypes.stream().sorted().collect(Collectors.joining(",")); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/executor/InterceptorService.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/executor/InterceptorService.java index a641a52da5c..a1d8fb8875a 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/executor/InterceptorService.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/executor/InterceptorService.java @@ -32,7 +32,8 @@ import org.apache.commons.lang3.Validate; import java.lang.reflect.Method; import java.util.Optional; -public class InterceptorService extends BaseInterceptorService implements IInterceptorService, IInterceptorBroadcaster { +public class InterceptorService extends BaseInterceptorService + implements IInterceptorService, IInterceptorBroadcaster { /** * Constructor which uses a default name of "default" @@ -55,7 +56,6 @@ public class InterceptorService extends BaseInterceptorService impleme return findAnnotation(nextMethod, Hook.class).map(t -> new HookDescriptor(t.value(), t.order())); } - @Override @VisibleForTesting public void registerAnonymousInterceptor(Pointcut thePointcut, IAnonymousInterceptor theInterceptor) { @@ -70,7 +70,6 @@ public class InterceptorService extends BaseInterceptorService impleme registerAnonymousInterceptor(thePointcut, theInterceptor, invoker); } - private static class AnonymousLambdaInvoker extends BaseInvoker { private final 
IAnonymousInterceptor myHook; private final Pointcut myPointcut; @@ -87,6 +86,4 @@ public class InterceptorService extends BaseInterceptorService impleme return true; } } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/RequestPartitionId.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/RequestPartitionId.java index a0ce6f22854..2486f655e0d 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/RequestPartitionId.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/RequestPartitionId.java @@ -30,8 +30,6 @@ import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.time.LocalDate; import java.util.ArrayList; import java.util.Arrays; @@ -39,6 +37,8 @@ import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; @@ -47,20 +47,26 @@ import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; */ public class RequestPartitionId implements IModelJson { private static final RequestPartitionId ALL_PARTITIONS = new RequestPartitionId(); - private static final ObjectMapper ourObjectMapper = new ObjectMapper().registerModule(new com.fasterxml.jackson.datatype.jsr310.JavaTimeModule()); + private static final ObjectMapper ourObjectMapper = + new ObjectMapper().registerModule(new com.fasterxml.jackson.datatype.jsr310.JavaTimeModule()); + @JsonProperty("partitionDate") private final LocalDate myPartitionDate; + @JsonProperty("allPartitions") private final boolean myAllPartitions; + @JsonProperty("partitionIds") private final List myPartitionIds; + @JsonProperty("partitionNames") private final List myPartitionNames; /** * Constructor for a single partition */ - private RequestPartitionId(@Nullable String thePartitionName, @Nullable Integer thePartitionId, @Nullable LocalDate thePartitionDate) { + private RequestPartitionId( + @Nullable String thePartitionName, @Nullable Integer thePartitionId, @Nullable LocalDate thePartitionDate) { myPartitionIds = toListOrNull(thePartitionId); myPartitionNames = toListOrNull(thePartitionName); myPartitionDate = thePartitionDate; @@ -70,7 +76,10 @@ public class RequestPartitionId implements IModelJson { /** * Constructor for a multiple partition */ - private RequestPartitionId(@Nullable List thePartitionName, @Nullable List thePartitionId, @Nullable LocalDate thePartitionDate) { + private RequestPartitionId( + @Nullable List thePartitionName, + @Nullable List thePartitionId, + @Nullable LocalDate thePartitionDate) { myPartitionIds = toListOrNull(thePartitionId); myPartitionNames = toListOrNull(thePartitionName); myPartitionDate = thePartitionDate; @@ -147,11 +156,11 @@ public class RequestPartitionId implements IModelJson { @Override public int hashCode() { return new HashCodeBuilder(17, 37) - .append(myPartitionDate) - .append(myAllPartitions) - .append(myPartitionIds) - .append(myPartitionNames) - .toHashCode(); + .append(myPartitionDate) + .append(myAllPartitions) + .append(myPartitionIds) + .append(myPartitionNames) + .toHashCode(); } public String toJson() { @@ -180,7 +189,9 @@ public class RequestPartitionId implements IModelJson { if (isAllPartitions()) { return false; } - return hasPartitionIds() && 
getPartitionIds().size() == 1 && getPartitionIds().get(0) == null; + return hasPartitionIds() + && getPartitionIds().size() == 1 + && getPartitionIds().get(0) == null; } public boolean hasPartitionId(Integer thePartitionId) { @@ -253,7 +264,8 @@ public class RequestPartitionId implements IModelJson { } @Nonnull - public static RequestPartitionId fromPartitionId(@Nullable Integer thePartitionId, @Nullable LocalDate thePartitionDate) { + public static RequestPartitionId fromPartitionId( + @Nullable Integer thePartitionId, @Nullable LocalDate thePartitionDate) { return new RequestPartitionId(null, Collections.singletonList(thePartitionId), thePartitionDate); } @@ -263,7 +275,8 @@ public class RequestPartitionId implements IModelJson { } @Nonnull - public static RequestPartitionId fromPartitionIds(@Nonnull Collection thePartitionIds, @Nullable LocalDate thePartitionDate) { + public static RequestPartitionId fromPartitionIds( + @Nonnull Collection thePartitionIds, @Nullable LocalDate thePartitionDate) { return new RequestPartitionId(null, toListOrNull(thePartitionIds), thePartitionDate); } @@ -278,7 +291,8 @@ public class RequestPartitionId implements IModelJson { } @Nonnull - public static RequestPartitionId fromPartitionName(@Nullable String thePartitionName, @Nullable LocalDate thePartitionDate) { + public static RequestPartitionId fromPartitionName( + @Nullable String thePartitionName, @Nullable LocalDate thePartitionDate) { return new RequestPartitionId(thePartitionName, null, thePartitionDate); } @@ -293,17 +307,20 @@ public class RequestPartitionId implements IModelJson { } @Nonnull - public static RequestPartitionId fromPartitionIdAndName(@Nullable Integer thePartitionId, @Nullable String thePartitionName) { + public static RequestPartitionId fromPartitionIdAndName( + @Nullable Integer thePartitionId, @Nullable String thePartitionName) { return new RequestPartitionId(thePartitionName, thePartitionId, null); } @Nonnull - public static RequestPartitionId forPartitionIdAndName(@Nullable Integer thePartitionId, @Nullable String thePartitionName, @Nullable LocalDate thePartitionDate) { + public static RequestPartitionId forPartitionIdAndName( + @Nullable Integer thePartitionId, @Nullable String thePartitionName, @Nullable LocalDate thePartitionDate) { return new RequestPartitionId(thePartitionName, thePartitionId, thePartitionDate); } @Nonnull - public static RequestPartitionId forPartitionIdsAndNames(List thePartitionNames, List thePartitionIds, LocalDate thePartitionDate) { + public static RequestPartitionId forPartitionIdsAndNames( + List thePartitionNames, List thePartitionIds, LocalDate thePartitionDate) { return new RequestPartitionId(thePartitionNames, thePartitionIds, thePartitionDate); } @@ -316,11 +333,9 @@ public class RequestPartitionId implements IModelJson { String retVal = "(all)"; if (!theRequestPartitionId.isAllPartitions()) { assert theRequestPartitionId.hasPartitionIds(); - retVal = theRequestPartitionId - .getPartitionIds() - .stream() - .map(t -> defaultIfNull(t, "null").toString()) - .collect(Collectors.joining(" ")); + retVal = theRequestPartitionId.getPartitionIds().stream() + .map(t -> defaultIfNull(t, "null").toString()) + .collect(Collectors.joining(" ")); } return retVal; } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/TransactionWriteOperationsDetails.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/TransactionWriteOperationsDetails.java index 345524cac99..b97f43609ab 100644 --- 
a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/TransactionWriteOperationsDetails.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/TransactionWriteOperationsDetails.java @@ -44,5 +44,4 @@ public class TransactionWriteOperationsDetails { public void setUpdateRequestUrls(List theUpdateRequestUrls) { myUpdateRequestUrls = theUpdateRequestUrls; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/BaseElement.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/BaseElement.java index 88b587a4764..0b877f2c434 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/BaseElement.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/BaseElement.java @@ -26,22 +26,42 @@ import org.hl7.fhir.instance.model.api.IBaseDatatype; import java.util.*; -public abstract class BaseElement implements /*IElement, */ISupportsUndeclaredExtensions { +public abstract class BaseElement implements /*IElement, */ ISupportsUndeclaredExtensions { private static final long serialVersionUID = -3092659584634499332L; private List myFormatCommentsPost; private List myFormatCommentsPre; private Map userData; - @Child(name = "extension", type = {ExtensionDt.class}, order = 0, min = 0, max = Child.MAX_UNLIMITED, modifier = false, summary = false) - @Description(shortDefinition = "Additional Content defined by implementations", formalDefinition = "May be used to represent additional information that is not part of the basic definition of the resource. In order to make the use of extensions safe and manageable, there is a strict set of governance applied to the definition and use of extensions. Though any implementer is allowed to define an extension, there is a set of requirements that SHALL be met as part of the definition of the extension.") + @Child( + name = "extension", + type = {ExtensionDt.class}, + order = 0, + min = 0, + max = Child.MAX_UNLIMITED, + modifier = false, + summary = false) + @Description( + shortDefinition = "Additional Content defined by implementations", + formalDefinition = + "May be used to represent additional information that is not part of the basic definition of the resource. In order to make the use of extensions safe and manageable, there is a strict set of governance applied to the definition and use of extensions. Though any implementer is allowed to define an extension, there is a set of requirements that SHALL be met as part of the definition of the extension.") private List myUndeclaredExtensions; /** * May be used to represent additional information that is not part of the basic definition of the resource, and that modifies the understanding of the element that contains it. Usually modifier elements provide negation or qualification. In order to make the use of extensions safe and manageable, there is a strict set of governance applied to the definition and use of extensions. Though any implementer is allowed to define an extension, there is a set of requirements that SHALL be met as part of the definition of the extension. Applications processing a resource are required to check for modifier extensions. */ - @Child(name = "modifierExtension", type = {ExtensionDt.class}, order = 1, min = 0, max = Child.MAX_UNLIMITED, modifier = true, summary = false) - @Description(shortDefinition = "Extensions that cannot be ignored", formalDefinition = "May be used to represent additional information that is not part of the basic definition of the resource, and that modifies the understanding of the element that contains it. 
Usually modifier elements provide negation or qualification. In order to make the use of extensions safe and manageable, there is a strict set of governance applied to the definition and use of extensions. Though any implementer is allowed to define an extension, there is a set of requirements that SHALL be met as part of the definition of the extension. Applications processing a resource are required to check for modifier extensions.") + @Child( + name = "modifierExtension", + type = {ExtensionDt.class}, + order = 1, + min = 0, + max = Child.MAX_UNLIMITED, + modifier = true, + summary = false) + @Description( + shortDefinition = "Extensions that cannot be ignored", + formalDefinition = + "May be used to represent additional information that is not part of the basic definition of the resource, and that modifies the understanding of the element that contains it. Usually modifier elements provide negation or qualification. In order to make the use of extensions safe and manageable, there is a strict set of governance applied to the definition and use of extensions. Though any implementer is allowed to define an extension, there is a set of requirements that SHALL be met as part of the definition of the extension. Applications processing a resource are required to check for modifier extensions.") private List myUndeclaredModifierExtensions; @Override @@ -100,15 +120,13 @@ public abstract class BaseElement implements /*IElement, */ISupportsUndeclaredEx @Override public List getFormatCommentsPost() { - if (myFormatCommentsPost == null) - myFormatCommentsPost = new ArrayList(); + if (myFormatCommentsPost == null) myFormatCommentsPost = new ArrayList(); return myFormatCommentsPost; } @Override public List getFormatCommentsPre() { - if (myFormatCommentsPre == null) - myFormatCommentsPre = new ArrayList(); + if (myFormatCommentsPre == null) myFormatCommentsPre = new ArrayList(); return myFormatCommentsPre; } @@ -142,13 +160,13 @@ public abstract class BaseElement implements /*IElement, */ISupportsUndeclaredEx @Override public boolean hasFormatComment() { - return (myFormatCommentsPre != null && !myFormatCommentsPre.isEmpty()) || (myFormatCommentsPost != null && !myFormatCommentsPost.isEmpty()); + return (myFormatCommentsPre != null && !myFormatCommentsPre.isEmpty()) + || (myFormatCommentsPost != null && !myFormatCommentsPost.isEmpty()); } @Override public Object getUserData(String name) { - if (userData == null) - return null; + if (userData == null) return null; return userData.get(name); } @@ -187,5 +205,4 @@ public abstract class BaseElement implements /*IElement, */ISupportsUndeclaredEx } return true; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/BaseIdentifiableElement.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/BaseIdentifiableElement.java index f3161348e5c..a5df208cf1f 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/BaseIdentifiableElement.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/BaseIdentifiableElement.java @@ -83,8 +83,7 @@ public abstract class BaseIdentifiableElement extends BaseElement implements IId private static class LockedId extends IdDt { @CoverageIgnore - public LockedId() { - } + public LockedId() {} @CoverageIgnore public LockedId(String theElementSpecificId) { @@ -94,15 +93,15 @@ public abstract class BaseIdentifiableElement extends BaseElement implements IId @Override @CoverageIgnore public IdDt setValue(String theValue) throws DataFormatException { - throw new 
UnsupportedOperationException(Msg.code(1899) + "Use IElement#setElementSpecificId(String) to set the element ID for an element"); + throw new UnsupportedOperationException( + Msg.code(1899) + "Use IElement#setElementSpecificId(String) to set the element ID for an element"); } @Override @CoverageIgnore public void setValueAsString(String theValue) throws DataFormatException { - throw new UnsupportedOperationException(Msg.code(1900) + "Use IElement#setElementSpecificId(String) to set the element ID for an element"); + throw new UnsupportedOperationException( + Msg.code(1900) + "Use IElement#setElementSpecificId(String) to set the element ID for an element"); } - } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/BasePrimitive.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/BasePrimitive.java index 9bf16db246a..a260025b7bc 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/BasePrimitive.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/BasePrimitive.java @@ -19,25 +19,25 @@ */ package ca.uhn.fhir.model.api; +import ca.uhn.fhir.parser.DataFormatException; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.builder.EqualsBuilder; +import org.apache.commons.lang3.builder.HashCodeBuilder; + import java.io.Externalizable; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.builder.EqualsBuilder; -import org.apache.commons.lang3.builder.HashCodeBuilder; - -import ca.uhn.fhir.parser.DataFormatException; - -public abstract class BasePrimitive extends BaseIdentifiableElement implements IPrimitiveDatatype, Externalizable { +public abstract class BasePrimitive extends BaseIdentifiableElement + implements IPrimitiveDatatype, Externalizable { private T myCoercedValue; private String myStringValue; /** * Subclasses must override to convert a "coerced" value into an encoded one. 
- * + * * @param theValue * Will not be null * @return May return null if the value does not correspond to anything @@ -82,7 +82,7 @@ public abstract class BasePrimitive extends BaseIdentifiableElement implement /** * Subclasses must override to convert an encoded representation of this datatype into a "coerced" one - * + * * @param theValue * Will not be null * @return May return null if the value does not correspond to anything @@ -123,7 +123,7 @@ public abstract class BasePrimitive extends BaseIdentifiableElement implement myStringValue = null; } else { // NB this might be null - myStringValue = encode(myCoercedValue); + myStringValue = encode(myCoercedValue); } } @@ -136,5 +136,4 @@ public abstract class BasePrimitive extends BaseIdentifiableElement implement public boolean hasValue() { return !StringUtils.isBlank(getValueAsString()); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ExtensionDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ExtensionDt.java index a02d5fba451..a25e5791376 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ExtensionDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ExtensionDt.java @@ -33,20 +33,20 @@ import java.util.ArrayList; import java.util.List; @DatatypeDef(name = "Extension") -public class ExtensionDt extends BaseIdentifiableElement implements ICompositeDatatype, IBaseExtension { +public class ExtensionDt extends BaseIdentifiableElement + implements ICompositeDatatype, IBaseExtension { private static final long serialVersionUID = 6399491332783085935L; private boolean myModifier; - - @Child(name="url", type=StringDt.class, order=0, min=1, max=1) + + @Child(name = "url", type = StringDt.class, order = 0, min = 1, max = 1) private StringDt myUrl; @Child(name = "value", type = IDatatype.class, order = 1, min = 0, max = 1) private IBaseDatatype myValue; - - public ExtensionDt() { - } + + public ExtensionDt() {} public ExtensionDt(boolean theIsModifier) { myModifier = theIsModifier; @@ -65,7 +65,7 @@ public class ExtensionDt extends BaseIdentifiableElement implements ICompositeDa myModifier = theIsModifier; myUrl = new StringDt(theUrl); - myValue=theValue; + myValue = theValue; } /** @@ -109,17 +109,19 @@ public class ExtensionDt extends BaseIdentifiableElement implements ICompositeDa * Note that if this extension contains extensions (instead of a datatype) then this method will return null. In that case, you must use {@link #getUndeclaredExtensions()} and * {@link #getUndeclaredModifierExtensions()} to retrieve the child extensions. *

- * + * * @throws ClassCastException * If the value of this extension is not a primitive datatype */ public IPrimitiveDatatype getValueAsPrimitive() { if (!(getValue() instanceof IPrimitiveDatatype)) { - throw new ClassCastException(Msg.code(1887) + "Extension with URL["+myUrl+"] can not be cast to primitive type, type is: "+ getClass().getCanonicalName()); + throw new ClassCastException( + Msg.code(1887) + "Extension with URL[" + myUrl + "] can not be cast to primitive type, type is: " + + getClass().getCanonicalName()); } return (IPrimitiveDatatype) getValue(); } - + @Override public boolean isEmpty() { return super.isBaseEmpty() && (myValue == null || myValue.isEmpty()); @@ -151,7 +153,7 @@ public class ExtensionDt extends BaseIdentifiableElement implements ICompositeDa } @Override - @Deprecated //override deprecated method + @Deprecated // override deprecated method public List getAllPopulatedChildElementsOfType(Class theType) { return new ArrayList(); } @@ -168,7 +170,4 @@ public class ExtensionDt extends BaseIdentifiableElement implements ICompositeDa retVal.append("value", getValue()); return retVal.build(); } - - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IBoundCodeableConcept.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IBoundCodeableConcept.java index ec1c657024e..575ca374b1f 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IBoundCodeableConcept.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IBoundCodeableConcept.java @@ -21,6 +21,4 @@ package ca.uhn.fhir.model.api; import java.io.Serializable; -public interface IBoundCodeableConcept extends Serializable { - -} +public interface IBoundCodeableConcept extends Serializable {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ICodingEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ICodingEnum.java index 8e378dd745c..b337dd1bc93 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ICodingEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ICodingEnum.java @@ -22,7 +22,8 @@ package ca.uhn.fhir.model.api; public interface ICodingEnum { String getCode(); - String getSystem(); - String getDisplay(); + String getSystem(); + + String getDisplay(); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ICompositeDatatype.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ICompositeDatatype.java index d36b7b821b2..c7ab1acdfb1 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ICompositeDatatype.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ICompositeDatatype.java @@ -21,8 +21,4 @@ package ca.uhn.fhir.model.api; import org.hl7.fhir.instance.model.api.ICompositeType; - - -public interface ICompositeDatatype extends IDatatype, ICompositeElement, ICompositeType { - -} +public interface ICompositeDatatype extends IDatatype, ICompositeElement, ICompositeType {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ICompositeElement.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ICompositeElement.java index 8e90bc5f5c6..c5097a9007d 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ICompositeElement.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ICompositeElement.java @@ -25,18 +25,16 @@ public interface ICompositeElement extends IElement { /** * Returns a list containing all child elements matching a given type - * + * * @param theType The type to match. 
If set to null, all child elements will be returned - * + * * @deprecated This method is not used by HAPI at this point, so there isn't much * point to keeping it around. We are not deleting it just so that we don't break * existing implementer code, but you do not need to supply an implementation - * of this code in your own structures. Deprecated in HAPI FHIR 2.3 (Jan 2017). + * of this code in your own structures. Deprecated in HAPI FHIR 2.3 (Jan 2017). * See See for * a discussion about this. */ @Deprecated - - List getAllPopulatedChildElementsOfType(Class theType); - + List getAllPopulatedChildElementsOfType(Class theType); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IDatatype.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IDatatype.java index dd1b967563f..eb4a023a7f9 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IDatatype.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IDatatype.java @@ -21,6 +21,4 @@ package ca.uhn.fhir.model.api; import org.hl7.fhir.instance.model.api.IBaseDatatype; -public interface IDatatype extends IElement, IBaseDatatype { - -} +public interface IDatatype extends IElement, IBaseDatatype {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IElement.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IElement.java index 3ceb5fd01b6..2d6a65821f7 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IElement.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IElement.java @@ -21,9 +21,4 @@ package ca.uhn.fhir.model.api; import org.hl7.fhir.instance.model.api.IBase; - - -public interface IElement extends IBase { - - -} +public interface IElement extends IBase {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IFhirVersion.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IFhirVersion.java index 7b6a76bfdce..f506a1f57aa 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IFhirVersion.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IFhirVersion.java @@ -19,20 +19,19 @@ */ package ca.uhn.fhir.model.api; -import java.io.InputStream; -import java.util.Date; - +import ca.uhn.fhir.context.*; import ca.uhn.fhir.fhirpath.IFhirPath; +import ca.uhn.fhir.rest.api.IVersionSpecificBundleFactory; import org.hl7.fhir.instance.model.api.*; -import ca.uhn.fhir.context.*; -import ca.uhn.fhir.rest.api.IVersionSpecificBundleFactory; +import java.io.InputStream; +import java.util.Date; /** * Each structure version JAR will have an implementation of this interface. * This is used internally by HAPI and subject to change. Do not use this interface * directly in user code. - * + * * See also IFhirVersionServer for the hapi-fhir-server equivalent. */ public interface IFhirVersion { @@ -65,5 +64,4 @@ public interface IFhirVersion { * JAR is on the classpath. 
Otherwise it will result in a {@link ClassNotFoundException} */ Object getServerVersion(); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IIdentifiableElement.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IIdentifiableElement.java index 5b02b6cdbae..3b25a55f18d 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IIdentifiableElement.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IIdentifiableElement.java @@ -55,5 +55,4 @@ public interface IIdentifiableElement extends IElement { */ @Deprecated void setId(String theId); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IModelJson.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IModelJson.java index 886bd1a7715..ef4dd351206 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IModelJson.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IModelJson.java @@ -23,6 +23,10 @@ import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.JsonInclude; @JsonInclude(JsonInclude.Include.NON_NULL) -@JsonAutoDetect(creatorVisibility = JsonAutoDetect.Visibility.NONE, fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE) -public interface IModelJson { -} +@JsonAutoDetect( + creatorVisibility = JsonAutoDetect.Visibility.NONE, + fieldVisibility = JsonAutoDetect.Visibility.NONE, + getterVisibility = JsonAutoDetect.Visibility.NONE, + isGetterVisibility = JsonAutoDetect.Visibility.NONE, + setterVisibility = JsonAutoDetect.Visibility.NONE) +public interface IModelJson {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IPrimitiveDatatype.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IPrimitiveDatatype.java index f2c02f285e3..c535c2b00f3 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IPrimitiveDatatype.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IPrimitiveDatatype.java @@ -19,9 +19,8 @@ */ package ca.uhn.fhir.model.api; -import org.hl7.fhir.instance.model.api.IPrimitiveType; - import ca.uhn.fhir.parser.DataFormatException; +import org.hl7.fhir.instance.model.api.IPrimitiveType; public interface IPrimitiveDatatype extends IDatatype, IPrimitiveType { @@ -33,7 +32,7 @@ public interface IPrimitiveDatatype extends IDatatype, IPrimitiveType { @Override T getValue(); - + @Override IPrimitiveType setValue(T theValue) throws DataFormatException; } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IQueryParameterAnd.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IQueryParameterAnd.java index a387488538e..f1d50db7b02 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IQueryParameterAnd.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IQueryParameterAnd.java @@ -19,37 +19,35 @@ */ package ca.uhn.fhir.model.api; -import java.io.Serializable; - -import java.util.List; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.rest.api.QualifiedParamList; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import java.io.Serializable; +import java.util.List; + public interface IQueryParameterAnd> extends Serializable { /** - * + * *

- * See FHIR specification + * See FHIR specification * 2.2.2 Search SearchParameter Types * for information on the token format *

* @param theContext TODO * @param theParamName TODO */ - void setValuesAsQueryTokens(FhirContext theContext, String theParamName, List theParameters) throws InvalidRequestException; + void setValuesAsQueryTokens(FhirContext theContext, String theParamName, List theParameters) + throws InvalidRequestException; /** - * + * *

- * See FHIR specification + * See FHIR specification * 2.2.2 Search SearchParameter Types * for information on the token format *

*/ List getValuesAsQueryTokens(); - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IQueryParameterOr.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IQueryParameterOr.java index d9e6a9fd95d..ca6494b6588 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IQueryParameterOr.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IQueryParameterOr.java @@ -19,17 +19,15 @@ */ package ca.uhn.fhir.model.api; -import java.io.Serializable; - -import java.util.List; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.rest.api.QualifiedParamList; +import java.io.Serializable; +import java.util.List; + public interface IQueryParameterOr extends Serializable { void setValuesAsQueryTokens(FhirContext theContext, String theParamName, QualifiedParamList theParameters); List getValuesAsQueryTokens(); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IQueryParameterType.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IQueryParameterType.java index 216f1c67cbb..f579bd272ef 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IQueryParameterType.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IQueryParameterType.java @@ -19,15 +19,15 @@ */ package ca.uhn.fhir.model.api; -import java.io.Serializable; - import ca.uhn.fhir.context.FhirContext; +import java.io.Serializable; + public interface IQueryParameterType extends Serializable { /** - * This method is generally only called by HAPI itself, and should not need to be called from user code. - * + * This method is generally only called by HAPI itself, and should not need to be called from user code. + * *

* See FHIR specification 2.2.2 Search * SearchParameter Types for information on the token format @@ -45,36 +45,35 @@ public interface IQueryParameterType extends Serializable { /** * Returns a representation of this parameter's value as it will be represented "over the wire". In other - * words, how it will be presented in a URL (although not URL escaped) - * + * words, how it will be presented in a URL (although not URL escaped) + * *

* See FHIR specification 2.2.2 Search * SearchParameter Types for information on the token format *

* @param theContext TODO - * + * * @return Returns a representation of this parameter's value as it will be represented "over the wire". In other - * words, how it will be presented in a URL (although not URL escaped) + * words, how it will be presented in a URL (although not URL escaped) */ public String getValueAsQueryToken(FhirContext theContext); - + /** * This method will return any qualifier that should be appended to the parameter name (e.g ":exact"). Returns null if none are present. */ public String getQueryParameterQualifier(); /** - * If set to non-null value, indicates that this parameter has been populated with a "[name]:missing=true" or "[name]:missing=false" vale - * instead of a normal value + * If set to non-null value, indicates that this parameter has been populated with a "[name]:missing=true" or "[name]:missing=false" vale + * instead of a normal value */ Boolean getMissing(); /** - * If set to non-null value, indicates that this parameter has been populated with a "[name]:missing=true" or "[name]:missing=false" vale + * If set to non-null value, indicates that this parameter has been populated with a "[name]:missing=true" or "[name]:missing=false" vale * instead of a normal value - * + * * @return Returns a reference to this for easier method chaining */ IQueryParameterType setMissing(Boolean theMissing); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IResource.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IResource.java index 2e61b083550..0335ad0695a 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IResource.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IResource.java @@ -19,18 +19,17 @@ */ package ca.uhn.fhir.model.api; -import org.hl7.fhir.instance.model.api.IBaseMetaType; - import ca.uhn.fhir.model.api.annotation.ResourceDef; import ca.uhn.fhir.model.base.composite.BaseContainedDt; import ca.uhn.fhir.model.base.composite.BaseNarrativeDt; import ca.uhn.fhir.model.base.resource.ResourceMetadataMap; import ca.uhn.fhir.model.primitive.CodeDt; import ca.uhn.fhir.model.primitive.IdDt; +import org.hl7.fhir.instance.model.api.IBaseMetaType; /** * This interface is the parent interface for all FHIR Resource definition classes. Classes implementing this interface should be annotated with the {@link ResourceDef @ResourceDef} annotation. - * + * *

* Note that this class is a part of HAPI's model API, used to define structure classes. Users will often interact with this interface, but should not need to implement it directly. *

@@ -66,8 +65,8 @@ public interface IResource extends ICompositeElement, org.hl7.fhir.instance.mode /** * Returns a view of the {@link #getResourceMetadata() resource metadata} map. * Note that getters from this map return immutable objects, but the addFoo() - * and setFoo() methods may be used to modify metadata. - * + * and setFoo() methods may be used to modify metadata. + * * @since 1.5 */ @Override @@ -78,14 +77,14 @@ public interface IResource extends ICompositeElement, org.hl7.fhir.instance.mode *

* Keys in this map are enumerated in the {@link ResourceMetadataKeyEnum}, and each key has a specific value type that it must use. *

- * + * * @see ResourceMetadataKeyEnum for a list of allowable keys and the object types that values of a given key must use. */ ResourceMetadataMap getResourceMetadata(); /** * Returns a String representing the name of this Resource. This return value is not used for anything by HAPI itself, but is provided as a convenience to developers using the API. - * + * * @return the name of this resource, e.g. "Patient", or "Observation" */ String getResourceName(); @@ -117,10 +116,9 @@ public interface IResource extends ICompositeElement, org.hl7.fhir.instance.mode /** * Sets the metadata map for this object. Metadata entries are used to get/set feed bundle entries, such as the resource version, or the last updated timestamp. - * + * * @throws NullPointerException * The map must not be null */ void setResourceMetadata(ResourceMetadataMap theMap); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IStreamingDatatype.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IStreamingDatatype.java index 0d4018c91c1..4b86d89996a 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IStreamingDatatype.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IStreamingDatatype.java @@ -27,5 +27,4 @@ import java.io.Writer; public interface IStreamingDatatype extends IPrimitiveType { void writeAsText(Writer theWriter) throws IOException; - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ISupportsUndeclaredExtensions.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ISupportsUndeclaredExtensions.java index 1af32fd40ce..9c5567f2604 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ISupportsUndeclaredExtensions.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ISupportsUndeclaredExtensions.java @@ -24,7 +24,7 @@ import org.hl7.fhir.instance.model.api.IBaseDatatype; import java.util.List; public interface ISupportsUndeclaredExtensions extends IElement { - + /** * Returns a list containing all undeclared non-modifier extensions. The returned list * is mutable, so it may be modified (e.g. to add or remove an extension). @@ -33,14 +33,14 @@ public interface ISupportsUndeclaredExtensions extends IElement { /** * Returns an immutable list containing all undeclared extensions (modifier and non-modifier) by extension URL - * + * * @see #getUndeclaredExtensions() To return a mutable list which may be used to remove extensions */ List getUndeclaredExtensionsByUrl(String theUrl); /** * Returns an immutable list containing all extensions (modifier and non-modifier). - * + * * @see #getUndeclaredExtensions() To return a mutable list which may be used to remove undeclared non-modifier extensions * @see #getUndeclaredModifierExtensions() To return a mutable list which may be used to remove undeclared modifier extensions */ @@ -51,7 +51,7 @@ public interface ISupportsUndeclaredExtensions extends IElement { * is mutable, so it may be modified (e.g. to add or remove an extension). */ List getUndeclaredModifierExtensions(); - + /** * Adds an extension to this object. This extension should have the * following properties set: @@ -63,15 +63,15 @@ public interface ISupportsUndeclaredExtensions extends IElement { *
  • {@link ExtensionDt#setValue(IBaseDatatype) A datatype value}
  • {@link #addUndeclaredExtension(ExtensionDt) Further sub-extensions}
  • * - * - * + * + * * @param theExtension The extension to add. Can not be null. */ void addUndeclaredExtension(ExtensionDt theExtension); - + /** * Adds an extension to this object - * + * * @see #getUndeclaredExtensions() To return a mutable list which may be used to remove extensions */ ExtensionDt addUndeclaredExtension(boolean theIsModifier, String theUrl, IBaseDatatype theValue); @@ -80,9 +80,8 @@ public interface ISupportsUndeclaredExtensions extends IElement { * Adds an extension to this object. This method is intended for use when * an extension is being added which will contain child extensions, as opposed to * a datatype. - * + * * @see #getUndeclaredExtensions() To return a mutable list which may be used to remove extensions */ ExtensionDt addUndeclaredExtension(boolean theIsModifier, String theUrl); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IValueSetEnumBinder.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IValueSetEnumBinder.java index 904af055c36..c5ab1ccc5d5 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IValueSetEnumBinder.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/IValueSetEnumBinder.java @@ -19,18 +19,15 @@ */ package ca.uhn.fhir.model.api; - import java.io.Serializable; public interface IValueSetEnumBinder> extends Serializable { T fromCodeString(String theCodeString); - + String toCodeString(T theEnum); String toSystemString(T theEnum); T fromCodeString(String theCodeString, String theSystemString); - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/Include.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/Include.java index 6e27d5ea24a..088e116f7f9 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/Include.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/Include.java @@ -42,7 +42,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; public class Include implements Serializable { private static final long serialVersionUID = 1L; - + private final boolean myImmutable; private boolean myIterate; private String myValue; @@ -52,7 +52,7 @@ public class Include implements Serializable { /** * Constructor for non-recursive include - * + * * @param theValue * The _include value, e.g. "Patient:name" */ @@ -62,7 +62,7 @@ public class Include implements Serializable { /** * Constructor for an include - * + * * @param theValue * The _include value, e.g. "Patient:name" * @param theIterate @@ -74,7 +74,7 @@ public class Include implements Serializable { /** * Constructor for an include - * + * * @param theValue * The _include value, e.g. "Patient:name" * @param theIterate @@ -147,7 +147,6 @@ public class Include implements Serializable { */ public String getParamTargetType() { return myParamTargetType; - } public String getValue() { @@ -209,7 +208,7 @@ public class Include implements Serializable { paramName = value.substring(firstColon + 1); paramTargetType = null; } else { - paramName = value.substring(firstColon + 1, secondColon); + paramName = value.substring(firstColon + 1, secondColon); paramTargetType = value.substring(secondColon + 1); } } @@ -218,7 +217,6 @@ public class Include implements Serializable { myParamName = paramName; myParamTargetType = paramTargetType; myValue = theValue; - } /** @@ -262,7 +260,7 @@ public class Include implements Serializable { * {@link IllegalStateException} * * - * + * * @param theResourceType * The resource type (e.g. "Organization") * @return A new copy of the include. 
Note that if this include is {@link #toLocked() locked}, the returned include @@ -270,16 +268,17 @@ public class Include implements Serializable { */ public Include withType(String theResourceType) { StringBuilder b = new StringBuilder(); - + String paramType = getParamType(); String paramName = getParamName(); if (isBlank(paramType) || isBlank(paramName)) { - throw new IllegalStateException(Msg.code(1889) + "This include does not contain a value in the format [ResourceType]:[paramName]"); + throw new IllegalStateException( + Msg.code(1889) + "This include does not contain a value in the format [ResourceType]:[paramName]"); } b.append(paramType); b.append(":"); b.append(paramName); - + if (isNotBlank(theResourceType)) { b.append(':'); b.append(theResourceType); @@ -287,5 +286,4 @@ public class Include implements Serializable { Include retVal = new Include(b.toString(), myIterate, myImmutable); return retVal; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ResourceMetadataKeyEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ResourceMetadataKeyEnum.java index b39f7f9c265..0718c8cc2ab 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ResourceMetadataKeyEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ResourceMetadataKeyEnum.java @@ -65,8 +65,8 @@ public abstract class ResourceMetadataKeyEnum implements Serializable { * Values for this key are of type {@link InstantDt} *

    */ - public static final ResourceMetadataKeyEnum> DELETED_AT = new ResourceMetadataKeyEnum<>("DELETED_AT", IPrimitiveType.class) { - }; + public static final ResourceMetadataKeyEnum> DELETED_AT = + new ResourceMetadataKeyEnum<>("DELETED_AT", IPrimitiveType.class) {}; /** * If present and populated with a {@link BundleEntrySearchModeEnum}, contains the "bundle entry search mode", which is the value of the status field in the Bundle entry containing this resource. * The value for this key corresponds to field Bundle.entry.search.mode. This value can be set to provide a status value of "include" for included resources being returned by a @@ -78,8 +78,8 @@ public abstract class ResourceMetadataKeyEnum implements Serializable { * Values for this key are of type {@link BundleEntrySearchModeEnum} *

    */ - public static final ResourceMetadataKeyEnum ENTRY_SEARCH_MODE = new ResourceMetadataKeyEnum<>("ENTRY_SEARCH_MODE", BundleEntrySearchModeEnum.class) { - }; + public static final ResourceMetadataKeyEnum ENTRY_SEARCH_MODE = + new ResourceMetadataKeyEnum<>("ENTRY_SEARCH_MODE", BundleEntrySearchModeEnum.class) {}; /** * If present and populated with a {@link BundleEntryTransactionMethodEnum}, contains the "bundle entry transaction operation", which is the value of the status field in the Bundle entry * containing this resource. The value for this key corresponds to field Bundle.entry.transaction.operation. This value can be set in resources being transmitted to a server to @@ -92,8 +92,8 @@ public abstract class ResourceMetadataKeyEnum implements Serializable { * Values for this key are of type {@link BundleEntryTransactionMethodEnum} *

    */ - public static final ResourceMetadataKeyEnum ENTRY_TRANSACTION_METHOD = new ResourceMetadataKeyEnum<>("ENTRY_TRANSACTION_OPERATION", BundleEntryTransactionMethodEnum.class) { - }; + public static final ResourceMetadataKeyEnum ENTRY_TRANSACTION_METHOD = + new ResourceMetadataKeyEnum<>("ENTRY_TRANSACTION_OPERATION", BundleEntryTransactionMethodEnum.class) {}; /** * The value for this key represents a {@link List} of profile IDs that this resource claims to conform to. *

    @@ -101,8 +101,8 @@ public abstract class ResourceMetadataKeyEnum implements Serializable { * Values for this key are of type List<IdDt>. Note that the returned list is unmodifiable, so you need to create a new list and call put to change its value. *

    */ - public static final ResourceMetadataKeyEnum> PROFILES = new ResourceMetadataKeyEnum<>("PROFILES", List.class) { - }; + public static final ResourceMetadataKeyEnum> PROFILES = + new ResourceMetadataKeyEnum<>("PROFILES", List.class) {}; /** * The value for this key is the bundle entry Published time. This is defined by FHIR as "Time resource copied into the feed", which is generally best left to the current time. *

    @@ -114,10 +114,11 @@ public abstract class ResourceMetadataKeyEnum implements Serializable { * * @see InstantDt */ - public static final ResourceMetadataKeyEnum PUBLISHED = new ResourceMetadataKeyEnum<>("PUBLISHED", InstantDt.class) { - }; - public static final ResourceMetadataKeyEnum> SECURITY_LABELS = new ResourceMetadataKeyEnum<>("SECURITY_LABELS", List.class) { - }; + public static final ResourceMetadataKeyEnum PUBLISHED = + new ResourceMetadataKeyEnum<>("PUBLISHED", InstantDt.class) {}; + + public static final ResourceMetadataKeyEnum> SECURITY_LABELS = + new ResourceMetadataKeyEnum<>("SECURITY_LABELS", List.class) {}; /** * The value for this key is the list of tags associated with this resource *

    @@ -126,8 +127,8 @@ public abstract class ResourceMetadataKeyEnum implements Serializable { * * @see TagList */ - public static final ResourceMetadataKeyEnum TAG_LIST = new ResourceMetadataKeyEnum<>("TAG_LIST", TagList.class) { - }; + public static final ResourceMetadataKeyEnum TAG_LIST = + new ResourceMetadataKeyEnum<>("TAG_LIST", TagList.class) {}; /** * The value for this key is the bundle entry Updated time. This is defined by FHIR as "Last Updated for resource". This value is also used for populating the "Last-Modified" header in the * case of methods that return a single resource (read, vread, etc.) @@ -137,8 +138,8 @@ public abstract class ResourceMetadataKeyEnum implements Serializable { * * @see InstantDt */ - public static final ResourceMetadataKeyEnum UPDATED = new ResourceMetadataKeyEnum<>("UPDATED", InstantDt.class) { - }; + public static final ResourceMetadataKeyEnum UPDATED = + new ResourceMetadataKeyEnum<>("UPDATED", InstantDt.class) {}; /** * The value for this key is the version ID of the resource object. *

    @@ -148,8 +149,8 @@ public abstract class ResourceMetadataKeyEnum implements Serializable { * @deprecated The {@link IResource#getId()} resource ID will now be populated with the version ID via the {@link IdDt#getVersionIdPart()} method */ @Deprecated - public static final ResourceMetadataKeyEnum VERSION = new ResourceMetadataKeyEnum<>("VERSION", String.class) { - }; + public static final ResourceMetadataKeyEnum VERSION = + new ResourceMetadataKeyEnum<>("VERSION", String.class) {}; /** * The value for this key is the version ID of the resource object. *

    @@ -159,8 +160,9 @@ public abstract class ResourceMetadataKeyEnum implements Serializable { * @deprecated The {@link IResource#getId()} resource ID will now be populated with the version ID via the {@link IdDt#getVersionIdPart()} method */ @Deprecated - public static final ResourceMetadataKeyEnum VERSION_ID = new ResourceMetadataKeyEnum<>("VERSION_ID", IdDt.class) { - }; + public static final ResourceMetadataKeyEnum VERSION_ID = + new ResourceMetadataKeyEnum<>("VERSION_ID", IdDt.class) {}; + private static final long serialVersionUID = 1L; private final String myValue; private final Class myType; @@ -180,9 +182,10 @@ public abstract class ResourceMetadataKeyEnum implements Serializable { } if (retVal != null && !myType.isAssignableFrom(retVal.getClass())) { - throw new InternalErrorException(Msg.code(1890) + "Found an object of type '" + retVal.getClass().getCanonicalName() - + "' in resource metadata for key " + this.name() + " - Expected " - + myType.getCanonicalName()); + throw new InternalErrorException(Msg.code(1890) + "Found an object of type '" + + retVal.getClass().getCanonicalName() + + "' in resource metadata for key " + this.name() + " - Expected " + + myType.getCanonicalName()); } //noinspection unchecked @@ -191,9 +194,10 @@ public abstract class ResourceMetadataKeyEnum implements Serializable { public void put(IBaseResource theResource, T theValue) { if (theValue != null && !myType.isAssignableFrom(theValue.getClass())) { - throw new InternalErrorException(Msg.code(1891) + "Can not put object of type '" + theValue.getClass().getCanonicalName() - + "' in resource metadata for key " + this.name() + " - Expected " - + myType.getCanonicalName()); + throw new InternalErrorException(Msg.code(1891) + "Can not put object of type '" + + theValue.getClass().getCanonicalName() + + "' in resource metadata for key " + this.name() + " - Expected " + + myType.getCanonicalName()); } if (theResource instanceof IAnyResource) { @@ -205,12 +209,9 @@ public abstract class ResourceMetadataKeyEnum implements Serializable { @Override public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; ResourceMetadataKeyEnum other = (ResourceMetadataKeyEnum) obj; if (myValue == null) { return other.myValue == null; diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/StorageResponseCodeEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/StorageResponseCodeEnum.java index 2f46ee889d0..964fad03a83 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/StorageResponseCodeEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/StorageResponseCodeEnum.java @@ -26,16 +26,20 @@ package ca.uhn.fhir.model.api; * This is used in CRUD response OperationOutcome resources. 
*/ public enum StorageResponseCodeEnum implements ICodingEnum { - SUCCESSFUL_CREATE("Create succeeded."), - SUCCESSFUL_CREATE_NO_CONDITIONAL_MATCH("Conditional create succeeded: no existing resource matched the conditional URL."), - SUCCESSFUL_CREATE_WITH_CONDITIONAL_MATCH("Conditional create succeeded: an existing resource matched the conditional URL so no action was taken."), + SUCCESSFUL_CREATE_NO_CONDITIONAL_MATCH( + "Conditional create succeeded: no existing resource matched the conditional URL."), + SUCCESSFUL_CREATE_WITH_CONDITIONAL_MATCH( + "Conditional create succeeded: an existing resource matched the conditional URL so no action was taken."), SUCCESSFUL_UPDATE("Update succeeded."), SUCCESSFUL_UPDATE_AS_CREATE("Update as create succeeded."), SUCCESSFUL_UPDATE_NO_CHANGE("Update succeeded: No changes were detected so no action was taken."), - SUCCESSFUL_UPDATE_NO_CONDITIONAL_MATCH("Conditional update succeeded: no existing resource matched the conditional URL so a new resource was created."), - SUCCESSFUL_UPDATE_WITH_CONDITIONAL_MATCH("Conditional update succeeded: an existing resource matched the conditional URL and was updated."), - SUCCESSFUL_UPDATE_WITH_CONDITIONAL_MATCH_NO_CHANGE("Conditional update succeeded: an existing resource matched the conditional URL and was updated, but no changes were detected so no action was taken."), + SUCCESSFUL_UPDATE_NO_CONDITIONAL_MATCH( + "Conditional update succeeded: no existing resource matched the conditional URL so a new resource was created."), + SUCCESSFUL_UPDATE_WITH_CONDITIONAL_MATCH( + "Conditional update succeeded: an existing resource matched the conditional URL and was updated."), + SUCCESSFUL_UPDATE_WITH_CONDITIONAL_MATCH_NO_CHANGE( + "Conditional update succeeded: an existing resource matched the conditional URL and was updated, but no changes were detected so no action was taken."), SUCCESSFUL_DELETE("Delete succeeded."), SUCCESSFUL_DELETE_ALREADY_DELETED("Delete succeeded: Resource was already deleted so no action was taken."), SUCCESSFUL_DELETE_NOT_FOUND("Delete succeeded: No existing resource was found so no action was taken."), @@ -44,7 +48,8 @@ public enum StorageResponseCodeEnum implements ICodingEnum { SUCCESSFUL_PATCH_NO_CHANGE("Patch succeeded: No changes were detected so no action was taken."), SUCCESSFUL_CONDITIONAL_PATCH("Conditional patch succeeded."), - SUCCESSFUL_CONDITIONAL_PATCH_NO_CHANGE("Conditional patch succeeded: No changes were detected so no action was taken."); + SUCCESSFUL_CONDITIONAL_PATCH_NO_CHANGE( + "Conditional patch succeeded: No changes were detected so no action was taken."); public static final String SYSTEM = "https://hapifhir.io/fhir/CodeSystem/hapi-fhir-storage-response-code"; diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/Tag.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/Tag.java index b0173ecc487..18ce262d52f 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/Tag.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/Tag.java @@ -30,14 +30,14 @@ import java.util.Objects; /** * A single tag *

- * Note on equality- When computing hashCode or equals values for this class, only the
- * {@link #getScheme() scheme} and
+ * Note on equality- When computing hashCode or equals values for this class, only the
+ * {@link #getScheme() scheme} and
 *

    */ public class Tag extends BaseElement implements IElement, IBaseCoding { - + private static final long serialVersionUID = 1L; - + public static final String ATTR_LABEL = "label"; public static final String ATTR_SCHEME = "scheme"; public static final String ATTR_TERM = "term"; @@ -61,8 +61,7 @@ public class Tag extends BaseElement implements IElement, IBaseCoding { private String myVersion; private Boolean myUserSelected; - public Tag() { - } + public Tag() {} /** * @deprecated There is no reason to create a tag with a term and not a scheme, so this constructor will be removed @@ -93,7 +92,6 @@ public class Tag extends BaseElement implements IElement, IBaseCoding { myLabel = theLabel; } - public String getLabel() { return myLabel; } @@ -108,19 +106,15 @@ public class Tag extends BaseElement implements IElement, IBaseCoding { @Override public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; Tag other = (Tag) obj; - return - Objects.equals(myScheme, other.myScheme) && - Objects.equals(myTerm, other.myTerm) && - Objects.equals(myVersion, other.myVersion) && - Objects.equals(myUserSelected, other.myUserSelected); + return Objects.equals(myScheme, other.myScheme) + && Objects.equals(myTerm, other.myTerm) + && Objects.equals(myVersion, other.myVersion) + && Objects.equals(myUserSelected, other.myUserSelected); } @Override @@ -211,7 +205,9 @@ public class Tag extends BaseElement implements IElement, IBaseCoding { } @Override - public String getVersion() { return myVersion; } + public String getVersion() { + return myVersion; + } @Override public IBaseCoding setVersion(String theVersion) { @@ -220,9 +216,13 @@ public class Tag extends BaseElement implements IElement, IBaseCoding { } @Override - public boolean getUserSelected() { return myUserSelected != null && myUserSelected; } + public boolean getUserSelected() { + return myUserSelected != null && myUserSelected; + } - public Boolean getUserSelectedBoolean() { return myUserSelected; } + public Boolean getUserSelectedBoolean() { + return myUserSelected; + } @Override public IBaseCoding setUserSelected(boolean theUserSelected) { @@ -233,5 +233,4 @@ public class Tag extends BaseElement implements IElement, IBaseCoding { public void setUserSelectedBoolean(Boolean theUserSelected) { myUserSelected = theUserSelected; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/TagList.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/TagList.java index 6fb7a125ffa..c010fa834cb 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/TagList.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/TagList.java @@ -34,7 +34,7 @@ import java.util.Set; /** * A collection of tags present on a single resource. TagList is backed by a {@link LinkedHashSet}, so the order of * added tags will be consistent, but duplicates will not be preserved. - * + * *

    * Thread safety: This class is not thread safe *

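(Editorial aside, not part of the patch: as the Tag.java hunks above note, tag equality deliberately ignores the display label. A small sketch of that behaviour, assuming the (scheme, term[, label]) constructors this class defines; the scheme and term values are invented.)

```java
import ca.uhn.fhir.model.api.Tag;

class TagEqualitySketch {
    public static void main(String[] args) {
        // equals()/hashCode() compare scheme, term, version and userSelected only
        Tag labelled = new Tag("http://example.org/tags", "example-term", "Example label");
        Tag unlabelled = new Tag("http://example.org/tags", "example-term");

        System.out.println(labelled.equals(unlabelled)); // true - the label is ignored
    }
}
```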
    @@ -101,7 +101,7 @@ public class TagList implements Set, Serializable, IBase { /** * Add a new tag instance - * + * * @param theScheme * The tag scheme (the system) * @param theTerm @@ -118,7 +118,7 @@ public class TagList implements Set, Serializable, IBase { /** * Add a new tag instance - * + * * @param theScheme * The tag scheme * @param theTerm @@ -153,18 +153,13 @@ public class TagList implements Set, Serializable, IBase { @Override public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; TagList other = (TagList) obj; if (myTagSet == null) { - if (other.myTagSet != null) - return false; - } else if (!myTagSet.equals(other.myTagSet)) - return false; + if (other.myTagSet != null) return false; + } else if (!myTagSet.equals(other.myTagSet)) return false; return true; } @@ -290,5 +285,4 @@ public class TagList implements Set, Serializable, IBase { public void setUserData(String theName, Object theValue) { throw new UnsupportedOperationException(Msg.code(1898)); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/TemporalPrecisionEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/TemporalPrecisionEnum.java index 0857d89c683..a37c6674676 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/TemporalPrecisionEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/TemporalPrecisionEnum.java @@ -19,20 +19,19 @@ */ package ca.uhn.fhir.model.api; +import org.apache.commons.lang3.time.DateUtils; + import java.util.Calendar; import java.util.Date; -import org.apache.commons.lang3.time.DateUtils; - public enum TemporalPrecisionEnum { - YEAR(Calendar.YEAR) { @Override public Date add(Date theInput, int theAmount) { return DateUtils.addYears(theInput, theAmount); } }, - + MONTH(Calendar.MONTH) { @Override public Date add(Date theInput, int theAmount) { @@ -50,7 +49,6 @@ public enum TemporalPrecisionEnum { public Date add(Date theInput, int theAmount) { return DateUtils.addMinutes(theInput, theAmount); } - }, SECOND(Calendar.SECOND) { @Override @@ -58,16 +56,15 @@ public enum TemporalPrecisionEnum { return DateUtils.addSeconds(theInput, theAmount); } }, - + MILLI(Calendar.MILLISECOND) { @Override public Date add(Date theInput, int theAmount) { return DateUtils.addMilliseconds(theInput, theAmount); } - }, - + }, ; - + private int myCalendarConstant; TemporalPrecisionEnum(int theCalendarConstant) { @@ -75,7 +72,7 @@ public enum TemporalPrecisionEnum { } public abstract Date add(Date theInput, int theAmount); - + public int getCalendarConstant() { return myCalendarConstant; } @@ -85,12 +82,18 @@ public enum TemporalPrecisionEnum { */ public int stringLength() { switch (this) { - case YEAR: return 4; - case MONTH: return 7; - case DAY: return 10; - case MINUTE: return 16; - case SECOND: return 19; - case MILLI: return 23; + case YEAR: + return 4; + case MONTH: + return 7; + case DAY: + return 10; + case MINUTE: + return 16; + case SECOND: + return 19; + case MILLI: + return 23; } return 0; // ?? 
} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/Binding.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/Binding.java index 1e3c2896eb8..255cd5a14e7 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/Binding.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/Binding.java @@ -28,7 +28,7 @@ import java.lang.annotation.Target; * Field annotation for fields which are bound to a given valueset */ @Retention(RetentionPolicy.RUNTIME) -@Target(value = { ElementType.FIELD }) +@Target(value = {ElementType.FIELD}) public @interface Binding { /** diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/Block.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/Block.java index 538a1b08caf..7c09704c675 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/Block.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/Block.java @@ -33,7 +33,7 @@ import java.lang.annotation.Target; *

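(Editorial aside, not part of the patch: the TemporalPrecisionEnum hunk above reworks each constant's add() override and the stringLength() switch. A brief sketch of how those methods are typically used; the dates are arbitrary.)

```java
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;

import java.util.Date;

class PrecisionSketch {
    public static void main(String[] args) {
        Date now = new Date();

        // Each constant implements add() with the matching DateUtils call
        Date nextMonth = TemporalPrecisionEnum.MONTH.add(now, 1);
        Date tenSecondsLater = TemporalPrecisionEnum.SECOND.add(now, 10);

        // stringLength() is the length of the ISO-8601 form at that precision,
        // e.g. "2023-07-11" has 10 characters
        System.out.println(TemporalPrecisionEnum.DAY.stringLength()); // 10

        System.out.println(nextMonth.after(now) && tenSecondsLater.after(now)); // true
    }
}
```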
    */ @Retention(RetentionPolicy.RUNTIME) -@Target(value= {ElementType.TYPE}) +@Target(value = {ElementType.TYPE}) public @interface Block { /** @@ -41,5 +41,4 @@ public @interface Block { */ @Deprecated String name() default ""; - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/Child.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/Child.java index 54806d149c0..d35a4d9131a 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/Child.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/Child.java @@ -19,19 +19,19 @@ */ package ca.uhn.fhir.model.api.annotation; +import ca.uhn.fhir.model.api.IElement; + import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import ca.uhn.fhir.model.api.IElement; - /** - * Field annotation for fields within resource and datatype definitions, indicating + * Field annotation for fields within resource and datatype definitions, indicating * a child of that type. */ @Retention(RetentionPolicy.RUNTIME) -@Target(value= {ElementType.FIELD}) +@Target(value = {ElementType.FIELD}) public @interface Child { /** @@ -39,7 +39,7 @@ public @interface Child { * elsewhere */ int ORDER_UNKNOWN = -1; - + /** * Constant value to supply for {@link #max()} to indicate '*' (no maximum) */ @@ -47,7 +47,7 @@ public @interface Child { /** * Constant value to supply for {@link #order()} to indicate that this child should replace the - * entry in the superclass with the same name (and take its {@link Child#order() order} value + * entry in the superclass with the same name (and take its {@link Child#order() order} value * in the process). This is useful if you wish to redefine an existing field in a resource/type * definition in order to constrain/extend it. */ @@ -57,9 +57,9 @@ public @interface Child { * The name of this field, as it will appear in serialized versions of the message */ String name(); - + /** - * The order in which this field comes within its parent. The first field should have a + * The order in which this field comes within its parent. The first field should have a * value of 0, the second a value of 1, etc. */ int order() default ORDER_UNKNOWN; @@ -87,23 +87,22 @@ public @interface Child { Class[] type() default {}; // Not implemented -// /** -// * This value is used when extending a built-in model class and defining a -// * field to replace a field within the built-in class. For example, the {@link Patient} -// * resource has a {@link Patient#getName() name} field, but if you wanted to extend Patient and -// * provide your own implementation of {@link HumanNameDt} (most likely your own subclass of -// * HumanNameDt which adds extensions of your choosing) you could do that using a replacement field. -// */ -// String replaces() default ""; + // /** + // * This value is used when extending a built-in model class and defining a + // * field to replace a field within the built-in class. For example, the {@link Patient} + // * resource has a {@link Patient#getName() name} field, but if you wanted to extend Patient and + // * provide your own implementation of {@link HumanNameDt} (most likely your own subclass of + // * HumanNameDt which adds extensions of your choosing) you could do that using a replacement field. + // */ + // String replaces() default ""; /** * Is this element a modifier? 
*/ - boolean modifier() default false; + boolean modifier() default false; /** * Should this element be included in the summary view */ boolean summary() default false; - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/ChildOrder.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/ChildOrder.java index 08fa8806ffb..8e5f5c33dda 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/ChildOrder.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/ChildOrder.java @@ -26,12 +26,12 @@ import java.lang.annotation.Target; /** * This annotation may be used on a resource type to specify an order for - * the child names. This annotation is intended for situations where the - * class hierarchy makes it impossible to specify the order using only + * the child names. This annotation is intended for situations where the + * class hierarchy makes it impossible to specify the order using only * the {@link Child#order()} property */ @Retention(RetentionPolicy.RUNTIME) -@Target(value= {ElementType.TYPE}) +@Target(value = {ElementType.TYPE}) public @interface ChildOrder { String[] names(); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/Compartment.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/Compartment.java index 6b4152e0aa3..72dfa0c9177 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/Compartment.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/Compartment.java @@ -26,13 +26,12 @@ import java.lang.annotation.Target; /** * This may only be populated on a reference search paramater field. On such a field, places the containing * resource in a compartment with the name(s) specified by the given strings, where the compartment - * belongs to the target resource. For example, this field could be populated with Patient on + * belongs to the target resource. For example, this field could be populated with Patient on * the Observation.subject field. */ @Retention(RetentionPolicy.RUNTIME) -@Target(value= {}) +@Target(value = {}) public @interface Compartment { String name(); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/DatatypeDef.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/DatatypeDef.java index a6100666b09..e9c64fbdb5b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/DatatypeDef.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/DatatypeDef.java @@ -19,37 +19,36 @@ */ package ca.uhn.fhir.model.api.annotation; +import ca.uhn.fhir.model.primitive.BoundCodeDt; +import ca.uhn.fhir.model.primitive.CodeDt; +import org.hl7.fhir.instance.model.api.IBaseDatatype; + import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import org.hl7.fhir.instance.model.api.IBaseDatatype; - -import ca.uhn.fhir.model.primitive.BoundCodeDt; -import ca.uhn.fhir.model.primitive.CodeDt; - /** * Class annotation to note a class which defines a datatype */ @Retention(RetentionPolicy.RUNTIME) -@Target(value= {ElementType.TYPE}) +@Target(value = {ElementType.TYPE}) public @interface DatatypeDef { /** * The defined name of this datatype */ String name(); - + /** * Set this to true (default is false) for any types that are * really only a specialization of another type. 
For example, - * {@link BoundCodeDt} is really just a specific type of + * {@link BoundCodeDt} is really just a specific type of * {@link CodeDt} and not a separate datatype, so it should * have this set to true. */ boolean isSpecialization() default false; - + /** * Indicates that this datatype is a profile of the given datatype, which * implies certain parsing/encoding rules (e.g. a choice element named @@ -57,5 +56,4 @@ public @interface DatatypeDef { * fooString because Markdown is a profile of string. */ Class profileOf() default IBaseDatatype.class; - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/ExampleSupplier.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/ExampleSupplier.java index a298b4016e1..caff06fb138 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/ExampleSupplier.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/ExampleSupplier.java @@ -30,5 +30,4 @@ import java.util.function.Supplier; public @interface ExampleSupplier { Class>[] value(); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/Extension.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/Extension.java index 5faa6f55879..2dbaa957557 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/Extension.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/Extension.java @@ -28,20 +28,20 @@ import java.lang.annotation.Target; * Field modifier to be placed on a child field (a field also annotated with the {@link Child} annotation) which * indicates that this field is an extension. */ -@Target(value = { ElementType.FIELD }) +@Target(value = {ElementType.FIELD}) @Retention(RetentionPolicy.RUNTIME) public @interface Extension { /** * This parameter affects how the extension is treated when the element definition containing this resource is * exported to a profile. - * + * *

    * If set to true, the resource is taken to be a local resource and its definition is exported * along with the reference. Use this option for extension defintions that you have added locally (i.e. within your * own organization) *

- *
+ *

    * If set to false, the resource is taken to be a remote resource and its definition is * not exported to the profile. Use this option for extensions that are defined by other organizations (i.e. @@ -60,5 +60,4 @@ public @interface Extension { * The URL associated with this extension */ String url(); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/PasswordField.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/PasswordField.java index 7ff98ae3bf5..27c74ac4a60 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/PasswordField.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/PasswordField.java @@ -24,7 +24,6 @@ import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; - /** * This annotation should be added to any {@link ca.uhn.fhir.model.api.IModelJson} * model fields @@ -36,5 +35,4 @@ import java.lang.annotation.Target; */ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.FIELD) -public @interface PasswordField { -} +public @interface PasswordField {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/ResourceDef.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/ResourceDef.java index 2c1e5e2e468..b68fb32e5d7 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/ResourceDef.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/ResourceDef.java @@ -30,13 +30,13 @@ import java.lang.annotation.Target; */ @Inherited @Retention(RetentionPolicy.RUNTIME) -@Target(value= {ElementType.TYPE}) +@Target(value = {ElementType.TYPE}) public @interface ResourceDef { /** * The name of the resource (e.g. "Patient" or "DiagnosticReport"). If you are defining your * own custom extension to a built-in FHIR resource definition type (e.g. you are extending - * the built-in Patient class) you do not need to supply a value for this property, as it + * the built-in Patient class) you do not need to supply a value for this property, as it * will be inferred from the parent class. */ String name() default ""; @@ -50,7 +50,7 @@ public @interface ResourceDef { * your server uses, not necessarily "http://localhost:8080/fhir") */ String id() default ""; - + /** * The URL indicating the profile for this resource definition. If specified, this URL will be * automatically added to the meta tag when the resource is serialized. @@ -60,5 +60,4 @@ public @interface ResourceDef { *

    */ String profile() default ""; - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/SearchParamDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/SearchParamDefinition.java index 5652baab524..c9611a65c94 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/SearchParamDefinition.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/SearchParamDefinition.java @@ -19,14 +19,14 @@ */ package ca.uhn.fhir.model.api.annotation; +import org.hl7.fhir.instance.model.api.IBaseResource; + import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import org.hl7.fhir.instance.model.api.IBaseResource; - -@Target(value=ElementType.FIELD) +@Target(value = ElementType.FIELD) @Retention(RetentionPolicy.RUNTIME) public @interface SearchParamDefinition { @@ -34,24 +34,24 @@ public @interface SearchParamDefinition { * The name for this parameter */ String name(); - + /** * The path for this parameter */ String path(); - + /** * A description of this parameter */ String description() default ""; - + /** * The type for this parameter, e.g. "string", or "token" */ String type() default "string"; - + /** - * If the parameter is of type "composite", this parameter lists the names of the parameters + * If the parameter is of type "composite", this parameter lists the names of the parameters * which this parameter is a composite of. E.g. "name-value-token" is a composite of "name" and "value-token". *

    * If the parameter is not a composite, this parameter must be empty @@ -64,11 +64,10 @@ public @interface SearchParamDefinition { * specify the resource type(s) that this parameter applies to. */ Class[] target() default {}; - + /** * Indicates that this field indicates that resources linked to by this parameter * (must be a reference parameter) place the resource in the given compartment. */ Compartment[] providesMembershipIn() default {}; - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/SimpleSetter.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/SimpleSetter.java index e4325266ef8..434de7f66a4 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/SimpleSetter.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/annotation/SimpleSetter.java @@ -26,22 +26,20 @@ import java.lang.annotation.Target; /** * Marker annotation for a primitive setter method that can be used to - * indicate a "simple setter" method on a resource or composite type. - * + * indicate a "simple setter" method on a resource or composite type. + * * This annotation is used by HAPI's code generator and can be ignored by * client code */ @Retention(RetentionPolicy.RUNTIME) -@Target(value= {ElementType.CONSTRUCTOR}) +@Target(value = {ElementType.CONSTRUCTOR}) public @interface SimpleSetter { - + String suffix() default ""; - - + @Retention(RetentionPolicy.RUNTIME) - @Target(value= {ElementType.PARAMETER}) + @Target(value = {ElementType.PARAMETER}) public @interface Parameter { String name(); - } - + } } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseCodingDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseCodingDt.java index e7396f2f527..a2f335130e9 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseCodingDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseCodingDt.java @@ -63,14 +63,14 @@ public abstract class BaseCodingDt extends BaseIdentifiableElement implements IC * creating it if it does * not exist. Will not return null. * - *

- * Definition:
- * A representation of the meaning of the code in the system, following the rules of the system.
- *

    + *

+ * Definition:
+ * A representation of the meaning of the code in the system, following the rules of the system.
+ *

    */ public abstract StringDt getDisplayElement(); - public abstract BaseCodingDt setDisplay( String theString); + public abstract BaseCodingDt setDisplay(String theString); /* todo: handle version @@ -85,8 +85,11 @@ public abstract class BaseCodingDt extends BaseIdentifiableElement implements IC @Override public String getValueAsQueryToken(FhirContext theContext) { if (getSystemElement().getValueAsString() != null) { - return ParameterUtil.escape(StringUtils.defaultString(getSystemElement().getValueAsString())) + '|' + ParameterUtil.escape(getCodeElement().getValueAsString()); - } + return ParameterUtil.escape( + StringUtils.defaultString(getSystemElement().getValueAsString())) + + '|' + + ParameterUtil.escape(getCodeElement().getValueAsString()); + } return ParameterUtil.escape(getCodeElement().getValueAsString()); } @@ -94,7 +97,8 @@ public abstract class BaseCodingDt extends BaseIdentifiableElement implements IC * {@inheritDoc} */ @Override - public void setValueAsQueryToken(FhirContext theContext, String theParamName, String theQualifier, String theParameter) { + public void setValueAsQueryToken( + FhirContext theContext, String theParamName, String theQualifier, String theParameter) { int barIndex = ParameterUtil.nonEscapedIndexOf(theParameter, '|'); if (barIndex != -1) { setSystem(theParameter.substring(0, barIndex)); @@ -112,7 +116,8 @@ public abstract class BaseCodingDt extends BaseIdentifiableElement implements IC if (theCoding == null) { return false; } - return getCodeElement().equals(theCoding.getCodeElement()) && getSystemElement().equals(theCoding.getSystemElement()); + return getCodeElement().equals(theCoding.getCodeElement()) + && getSystemElement().equals(theCoding.getSystemElement()); } /** @@ -130,14 +135,11 @@ public abstract class BaseCodingDt extends BaseIdentifiableElement implements IC if (theSearchParam.isSystemPresent()) { if (theSearchParam.isSystemBlank()) { // [parameter]=|[code] matches a code/value that has no system namespace - if (isSystemPresent() && !isSystemBlank()) - return false; + if (isSystemPresent() && !isSystemBlank()) return false; } else { // [parameter]=[namespace]|[code] matches a code/value in the given system namespace - if (!isSystemPresent()) - return false; - if (!getSystemElement().equals(theSearchParam.getSystemElement())) - return false; + if (!isSystemPresent()) return false; + if (!getSystemElement().equals(theSearchParam.getSystemElement())) return false; } } else { // [parameter]=[code] matches a code/value irrespective of it's system namespace @@ -173,10 +175,9 @@ public abstract class BaseCodingDt extends BaseIdentifiableElement implements IC */ public abstract BaseCodingDt setSystem(String theUri); - /** * Not supported! - * + * * @deprecated get/setMissing is not supported in StringDt. Use {@link TokenParam} instead if you * need this functionality */ @@ -188,14 +189,15 @@ public abstract class BaseCodingDt extends BaseIdentifiableElement implements IC /** * Not supported! - * + * * @deprecated get/setMissing is not supported in StringDt. Use {@link TokenParam} instead if you * need this functionality */ @Deprecated @Override public IQueryParameterType setMissing(Boolean theMissing) { - throw new UnsupportedOperationException(Msg.code(1903) + "get/setMissing is not supported in StringDt. Use {@link StringParam} instead if you need this functionality"); + throw new UnsupportedOperationException( + Msg.code(1903) + + "get/setMissing is not supported in StringDt. 
Use {@link StringParam} instead if you need this functionality"); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseContainedDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseContainedDt.java index 50a72b68e93..d68ee47978a 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseContainedDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseContainedDt.java @@ -58,5 +58,4 @@ public abstract class BaseContainedDt implements IDatatype { public boolean hasFormatComment() { return false; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseHumanNameDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseHumanNameDt.java index 3991550825b..3fbce8816a1 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseHumanNameDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseHumanNameDt.java @@ -19,15 +19,14 @@ */ package ca.uhn.fhir.model.base.composite; -import java.util.ArrayList; -import java.util.List; - -import org.apache.commons.lang3.builder.ToStringBuilder; -import org.apache.commons.lang3.builder.ToStringStyle; - import ca.uhn.fhir.model.api.BaseIdentifiableElement; import ca.uhn.fhir.model.primitive.StringDt; import ca.uhn.fhir.util.DatatypeUtil; +import org.apache.commons.lang3.builder.ToStringBuilder; +import org.apache.commons.lang3.builder.ToStringStyle; + +import java.util.ArrayList; +import java.util.List; public abstract class BaseHumanNameDt extends BaseIdentifiableElement { @@ -44,7 +43,7 @@ public abstract class BaseHumanNameDt extends BaseIdentifiableElement { /** * Returns all repetitions of {@link #getFamily() family name} as a space separated string - * + * * @see DatatypeUtil#joinStringsSpaceSeparated(List) */ public String getFamilyAsSingleString() { @@ -62,7 +61,7 @@ public abstract class BaseHumanNameDt extends BaseIdentifiableElement { /** * Returns all repetitions of {@link #getGiven() given name} as a space separated string - * + * * @see DatatypeUtil#joinStringsSpaceSeparated(List) */ public String getGivenAsSingleString() { @@ -80,7 +79,7 @@ public abstract class BaseHumanNameDt extends BaseIdentifiableElement { /** * Returns all repetitions of {@link #getPrefix() prefix name} as a space separated string - * + * * @see DatatypeUtil#joinStringsSpaceSeparated(List) */ public String getPrefixAsSingleString() { @@ -98,7 +97,7 @@ public abstract class BaseHumanNameDt extends BaseIdentifiableElement { /** * Returns all repetitions of {@link #getSuffix() suffix} as a space separated string - * + * * @see DatatypeUtil#joinStringsSpaceSeparated(List) */ public String getSuffixAsSingleString() { @@ -132,8 +131,8 @@ public abstract class BaseHumanNameDt extends BaseIdentifiableElement { } /** - * Returns all of the components of the name (prefix, given, family, suffix) as a - * single string with a single spaced string separating each part. + * Returns all of the components of the name (prefix, given, family, suffix) as a + * single string with a single spaced string separating each part. *

    * If none of the parts are populated, returns the {@link #getTextElement() text} * element value instead. @@ -150,5 +149,4 @@ public abstract class BaseHumanNameDt extends BaseIdentifiableElement { } return getTextElement().getValue(); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseIdentifierDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseIdentifierDt.java index 3b4a6fd8c90..bc6a133f833 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseIdentifierDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseIdentifierDt.java @@ -30,7 +30,8 @@ import ca.uhn.fhir.rest.param.ParameterUtil; import ca.uhn.fhir.rest.param.StringParam; import org.apache.commons.lang3.StringUtils; -public abstract class BaseIdentifierDt extends BaseIdentifiableElement implements ICompositeDatatype, IQueryParameterType { +public abstract class BaseIdentifierDt extends BaseIdentifiableElement + implements ICompositeDatatype, IQueryParameterType { private static final long serialVersionUID = 4400972469749953077L; @@ -65,7 +66,9 @@ public abstract class BaseIdentifierDt extends BaseIdentifiableElement implement UriDt system = getSystemElement(); StringDt value = getValueElement(); if (system.getValueAsString() != null) { - return ParameterUtil.escape(StringUtils.defaultString(system.getValueAsString())) + '|' + ParameterUtil.escape(value.getValueAsString()); + return ParameterUtil.escape(StringUtils.defaultString(system.getValueAsString())) + + '|' + + ParameterUtil.escape(value.getValueAsString()); } return ParameterUtil.escape(value.getValueAsString()); } @@ -79,7 +82,8 @@ public abstract class BaseIdentifierDt extends BaseIdentifiableElement implement if (theIdentifier == null) { return false; } - return getValueElement().equals(theIdentifier.getValueElement()) && getSystemElement().equals(theIdentifier.getSystemElement()); + return getValueElement().equals(theIdentifier.getValueElement()) + && getSystemElement().equals(theIdentifier.getSystemElement()); } /** @@ -104,7 +108,8 @@ public abstract class BaseIdentifierDt extends BaseIdentifiableElement implement * {@inheritDoc} */ @Override - public void setValueAsQueryToken(FhirContext theContext, String theParamName, String theQualifier, String theParameter) { + public void setValueAsQueryToken( + FhirContext theContext, String theParamName, String theQualifier, String theParameter) { int barIndex = ParameterUtil.nonEscapedIndexOf(theParameter, '|'); if (barIndex != -1) { setSystem(theParameter.substring(0, barIndex)); @@ -114,10 +119,9 @@ public abstract class BaseIdentifierDt extends BaseIdentifiableElement implement } } - /** * Not supported! - * + * * @deprecated get/setMissing is not supported in StringDt. Use {@link StringParam} instead if you * need this functionality */ @@ -129,14 +133,15 @@ public abstract class BaseIdentifierDt extends BaseIdentifiableElement implement /** * Not supported! - * + * * @deprecated get/setMissing is not supported in StringDt. Use {@link StringParam} instead if you * need this functionality */ @Deprecated @Override public IQueryParameterType setMissing(Boolean theMissing) { - throw new UnsupportedOperationException(Msg.code(1907) + "get/setMissing is not supported in StringDt. Use {@link StringParam} instead if you need this functionality"); + throw new UnsupportedOperationException( + Msg.code(1907) + + "get/setMissing is not supported in StringDt. 
Use {@link StringParam} instead if you need this functionality"); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseNarrativeDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseNarrativeDt.java index c5996bfb27b..ec51ab12e7a 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseNarrativeDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseNarrativeDt.java @@ -19,24 +19,24 @@ */ package ca.uhn.fhir.model.base.composite; -import org.hl7.fhir.instance.model.api.INarrative; - import ca.uhn.fhir.model.api.BaseIdentifiableElement; import ca.uhn.fhir.model.api.ICompositeDatatype; import ca.uhn.fhir.model.primitive.BoundCodeDt; import ca.uhn.fhir.model.primitive.XhtmlDt; +import org.hl7.fhir.instance.model.api.INarrative; /** - * @param The narrative status enum type + * @param The narrative status enum type */ -public abstract class BaseNarrativeDt> extends BaseIdentifiableElement implements ICompositeDatatype, INarrative { +public abstract class BaseNarrativeDt> extends BaseIdentifiableElement + implements ICompositeDatatype, INarrative { private static final long serialVersionUID = -525238683230100077L; public abstract BoundCodeDt getStatus(); @Override - public void setDivAsString(String theString) { + public void setDivAsString(String theString) { getDiv().setValueAsString(theString); } @@ -57,5 +57,4 @@ public abstract class BaseNarrativeDt> extends BaseIdentifiabl } public abstract XhtmlDt getDiv(); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseQuantityDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseQuantityDt.java index f77a8ac95ca..dc3e49dc635 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseQuantityDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseQuantityDt.java @@ -35,28 +35,29 @@ import org.apache.commons.lang3.StringUtils; import java.math.BigDecimal; -public abstract class BaseQuantityDt extends BaseIdentifiableElement implements ICompositeDatatype, IQueryParameterType { +public abstract class BaseQuantityDt extends BaseIdentifiableElement + implements ICompositeDatatype, IQueryParameterType { private static final long serialVersionUID = 1L; /** * Sets the value(s) for value (Numerical value (with implicit precision)) * - *

- * Definition:
- * The value of the measured amount. The value includes an implicit precision in the presentation of the value
- *

    + *

+ * Definition:
+ * The value of the measured amount. The value includes an implicit precision in the presentation of the value
+ *

    */ public abstract BaseQuantityDt setValue(BigDecimal theValue); - @Override - public void setValueAsQueryToken(FhirContext theContext, String theParamName, String theQualifier, String theValue) { + public void setValueAsQueryToken( + FhirContext theContext, String theParamName, String theQualifier, String theValue) { getComparatorElement().setValue(null); - setCode( null); + setCode(null); setSystem(null); - setUnits( null); - setValue( null); + setUnits(null); + setValue(null); if (theValue == null) { return; @@ -64,19 +65,19 @@ public abstract class BaseQuantityDt extends BaseIdentifiableElement implements String[] parts = theValue.split("\\|"); if (parts.length > 0 && StringUtils.isNotBlank(parts[0])) { if (parts[0].startsWith("le")) { - //TODO: Use of a deprecated method should be resolved. + // TODO: Use of a deprecated method should be resolved. getComparatorElement().setValue(ParamPrefixEnum.LESSTHAN_OR_EQUALS.getValue()); setValue(new BigDecimal(parts[0].substring(2))); } else if (parts[0].startsWith("lt")) { - //TODO: Use of a deprecated method should be resolved. + // TODO: Use of a deprecated method should be resolved. getComparatorElement().setValue(ParamPrefixEnum.LESSTHAN.getValue()); setValue(new BigDecimal(parts[0].substring(1))); } else if (parts[0].startsWith("ge")) { - //TODO: Use of a deprecated method should be resolved. + // TODO: Use of a deprecated method should be resolved. getComparatorElement().setValue(ParamPrefixEnum.GREATERTHAN_OR_EQUALS.getValue()); setValue(new BigDecimal(parts[0].substring(2))); } else if (parts[0].startsWith("gt")) { - //TODO: Use of a deprecated method should be resolved. + // TODO: Use of a deprecated method should be resolved. getComparatorElement().setValue(ParamPrefixEnum.GREATERTHAN.getValue()); setValue(new BigDecimal(parts[0].substring(1))); } else { @@ -89,25 +90,24 @@ public abstract class BaseQuantityDt extends BaseIdentifiableElement implements if (parts.length > 2 && StringUtils.isNotBlank(parts[2])) { setUnits(parts[2]); } - } - + /** * Gets the value(s) for comparator (< | <= | >= | > - how to understand the value). * creating it if it does * not exist. Will not return null. * - *

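(Editorial aside, not part of the patch: the setValueAsQueryToken() hunk above splits an incoming quantity search token on '|' and peels off a comparator prefix. A simplified, standalone sketch of that wire format; it ignores escaping and the ParamPrefixEnum handling, and the sample token is invented.)

```java
class QuantityTokenSketch {
    public static void main(String[] args) {
        // [prefix][value]|[system]|[units] - "greater than 5.4 mg (UCUM)"
        String token = "gt5.4|http://unitsofmeasure.org|mg";

        String[] parts = token.split("\\|");
        String prefix = parts[0].substring(0, 2);            // "gt"
        String value = parts[0].substring(2);                // "5.4"
        String system = parts.length > 1 ? parts[1] : null;  // "http://unitsofmeasure.org"
        String units = parts.length > 2 ? parts[2] : null;   // "mg"

        System.out.println(prefix + " " + value + " " + system + " " + units);
    }
}
```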
- * Definition:
- * How the value should be understood and represented - whether the actual value is greater or less than
- * the stated value due to measurement issues. E.g. if the comparator is \"<\" , then the real value is < stated value
- *

    + *

+ * Definition:
+ * How the value should be understood and represented - whether the actual value is greater or less than
+ * the stated value due to measurement issues. E.g. if the comparator is \"<\" , then the real value is < stated value
+ *

    */ public abstract BoundCodeDt getComparatorElement(); @Override public String getValueAsQueryToken(FhirContext theContext) { - StringBuilder b= new StringBuilder(); + StringBuilder b = new StringBuilder(); if (getComparatorElement() != null) { b.append(getComparatorElement().getValue()); } @@ -116,108 +116,100 @@ public abstract class BaseQuantityDt extends BaseIdentifiableElement implements } b.append('|'); if (!getSystemElement().isEmpty()) { - b.append(getSystemElement().getValueAsString()); + b.append(getSystemElement().getValueAsString()); } b.append('|'); if (!getUnitsElement().isEmpty()) { - b.append(getUnitsElement().getValueAsString()); + b.append(getUnitsElement().getValueAsString()); } - + return b.toString(); } - @Override public String getQueryParameterQualifier() { return null; - } - - - - + } - /** + /** * Sets the value for units (Unit representation) * - *

- * Definition:
- * A human-readable form of the units
- *

    + *

+ * Definition:
+ * A human-readable form of the units
+ *

    */ - public abstract BaseQuantityDt setUnits( String theString); + public abstract BaseQuantityDt setUnits(String theString); - /** * Gets the value(s) for system (System that defines coded unit form). * creating it if it does * not exist. Will not return null. * - *

- * Definition:
- * The identification of the system that provides the coded form of the unit
- *

    + *

+ * Definition:
+ * The identification of the system that provides the coded form of the unit
+ *

    */ public abstract UriDt getSystemElement(); - - - /** + /** * Sets the value for system (System that defines coded unit form) * - *

- * Definition:
- * The identification of the system that provides the coded form of the unit
- *

    + *

+ * Definition:
+ * The identification of the system that provides the coded form of the unit
+ *

    */ - public abstract BaseQuantityDt setSystem( String theUri); - + public abstract BaseQuantityDt setSystem(String theUri); + /** * Gets the value(s) for code (Coded form of the unit). * creating it if it does * not exist. Will not return null. * - *

- * Definition:
- * A computer processable form of the units in some unit representation system
- *

    + *

+ * Definition:
+ * A computer processable form of the units in some unit representation system
+ *

    */ public abstract CodeDt getCodeElement(); - /** + /** * Sets the value for code (Coded form of the unit) * - *

- * Definition:
- * A computer processable form of the units in some unit representation system
- *

    + *

+ * Definition:
+ * A computer processable form of the units in some unit representation system
+ *

    */ - public abstract BaseQuantityDt setCode( String theCode); + public abstract BaseQuantityDt setCode(String theCode); /** * Gets the value(s) for units (Unit representation). * creating it if it does * not exist. Will not return null. * - *

- * Definition:
- * A human-readable form of the units
- *

    + *

+ * Definition:
+ * A human-readable form of the units
+ *

    */ - public abstract StringDt getUnitsElement() ; + public abstract StringDt getUnitsElement(); /** * Gets the value(s) for value (Numerical value (with implicit precision)). * creating it if it does * not exist. Will not return null. * - *

- * Definition:
- * The value of the measured amount. The value includes an implicit precision in the presentation of the value
- *

    + *

+ * Definition:
+ * The value of the measured amount. The value includes an implicit precision in the presentation of the value
+ *

    */ public abstract DecimalDt getValueElement(); - + /** * Not supported! - * + * * @deprecated get/setMissing is not supported in StringDt. Use {@link QuantityParam} instead if you * need this functionality */ @@ -229,15 +221,15 @@ public abstract class BaseQuantityDt extends BaseIdentifiableElement implements /** * Not supported! - * + * * @deprecated get/setMissing is not supported in StringDt. Use {@link QuantityParam} instead if you * need this functionality */ @Deprecated @Override public IQueryParameterType setMissing(Boolean theMissing) { - throw new UnsupportedOperationException(Msg.code(1904) + "get/setMissing is not supported in StringDt. Use {@link StringParam} instead if you need this functionality"); + throw new UnsupportedOperationException( + Msg.code(1904) + + "get/setMissing is not supported in StringDt. Use {@link StringParam} instead if you need this functionality"); } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseResourceReferenceDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseResourceReferenceDt.java index b751beeb009..aa93433de3e 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseResourceReferenceDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/composite/BaseResourceReferenceDt.java @@ -36,7 +36,7 @@ import static org.apache.commons.lang3.StringUtils.isBlank; public abstract class BaseResourceReferenceDt extends BaseIdentifiableElement implements IBaseDatatype, IBaseReference { private static final long serialVersionUID = 1L; - + private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseResourceReferenceDt.class); private IBaseResource myResource; @@ -49,7 +49,7 @@ public abstract class BaseResourceReferenceDt extends BaseIdentifiableElement im /** * Constructor - * + * * @param theResource * The loaded resource itself */ @@ -71,7 +71,7 @@ public abstract class BaseResourceReferenceDt extends BaseIdentifiableElement im * See the FHIR specification section on contained resources for more * information. 
- * + * * @see #loadResource(IRestfulClient) */ @Override @@ -99,14 +99,17 @@ public abstract class BaseResourceReferenceDt extends BaseIdentifiableElement im throw new IllegalStateException(Msg.code(1905) + "Reference has no resource ID defined"); } if (isBlank(resourceId.getBaseUrl()) || isBlank(resourceId.getResourceType())) { - throw new IllegalStateException(Msg.code(1906) + "Reference is not complete (must be in the form [baseUrl]/[resource type]/[resource ID]) - Reference is: " + resourceId.getValue()); + throw new IllegalStateException(Msg.code(1906) + + "Reference is not complete (must be in the form [baseUrl]/[resource type]/[resource ID]) - Reference is: " + + resourceId.getValue()); } String resourceUrl = resourceId.getValue(); ourLog.debug("Loading resource at URL: {}", resourceUrl); - RuntimeResourceDefinition definition = theClient.getFhirContext().getResourceDefinition(resourceId.getResourceType()); + RuntimeResourceDefinition definition = + theClient.getFhirContext().getResourceDefinition(resourceId.getResourceType()); Class resourceType = definition.getImplementingClass(); myResource = theClient.fetchResourceFromUrl(resourceType, resourceUrl); myResource.setId(resourceUrl); @@ -134,10 +137,10 @@ public abstract class BaseResourceReferenceDt extends BaseIdentifiableElement im @Override public String toString() { - org.apache.commons.lang3.builder.ToStringBuilder b = new org.apache.commons.lang3.builder.ToStringBuilder(this, org.apache.commons.lang3.builder.ToStringStyle.SHORT_PREFIX_STYLE); + org.apache.commons.lang3.builder.ToStringBuilder b = new org.apache.commons.lang3.builder.ToStringBuilder( + this, org.apache.commons.lang3.builder.ToStringStyle.SHORT_PREFIX_STYLE); b.append("reference", getReference().getValueAsString()); b.append("loaded", getResource() != null); return b.toString(); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/resource/BaseConformance.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/resource/BaseConformance.java index afde73ef3e5..4de38fe5471 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/resource/BaseConformance.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/resource/BaseConformance.java @@ -19,19 +19,17 @@ */ package ca.uhn.fhir.model.base.resource; -import org.hl7.fhir.instance.model.api.IBaseConformance; - import ca.uhn.fhir.model.api.IResource; import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.model.primitive.StringDt; +import org.hl7.fhir.instance.model.api.IBaseConformance; -//@ResourceDef(name="Conformance") +// @ResourceDef(name="Conformance") public interface BaseConformance extends IResource, IBaseConformance { public abstract StringDt getDescriptionElement(); public abstract StringDt getPublisherElement(); - - public abstract IdDt getFhirVersionElement(); + public abstract IdDt getFhirVersionElement(); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/resource/BaseOperationOutcome.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/resource/BaseOperationOutcome.java index 5f61c1505bb..ce470360e70 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/resource/BaseOperationOutcome.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/resource/BaseOperationOutcome.java @@ -19,15 +19,14 @@ */ package ca.uhn.fhir.model.base.resource; -import java.util.List; - -import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; - import ca.uhn.fhir.model.api.BaseIdentifiableElement; import ca.uhn.fhir.model.api.IResource; 
import ca.uhn.fhir.model.api.IResourceBlock; import ca.uhn.fhir.model.primitive.CodeDt; import ca.uhn.fhir.model.primitive.StringDt; +import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; + +import java.util.List; public interface BaseOperationOutcome extends IResource, IBaseOperationOutcome { @@ -37,12 +36,12 @@ public interface BaseOperationOutcome extends IResource, IBaseOperationOutcome { public abstract BaseIssue getIssueFirstRep(); - public static abstract class BaseIssue extends BaseIdentifiableElement implements IResourceBlock { - + public abstract static class BaseIssue extends BaseIdentifiableElement implements IResourceBlock { + private static final long serialVersionUID = 6700020892151450738L; public abstract CodeDt getSeverityElement(); - + public abstract StringDt getDetailsElement(); public abstract BaseIssue addLocation(String theString); @@ -57,7 +56,4 @@ public interface BaseOperationOutcome extends IResource, IBaseOperationOutcome { public abstract StringDt getLocationFirstRep(); } - - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/resource/BaseSecurityEvent.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/resource/BaseSecurityEvent.java index 239f7737b3c..b7737a9ce02 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/resource/BaseSecurityEvent.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/resource/BaseSecurityEvent.java @@ -21,6 +21,4 @@ package ca.uhn.fhir.model.base.resource; import ca.uhn.fhir.model.api.IResource; -public interface BaseSecurityEvent extends IResource { - -} +public interface BaseSecurityEvent extends IResource {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/resource/ResourceMetadataMap.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/resource/ResourceMetadataMap.java index 5a315b291af..9c996ba6549 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/resource/ResourceMetadataMap.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/base/resource/ResourceMetadataMap.java @@ -19,12 +19,11 @@ */ package ca.uhn.fhir.model.base.resource; -import java.util.HashMap; - import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum; +import java.util.HashMap; + public class ResourceMetadataMap extends HashMap, Object> { private static final long serialVersionUID = 1L; - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/Base64BinaryDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/Base64BinaryDt.java index 839dcf24db8..a03fe3d80e3 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/Base64BinaryDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/Base64BinaryDt.java @@ -19,12 +19,11 @@ */ package ca.uhn.fhir.model.primitive; -import org.apache.commons.codec.binary.Base64; - import ca.uhn.fhir.model.api.BasePrimitive; import ca.uhn.fhir.model.api.annotation.DatatypeDef; import ca.uhn.fhir.model.api.annotation.SimpleSetter; import ca.uhn.fhir.rest.api.Constants; +import org.apache.commons.codec.binary.Base64; @DatatypeDef(name = "base64Binary") public class Base64BinaryDt extends BasePrimitive { @@ -53,5 +52,4 @@ public class Base64BinaryDt extends BasePrimitive { protected String encode(byte[] theValue) { return new String(Base64.encodeBase64(theValue), Constants.CHARSET_UTF8); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/BaseDateTimeDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/BaseDateTimeDt.java index f29fbec0474..5f60f860fb8 
100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/BaseDateTimeDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/BaseDateTimeDt.java @@ -44,7 +44,8 @@ public abstract class BaseDateTimeDt extends BasePrimitive { private static final Map timezoneCache = new ConcurrentHashMap<>(); private static final FastDateFormat ourHumanDateFormat = FastDateFormat.getDateInstance(FastDateFormat.MEDIUM); - private static final FastDateFormat ourHumanDateTimeFormat = FastDateFormat.getDateTimeInstance(FastDateFormat.MEDIUM, FastDateFormat.MEDIUM); + private static final FastDateFormat ourHumanDateTimeFormat = + FastDateFormat.getDateTimeInstance(FastDateFormat.MEDIUM, FastDateFormat.MEDIUM); public static final String NOW_DATE_CONSTANT = "%now"; public static final String TODAY_DATE_CONSTANT = "%today"; private String myFractionalSeconds; @@ -61,14 +62,15 @@ public abstract class BaseDateTimeDt extends BasePrimitive { /** * Constructor - * + * * @throws DataFormatException * If the specified precision is not allowed for this type */ public BaseDateTimeDt(Date theDate, TemporalPrecisionEnum thePrecision) { setValue(theDate, thePrecision); if (isPrecisionAllowed(thePrecision) == false) { - throw new DataFormatException(Msg.code(1880) + "Invalid date/time string (datatype " + getClass().getSimpleName() + " does not support " + thePrecision + " precision): " + theDate); + throw new DataFormatException(Msg.code(1880) + "Invalid date/time string (datatype " + + getClass().getSimpleName() + " does not support " + thePrecision + " precision): " + theDate); } } @@ -82,7 +84,7 @@ public abstract class BaseDateTimeDt extends BasePrimitive { /** * Constructor - * + * * @throws DataFormatException * If the specified precision is not allowed for this type */ @@ -244,7 +246,7 @@ public abstract class BaseDateTimeDt extends BasePrimitive { /** * Gets the precision for this datatype (using the default for the given type if not set) - * + * * @see #setPrecision(TemporalPrecisionEnum) */ public TemporalPrecisionEnum getPrecision() { @@ -310,7 +312,7 @@ public abstract class BaseDateTimeDt extends BasePrimitive { /** * Returns true if this object represents a date that is today's date - * + * * @throws NullPointerException * if {@link #getValue()} returns null */ @@ -369,7 +371,7 @@ public abstract class BaseDateTimeDt extends BasePrimitive { int offsetIdx = getOffsetIndex(value); String time; if (offsetIdx == -1) { - //throwBadDateFormat(theValue); + // throwBadDateFormat(theValue); // No offset - should this be an error? time = value.substring(11); } else { @@ -434,10 +436,9 @@ public abstract class BaseDateTimeDt extends BasePrimitive { if (precision == TemporalPrecisionEnum.MINUTE) { validatePrecisionAndThrowDataFormatException(value, precision); } - + setPrecision(precision); return cal.getTime(); - } private int parseInt(String theValue, String theSubstring, int theLowerBound, int theUpperBound) { @@ -463,7 +464,8 @@ public abstract class BaseDateTimeDt extends BasePrimitive { return this; } - private void setFieldValue(int theField, int theValue, String theFractionalSeconds, int theMinimum, int theMaximum) { + private void setFieldValue( + int theField, int theValue, String theFractionalSeconds, int theMinimum, int theMaximum) { validateValueInRange(theValue, theMinimum, theMaximum); Calendar cal; if (getValue() == null) { @@ -526,24 +528,24 @@ public abstract class BaseDateTimeDt extends BasePrimitive { *

    */ public BaseDateTimeDt setNanos(long theNanos) { - validateValueInRange(theNanos, 0, NANOS_PER_SECOND-1); + validateValueInRange(theNanos, 0, NANOS_PER_SECOND - 1); String fractionalSeconds = StringUtils.leftPad(Long.toString(theNanos), 9, '0'); // Strip trailing 0s for (int i = fractionalSeconds.length(); i > 0; i--) { - if (fractionalSeconds.charAt(i-1) != '0') { + if (fractionalSeconds.charAt(i - 1) != '0') { fractionalSeconds = fractionalSeconds.substring(0, i); break; } } - int millis = (int)(theNanos / NANOS_PER_MILLIS); + int millis = (int) (theNanos / NANOS_PER_MILLIS); setFieldValue(Calendar.MILLISECOND, millis, fractionalSeconds, 0, 999); return this; } /** * Sets the precision for this datatype - * + * * @throws DataFormatException */ public BaseDateTimeDt setPrecision(TemporalPrecisionEnum thePrecision) throws DataFormatException { @@ -615,7 +617,7 @@ public abstract class BaseDateTimeDt extends BasePrimitive { * Sets the value for this type using the given Java Date object as the time, and using the specified precision, as * well as the local timezone as determined by the local operating system. Both of * these properties may be modified in subsequent calls if neccesary. - * + * * @param theValue * The date value * @param thePrecision @@ -666,7 +668,8 @@ public abstract class BaseDateTimeDt extends BasePrimitive { } private void throwBadDateFormat(String theValue, String theMesssage) { - throw new DataFormatException(Msg.code(1883) + "Invalid date/time format: \"" + theValue + "\": " + theMesssage); + throw new DataFormatException( + Msg.code(1883) + "Invalid date/time format: \"" + theValue + "\": " + theMesssage); } /** @@ -685,39 +688,42 @@ public abstract class BaseDateTimeDt extends BasePrimitive { value.setTime(getValue()); switch (getPrecision()) { - case YEAR: - case MONTH: - case DAY: - return ourHumanDateFormat.format(value); - case MILLI: - case SECOND: - default: - return ourHumanDateTimeFormat.format(value); + case YEAR: + case MONTH: + case DAY: + return ourHumanDateFormat.format(value); + case MILLI: + case SECOND: + default: + return ourHumanDateTimeFormat.format(value); } } /** * Returns a human readable version of this date/time using the system local format, converted to the local timezone * if neccesary. - * + * * @see #toHumanDisplay() for a method which does not convert the time to the local timezone before rendering it. 
*/ public String toHumanDisplayLocalTimezone() { switch (getPrecision()) { - case YEAR: - case MONTH: - case DAY: - return ourHumanDateFormat.format(getValue()); - case MILLI: - case SECOND: - default: - return ourHumanDateTimeFormat.format(getValue()); + case YEAR: + case MONTH: + case DAY: + return ourHumanDateFormat.format(getValue()); + case MILLI: + case SECOND: + default: + return ourHumanDateTimeFormat.format(getValue()); } } private void validateCharAtIndexIs(String theValue, int theIndex, char theChar) { if (theValue.charAt(theIndex) != theChar) { - throwBadDateFormat(theValue, "Expected character '" + theChar + "' at index " + theIndex + " but found " + theValue.charAt(theIndex)); + throwBadDateFormat( + theValue, + "Expected character '" + theChar + "' at index " + theIndex + " but found " + + theValue.charAt(theIndex)); } } @@ -729,14 +735,15 @@ public abstract class BaseDateTimeDt extends BasePrimitive { private void validateValueInRange(long theValue, long theMinimum, long theMaximum) { if (theValue < theMinimum || theValue > theMaximum) { - throw new IllegalArgumentException(Msg.code(1884) + "Value " + theValue + " is not between allowable range: " + theMinimum + " - " + theMaximum); + throw new IllegalArgumentException(Msg.code(1884) + "Value " + theValue + + " is not between allowable range: " + theMinimum + " - " + theMaximum); } } private void validatePrecisionAndThrowDataFormatException(String theValue, TemporalPrecisionEnum thePrecision) { if (isPrecisionAllowed(thePrecision) == false) { - throw new DataFormatException(Msg.code(1885) + "Invalid date/time string (datatype " + getClass().getSimpleName() + " does not support " + thePrecision + " precision): " + theValue); + throw new DataFormatException(Msg.code(1885) + "Invalid date/time string (datatype " + + getClass().getSimpleName() + " does not support " + thePrecision + " precision): " + theValue); } } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/BooleanDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/BooleanDt.java index 7c1a4e43c98..0a2b0a2f7e6 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/BooleanDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/BooleanDt.java @@ -62,5 +62,4 @@ public class BooleanDt extends BasePrimitive implements IBaseBooleanDat } return "false"; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/BoundCodeDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/BoundCodeDt.java index 8c3c1c77591..a316526e82b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/BoundCodeDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/BoundCodeDt.java @@ -19,15 +19,14 @@ */ package ca.uhn.fhir.model.primitive; +import ca.uhn.fhir.model.api.IValueSetEnumBinder; +import ca.uhn.fhir.model.api.annotation.DatatypeDef; +import org.apache.commons.lang3.Validate; + import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; -import org.apache.commons.lang3.Validate; - -import ca.uhn.fhir.model.api.IValueSetEnumBinder; -import ca.uhn.fhir.model.api.annotation.DatatypeDef; - @DatatypeDef(name = "code", isSpecialization = true) public class BoundCodeDt> extends CodeDt { @@ -55,9 +54,10 @@ public class BoundCodeDt> extends CodeDt { public IValueSetEnumBinder getBinder() { return myBinder; } - + public T getValueAsEnum() { - Validate.notNull(myBinder, "This object does not have a binder. 
Constructor BoundCodeDt() should not be called!"); + Validate.notNull( + myBinder, "This object does not have a binder. Constructor BoundCodeDt() should not be called!"); T retVal = myBinder.fromCodeString(getValue()); if (retVal == null) { // TODO: throw special exception type? @@ -73,8 +73,9 @@ public class BoundCodeDt> extends CodeDt { } public void setValueAsEnum(T theValue) { - Validate.notNull(myBinder, "This object does not have a binder. Constructor BoundCodeDt() should not be called!"); - if (theValue==null) { + Validate.notNull( + myBinder, "This object does not have a binder. Constructor BoundCodeDt() should not be called!"); + if (theValue == null) { setValue(null); } else { setValue(myBinder.toCodeString(theValue)); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/CodeDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/CodeDt.java index 3e550d130d0..58b9a987373 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/CodeDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/CodeDt.java @@ -19,14 +19,14 @@ */ package ca.uhn.fhir.model.primitive; -import static org.apache.commons.lang3.StringUtils.defaultString; -import static org.apache.commons.lang3.StringUtils.isBlank; - import ca.uhn.fhir.model.api.BasePrimitive; import ca.uhn.fhir.model.api.annotation.DatatypeDef; import ca.uhn.fhir.model.api.annotation.SimpleSetter; -@DatatypeDef(name = "code", profileOf=StringDt.class) +import static org.apache.commons.lang3.StringUtils.defaultString; +import static org.apache.commons.lang3.StringUtils.isBlank; + +@DatatypeDef(name = "code", profileOf = StringDt.class) public class CodeDt extends BasePrimitive implements Comparable { /** @@ -66,5 +66,4 @@ public class CodeDt extends BasePrimitive implements Comparable protected String encode(String theValue) { return theValue; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/DateDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/DateDt.java index c1da96788a5..fdea5ff63ce 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/DateDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/DateDt.java @@ -19,17 +19,16 @@ */ package ca.uhn.fhir.model.primitive; -import java.util.Calendar; - -import java.util.Date; -import java.util.GregorianCalendar; -import java.util.TimeZone; - import ca.uhn.fhir.model.api.TemporalPrecisionEnum; import ca.uhn.fhir.model.api.annotation.DatatypeDef; import ca.uhn.fhir.model.api.annotation.SimpleSetter; import ca.uhn.fhir.parser.DataFormatException; +import java.util.Calendar; +import java.util.Date; +import java.util.GregorianCalendar; +import java.util.TimeZone; + /** * Represents a FHIR date datatype. Valid precisions values for this type are: *
@@ -37,7 +36,7 @@ import ca.uhn.fhir.parser.DataFormatException;
 * <li>{@link TemporalPrecisionEnum#MONTH}
 * <li>{@link TemporalPrecisionEnum#DAY}
 *
- * 
+ *
 *
 * Note on using Java Date objects: This type stores the date as a Java Date. Note that
 * the Java Date has more precision (millisecond precision), and does not store a timezone. As such,
@@ -46,7 +45,7 @@ import ca.uhn.fhir.parser.DataFormatException;
 * intended.
 *
 *
- * As such, it is recommended to use the Calendar or int,int,int constructors 
+ * As such, it is recommended to use the Calendar or int,int,int constructors
 *
    */ @DatatypeDef(name = "date") @@ -91,18 +90,20 @@ public class DateDt extends BaseDateTimeDt { * * Please see the note on timezones on the {@link DateDt class documentation} for considerations * when using this constructor! - * + * * @throws DataFormatException * If the specified precision is not allowed for this type */ @SimpleSetter - public DateDt(@SimpleSetter.Parameter(name = "theDate") Date theDate, @SimpleSetter.Parameter(name = "thePrecision") TemporalPrecisionEnum thePrecision) { + public DateDt( + @SimpleSetter.Parameter(name = "theDate") Date theDate, + @SimpleSetter.Parameter(name = "thePrecision") TemporalPrecisionEnum thePrecision) { super(theDate, thePrecision); } /** * Constructor which accepts a date value and uses the {@link #DEFAULT_PRECISION} for this type. - * + * * @param theYear The year, e.g. 2015 * @param theMonth The month, e.g. 0 for January * @param theDay The day (1 indexed) e.g. 1 for the first day of the month @@ -113,7 +114,7 @@ public class DateDt extends BaseDateTimeDt { /** * Constructor which accepts a date as a string in FHIR format - * + * * @throws DataFormatException * If the precision in the date string is not allowed for this type */ @@ -123,7 +124,7 @@ public class DateDt extends BaseDateTimeDt { /** * Returns the default precision for this datatype - * + * * @see #DEFAULT_PRECISION */ @Override @@ -134,12 +135,12 @@ public class DateDt extends BaseDateTimeDt { @Override protected boolean isPrecisionAllowed(TemporalPrecisionEnum thePrecision) { switch (thePrecision) { - case YEAR: - case MONTH: - case DAY: - return true; - default: - return false; + case YEAR: + case MONTH: + case DAY: + return true; + default: + return false; } } @@ -150,5 +151,4 @@ public class DateDt extends BaseDateTimeDt { retVal.set(Calendar.DATE, theDay); return retVal; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/DateTimeDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/DateTimeDt.java index e9c74ae8488..629952df8d1 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/DateTimeDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/DateTimeDt.java @@ -19,14 +19,14 @@ */ package ca.uhn.fhir.model.primitive; -import java.util.Date; -import java.util.TimeZone; - import ca.uhn.fhir.model.api.TemporalPrecisionEnum; import ca.uhn.fhir.model.api.annotation.DatatypeDef; import ca.uhn.fhir.model.api.annotation.SimpleSetter; import ca.uhn.fhir.parser.DataFormatException; +import java.util.Date; +import java.util.TimeZone; + /** * Represents a FHIR dateTime datatype. Valid precisions values for this type are: *
@@ -69,18 +69,20 @@ public class DateTimeDt extends BaseDateTimeDt {
 * <li>{@link TemporalPrecisionEnum#SECOND}
 * <li>{@link TemporalPrecisionEnum#MILLI}
 *
    - * + * * @throws DataFormatException * If the specified precision is not allowed for this type */ @SimpleSetter - public DateTimeDt(@SimpleSetter.Parameter(name = "theDate") Date theDate, @SimpleSetter.Parameter(name = "thePrecision") TemporalPrecisionEnum thePrecision) { + public DateTimeDt( + @SimpleSetter.Parameter(name = "theDate") Date theDate, + @SimpleSetter.Parameter(name = "thePrecision") TemporalPrecisionEnum thePrecision) { super(theDate, thePrecision, TimeZone.getDefault()); } /** * Create a new instance using a string date/time - * + * * @throws DataFormatException * If the specified precision is not allowed for this type */ @@ -106,14 +108,14 @@ public class DateTimeDt extends BaseDateTimeDt { @Override protected boolean isPrecisionAllowed(TemporalPrecisionEnum thePrecision) { switch (thePrecision) { - case YEAR: - case MONTH: - case DAY: - case SECOND: - case MILLI: - return true; - default: - return false; + case YEAR: + case MONTH: + case DAY: + case SECOND: + case MILLI: + return true; + default: + return false; } } @@ -127,12 +129,11 @@ public class DateTimeDt extends BaseDateTimeDt { /** * Returns the default precision for this datatype - * + * * @see #DEFAULT_PRECISION */ @Override protected TemporalPrecisionEnum getDefaultPrecisionForDatatype() { return DEFAULT_PRECISION; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/DecimalDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/DecimalDt.java index b7d41dd2d59..ba931f4f0ba 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/DecimalDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/DecimalDt.java @@ -19,15 +19,14 @@ */ package ca.uhn.fhir.model.primitive; -import java.math.BigDecimal; -import java.math.MathContext; -import java.math.RoundingMode; - -import org.hl7.fhir.instance.model.api.IBaseDecimalDatatype; - import ca.uhn.fhir.model.api.BasePrimitive; import ca.uhn.fhir.model.api.annotation.DatatypeDef; import ca.uhn.fhir.model.api.annotation.SimpleSetter; +import org.hl7.fhir.instance.model.api.IBaseDecimalDatatype; + +import java.math.BigDecimal; +import java.math.MathContext; +import java.math.RoundingMode; @DatatypeDef(name = "decimal") public class DecimalDt extends BasePrimitive implements Comparable, IBaseDecimalDatatype { @@ -109,7 +108,7 @@ public class DecimalDt extends BasePrimitive implements Comparable implements Comparable implements Comparable */ @DatatypeDef(name = "id", profileOf = StringDt.class) -public class IdDt extends UriDt implements /*IPrimitiveDatatype, */IIdType { +public class IdDt extends UriDt implements /*IPrimitiveDatatype, */ IIdType { private String myBaseUrl; private boolean myHaveComponentParts; @@ -182,7 +182,10 @@ public class IdDt extends UriDt implements /*IPrimitiveDatatype, */IIdTy } private void setHaveComponentParts(IdDt theIdDt) { - if (isBlank(myBaseUrl) && isBlank(myResourceType) && isBlank(myUnqualifiedId) && isBlank(myUnqualifiedVersionId)) { + if (isBlank(myBaseUrl) + && isBlank(myResourceType) + && isBlank(myUnqualifiedId) + && isBlank(myUnqualifiedVersionId)) { myHaveComponentParts = false; } else { myHaveComponentParts = true; @@ -198,7 +201,8 @@ public class IdDt extends UriDt implements /*IPrimitiveDatatype, */IIdTy } else if (theResouce instanceof IAnyResource) { ((IAnyResource) theResouce).setId(getValue()); } else { - throw new IllegalArgumentException(Msg.code(1876) + "Unknown resource class type, does not implement IResource or extend Resource"); + throw new 
IllegalArgumentException( + Msg.code(1876) + "Unknown resource class type, does not implement IResource or extend Resource"); } } @@ -230,7 +234,9 @@ public class IdDt extends UriDt implements /*IPrimitiveDatatype, */IIdTy if (theId.isEmpty()) { return isEmpty(); } - return ObjectUtils.equals(getResourceType(), theId.getResourceType()) && ObjectUtils.equals(getIdPart(), theId.getIdPart()) && ObjectUtils.equals(getVersionIdPart(), theId.getVersionIdPart()); + return ObjectUtils.equals(getResourceType(), theId.getResourceType()) + && ObjectUtils.equals(getIdPart(), theId.getIdPart()) + && ObjectUtils.equals(getVersionIdPart(), theId.getVersionIdPart()); } /** @@ -285,7 +291,6 @@ public class IdDt extends UriDt implements /*IPrimitiveDatatype, */IIdTy return myResourceType; } - /** * Returns the value of this ID. Note that this value may be a fully qualified URL, a relative/partial URL, or a simple ID. Use {@link #getIdPart()} to get just the ID portion. * @@ -395,8 +400,10 @@ public class IdDt extends UriDt implements /*IPrimitiveDatatype, */IIdTy } if (typeIndex >= idIndex) { // e.g. http://example.org/foo - // 'foo' was the id but we're making that the resource type. Nullify the id part because we don't have an id. - // Also set null value to the super.setValue() and enable myHaveComponentParts so it forces getValue() to properly + // 'foo' was the id but we're making that the resource type. Nullify the id part because we + // don't have an id. + // Also set null value to the super.setValue() and enable myHaveComponentParts so it forces + // getValue() to properly // recreate the url myResourceType = myUnqualifiedId; myUnqualifiedId = null; @@ -409,10 +416,8 @@ public class IdDt extends UriDt implements /*IPrimitiveDatatype, */IIdTy if (typeIndex > 4) { myBaseUrl = theValue.substring(0, typeIndex); } - } } - } return this; } @@ -555,7 +560,7 @@ public class IdDt extends UriDt implements /*IPrimitiveDatatype, */IIdTy * * @deprecated */ - @Deprecated //override deprecated method + @Deprecated // override deprecated method @Override public void setId(IdDt theId) { setValue(theId.getValue()); @@ -564,11 +569,16 @@ public class IdDt extends UriDt implements /*IPrimitiveDatatype, */IIdTy @Override public IIdType setParts(String theBaseUrl, String theResourceType, String theIdPart, String theVersionIdPart) { if (isNotBlank(theVersionIdPart)) { - Validate.notBlank(theResourceType, "If theVersionIdPart is populated, theResourceType and theIdPart must be populated"); - Validate.notBlank(theIdPart, "If theVersionIdPart is populated, theResourceType and theIdPart must be populated"); + Validate.notBlank( + theResourceType, + "If theVersionIdPart is populated, theResourceType and theIdPart must be populated"); + Validate.notBlank( + theIdPart, "If theVersionIdPart is populated, theResourceType and theIdPart must be populated"); } if (isNotBlank(theBaseUrl) && isNotBlank(theIdPart)) { - Validate.notBlank(theResourceType, "If theBaseUrl is populated and theIdPart is populated, theResourceType must be populated"); + Validate.notBlank( + theResourceType, + "If theBaseUrl is populated and theIdPart is populated, theResourceType must be populated"); } setValue(null); @@ -712,5 +722,4 @@ public class IdDt extends UriDt implements /*IPrimitiveDatatype, */IIdTy } return theIdPart.toString(); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/InstantDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/InstantDt.java index d6cbdcdf0f7..f8e602c2769 100644 --- 
a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/InstantDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/InstantDt.java @@ -19,15 +19,15 @@ */ package ca.uhn.fhir.model.primitive; -import java.util.Calendar; -import java.util.Date; -import java.util.TimeZone; - import ca.uhn.fhir.model.api.TemporalPrecisionEnum; import ca.uhn.fhir.model.api.annotation.DatatypeDef; import ca.uhn.fhir.model.api.annotation.SimpleSetter; import ca.uhn.fhir.parser.DataFormatException; +import java.util.Calendar; +import java.util.Date; +import java.util.TimeZone; + /** * Represents a FHIR instant datatype. Valid precisions values for this type are: *
      @@ -48,7 +48,7 @@ public class InstantDt extends BaseDateTimeDt { * that unlike the default constructor for the Java {@link Date} or * {@link Calendar} objects, this constructor does not initialize the object * with the current time. - * + * * @see #withCurrentTime() to create a new object that has been initialized * with the current time. */ @@ -65,7 +65,7 @@ public class InstantDt extends BaseDateTimeDt { /** * Create a new instance using the given date, precision level, and time zone - * + * * @throws DataFormatException * If the specified precision is not allowed for this type */ @@ -73,11 +73,10 @@ public class InstantDt extends BaseDateTimeDt { super(theDate, thePrecision, theTimezone); } - /** * Create a new DateTimeDt using an existing value. Use this constructor with caution, * as it may create more precision than warranted (since for example it is possible to pass in - * a DateTime with only a year, and this constructor will convert to an InstantDt with + * a DateTime with only a year, and this constructor will convert to an InstantDt with * milliseconds precision). */ public InstantDt(BaseDateTimeDt theDateTime) { @@ -104,7 +103,9 @@ public class InstantDt extends BaseDateTimeDt { *
    */ @SimpleSetter - public InstantDt(@SimpleSetter.Parameter(name = "theDate") Date theDate, @SimpleSetter.Parameter(name = "thePrecision") TemporalPrecisionEnum thePrecision) { + public InstantDt( + @SimpleSetter.Parameter(name = "theDate") Date theDate, + @SimpleSetter.Parameter(name = "thePrecision") TemporalPrecisionEnum thePrecision) { setValue(theDate); setPrecision(thePrecision); setTimeZone(TimeZone.getDefault()); @@ -112,7 +113,7 @@ public class InstantDt extends BaseDateTimeDt { /** * Create a new InstantDt from a string value - * + * * @param theString * The string representation of the string. Must be in a valid * format according to the FHIR specification @@ -125,7 +126,7 @@ public class InstantDt extends BaseDateTimeDt { /** * Invokes {@link Date#after(Date)} on the contained Date against the given * date - * + * * @throws NullPointerException * If the {@link #getValue() contained Date} is null */ @@ -136,7 +137,7 @@ public class InstantDt extends BaseDateTimeDt { /** * Invokes {@link Date#before(Date)} on the contained Date against the given * date - * + * * @throws NullPointerException * If the {@link #getValue() contained Date} is null */ @@ -158,11 +159,11 @@ public class InstantDt extends BaseDateTimeDt { @Override protected boolean isPrecisionAllowed(TemporalPrecisionEnum thePrecision) { switch (thePrecision) { - case SECOND: - case MILLI: - return true; - default: - return false; + case SECOND: + case MILLI: + return true; + default: + return false; } } @@ -176,12 +177,11 @@ public class InstantDt extends BaseDateTimeDt { /** * Returns the default precision for this datatype - * + * * @see #DEFAULT_PRECISION */ @Override protected TemporalPrecisionEnum getDefaultPrecisionForDatatype() { return DEFAULT_PRECISION; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/IntegerDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/IntegerDt.java index 6da18bc6ad7..21fc9f1afa8 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/IntegerDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/IntegerDt.java @@ -46,7 +46,7 @@ public class IntegerDt extends BasePrimitive implements IBaseIntegerDat /** * Constructor - * + * * @param theIntegerAsString * A string representation of an integer * @throws DataFormatException @@ -69,5 +69,4 @@ public class IntegerDt extends BasePrimitive implements IBaseIntegerDat protected String encode(Integer theValue) { return Integer.toString(theValue); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/MarkdownDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/MarkdownDt.java index a8858f11d51..ac6f7aa4883 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/MarkdownDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/MarkdownDt.java @@ -22,8 +22,6 @@ package ca.uhn.fhir.model.primitive; import ca.uhn.fhir.model.api.annotation.DatatypeDef; import ca.uhn.fhir.util.CoverageIgnore; -@DatatypeDef(name = "markdown", profileOf=StringDt.class) +@DatatypeDef(name = "markdown", profileOf = StringDt.class) @CoverageIgnore -public class MarkdownDt extends StringDt { - -} +public class MarkdownDt extends StringDt {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/OidDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/OidDt.java index 4dbd0c76e20..7624dfb4f79 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/OidDt.java +++ 
b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/OidDt.java @@ -21,9 +21,9 @@ package ca.uhn.fhir.model.primitive; import ca.uhn.fhir.model.api.annotation.DatatypeDef; -@DatatypeDef(name = "oid", profileOf=UriDt.class) +@DatatypeDef(name = "oid", profileOf = UriDt.class) public class OidDt extends UriDt { // TODO: implement restrictions - + } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/PositiveIntDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/PositiveIntDt.java index c807641c31b..69468e963cd 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/PositiveIntDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/PositiveIntDt.java @@ -24,7 +24,7 @@ import ca.uhn.fhir.model.api.annotation.SimpleSetter; import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.util.CoverageIgnore; -@DatatypeDef(name = "positiveInt", profileOf=IntegerDt.class) +@DatatypeDef(name = "positiveInt", profileOf = IntegerDt.class) @CoverageIgnore public class PositiveIntDt extends IntegerDt { @@ -45,7 +45,7 @@ public class PositiveIntDt extends IntegerDt { /** * Constructor - * + * * @param theIntegerAsString * A string representation of an integer * @throws DataFormatException @@ -54,5 +54,4 @@ public class PositiveIntDt extends IntegerDt { public PositiveIntDt(String theIntegerAsString) { setValueAsString(theIntegerAsString); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/StringDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/StringDt.java index 6e04bdb3ca4..dd74e853e0e 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/StringDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/StringDt.java @@ -68,18 +68,13 @@ public class StringDt extends BasePrimitive implements IQueryParameterTy @Override public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; StringDt other = (StringDt) obj; if (getValue() == null) { - if (other.getValue() != null) - return false; - } else if (!getValue().equals(other.getValue())) - return false; + if (other.getValue() != null) return false; + } else if (!getValue().equals(other.getValue())) return false; return true; } @@ -87,7 +82,8 @@ public class StringDt extends BasePrimitive implements IQueryParameterTy * {@inheritDoc} */ @Override - public void setValueAsQueryToken(FhirContext theContext, String theParamName, String theQualifier, String theValue) { + public void setValueAsQueryToken( + FhirContext theContext, String theParamName, String theQualifier, String theValue) { setValue(theValue); } @@ -125,7 +121,7 @@ public class StringDt extends BasePrimitive implements IQueryParameterTy /** * Not supported! - * + * * @deprecated get/setMissing is not supported in StringDt. Use {@link StringParam} instead if you * need this functionality */ @@ -137,14 +133,15 @@ public class StringDt extends BasePrimitive implements IQueryParameterTy /** * Not supported! - * + * * @deprecated get/setMissing is not supported in StringDt. Use {@link StringParam} instead if you * need this functionality */ @Deprecated @Override public IQueryParameterType setMissing(Boolean theMissing) { - throw new UnsupportedOperationException(Msg.code(1874) + "get/setMissing is not supported in StringDt. 
Use {@link StringParam} instead if you need this functionality"); + throw new UnsupportedOperationException( + Msg.code(1874) + + "get/setMissing is not supported in StringDt. Use {@link StringParam} instead if you need this functionality"); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/TimeDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/TimeDt.java index 8f3aefe2ff2..0179e9c56be 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/TimeDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/TimeDt.java @@ -26,13 +26,13 @@ import ca.uhn.fhir.model.api.annotation.SimpleSetter; /** * Represents a Time datatype, per the FHIR specification. A time is a specification of hours and minutes (and optionally milliseconds), with NO date and NO timezone information attached. It is * expressed as a string in the form HH:mm:ss[.SSSS] - * + * *

    * This datatype is not valid in FHIR DSTU1 *

    - * + * * @since FHIR DSTU 2 / HAPI 0.8 - * + * * TODO: have a way of preventing this from being used in DSTU1 resources * TODO: validate time? */ @@ -54,5 +54,4 @@ public class TimeDt extends StringDt implements IQueryParameterType { this(); setValue(theValue); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/UnsignedIntDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/UnsignedIntDt.java index 80a163bb35e..7cb6ad6cc32 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/UnsignedIntDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/UnsignedIntDt.java @@ -24,7 +24,7 @@ import ca.uhn.fhir.model.api.annotation.SimpleSetter; import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.util.CoverageIgnore; -@DatatypeDef(name = "unsignedInt", profileOf=IntegerDt.class) +@DatatypeDef(name = "unsignedInt", profileOf = IntegerDt.class) @CoverageIgnore public class UnsignedIntDt extends IntegerDt { @@ -45,7 +45,7 @@ public class UnsignedIntDt extends IntegerDt { /** * Constructor - * + * * @param theIntegerAsString * A string representation of an integer * @throws DataFormatException @@ -54,5 +54,4 @@ public class UnsignedIntDt extends IntegerDt { public UnsignedIntDt(String theIntegerAsString) { setValueAsString(theIntegerAsString); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/UriDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/UriDt.java index 199e7292880..02d8ce97762 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/UriDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/UriDt.java @@ -19,14 +19,13 @@ */ package ca.uhn.fhir.model.primitive; -import java.net.URI; -import java.net.URISyntaxException; - -import org.apache.commons.lang3.StringUtils; - import ca.uhn.fhir.model.api.BasePrimitive; import ca.uhn.fhir.model.api.annotation.DatatypeDef; import ca.uhn.fhir.model.api.annotation.SimpleSetter; +import org.apache.commons.lang3.StringUtils; + +import java.net.URI; +import java.net.URISyntaxException; @DatatypeDef(name = "uri") public class UriDt extends BasePrimitive { @@ -55,12 +54,9 @@ public class UriDt extends BasePrimitive { @Override public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; UriDt other = (UriDt) obj; if (getValue() == null && other.getValue() == null) { @@ -122,7 +118,7 @@ public class UriDt extends BasePrimitive { /** * Creates a new UriDt instance which uses the given OID as the content (and prepends "urn:oid:" to the OID string * in the value of the newly created UriDt, per the FHIR specification). 
- * + * * @param theOid * The OID to use (null is acceptable and will result in a UriDt instance with a * null value) @@ -134,5 +130,4 @@ public class UriDt extends BasePrimitive { } return new UriDt("urn:oid:" + theOid); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/XhtmlDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/XhtmlDt.java index 546ba247201..cc8161f6d0f 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/XhtmlDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/primitive/XhtmlDt.java @@ -84,7 +84,6 @@ public class XhtmlDt extends BasePrimitive { return theValue; } - /** * Note that as of HAPI FHIR 3.1.0, this method no longer uses * the StAX XMLEvent type as the XML representation, and uses a @@ -153,5 +152,4 @@ public class XhtmlDt extends BasePrimitive { } return value; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/valueset/BundleEntrySearchModeEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/valueset/BundleEntrySearchModeEnum.java index b17a14dcf39..4b60401f0e8 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/valueset/BundleEntrySearchModeEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/valueset/BundleEntrySearchModeEnum.java @@ -19,20 +19,18 @@ */ package ca.uhn.fhir.model.valueset; -import java.util.HashMap; -import java.util.Map; - import ca.uhn.fhir.model.api.IValueSetEnumBinder; import ca.uhn.fhir.util.CoverageIgnore; +import java.util.HashMap; +import java.util.Map; + @CoverageIgnore public enum BundleEntrySearchModeEnum { - MATCH("match", "http://hl7.org/fhir/search-entry-mode"), INCLUDE("include", "http://hl7.org/fhir/search-entry-mode"), - ; - + /** * Identifier for this Value Set: * http://hl7.org/fhir/vs/address-use @@ -45,37 +43,39 @@ public enum BundleEntrySearchModeEnum { */ public static final String VALUESET_NAME = "BundleEntryStatus"; - private static Map CODE_TO_ENUM = new HashMap(); - private static Map> SYSTEM_TO_CODE_TO_ENUM = new HashMap>(); - + private static Map CODE_TO_ENUM = + new HashMap(); + private static Map> SYSTEM_TO_CODE_TO_ENUM = + new HashMap>(); + private final String myCode; private final String mySystem; - + static { for (BundleEntrySearchModeEnum next : BundleEntrySearchModeEnum.values()) { CODE_TO_ENUM.put(next.getCode(), next); - + if (!SYSTEM_TO_CODE_TO_ENUM.containsKey(next.getSystem())) { SYSTEM_TO_CODE_TO_ENUM.put(next.getSystem(), new HashMap()); } - SYSTEM_TO_CODE_TO_ENUM.get(next.getSystem()).put(next.getCode(), next); + SYSTEM_TO_CODE_TO_ENUM.get(next.getSystem()).put(next.getCode(), next); } } - + /** * Returns the code associated with this enumerated value */ public String getCode() { return myCode; } - + /** * Returns the code system associated with this enumerated value */ public String getSystem() { return mySystem; } - + /** * Returns the enumerated value associated with this code */ @@ -87,43 +87,41 @@ public enum BundleEntrySearchModeEnum { /** * Converts codes to their respective enumerated values */ - public static final IValueSetEnumBinder VALUESET_BINDER = new IValueSetEnumBinder() { + public static final IValueSetEnumBinder VALUESET_BINDER = + new IValueSetEnumBinder() { - private static final long serialVersionUID = -3836039426814809083L; + private static final long serialVersionUID = -3836039426814809083L; - @Override - public String toCodeString(BundleEntrySearchModeEnum theEnum) { - return theEnum.getCode(); - } + @Override + public String toCodeString(BundleEntrySearchModeEnum 
theEnum) { + return theEnum.getCode(); + } - @Override - public String toSystemString(BundleEntrySearchModeEnum theEnum) { - return theEnum.getSystem(); - } - - @Override - public BundleEntrySearchModeEnum fromCodeString(String theCodeString) { - return CODE_TO_ENUM.get(theCodeString); - } - - @Override - public BundleEntrySearchModeEnum fromCodeString(String theCodeString, String theSystemString) { - Map map = SYSTEM_TO_CODE_TO_ENUM.get(theSystemString); - if (map == null) { - return null; - } - return map.get(theCodeString); - } - - }; - - /** + @Override + public String toSystemString(BundleEntrySearchModeEnum theEnum) { + return theEnum.getSystem(); + } + + @Override + public BundleEntrySearchModeEnum fromCodeString(String theCodeString) { + return CODE_TO_ENUM.get(theCodeString); + } + + @Override + public BundleEntrySearchModeEnum fromCodeString(String theCodeString, String theSystemString) { + Map map = SYSTEM_TO_CODE_TO_ENUM.get(theSystemString); + if (map == null) { + return null; + } + return map.get(theCodeString); + } + }; + + /** * Constructor */ BundleEntrySearchModeEnum(String theCode, String theSystem) { myCode = theCode; mySystem = theSystem; } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/valueset/BundleEntryTransactionMethodEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/valueset/BundleEntryTransactionMethodEnum.java index b58070500f2..c971fec34a8 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/valueset/BundleEntryTransactionMethodEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/valueset/BundleEntryTransactionMethodEnum.java @@ -20,12 +20,12 @@ package ca.uhn.fhir.model.valueset; -import java.util.HashMap; -import java.util.Map; - import ca.uhn.fhir.model.api.IValueSetEnumBinder; import ca.uhn.fhir.util.CoverageIgnore; +import java.util.HashMap; +import java.util.Map; + /** * This Enum is only used to support using the DSTU1 Bundle structure (ca.uhn.fhir.model.api.Bundle) * on a DSTU2 server. 
It is preferably to use the new DSTU2 Bundle (ca.uhn.fhir.model.dstu2.resource.Bundle) @@ -33,14 +33,12 @@ import ca.uhn.fhir.util.CoverageIgnore; */ @CoverageIgnore public enum BundleEntryTransactionMethodEnum { - GET("GET", "http://hl7.org/fhir/http-verb"), POST("POST", "http://hl7.org/fhir/http-verb"), PUT("PUT", "http://hl7.org/fhir/http-verb"), DELETE("DELETE", "http://hl7.org/fhir/http-verb"), - ; - + /** * Identifier for this Value Set: * http://hl7.org/fhir/vs/address-use @@ -53,37 +51,39 @@ public enum BundleEntryTransactionMethodEnum { */ public static final String VALUESET_NAME = "BundleEntryStatus"; - private static Map CODE_TO_ENUM = new HashMap(); - private static Map> SYSTEM_TO_CODE_TO_ENUM = new HashMap>(); - + private static Map CODE_TO_ENUM = + new HashMap(); + private static Map> SYSTEM_TO_CODE_TO_ENUM = + new HashMap>(); + private final String myCode; private final String mySystem; - + static { for (BundleEntryTransactionMethodEnum next : BundleEntryTransactionMethodEnum.values()) { CODE_TO_ENUM.put(next.getCode(), next); - + if (!SYSTEM_TO_CODE_TO_ENUM.containsKey(next.getSystem())) { SYSTEM_TO_CODE_TO_ENUM.put(next.getSystem(), new HashMap()); } - SYSTEM_TO_CODE_TO_ENUM.get(next.getSystem()).put(next.getCode(), next); + SYSTEM_TO_CODE_TO_ENUM.get(next.getSystem()).put(next.getCode(), next); } } - + /** * Returns the code associated with this enumerated value */ public String getCode() { return myCode; } - + /** * Returns the code system associated with this enumerated value */ public String getSystem() { return mySystem; } - + /** * Returns the enumerated value associated with this code */ @@ -95,43 +95,41 @@ public enum BundleEntryTransactionMethodEnum { /** * Converts codes to their respective enumerated values */ - public static final IValueSetEnumBinder VALUESET_BINDER = new IValueSetEnumBinder() { + public static final IValueSetEnumBinder VALUESET_BINDER = + new IValueSetEnumBinder() { - private static final long serialVersionUID = 7569681479045998433L; + private static final long serialVersionUID = 7569681479045998433L; - @Override - public String toCodeString(BundleEntryTransactionMethodEnum theEnum) { - return theEnum.getCode(); - } + @Override + public String toCodeString(BundleEntryTransactionMethodEnum theEnum) { + return theEnum.getCode(); + } - @Override - public String toSystemString(BundleEntryTransactionMethodEnum theEnum) { - return theEnum.getSystem(); - } - - @Override - public BundleEntryTransactionMethodEnum fromCodeString(String theCodeString) { - return CODE_TO_ENUM.get(theCodeString); - } - - @Override - public BundleEntryTransactionMethodEnum fromCodeString(String theCodeString, String theSystemString) { - Map map = SYSTEM_TO_CODE_TO_ENUM.get(theSystemString); - if (map == null) { - return null; - } - return map.get(theCodeString); - } - - }; - - /** + @Override + public String toSystemString(BundleEntryTransactionMethodEnum theEnum) { + return theEnum.getSystem(); + } + + @Override + public BundleEntryTransactionMethodEnum fromCodeString(String theCodeString) { + return CODE_TO_ENUM.get(theCodeString); + } + + @Override + public BundleEntryTransactionMethodEnum fromCodeString(String theCodeString, String theSystemString) { + Map map = SYSTEM_TO_CODE_TO_ENUM.get(theSystemString); + if (map == null) { + return null; + } + return map.get(theCodeString); + } + }; + + /** * Constructor */ BundleEntryTransactionMethodEnum(String theCode, String theSystem) { myCode = theCode; mySystem = theSystem; } - - } diff --git 
a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/valueset/BundleTypeEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/valueset/BundleTypeEnum.java index 5b316802c4e..cd150e37d76 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/valueset/BundleTypeEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/valueset/BundleTypeEnum.java @@ -19,34 +19,31 @@ */ package ca.uhn.fhir.model.valueset; -import java.util.HashMap; -import java.util.Map; - import ca.uhn.fhir.model.api.IValueSetEnumBinder; import ca.uhn.fhir.util.CoverageIgnore; +import java.util.HashMap; +import java.util.Map; + @CoverageIgnore public enum BundleTypeEnum { - TRANSACTION("transaction", "http://hl7.org/fhir/bundle-type"), - + DOCUMENT("document", "http://hl7.org/fhir/bundle-type"), - + MESSAGE("message", "http://hl7.org/fhir/bundle-type"), BATCH_RESPONSE("batch-response", "http://hl7.org/fhir/bundle-type"), TRANSACTION_RESPONSE("transaction-response", "http://hl7.org/fhir/bundle-type"), - + HISTORY("history", "http://hl7.org/fhir/bundle-type"), - + SEARCHSET("searchset", "http://hl7.org/fhir/bundle-type"), - + COLLECTION("collection", "http://hl7.org/fhir/bundle-type"), - - ; - + /** * Identifier for this Value Set: * http://hl7.org/fhir/vs/address-use @@ -60,36 +57,37 @@ public enum BundleTypeEnum { public static final String VALUESET_NAME = "BundleType"; private static Map CODE_TO_ENUM = new HashMap(); - private static Map> SYSTEM_TO_CODE_TO_ENUM = new HashMap>(); - + private static Map> SYSTEM_TO_CODE_TO_ENUM = + new HashMap>(); + private final String myCode; private final String mySystem; - + static { for (BundleTypeEnum next : BundleTypeEnum.values()) { CODE_TO_ENUM.put(next.getCode(), next); - + if (!SYSTEM_TO_CODE_TO_ENUM.containsKey(next.getSystem())) { SYSTEM_TO_CODE_TO_ENUM.put(next.getSystem(), new HashMap()); } - SYSTEM_TO_CODE_TO_ENUM.get(next.getSystem()).put(next.getCode(), next); + SYSTEM_TO_CODE_TO_ENUM.get(next.getSystem()).put(next.getCode(), next); } } - + /** * Returns the code associated with this enumerated value */ public String getCode() { return myCode; } - + /** * Returns the code system associated with this enumerated value */ public String getSystem() { return mySystem; } - + /** * Returns the enumerated value associated with this code */ @@ -101,43 +99,41 @@ public enum BundleTypeEnum { /** * Converts codes to their respective enumerated values */ - public static final IValueSetEnumBinder VALUESET_BINDER = new IValueSetEnumBinder() { + public static final IValueSetEnumBinder VALUESET_BINDER = + new IValueSetEnumBinder() { - private static final long serialVersionUID = -305725916208867517L; + private static final long serialVersionUID = -305725916208867517L; - @Override - public String toCodeString(BundleTypeEnum theEnum) { - return theEnum.getCode(); - } + @Override + public String toCodeString(BundleTypeEnum theEnum) { + return theEnum.getCode(); + } - @Override - public String toSystemString(BundleTypeEnum theEnum) { - return theEnum.getSystem(); - } - - @Override - public BundleTypeEnum fromCodeString(String theCodeString) { - return CODE_TO_ENUM.get(theCodeString); - } - - @Override - public BundleTypeEnum fromCodeString(String theCodeString, String theSystemString) { - Map map = SYSTEM_TO_CODE_TO_ENUM.get(theSystemString); - if (map == null) { - return null; - } - return map.get(theCodeString); - } - - }; - - /** + @Override + public String toSystemString(BundleTypeEnum theEnum) { + return theEnum.getSystem(); + } + + @Override + public BundleTypeEnum 
fromCodeString(String theCodeString) { + return CODE_TO_ENUM.get(theCodeString); + } + + @Override + public BundleTypeEnum fromCodeString(String theCodeString, String theSystemString) { + Map map = SYSTEM_TO_CODE_TO_ENUM.get(theSystemString); + if (map == null) { + return null; + } + return map.get(theCodeString); + } + }; + + /** * Constructor */ BundleTypeEnum(String theCode, String theSystem) { myCode = theCode; mySystem = theSystem; } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/view/ViewGenerator.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/view/ViewGenerator.java index f80d9354f29..3e7d7eecc59 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/view/ViewGenerator.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/model/view/ViewGenerator.java @@ -65,7 +65,11 @@ public class ViewGenerator { return retVal; } - private void copyChildren(BaseRuntimeElementCompositeDefinition theSourceDef, IBase theSource, BaseRuntimeElementCompositeDefinition theTargetDef, IBase theTarget) { + private void copyChildren( + BaseRuntimeElementCompositeDefinition theSourceDef, + IBase theSource, + BaseRuntimeElementCompositeDefinition theTargetDef, + IBase theTarget) { if (!theSource.isEmpty()) { List targetChildren = theTargetDef.getChildren(); List targetExts = theTargetDef.getExtensions(); @@ -77,12 +81,14 @@ public class ViewGenerator { elementName = nextChild.getValidChildNames().iterator().next(); } - BaseRuntimeChildDefinition sourceChildEquivalent = theSourceDef.getChildByNameOrThrowDataFormatException(elementName); + BaseRuntimeChildDefinition sourceChildEquivalent = + theSourceDef.getChildByNameOrThrowDataFormatException(elementName); if (sourceChildEquivalent == null) { continue; } - List sourceValues = sourceChildEquivalent.getAccessor().getValues(theSource); + List sourceValues = + sourceChildEquivalent.getAccessor().getValues(theSource); for (IBase nextElement : sourceValues) { boolean handled = false; if (nextElement instanceof IBaseExtension) { @@ -106,12 +112,15 @@ public class ViewGenerator { String url = nextExt.getExtensionUrl(); addExtension(theSourceDef, theSource, theTarget, nextExt, url); } - - } } - private void addExtension(BaseRuntimeElementCompositeDefinition theSourceDef, IBase theSource, IBase theTarget, RuntimeChildDeclaredExtensionDefinition nextExt, String url) { + private void addExtension( + BaseRuntimeElementCompositeDefinition theSourceDef, + IBase theSource, + IBase theTarget, + RuntimeChildDeclaredExtensionDefinition nextExt, + String url) { RuntimeChildDeclaredExtensionDefinition sourceDeclaredExt = theSourceDef.getDeclaredExtension(url, ""); if (sourceDeclaredExt == null) { @@ -136,7 +145,6 @@ public class ViewGenerator { for (IBase nextElement : values) { nextExt.getMutator().addValue(theTarget, nextElement); } - } } } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative/BaseThymeleafNarrativeGenerator.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative/BaseThymeleafNarrativeGenerator.java index 86dc73286ec..ae911901d85 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative/BaseThymeleafNarrativeGenerator.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative/BaseThymeleafNarrativeGenerator.java @@ -96,7 +96,6 @@ public abstract class BaseThymeleafNarrativeGenerator extends BaseNarrativeGener retVal.add(new NarrativeAttributeProcessor(theDialectPrefix, theFhirContext)); return retVal; } - }; engine.setDialect(dialect); @@ -110,18 +109,19 @@ public abstract class BaseThymeleafNarrativeGenerator 
extends BaseNarrativeGener Context context = new Context(); context.setVariable("resource", theTargetContext); context.setVariable("context", theTargetContext); - context.setVariable("fhirVersion", theFhirContext.getVersion().getVersion().name()); + context.setVariable( + "fhirVersion", theFhirContext.getVersion().getVersion().name()); return getTemplateEngine(theFhirContext).process(theTemplate.getTemplateName(), context); } - @Override protected EnumSet getStyle() { return EnumSet.of(TemplateTypeEnum.THYMELEAF); } - private String applyTemplateWithinTag(FhirContext theFhirContext, ITemplateContext theTemplateContext, String theName, String theElement) { + private String applyTemplateWithinTag( + FhirContext theFhirContext, ITemplateContext theTemplateContext, String theName, String theElement) { IEngineConfiguration configuration = theTemplateContext.getConfiguration(); IStandardExpressionParser expressionParser = StandardExpressions.getExpressionParser(configuration); final IStandardExpression expression = expressionParser.parseExpression(theTemplateContext, theElement); @@ -151,7 +151,6 @@ public abstract class BaseThymeleafNarrativeGenerator extends BaseNarrativeGener myMessageResolver = theMessageResolver; } - private class NarrativeTemplateResolver extends DefaultTemplateResolver { private final FhirContext myFhirContext; @@ -160,40 +159,58 @@ public abstract class BaseThymeleafNarrativeGenerator extends BaseNarrativeGener } @Override - protected boolean computeResolvable(IEngineConfiguration theConfiguration, String theOwnerTemplate, String theTemplate, Map theTemplateResolutionAttributes) { + protected boolean computeResolvable( + IEngineConfiguration theConfiguration, + String theOwnerTemplate, + String theTemplate, + Map theTemplateResolutionAttributes) { if (theOwnerTemplate == null) { - return getManifest().getTemplateByName(myFhirContext, getStyle(), theTemplate).size() > 0; + return getManifest() + .getTemplateByName(myFhirContext, getStyle(), theTemplate) + .size() + > 0; } else { - return getManifest().getTemplateByFragmentName(myFhirContext, getStyle(), theTemplate).size() > 0; + return getManifest() + .getTemplateByFragmentName(myFhirContext, getStyle(), theTemplate) + .size() + > 0; } } @Override - protected TemplateMode computeTemplateMode(IEngineConfiguration theConfiguration, String theOwnerTemplate, String theTemplate, Map theTemplateResolutionAttributes) { + protected TemplateMode computeTemplateMode( + IEngineConfiguration theConfiguration, + String theOwnerTemplate, + String theTemplate, + Map theTemplateResolutionAttributes) { return TemplateMode.XML; } @Override - protected ITemplateResource computeTemplateResource(IEngineConfiguration theConfiguration, String theOwnerTemplate, String theTemplate, Map theTemplateResolutionAttributes) { + protected ITemplateResource computeTemplateResource( + IEngineConfiguration theConfiguration, + String theOwnerTemplate, + String theTemplate, + Map theTemplateResolutionAttributes) { if (theOwnerTemplate == null) { - return getManifest() - .getTemplateByName(myFhirContext, getStyle(), theTemplate) - .stream() - .findFirst() - .map(t -> new StringTemplateResource(t.getTemplateText())) - .orElseThrow(() -> new IllegalArgumentException("Unknown template: " + theTemplate)); + return getManifest().getTemplateByName(myFhirContext, getStyle(), theTemplate).stream() + .findFirst() + .map(t -> new StringTemplateResource(t.getTemplateText())) + .orElseThrow(() -> new IllegalArgumentException("Unknown template: " + theTemplate)); } else 
{ - return getManifest() - .getTemplateByFragmentName(myFhirContext, getStyle(), theTemplate) - .stream() - .findFirst() - .map(t -> new StringTemplateResource(t.getTemplateText())) - .orElseThrow(() -> new IllegalArgumentException("Unknown template: " + theTemplate)); + return getManifest().getTemplateByFragmentName(myFhirContext, getStyle(), theTemplate).stream() + .findFirst() + .map(t -> new StringTemplateResource(t.getTemplateText())) + .orElseThrow(() -> new IllegalArgumentException("Unknown template: " + theTemplate)); } } @Override - protected ICacheEntryValidity computeValidity(IEngineConfiguration theConfiguration, String theOwnerTemplate, String theTemplate, Map theTemplateResolutionAttributes) { + protected ICacheEntryValidity computeValidity( + IEngineConfiguration theConfiguration, + String theOwnerTemplate, + String theTemplate, + Map theTemplateResolutionAttributes) { return AlwaysValidCacheEntryValidity.INSTANCE; } } @@ -208,7 +225,10 @@ public abstract class BaseThymeleafNarrativeGenerator extends BaseNarrativeGener } @Override - protected void doProcess(ITemplateContext theTemplateContext, IProcessableElementTag theTag, IElementTagStructureHandler theStructureHandler) { + protected void doProcess( + ITemplateContext theTemplateContext, + IProcessableElementTag theTag, + IElementTagStructureHandler theStructureHandler) { String name = theTag.getAttributeValue("th:name"); String element = theTag.getAttributeValue("th:element"); @@ -231,14 +251,17 @@ public abstract class BaseThymeleafNarrativeGenerator extends BaseNarrativeGener } @Override - protected void doProcess(ITemplateContext theContext, IProcessableElementTag theTag, AttributeName theAttributeName, String theAttributeValue, IElementTagStructureHandler theStructureHandler) { + protected void doProcess( + ITemplateContext theContext, + IProcessableElementTag theTag, + AttributeName theAttributeName, + String theAttributeValue, + IElementTagStructureHandler theStructureHandler) { String text = applyTemplateWithinTag(myFhirContext, theContext, null, theAttributeValue); theStructureHandler.setBody(text, false); } - } - private class NarrativeGeneratorDialect implements IDialect, IExpressionObjectDialect { private final FhirContext myFhirContext; @@ -252,7 +275,6 @@ public abstract class BaseThymeleafNarrativeGenerator extends BaseNarrativeGener return "NarrativeGeneratorDialect"; } - @Override public IExpressionObjectFactory getExpressionObjectFactory() { return new NarrativeGeneratorExpressionObjectFactory(myFhirContext); @@ -286,7 +308,6 @@ public abstract class BaseThymeleafNarrativeGenerator extends BaseNarrativeGener } } - private class NarrativeGeneratorFhirPathExpressionObject { private final FhirContext myFhirContext; @@ -313,8 +334,5 @@ public abstract class BaseThymeleafNarrativeGenerator extends BaseNarrativeGener } return fhirPath; } - - } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGenerator.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGenerator.java index dd68463dc54..c1d4985128e 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGenerator.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative/CustomThymeleafNarrativeGenerator.java @@ -40,14 +40,14 @@ public class CustomThymeleafNarrativeGenerator extends BaseThymeleafNarrativeGen } /** - * Create a new narrative generator - * - * @param theNarrativePropertyFiles The name of the property file, in one of the following formats: - *
- * <li>file:/path/to/file/file.properties
- * <li>classpath:/com/package/file.properties
- *
- */
+ * Create a new narrative generator
+ *
+ * @param theNarrativePropertyFiles The name of the property file, in one of the following formats:
+ *
+ * <li>file:/path/to/file/file.properties
+ * <li>classpath:/com/package/file.properties
+ *
    + */ public CustomThymeleafNarrativeGenerator(String... theNarrativePropertyFiles) { this(); setPropertyFile(theNarrativePropertyFiles); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGenerator.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGenerator.java index 0742691dc18..c2752931b01 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGenerator.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative/DefaultThymeleafNarrativeGenerator.java @@ -27,7 +27,8 @@ import java.util.List; public class DefaultThymeleafNarrativeGenerator extends BaseThymeleafNarrativeGenerator implements INarrativeGenerator { public static final String NARRATIVES_PROPERTIES = "classpath:ca/uhn/fhir/narrative/narratives.properties"; - static final String HAPISERVER_NARRATIVES_PROPERTIES = "classpath:ca/uhn/fhir/narrative/narratives-hapiserver.properties"; + static final String HAPISERVER_NARRATIVES_PROPERTIES = + "classpath:ca/uhn/fhir/narrative/narratives-hapiserver.properties"; private boolean myUseHapiServerConformanceNarrative; private volatile NarrativeTemplateManifest myManifest; @@ -66,5 +67,4 @@ public class DefaultThymeleafNarrativeGenerator extends BaseThymeleafNarrativeGe public boolean isUseHapiServerConformanceNarrative() { return myUseHapiServerConformanceNarrative; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/BaseNarrativeGenerator.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/BaseNarrativeGenerator.java index f87bf3d7a24..1917356c6cd 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/BaseNarrativeGenerator.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/BaseNarrativeGenerator.java @@ -28,19 +28,16 @@ import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.narrative.INarrativeGenerator; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.util.Logs; -import ch.qos.logback.classic.spi.LogbackServiceProvider; import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.INarrative; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import javax.annotation.Nullable; import java.util.Collections; import java.util.EnumSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.defaultIfEmpty; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -63,10 +60,15 @@ public abstract class BaseNarrativeGenerator implements INarrativeGenerator { List templates = getTemplateForElement(theFhirContext, theResource); INarrativeTemplate template = null; if (templates.isEmpty()) { - Logs.getNarrativeGenerationTroubleshootingLog().debug("No templates match for resource of type {}", theResource.getClass()); + Logs.getNarrativeGenerationTroubleshootingLog() + .debug("No templates match for resource of type {}", theResource.getClass()); } else { if (templates.size() > 1) { - Logs.getNarrativeGenerationTroubleshootingLog().debug("Multiple templates match for resource of type {} - Picking first from: {}", theResource.getClass(), templates); + Logs.getNarrativeGenerationTroubleshootingLog() + .debug( + "Multiple templates match for resource of type {} - Picking first from: {}", + theResource.getClass(), + templates); } template = templates.get(0); 
Logs.getNarrativeGenerationTroubleshootingLog().debug("Selected template: {}", template); @@ -78,7 +80,7 @@ public abstract class BaseNarrativeGenerator implements INarrativeGenerator { public String generateResourceNarrative(FhirContext theFhirContext, IBaseResource theResource) { INarrativeTemplate template = selectTemplate(theFhirContext, theResource); if (template != null) { - String narrative = applyTemplate(theFhirContext, template, (IBase)theResource); + String narrative = applyTemplate(theFhirContext, template, (IBase) theResource); return cleanWhitespace(narrative); } @@ -89,7 +91,8 @@ public abstract class BaseNarrativeGenerator implements INarrativeGenerator { return getManifest().getTemplateByElement(theFhirContext, getStyle(), theElement); } - private boolean applyTemplate(FhirContext theFhirContext, INarrativeTemplate theTemplate, IBaseResource theResource) { + private boolean applyTemplate( + FhirContext theFhirContext, INarrativeTemplate theTemplate, IBaseResource theResource) { if (templateDoesntApplyToResource(theTemplate, theResource)) { return false; } @@ -119,18 +122,19 @@ public abstract class BaseNarrativeGenerator implements INarrativeGenerator { throw new InternalErrorException(Msg.code(1865) + e); } } - } return retVal; } private INarrative getOrCreateNarrativeChildElement(FhirContext theFhirContext, IBase nextTargetContext) { - BaseRuntimeElementCompositeDefinition targetElementDef = (BaseRuntimeElementCompositeDefinition) theFhirContext.getElementDefinition(nextTargetContext.getClass()); + BaseRuntimeElementCompositeDefinition targetElementDef = (BaseRuntimeElementCompositeDefinition) + theFhirContext.getElementDefinition(nextTargetContext.getClass()); BaseRuntimeChildDefinition targetTextChild = targetElementDef.getChildByName("text"); List existing = targetTextChild.getAccessor().getValues(nextTargetContext); INarrative nextTargetNarrative; if (existing.isEmpty()) { - nextTargetNarrative = (INarrative) theFhirContext.getElementDefinition("narrative").newInstance(); + nextTargetNarrative = (INarrative) + theFhirContext.getElementDefinition("narrative").newInstance(); targetTextChild.getMutator().addValue(nextTargetContext, nextTargetNarrative); } else { nextTargetNarrative = (INarrative) existing.get(0); @@ -138,7 +142,8 @@ public abstract class BaseNarrativeGenerator implements INarrativeGenerator { return nextTargetNarrative; } - private List findElementsInResourceRequiringNarratives(FhirContext theFhirContext, IBaseResource theResource, String theContextPath) { + private List findElementsInResourceRequiringNarratives( + FhirContext theFhirContext, IBaseResource theResource, String theContextPath) { if (theFhirContext.getVersion().getVersion().isOlderThan(FhirVersionEnum.DSTU3)) { return Collections.singletonList(theResource); } @@ -146,17 +151,16 @@ public abstract class BaseNarrativeGenerator implements INarrativeGenerator { return fhirPath.evaluate(theResource, theContextPath, IBase.class); } - protected abstract String applyTemplate(FhirContext theFhirContext, INarrativeTemplate theTemplate, IBase theTargetContext); + protected abstract String applyTemplate( + FhirContext theFhirContext, INarrativeTemplate theTemplate, IBase theTargetContext); private boolean templateDoesntApplyToResource(INarrativeTemplate theTemplate, IBaseResource theResource) { boolean retVal = false; - if (theTemplate.getAppliesToProfiles() != null && !theTemplate.getAppliesToProfiles().isEmpty()) { - Set resourceProfiles = theResource - .getMeta() - .getProfile() - .stream() - 
.map(t -> t.getValueAsString()) - .collect(Collectors.toSet()); + if (theTemplate.getAppliesToProfiles() != null + && !theTemplate.getAppliesToProfiles().isEmpty()) { + Set resourceProfiles = theResource.getMeta().getProfile().stream() + .map(t -> t.getValueAsString()) + .collect(Collectors.toSet()); retVal = true; for (String next : theTemplate.getAppliesToProfiles()) { if (resourceProfiles.contains(next)) { @@ -208,7 +212,8 @@ public abstract class BaseNarrativeGenerator implements INarrativeGenerator { char char1 = Character.toLowerCase(theResult.charAt(i + 1)); char char2 = Character.toLowerCase(theResult.charAt(i + 2)); char char3 = Character.toLowerCase(theResult.charAt(i + 3)); - char char4 = Character.toLowerCase((i + 4 < theResult.length()) ? theResult.charAt(i + 4) : ' '); + char char4 = + Character.toLowerCase((i + 4 < theResult.length()) ? theResult.charAt(i + 4) : ' '); if (char1 == 'p' && char2 == 'r' && char3 == 'e') { inPre = true; } else if (char1 == '/' && char2 == 'p' && char3 == 'r' && char4 == 'e') { @@ -231,5 +236,4 @@ public abstract class BaseNarrativeGenerator implements INarrativeGenerator { } protected abstract NarrativeTemplateManifest getManifest(); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/INarrativeTemplate.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/INarrativeTemplate.java index 8c5201399c9..457300849ae 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/INarrativeTemplate.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/INarrativeTemplate.java @@ -21,7 +21,6 @@ package ca.uhn.fhir.narrative2; import org.hl7.fhir.instance.model.api.IBase; -import java.io.IOException; import java.util.Set; public interface INarrativeTemplate { diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/INarrativeTemplateManifest.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/INarrativeTemplateManifest.java index dee8d60e71a..e12362bd861 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/INarrativeTemplateManifest.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/INarrativeTemplateManifest.java @@ -22,17 +22,28 @@ package ca.uhn.fhir.narrative2; import ca.uhn.fhir.context.FhirContext; import org.hl7.fhir.instance.model.api.IBase; -import javax.annotation.Nonnull; import java.util.Collection; import java.util.EnumSet; import java.util.List; +import javax.annotation.Nonnull; public interface INarrativeTemplateManifest { - List getTemplateByResourceName(@Nonnull FhirContext theFhirContext, @Nonnull EnumSet theStyles, @Nonnull String theResourceName, @Nonnull Collection theProfiles); + List getTemplateByResourceName( + @Nonnull FhirContext theFhirContext, + @Nonnull EnumSet theStyles, + @Nonnull String theResourceName, + @Nonnull Collection theProfiles); - List getTemplateByName(@Nonnull FhirContext theFhirContext, @Nonnull EnumSet theStyles, @Nonnull String theName); + List getTemplateByName( + @Nonnull FhirContext theFhirContext, @Nonnull EnumSet theStyles, @Nonnull String theName); - List getTemplateByElement(@Nonnull FhirContext theFhirContext, @Nonnull EnumSet theStyles, @Nonnull IBase theElementValue); + List getTemplateByElement( + @Nonnull FhirContext theFhirContext, + @Nonnull EnumSet theStyles, + @Nonnull IBase theElementValue); - List getTemplateByFragmentName(@Nonnull FhirContext theFhirContext, @Nonnull EnumSet theStyles, @Nonnull String theFragmentName); + List getTemplateByFragmentName( + @Nonnull FhirContext theFhirContext, + @Nonnull EnumSet theStyles, 
+ @Nonnull String theFragmentName); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/NarrativeTemplate.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/NarrativeTemplate.java index ee515a012ee..66645009956 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/NarrativeTemplate.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/NarrativeTemplate.java @@ -42,9 +42,9 @@ public class NarrativeTemplate implements INarrativeTemplate { @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SIMPLE_STYLE) - .append("name", myTemplateName) - .append("fileName", myTemplateFileName) - .toString(); + .append("name", myTemplateName) + .append("fileName", myTemplateFileName) + .toString(); } public Set getAppliesToDataTypes() { @@ -130,5 +130,4 @@ public class NarrativeTemplate implements INarrativeTemplate { void addAppliesToDatatype(String theDataType) { myAppliesToDataTypes.add(theDataType); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/NarrativeTemplateManifest.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/NarrativeTemplateManifest.java index 6710e66d28f..8152d66f19c 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/NarrativeTemplateManifest.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/NarrativeTemplateManifest.java @@ -37,14 +37,14 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.StringReader; -import java.util.*; import java.util.function.Consumer; import java.util.stream.Collectors; +import java.util.*; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -94,36 +94,46 @@ public class NarrativeTemplateManifest implements INarrativeTemplateManifest { } @Override - public List getTemplateByResourceName(@Nonnull FhirContext theFhirContext, @Nonnull EnumSet theStyles, @Nonnull String theResourceName, @Nonnull Collection theProfiles) { + public List getTemplateByResourceName( + @Nonnull FhirContext theFhirContext, + @Nonnull EnumSet theStyles, + @Nonnull String theResourceName, + @Nonnull Collection theProfiles) { return getFromMap(theStyles, theResourceName.toUpperCase(), myResourceTypeToTemplate, theProfiles); } @Override - public List getTemplateByName(@Nonnull FhirContext theFhirContext, @Nonnull EnumSet theStyles, @Nonnull String theName) { + public List getTemplateByName( + @Nonnull FhirContext theFhirContext, + @Nonnull EnumSet theStyles, + @Nonnull String theName) { return getFromMap(theStyles, theName, myNameToTemplate, Collections.emptyList()); } @Override - public List getTemplateByFragmentName(@Nonnull FhirContext theFhirContext, @Nonnull EnumSet theStyles, @Nonnull String theFragmentName) { + public List getTemplateByFragmentName( + @Nonnull FhirContext theFhirContext, + @Nonnull EnumSet theStyles, + @Nonnull String theFragmentName) { return getFromMap(theStyles, theFragmentName, myFragmentNameToTemplate, Collections.emptyList()); } @SuppressWarnings("PatternVariableCanBeUsed") @Override - public List getTemplateByElement(@Nonnull FhirContext theFhirContext, @Nonnull EnumSet theStyles, @Nonnull IBase theElement) { + public List getTemplateByElement( + @Nonnull FhirContext theFhirContext, + @Nonnull EnumSet theStyles, + @Nonnull IBase theElement) { List retVal = Collections.emptyList(); if (theElement 
instanceof IBaseResource) { IBaseResource resource = (IBaseResource) theElement; String resourceName = theFhirContext.getResourceDefinition(resource).getName(); - List profiles = resource - .getMeta() - .getProfile() - .stream() - .filter(Objects::nonNull) - .map(IPrimitiveType::getValueAsString) - .filter(StringUtils::isNotBlank) - .collect(Collectors.toList()); + List profiles = resource.getMeta().getProfile().stream() + .filter(Objects::nonNull) + .map(IPrimitiveType::getValueAsString) + .filter(StringUtils::isNotBlank) + .collect(Collectors.toList()); retVal = getTemplateByResourceName(theFhirContext, theStyles, resourceName, profiles); } @@ -132,13 +142,13 @@ public class NarrativeTemplateManifest implements INarrativeTemplateManifest { } if (retVal.isEmpty()) { - String datatypeName = theFhirContext.getElementDefinition(theElement.getClass()).getName(); + String datatypeName = + theFhirContext.getElementDefinition(theElement.getClass()).getName(); retVal = getFromMap(theStyles, datatypeName.toUpperCase(), myDatatypeToTemplate, Collections.emptyList()); } return retVal; } - public static NarrativeTemplateManifest forManifestFileLocation(String... thePropertyFilePaths) { return forManifestFileLocation(Arrays.asList(thePropertyFilePaths)); } @@ -180,11 +190,13 @@ public class NarrativeTemplateManifest implements INarrativeTemplateManifest { file.load(new StringReader(theManifestText)); for (Object nextKeyObj : file.keySet()) { String nextKey = (String) nextKeyObj; - Validate.isTrue(StringUtils.countMatches(nextKey, ".") == 1, "Invalid narrative property file key: %s", nextKey); + Validate.isTrue( + StringUtils.countMatches(nextKey, ".") == 1, "Invalid narrative property file key: %s", nextKey); String name = nextKey.substring(0, nextKey.indexOf('.')); Validate.notBlank(name, "Invalid narrative property file key: %s", nextKey); - NarrativeTemplate nextTemplate = nameToTemplate.computeIfAbsent(name, t -> new NarrativeTemplate().setTemplateName(name)); + NarrativeTemplate nextTemplate = + nameToTemplate.computeIfAbsent(name, t -> new NarrativeTemplate().setTemplateName(name)); if (nextKey.endsWith(".class")) { String className = file.getProperty(nextKey); @@ -192,7 +204,8 @@ public class NarrativeTemplateManifest implements INarrativeTemplateManifest { try { nextTemplate.addAppliesToClass((Class) Class.forName(className)); } catch (ClassNotFoundException theE) { - throw new InternalErrorException(Msg.code(1867) + "Could not find class " + className + " declared in narrative manifest"); + throw new InternalErrorException(Msg.code(1867) + "Could not find class " + className + + " declared in narrative manifest"); } } } else if (nextKey.endsWith(".profile")) { @@ -226,21 +239,19 @@ public class NarrativeTemplateManifest implements INarrativeTemplateManifest { ourLog.debug("Ignoring title property as narrative generator no longer generates titles: {}", nextKey); } else { throw new ConfigurationException(Msg.code(1868) + "Invalid property name: " + nextKey - + " - the key must end in one of the expected extensions " - + "'.profile', '.resourceType', '.dataType', '.style', '.contextPath', '.narrative', '.title'"); + + " - the key must end in one of the expected extensions " + + "'.profile', '.resourceType', '.dataType', '.style', '.contextPath', '.narrative', '.title'"); } - } return nameToTemplate.values(); } private static void parseValuesAndAddToMap(String resourceType, Consumer addAppliesToResourceType) { - Arrays - .stream(resourceType.split(",")) - .map(String::trim) - 
.filter(StringUtils::isNotBlank) - .forEach(addAppliesToResourceType); + Arrays.stream(resourceType.split(",")) + .map(String::trim) + .filter(StringUtils::isNotBlank) + .forEach(addAppliesToResourceType); } static String loadResource(String theName) { @@ -257,17 +268,20 @@ public class NarrativeTemplateManifest implements INarrativeTemplateManifest { throw new InternalErrorException(Msg.code(1869) + e.getMessage(), e); } } else { - throw new InternalErrorException(Msg.code(1871) + "Invalid resource name: '" + theName + "' (must start with classpath: or file: )"); + throw new InternalErrorException( + Msg.code(1871) + "Invalid resource name: '" + theName + "' (must start with classpath: or file: )"); } } - private static List getFromMap(EnumSet theStyles, T theKey, ListMultimap theMap, Collection theProfiles) { - return theMap - .get(theKey) - .stream() - .filter(t -> theStyles.contains(t.getTemplateType())) - .filter(t -> theProfiles.isEmpty() || t.getAppliesToProfiles().stream().anyMatch(theProfiles::contains)) - .collect(Collectors.toList()); + private static List getFromMap( + EnumSet theStyles, + T theKey, + ListMultimap theMap, + Collection theProfiles) { + return theMap.get(theKey).stream() + .filter(t -> theStyles.contains(t.getTemplateType())) + .filter(t -> theProfiles.isEmpty() + || t.getAppliesToProfiles().stream().anyMatch(theProfiles::contains)) + .collect(Collectors.toList()); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/TemplateTypeEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/TemplateTypeEnum.java index 273cd7d958c..ce50e0c6289 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/TemplateTypeEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/narrative2/TemplateTypeEnum.java @@ -20,8 +20,6 @@ package ca.uhn.fhir.narrative2; public enum TemplateTypeEnum { - THYMELEAF, LIQUID - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/BaseParser.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/BaseParser.java index 5b3dedfa4ad..831e5789257 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/BaseParser.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/BaseParser.java @@ -60,7 +60,6 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nullable; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; @@ -79,6 +78,7 @@ import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -92,11 +92,13 @@ public abstract class BaseParser implements IParser { * * @since 5.0.0 */ - public static final String RESOURCE_CREATED_BY_PARSER = BaseParser.class.getName() + "_" + "RESOURCE_CREATED_BY_PARSER"; + public static final String RESOURCE_CREATED_BY_PARSER = + BaseParser.class.getName() + "_" + "RESOURCE_CREATED_BY_PARSER"; private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseParser.class); - private static final Set notEncodeForContainedResource = new HashSet<>(Arrays.asList("security", "versionId", "lastUpdated")); + private static final Set notEncodeForContainedResource = + new HashSet<>(Arrays.asList("security", "versionId", "lastUpdated")); private FhirTerser.ContainedResources myContainedResources; 
private boolean myEncodeElementsAppliesToChildResourcesOnly; @@ -135,10 +137,8 @@ public abstract class BaseParser implements IParser { if (theDontEncodeElements == null || theDontEncodeElements.isEmpty()) { myDontEncodeElements = null; } else { - myDontEncodeElements = theDontEncodeElements - .stream() - .map(EncodeContextPath::new) - .collect(Collectors.toList()); + myDontEncodeElements = + theDontEncodeElements.stream().map(EncodeContextPath::new).collect(Collectors.toList()); } return this; } @@ -154,13 +154,13 @@ public abstract class BaseParser implements IParser { myEncodeElements = null; myEncodeElementsAppliesToResourceTypes = null; } else { - myEncodeElements = theEncodeElements - .stream() - .map(EncodeContextPath::new) - .collect(Collectors.toList()); + myEncodeElements = + theEncodeElements.stream().map(EncodeContextPath::new).collect(Collectors.toList()); myEncodeElementsAppliesToResourceTypes = new HashSet<>(); - for (String next : myEncodeElements.stream().map(t -> t.getPath().get(0).getName()).collect(Collectors.toList())) { + for (String next : myEncodeElements.stream() + .map(t -> t.getPath().get(0).getName()) + .collect(Collectors.toList())) { if (next.startsWith("*")) { myEncodeElementsAppliesToResourceTypes = null; break; @@ -172,45 +172,49 @@ public abstract class BaseParser implements IParser { myEncodeElementsAppliesToResourceTypes.add(next.substring(0, dotIdx)); } } - } return this; } - protected Iterable compositeChildIterator(IBase theCompositeElement, final boolean theContainedResource, final CompositeChildElement theParent, EncodeContext theEncodeContext) { - BaseRuntimeElementCompositeDefinition elementDef = (BaseRuntimeElementCompositeDefinition) myContext.getElementDefinition(theCompositeElement.getClass()); - return theEncodeContext.getCompositeChildrenCache().computeIfAbsent(new Key(elementDef, theContainedResource, theParent, theEncodeContext), (k) -> { + protected Iterable compositeChildIterator( + IBase theCompositeElement, + final boolean theContainedResource, + final CompositeChildElement theParent, + EncodeContext theEncodeContext) { + BaseRuntimeElementCompositeDefinition elementDef = (BaseRuntimeElementCompositeDefinition) + myContext.getElementDefinition(theCompositeElement.getClass()); + return theEncodeContext + .getCompositeChildrenCache() + .computeIfAbsent(new Key(elementDef, theContainedResource, theParent, theEncodeContext), (k) -> { + final List children = elementDef.getChildrenAndExtension(); + final List result = new ArrayList<>(children.size()); - final List children = elementDef.getChildrenAndExtension(); - final List result = new ArrayList<>(children.size()); + for (final BaseRuntimeChildDefinition child : children) { + CompositeChildElement myNext = new CompositeChildElement(theParent, child, theEncodeContext); - for (final BaseRuntimeChildDefinition child : children) { - CompositeChildElement myNext = new CompositeChildElement(theParent, child, theEncodeContext); - - /* - * There are lots of reasons we might skip encoding a particular child - */ - if (myNext.getDef().getElementName().equals("id")) { - continue; - } else if (!myNext.shouldBeEncoded(theContainedResource)) { - continue; - } else if (myNext.getDef() instanceof RuntimeChildNarrativeDefinition) { - if (isSuppressNarratives() || isSummaryMode()) { - continue; + /* + * There are lots of reasons we might skip encoding a particular child + */ + if (myNext.getDef().getElementName().equals("id")) { + continue; + } else if (!myNext.shouldBeEncoded(theContainedResource)) { + 
continue; + } else if (myNext.getDef() instanceof RuntimeChildNarrativeDefinition) { + if (isSuppressNarratives() || isSummaryMode()) { + continue; + } + } else if (myNext.getDef() instanceof RuntimeChildContainedResources) { + if (theContainedResource) { + continue; + } + } + result.add(myNext); } - } else if (myNext.getDef() instanceof RuntimeChildContainedResources) { - if (theContainedResource) { - continue; - } - } - result.add(myNext); - } - return result; - }); + return result; + }); } - private String determineReferenceText(IBaseReference theRef, CompositeChildElement theCompositeChildElement) { IIdType ref = theRef.getReferenceElement(); if (isBlank(ref.getIdPart())) { @@ -231,7 +235,9 @@ public abstract class BaseParser implements IParser { reference = refId.getValue(); } else { if (!refId.hasResourceType()) { - refId = refId.withResourceType(myContext.getResourceDefinition(theRef.getResource()).getName()); + refId = refId.withResourceType(myContext + .getResourceDefinition(theRef.getResource()) + .getName()); } if (isStripVersionsFromReferences(theCompositeChildElement)) { reference = refId.toVersionless().getValue(); @@ -246,7 +252,8 @@ public abstract class BaseParser implements IParser { return reference; } if (!ref.hasResourceType() && !ref.isLocal() && theRef.getResource() != null) { - ref = ref.withResourceType(myContext.getResourceDefinition(theRef.getResource()).getName()); + ref = ref.withResourceType( + myContext.getResourceDefinition(theRef.getResource()).getName()); } if (isNotBlank(myServerBaseUrl) && StringUtils.equals(myServerBaseUrl, ref.getBaseUrl())) { if (isStripVersionsFromReferences(theCompositeChildElement)) { @@ -260,13 +267,17 @@ public abstract class BaseParser implements IParser { return ref.getValue(); } - protected abstract void doEncodeResourceToWriter(IBaseResource theResource, Writer theWriter, EncodeContext theEncodeContext) throws IOException, DataFormatException; + protected abstract void doEncodeResourceToWriter( + IBaseResource theResource, Writer theWriter, EncodeContext theEncodeContext) + throws IOException, DataFormatException; - protected void doEncodeToWriter(IBase theElement, Writer theWriter, EncodeContext theEncodeContext) throws IOException, DataFormatException { + protected void doEncodeToWriter(IBase theElement, Writer theWriter, EncodeContext theEncodeContext) + throws IOException, DataFormatException { throw new InternalErrorException(Msg.code(2363) + "This parser does not support encoding non-resource values"); } - protected abstract T doParseResource(Class theResourceType, Reader theReader) throws DataFormatException; + protected abstract T doParseResource(Class theResourceType, Reader theReader) + throws DataFormatException; @Override public String encodeResourceToString(IBaseResource theResource) throws DataFormatException { @@ -274,25 +285,27 @@ public abstract class BaseParser implements IParser { try { encodeResourceToWriter(theResource, stringWriter); } catch (IOException e) { - throw new Error(Msg.code(1828) + "Encountered IOException during write to string - This should not happen!", e); + throw new Error( + Msg.code(1828) + "Encountered IOException during write to string - This should not happen!", e); } return stringWriter.toString(); } @Override - public final void encodeResourceToWriter(IBaseResource theResource, Writer theWriter) throws IOException, DataFormatException { + public final void encodeResourceToWriter(IBaseResource theResource, Writer theWriter) + throws IOException, DataFormatException { EncodeContext 
encodeContext = new EncodeContext(); encodeResourceToWriter(theResource, theWriter, encodeContext); } - @Override public String encodeToString(IBase theElement) throws DataFormatException { Writer stringWriter = new StringBuilderWriter(); try { encodeToWriter(theElement, stringWriter); } catch (IOException e) { - throw new Error(Msg.code(2364) + "Encountered IOException during write to string - This should not happen!", e); + throw new Error( + Msg.code(2364) + "Encountered IOException during write to string - This should not happen!", e); } return stringWriter.toString(); } @@ -309,19 +322,24 @@ public abstract class BaseParser implements IParser { } } - - protected void encodeResourceToWriter(IBaseResource theResource, Writer theWriter, EncodeContext theEncodeContext) throws IOException { + protected void encodeResourceToWriter(IBaseResource theResource, Writer theWriter, EncodeContext theEncodeContext) + throws IOException { Validate.notNull(theResource, "theResource can not be null"); Validate.notNull(theWriter, "theWriter can not be null"); Validate.notNull(theEncodeContext, "theEncodeContext can not be null"); - if (myContext.getVersion().getVersion() == FhirVersionEnum.R4B && theResource.getStructureFhirVersionEnum() == FhirVersionEnum.R5) { + if (myContext.getVersion().getVersion() == FhirVersionEnum.R4B + && theResource.getStructureFhirVersionEnum() == FhirVersionEnum.R5) { // TODO: remove once we've bumped the core lib version - } else if (theResource.getStructureFhirVersionEnum() != myContext.getVersion().getVersion()) { - throw new IllegalArgumentException(Msg.code(1829) + "This parser is for FHIR version " + myContext.getVersion().getVersion() + " - Can not encode a structure for version " + theResource.getStructureFhirVersionEnum()); + } else if (theResource.getStructureFhirVersionEnum() + != myContext.getVersion().getVersion()) { + throw new IllegalArgumentException(Msg.code(1829) + "This parser is for FHIR version " + + myContext.getVersion().getVersion() + " - Can not encode a structure for version " + + theResource.getStructureFhirVersionEnum()); } - String resourceName = myContext.getElementDefinition(theResource.getClass()).getName(); + String resourceName = + myContext.getElementDefinition(theResource.getClass()).getName(); theEncodeContext.pushPath(resourceName, true); doEncodeResourceToWriter(theResource, theWriter, theEncodeContext); @@ -329,7 +347,8 @@ public abstract class BaseParser implements IParser { theEncodeContext.popPath(); } - protected void encodeToWriter(IBase theElement, Writer theWriter, EncodeContext theEncodeContext) throws IOException { + protected void encodeToWriter(IBase theElement, Writer theWriter, EncodeContext theEncodeContext) + throws IOException { Validate.notNull(theElement, "theElement can not be null"); Validate.notNull(theWriter, "theWriter can not be null"); Validate.notNull(theEncodeContext, "theEncodeContext can not be null"); @@ -379,7 +398,8 @@ public abstract class BaseParser implements IParser { */ BaseRuntimeElementDefinition elementDef = myContext.getElementDefinition(type); if (elementDef.getName().equals("code")) { - Class type2 = myContext.getElementDefinition("code").getImplementingClass(); + Class type2 = + myContext.getElementDefinition("code").getImplementingClass(); childDef = theChild.getChildElementDefinitionByDatatype(type2); childName = theChild.getChildNameByDatatype(type2); } @@ -390,7 +410,8 @@ public abstract class BaseParser implements IParser { Class nextSuperType = theValue.getClass(); while 
(IBase.class.isAssignableFrom(nextSuperType) && childDef == null) { if (Modifier.isAbstract(nextSuperType.getModifiers()) == false) { - BaseRuntimeElementDefinition def = myContext.getElementDefinition((Class) nextSuperType); + BaseRuntimeElementDefinition def = + myContext.getElementDefinition((Class) nextSuperType); Class nextChildType = def.getImplementingClass(); childDef = theChild.getChildElementDefinitionByDatatype((Class) nextChildType); childName = theChild.getChildNameByDatatype((Class) nextChildType); @@ -449,7 +470,8 @@ public abstract class BaseParser implements IParser { protected List, Object>> getExtensionMetadataKeys(IResource resource) { List, Object>> extensionMetadataKeys = new ArrayList<>(); - for (Map.Entry, Object> entry : resource.getResourceMetadata().entrySet()) { + for (Map.Entry, Object> entry : + resource.getResourceMetadata().entrySet()) { if (entry.getKey() instanceof ResourceMetadataKeyEnum.ExtensionResourceMetadataKey) { extensionMetadataKeys.add(entry); } @@ -461,7 +483,9 @@ public abstract class BaseParser implements IParser { protected String getExtensionUrl(final String extensionUrl) { String url = extensionUrl; if (StringUtils.isNotBlank(extensionUrl) && StringUtils.isNotBlank(myServerBaseUrl)) { - url = !UrlUtil.isValid(extensionUrl) && extensionUrl.startsWith("/") ? myServerBaseUrl + extensionUrl : extensionUrl; + url = !UrlUtil.isValid(extensionUrl) && extensionUrl.startsWith("/") + ? myServerBaseUrl + extensionUrl + : extensionUrl; } return url; } @@ -497,7 +521,8 @@ public abstract class BaseParser implements IParser { } @SuppressWarnings("deprecation") - protected > List getProfileTagsForEncoding(IBaseResource theResource, List theProfiles) { + protected > List getProfileTagsForEncoding( + IBaseResource theResource, List theProfiles) { switch (myContext.getAddProfileTagWhenEncoding()) { case NEVER: return theProfiles; @@ -555,8 +580,10 @@ public abstract class BaseParser implements IParser { } protected boolean isChildContained(BaseRuntimeElementDefinition childDef, boolean theIncludedResource) { - return (childDef.getChildType() == ChildTypeEnum.CONTAINED_RESOURCES || childDef.getChildType() == ChildTypeEnum.CONTAINED_RESOURCE_LIST) && getContainedResources().isEmpty() == false - && theIncludedResource == false; + return (childDef.getChildType() == ChildTypeEnum.CONTAINED_RESOURCES + || childDef.getChildType() == ChildTypeEnum.CONTAINED_RESOURCE_LIST) + && getContainedResources().isEmpty() == false + && theIncludedResource == false; } @Override @@ -595,13 +622,16 @@ public abstract class BaseParser implements IParser { Set dontStripVersionsFromReferencesAtPaths = myDontStripVersionsFromReferencesAtPaths; if (dontStripVersionsFromReferencesAtPaths != null) { - if (dontStripVersionsFromReferencesAtPaths.isEmpty() == false && theCompositeChildElement.anyPathMatches(dontStripVersionsFromReferencesAtPaths)) { + if (dontStripVersionsFromReferencesAtPaths.isEmpty() == false + && theCompositeChildElement.anyPathMatches(dontStripVersionsFromReferencesAtPaths)) { return false; } } - dontStripVersionsFromReferencesAtPaths = myContext.getParserOptions().getDontStripVersionsFromReferencesAtPaths(); - return dontStripVersionsFromReferencesAtPaths.isEmpty() != false || !theCompositeChildElement.anyPathMatches(dontStripVersionsFromReferencesAtPaths); + dontStripVersionsFromReferencesAtPaths = + myContext.getParserOptions().getDontStripVersionsFromReferencesAtPaths(); + return dontStripVersionsFromReferencesAtPaths.isEmpty() != false + || 
!theCompositeChildElement.anyPathMatches(dontStripVersionsFromReferencesAtPaths); } @Override @@ -625,12 +655,14 @@ public abstract class BaseParser implements IParser { } @Override - public T parseResource(Class theResourceType, InputStream theInputStream) throws DataFormatException { + public T parseResource(Class theResourceType, InputStream theInputStream) + throws DataFormatException { return parseResource(theResourceType, new InputStreamReader(theInputStream, Constants.CHARSET_UTF8)); } @Override - public T parseResource(Class theResourceType, Reader theReader) throws DataFormatException { + public T parseResource(Class theResourceType, Reader theReader) + throws DataFormatException { /* * We do this so that the context can verify that the structure is for @@ -656,7 +688,14 @@ public abstract class BaseParser implements IParser { if (isBlank(resourceId.getValue())) { resourceId.setValue(fullUrl); } else { - if (fullUrl.startsWith("urn:") && fullUrl.length() > resourceId.getIdPart().length() && fullUrl.charAt(fullUrl.length() - resourceId.getIdPart().length() - 1) == ':' && fullUrl.endsWith(resourceId.getIdPart())) { + if (fullUrl.startsWith("urn:") + && fullUrl.length() + > resourceId.getIdPart().length() + && fullUrl.charAt(fullUrl.length() + - resourceId.getIdPart().length() + - 1) + == ':' + && fullUrl.endsWith(resourceId.getIdPart())) { resourceId.setValue(fullUrl); } else { IIdType fullUrlId = myContext.getVersion().newIdType(); @@ -669,7 +708,8 @@ public abstract class BaseParser implements IParser { resourceId.setValue(newId.getValue()); } else if (StringUtils.equals(fullUrlId.getIdPart(), resourceId.getIdPart())) { if (fullUrlId.hasBaseUrl()) { - IIdType newResourceId = resourceId.withServerBase(fullUrlId.getBaseUrl(), resourceId.getResourceType()); + IIdType newResourceId = resourceId.withServerBase( + fullUrlId.getBaseUrl(), resourceId.getResourceType()); resourceId.setValue(newResourceId.getValue()); } } @@ -679,7 +719,6 @@ public abstract class BaseParser implements IParser { } }); } - } return retVal; @@ -702,8 +741,12 @@ public abstract class BaseParser implements IParser { return parseResource(null, theMessageString); } - protected List preProcessValues(BaseRuntimeChildDefinition theMetaChildUncast, IBaseResource theResource, List theValues, - CompositeChildElement theCompositeChildElement, EncodeContext theEncodeContext) { + protected List preProcessValues( + BaseRuntimeChildDefinition theMetaChildUncast, + IBaseResource theResource, + List theValues, + CompositeChildElement theCompositeChildElement, + EncodeContext theEncodeContext) { if (myContext.getVersion().getVersion().isRi()) { /* @@ -723,7 +766,8 @@ public abstract class BaseParser implements IParser { IBaseMetaType metaValue = (IBaseMetaType) theValues.get(0); try { - metaValue = (IBaseMetaType) metaValue.getClass().getMethod("copy").invoke(metaValue); + metaValue = (IBaseMetaType) + metaValue.getClass().getMethod("copy").invoke(metaValue); } catch (Exception e) { throw new InternalErrorException(Msg.code(1830) + "Failed to duplicate meta", e); } @@ -737,7 +781,8 @@ public abstract class BaseParser implements IParser { filterCodingsWithNoCodeOrSystem(metaValue.getTag()); filterCodingsWithNoCodeOrSystem(metaValue.getSecurity()); - List> newProfileList = getProfileTagsForEncoding(theResource, metaValue.getProfile()); + List> newProfileList = + getProfileTagsForEncoding(theResource, metaValue.getProfile()); List> oldProfileList = metaValue.getProfile(); if (oldProfileList != newProfileList) { 
oldProfileList.clear(); @@ -778,11 +823,11 @@ public abstract class BaseParser implements IParser { if (retVal == theValues) { retVal = new ArrayList<>(theValues); } - IBaseReference newRef = (IBaseReference) myContext.getElementDefinition(nextRef.getClass()).newInstance(); + IBaseReference newRef = (IBaseReference) + myContext.getElementDefinition(nextRef.getClass()).newInstance(); myContext.newTerser().cloneInto(nextRef, newRef, true); newRef.setReference(refText); retVal.set(i, newRef); - } } } @@ -872,12 +917,17 @@ public abstract class BaseParser implements IParser { return true; } if (myEncodeElements != null) { - if (isEncodeElementsAppliesToChildResourcesOnly() && theEncodeContext.getResourcePath().size() < 2) { + if (isEncodeElementsAppliesToChildResourcesOnly() + && theEncodeContext.getResourcePath().size() < 2) { return false; } - String currentResourceName = theEncodeContext.getResourcePath().get(theEncodeContext.getResourcePath().size() - 1).getName(); - return myEncodeElementsAppliesToResourceTypes == null || myEncodeElementsAppliesToResourceTypes.contains(currentResourceName); + String currentResourceName = theEncodeContext + .getResourcePath() + .get(theEncodeContext.getResourcePath().size() - 1) + .getName(); + return myEncodeElementsAppliesToResourceTypes == null + || myEncodeElementsAppliesToResourceTypes.contains(currentResourceName); } return false; @@ -894,7 +944,8 @@ public abstract class BaseParser implements IParser { retVal = false; } else if (myDontEncodeElements.stream().anyMatch(t -> t.equalsPath("*.id"))) { retVal = false; - } else if (theEncodeContext.getResourcePath().size() == 1 && myDontEncodeElements.stream().anyMatch(t -> t.equalsPath("id"))) { + } else if (theEncodeContext.getResourcePath().size() == 1 + && myDontEncodeElements.stream().anyMatch(t -> t.equalsPath("id"))) { retVal = false; } } @@ -926,7 +977,8 @@ public abstract class BaseParser implements IParser { return "Resource encoded in summary mode"; } - protected void throwExceptionForUnknownChildType(BaseRuntimeChildDefinition nextChild, Class theType) { + protected void throwExceptionForUnknownChildType( + BaseRuntimeChildDefinition nextChild, Class theType) { if (nextChild instanceof BaseRuntimeDeclaredChildDefinition) { StringBuilder b = new StringBuilder(); b.append(nextChild.getElementName()); @@ -975,7 +1027,6 @@ public abstract class BaseParser implements IParser { public String getChildName() { return myChildName; } - } /** @@ -988,17 +1039,18 @@ public abstract class BaseParser implements IParser { public Map> getCompositeChildrenCache() { return myCompositeChildrenCache; } - } - protected class CompositeChildElement { private final BaseRuntimeChildDefinition myDef; private final CompositeChildElement myParent; private final RuntimeResourceDefinition myResDef; private final EncodeContext myEncodeContext; - public CompositeChildElement(CompositeChildElement theParent, @Nullable BaseRuntimeChildDefinition theDef, EncodeContext theEncodeContext) { + public CompositeChildElement( + CompositeChildElement theParent, + @Nullable BaseRuntimeChildDefinition theDef, + EncodeContext theEncodeContext) { myDef = theDef; myParent = theParent; myResDef = null; @@ -1016,7 +1068,6 @@ public abstract class BaseParser implements IParser { } } } - } public CompositeChildElement(RuntimeResourceDefinition theResDef, EncodeContext theEncodeContext) { @@ -1026,12 +1077,12 @@ public abstract class BaseParser implements IParser { myEncodeContext = theEncodeContext; } - @Override - public String toString() { - 
return myDef.getElementName(); - } + @Override + public String toString() { + return myDef.getElementName(); + } - private void addParent(CompositeChildElement theParent, StringBuilder theB) { + private void addParent(CompositeChildElement theParent, StringBuilder theB) { if (theParent != null) { if (theParent.myResDef != null) { theB.append(theParent.myResDef.getName()); @@ -1079,8 +1130,12 @@ public abstract class BaseParser implements IParser { private boolean checkIfParentShouldBeEncodedAndBuildPath() { List encodeElements = myEncodeElements; - String currentResourceName = myEncodeContext.getResourcePath().get(myEncodeContext.getResourcePath().size() - 1).getName(); - if (myEncodeElementsAppliesToResourceTypes != null && !myEncodeElementsAppliesToResourceTypes.contains(currentResourceName)) { + String currentResourceName = myEncodeContext + .getResourcePath() + .get(myEncodeContext.getResourcePath().size() - 1) + .getName(); + if (myEncodeElementsAppliesToResourceTypes != null + && !myEncodeElementsAppliesToResourceTypes.contains(currentResourceName)) { encodeElements = null; } @@ -1092,7 +1147,8 @@ public abstract class BaseParser implements IParser { * the SUBSETTED tag */ if (!retVal) { - if ("meta".equals(myEncodeContext.getLeafResourcePathFirstField()) && shouldAddSubsettedTag(myEncodeContext)) { + if ("meta".equals(myEncodeContext.getLeafResourcePathFirstField()) + && shouldAddSubsettedTag(myEncodeContext)) { // The next element is a child of the element retVal = true; } else if ("meta".equals(myDef.getElementName()) && shouldAddSubsettedTag(myEncodeContext)) { @@ -1108,14 +1164,17 @@ public abstract class BaseParser implements IParser { return checkIfPathMatchesForEncoding(myDontEncodeElements, false); } - private boolean checkIfPathMatchesForEncoding(List theElements, boolean theCheckingForEncodeElements) { + private boolean checkIfPathMatchesForEncoding( + List theElements, boolean theCheckingForEncodeElements) { boolean retVal = false; if (myDef != null) { myEncodeContext.pushPath(myDef.getElementName(), false); } - if (theCheckingForEncodeElements && isEncodeElementsAppliesToChildResourcesOnly() && myEncodeContext.getResourcePath().size() < 2) { + if (theCheckingForEncodeElements + && isEncodeElementsAppliesToChildResourcesOnly() + && myEncodeContext.getResourcePath().size() < 2) { retVal = true; } else if (theElements == null) { retVal = true; @@ -1125,7 +1184,9 @@ public abstract class BaseParser implements IParser { for (EncodeContextPath next : theElements) { if (next.startsWith(currentResourcePath, true)) { - if (theCheckingForEncodeElements || next.getPath().size() == currentResourcePath.getPath().size()) { + if (theCheckingForEncodeElements + || next.getPath().size() + == currentResourcePath.getPath().size()) { retVal = true; break; } @@ -1136,12 +1197,12 @@ public abstract class BaseParser implements IParser { retVal = true; break; } - if (currentResourcePath.getPath().size() > next.getPath().size()) { + if (currentResourcePath.getPath().size() + > next.getPath().size()) { retVal = true; break; } } - } } @@ -1178,9 +1239,9 @@ public abstract class BaseParser implements IParser { // as of 2019-07. See // https://github.com/smart-on-fhir/Swift-FHIR/issues/26 // for example. 
- if (("Conformance".equals(resourceName) || "CapabilityStatement".equals(resourceName)) && - ("extension".equals(myDef.getElementName()) || "extension".equals(myEncodeContext.getLeafElementName()) - )) { + if (("Conformance".equals(resourceName) || "CapabilityStatement".equals(resourceName)) + && ("extension".equals(myDef.getElementName()) + || "extension".equals(myEncodeContext.getLeafElementName()))) { // skip } else { retVal = false; @@ -1203,16 +1264,15 @@ public abstract class BaseParser implements IParser { @Override public boolean equals(Object obj) { - if (this == obj) - return true; + if (this == obj) return true; if (obj instanceof CompositeChildElement) { final CompositeChildElement that = (CompositeChildElement) obj; - return Objects.equals(this.getEnclosingInstance(), that.getEnclosingInstance()) && - Objects.equals(this.myDef, that.myDef) && - Objects.equals(this.myParent, that.myParent) && - Objects.equals(this.myResDef, that.myResDef) && - Objects.equals(this.myEncodeContext, that.myEncodeContext); + return Objects.equals(this.getEnclosingInstance(), that.getEnclosingInstance()) + && Objects.equals(this.myDef, that.myDef) + && Objects.equals(this.myParent, that.myParent) + && Objects.equals(this.myResDef, that.myResDef) + && Objects.equals(this.myEncodeContext, that.myEncodeContext); } return false; } @@ -1228,7 +1288,11 @@ public abstract class BaseParser implements IParser { private final BaseParser.CompositeChildElement theParent; private final BaseParser.EncodeContext theEncodeContext; - public Key(BaseRuntimeElementCompositeDefinition resDef, final boolean theContainedResource, final BaseParser.CompositeChildElement theParent, BaseParser.EncodeContext theEncodeContext) { + public Key( + BaseRuntimeElementCompositeDefinition resDef, + final boolean theContainedResource, + final BaseParser.CompositeChildElement theParent, + BaseParser.EncodeContext theEncodeContext) { this.resDef = resDef; this.theContainedResource = theContainedResource; this.theParent = theParent; @@ -1253,16 +1317,15 @@ public abstract class BaseParser implements IParser { } if (obj instanceof Key) { final Key that = (Key) obj; - return Objects.equals(this.resDef, that.resDef) && - this.theContainedResource == that.theContainedResource && - Objects.equals(this.theParent, that.theParent) && - Objects.equals(this.theEncodeContext, that.theEncodeContext); + return Objects.equals(this.resDef, that.resDef) + && this.theContainedResource == that.theContainedResource + && Objects.equals(this.theParent, that.theParent) + && Objects.equals(this.theEncodeContext, that.theEncodeContext); } return false; } } - protected static List extractMetadataListNotNull(IResource resource, ResourceMetadataKeyEnum> key) { List securityLabels = key.get(resource); if (securityLabels == null) { @@ -1274,7 +1337,8 @@ public abstract class BaseParser implements IParser { static boolean hasNoExtensions(IBase theElement) { if (theElement instanceof ISupportsUndeclaredExtensions) { ISupportsUndeclaredExtensions res = (ISupportsUndeclaredExtensions) theElement; - if (res.getUndeclaredExtensions().size() > 0 || res.getUndeclaredModifierExtensions().size() > 0) { + if (res.getUndeclaredExtensions().size() > 0 + || res.getUndeclaredModifierExtensions().size() > 0) { return false; } } @@ -1290,5 +1354,4 @@ public abstract class BaseParser implements IParser { } return true; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/DataFormatException.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/DataFormatException.java 
index 2d7f46b0008..3da8cd1cbc0 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/DataFormatException.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/DataFormatException.java @@ -38,5 +38,4 @@ public class DataFormatException extends RuntimeException { public DataFormatException(Throwable theCause) { super(theCause); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/ErrorHandlerAdapter.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/ErrorHandlerAdapter.java index 1c0f67e6d79..19c4f5ac60f 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/ErrorHandlerAdapter.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/ErrorHandlerAdapter.java @@ -33,7 +33,13 @@ public class ErrorHandlerAdapter implements IParserErrorHandler { } @Override - public void incorrectJsonType(IParseLocation theLocation, String theElementName, ValueType theExpected, ScalarType theExpectedScalarType, ValueType theFound, ScalarType theFoundScalarType) { + public void incorrectJsonType( + IParseLocation theLocation, + String theElementName, + ValueType theExpected, + ScalarType theExpectedScalarType, + ValueType theFound, + ScalarType theFoundScalarType) { // NOP } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/IJsonLikeParser.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/IJsonLikeParser.java index def9449c519..43c4aa9e38f 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/IJsonLikeParser.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/IJsonLikeParser.java @@ -37,11 +37,12 @@ import java.io.IOException; */ public interface IJsonLikeParser extends IParser { - void encodeResourceToJsonLikeWriter(IBaseResource theResource, BaseJsonLikeWriter theJsonLikeWriter) throws IOException, DataFormatException; + void encodeResourceToJsonLikeWriter(IBaseResource theResource, BaseJsonLikeWriter theJsonLikeWriter) + throws IOException, DataFormatException; /** * Parses a resource from a JSON-like data structure - * + * * @param theResourceType * The resource type to use. This can be used to explicitly specify a class which extends a built-in type * (e.g. a custom type extending the default Patient class) @@ -51,11 +52,12 @@ public interface IJsonLikeParser extends IParser { * @throws DataFormatException * If the resource can not be parsed because the data is not recognized or invalid for any reason */ - T parseResource(Class theResourceType, JsonLikeStructure theJsonLikeStructure) throws DataFormatException; + T parseResource(Class theResourceType, JsonLikeStructure theJsonLikeStructure) + throws DataFormatException; /** * Parses a resource from a JSON-like data structure - * + * * @param theJsonLikeStructure * The JSON-like structure to parse * @return A parsed resource. 
Note that the returned object will be an instance of {@link IResource} or @@ -64,5 +66,4 @@ public interface IJsonLikeParser extends IParser { * If the resource can not be parsed because the data is not recognized or invalid for any reason */ IBaseResource parseResource(JsonLikeStructure theJsonLikeStructure) throws DataFormatException; - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/IParser.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/IParser.java index dbd3fb801bb..e90a9e65984 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/IParser.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/IParser.java @@ -231,7 +231,8 @@ public interface IParser { * @return A parsed resource * @throws DataFormatException If the resource can not be parsed because the data is not recognized or invalid for any reason */ - T parseResource(Class theResourceType, InputStream theInputStream) throws DataFormatException; + T parseResource(Class theResourceType, InputStream theInputStream) + throws DataFormatException; /** * Parses a resource @@ -426,5 +427,4 @@ public interface IParser { * @see ParserOptions */ IParser setDontStripVersionsFromReferencesAtPaths(Collection thePaths); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/IParserErrorHandler.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/IParserErrorHandler.java index c9091d973df..4225702a609 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/IParserErrorHandler.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/IParserErrorHandler.java @@ -48,7 +48,13 @@ public interface IParserErrorHandler { * @param theFoundScalarType If theFoundValueType is {@link ValueType#SCALAR}, this is the specific scalar type found. Otherwise this parameter will be null. 
* @since 2.2 */ - void incorrectJsonType(IParseLocation theLocation, String theElementName, ValueType theExpectedValueType, ScalarType theExpectedScalarType, ValueType theFoundValueType, ScalarType theFoundScalarType); + void incorrectJsonType( + IParseLocation theLocation, + String theElementName, + ValueType theExpectedValueType, + ScalarType theExpectedScalarType, + ValueType theFoundValueType, + ScalarType theFoundScalarType); /** * The parser detected an attribute value that was invalid (such as: empty "" values are not permitted) @@ -125,7 +131,5 @@ public interface IParserErrorHandler { * @since 2.1 */ String getParentElementName(); - } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/JsonParser.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/JsonParser.java index e0fefa26aea..6f51c24e136 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/JsonParser.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/JsonParser.java @@ -109,7 +109,8 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { super(theContext, theParserErrorHandler); } - private boolean addToHeldComments(int valueIdx, List theCommentsToAdd, ArrayList> theListToAddTo) { + private boolean addToHeldComments( + int valueIdx, List theCommentsToAdd, ArrayList> theListToAddTo) { if (theCommentsToAdd.size() > 0) { theListToAddTo.ensureCapacity(valueIdx); while (theListToAddTo.size() <= valueIdx) { @@ -124,8 +125,16 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { return false; } - private boolean addToHeldExtensions(int valueIdx, List> ext, ArrayList> list, boolean theIsModifier, CompositeChildElement theChildElem, - CompositeChildElement theParent, EncodeContext theEncodeContext, boolean theContainedResource, IBase theContainingElement) { + private boolean addToHeldExtensions( + int valueIdx, + List> ext, + ArrayList> list, + boolean theIsModifier, + CompositeChildElement theChildElem, + CompositeChildElement theParent, + EncodeContext theEncodeContext, + boolean theContainedResource, + IBase theContainingElement) { boolean retVal = false; if (ext.size() > 0) { Boolean encodeExtension = null; @@ -137,7 +146,8 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { // Make sure we respect _summary and _elements if (encodeExtension == null) { - encodeExtension = isEncodeExtension(theParent, theEncodeContext, theContainedResource, theContainingElement); + encodeExtension = + isEncodeExtension(theParent, theEncodeContext, theContainedResource, theContainingElement); } if (encodeExtension) { @@ -171,11 +181,13 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { // private void assertObjectOfType(JsonLikeValue theResourceTypeObj, Object theValueType, String thePosition) { // if (theResourceTypeObj == null) { - // throw new DataFormatException(Msg.code(1836) + "Invalid JSON content detected, missing required element: '" + thePosition + "'"); + // throw new DataFormatException(Msg.code(1836) + "Invalid JSON content detected, missing required element: '" + + // thePosition + "'"); // } // // if (theResourceTypeObj.getValueType() != theValueType) { - // throw new DataFormatException(Msg.code(1837) + "Invalid content of element " + thePosition + ", expected " + theValueType); + // throw new DataFormatException(Msg.code(1837) + "Invalid content of element " + thePosition + ", expected " + + // theValueType); // } // } @@ -192,7 +204,9 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { return 
jsonStructure.getJsonLikeWriter(theWriter); } - public void doEncodeResourceToJsonLikeWriter(IBaseResource theResource, BaseJsonLikeWriter theEventWriter, EncodeContext theEncodeContext) throws IOException { + public void doEncodeResourceToJsonLikeWriter( + IBaseResource theResource, BaseJsonLikeWriter theEventWriter, EncodeContext theEncodeContext) + throws IOException { if (myPrettyPrint) { theEventWriter.setPrettyPrint(myPrettyPrint); } @@ -204,14 +218,16 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { } @Override - protected void doEncodeResourceToWriter(IBaseResource theResource, Writer theWriter, EncodeContext theEncodeContext) throws IOException { + protected void doEncodeResourceToWriter(IBaseResource theResource, Writer theWriter, EncodeContext theEncodeContext) + throws IOException { BaseJsonLikeWriter eventWriter = createJsonWriter(theWriter); doEncodeResourceToJsonLikeWriter(theResource, eventWriter, theEncodeContext); eventWriter.close(); } @Override - protected void doEncodeToWriter(IBase theElement, Writer theWriter, EncodeContext theEncodeContext) throws IOException, DataFormatException { + protected void doEncodeToWriter(IBase theElement, Writer theWriter, EncodeContext theEncodeContext) + throws IOException, DataFormatException { BaseJsonLikeWriter eventWriter = createJsonWriter(theWriter); eventWriter.beginObject(); encodeCompositeElementToStreamWriter(null, null, theElement, eventWriter, false, null, theEncodeContext); @@ -234,12 +250,14 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { BaseJsonLikeValue resourceTypeObj = object.get("resourceType"); if (resourceTypeObj == null || !resourceTypeObj.isString() || isBlank(resourceTypeObj.getAsString())) { - throw new DataFormatException(Msg.code(1838) + "Invalid JSON content detected, missing required element: 'resourceType'"); + throw new DataFormatException( + Msg.code(1838) + "Invalid JSON content detected, missing required element: 'resourceType'"); } String resourceType = resourceTypeObj.getAsString(); - ParserState state = ParserState.getPreResourceInstance(this, theResourceType, getContext(), true, getErrorHandler()); + ParserState state = + ParserState.getPreResourceInstance(this, theResourceType, getContext(), true, getErrorHandler()); state.enteringNewElement(null, resourceType); parseChildren(object, state); @@ -253,9 +271,18 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { return retVal; } - private void encodeChildElementToStreamWriter(RuntimeResourceDefinition theResDef, IBaseResource theResource, BaseJsonLikeWriter theEventWriter, IBase theNextValue, - BaseRuntimeElementDefinition theChildDef, String theChildName, boolean theContainedResource, CompositeChildElement theChildElem, - boolean theForceEmpty, EncodeContext theEncodeContext) throws IOException { + private void encodeChildElementToStreamWriter( + RuntimeResourceDefinition theResDef, + IBaseResource theResource, + BaseJsonLikeWriter theEventWriter, + IBase theNextValue, + BaseRuntimeElementDefinition theChildDef, + String theChildName, + boolean theContainedResource, + CompositeChildElement theChildElem, + boolean theForceEmpty, + EncodeContext theEncodeContext) + throws IOException { switch (theChildDef.getChildType()) { case ID_DATATYPE: { @@ -345,7 +372,14 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { } else { theEventWriter.beginObject(); } - encodeCompositeElementToStreamWriter(theResDef, theResource, theNextValue, theEventWriter, 
theContainedResource, theChildElem, theEncodeContext); + encodeCompositeElementToStreamWriter( + theResDef, + theResource, + theNextValue, + theEventWriter, + theContainedResource, + theChildElem, + theEncodeContext); theEventWriter.endObject(); break; } @@ -358,7 +392,14 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { for (IBaseResource next : containedResources) { IIdType resourceId = getContainedResources().getResourceId(next); String value = resourceId.getValue(); - encodeResourceToJsonStreamWriter(theResDef, next, theEventWriter, null, true, fixContainedResourceId(value), theEncodeContext); + encodeResourceToJsonStreamWriter( + theResDef, + next, + theEventWriter, + null, + true, + fixContainedResourceId(value), + theEncodeContext); } theEventWriter.endArray(); @@ -388,19 +429,27 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { RuntimeResourceDefinition def = getContext().getResourceDefinition(resource); theEncodeContext.pushPath(def.getName(), true); - encodeResourceToJsonStreamWriter(def, resource, theEventWriter, theChildName, theContainedResource, theEncodeContext); + encodeResourceToJsonStreamWriter( + def, resource, theEventWriter, theChildName, theContainedResource, theEncodeContext); theEncodeContext.popPath(); break; case UNDECL_EXT: default: - throw new IllegalStateException(Msg.code(1839) + "Should not have this state here: " + theChildDef.getChildType().name()); + throw new IllegalStateException(Msg.code(1839) + "Should not have this state here: " + + theChildDef.getChildType().name()); } - } - private void encodeCompositeElementChildrenToStreamWriter(RuntimeResourceDefinition theResDef, IBaseResource theResource, IBase theElement, BaseJsonLikeWriter theEventWriter, - boolean theContainedResource, CompositeChildElement theParent, EncodeContext theEncodeContext) throws IOException { + private void encodeCompositeElementChildrenToStreamWriter( + RuntimeResourceDefinition theResDef, + IBaseResource theResource, + IBase theElement, + BaseJsonLikeWriter theEventWriter, + boolean theContainedResource, + CompositeChildElement theParent, + EncodeContext theEncodeContext) + throws IOException { { String elementId = getCompositeElementId(theElement); @@ -410,15 +459,26 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { } boolean haveWrittenExtensions = false; - Iterable compositeChildElements = super.compositeChildIterator(theElement, theContainedResource, theParent, theEncodeContext); + Iterable compositeChildElements = + super.compositeChildIterator(theElement, theContainedResource, theParent, theEncodeContext); for (CompositeChildElement nextChildElem : compositeChildElements) { BaseRuntimeChildDefinition nextChild = nextChildElem.getDef(); - if (nextChildElem.getDef().getElementName().equals("extension") || nextChildElem.getDef().getElementName().equals("modifierExtension") - || nextChild instanceof RuntimeChildDeclaredExtensionDefinition) { + if (nextChildElem.getDef().getElementName().equals("extension") + || nextChildElem.getDef().getElementName().equals("modifierExtension") + || nextChild instanceof RuntimeChildDeclaredExtensionDefinition) { if (!haveWrittenExtensions) { - extractAndWriteExtensionsAsDirectChild(theElement, theEventWriter, getContext().getElementDefinition(theElement.getClass()), theResDef, theResource, nextChildElem, theParent, theEncodeContext, theContainedResource); + extractAndWriteExtensionsAsDirectChild( + theElement, + theEventWriter, + 
getContext().getElementDefinition(theElement.getClass()), + theResDef, + theResource, + nextChildElem, + theParent, + theEncodeContext, + theContainedResource); haveWrittenExtensions = true; } continue; @@ -441,7 +501,17 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { RuntimeChildNarrativeDefinition child = (RuntimeChildNarrativeDefinition) nextChild; String childName = nextChild.getChildNameByDatatype(child.getDatatype()); BaseRuntimeElementDefinition type = child.getChildByName(childName); - encodeChildElementToStreamWriter(theResDef, theResource, theEventWriter, narr, type, childName, theContainedResource, nextChildElem, false, theEncodeContext); + encodeChildElementToStreamWriter( + theResDef, + theResource, + theEventWriter, + narr, + type, + childName, + theContainedResource, + nextChildElem, + false, + theEncodeContext); continue; } } @@ -449,7 +519,17 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { } else if (nextChild instanceof RuntimeChildContainedResources) { String childName = nextChild.getValidChildNames().iterator().next(); BaseRuntimeElementDefinition child = nextChild.getChildByName(childName); - encodeChildElementToStreamWriter(theResDef, theResource, theEventWriter, null, child, childName, theContainedResource, nextChildElem, false, theEncodeContext); + encodeChildElementToStreamWriter( + theResDef, + theResource, + theEventWriter, + null, + child, + childName, + theContainedResource, + nextChildElem, + false, + theEncodeContext); continue; } @@ -501,7 +581,9 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { BaseRuntimeElementDefinition childDef = childNameAndDef.getChildDef(); boolean primitive = childDef.getChildType() == ChildTypeEnum.PRIMITIVE_DATATYPE; - if ((childDef.getChildType() == ChildTypeEnum.CONTAINED_RESOURCES || childDef.getChildType() == ChildTypeEnum.CONTAINED_RESOURCE_LIST) && theContainedResource) { + if ((childDef.getChildType() == ChildTypeEnum.CONTAINED_RESOURCES + || childDef.getChildType() == ChildTypeEnum.CONTAINED_RESOURCE_LIST) + && theContainedResource) { continue; } @@ -509,20 +591,56 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { if (primitive) { if (nextValue instanceof ISupportsUndeclaredExtensions) { List ext = ((ISupportsUndeclaredExtensions) nextValue).getUndeclaredExtensions(); - force |= addToHeldExtensions(valueIdx, ext, extensions, false, nextChildElem, theParent, theEncodeContext, theContainedResource, theElement); + force |= addToHeldExtensions( + valueIdx, + ext, + extensions, + false, + nextChildElem, + theParent, + theEncodeContext, + theContainedResource, + theElement); ext = ((ISupportsUndeclaredExtensions) nextValue).getUndeclaredModifierExtensions(); - force |= addToHeldExtensions(valueIdx, ext, modifierExtensions, true, nextChildElem, theParent, theEncodeContext, theContainedResource, theElement); + force |= addToHeldExtensions( + valueIdx, + ext, + modifierExtensions, + true, + nextChildElem, + theParent, + theEncodeContext, + theContainedResource, + theElement); } else { if (nextValue instanceof IBaseHasExtensions) { IBaseHasExtensions element = (IBaseHasExtensions) nextValue; List> ext = element.getExtension(); - force |= addToHeldExtensions(valueIdx, ext, extensions, false, nextChildElem, theParent, theEncodeContext, theContainedResource, theElement); + force |= addToHeldExtensions( + valueIdx, + ext, + extensions, + false, + nextChildElem, + theParent, + theEncodeContext, + theContainedResource, + theElement); } 
if (nextValue instanceof IBaseHasModifierExtensions) { IBaseHasModifierExtensions element = (IBaseHasModifierExtensions) nextValue; List> ext = element.getModifierExtension(); - force |= addToHeldExtensions(valueIdx, ext, modifierExtensions, true, nextChildElem, theParent, theEncodeContext, theContainedResource, theElement); + force |= addToHeldExtensions( + valueIdx, + ext, + modifierExtensions, + true, + nextChildElem, + theParent, + theEncodeContext, + theContainedResource, + theElement); } } if (nextValue.hasFormatComment()) { @@ -541,16 +659,48 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { theEventWriter.endArray(); } BaseRuntimeChildDefinition replacedParentDefinition = nextChild.getReplacedParentDefinition(); - if (isMultipleCardinality(nextChild.getMax()) || (replacedParentDefinition != null && isMultipleCardinality(replacedParentDefinition.getMax()))) { + if (isMultipleCardinality(nextChild.getMax()) + || (replacedParentDefinition != null + && isMultipleCardinality(replacedParentDefinition.getMax()))) { beginArray(theEventWriter, nextChildSpecificName); inArray = true; - encodeChildElementToStreamWriter(theResDef, theResource, theEventWriter, nextValue, childDef, null, theContainedResource, nextChildElem, force, theEncodeContext); + encodeChildElementToStreamWriter( + theResDef, + theResource, + theEventWriter, + nextValue, + childDef, + null, + theContainedResource, + nextChildElem, + force, + theEncodeContext); } else { - encodeChildElementToStreamWriter(theResDef, theResource, theEventWriter, nextValue, childDef, nextChildSpecificName, theContainedResource, nextChildElem, false, theEncodeContext); + encodeChildElementToStreamWriter( + theResDef, + theResource, + theEventWriter, + nextValue, + childDef, + nextChildSpecificName, + theContainedResource, + nextChildElem, + false, + theEncodeContext); } currentChildName = nextChildSpecificName; } else { - encodeChildElementToStreamWriter(theResDef, theResource, theEventWriter, nextValue, childDef, null, theContainedResource, nextChildElem, force, theEncodeContext); + encodeChildElementToStreamWriter( + theResDef, + theResource, + theEventWriter, + nextValue, + childDef, + null, + theContainedResource, + nextChildElem, + force, + theEncodeContext); } valueIdx++; @@ -561,8 +711,9 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { theEventWriter.endArray(); } - - if (!extensions.isEmpty() || !modifierExtensions.isEmpty() || (!comments.isEmpty() && isSupportsFhirComment())) { + if (!extensions.isEmpty() + || !modifierExtensions.isEmpty() + || (!comments.isEmpty() && isSupportsFhirComment())) { if (inArray) { // If this is a repeatable field, the extensions go in an array too beginArray(theEventWriter, '_' + currentChildName); @@ -575,12 +726,16 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { List heldExts = Collections.emptyList(); List heldModExts = Collections.emptyList(); - if (extensions.size() > i && extensions.get(i) != null && extensions.get(i).isEmpty() == false) { + if (extensions.size() > i + && extensions.get(i) != null + && extensions.get(i).isEmpty() == false) { haveContent = true; heldExts = extensions.get(i); } - if (modifierExtensions.size() > i && modifierExtensions.get(i) != null && modifierExtensions.get(i).isEmpty() == false) { + if (modifierExtensions.size() > i + && modifierExtensions.get(i) != null + && modifierExtensions.get(i).isEmpty() == false) { haveContent = true; heldModExts = modifierExtensions.get(i); } @@ -619,7 +774,14 @@ 
public class JsonParser extends BaseParser implements IJsonLikeParser { theEventWriter.endArray(); } } - writeExtensionsAsDirectChild(theResource, theEventWriter, theResDef, heldExts, heldModExts, theEncodeContext, theContainedResource); + writeExtensionsAsDirectChild( + theResource, + theEventWriter, + theResDef, + heldExts, + heldModExts, + theEncodeContext, + theContainedResource); if (inArray) { theEventWriter.endObject(); } @@ -646,19 +808,38 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { return maxCardinality > 1 || maxCardinality == Child.MAX_UNLIMITED; } - private void encodeCompositeElementToStreamWriter(RuntimeResourceDefinition theResDef, IBaseResource theResource, IBase theNextValue, BaseJsonLikeWriter theEventWriter, boolean theContainedResource, CompositeChildElement theParent, EncodeContext theEncodeContext) throws IOException, DataFormatException { + private void encodeCompositeElementToStreamWriter( + RuntimeResourceDefinition theResDef, + IBaseResource theResource, + IBase theNextValue, + BaseJsonLikeWriter theEventWriter, + boolean theContainedResource, + CompositeChildElement theParent, + EncodeContext theEncodeContext) + throws IOException, DataFormatException { writeCommentsPreAndPost(theNextValue, theEventWriter); - encodeCompositeElementChildrenToStreamWriter(theResDef, theResource, theNextValue, theEventWriter, theContainedResource, theParent, theEncodeContext); + encodeCompositeElementChildrenToStreamWriter( + theResDef, + theResource, + theNextValue, + theEventWriter, + theContainedResource, + theParent, + theEncodeContext); } @Override - public void encodeResourceToJsonLikeWriter(IBaseResource theResource, BaseJsonLikeWriter theJsonLikeWriter) throws IOException, DataFormatException { + public void encodeResourceToJsonLikeWriter(IBaseResource theResource, BaseJsonLikeWriter theJsonLikeWriter) + throws IOException, DataFormatException { Validate.notNull(theResource, "theResource can not be null"); Validate.notNull(theJsonLikeWriter, "theJsonLikeWriter can not be null"); - if (theResource.getStructureFhirVersionEnum() != getContext().getVersion().getVersion()) { - throw new IllegalArgumentException(Msg.code(1840) + "This parser is for FHIR version " + getContext().getVersion().getVersion() + " - Can not encode a structure for version " + theResource.getStructureFhirVersionEnum()); + if (theResource.getStructureFhirVersionEnum() + != getContext().getVersion().getVersion()) { + throw new IllegalArgumentException(Msg.code(1840) + "This parser is for FHIR version " + + getContext().getVersion().getVersion() + " - Can not encode a structure for version " + + theResource.getStructureFhirVersionEnum()); } EncodeContext encodeContext = new EncodeContext(); @@ -667,8 +848,14 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { doEncodeResourceToJsonLikeWriter(theResource, theJsonLikeWriter, encodeContext); } - private void encodeResourceToJsonStreamWriter(RuntimeResourceDefinition theResDef, IBaseResource theResource, BaseJsonLikeWriter theEventWriter, String theObjectNameOrNull, - boolean theContainedResource, EncodeContext theEncodeContext) throws IOException { + private void encodeResourceToJsonStreamWriter( + RuntimeResourceDefinition theResDef, + IBaseResource theResource, + BaseJsonLikeWriter theEventWriter, + String theObjectNameOrNull, + boolean theContainedResource, + EncodeContext theEncodeContext) + throws IOException { IIdType resourceId = null; if (StringUtils.isNotBlank(theResource.getIdElement().getIdPart())) 
{ @@ -686,11 +873,25 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { } } - encodeResourceToJsonStreamWriter(theResDef, theResource, theEventWriter, theObjectNameOrNull, theContainedResource, resourceId, theEncodeContext); + encodeResourceToJsonStreamWriter( + theResDef, + theResource, + theEventWriter, + theObjectNameOrNull, + theContainedResource, + resourceId, + theEncodeContext); } - private void encodeResourceToJsonStreamWriter(RuntimeResourceDefinition theResDef, IBaseResource theResource, BaseJsonLikeWriter theEventWriter, String theObjectNameOrNull, - boolean theContainedResource, IIdType theResourceId, EncodeContext theEncodeContext) throws IOException { + private void encodeResourceToJsonStreamWriter( + RuntimeResourceDefinition theResDef, + IBaseResource theResource, + BaseJsonLikeWriter theEventWriter, + String theObjectNameOrNull, + boolean theContainedResource, + IIdType theResourceId, + EncodeContext theEncodeContext) + throws IOException { if (!super.shouldEncodeResource(theResDef.getName())) { return; @@ -714,7 +915,8 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { final List extensions = new ArrayList<>(0); final List modifierExtensions = new ArrayList<>(0); // Undeclared extensions - extractUndeclaredExtensions(theResourceId, extensions, modifierExtensions, null, null, theEncodeContext, theContainedResource); + extractUndeclaredExtensions( + theResourceId, extensions, modifierExtensions, null, null, theEncodeContext, theContainedResource); boolean haveExtension = false; if (!extensions.isEmpty()) { haveExtension = true; @@ -726,7 +928,14 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { writeCommentsPreAndPost(theResourceId, theEventWriter); } if (haveExtension) { - writeExtensionsAsDirectChild(theResource, theEventWriter, theResDef, extensions, modifierExtensions, theEncodeContext, theContainedResource); + writeExtensionsAsDirectChild( + theResource, + theEventWriter, + theResDef, + extensions, + modifierExtensions, + theEncodeContext, + theContainedResource); } theEventWriter.endObject(); } @@ -736,16 +945,31 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { parseMetaForDSTU2(theResDef, theResource, theEventWriter, theContainedResource, theEncodeContext, resDef); } - encodeCompositeElementToStreamWriter(theResDef, theResource, theResource, theEventWriter, theContainedResource, new CompositeChildElement(resDef, theEncodeContext), theEncodeContext); + encodeCompositeElementToStreamWriter( + theResDef, + theResource, + theResource, + theEventWriter, + theContainedResource, + new CompositeChildElement(resDef, theEncodeContext), + theEncodeContext); theEventWriter.endObject(); } - private void parseMetaForDSTU2(RuntimeResourceDefinition theResDef, IBaseResource theResource, BaseJsonLikeWriter theEventWriter, boolean theContainedResource, EncodeContext theEncodeContext, RuntimeResourceDefinition resDef) throws IOException { + private void parseMetaForDSTU2( + RuntimeResourceDefinition theResDef, + IBaseResource theResource, + BaseJsonLikeWriter theEventWriter, + boolean theContainedResource, + EncodeContext theEncodeContext, + RuntimeResourceDefinition resDef) + throws IOException { IResource resource = (IResource) theResource; // Object securityLabelRawObj = - List securityLabels = extractMetadataListNotNull(resource, ResourceMetadataKeyEnum.SECURITY_LABELS); + List securityLabels = + extractMetadataListNotNull(resource, ResourceMetadataKeyEnum.SECURITY_LABELS); List 
profiles = extractMetadataListNotNull(resource, ResourceMetadataKeyEnum.PROFILES); profiles = super.getProfileTagsForEncoding(resource, profiles); @@ -758,7 +982,9 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { } List, Object>> extensionMetadataKeys = getExtensionMetadataKeys(resource); - if (super.shouldEncodeResourceMeta(resource) && (ElementUtil.isEmpty(versionIdPart, updated, securityLabels, tags, profiles) == false) || !extensionMetadataKeys.isEmpty()) { + if (super.shouldEncodeResourceMeta(resource) + && (ElementUtil.isEmpty(versionIdPart, updated, securityLabels, tags, profiles) == false) + || !extensionMetadataKeys.isEmpty()) { beginObject(theEventWriter, "meta"); if (shouldEncodePath(resource, "meta.versionId")) { @@ -783,7 +1009,14 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { for (BaseCodingDt securityLabel : securityLabels) { theEventWriter.beginObject(); theEncodeContext.pushPath("security", false); - encodeCompositeElementChildrenToStreamWriter(resDef, resource, securityLabel, theEventWriter, theContainedResource, null, theEncodeContext); + encodeCompositeElementChildrenToStreamWriter( + resDef, + resource, + securityLabel, + theEventWriter, + theContainedResource, + null, + theEncodeContext); theEncodeContext.popPath(); theEventWriter.endObject(); } @@ -806,18 +1039,28 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { theEventWriter.endArray(); } - addExtensionMetadata(theResDef, theResource, theContainedResource, extensionMetadataKeys, resDef, theEventWriter, theEncodeContext); + addExtensionMetadata( + theResDef, + theResource, + theContainedResource, + extensionMetadataKeys, + resDef, + theEventWriter, + theEncodeContext); theEventWriter.endObject(); // end meta } } - - private void addExtensionMetadata(RuntimeResourceDefinition theResDef, IBaseResource theResource, - boolean theContainedResource, - List, Object>> extensionMetadataKeys, - RuntimeResourceDefinition resDef, - BaseJsonLikeWriter theEventWriter, EncodeContext theEncodeContext) throws IOException { + private void addExtensionMetadata( + RuntimeResourceDefinition theResDef, + IBaseResource theResource, + boolean theContainedResource, + List, Object>> extensionMetadataKeys, + RuntimeResourceDefinition resDef, + BaseJsonLikeWriter theEventWriter, + EncodeContext theEncodeContext) + throws IOException { if (extensionMetadataKeys.isEmpty()) { return; } @@ -826,20 +1069,43 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { for (Map.Entry, Object> entry : extensionMetadataKeys) { metaResource.addUndeclaredExtension((ExtensionDt) entry.getValue()); } - encodeCompositeElementToStreamWriter(theResDef, theResource, metaResource, theEventWriter, theContainedResource, new CompositeChildElement(resDef, theEncodeContext), theEncodeContext); + encodeCompositeElementToStreamWriter( + theResDef, + theResource, + metaResource, + theEventWriter, + theContainedResource, + new CompositeChildElement(resDef, theEncodeContext), + theEncodeContext); } /** * This is useful only for the two cases where extensions are encoded as direct children (e.g. 
not in some object * called _name): resource extensions, and extension extensions */ - private void extractAndWriteExtensionsAsDirectChild(IBase theElement, BaseJsonLikeWriter theEventWriter, BaseRuntimeElementDefinition theElementDef, RuntimeResourceDefinition theResDef, - IBaseResource theResource, CompositeChildElement theChildElem, CompositeChildElement theParent, EncodeContext theEncodeContext, boolean theContainedResource) throws IOException { + private void extractAndWriteExtensionsAsDirectChild( + IBase theElement, + BaseJsonLikeWriter theEventWriter, + BaseRuntimeElementDefinition theElementDef, + RuntimeResourceDefinition theResDef, + IBaseResource theResource, + CompositeChildElement theChildElem, + CompositeChildElement theParent, + EncodeContext theEncodeContext, + boolean theContainedResource) + throws IOException { List extensions = new ArrayList<>(0); List modifierExtensions = new ArrayList<>(0); // Undeclared extensions - extractUndeclaredExtensions(theElement, extensions, modifierExtensions, theChildElem, theParent, theEncodeContext, theContainedResource); + extractUndeclaredExtensions( + theElement, + extensions, + modifierExtensions, + theChildElem, + theParent, + theEncodeContext, + theContainedResource); // Declared extensions if (theElementDef != null) { @@ -847,11 +1113,22 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { } // Write the extensions - writeExtensionsAsDirectChild(theResource, theEventWriter, theResDef, extensions, modifierExtensions, theEncodeContext, theContainedResource); + writeExtensionsAsDirectChild( + theResource, + theEventWriter, + theResDef, + extensions, + modifierExtensions, + theEncodeContext, + theContainedResource); } - private void extractDeclaredExtensions(IBase theResource, BaseRuntimeElementDefinition resDef, List extensions, List modifierExtensions, - CompositeChildElement theChildElem) { + private void extractDeclaredExtensions( + IBase theResource, + BaseRuntimeElementDefinition resDef, + List extensions, + List modifierExtensions, + CompositeChildElement theChildElem) { for (RuntimeChildDeclaredExtensionDefinition nextDef : resDef.getExtensionsNonModifier()) { for (IBase nextValue : nextDef.getAccessor().getValues(theResource)) { if (nextValue != null) { @@ -874,8 +1151,14 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { } } - private void extractUndeclaredExtensions(IBase theElement, List extensions, List modifierExtensions, CompositeChildElement theChildElem, - CompositeChildElement theParent, EncodeContext theEncodeContext, boolean theContainedResource) { + private void extractUndeclaredExtensions( + IBase theElement, + List extensions, + List modifierExtensions, + CompositeChildElement theChildElem, + CompositeChildElement theParent, + EncodeContext theEncodeContext, + boolean theContainedResource) { if (theElement instanceof ISupportsUndeclaredExtensions) { ISupportsUndeclaredExtensions element = (ISupportsUndeclaredExtensions) theElement; List ext = element.getUndeclaredExtensions(); @@ -899,7 +1182,9 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { List> ext = element.getExtension(); Boolean encodeExtension = null; for (IBaseExtension next : ext) { - if (next == null || (ElementUtil.isEmpty(next.getValue()) && next.getExtension().isEmpty())) { + if (next == null + || (ElementUtil.isEmpty(next.getValue()) + && next.getExtension().isEmpty())) { continue; } @@ -911,7 +1196,6 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { 
HeldExtension extension = new HeldExtension(next, false, theChildElem, theParent); extensions.add(extension); } - } } if (theElement instanceof IBaseHasModifierExtensions) { @@ -929,11 +1213,17 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { } } - private boolean isEncodeExtension(CompositeChildElement theParent, EncodeContext theEncodeContext, boolean theContainedResource, IBase theElement) { - BaseRuntimeElementDefinition runtimeElementDefinition = getContext().getElementDefinition(theElement.getClass()); + private boolean isEncodeExtension( + CompositeChildElement theParent, + EncodeContext theEncodeContext, + boolean theContainedResource, + IBase theElement) { + BaseRuntimeElementDefinition runtimeElementDefinition = + getContext().getElementDefinition(theElement.getClass()); boolean retVal = true; if (runtimeElementDefinition instanceof BaseRuntimeElementCompositeDefinition) { - BaseRuntimeElementCompositeDefinition definition = (BaseRuntimeElementCompositeDefinition) runtimeElementDefinition; + BaseRuntimeElementCompositeDefinition definition = + (BaseRuntimeElementCompositeDefinition) runtimeElementDefinition; BaseRuntimeChildDefinition childDef = definition.getChildByName("extension"); CompositeChildElement c = new CompositeChildElement(theParent, childDef, theEncodeContext); retVal = c.shouldBeEncoded(theContainedResource); @@ -952,12 +1242,18 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { return null; } if (!object.isArray()) { - throw new DataFormatException(Msg.code(1841) + "Syntax error parsing JSON FHIR structure: Expected ARRAY at element '" + thePosition + "', found '" + object.getJsonType() + "'"); + throw new DataFormatException( + Msg.code(1841) + "Syntax error parsing JSON FHIR structure: Expected ARRAY at element '" + + thePosition + "', found '" + object.getJsonType() + "'"); } return object.getAsArray(); } - private void parseAlternates(BaseJsonLikeValue theAlternateVal, ParserState theState, String theElementName, String theAlternateName) { + private void parseAlternates( + BaseJsonLikeValue theAlternateVal, + ParserState theState, + String theElementName, + String theAlternateName) { if (theAlternateVal == null || theAlternateVal.isNull()) { return; } @@ -965,7 +1261,8 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { if (theAlternateVal.isArray()) { BaseJsonLikeArray array = theAlternateVal.getAsArray(); if (array.size() > 1) { - throw new DataFormatException(Msg.code(1842) + "Unexpected array of length " + array.size() + " (expected 0 or 1) for element: " + theElementName); + throw new DataFormatException(Msg.code(1842) + "Unexpected array of length " + array.size() + + " (expected 0 or 1) for element: " + theElementName); } if (array.size() == 0) { return; @@ -976,7 +1273,9 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { BaseJsonLikeValue alternateVal = theAlternateVal; if (alternateVal.isObject() == false) { - getErrorHandler().incorrectJsonType(null, theAlternateName, ValueType.OBJECT, null, alternateVal.getJsonType(), null); + getErrorHandler() + .incorrectJsonType( + null, theAlternateName, ValueType.OBJECT, null, alternateVal.getJsonType(), null); return; } @@ -997,7 +1296,14 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { if (nextVal.isString()) { theState.attributeValue("id", nextVal.getAsString()); } else { - getErrorHandler().incorrectJsonType(null, "id", ValueType.SCALAR, ScalarType.STRING, nextVal.getJsonType(), 
nextVal.getDataType()); + getErrorHandler() + .incorrectJsonType( + null, + "id", + ValueType.SCALAR, + ScalarType.STRING, + nextVal.getJsonType(), + nextVal.getDataType()); } } else if ("fhir_comments".equals(nextKey)) { parseFhirComments(nextVal, theState); @@ -1039,7 +1345,6 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { } parseChildren(theState, nextName, nextVal, alternateVal, alternateName, false); - } // if (elementId != null) { @@ -1071,19 +1376,33 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { theState.endingElement(); } } else { - getErrorHandler().incorrectJsonType(null, alternateName, ValueType.OBJECT, null, nextValue.getJsonType(), null); + getErrorHandler() + .incorrectJsonType( + null, alternateName, ValueType.OBJECT, null, nextValue.getJsonType(), null); } } } } } - } - private void parseChildren(ParserState theState, String theName, BaseJsonLikeValue theJsonVal, BaseJsonLikeValue theAlternateVal, String theAlternateName, boolean theInArray) { + private void parseChildren( + ParserState theState, + String theName, + BaseJsonLikeValue theJsonVal, + BaseJsonLikeValue theAlternateVal, + String theAlternateName, + boolean theInArray) { if (theName.equals("id")) { if (!theJsonVal.isString()) { - getErrorHandler().incorrectJsonType(null, "id", ValueType.SCALAR, ScalarType.STRING, theJsonVal.getJsonType(), theJsonVal.getDataType()); + getErrorHandler() + .incorrectJsonType( + null, + "id", + ValueType.SCALAR, + ScalarType.STRING, + theJsonVal.getJsonType(), + theJsonVal.getDataType()); } } @@ -1092,7 +1411,9 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { BaseJsonLikeValue alternateVal = theAlternateVal; if (alternateVal != null && alternateVal.isArray() == false) { - getErrorHandler().incorrectJsonType(null, theAlternateName, ValueType.ARRAY, null, alternateVal.getJsonType(), null); + getErrorHandler() + .incorrectJsonType( + null, theAlternateName, ValueType.ARRAY, null, alternateVal.getJsonType(), null); alternateVal = null; } @@ -1117,7 +1438,8 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { if (theState.isPreResource()) { BaseJsonLikeValue resType = nextObject.get("resourceType"); if (resType == null || !resType.isString()) { - throw new DataFormatException(Msg.code(1843) + "Missing required element 'resourceType' from JSON resource object, unable to parse"); + throw new DataFormatException(Msg.code(1843) + + "Missing required element 'resourceType' from JSON resource object, unable to parse"); } theState.enteringNewElement(null, resType.getAsString()); preResource = true; @@ -1156,7 +1478,8 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { } else { parentElementName = "extension"; } - getErrorHandler().missingRequiredElement(new ParseLocation().setParentElementName(parentElementName), "url"); + getErrorHandler() + .missingRequiredElement(new ParseLocation().setParentElementName(parentElementName), "url"); url = null; } else { url = getExtensionUrl(jsonElement.getAsString()); @@ -1206,7 +1529,14 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { theState.endingElement(); } } else { - getErrorHandler().incorrectJsonType(null, alternateName, ValueType.OBJECT, null, nextValue.getJsonType(), null); + getErrorHandler() + .incorrectJsonType( + null, + alternateName, + ValueType.OBJECT, + null, + nextValue.getJsonType(), + null); } } } @@ -1234,7 +1564,8 @@ public class JsonParser extends BaseParser implements IJsonLikeParser 
{ } @Override - public T parseResource(Class theResourceType, JsonLikeStructure theJsonLikeStructure) throws DataFormatException { + public T parseResource(Class theResourceType, JsonLikeStructure theJsonLikeStructure) + throws DataFormatException { /***************************************************** * ************************************************* * @@ -1261,7 +1592,8 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { if ("Bundle".equals(def.getName())) { BaseRuntimeChildDefinition entryChild = def.getChildByName("entry"); - BaseRuntimeElementCompositeDefinition entryDef = (BaseRuntimeElementCompositeDefinition) entryChild.getChildByName("entry"); + BaseRuntimeElementCompositeDefinition entryDef = + (BaseRuntimeElementCompositeDefinition) entryChild.getChildByName("entry"); List entries = entryChild.getAccessor().getValues(retVal); if (entries != null) { for (IBase nextEntry : entries) { @@ -1269,7 +1601,8 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { /** * If Bundle.entry.fullUrl is populated, set the resource ID to that */ - // TODO: should emit a warning and maybe notify the error handler if the resource ID doesn't match the + // TODO: should emit a warning and maybe notify the error handler if the resource ID doesn't match + // the // fullUrl idPart BaseRuntimeChildDefinition fullUrlChild = entryDef.getChildByName("fullUrl"); if (fullUrlChild == null) { @@ -1279,7 +1612,9 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { if (fullUrl != null && !fullUrl.isEmpty()) { IPrimitiveType value = (IPrimitiveType) fullUrl.get(0); if (value.isEmpty() == false) { - List entryResources = entryDef.getChildByName("resource").getAccessor().getValues(nextEntry); + List entryResources = entryDef.getChildByName("resource") + .getAccessor() + .getValues(nextEntry); if (entryResources != null && entryResources.size() > 0) { IBaseResource res = (IBaseResource) entryResources.get(0); String versionId = res.getIdElement().getVersionIdPart(); @@ -1290,10 +1625,8 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { } } } - } } - } return retVal; @@ -1342,7 +1675,8 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { // theState.endingElement(); // } - private void write(BaseJsonLikeWriter theEventWriter, String theChildName, BigDecimal theDecimalValue) throws IOException { + private void write(BaseJsonLikeWriter theEventWriter, String theChildName, BigDecimal theDecimalValue) + throws IOException { theEventWriter.write(theChildName, theDecimalValue); } @@ -1371,8 +1705,15 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { } } - private void writeExtensionsAsDirectChild(IBaseResource theResource, BaseJsonLikeWriter theEventWriter, RuntimeResourceDefinition resDef, List extensions, - List modifierExtensions, EncodeContext theEncodeContext, boolean theContainedResource) throws IOException { + private void writeExtensionsAsDirectChild( + IBaseResource theResource, + BaseJsonLikeWriter theEventWriter, + RuntimeResourceDefinition resDef, + List extensions, + List modifierExtensions, + EncodeContext theEncodeContext, + boolean theContainedResource) + throws IOException { // Write Extensions if (extensions.isEmpty() == false) { theEncodeContext.pushPath("extension", false); @@ -1396,7 +1737,9 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { } } - private void writeOptionalTagWithTextNode(BaseJsonLikeWriter theEventWriter, String 
theElementName, IPrimitiveDatatype thePrimitive) throws IOException { + private void writeOptionalTagWithTextNode( + BaseJsonLikeWriter theEventWriter, String theElementName, IPrimitiveDatatype thePrimitive) + throws IOException { if (thePrimitive == null) { return; } @@ -1404,7 +1747,8 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { writeOptionalTagWithTextNode(theEventWriter, theElementName, str); } - private void writeOptionalTagWithTextNode(BaseJsonLikeWriter theEventWriter, String theElementName, String theValue) throws IOException { + private void writeOptionalTagWithTextNode(BaseJsonLikeWriter theEventWriter, String theElementName, String theValue) + throws IOException { if (StringUtils.isNotBlank(theValue)) { write(theEventWriter, theElementName, theValue); } @@ -1423,7 +1767,11 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { private IBase myValue; private CompositeChildElement myParent; - public HeldExtension(IBaseExtension theUndeclaredExtension, boolean theModifier, CompositeChildElement theChildElem, CompositeChildElement theParent) { + public HeldExtension( + IBaseExtension theUndeclaredExtension, + boolean theModifier, + CompositeChildElement theChildElem, + CompositeChildElement theParent) { assert theUndeclaredExtension != null; myUndeclaredExtension = theUndeclaredExtension; myModifier = theModifier; @@ -1431,7 +1779,8 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { myParent = theParent; } - public HeldExtension(RuntimeChildDeclaredExtensionDefinition theDef, IBase theValue, CompositeChildElement theChildElem) { + public HeldExtension( + RuntimeChildDeclaredExtensionDefinition theDef, IBase theValue, CompositeChildElement theChildElem) { assert theDef != null; assert theValue != null; myDef = theDef; @@ -1442,18 +1791,35 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { @Override public int compareTo(HeldExtension theArg0) { String url1 = myDef != null ? myDef.getExtensionUrl() : myUndeclaredExtension.getUrl(); - String url2 = theArg0.myDef != null ? theArg0.myDef.getExtensionUrl() : theArg0.myUndeclaredExtension.getUrl(); + String url2 = + theArg0.myDef != null ? 
theArg0.myDef.getExtensionUrl() : theArg0.myUndeclaredExtension.getUrl(); url1 = defaultString(getExtensionUrl(url1)); url2 = defaultString(getExtensionUrl(url2)); return url1.compareTo(url2); } - private void managePrimitiveExtension(final IBase theValue, final RuntimeResourceDefinition theResDef, final IBaseResource theResource, final BaseJsonLikeWriter theEventWriter, final BaseRuntimeElementDefinition def, final String childName, EncodeContext theEncodeContext, boolean theContainedResource) throws IOException { + private void managePrimitiveExtension( + final IBase theValue, + final RuntimeResourceDefinition theResDef, + final IBaseResource theResource, + final BaseJsonLikeWriter theEventWriter, + final BaseRuntimeElementDefinition def, + final String childName, + EncodeContext theEncodeContext, + boolean theContainedResource) + throws IOException { if (def.getChildType().equals(ID_DATATYPE) || def.getChildType().equals(PRIMITIVE_DATATYPE)) { final List extensions = new ArrayList(0); final List modifierExtensions = new ArrayList(0); // Undeclared extensions - extractUndeclaredExtensions(theValue, extensions, modifierExtensions, myParent, null, theEncodeContext, theContainedResource); + extractUndeclaredExtensions( + theValue, + extensions, + modifierExtensions, + myParent, + null, + theEncodeContext, + theContainedResource); // Declared extensions extractDeclaredExtensions(theValue, def, extensions, modifierExtensions, myParent); boolean haveContent = false; @@ -1462,15 +1828,34 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { } if (haveContent) { beginObject(theEventWriter, '_' + childName); - writeExtensionsAsDirectChild(theResource, theEventWriter, theResDef, extensions, modifierExtensions, theEncodeContext, theContainedResource); + writeExtensionsAsDirectChild( + theResource, + theEventWriter, + theResDef, + extensions, + modifierExtensions, + theEncodeContext, + theContainedResource); theEventWriter.endObject(); } } } - public void write(RuntimeResourceDefinition theResDef, IBaseResource theResource, BaseJsonLikeWriter theEventWriter, EncodeContext theEncodeContext, boolean theContainedResource) throws IOException { + public void write( + RuntimeResourceDefinition theResDef, + IBaseResource theResource, + BaseJsonLikeWriter theEventWriter, + EncodeContext theEncodeContext, + boolean theContainedResource) + throws IOException { if (myUndeclaredExtension != null) { - writeUndeclaredExtension(theResDef, theResource, theEventWriter, myUndeclaredExtension, theEncodeContext, theContainedResource); + writeUndeclaredExtension( + theResDef, + theResource, + theEventWriter, + myUndeclaredExtension, + theEncodeContext, + theContainedResource); } else { theEventWriter.beginObject(); @@ -1484,7 +1869,8 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { * * See #327 */ - List preProcessedValue = preProcessValues(myDef, theResource, Collections.singletonList(myValue), myChildElem, theEncodeContext); + List preProcessedValue = preProcessValues( + myDef, theResource, Collections.singletonList(myValue), myChildElem, theEncodeContext); // // Check for undeclared extensions on the declared extension // // (grrrrrr....) 
@@ -1503,18 +1889,52 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { BaseRuntimeElementDefinition def = myDef.getChildElementDefinitionByDatatype(myValue.getClass()); if (def.getChildType() == ChildTypeEnum.RESOURCE_BLOCK) { - extractAndWriteExtensionsAsDirectChild(myValue, theEventWriter, def, theResDef, theResource, myChildElem, null, theEncodeContext, theContainedResource); + extractAndWriteExtensionsAsDirectChild( + myValue, + theEventWriter, + def, + theResDef, + theResource, + myChildElem, + null, + theEncodeContext, + theContainedResource); } else { String childName = myDef.getChildNameByDatatype(myValue.getClass()); - encodeChildElementToStreamWriter(theResDef, theResource, theEventWriter, myValue, def, childName, false, myParent, false, theEncodeContext); - managePrimitiveExtension(myValue, theResDef, theResource, theEventWriter, def, childName, theEncodeContext, theContainedResource); + encodeChildElementToStreamWriter( + theResDef, + theResource, + theEventWriter, + myValue, + def, + childName, + false, + myParent, + false, + theEncodeContext); + managePrimitiveExtension( + myValue, + theResDef, + theResource, + theEventWriter, + def, + childName, + theEncodeContext, + theContainedResource); } theEventWriter.endObject(); } } - private void writeUndeclaredExtension(RuntimeResourceDefinition theResDef, IBaseResource theResource, BaseJsonLikeWriter theEventWriter, IBaseExtension ext, EncodeContext theEncodeContext, boolean theContainedResource) throws IOException { + private void writeUndeclaredExtension( + RuntimeResourceDefinition theResDef, + IBaseResource theResource, + BaseJsonLikeWriter theEventWriter, + IBaseExtension ext, + EncodeContext theEncodeContext, + boolean theContainedResource) + throws IOException { IBase value = ext.getValue(); final String extensionUrl = getExtensionUrl(ext.getUrl()); @@ -1558,10 +1978,15 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { } for (Object next : ext.getExtension()) { - writeUndeclaredExtension(theResDef, theResource, theEventWriter, (IBaseExtension) next, theEncodeContext, theContainedResource); + writeUndeclaredExtension( + theResDef, + theResource, + theEventWriter, + (IBaseExtension) next, + theEncodeContext, + theContainedResource); } theEventWriter.endArray(); - } // Write value @@ -1572,19 +1997,46 @@ public class JsonParser extends BaseParser implements IJsonLikeParser { * Pre-process value - This is called in case the value is a reference * since we might modify the text */ - value = preProcessValues(myDef, theResource, Collections.singletonList(value), myChildElem, theEncodeContext).get(0); + value = preProcessValues( + myDef, theResource, Collections.singletonList(value), myChildElem, theEncodeContext) + .get(0); - RuntimeChildUndeclaredExtensionDefinition extDef = getContext().getRuntimeChildUndeclaredExtensionDefinition(); + RuntimeChildUndeclaredExtensionDefinition extDef = + getContext().getRuntimeChildUndeclaredExtensionDefinition(); String childName = extDef.getChildNameByDatatype(value.getClass()); if (childName == null) { - childName = "value" + WordUtils.capitalize(getContext().getElementDefinition(value.getClass()).getName()); + childName = "value" + + WordUtils.capitalize(getContext() + .getElementDefinition(value.getClass()) + .getName()); } - BaseRuntimeElementDefinition childDef = extDef.getChildElementDefinitionByDatatype(value.getClass()); + BaseRuntimeElementDefinition childDef = + extDef.getChildElementDefinitionByDatatype(value.getClass()); if (childDef 
== null) { - throw new ConfigurationException(Msg.code(1844) + "Unable to encode extension, unrecognized child element type: " + value.getClass().getCanonicalName()); + throw new ConfigurationException( + Msg.code(1844) + "Unable to encode extension, unrecognized child element type: " + + value.getClass().getCanonicalName()); } - encodeChildElementToStreamWriter(theResDef, theResource, theEventWriter, value, childDef, childName, false, myParent, false, theEncodeContext); - managePrimitiveExtension(value, theResDef, theResource, theEventWriter, childDef, childName, theEncodeContext, theContainedResource); + encodeChildElementToStreamWriter( + theResDef, + theResource, + theEventWriter, + value, + childDef, + childName, + false, + myParent, + false, + theEncodeContext); + managePrimitiveExtension( + value, + theResDef, + theResource, + theEventWriter, + childDef, + childName, + theEncodeContext, + theContainedResource); theEncodeContext.popPath(); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/LenientErrorHandler.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/LenientErrorHandler.java index d159f1e999b..29acc761be0 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/LenientErrorHandler.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/LenientErrorHandler.java @@ -33,7 +33,7 @@ import static org.apache.commons.lang3.StringUtils.isBlank; * can lead to data loss (since invalid values are silently ignored). See * {@link #setErrorOnInvalidValue(boolean)} for information on this. *
    - * + * * @see IParser#setParserErrorHandler(IParserErrorHandler) * @see FhirContext#setParserErrorHandler(IParserErrorHandler) * @@ -61,7 +61,7 @@ public class LenientErrorHandler extends ParseErrorHandler implements IParserErr /** * Constructor - * + * * @param theLogErrors * Should errors be logged? * @since 1.2 @@ -78,10 +78,17 @@ public class LenientErrorHandler extends ParseErrorHandler implements IParserErr } @Override - public void incorrectJsonType(IParseLocation theLocation, String theElementName, ValueType theExpected, ScalarType theExpectedScalarType, ValueType theFound, ScalarType theFoundScalarType) { + public void incorrectJsonType( + IParseLocation theLocation, + String theElementName, + ValueType theExpected, + ScalarType theExpectedScalarType, + ValueType theFound, + ScalarType theFoundScalarType) { if (myLogErrors) { if (ourLog.isWarnEnabled()) { - String message = createIncorrectJsonTypeMessage(theElementName, theExpected, theExpectedScalarType, theFound, theFoundScalarType); + String message = createIncorrectJsonTypeMessage( + theElementName, theExpected, theExpectedScalarType, theFound, theFoundScalarType); ourLog.warn(message); } } @@ -106,7 +113,7 @@ public class LenientErrorHandler extends ParseErrorHandler implements IParserErr * Note that empty values (e.g. "") will not lead to an error when this is set to * true, only invalid values (e.g. a gender code of foo) *
    - * + * * @see #setErrorOnInvalidValue(boolean) */ public boolean isErrorOnInvalidValue() { @@ -139,7 +146,7 @@ public class LenientErrorHandler extends ParseErrorHandler implements IParserErr * Note that empty values (e.g. "") will not lead to an error when this is set to * true, only invalid values (e.g. a gender code of foo) *
    - * + * * @return Returns a reference to this for easy method chaining * @see #isErrorOnInvalidValue() */ @@ -173,14 +180,17 @@ public class LenientErrorHandler extends ParseErrorHandler implements IParserErr @Override public void unexpectedRepeatingElement(IParseLocation theLocation, String theElementName) { if (myLogErrors) { - ourLog.warn("{}Multiple repetitions of non-repeatable element '{}' found while parsing", describeLocation(theLocation), theElementName); + ourLog.warn( + "{}Multiple repetitions of non-repeatable element '{}' found while parsing", + describeLocation(theLocation), + theElementName); } } @Override public void unknownAttribute(IParseLocation theLocation, String theElementName) { if (myLogErrors) { - ourLog.warn("{}Unknown attribute '{}' found while parsing",describeLocation(theLocation), theElementName); + ourLog.warn("{}Unknown attribute '{}' found while parsing", describeLocation(theLocation), theElementName); } } @@ -199,7 +209,7 @@ public class LenientErrorHandler extends ParseErrorHandler implements IParserErr } @Override - public void extensionContainsValueAndNestedExtensions(IParseLocation theLocation){ + public void extensionContainsValueAndNestedExtensions(IParseLocation theLocation) { if (myErrorOnInvalidExtension) { STRICT_ERROR_HANDLER.extensionContainsValueAndNestedExtensions(theLocation); } else if (myLogErrors) { @@ -207,7 +217,12 @@ public class LenientErrorHandler extends ParseErrorHandler implements IParserErr } } - public static String createIncorrectJsonTypeMessage(String theElementName, ValueType theExpected, ScalarType theExpectedScalarType, ValueType theFound, ScalarType theFoundScalarType) { + public static String createIncorrectJsonTypeMessage( + String theElementName, + ValueType theExpected, + ScalarType theExpectedScalarType, + ValueType theFound, + ScalarType theFoundScalarType) { StringBuilder b = new StringBuilder(); b.append("Found incorrect type for element "); b.append(theElementName); @@ -228,5 +243,4 @@ public class LenientErrorHandler extends ParseErrorHandler implements IParserErr String message = b.toString(); return message; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/NDJsonParser.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/NDJsonParser.java index f44a81851b0..9b8c17bb7d4 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/NDJsonParser.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/NDJsonParser.java @@ -33,7 +33,6 @@ import java.io.Reader; import java.io.Writer; import java.util.List; - /** * This class is the FHIR NDJSON parser/encoder. Users should not interact with this class directly, but should use * {@link FhirContext#newNDJsonParser()} to get an instance. 
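As an illustrative sketch (not part of the patch), obtaining the NDJSON parser through FhirContext#newNDJsonParser() and round-tripping a collection Bundle might look like the following; it assumes the R4 structures module is available.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.util.BundleBuilder;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.r4.model.Patient;

public class NdJsonRoundTripSketch {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();

		// Build a collection Bundle with a single entry.
		BundleBuilder builder = new BundleBuilder(ctx);
		builder.setType("collection");
		builder.addCollectionEntry(new Patient().setId("example"));

		// Encode: one JSON resource per line, newline-separated, no trailing newline.
		IParser ndJsonParser = ctx.newNDJsonParser();
		String ndJson = ndJsonParser.encodeResourceToString(builder.getBundle());

		// Parse back: each line becomes an entry in a collection Bundle.
		IBaseBundle roundTripped = (IBaseBundle) ndJsonParser.parseResource(ndJson);
		System.out.println(ndJson);
		System.out.println(roundTripped.fhirType());
	}
}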
@@ -42,8 +41,8 @@ public class NDJsonParser extends BaseParser { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(NDJsonParser.class); - private IParser myJsonParser; - private FhirContext myFhirContext; + private IParser myJsonParser; + private FhirContext myFhirContext; /** * Do not use this constructor, the recommended way to obtain a new instance of the NDJSON parser is to invoke @@ -53,69 +52,73 @@ public class NDJsonParser extends BaseParser { */ public NDJsonParser(FhirContext theContext, IParserErrorHandler theParserErrorHandler) { super(theContext, theParserErrorHandler); - myFhirContext = theContext; + myFhirContext = theContext; - myJsonParser = theContext.newJsonParser(); - } - - @Override - public IParser setPrettyPrint(boolean thePrettyPrint) { - myJsonParser.setPrettyPrint(thePrettyPrint); - return this; - } - - @Override - public EncodingEnum getEncoding() { - return EncodingEnum.NDJSON; - } - - @Override - protected void doEncodeResourceToWriter(IBaseResource theResource, Writer theWriter, EncodeContext theEncodeContext) throws IOException { - // We only encode bundles to NDJSON. - if (!(IBaseBundle.class.isAssignableFrom(theResource.getClass()))) { - throw new IllegalArgumentException(Msg.code(1833) + "NDJsonParser can only encode Bundle types. Received " + theResource.getClass().getName()); - } - - // Ok, convert the bundle to a list of resources. - List theBundleResources = BundleUtil.toListOfResources(myFhirContext, (IBaseBundle) theResource); - - // Now we write each one in turn. - // Use newline only as a line separator, not at the end of the file. - boolean isFirstResource = true; - for (IBaseResource theBundleEntryResource : theBundleResources) { - if (!(isFirstResource)) { - theWriter.write("\n"); - } - isFirstResource = false; - - myJsonParser.encodeResourceToWriter(theBundleEntryResource, theWriter); - } + myJsonParser = theContext.newJsonParser(); } @Override - public T doParseResource(Class theResourceType, Reader theReader) throws DataFormatException { - // We can only parse to bundles. - if ((theResourceType != null) && (!(IBaseBundle.class.isAssignableFrom(theResourceType)))) { - throw new DataFormatException(Msg.code(1834) + "NDJsonParser can only parse to Bundle types. Received " + theResourceType.getName()); - } + public IParser setPrettyPrint(boolean thePrettyPrint) { + myJsonParser.setPrettyPrint(thePrettyPrint); + return this; + } - try { - // Now we go through line-by-line parsing the JSON and then stuffing it into a bundle. - BundleBuilder myBuilder = new BundleBuilder(myFhirContext); - myBuilder.setType("collection"); - BufferedReader myBufferedReader = new BufferedReader(theReader); - String jsonString = myBufferedReader.readLine(); - while (jsonString != null) { - // And add it to a collection in a Bundle. - // The string must be trimmed, as per the NDJson spec 3.2 - myBuilder.addCollectionEntry(myJsonParser.parseResource(jsonString.trim())); - // Try to read another line. - jsonString = myBufferedReader.readLine(); - } + @Override + public EncodingEnum getEncoding() { + return EncodingEnum.NDJSON; + } - return (T) myBuilder.getBundle(); - } catch (IOException err) { - throw new DataFormatException(Msg.code(1835) + err.getMessage()); - } + @Override + protected void doEncodeResourceToWriter(IBaseResource theResource, Writer theWriter, EncodeContext theEncodeContext) + throws IOException { + // We only encode bundles to NDJSON. 
+ if (!(IBaseBundle.class.isAssignableFrom(theResource.getClass()))) { + throw new IllegalArgumentException(Msg.code(1833) + "NDJsonParser can only encode Bundle types. Received " + + theResource.getClass().getName()); + } + + // Ok, convert the bundle to a list of resources. + List theBundleResources = BundleUtil.toListOfResources(myFhirContext, (IBaseBundle) theResource); + + // Now we write each one in turn. + // Use newline only as a line separator, not at the end of the file. + boolean isFirstResource = true; + for (IBaseResource theBundleEntryResource : theBundleResources) { + if (!(isFirstResource)) { + theWriter.write("\n"); + } + isFirstResource = false; + + myJsonParser.encodeResourceToWriter(theBundleEntryResource, theWriter); + } + } + + @Override + public T doParseResource(Class theResourceType, Reader theReader) + throws DataFormatException { + // We can only parse to bundles. + if ((theResourceType != null) && (!(IBaseBundle.class.isAssignableFrom(theResourceType)))) { + throw new DataFormatException(Msg.code(1834) + "NDJsonParser can only parse to Bundle types. Received " + + theResourceType.getName()); + } + + try { + // Now we go through line-by-line parsing the JSON and then stuffing it into a bundle. + BundleBuilder myBuilder = new BundleBuilder(myFhirContext); + myBuilder.setType("collection"); + BufferedReader myBufferedReader = new BufferedReader(theReader); + String jsonString = myBufferedReader.readLine(); + while (jsonString != null) { + // And add it to a collection in a Bundle. + // The string must be trimmed, as per the NDJson spec 3.2 + myBuilder.addCollectionEntry(myJsonParser.parseResource(jsonString.trim())); + // Try to read another line. + jsonString = myBufferedReader.readLine(); + } + + return (T) myBuilder.getBundle(); + } catch (IOException err) { + throw new DataFormatException(Msg.code(1835) + err.getMessage()); + } } } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/ParseErrorHandler.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/ParseErrorHandler.java index b28c00e5538..3bd637899b0 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/ParseErrorHandler.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/ParseErrorHandler.java @@ -28,6 +28,4 @@ class ParseErrorHandler { return theLocation.toString() + " "; } } - } - diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/ParserState.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/ParserState.java index 544945159d7..4ebe4b3ea62 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/ParserState.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/ParserState.java @@ -72,8 +72,6 @@ import org.hl7.fhir.instance.model.api.ICompositeType; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.xml.stream.events.StartElement; -import javax.xml.stream.events.XMLEvent; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -81,6 +79,8 @@ import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import javax.xml.stream.events.StartElement; +import javax.xml.stream.events.XMLEvent; import static org.apache.commons.lang3.StringUtils.defaultIfBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -100,7 +100,8 @@ class ParserState { private List myGlobalResources = new ArrayList<>(); private List myGlobalReferences = new ArrayList<>(); - private ParserState(IParser theParser, FhirContext theContext, boolean 
theJsonMode, IParserErrorHandler theErrorHandler) { + private ParserState( + IParser theParser, FhirContext theContext, boolean theJsonMode, IParserErrorHandler theErrorHandler) { myParser = theParser; myContext = theContext; myJsonMode = theJsonMode; @@ -136,7 +137,8 @@ class ParserState { myState.enteringNewElement(theNamespaceUri, theName); } - void enteringNewElementExtension(StartElement theElem, String theUrlAttr, boolean theIsModifier, final String baseServerUrl) { + void enteringNewElementExtension( + StartElement theElem, String theUrlAttr, boolean theIsModifier, final String baseServerUrl) { myState.enteringNewElementExtension(theElem, theUrlAttr, theIsModifier, baseServerUrl); } @@ -153,7 +155,10 @@ class ParserState { } private Object newContainedDt(IResource theTarget) { - return ReflectionUtil.newInstance(theTarget.getStructureFhirVersionEnum().getVersionImplementation().getContainedType()); + return ReflectionUtil.newInstance(theTarget + .getStructureFhirVersionEnum() + .getVersionImplementation() + .getContainedType()); } @SuppressWarnings("unchecked") @@ -197,8 +202,12 @@ class ParserState { return theDefinition.newInstance(); } - public ICompositeType newCompositeInstance(BasePreResourceState thePreResourceState, BaseRuntimeChildDefinition theChild, BaseRuntimeElementCompositeDefinition theCompositeTarget) { - ICompositeType retVal = (ICompositeType) theCompositeTarget.newInstance(theChild.getInstanceConstructorArguments()); + public ICompositeType newCompositeInstance( + BasePreResourceState thePreResourceState, + BaseRuntimeChildDefinition theChild, + BaseRuntimeElementCompositeDefinition theCompositeTarget) { + ICompositeType retVal = + (ICompositeType) theCompositeTarget.newInstance(theChild.getInstanceConstructorArguments()); if (retVal instanceof IBaseReference) { IBaseReference ref = (IBaseReference) retVal; myGlobalReferences.add(ref); @@ -207,7 +216,8 @@ class ParserState { return retVal; } - public ICompositeType newCompositeTypeInstance(BasePreResourceState thePreResourceState, BaseRuntimeElementCompositeDefinition theCompositeTarget) { + public ICompositeType newCompositeTypeInstance( + BasePreResourceState thePreResourceState, BaseRuntimeElementCompositeDefinition theCompositeTarget) { ICompositeType retVal = (ICompositeType) theCompositeTarget.newInstance(); if (retVal instanceof IBaseReference) { IBaseReference ref = (IBaseReference) retVal; @@ -217,11 +227,16 @@ class ParserState { return retVal; } - public IPrimitiveType newPrimitiveInstance(RuntimeChildDeclaredExtensionDefinition theDefinition, RuntimePrimitiveDatatypeDefinition thePrimitiveTarget) { + public IPrimitiveType newPrimitiveInstance( + RuntimeChildDeclaredExtensionDefinition theDefinition, + RuntimePrimitiveDatatypeDefinition thePrimitiveTarget) { return thePrimitiveTarget.newInstance(theDefinition.getInstanceConstructorArguments()); } - public IPrimitiveType getPrimitiveInstance(BaseRuntimeChildDefinition theChild, RuntimePrimitiveDatatypeDefinition thePrimitiveTarget, String theChildName) { + public IPrimitiveType getPrimitiveInstance( + BaseRuntimeChildDefinition theChild, + RuntimePrimitiveDatatypeDefinition thePrimitiveTarget, + String theChildName) { return thePrimitiveTarget.newInstance(theChild.getInstanceConstructorArguments()); } @@ -283,7 +298,8 @@ class ParserState { * Default implementation just handles undeclared extensions */ @SuppressWarnings("unused") - public void enteringNewElementExtension(StartElement theElement, String theUrlAttr, boolean theIsModifier, final String 
baseServerUrl) { + public void enteringNewElementExtension( + StartElement theElement, String theUrlAttr, boolean theIsModifier, final String baseServerUrl) { if (myPreResourceState != null && getCurrentElement() instanceof ISupportsUndeclaredExtensions) { ExtensionDt newExtension = new ExtensionDt(theIsModifier); newExtension.setUrl(theUrlAttr); @@ -303,7 +319,8 @@ class ParserState { } } else { if (getCurrentElement() instanceof IBaseHasModifierExtensions) { - IBaseExtension ext = ((IBaseHasModifierExtensions) getCurrentElement()).addModifierExtension(); + IBaseExtension ext = + ((IBaseHasModifierExtensions) getCurrentElement()).addModifierExtension(); ext.setUrl(theUrlAttr); ParserState.ExtensionState newState = new ExtensionState(myPreResourceState, ext); push(newState); @@ -352,7 +369,6 @@ class ParserState { public void xmlEvent(XMLEvent theNextEvent) { // ignore } - } private class ContainedResourcesStateHapi extends BasePreResourceState { @@ -390,10 +406,10 @@ class ParserState { IResource preResCurrentElement = (IResource) getPreResourceState().getCurrentElement(); @SuppressWarnings("unchecked") - List containedResources = (List) preResCurrentElement.getContained().getContainedResources(); + List containedResources = + (List) preResCurrentElement.getContained().getContainedResources(); containedResources.add(res); } - } private class ContainedResourcesStateHl7Org extends BasePreResourceState { @@ -424,14 +440,15 @@ class ParserState { myErrorHandler.containedResourceWithNoId(null); } else { res.getIdElement().setValue('#' + res.getIdElement().getIdPart()); - getPreResourceState().getContainedResources().put(res.getIdElement().getValue(), res); + getPreResourceState() + .getContainedResources() + .put(res.getIdElement().getValue(), res); } IBaseResource preResCurrentElement = getPreResourceState().getCurrentElement(); RuntimeResourceDefinition def = myContext.getResourceDefinition(preResCurrentElement); def.getChildByName("contained").getMutator().addValue(preResCurrentElement, res); } - } @SuppressWarnings("EnumSwitchStatementWhichMissesCases") @@ -442,7 +459,10 @@ class ParserState { private IBase myParentInstance; private BasePreResourceState myPreResourceState; - public DeclaredExtensionState(BasePreResourceState thePreResourceState, RuntimeChildDeclaredExtensionDefinition theDefinition, IBase theParentInstance) { + public DeclaredExtensionState( + BasePreResourceState thePreResourceState, + RuntimeChildDeclaredExtensionDefinition theDefinition, + IBase theParentInstance) { super(thePreResourceState); myPreResourceState = thePreResourceState; myDefinition = theDefinition; @@ -474,10 +494,13 @@ class ParserState { switch (target.getChildType()) { case COMPOSITE_DATATYPE: { - BaseRuntimeElementCompositeDefinition compositeTarget = (BaseRuntimeElementCompositeDefinition) target; - ICompositeType newChildInstance = newCompositeInstance(getPreResourceState(), myDefinition, compositeTarget); + BaseRuntimeElementCompositeDefinition compositeTarget = + (BaseRuntimeElementCompositeDefinition) target; + ICompositeType newChildInstance = + newCompositeInstance(getPreResourceState(), myDefinition, compositeTarget); myDefinition.getMutator().addValue(myParentInstance, newChildInstance); - ElementCompositeState newState = new ElementCompositeState(myPreResourceState, theLocalPart, compositeTarget, newChildInstance); + ElementCompositeState newState = new ElementCompositeState( + myPreResourceState, theLocalPart, compositeTarget, newChildInstance); push(newState); return; } @@ -486,7 +509,8 
@@ class ParserState { RuntimePrimitiveDatatypeDefinition primitiveTarget = (RuntimePrimitiveDatatypeDefinition) target; IPrimitiveType newChildInstance = newPrimitiveInstance(myDefinition, primitiveTarget); myDefinition.getMutator().addValue(myParentInstance, newChildInstance); - PrimitiveState newState = new PrimitiveState(getPreResourceState(), newChildInstance, theLocalPart, primitiveTarget.getName()); + PrimitiveState newState = new PrimitiveState( + getPreResourceState(), newChildInstance, theLocalPart, primitiveTarget.getName()); push(newState); return; } @@ -501,26 +525,27 @@ class ParserState { } @Override - public void enteringNewElementExtension(StartElement theElement, String theUrlAttr, boolean theIsModifier, final String baseServerUrl) { - RuntimeChildDeclaredExtensionDefinition declaredExtension = myDefinition.getChildExtensionForUrl(theUrlAttr); + public void enteringNewElementExtension( + StartElement theElement, String theUrlAttr, boolean theIsModifier, final String baseServerUrl) { + RuntimeChildDeclaredExtensionDefinition declaredExtension = + myDefinition.getChildExtensionForUrl(theUrlAttr); if (declaredExtension != null) { if (myChildInstance == null) { myChildInstance = newInstance(myDefinition); myDefinition.getMutator().addValue(myParentInstance, myChildInstance); } - BaseState newState = new DeclaredExtensionState(getPreResourceState(), declaredExtension, myChildInstance); + BaseState newState = + new DeclaredExtensionState(getPreResourceState(), declaredExtension, myChildInstance); push(newState); } else { super.enteringNewElementExtension(theElement, theUrlAttr, theIsModifier, baseServerUrl); } } - @Override protected IBase getCurrentElement() { return myParentInstance; } - } private class ElementCompositeState extends BaseState { @@ -530,7 +555,11 @@ class ParserState { private final Set myParsedNonRepeatableNames = new HashSet<>(); private final String myElementName; - ElementCompositeState(BasePreResourceState thePreResourceState, String theElementName, BaseRuntimeElementCompositeDefinition theDef, IBase theInstance) { + ElementCompositeState( + BasePreResourceState thePreResourceState, + String theElementName, + BaseRuntimeElementCompositeDefinition theDef, + IBase theInstance) { super(thePreResourceState); myDefinition = theDef; myInstance = theInstance; @@ -547,7 +576,8 @@ class ParserState { } } else { if (myJsonMode) { - myErrorHandler.incorrectJsonType(null, myElementName, ValueType.OBJECT, null, ValueType.SCALAR, ScalarType.STRING); + myErrorHandler.incorrectJsonType( + null, myElementName, ValueType.OBJECT, null, ValueType.SCALAR, ScalarType.STRING); } else { myErrorHandler.unknownAttribute(null, theName); } @@ -574,7 +604,8 @@ class ParserState { if (child == null) { if (theChildName.equals("id")) { if (getCurrentElement() instanceof IIdentifiableElement) { - push(new IdentifiableElementIdState(getPreResourceState(), (IIdentifiableElement) getCurrentElement())); + push(new IdentifiableElementIdState( + getPreResourceState(), (IIdentifiableElement) getCurrentElement())); return; } } @@ -596,7 +627,7 @@ class ParserState { } else { nameToCheck = theChildName; } - if(!myParsedNonRepeatableNames.add(nameToCheck)) { + if (!myParsedNonRepeatableNames.add(nameToCheck)) { myErrorHandler.unexpectedRepeatingElement(null, nameToCheck); push(new SwallowChildrenWholeState(getPreResourceState())); return; @@ -606,15 +637,20 @@ class ParserState { BaseRuntimeElementDefinition target = child.getChildByName(theChildName); if (target == null) { // This is a bug with 
the structures and shouldn't happen.. - throw new DataFormatException(Msg.code(1809) + "Found unexpected element '" + theChildName + "' in parent element '" + myDefinition.getName() + "'. Valid names are: " + child.getValidChildNames()); + throw new DataFormatException( + Msg.code(1809) + "Found unexpected element '" + theChildName + "' in parent element '" + + myDefinition.getName() + "'. Valid names are: " + child.getValidChildNames()); } switch (target.getChildType()) { case COMPOSITE_DATATYPE: { - BaseRuntimeElementCompositeDefinition compositeTarget = (BaseRuntimeElementCompositeDefinition) target; - ICompositeType newChildInstance = newCompositeInstance(getPreResourceState(), child, compositeTarget); + BaseRuntimeElementCompositeDefinition compositeTarget = + (BaseRuntimeElementCompositeDefinition) target; + ICompositeType newChildInstance = + newCompositeInstance(getPreResourceState(), child, compositeTarget); child.getMutator().addValue(myInstance, newChildInstance); - ParserState.ElementCompositeState newState = new ElementCompositeState(getPreResourceState(), theChildName, compositeTarget, newChildInstance); + ParserState.ElementCompositeState newState = new ElementCompositeState( + getPreResourceState(), theChildName, compositeTarget, newChildInstance); push(newState); return; } @@ -624,7 +660,8 @@ class ParserState { IPrimitiveType newChildInstance; newChildInstance = getPrimitiveInstance(child, primitiveTarget, theChildName); child.getMutator().addValue(myInstance, newChildInstance); - PrimitiveState newState = new PrimitiveState(getPreResourceState(), newChildInstance, theChildName, primitiveTarget.getName()); + PrimitiveState newState = new PrimitiveState( + getPreResourceState(), newChildInstance, theChildName, primitiveTarget.getName()); push(newState); return; } @@ -632,12 +669,14 @@ class ParserState { RuntimeResourceBlockDefinition blockTarget = (RuntimeResourceBlockDefinition) target; IBase newBlockInstance = newInstance(blockTarget); child.getMutator().addValue(myInstance, newBlockInstance); - ElementCompositeState newState = new ElementCompositeState(getPreResourceState(), theChildName, blockTarget, newBlockInstance); + ElementCompositeState newState = new ElementCompositeState( + getPreResourceState(), theChildName, blockTarget, newBlockInstance); push(newState); return; } case PRIMITIVE_XHTML: { - RuntimePrimitiveDatatypeNarrativeDefinition xhtmlTarget = (RuntimePrimitiveDatatypeNarrativeDefinition) target; + RuntimePrimitiveDatatypeNarrativeDefinition xhtmlTarget = + (RuntimePrimitiveDatatypeNarrativeDefinition) target; XhtmlDt newDt = newInstance(xhtmlTarget); child.getMutator().addValue(myInstance, newDt); XhtmlState state = new XhtmlState(getPreResourceState(), newDt, true); @@ -645,7 +684,8 @@ class ParserState { return; } case PRIMITIVE_XHTML_HL7ORG: { - RuntimePrimitiveDatatypeXhtmlHl7OrgDefinition xhtmlTarget = (RuntimePrimitiveDatatypeXhtmlHl7OrgDefinition) target; + RuntimePrimitiveDatatypeXhtmlHl7OrgDefinition xhtmlTarget = + (RuntimePrimitiveDatatypeXhtmlHl7OrgDefinition) target; IBaseXhtml newDt = newInstance(xhtmlTarget); child.getMutator().addValue(myInstance, newDt); XhtmlStateHl7Org state = new XhtmlStateHl7Org(getPreResourceState(), newDt); @@ -668,11 +708,15 @@ class ParserState { return; } case RESOURCE: { - if (myInstance instanceof IAnyResource || myInstance instanceof IBaseBackboneElement || myInstance instanceof IBaseElement) { - ParserState.PreResourceStateHl7Org state = new PreResourceStateHl7Org(myInstance, child.getMutator(), null); + if 
(myInstance instanceof IAnyResource + || myInstance instanceof IBaseBackboneElement + || myInstance instanceof IBaseElement) { + ParserState.PreResourceStateHl7Org state = + new PreResourceStateHl7Org(myInstance, child.getMutator(), null); push(state); } else { - ParserState.PreResourceStateHapi state = new PreResourceStateHapi(myInstance, child.getMutator(), null); + ParserState.PreResourceStateHapi state = + new PreResourceStateHapi(myInstance, child.getMutator(), null); push(state); } return; @@ -688,8 +732,10 @@ class ParserState { } @Override - public void enteringNewElementExtension(StartElement theElement, String theUrlAttr, boolean theIsModifier, final String baseServerUrl) { - RuntimeChildDeclaredExtensionDefinition declaredExtension = myDefinition.getDeclaredExtension(theUrlAttr, baseServerUrl); + public void enteringNewElementExtension( + StartElement theElement, String theUrlAttr, boolean theIsModifier, final String baseServerUrl) { + RuntimeChildDeclaredExtensionDefinition declaredExtension = + myDefinition.getDeclaredExtension(theUrlAttr, baseServerUrl); if (declaredExtension != null) { BaseState newState = new DeclaredExtensionState(getPreResourceState(), declaredExtension, myInstance); push(newState); @@ -702,7 +748,6 @@ class ParserState { protected IBase getCurrentElement() { return myInstance; } - } public class ElementIdState extends BaseState { @@ -723,7 +768,6 @@ class ParserState { public void endingElement() { pop(); } - } private class ExtensionState extends BaseState { @@ -757,7 +801,8 @@ class ParserState { @Override public void endingElement() throws DataFormatException { if (myExtension.getValue() != null && myExtension.getExtension().size() > 0) { - throw new DataFormatException(Msg.code(1811) + "Extension (URL='" + myExtension.getUrl() + "') must not have both a value and other contained extensions"); + throw new DataFormatException(Msg.code(1811) + "Extension (URL='" + myExtension.getUrl() + + "') must not have both a value and other contained extensions"); } pop(); } @@ -769,29 +814,36 @@ class ParserState { push(new ElementIdState(getPreResourceState(), (IBaseElement) getCurrentElement())); return; } else if (getCurrentElement() instanceof IIdentifiableElement) { - push(new IdentifiableElementIdState(getPreResourceState(), (IIdentifiableElement) getCurrentElement())); + push(new IdentifiableElementIdState( + getPreResourceState(), (IIdentifiableElement) getCurrentElement())); return; } } - BaseRuntimeElementDefinition target = myContext.getRuntimeChildUndeclaredExtensionDefinition().getChildByName(theLocalPart); + BaseRuntimeElementDefinition target = + myContext.getRuntimeChildUndeclaredExtensionDefinition().getChildByName(theLocalPart); if (target != null) { switch (target.getChildType()) { case COMPOSITE_DATATYPE: { - BaseRuntimeElementCompositeDefinition compositeTarget = (BaseRuntimeElementCompositeDefinition) target; - ICompositeType newChildInstance = newCompositeTypeInstance(getPreResourceState(), compositeTarget); + BaseRuntimeElementCompositeDefinition compositeTarget = + (BaseRuntimeElementCompositeDefinition) target; + ICompositeType newChildInstance = + newCompositeTypeInstance(getPreResourceState(), compositeTarget); myExtension.setValue(newChildInstance); - ElementCompositeState newState = new ElementCompositeState(getPreResourceState(), theLocalPart, compositeTarget, newChildInstance); + ElementCompositeState newState = new ElementCompositeState( + getPreResourceState(), theLocalPart, compositeTarget, newChildInstance); push(newState); 
return; } case ID_DATATYPE: case PRIMITIVE_DATATYPE: { - RuntimePrimitiveDatatypeDefinition primitiveTarget = (RuntimePrimitiveDatatypeDefinition) target; + RuntimePrimitiveDatatypeDefinition primitiveTarget = + (RuntimePrimitiveDatatypeDefinition) target; IPrimitiveType newChildInstance = newInstance(primitiveTarget); myExtension.setValue(newChildInstance); - PrimitiveState newState = new PrimitiveState(getPreResourceState(), newChildInstance, theLocalPart, primitiveTarget.getName()); + PrimitiveState newState = new PrimitiveState( + getPreResourceState(), newChildInstance, theLocalPart, primitiveTarget.getName()); push(newState); return; } @@ -816,7 +868,6 @@ class ParserState { protected IBaseExtension getCurrentElement() { return myExtension; } - } public class IdentifiableElementIdState extends BaseState { @@ -837,7 +888,6 @@ class ParserState { public void endingElement() { pop(); } - } private class MetaElementState extends BaseState { @@ -874,7 +924,8 @@ class ParserState { myMap.put(ResourceMetadataKeyEnum.SECURITY_LABELS, securityLabels); } IBase securityLabel = myContext.getVersion().newCodingDt(); - BaseRuntimeElementCompositeDefinition codinfDef = (BaseRuntimeElementCompositeDefinition) myContext.getElementDefinition(securityLabel.getClass()); + BaseRuntimeElementCompositeDefinition codinfDef = (BaseRuntimeElementCompositeDefinition) + myContext.getElementDefinition(securityLabel.getClass()); push(new SecurityLabelElementStateHapi(getPreResourceState(), codinfDef, securityLabel)); securityLabels.add(securityLabel); break; @@ -908,8 +959,10 @@ class ParserState { } @Override - public void enteringNewElementExtension(StartElement theElem, String theUrlAttr, boolean theIsModifier, final String baseServerUrl) { - ResourceMetadataKeyEnum.ExtensionResourceMetadataKey resourceMetadataKeyEnum = new ResourceMetadataKeyEnum.ExtensionResourceMetadataKey(theUrlAttr); + public void enteringNewElementExtension( + StartElement theElem, String theUrlAttr, boolean theIsModifier, final String baseServerUrl) { + ResourceMetadataKeyEnum.ExtensionResourceMetadataKey resourceMetadataKeyEnum = + new ResourceMetadataKeyEnum.ExtensionResourceMetadataKey(theUrlAttr); Object metadataValue = myMap.get(resourceMetadataKeyEnum); ExtensionDt newExtension; if (metadataValue == null) { @@ -917,7 +970,9 @@ class ParserState { } else if (metadataValue instanceof ExtensionDt) { newExtension = (ExtensionDt) metadataValue; } else { - throw new IllegalStateException(Msg.code(1812) + "Expected ExtensionDt as custom resource metadata type, got: " + metadataValue.getClass().getSimpleName()); + throw new IllegalStateException( + Msg.code(1812) + "Expected ExtensionDt as custom resource metadata type, got: " + + metadataValue.getClass().getSimpleName()); } newExtension.setUrl(theUrlAttr); myMap.put(resourceMetadataKeyEnum, newExtension); @@ -925,7 +980,6 @@ class ParserState { ExtensionState newState = new ExtensionState(getPreResourceState(), newExtension); push(newState); } - } private class MetaVersionElementState extends BaseState { @@ -952,7 +1006,6 @@ class ParserState { myErrorHandler.unknownElement(null, theLocalPart); push(new SwallowChildrenWholeState(getPreResourceState())); } - } private abstract class BasePreResourceState extends BaseState { @@ -962,12 +1015,14 @@ class ParserState { private IBaseResource myInstance; private FhirVersionEnum myParentVersion; private Class myResourceType; + BasePreResourceState(Class theResourceType) { super(null); myResourceType = theResourceType; myContainedResources = new 
HashMap<>(); if (theResourceType != null) { - myParentVersion = myContext.getResourceDefinition(theResourceType).getStructureVersion(); + myParentVersion = + myContext.getResourceDefinition(theResourceType).getStructureVersion(); } else { myParentVersion = myContext.getVersion().getVersion(); } @@ -1007,23 +1062,34 @@ class ParserState { definition = myContext.getResourceDefinition(myParentVersion, theLocalPart); } if ((definition == null)) { - throw new DataFormatException(Msg.code(1813) + "Element '" + theLocalPart + "' is not a known resource type, expected a resource at this position"); + throw new DataFormatException(Msg.code(1813) + "Element '" + theLocalPart + + "' is not a known resource type, expected a resource at this position"); } } else { definition = myContext.getResourceDefinition(myResourceType); if (!StringUtils.equals(theLocalPart, definition.getName())) { - throw new DataFormatException(Msg.code(1814) + myContext.getLocalizer().getMessage(ParserState.class, "wrongResourceTypeFound", definition.getName(), theLocalPart)); + throw new DataFormatException(Msg.code(1814) + + myContext + .getLocalizer() + .getMessage( + ParserState.class, + "wrongResourceTypeFound", + definition.getName(), + theLocalPart)); } } RuntimeResourceDefinition def = definition; - if (!definition.getName().equals(theLocalPart) && definition.getName().equalsIgnoreCase(theLocalPart)) { - throw new DataFormatException(Msg.code(1815) + "Unknown resource type '" + theLocalPart + "': Resource names are case sensitive, found similar name: '" + definition.getName() + "'"); + if (!definition.getName().equals(theLocalPart) + && definition.getName().equalsIgnoreCase(theLocalPart)) { + throw new DataFormatException(Msg.code(1815) + "Unknown resource type '" + theLocalPart + + "': Resource names are case sensitive, found similar name: '" + definition.getName() + "'"); } myInstance = newInstance(def); if (myInstance instanceof IResource) { - push(new ResourceStateHapi(getRootPreResourceState(), def, (IResource) myInstance, myContainedResources)); + push(new ResourceStateHapi( + getRootPreResourceState(), def, (IResource) myInstance, myContainedResources)); } else { push(new ResourceStateHl7Org(getRootPreResourceState(), def, myInstance)); } @@ -1069,7 +1135,11 @@ class ParserState { if (wantedProfileType != null && !wantedProfileType.equals(myInstance.getClass())) { if (myResourceType == null || myResourceType.isAssignableFrom(wantedProfileType)) { - ourLog.debug("Converting resource of type {} to type defined for profile \"{}\": {}", myInstance.getClass().getName(), usedProfile, wantedProfileType); + ourLog.debug( + "Converting resource of type {} to type defined for profile \"{}\": {}", + myInstance.getClass().getName(), + usedProfile, + wantedProfileType); /* * This isn't the most efficient thing really.. 
If we want a specific @@ -1084,7 +1154,8 @@ class ParserState { // Clean up the cached resources myGlobalResources.remove(myInstance); - myGlobalReferences.removeAll(t.getAllPopulatedChildElementsOfType(myInstance, IBaseReference.class)); + myGlobalReferences.removeAll( + t.getAllPopulatedChildElementsOfType(myInstance, IBaseReference.class)); IParser parser = myContext.newJsonParser(); String asString = parser.encodeResourceToString(myInstance); @@ -1092,7 +1163,8 @@ class ParserState { // Add newly created instance myGlobalResources.add(myInstance); - myGlobalReferences.addAll(t.getAllPopulatedChildElementsOfType(myInstance, IBaseReference.class)); + myGlobalReferences.addAll( + t.getAllPopulatedChildElementsOfType(myInstance, IBaseReference.class)); } } } @@ -1134,7 +1206,8 @@ class ParserState { for (IBaseReference nextRef : myGlobalReferences) { if (!nextRef.isEmpty() && nextRef.getReferenceElement() != null) { - IIdType unqualifiedVersionless = nextRef.getReferenceElement().toUnqualifiedVersionless(); + IIdType unqualifiedVersionless = + nextRef.getReferenceElement().toUnqualifiedVersionless(); IBaseResource target = idToResource.get(unqualifiedVersionless.getValueAsString()); // resource can already be filled with local contained resource by populateTarget() if (target != null && nextRef.getResource() == null) { @@ -1146,15 +1219,17 @@ class ParserState { /* * Set resource IDs based on Bundle.entry.request.url */ - List> urlsAndResources = BundleUtil.getBundleEntryUrlsAndResources(myContext, (IBaseBundle) myInstance); + List> urlsAndResources = + BundleUtil.getBundleEntryUrlsAndResources(myContext, (IBaseBundle) myInstance); for (Pair pair : urlsAndResources) { - if (pair.getRight() != null && isNotBlank(pair.getLeft()) && pair.getRight().getIdElement().isEmpty()) { + if (pair.getRight() != null + && isNotBlank(pair.getLeft()) + && pair.getRight().getIdElement().isEmpty()) { if (pair.getLeft().startsWith("urn:")) { pair.getRight().setId(pair.getLeft()); } } } - } } @@ -1173,21 +1248,18 @@ class ParserState { } } } - } @Override public void wereBack() { postProcess(); } - } private class PreResourceStateHapi extends BasePreResourceState { private IMutator myMutator; private IBase myTarget; - PreResourceStateHapi(Class theResourceType) { super(theResourceType); assert theResourceType == null || IResource.class.isAssignableFrom(theResourceType); @@ -1231,7 +1303,6 @@ class ParserState { // } } } - } private class PreResourceStateHl7Org extends BasePreResourceState { @@ -1269,14 +1340,17 @@ class ParserState { // Resource has no ID } else if (!elem.getIdElement().getIdPart().startsWith("urn:")) { if (StringUtils.isNotBlank(versionId)) { - elem.getIdElement().setValue(resourceName + "/" + elem.getIdElement().getIdPart() + "/_history/" + versionId); + elem.getIdElement() + .setValue(resourceName + "/" + + elem.getIdElement().getIdPart() + "/_history/" + versionId); } else { - elem.getIdElement().setValue(resourceName + "/" + elem.getIdElement().getIdPart()); + elem.getIdElement() + .setValue( + resourceName + "/" + elem.getIdElement().getIdPart()); } } } } - } private class PreTagListState extends BaseState { @@ -1296,7 +1370,8 @@ class ParserState { @Override public void enteringNewElement(String theNamespaceUri, String theLocalPart) throws DataFormatException { if (!TagList.ELEMENT_NAME_LC.equals(theLocalPart.toLowerCase())) { - throw new DataFormatException(Msg.code(1816) + "resourceType does not appear to be 'TagList', found: " + theLocalPart); + throw new DataFormatException( + 
Msg.code(1816) + "resourceType does not appear to be 'TagList', found: " + theLocalPart); } push(new TagListState(myTagList)); @@ -1311,7 +1386,6 @@ class ParserState { public boolean isPreResource() { return true; } - } private class PrimitiveState extends BaseState { @@ -1319,7 +1393,11 @@ class ParserState { private final String myTypeName; private IPrimitiveType myInstance; - PrimitiveState(BasePreResourceState thePreResourceState, IPrimitiveType theInstance, String theChildName, String theTypeName) { + PrimitiveState( + BasePreResourceState thePreResourceState, + IPrimitiveType theInstance, + String theChildName, + String theTypeName) { super(thePreResourceState); myInstance = theInstance; myChildName = theChildName; @@ -1390,14 +1468,17 @@ class ParserState { protected IBase getCurrentElement() { return myInstance; } - } private class ResourceStateHapi extends ElementCompositeState { private IResource myInstance; - public ResourceStateHapi(BasePreResourceState thePreResourceState, BaseRuntimeElementCompositeDefinition theDef, IResource theInstance, Map theContainedResources) { + public ResourceStateHapi( + BasePreResourceState thePreResourceState, + BaseRuntimeElementCompositeDefinition theDef, + IResource theInstance, + Map theContainedResources) { super(thePreResourceState, theDef.getName(), theDef, theInstance); myInstance = theInstance; } @@ -1416,15 +1497,20 @@ class ParserState { private class ResourceStateHl7Org extends ElementCompositeState { - ResourceStateHl7Org(BasePreResourceState thePreResourceState, BaseRuntimeElementCompositeDefinition theDef, IBaseResource theInstance) { + ResourceStateHl7Org( + BasePreResourceState thePreResourceState, + BaseRuntimeElementCompositeDefinition theDef, + IBaseResource theInstance) { super(thePreResourceState, theDef.getName(), theDef, theInstance); } - } private class SecurityLabelElementStateHapi extends ElementCompositeState { - SecurityLabelElementStateHapi(BasePreResourceState thePreResourceState, BaseRuntimeElementCompositeDefinition theDef, IBase codingDt) { + SecurityLabelElementStateHapi( + BasePreResourceState thePreResourceState, + BaseRuntimeElementCompositeDefinition theDef, + IBase codingDt) { super(thePreResourceState, theDef.getName(), theDef, codingDt); } @@ -1432,7 +1518,6 @@ class ParserState { public void endingElement() throws DataFormatException { pop(); } - } private class SwallowChildrenWholeState extends BaseState { @@ -1462,10 +1547,10 @@ class ParserState { } @Override - public void enteringNewElementExtension(StartElement theElement, String theUrlAttr, boolean theIsModifier, final String baseServerUrl) { + public void enteringNewElementExtension( + StartElement theElement, String theUrlAttr, boolean theIsModifier, final String baseServerUrl) { myDepth++; } - } private class TagListState extends BaseState { @@ -1495,7 +1580,6 @@ class ParserState { protected IBase getCurrentElement() { return myTagList; } - } private class TagState extends BaseState { @@ -1566,7 +1650,6 @@ class ParserState { throw new DataFormatException(Msg.code(1818) + "Unexpected element: " + theLocalPart); } } - } private class XhtmlState extends BaseState { @@ -1575,7 +1658,8 @@ class ParserState { private List myEvents = new ArrayList(); private boolean myIncludeOuterEvent; - private XhtmlState(BasePreResourceState thePreResourceState, XhtmlDt theXhtmlDt, boolean theIncludeOuterEvent) throws DataFormatException { + private XhtmlState(BasePreResourceState thePreResourceState, XhtmlDt theXhtmlDt, boolean theIncludeOuterEvent) + throws 
DataFormatException { super(thePreResourceState); myDepth = 0; myDt = theXhtmlDt; @@ -1640,7 +1724,6 @@ class ParserState { } } } - } private class XhtmlStateHl7Org extends XhtmlState { @@ -1659,14 +1742,18 @@ class ParserState { super.doPop(); } - } /** * @param theResourceType May be null */ - static ParserState getPreResourceInstance(IParser theParser, Class theResourceType, FhirContext theContext, boolean theJsonMode, IParserErrorHandler theErrorHandler) - throws DataFormatException { + static ParserState getPreResourceInstance( + IParser theParser, + Class theResourceType, + FhirContext theContext, + boolean theJsonMode, + IParserErrorHandler theErrorHandler) + throws DataFormatException { ParserState retVal = new ParserState(theParser, theContext, theJsonMode, theErrorHandler); if (theResourceType == null) { if (theContext.getVersion().getVersion().isRi()) { @@ -1684,10 +1771,10 @@ class ParserState { return retVal; } - static ParserState getPreTagListInstance(IParser theParser, FhirContext theContext, boolean theJsonMode, IParserErrorHandler theErrorHandler) { + static ParserState getPreTagListInstance( + IParser theParser, FhirContext theContext, boolean theJsonMode, IParserErrorHandler theErrorHandler) { ParserState retVal = new ParserState(theParser, theContext, theJsonMode, theErrorHandler); retVal.push(retVal.new PreTagListState()); return retVal; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/RDFParser.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/RDFParser.java index 2a814c510ad..ba6850d0a7d 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/RDFParser.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/RDFParser.java @@ -33,7 +33,6 @@ import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.IResource; import ca.uhn.fhir.narrative.INarrativeGenerator; import ca.uhn.fhir.rest.api.EncodingEnum; -import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.util.rdf.RDFUtil; import org.apache.commons.lang3.StringUtils; import org.apache.jena.datatypes.xsd.XSDDatatype; @@ -60,7 +59,6 @@ import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.INarrative; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import java.io.IOException; import java.io.Reader; import java.io.Writer; import java.util.Arrays; @@ -98,7 +96,8 @@ public class RDFParser extends BaseParser { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RDFParser.class); public static final String NODE_ROLE = "nodeRole"; - private static final List ignoredPredicates = Arrays.asList(RDF.type.getURI(), FHIR_NS+FHIR_INDEX, FHIR_NS + NODE_ROLE); + private static final List ignoredPredicates = + Arrays.asList(RDF.type.getURI(), FHIR_NS + FHIR_INDEX, FHIR_NS + NODE_ROLE); public static final String TREE_ROOT = "treeRoot"; public static final String RESOURCE_ID = "Resource.id"; public static final String ID = "id"; @@ -139,11 +138,12 @@ public class RDFParser extends BaseParser { * @param encodeContext encoding content from parent */ @Override - protected void doEncodeResourceToWriter(final IBaseResource resource, final Writer writer, final EncodeContext encodeContext) { + protected void doEncodeResourceToWriter( + final IBaseResource resource, final Writer writer, final EncodeContext encodeContext) { Model rdfModel = RDFUtil.initializeRDFModel(); // Establish the namespaces and prefixes needed - HashMap prefixes = new HashMap<>(); + HashMap prefixes = new HashMap<>(); prefixes.put(RDF_PREFIX, 
RDF_NS); prefixes.put(RDFS_PREFIX, RDFS_NS); prefixes.put(XSD_PREFIX, XSD_NS); @@ -170,17 +170,20 @@ public class RDFParser extends BaseParser { * @throws DataFormatException Exception that can be thrown from parser */ @Override - protected T doParseResource(final Class resourceType, final Reader reader) throws DataFormatException { + protected T doParseResource(final Class resourceType, final Reader reader) + throws DataFormatException { Model model = RDFUtil.readRDFToModel(reader, this.lang); return parseResource(resourceType, model); } - private Resource encodeResourceToRDFStreamWriter(final IBaseResource resource, - final Model rdfModel, - final boolean containedResource, - final IIdType resourceId, - final EncodeContext encodeContext, - final boolean rootResource, Resource parentResource) { + private Resource encodeResourceToRDFStreamWriter( + final IBaseResource resource, + final Model rdfModel, + final boolean containedResource, + final IIdType resourceId, + final EncodeContext encodeContext, + final boolean rootResource, + Resource parentResource) { RuntimeResourceDefinition resDef = getContext().getResourceDefinition(resource); if (resDef == null) { @@ -192,7 +195,8 @@ public class RDFParser extends BaseParser { } if (!(resource instanceof IAnyResource)) { - throw new IllegalStateException(Msg.code(1846) + "Unsupported resource found: " + resource.getClass().getName()); + throw new IllegalStateException(Msg.code(1846) + "Unsupported resource found: " + + resource.getClass().getName()); } // Create absolute IRI for the resource @@ -212,11 +216,14 @@ public class RDFParser extends BaseParser { parentResource = rdfModel.getResource(null); } else { - String resourceUri = IRIs.resolve(uriBase, resource.getIdElement().toUnqualified().toString()).toString(); + String resourceUri = IRIs.resolve( + uriBase, resource.getIdElement().toUnqualified().toString()) + .toString(); parentResource = rdfModel.getResource(resourceUri); } // If the resource already exists and has statements, return that existing resource. 
- if (parentResource != null && parentResource.listProperties().toList().size() > 0) { + if (parentResource != null + && parentResource.listProperties().toList().size() > 0) { return parentResource; } else if (parentResource == null) { return null; @@ -227,14 +234,24 @@ public class RDFParser extends BaseParser { // Only the top-level resource should have the nodeRole set to treeRoot if (rootResource) { - parentResource.addProperty(rdfModel.createProperty(FHIR_NS + NODE_ROLE), rdfModel.createProperty(FHIR_NS + TREE_ROOT)); + parentResource.addProperty( + rdfModel.createProperty(FHIR_NS + NODE_ROLE), rdfModel.createProperty(FHIR_NS + TREE_ROOT)); } if (resourceId != null && resourceId.getIdPart() != null) { - parentResource.addProperty(rdfModel.createProperty(FHIR_NS + RESOURCE_ID), createFhirValueBlankNode(rdfModel, resourceId.getIdPart())); + parentResource.addProperty( + rdfModel.createProperty(FHIR_NS + RESOURCE_ID), + createFhirValueBlankNode(rdfModel, resourceId.getIdPart())); } - encodeCompositeElementToStreamWriter(resource, resource, rdfModel, parentResource, containedResource, new CompositeChildElement(resDef, encodeContext), encodeContext); + encodeCompositeElementToStreamWriter( + resource, + resource, + rdfModel, + parentResource, + containedResource, + new CompositeChildElement(resDef, encodeContext), + encodeContext); return parentResource; } @@ -256,11 +273,15 @@ public class RDFParser extends BaseParser { * @param cardinalityIndex if a collection, this value is written as a fhir:index predicate * @return Blank node resource containing fhir:value (and possibly fhir:index) */ - private Resource createFhirValueBlankNode(Model rdfModel, String value, XSDDatatype xsdDataType, Integer cardinalityIndex) { - Resource fhirValueBlankNodeResource = rdfModel.createResource().addProperty(rdfModel.createProperty(FHIR_NS + VALUE), rdfModel.createTypedLiteral(value, xsdDataType)); + private Resource createFhirValueBlankNode( + Model rdfModel, String value, XSDDatatype xsdDataType, Integer cardinalityIndex) { + Resource fhirValueBlankNodeResource = rdfModel.createResource() + .addProperty(rdfModel.createProperty(FHIR_NS + VALUE), rdfModel.createTypedLiteral(value, xsdDataType)); if (cardinalityIndex != null && cardinalityIndex > -1) { - fhirValueBlankNodeResource.addProperty(rdfModel.createProperty(FHIR_NS + FHIR_INDEX), rdfModel.createTypedLiteral(cardinalityIndex, XSDDatatype.XSDinteger)); + fhirValueBlankNodeResource.addProperty( + rdfModel.createProperty(FHIR_NS + FHIR_INDEX), + rdfModel.createTypedLiteral(cardinalityIndex, XSDDatatype.XSDinteger)); } return fhirValueBlankNodeResource; } @@ -272,25 +293,29 @@ public class RDFParser extends BaseParser { * @param childName childName which been massaged for different data types * @return String of predicate name */ - private String constructPredicateName(IBaseResource resource, BaseRuntimeChildDefinition definition, String childName, IBase parentElement) { + private String constructPredicateName( + IBaseResource resource, BaseRuntimeChildDefinition definition, String childName, IBase parentElement) { String basePropertyName = FHIR_NS + resource.fhirType() + "." 
+ childName; String classBasedPropertyName; if (definition instanceof BaseRuntimeDeclaredChildDefinition) { - BaseRuntimeDeclaredChildDefinition declaredDef = (BaseRuntimeDeclaredChildDefinition)definition; + BaseRuntimeDeclaredChildDefinition declaredDef = (BaseRuntimeDeclaredChildDefinition) definition; Class declaringClass = declaredDef.getField().getDeclaringClass(); if (declaringClass != resource.getClass()) { String property = null; - if (IBaseBackboneElement.class.isAssignableFrom(declaringClass) || IBaseDatatypeElement.class.isAssignableFrom(declaringClass)) { + if (IBaseBackboneElement.class.isAssignableFrom(declaringClass) + || IBaseDatatypeElement.class.isAssignableFrom(declaringClass)) { if (classToFhirTypeMap.containsKey(declaringClass)) { property = classToFhirTypeMap.get(declaringClass); } else { try { - IBase elem = (IBase)declaringClass.getDeclaredConstructor().newInstance(); + IBase elem = (IBase) + declaringClass.getDeclaredConstructor().newInstance(); property = elem.fhirType(); classToFhirTypeMap.put(declaringClass, property); } catch (Exception ex) { - logger.debug("Error instantiating an " + declaringClass.getSimpleName() + " to retrieve its FhirType"); + logger.debug("Error instantiating an " + declaringClass.getSimpleName() + + " to retrieve its FhirType"); } } } else { @@ -307,14 +332,19 @@ public class RDFParser extends BaseParser { return basePropertyName; } - private Model encodeChildElementToStreamWriter(final IBaseResource resource, IBase parentElement, Model rdfModel, Resource rdfResource, - final BaseRuntimeChildDefinition childDefinition, - final IBase element, - final String childName, - final BaseRuntimeElementDefinition childDef, - final boolean includedResource, - final CompositeChildElement parent, - final EncodeContext encodeContext, final Integer cardinalityIndex) { + private Model encodeChildElementToStreamWriter( + final IBaseResource resource, + IBase parentElement, + Model rdfModel, + Resource rdfResource, + final BaseRuntimeChildDefinition childDefinition, + final IBase element, + final String childName, + final BaseRuntimeElementDefinition childDef, + final boolean includedResource, + final CompositeChildElement parent, + final EncodeContext encodeContext, + final Integer cardinalityIndex) { String childGenericName = childDefinition.getElementName(); @@ -335,10 +365,14 @@ public class RDFParser extends BaseParser { if (StringUtils.isNotBlank(encodedValue) || !hasNoExtensions(value)) { if (StringUtils.isNotBlank(encodedValue)) { - String propertyName = constructPredicateName(resource, childDefinition, childName, parentElement); + String propertyName = + constructPredicateName(resource, childDefinition, childName, parentElement); if (element != null) { XSDDatatype dataType = getXSDDataTypeForFhirType(element.fhirType(), encodedValue); - rdfResource.addProperty(rdfModel.createProperty(propertyName), this.createFhirValueBlankNode(rdfModel, encodedValue, dataType, cardinalityIndex)); + rdfResource.addProperty( + rdfModel.createProperty(propertyName), + this.createFhirValueBlankNode( + rdfModel, encodedValue, dataType, cardinalityIndex)); } } } @@ -350,19 +384,34 @@ public class RDFParser extends BaseParser { String value = pd.getValueAsString(); if (value != null || !hasNoExtensions(pd)) { if (value != null) { - String propertyName = constructPredicateName(resource, childDefinition, childName, parentElement); + String propertyName = + constructPredicateName(resource, childDefinition, childName, parentElement); XSDDatatype dataType = 
getXSDDataTypeForFhirType(pd.fhirType(), value); - Resource valueResource = this.createFhirValueBlankNode(rdfModel, value, dataType, cardinalityIndex); + Resource valueResource = + this.createFhirValueBlankNode(rdfModel, value, dataType, cardinalityIndex); if (!hasNoExtensions(pd)) { - IBaseHasExtensions hasExtension = (IBaseHasExtensions)pd; - if (hasExtension.getExtension() != null && hasExtension.getExtension().size() > 0) { + IBaseHasExtensions hasExtension = (IBaseHasExtensions) pd; + if (hasExtension.getExtension() != null + && hasExtension.getExtension().size() > 0) { int i = 0; for (IBaseExtension extension : hasExtension.getExtension()) { - RuntimeResourceDefinition resDef = getContext().getResourceDefinition(resource); + RuntimeResourceDefinition resDef = + getContext().getResourceDefinition(resource); Resource extensionResource = rdfModel.createResource(); - extensionResource.addProperty(rdfModel.createProperty(FHIR_NS+FHIR_INDEX), rdfModel.createTypedLiteral(i, XSDDatatype.XSDinteger)); - valueResource.addProperty(rdfModel.createProperty(FHIR_NS + ELEMENT_EXTENSION), extensionResource); - encodeCompositeElementToStreamWriter(resource, extension, rdfModel, extensionResource, false, new CompositeChildElement(resDef, encodeContext), encodeContext); + extensionResource.addProperty( + rdfModel.createProperty(FHIR_NS + FHIR_INDEX), + rdfModel.createTypedLiteral(i, XSDDatatype.XSDinteger)); + valueResource.addProperty( + rdfModel.createProperty(FHIR_NS + ELEMENT_EXTENSION), + extensionResource); + encodeCompositeElementToStreamWriter( + resource, + extension, + rdfModel, + extensionResource, + false, + new CompositeChildElement(resDef, encodeContext), + encodeContext); } } } @@ -382,29 +431,41 @@ public class RDFParser extends BaseParser { if (resourceId != null) { idString = resourceId.getIdPart(); } - } - else if (element instanceof IBaseElement) { + } else if (element instanceof IBaseElement) { idPredicate = FHIR_NS + ELEMENT_ID; - if (((IBaseElement)element).getId() != null) { - idString = ((IBaseElement)element).getId(); + if (((IBaseElement) element).getId() != null) { + idString = ((IBaseElement) element).getId(); } } if (idString != null) { - rdfResource.addProperty(rdfModel.createProperty(idPredicate), createFhirValueBlankNode(rdfModel, idString)); + rdfResource.addProperty( + rdfModel.createProperty(idPredicate), createFhirValueBlankNode(rdfModel, idString)); } - rdfModel = encodeCompositeElementToStreamWriter(resource, element, rdfModel, rdfResource, includedResource, parent, encodeContext); + rdfModel = encodeCompositeElementToStreamWriter( + resource, element, rdfModel, rdfResource, includedResource, parent, encodeContext); break; } case CONTAINED_RESOURCE_LIST: case CONTAINED_RESOURCES: { if (element != null) { - IIdType resourceId = ((IBaseResource)element).getIdElement(); + IIdType resourceId = ((IBaseResource) element).getIdElement(); Resource containedResource = rdfModel.createResource(); - rdfResource.addProperty(rdfModel.createProperty(FHIR_NS+ DOMAIN_RESOURCE_CONTAINED), containedResource); + rdfResource.addProperty( + rdfModel.createProperty(FHIR_NS + DOMAIN_RESOURCE_CONTAINED), containedResource); if (cardinalityIndex != null) { - containedResource.addProperty(rdfModel.createProperty(FHIR_NS + FHIR_INDEX), cardinalityIndex.toString(), XSDDatatype.XSDinteger ); + containedResource.addProperty( + rdfModel.createProperty(FHIR_NS + FHIR_INDEX), + cardinalityIndex.toString(), + XSDDatatype.XSDinteger); } - encodeResourceToRDFStreamWriter((IBaseResource)element, 
rdfModel, true, super.fixContainedResourceId(resourceId.getValue()), encodeContext, false, containedResource); + encodeResourceToRDFStreamWriter( + (IBaseResource) element, + rdfModel, + true, + super.fixContainedResourceId(resourceId.getValue()), + encodeContext, + false, + containedResource); } break; } @@ -422,10 +483,11 @@ public class RDFParser extends BaseParser { } case PRIMITIVE_XHTML: case PRIMITIVE_XHTML_HL7ORG: { - IBaseXhtml xHtmlNode = (IBaseXhtml)element; + IBaseXhtml xHtmlNode = (IBaseXhtml) element; if (xHtmlNode != null) { String value = xHtmlNode.getValueAsString(); - String propertyName = constructPredicateName(resource, childDefinition, childName, parentElement); + String propertyName = + constructPredicateName(resource, childDefinition, childName, parentElement); rdfResource.addProperty(rdfModel.createProperty(propertyName), value); } break; @@ -433,7 +495,8 @@ public class RDFParser extends BaseParser { case EXTENSION_DECLARED: case UNDECL_EXT: default: { - throw new IllegalStateException(Msg.code(1847) + "Unexpected node - should not happen: " + childDef.getName()); + throw new IllegalStateException( + Msg.code(1847) + "Unexpected node - should not happen: " + childDef.getName()); } } } finally { @@ -496,36 +559,56 @@ public class RDFParser extends BaseParser { return resourceId; } - private Model encodeExtension(final IBaseResource resource, Model rdfModel, Resource rdfResource, - final boolean containedResource, - final CompositeChildElement nextChildElem, - final BaseRuntimeChildDefinition nextChild, - final IBase nextValue, - final String childName, - final BaseRuntimeElementDefinition childDef, - final EncodeContext encodeContext, Integer cardinalityIndex) { + private Model encodeExtension( + final IBaseResource resource, + Model rdfModel, + Resource rdfResource, + final boolean containedResource, + final CompositeChildElement nextChildElem, + final BaseRuntimeChildDefinition nextChild, + final IBase nextValue, + final String childName, + final BaseRuntimeElementDefinition childDef, + final EncodeContext encodeContext, + Integer cardinalityIndex) { BaseRuntimeDeclaredChildDefinition extDef = (BaseRuntimeDeclaredChildDefinition) nextChild; Resource childResource = rdfModel.createResource(); String extensionPredicateName = constructPredicateName(resource, extDef, extDef.getElementName(), null); rdfResource.addProperty(rdfModel.createProperty(extensionPredicateName), childResource); if (cardinalityIndex != null && cardinalityIndex > -1) { - childResource.addProperty(rdfModel.createProperty(FHIR_NS + FHIR_INDEX), cardinalityIndex.toString(), XSDDatatype.XSDinteger ); + childResource.addProperty( + rdfModel.createProperty(FHIR_NS + FHIR_INDEX), cardinalityIndex.toString(), XSDDatatype.XSDinteger); } - rdfModel = encodeChildElementToStreamWriter(resource, null, rdfModel, childResource, nextChild, nextValue, childName, - childDef, containedResource, nextChildElem, encodeContext, cardinalityIndex); + rdfModel = encodeChildElementToStreamWriter( + resource, + null, + rdfModel, + childResource, + nextChild, + nextValue, + childName, + childDef, + containedResource, + nextChildElem, + encodeContext, + cardinalityIndex); return rdfModel; } - private Model encodeCompositeElementToStreamWriter(final IBaseResource resource, - final IBase element, Model rdfModel, Resource rdfResource, - final boolean containedResource, - final CompositeChildElement parent, - final EncodeContext encodeContext) { + private Model encodeCompositeElementToStreamWriter( + final IBaseResource 
resource, + final IBase element, + Model rdfModel, + Resource rdfResource, + final boolean containedResource, + final CompositeChildElement parent, + final EncodeContext encodeContext) { - for (CompositeChildElement nextChildElem : super.compositeChildIterator(element, containedResource, parent, encodeContext)) { + for (CompositeChildElement nextChildElem : + super.compositeChildIterator(element, containedResource, parent, encodeContext)) { BaseRuntimeChildDefinition nextChild = nextChildElem.getDef(); @@ -543,8 +626,7 @@ public class RDFParser extends BaseParser { assert narrative != null; if (narrative.isEmpty()) { gen.populateResourceNarrative(getContext(), resource); - } - else { + } else { RuntimeChildNarrativeDefinition child = (RuntimeChildNarrativeDefinition) nextChild; // This is where we populate the parent of the narrative @@ -555,9 +637,19 @@ public class RDFParser extends BaseParser { String childName = nextChild.getChildNameByDatatype(child.getDatatype()); BaseRuntimeElementDefinition type = child.getChildByName(childName); - rdfModel = encodeChildElementToStreamWriter(resource, element, - rdfModel, childResource, nextChild, narrative, childName, type, - containedResource, nextChildElem, encodeContext, null); + rdfModel = encodeChildElementToStreamWriter( + resource, + element, + rdfModel, + childResource, + nextChild, + narrative, + childName, + type, + containedResource, + nextChildElem, + encodeContext, + null); continue; } } @@ -570,9 +662,16 @@ public class RDFParser extends BaseParser { continue; } - IBaseResource directChildResource = (IBaseResource)values.get(0); + IBaseResource directChildResource = (IBaseResource) values.get(0); // If it is a direct resource, we need to create a new subject for it. - Resource childResource = encodeResourceToRDFStreamWriter(directChildResource, rdfModel, false, directChildResource.getIdElement(), encodeContext, false, null); + Resource childResource = encodeResourceToRDFStreamWriter( + directChildResource, + rdfModel, + false, + directChildResource.getIdElement(), + encodeContext, + false, + null); String propertyName = constructPredicateName(resource, nextChild, nextChild.getElementName(), element); rdfResource.addProperty(rdfModel.createProperty(propertyName), childResource); @@ -583,10 +682,19 @@ public class RDFParser extends BaseParser { List values = nextChild.getAccessor().getValues(element); int i = 0; for (IBase containedResourceEntity : values) { - rdfModel = encodeChildElementToStreamWriter(resource, element, rdfModel, rdfResource, nextChild, containedResourceEntity, - nextChild.getChildNameByDatatype(null), - nextChild.getChildElementDefinitionByDatatype(null), - containedResource, nextChildElem, encodeContext, i); + rdfModel = encodeChildElementToStreamWriter( + resource, + element, + rdfModel, + rdfResource, + nextChild, + containedResourceEntity, + nextChild.getChildNameByDatatype(null), + nextChild.getChildElementDefinitionByDatatype(null), + containedResource, + nextChildElem, + encodeContext, + i); i++; } } else { @@ -620,38 +728,83 @@ public class RDFParser extends BaseParser { String extensionUrl = getExtensionUrl(nextChild.getExtensionUrl()); if (extensionUrl != null && !childName.equals(EXTENSION)) { - rdfModel = encodeExtension(resource, rdfModel, rdfResource, containedResource, nextChildElem, nextChild, - nextValue, childName, childDef, encodeContext, cardinalityIndex); + rdfModel = encodeExtension( + resource, + rdfModel, + rdfResource, + containedResource, + nextChildElem, + nextChild, + nextValue, + 
childName, + childDef, + encodeContext, + cardinalityIndex); } else if (nextChild instanceof RuntimeChildExtension) { IBaseExtension extension = (IBaseExtension) nextValue; - if ((extension.getValue() == null || extension.getValue().isEmpty())) { + if ((extension.getValue() == null + || extension.getValue().isEmpty())) { if (extension.getExtension().isEmpty()) { continue; } } - rdfModel = encodeExtension(resource, rdfModel, rdfResource, containedResource, nextChildElem, nextChild, - nextValue, childName, childDef, encodeContext, cardinalityIndex); + rdfModel = encodeExtension( + resource, + rdfModel, + rdfResource, + containedResource, + nextChildElem, + nextChild, + nextValue, + childName, + childDef, + encodeContext, + cardinalityIndex); } else if (!(nextChild instanceof RuntimeChildNarrativeDefinition) || !containedResource) { - - // If the child is not a value type, create a child object (blank node) for subordinate predicates to be attached to - if (childDef.getChildType() != PRIMITIVE_DATATYPE && - childDef.getChildType() != PRIMITIVE_XHTML_HL7ORG && - childDef.getChildType() != PRIMITIVE_XHTML && - childDef.getChildType() != ID_DATATYPE) { + // If the child is not a value type, create a child object (blank node) for subordinate + // predicates to be attached to + if (childDef.getChildType() != PRIMITIVE_DATATYPE + && childDef.getChildType() != PRIMITIVE_XHTML_HL7ORG + && childDef.getChildType() != PRIMITIVE_XHTML + && childDef.getChildType() != ID_DATATYPE) { Resource childResource = rdfModel.createResource(); String propertyName = constructPredicateName(resource, nextChild, childName, nextValue); rdfResource.addProperty(rdfModel.createProperty(propertyName), childResource); if (cardinalityIndex != null && cardinalityIndex > -1) { - childResource.addProperty(rdfModel.createProperty(FHIR_NS + FHIR_INDEX), cardinalityIndex.toString(), XSDDatatype.XSDinteger ); + childResource.addProperty( + rdfModel.createProperty(FHIR_NS + FHIR_INDEX), + cardinalityIndex.toString(), + XSDDatatype.XSDinteger); } - rdfModel = encodeChildElementToStreamWriter(resource, element, rdfModel, childResource, nextChild, nextValue, - childName, childDef, containedResource, nextChildElem, encodeContext, cardinalityIndex); - } - else { - rdfModel = encodeChildElementToStreamWriter(resource, element, rdfModel, rdfResource, nextChild, nextValue, - childName, childDef, containedResource, nextChildElem, encodeContext, cardinalityIndex); + rdfModel = encodeChildElementToStreamWriter( + resource, + element, + rdfModel, + childResource, + nextChild, + nextValue, + childName, + childDef, + containedResource, + nextChildElem, + encodeContext, + cardinalityIndex); + } else { + rdfModel = encodeChildElementToStreamWriter( + resource, + element, + rdfModel, + rdfResource, + nextChild, + nextValue, + childName, + childDef, + containedResource, + nextChildElem, + encodeContext, + cardinalityIndex); } } } @@ -663,15 +816,16 @@ public class RDFParser extends BaseParser { private T parseResource(Class resourceType, Model rdfModel) { // jsonMode of true is passed in so that the xhtml parser state behaves as expected // Push PreResourceState - ParserState parserState = ParserState.getPreResourceInstance(this, resourceType, getContext(), true, getErrorHandler()); + ParserState parserState = + ParserState.getPreResourceInstance(this, resourceType, getContext(), true, getErrorHandler()); return parseRootResource(rdfModel, parserState, resourceType); } - private T parseRootResource(Model rdfModel, ParserState parserState, Class 
resourceType) { logger.trace("Entering parseRootResource with state: {}", parserState); - StmtIterator rootStatementIterator = rdfModel.listStatements(null, rdfModel.getProperty(FHIR_NS + NODE_ROLE), rdfModel.getProperty(FHIR_NS + TREE_ROOT)); + StmtIterator rootStatementIterator = rdfModel.listStatements( + null, rdfModel.getProperty(FHIR_NS + NODE_ROLE), rdfModel.getProperty(FHIR_NS + TREE_ROOT)); Resource rootResource; String fhirResourceType, fhirTypeString; @@ -738,18 +892,22 @@ public class RDFParser extends BaseParser { return null; } - String predicateObjectAttribute = predicateUri.substring(predicateUri.lastIndexOf("/")+1); + String predicateObjectAttribute = predicateUri.substring(predicateUri.lastIndexOf("/") + 1); String predicateAttributeName; if (predicateObjectAttribute.contains(".")) { - predicateAttributeName = predicateObjectAttribute.substring(predicateObjectAttribute.lastIndexOf(".")+1); + predicateAttributeName = predicateObjectAttribute.substring(predicateObjectAttribute.lastIndexOf(".") + 1); } else { predicateAttributeName = predicateObjectAttribute; } return predicateAttributeName; } - private void processStatementObject(ParserState parserState, String predicateAttributeName, RDFNode statementObject) { - logger.trace("Entering processStatementObject with state: {}, for attribute {}", parserState, predicateAttributeName); + private void processStatementObject( + ParserState parserState, String predicateAttributeName, RDFNode statementObject) { + logger.trace( + "Entering processStatementObject with state: {}, for attribute {}", + parserState, + predicateAttributeName); // Push attribute element parserState.enteringNewElement(FHIR_NS, predicateAttributeName); @@ -764,15 +922,24 @@ public class RDFParser extends BaseParser { boolean containedResource = false; if (predicateAttributeName.equals(CONTAINED)) { containedResource = true; - parserState.enteringNewElement(FHIR_NS, resourceObject.getProperty(resourceObject.getModel().createProperty(RDF.type.getURI())).getObject().toString().replace(FHIR_NS, "")); + parserState.enteringNewElement( + FHIR_NS, + resourceObject + .getProperty(resourceObject.getModel().createProperty(RDF.type.getURI())) + .getObject() + .toString() + .replace(FHIR_NS, "")); } - List objectStatements = resourceObject.listProperties().toList(); + List objectStatements = + resourceObject.listProperties().toList(); objectStatements.sort(new FhirIndexStatementComparator()); for (Statement objectProperty : objectStatements) { if (objectProperty.getPredicate().hasURI(FHIR_NS + VALUE)) { predicateAttributeName = VALUE; - parserState.attributeValue(predicateAttributeName, objectProperty.getObject().asLiteral().getLexicalForm()); + parserState.attributeValue( + predicateAttributeName, + objectProperty.getObject().asLiteral().getLexicalForm()); } else { // Otherwise, process it as a net-new node predicateAttributeName = extractAttributeNameFromPredicate(objectProperty); @@ -810,9 +977,13 @@ public class RDFParser extends BaseParser { private void processExtension(ParserState parserState, RDFNode statementObject, boolean isModifier) { logger.trace("Entering processExtension with state: {}", parserState); Resource resource = statementObject.asResource(); - Statement urlProperty = resource.getProperty(resource.getModel().createProperty(FHIR_NS+EXTENSION_URL)); + Statement urlProperty = resource.getProperty(resource.getModel().createProperty(FHIR_NS + EXTENSION_URL)); Resource urlPropertyResource = urlProperty.getObject().asResource(); - String extensionUrl = 
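extractAttributeNameFromPredicate() above reduces a predicate URI to the element name the parser state expects. The same string handling in isolation:

// "http://hl7.org/fhir/Patient.name" -> "Patient.name" -> "name"
static String attributeNameFor(String predicateUri) {
    String tail = predicateUri.substring(predicateUri.lastIndexOf("/") + 1);
    return tail.contains(".") ? tail.substring(tail.lastIndexOf(".") + 1) : tail;
}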
urlPropertyResource.getProperty(resource.getModel().createProperty(FHIR_NS+VALUE)).getObject().asLiteral().getString(); + String extensionUrl = urlPropertyResource + .getProperty(resource.getModel().createProperty(FHIR_NS + VALUE)) + .getObject() + .asLiteral() + .getString(); List extensionStatements = resource.listProperties().toList(); String extensionValueType = null; @@ -821,9 +992,17 @@ public class RDFParser extends BaseParser { String propertyUri = statement.getPredicate().getURI(); if (propertyUri.contains("Extension.value")) { extensionValueType = propertyUri.replace(FHIR_NS + "Extension.", ""); - BaseRuntimeElementDefinition target = getContext().getRuntimeChildUndeclaredExtensionDefinition().getChildByName(extensionValueType); - if (target.getChildType().equals(ID_DATATYPE) || target.getChildType().equals(PRIMITIVE_DATATYPE)) { - extensionValueResource = statement.getObject().asResource().getProperty(resource.getModel().createProperty(FHIR_NS+VALUE)).getObject().asLiteral(); + BaseRuntimeElementDefinition target = getContext() + .getRuntimeChildUndeclaredExtensionDefinition() + .getChildByName(extensionValueType); + if (target.getChildType().equals(ID_DATATYPE) + || target.getChildType().equals(PRIMITIVE_DATATYPE)) { + extensionValueResource = statement + .getObject() + .asResource() + .getProperty(resource.getModel().createProperty(FHIR_NS + VALUE)) + .getObject() + .asLiteral(); } else { extensionValueResource = statement.getObject().asResource(); } @@ -851,7 +1030,8 @@ public class RDFParser extends BaseParser { @Override public int compare(Statement arg0, Statement arg1) { - int result = arg0.getPredicate().getURI().compareTo(arg1.getPredicate().getURI()); + int result = + arg0.getPredicate().getURI().compareTo(arg1.getPredicate().getURI()); if (result == 0) { if (arg0.getObject().isResource() && arg1.getObject().isResource()) { Resource resource0 = arg0.getObject().asResource(); @@ -859,14 +1039,14 @@ public class RDFParser extends BaseParser { result = Integer.compare(getFhirIndex(resource0), getFhirIndex(resource1)); } - } return result; } private int getFhirIndex(Resource resource) { - if (resource.hasProperty(resource.getModel().createProperty(FHIR_NS+FHIR_INDEX))) { - return resource.getProperty(resource.getModel().createProperty(FHIR_NS+FHIR_INDEX)).getInt(); + if (resource.hasProperty(resource.getModel().createProperty(FHIR_NS + FHIR_INDEX))) { + return resource.getProperty(resource.getModel().createProperty(FHIR_NS + FHIR_INDEX)) + .getInt(); } return -1; } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/StrictErrorHandler.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/StrictErrorHandler.java index 11180a065cd..8aee0f46c4e 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/StrictErrorHandler.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/StrictErrorHandler.java @@ -28,7 +28,7 @@ import ca.uhn.fhir.util.UrlUtil; /** * Parser error handler which throws a {@link DataFormatException} any time an * issue is found while parsing. 
- * + * * @see IParser#setParserErrorHandler(IParserErrorHandler) * @see FhirContext#setParserErrorHandler(IParserErrorHandler) */ @@ -40,14 +40,22 @@ public class StrictErrorHandler extends ParseErrorHandler implements IParserErro } @Override - public void incorrectJsonType(IParseLocation theLocation, String theElementName, ValueType theExpected, ScalarType theExpectedScalarType, ValueType theFound, ScalarType theFoundScalarType) { - String message = LenientErrorHandler.createIncorrectJsonTypeMessage(theElementName, theExpected, theExpectedScalarType, theFound, theFoundScalarType); + public void incorrectJsonType( + IParseLocation theLocation, + String theElementName, + ValueType theExpected, + ScalarType theExpectedScalarType, + ValueType theFound, + ScalarType theFoundScalarType) { + String message = LenientErrorHandler.createIncorrectJsonTypeMessage( + theElementName, theExpected, theExpectedScalarType, theFound, theFoundScalarType); throw new DataFormatException(Msg.code(1820) + message); } @Override public void invalidValue(IParseLocation theLocation, String theValue, String theError) { - throw new DataFormatException(Msg.code(1821) + describeLocation(theLocation) + "Invalid attribute value \"" + UrlUtil.sanitizeUrlPart(theValue) + "\": " + theError); + throw new DataFormatException(Msg.code(1821) + describeLocation(theLocation) + "Invalid attribute value \"" + + UrlUtil.sanitizeUrlPart(theValue) + "\": " + theError); } @Override @@ -66,27 +74,31 @@ public class StrictErrorHandler extends ParseErrorHandler implements IParserErro @Override public void unexpectedRepeatingElement(IParseLocation theLocation, String theElementName) { - throw new DataFormatException(Msg.code(1823) + describeLocation(theLocation) + "Multiple repetitions of non-repeatable element '" + theElementName + "' found during parse"); + throw new DataFormatException(Msg.code(1823) + describeLocation(theLocation) + + "Multiple repetitions of non-repeatable element '" + theElementName + "' found during parse"); } @Override public void unknownAttribute(IParseLocation theLocation, String theAttributeName) { - throw new DataFormatException(Msg.code(1824) + describeLocation(theLocation) + "Unknown attribute '" + theAttributeName + "' found during parse"); + throw new DataFormatException(Msg.code(1824) + describeLocation(theLocation) + "Unknown attribute '" + + theAttributeName + "' found during parse"); } @Override public void unknownElement(IParseLocation theLocation, String theElementName) { - throw new DataFormatException(Msg.code(1825) + describeLocation(theLocation) + "Unknown element '" + theElementName + "' found during parse"); + throw new DataFormatException(Msg.code(1825) + describeLocation(theLocation) + "Unknown element '" + + theElementName + "' found during parse"); } @Override public void unknownReference(IParseLocation theLocation, String theReference) { - throw new DataFormatException(Msg.code(1826) + describeLocation(theLocation) + "Resource has invalid reference: " + theReference); + throw new DataFormatException( + Msg.code(1826) + describeLocation(theLocation) + "Resource has invalid reference: " + theReference); } @Override public void extensionContainsValueAndNestedExtensions(IParseLocation theLocation) { - throw new DataFormatException(Msg.code(1827) + describeLocation(theLocation) + "Extension contains both a value and nested extensions"); + throw new DataFormatException(Msg.code(1827) + describeLocation(theLocation) + + "Extension contains both a value and nested extensions"); } - } diff --git 
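StrictErrorHandler above converts every parse problem into a DataFormatException. A short usage sketch, attaching it to one parser or to the whole context (per the @see references above); the sample message is illustrative:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.parser.StrictErrorHandler;

FhirContext ctx = FhirContext.forR4();
IParser parser = ctx.newJsonParser();
parser.setParserErrorHandler(new StrictErrorHandler()); // this parser only
ctx.setParserErrorHandler(new StrictErrorHandler());    // or every parser created by ctx

try {
    parser.parseResource("{\"resourceType\":\"Patient\",\"bogusElement\":true}");
} catch (DataFormatException e) {
    // e.g. "HAPI-1825: ... Unknown element 'bogusElement' found during parse"
}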
a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/XmlParser.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/XmlParser.java index 71ae7534831..fba60711839 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/XmlParser.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/XmlParser.java @@ -58,6 +58,13 @@ import org.hl7.fhir.instance.model.api.IBaseXhtml; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; +import java.io.IOException; +import java.io.Reader; +import java.io.Writer; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Optional; import javax.xml.namespace.QName; import javax.xml.stream.FactoryConfigurationError; import javax.xml.stream.XMLEventReader; @@ -71,13 +78,6 @@ import javax.xml.stream.events.EntityReference; import javax.xml.stream.events.Namespace; import javax.xml.stream.events.StartElement; import javax.xml.stream.events.XMLEvent; -import java.io.IOException; -import java.io.Reader; -import java.io.Writer; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.Optional; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -129,7 +129,8 @@ public class XmlParser extends BaseParser { } @Override - public void doEncodeResourceToWriter(IBaseResource theResource, Writer theWriter, EncodeContext theEncodeContext) throws DataFormatException { + public void doEncodeResourceToWriter(IBaseResource theResource, Writer theWriter, EncodeContext theEncodeContext) + throws DataFormatException { XMLStreamWriter eventWriter; try { eventWriter = createXmlWriter(theWriter); @@ -142,13 +143,20 @@ public class XmlParser extends BaseParser { } @Override - protected void doEncodeToWriter(IBase theElement, Writer theWriter, EncodeContext theEncodeContext) throws IOException, DataFormatException { + protected void doEncodeToWriter(IBase theElement, Writer theWriter, EncodeContext theEncodeContext) + throws IOException, DataFormatException { XMLStreamWriter eventWriter; try { eventWriter = createXmlWriter(theWriter); eventWriter.writeStartElement("element"); - encodeCompositeElementToStreamWriter(null, theElement, eventWriter, false, new CompositeChildElement(null, theEncodeContext), theEncodeContext); + encodeCompositeElementToStreamWriter( + null, + theElement, + eventWriter, + false, + new CompositeChildElement(null, theEncodeContext), + theEncodeContext); eventWriter.writeEndElement(); eventWriter.flush(); @@ -183,7 +191,9 @@ public class XmlParser extends BaseParser { Attribute urlAttr = elem.getAttributeByName(new QName("url")); String url; if (urlAttr == null || isBlank(urlAttr.getValue())) { - getErrorHandler().missingRequiredElement(new ParseLocation().setParentElementName("extension"), "url"); + getErrorHandler() + .missingRequiredElement( + new ParseLocation().setParentElementName("extension"), "url"); url = null; } else { url = urlAttr.getValue(); @@ -193,7 +203,10 @@ public class XmlParser extends BaseParser { Attribute urlAttr = elem.getAttributeByName(new QName("url")); String url; if (urlAttr == null || isBlank(urlAttr.getValue())) { - getErrorHandler().missingRequiredElement(new ParseLocation().setParentElementName("modifierExtension"), "url"); + getErrorHandler() + .missingRequiredElement( + new ParseLocation().setParentElementName("modifierExtension"), + "url"); url = null; } else { url = urlAttr.getValue(); @@ -244,7 +257,10 @@ public class 
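The XmlParser changes in this hunk are formatting-only; for orientation, the parser is driven through the same public API as the JSON parser. A minimal sketch, assuming the R4 structures module:

FhirContext ctx = FhirContext.forR4();
IParser xmlParser = ctx.newXmlParser().setPrettyPrint(true);

Patient patient = new Patient();
patient.setActive(true);

String xml = xmlParser.encodeResourceToString(patient);
// <Patient xmlns="http://hl7.org/fhir">
//    <active value="true"/>
// </Patient>
Patient reparsed = xmlParser.parseResource(Patient.class, xml);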
XmlParser extends BaseParser { parserState.xmlEvent(nextEvent); } catch (DataFormatException e) { - throw new DataFormatException(Msg.code(1851) + "DataFormatException at [" + nextEvent.getLocation().toString() + "]: " + e.getMessage(), e); + throw new DataFormatException( + Msg.code(1851) + "DataFormatException at [" + + nextEvent.getLocation().toString() + "]: " + e.getMessage(), + e); } } return parserState.getObject(); @@ -253,8 +269,18 @@ public class XmlParser extends BaseParser { } } - private void encodeChildElementToStreamWriter(IBaseResource theResource, XMLStreamWriter theEventWriter, BaseRuntimeChildDefinition theChildDefinition, IBase theElement, String theChildName, BaseRuntimeElementDefinition childDef, - String theExtensionUrl, boolean theIncludedResource, CompositeChildElement theParent, EncodeContext theEncodeContext) throws XMLStreamException, DataFormatException { + private void encodeChildElementToStreamWriter( + IBaseResource theResource, + XMLStreamWriter theEventWriter, + BaseRuntimeChildDefinition theChildDefinition, + IBase theElement, + String theChildName, + BaseRuntimeElementDefinition childDef, + String theExtensionUrl, + boolean theIncludedResource, + CompositeChildElement theParent, + EncodeContext theEncodeContext) + throws XMLStreamException, DataFormatException { /* * Often the two values below will be the same thing. There are cases though @@ -287,7 +313,8 @@ public class XmlParser extends BaseParser { if (StringUtils.isNotBlank(encodedValue)) { theEventWriter.writeAttribute("value", encodedValue); } - encodeExtensionsIfPresent(theResource, theEventWriter, theElement, theIncludedResource, theEncodeContext); + encodeExtensionsIfPresent( + theResource, theEventWriter, theElement, theIncludedResource, theEncodeContext); theEventWriter.writeEndElement(); } break; @@ -304,7 +331,8 @@ public class XmlParser extends BaseParser { if (value != null) { theEventWriter.writeAttribute("value", value); } - encodeExtensionsIfPresent(theResource, theEventWriter, theElement, theIncludedResource, theEncodeContext); + encodeExtensionsIfPresent( + theResource, theEventWriter, theElement, theIncludedResource, theEncodeContext); theEventWriter.writeEndElement(); } break; @@ -319,7 +347,8 @@ public class XmlParser extends BaseParser { if (isNotBlank(theExtensionUrl)) { theEventWriter.writeAttribute("url", theExtensionUrl); } - encodeCompositeElementToStreamWriter(theResource, theElement, theEventWriter, theIncludedResource, theParent, theEncodeContext); + encodeCompositeElementToStreamWriter( + theResource, theElement, theEventWriter, theIncludedResource, theParent, theEncodeContext); theEventWriter.writeEndElement(); break; } @@ -334,7 +363,8 @@ public class XmlParser extends BaseParser { IIdType resourceId = getContainedResources().getResourceId(next); theEventWriter.writeStartElement("contained"); String value = resourceId.getValue(); - encodeResourceToXmlStreamWriter(next, theEventWriter, true, fixContainedResourceId(value), theEncodeContext); + encodeResourceToXmlStreamWriter( + next, theEventWriter, true, fixContainedResourceId(value), theEncodeContext); theEventWriter.writeEndElement(); } break; @@ -380,13 +410,19 @@ public class XmlParser extends BaseParser { } finally { theEncodeContext.popPath(); } - } - private void encodeCompositeElementToStreamWriter(IBaseResource theResource, IBase theElement, XMLStreamWriter theEventWriter, boolean theContainedResource, CompositeChildElement theParent, EncodeContext theEncodeContext) - throws XMLStreamException, 
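encodeChildElementToStreamWriter() above writes primitive datatypes as a value attribute and then appends any extensions inside the same element. A sketch of what that produces from user code, using the standard patient-birthTime extension (assumes R4 structures; output shown approximately):

Patient p = new Patient();
p.getBirthDateElement().setValueAsString("1974-12-25");
p.getBirthDateElement()
        .addExtension()
        .setUrl("http://hl7.org/fhir/StructureDefinition/patient-birthTime")
        .setValue(new DateTimeType("1974-12-25T14:35:45-05:00"));

// encodes roughly as:
// <birthDate value="1974-12-25">
//    <extension url="http://hl7.org/fhir/StructureDefinition/patient-birthTime">
//       <valueDateTime value="1974-12-25T14:35:45-05:00"/>
//    </extension>
// </birthDate>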
DataFormatException { + private void encodeCompositeElementToStreamWriter( + IBaseResource theResource, + IBase theElement, + XMLStreamWriter theEventWriter, + boolean theContainedResource, + CompositeChildElement theParent, + EncodeContext theEncodeContext) + throws XMLStreamException, DataFormatException { - for (CompositeChildElement nextChildElem : super.compositeChildIterator(theElement, theContainedResource, theParent, theEncodeContext)) { + for (CompositeChildElement nextChildElem : + super.compositeChildIterator(theElement, theContainedResource, theParent, theEncodeContext)) { BaseRuntimeChildDefinition nextChild = nextChildElem.getDef(); @@ -410,13 +446,33 @@ public class XmlParser extends BaseParser { RuntimeChildNarrativeDefinition child = (RuntimeChildNarrativeDefinition) nextChild; String childName = nextChild.getChildNameByDatatype(child.getDatatype()); BaseRuntimeElementDefinition type = child.getChildByName(childName); - encodeChildElementToStreamWriter(theResource, theEventWriter, nextChild, narr.get(), childName, type, null, theContainedResource, nextChildElem, theEncodeContext); + encodeChildElementToStreamWriter( + theResource, + theEventWriter, + nextChild, + narr.get(), + childName, + type, + null, + theContainedResource, + nextChildElem, + theEncodeContext); continue; } } if (nextChild instanceof RuntimeChildContainedResources) { - encodeChildElementToStreamWriter(theResource, theEventWriter, nextChild, null, nextChild.getChildNameByDatatype(null), nextChild.getChildElementDefinitionByDatatype(null), null, theContainedResource, nextChildElem, theEncodeContext); + encodeChildElementToStreamWriter( + theResource, + theEventWriter, + nextChild, + null, + nextChild.getChildNameByDatatype(null), + nextChild.getChildElementDefinitionByDatatype(null), + null, + theContainedResource, + nextChildElem, + theEncodeContext); } else { List values = nextChild.getAccessor().getValues(theElement); @@ -453,26 +509,66 @@ public class XmlParser extends BaseParser { } if (extensionUrl != null && isExtension == false) { - encodeExtension(theResource, theEventWriter, theContainedResource, nextChildElem, nextChild, nextValue, childName, extensionUrl, childDef, theEncodeContext); + encodeExtension( + theResource, + theEventWriter, + theContainedResource, + nextChildElem, + nextChild, + nextValue, + childName, + extensionUrl, + childDef, + theEncodeContext); } else if (nextChild instanceof RuntimeChildExtension) { IBaseExtension extension = (IBaseExtension) nextValue; - if ((extension.getValue() == null || extension.getValue().isEmpty())) { + if ((extension.getValue() == null + || extension.getValue().isEmpty())) { if (extension.getExtension().isEmpty()) { continue; } } - encodeChildElementToStreamWriter(theResource, theEventWriter, nextChild, nextValue, childName, childDef, getExtensionUrl(extension.getUrl()), theContainedResource, nextChildElem, theEncodeContext); + encodeChildElementToStreamWriter( + theResource, + theEventWriter, + nextChild, + nextValue, + childName, + childDef, + getExtensionUrl(extension.getUrl()), + theContainedResource, + nextChildElem, + theEncodeContext); } else { - encodeChildElementToStreamWriter(theResource, theEventWriter, nextChild, nextValue, childName, childDef, extensionUrl, theContainedResource, nextChildElem, theEncodeContext); + encodeChildElementToStreamWriter( + theResource, + theEventWriter, + nextChild, + nextValue, + childName, + childDef, + extensionUrl, + theContainedResource, + nextChildElem, + theEncodeContext); } - } } } } - private void 
encodeExtension(IBaseResource theResource, XMLStreamWriter theEventWriter, boolean theContainedResource, CompositeChildElement nextChildElem, BaseRuntimeChildDefinition nextChild, IBase nextValue, String childName, String extensionUrl, BaseRuntimeElementDefinition childDef, EncodeContext theEncodeContext) - throws XMLStreamException { + private void encodeExtension( + IBaseResource theResource, + XMLStreamWriter theEventWriter, + boolean theContainedResource, + CompositeChildElement nextChildElem, + BaseRuntimeChildDefinition nextChild, + IBase nextValue, + String childName, + String extensionUrl, + BaseRuntimeElementDefinition childDef, + EncodeContext theEncodeContext) + throws XMLStreamException { BaseRuntimeDeclaredChildDefinition extDef = (BaseRuntimeDeclaredChildDefinition) nextChild; if (extDef.isModifier()) { theEventWriter.writeStartElement("modifierExtension"); @@ -492,27 +588,67 @@ public class XmlParser extends BaseParser { theEventWriter.writeAttribute("url", extensionUrl); } - encodeChildElementToStreamWriter(theResource, theEventWriter, nextChild, nextValue, childName, childDef, null, theContainedResource, nextChildElem, theEncodeContext); + encodeChildElementToStreamWriter( + theResource, + theEventWriter, + nextChild, + nextValue, + childName, + childDef, + null, + theContainedResource, + nextChildElem, + theEncodeContext); theEventWriter.writeEndElement(); } - private void encodeExtensionsIfPresent(IBaseResource theResource, XMLStreamWriter theWriter, IBase theElement, boolean theIncludedResource, EncodeContext theEncodeContext) throws XMLStreamException, DataFormatException { + private void encodeExtensionsIfPresent( + IBaseResource theResource, + XMLStreamWriter theWriter, + IBase theElement, + boolean theIncludedResource, + EncodeContext theEncodeContext) + throws XMLStreamException, DataFormatException { if (theElement instanceof ISupportsUndeclaredExtensions) { ISupportsUndeclaredExtensions res = (ISupportsUndeclaredExtensions) theElement; - encodeUndeclaredExtensions(theResource, theWriter, toBaseExtensionList(res.getUndeclaredExtensions()), "extension", theIncludedResource, theEncodeContext); - encodeUndeclaredExtensions(theResource, theWriter, toBaseExtensionList(res.getUndeclaredModifierExtensions()), "modifierExtension", theIncludedResource, theEncodeContext); + encodeUndeclaredExtensions( + theResource, + theWriter, + toBaseExtensionList(res.getUndeclaredExtensions()), + "extension", + theIncludedResource, + theEncodeContext); + encodeUndeclaredExtensions( + theResource, + theWriter, + toBaseExtensionList(res.getUndeclaredModifierExtensions()), + "modifierExtension", + theIncludedResource, + theEncodeContext); } if (theElement instanceof IBaseHasExtensions) { IBaseHasExtensions res = (IBaseHasExtensions) theElement; - encodeUndeclaredExtensions(theResource, theWriter, res.getExtension(), "extension", theIncludedResource, theEncodeContext); + encodeUndeclaredExtensions( + theResource, theWriter, res.getExtension(), "extension", theIncludedResource, theEncodeContext); } if (theElement instanceof IBaseHasModifierExtensions) { IBaseHasModifierExtensions res = (IBaseHasModifierExtensions) theElement; - encodeUndeclaredExtensions(theResource, theWriter, res.getModifierExtension(), "modifierExtension", theIncludedResource, theEncodeContext); + encodeUndeclaredExtensions( + theResource, + theWriter, + res.getModifierExtension(), + "modifierExtension", + theIncludedResource, + theEncodeContext); } } - private void encodeResourceToXmlStreamWriter(IBaseResource 
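encodeExtension() above picks the modifierExtension or extension start element and writes the url attribute before recursing into the value. A sketch with a modifier extension (the URL is hypothetical; assumes R4 structures):

Patient p = new Patient();
p.addModifierExtension()
        .setUrl("http://example.org/fhir/StructureDefinition/sample-modifier") // hypothetical URL
        .setValue(new BooleanType(true));

// encodes roughly as:
// <modifierExtension url="http://example.org/fhir/StructureDefinition/sample-modifier">
//    <valueBoolean value="true"/>
// </modifierExtension>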
theResource, XMLStreamWriter theEventWriter, boolean theIncludedResource, EncodeContext theEncodeContext) throws XMLStreamException, DataFormatException { + private void encodeResourceToXmlStreamWriter( + IBaseResource theResource, + XMLStreamWriter theEventWriter, + boolean theIncludedResource, + EncodeContext theEncodeContext) + throws XMLStreamException, DataFormatException { IIdType resourceId = null; if (StringUtils.isNotBlank(theResource.getIdElement().getIdPart())) { @@ -533,7 +669,13 @@ public class XmlParser extends BaseParser { encodeResourceToXmlStreamWriter(theResource, theEventWriter, theIncludedResource, resourceId, theEncodeContext); } - private void encodeResourceToXmlStreamWriter(IBaseResource theResource, XMLStreamWriter theEventWriter, boolean theContainedResource, IIdType theResourceId, EncodeContext theEncodeContext) throws XMLStreamException { + private void encodeResourceToXmlStreamWriter( + IBaseResource theResource, + XMLStreamWriter theEventWriter, + boolean theContainedResource, + IIdType theResourceId, + EncodeContext theEncodeContext) + throws XMLStreamException { RuntimeResourceDefinition resDef = getContext().getResourceDefinition(theResource); if (resDef == null) { throw new ConfigurationException(Msg.code(1854) + "Unknown resource type: " + theResource.getClass()); @@ -557,7 +699,13 @@ public class XmlParser extends BaseParser { writeCommentsPost(theEventWriter, theResourceId); } - encodeCompositeElementToStreamWriter(theResource, theResource, theEventWriter, theContainedResource, new CompositeChildElement(resDef, theEncodeContext), theEncodeContext); + encodeCompositeElementToStreamWriter( + theResource, + theResource, + theEventWriter, + theContainedResource, + new CompositeChildElement(resDef, theEncodeContext), + theEncodeContext); } else { @@ -565,9 +713,9 @@ public class XmlParser extends BaseParser { IResource resource = (IResource) theResource; if (theResourceId != null) { - /* writeCommentsPre(theEventWriter, theResourceId); - writeOptionalTagWithValue(theEventWriter, "id", theResourceId.getIdPart()); - writeCommentsPost(theEventWriter, theResourceId);*/ + /* writeCommentsPre(theEventWriter, theResourceId); + writeOptionalTagWithValue(theEventWriter, "id", theResourceId.getIdPart()); + writeCommentsPost(theEventWriter, theResourceId);*/ theEventWriter.writeStartElement("id"); theEventWriter.writeAttribute("value", theResourceId.getIdPart()); encodeExtensionsIfPresent(theResource, theEventWriter, theResourceId, false, theEncodeContext); @@ -581,13 +729,15 @@ public class XmlParser extends BaseParser { if (isBlank(versionIdPart)) { versionIdPart = ResourceMetadataKeyEnum.VERSION.get(resource); } - List securityLabels = extractMetadataListNotNull(resource, ResourceMetadataKeyEnum.SECURITY_LABELS); + List securityLabels = + extractMetadataListNotNull(resource, ResourceMetadataKeyEnum.SECURITY_LABELS); List profiles = extractMetadataListNotNull(resource, ResourceMetadataKeyEnum.PROFILES); profiles = super.getProfileTagsForEncoding(resource, profiles); TagList tags = getMetaTagsForEncoding((resource), theEncodeContext); - if (super.shouldEncodeResourceMeta(resource) && ElementUtil.isEmpty(versionIdPart, updated, securityLabels, tags, profiles) == false) { + if (super.shouldEncodeResourceMeta(resource) + && ElementUtil.isEmpty(versionIdPart, updated, securityLabels, tags, profiles) == false) { theEventWriter.writeStartElement("meta"); if (shouldEncodePath(resource, "meta.versionId")) { writeOptionalTagWithValue(theEventWriter, "versionId", versionIdPart); 
@@ -605,7 +755,8 @@ public class XmlParser extends BaseParser { } for (BaseCodingDt securityLabel : securityLabels) { theEventWriter.writeStartElement("security"); - encodeCompositeElementToStreamWriter(resource, securityLabel, theEventWriter, theContainedResource, null, theEncodeContext); + encodeCompositeElementToStreamWriter( + resource, securityLabel, theEventWriter, theContainedResource, null, theEncodeContext); theEventWriter.writeEndElement(); } if (tags != null) { @@ -628,18 +779,31 @@ public class XmlParser extends BaseParser { writeOptionalTagWithValue(theEventWriter, "contentType", bin.getContentType()); writeOptionalTagWithValue(theEventWriter, "content", bin.getContentAsBase64()); } else { - encodeCompositeElementToStreamWriter(theResource, theResource, theEventWriter, theContainedResource, new CompositeChildElement(resDef, theEncodeContext), theEncodeContext); + encodeCompositeElementToStreamWriter( + theResource, + theResource, + theEventWriter, + theContainedResource, + new CompositeChildElement(resDef, theEncodeContext), + theEncodeContext); } - } theEventWriter.writeEndElement(); } - private void encodeUndeclaredExtensions(IBaseResource theResource, XMLStreamWriter theEventWriter, List> theExtensions, String tagName, boolean theIncludedResource, EncodeContext theEncodeContext) - throws XMLStreamException, DataFormatException { + private void encodeUndeclaredExtensions( + IBaseResource theResource, + XMLStreamWriter theEventWriter, + List> theExtensions, + String tagName, + boolean theIncludedResource, + EncodeContext theEncodeContext) + throws XMLStreamException, DataFormatException { for (IBaseExtension next : theExtensions) { - if (next == null || (ElementUtil.isEmpty(next.getValue()) && next.getExtension().isEmpty())) { + if (next == null + || (ElementUtil.isEmpty(next.getValue()) + && next.getExtension().isEmpty())) { continue; } @@ -659,22 +823,37 @@ public class XmlParser extends BaseParser { if (next.getValue() != null) { IBaseDatatype value = next.getValue(); - RuntimeChildUndeclaredExtensionDefinition extDef = getContext().getRuntimeChildUndeclaredExtensionDefinition(); + RuntimeChildUndeclaredExtensionDefinition extDef = + getContext().getRuntimeChildUndeclaredExtensionDefinition(); String childName = extDef.getChildNameByDatatype(value.getClass()); BaseRuntimeElementDefinition childDef; if (childName == null) { childDef = getContext().getElementDefinition(value.getClass()); if (childDef == null) { - throw new ConfigurationException(Msg.code(1855) + "Unable to encode extension, unrecognized child element type: " + value.getClass().getCanonicalName()); + throw new ConfigurationException( + Msg.code(1855) + "Unable to encode extension, unrecognized child element type: " + + value.getClass().getCanonicalName()); } childName = RuntimeChildUndeclaredExtensionDefinition.createExtensionChildName(childDef); } else { childDef = extDef.getChildElementDefinitionByDatatype(value.getClass()); if (childDef == null) { - throw new ConfigurationException(Msg.code(1856) + "Unable to encode extension, unrecognized child element type: " + value.getClass().getCanonicalName()); + throw new ConfigurationException( + Msg.code(1856) + "Unable to encode extension, unrecognized child element type: " + + value.getClass().getCanonicalName()); } } - encodeChildElementToStreamWriter(theResource, theEventWriter, extDef, value, childName, childDef, null, theIncludedResource, null, theEncodeContext); + encodeChildElementToStreamWriter( + theResource, + theEventWriter, + extDef, + value, + 
childName, + childDef, + null, + theIncludedResource, + null, + theEncodeContext); } // child extensions @@ -683,11 +862,9 @@ public class XmlParser extends BaseParser { theEventWriter.writeEndElement(); writeCommentsPost(theEventWriter, next); - } } - private void encodeXhtml(XhtmlDt theDt, XMLStreamWriter theEventWriter) throws XMLStreamException { if (theDt == null || theDt.getValue() == null) { return; @@ -704,10 +881,17 @@ public class XmlParser extends BaseParser { if (isBlank(attr.getName().getNamespaceURI())) { theEventWriter.writeAttribute(attr.getName().getLocalPart(), attr.getValue()); } else { - theEventWriter.writeAttribute(attr.getName().getNamespaceURI(), attr.getName().getLocalPart(), attr.getValue()); + theEventWriter.writeAttribute( + attr.getName().getNamespaceURI(), + attr.getName().getLocalPart(), + attr.getValue()); } } else { - theEventWriter.writeAttribute(attr.getName().getPrefix(), attr.getName().getNamespaceURI(), attr.getName().getLocalPart(), attr.getValue()); + theEventWriter.writeAttribute( + attr.getName().getPrefix(), + attr.getName().getNamespaceURI(), + attr.getName().getLocalPart(), + attr.getValue()); } break; @@ -746,7 +930,8 @@ public class XmlParser extends BaseParser { } else { String prefix = se.getName().getPrefix(); String namespaceURI = se.getName().getNamespaceURI(); - theEventWriter.writeStartElement(prefix, se.getName().getLocalPart(), namespaceURI); + theEventWriter.writeStartElement( + prefix, se.getName().getLocalPart(), namespaceURI); theEventWriter.writeNamespace(prefix, namespaceURI); } firstElement = false; @@ -756,14 +941,20 @@ public class XmlParser extends BaseParser { theEventWriter.writeStartElement(se.getName().getLocalPart()); } else { if (StringUtils.isBlank(se.getName().getPrefix())) { - theEventWriter.writeStartElement(se.getName().getLocalPart()); + theEventWriter.writeStartElement( + se.getName().getLocalPart()); // theEventWriter.writeDefaultNamespace(se.getName().getNamespaceURI()); } else { - theEventWriter.writeStartElement(se.getName().getNamespaceURI(), se.getName().getLocalPart()); + theEventWriter.writeStartElement( + se.getName().getNamespaceURI(), + se.getName().getLocalPart()); } } } else { - theEventWriter.writeStartElement(se.getName().getPrefix(), se.getName().getLocalPart(), se.getName().getNamespaceURI()); + theEventWriter.writeStartElement( + se.getName().getPrefix(), + se.getName().getLocalPart(), + se.getName().getNamespaceURI()); } } for (Iterator attrIter = se.getAttributes(); attrIter.hasNext(); ) { @@ -771,7 +962,11 @@ public class XmlParser extends BaseParser { if (isBlank(next.getName().getNamespaceURI())) { theEventWriter.writeAttribute(next.getName().getLocalPart(), next.getValue()); } else { - theEventWriter.writeAttribute(next.getName().getPrefix(), next.getName().getNamespaceURI(), next.getName().getLocalPart(), next.getValue()); + theEventWriter.writeAttribute( + next.getName().getPrefix(), + next.getName().getNamespaceURI(), + next.getName().getLocalPart(), + next.getValue()); } } break; @@ -783,7 +978,6 @@ public class XmlParser extends BaseParser { case XMLStreamConstants.START_DOCUMENT: break; } - } } @@ -793,7 +987,8 @@ public class XmlParser extends BaseParser { } private T parseResource(Class theResourceType, XMLEventReader theStreamReader) { - ParserState parserState = ParserState.getPreResourceInstance(this, theResourceType, getContext(), false, getErrorHandler()); + ParserState parserState = + ParserState.getPreResourceInstance(this, theResourceType, getContext(), false, 
getErrorHandler()); return doXmlLoop(theStreamReader, parserState); } @@ -834,12 +1029,12 @@ public class XmlParser extends BaseParser { } } - private void writeOptionalTagWithValue(XMLStreamWriter theEventWriter, String theName, String theValue) throws XMLStreamException { + private void writeOptionalTagWithValue(XMLStreamWriter theEventWriter, String theName, String theValue) + throws XMLStreamException { if (StringUtils.isNotBlank(theValue)) { theEventWriter.writeStartElement(theName); theEventWriter.writeAttribute("value", theValue); theEventWriter.writeEndElement(); } } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/json/BaseJsonLikeArray.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/json/BaseJsonLikeArray.java index 9d456fba8b2..9623b7da6f2 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/json/BaseJsonLikeArray.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/json/BaseJsonLikeArray.java @@ -25,7 +25,7 @@ public abstract class BaseJsonLikeArray extends BaseJsonLikeValue { public ValueType getJsonType() { return ValueType.ARRAY; } - + @Override public ScalarType getDataType() { return null; @@ -46,7 +46,7 @@ public abstract class BaseJsonLikeArray extends BaseJsonLikeValue { return null; } - public abstract int size (); - - public abstract BaseJsonLikeValue get (int index); + public abstract int size(); + + public abstract BaseJsonLikeValue get(int index); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/json/BaseJsonLikeObject.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/json/BaseJsonLikeObject.java index 2254cce90fd..8d8f67c5147 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/json/BaseJsonLikeObject.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/json/BaseJsonLikeObject.java @@ -51,5 +51,4 @@ public abstract class BaseJsonLikeObject extends BaseJsonLikeValue { public abstract Iterator keyIterator(); public abstract BaseJsonLikeValue get(String key); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/json/BaseJsonLikeValue.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/json/BaseJsonLikeValue.java index d2000d14881..1f84c17e692 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/json/BaseJsonLikeValue.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/json/BaseJsonLikeValue.java @@ -24,205 +24,216 @@ package ca.uhn.fhir.parser.json; * element in a JSON structure. This could be a JSON object, * a JSON array, a scalar value (number, string, boolean), * or a null. 
- * + * */ public abstract class BaseJsonLikeValue { - + public enum ValueType { - ARRAY, OBJECT, SCALAR, NULL + ARRAY, + OBJECT, + SCALAR, + NULL }; - + public enum ScalarType { - NUMBER, STRING, BOOLEAN + NUMBER, + STRING, + BOOLEAN } - - public abstract ValueType getJsonType (); - - public abstract ScalarType getDataType (); - - public abstract Object getValue (); - - public boolean isArray () { + + public abstract ValueType getJsonType(); + + public abstract ScalarType getDataType(); + + public abstract Object getValue(); + + public boolean isArray() { return this.getJsonType() == ValueType.ARRAY; } - - public boolean isObject () { + + public boolean isObject() { return this.getJsonType() == ValueType.OBJECT; } - - public boolean isScalar () { + + public boolean isScalar() { return this.getJsonType() == ValueType.SCALAR; } - - public boolean isString () { + + public boolean isString() { return this.getJsonType() == ValueType.SCALAR && this.getDataType() == ScalarType.STRING; } - - public boolean isNumber () { + + public boolean isNumber() { return this.getJsonType() == ValueType.SCALAR && this.getDataType() == ScalarType.NUMBER; } - - public boolean isNull () { + + public boolean isNull() { return this.getJsonType() == ValueType.NULL; } - - public BaseJsonLikeArray getAsArray () { + + public BaseJsonLikeArray getAsArray() { return null; } - public BaseJsonLikeObject getAsObject () { + + public BaseJsonLikeObject getAsObject() { return null; } - public String getAsString () { + + public String getAsString() { return this.toString(); } - public Number getAsNumber () { - return this.isNumber() ? (Number)this.getValue() : null; + + public Number getAsNumber() { + return this.isNumber() ? (Number) this.getValue() : null; } - public boolean getAsBoolean () { + + public boolean getAsBoolean() { return !isNull(); } - - public static BaseJsonLikeArray asArray (BaseJsonLikeValue element) { + + public static BaseJsonLikeArray asArray(BaseJsonLikeValue element) { if (element != null) { return element.getAsArray(); } return null; } - public static BaseJsonLikeObject asObject (BaseJsonLikeValue element) { + + public static BaseJsonLikeObject asObject(BaseJsonLikeValue element) { if (element != null) { return element.getAsObject(); } return null; } - public static String asString (BaseJsonLikeValue element) { + + public static String asString(BaseJsonLikeValue element) { if (element != null) { return element.getAsString(); } return null; } - public static boolean asBoolean (BaseJsonLikeValue element) { + + public static boolean asBoolean(BaseJsonLikeValue element) { if (element != null) { return element.getAsBoolean(); } return false; } - - public static final BaseJsonLikeValue NULL = new BaseJsonLikeValue() { - @Override - public ValueType getJsonType() { - return ValueType.NULL; - } + public static final BaseJsonLikeValue NULL = new BaseJsonLikeValue() { + @Override + public ValueType getJsonType() { + return ValueType.NULL; + } - @Override + @Override public ScalarType getDataType() { return null; } @Override public Object getValue() { - return null; + return null; } @Override - public boolean equals (Object obj) { - if (this == obj){ - return true; - } - if (obj instanceof BaseJsonLikeValue) { - return getJsonType().equals(((BaseJsonLikeValue)obj).getJsonType()); - } - return false; - } + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj instanceof BaseJsonLikeValue) { + return getJsonType().equals(((BaseJsonLikeValue) obj).getJsonType()); + } + return false; + 
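The accessors above let callers inspect a parsed JSON-like value without knowing which JSON library backs it. A small defensive-navigation sketch using only the methods defined here:

static String describe(BaseJsonLikeValue value) {
    if (value == null || value.isNull()) {
        return "null";
    }
    if (value.isArray()) {
        return "array";
    }
    if (value.isObject()) {
        return "object";
    }
    if (value.isNumber()) {
        return "number " + value.getAsNumber();
    }
    return "scalar \"" + value.getAsString() + "\"";
}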
} - @Override - public int hashCode() { - return "null".hashCode(); - } + @Override + public int hashCode() { + return "null".hashCode(); + } - @Override - public String toString() { - return "null"; - } - }; + @Override + public String toString() { + return "null"; + } + }; - public static final BaseJsonLikeValue TRUE = new BaseJsonLikeValue() { - @Override - public ValueType getJsonType() { - return ValueType.SCALAR; - } - - @Override - public ScalarType getDataType() { - return ScalarType.BOOLEAN; - } + public static final BaseJsonLikeValue TRUE = new BaseJsonLikeValue() { + @Override + public ValueType getJsonType() { + return ValueType.SCALAR; + } - @Override - public Object getValue() { - return Boolean.TRUE; - } + @Override + public ScalarType getDataType() { + return ScalarType.BOOLEAN; + } - @Override - public boolean equals(Object obj) { - if (this == obj){ - return true; - } - if (obj instanceof BaseJsonLikeValue) { - return getJsonType().equals(((BaseJsonLikeValue)obj).getJsonType()) - && getDataType().equals(((BaseJsonLikeValue)obj).getDataType()) - && toString().equals(((BaseJsonLikeValue)obj).toString()); - } - return false; - } + @Override + public Object getValue() { + return Boolean.TRUE; + } - @Override - public int hashCode() { - return "true".hashCode(); - } + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj instanceof BaseJsonLikeValue) { + return getJsonType().equals(((BaseJsonLikeValue) obj).getJsonType()) + && getDataType().equals(((BaseJsonLikeValue) obj).getDataType()) + && toString().equals(((BaseJsonLikeValue) obj).toString()); + } + return false; + } - @Override - public String toString() { - return "true"; - } - }; + @Override + public int hashCode() { + return "true".hashCode(); + } - public static final BaseJsonLikeValue FALSE = new BaseJsonLikeValue() { - @Override - public ValueType getJsonType() { - return ValueType.SCALAR; - } - - @Override - public ScalarType getDataType() { - return ScalarType.BOOLEAN; - } + @Override + public String toString() { + return "true"; + } + }; - @Override - public Object getValue() { - return Boolean.FALSE; - } + public static final BaseJsonLikeValue FALSE = new BaseJsonLikeValue() { + @Override + public ValueType getJsonType() { + return ValueType.SCALAR; + } - @Override - public boolean equals(Object obj) { - if (this == obj){ - return true; - } - if (obj instanceof BaseJsonLikeValue) { - return getJsonType().equals(((BaseJsonLikeValue)obj).getJsonType()) - && getDataType().equals(((BaseJsonLikeValue)obj).getDataType()) - && toString().equals(((BaseJsonLikeValue)obj).toString()); - } - return false; - } + @Override + public ScalarType getDataType() { + return ScalarType.BOOLEAN; + } - @Override - public int hashCode() { - return "false".hashCode(); - } + @Override + public Object getValue() { + return Boolean.FALSE; + } - @Override - public String toString() { - return "false"; - } - }; + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj instanceof BaseJsonLikeValue) { + return getJsonType().equals(((BaseJsonLikeValue) obj).getJsonType()) + && getDataType().equals(((BaseJsonLikeValue) obj).getDataType()) + && toString().equals(((BaseJsonLikeValue) obj).toString()); + } + return false; + } + + @Override + public int hashCode() { + return "false".hashCode(); + } + + @Override + public String toString() { + return "false"; + } + }; } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/json/BaseJsonLikeWriter.java 
b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/json/BaseJsonLikeWriter.java index 4cb795914e9..f5099760ab5 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/json/BaseJsonLikeWriter.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/json/BaseJsonLikeWriter.java @@ -96,5 +96,4 @@ public abstract class BaseJsonLikeWriter { public abstract BaseJsonLikeWriter endArray() throws IOException; public abstract BaseJsonLikeWriter endBlock() throws IOException; - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/json/jackson/JacksonStructure.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/json/jackson/JacksonStructure.java index 7baefbfea30..5c3636a6582 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/json/jackson/JacksonStructure.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/json/jackson/JacksonStructure.java @@ -98,9 +98,13 @@ public class JacksonStructure implements JsonLikeStructure { pbr.unread(nextInt); break; } - throw new DataFormatException(Msg.code(1858) + "Content does not appear to be FHIR JSON, first non-whitespace character was: '" + (char) nextInt + "' (must be '{' or '[')"); + throw new DataFormatException(Msg.code(1858) + + "Content does not appear to be FHIR JSON, first non-whitespace character was: '" + + (char) nextInt + "' (must be '{' or '[')"); } - throw new DataFormatException(Msg.code(1859) + "Content does not appear to be FHIR JSON, first non-whitespace character was: '" + (char) nextInt + "' (must be '{')"); + throw new DataFormatException(Msg.code(1859) + + "Content does not appear to be FHIR JSON, first non-whitespace character was: '" + + (char) nextInt + "' (must be '{')"); } if (nextInt == '{') { @@ -110,10 +114,13 @@ public class JacksonStructure implements JsonLikeStructure { } } catch (Exception e) { if (e.getMessage().startsWith("Unexpected char 39")) { - throw new DataFormatException(Msg.code(1860) + "Failed to parse JSON encoded FHIR content: " + e.getMessage() + " - " + - "This may indicate that single quotes are being used as JSON escapes where double quotes are required", e); + throw new DataFormatException( + Msg.code(1860) + "Failed to parse JSON encoded FHIR content: " + e.getMessage() + " - " + + "This may indicate that single quotes are being used as JSON escapes where double quotes are required", + e); } - throw new DataFormatException(Msg.code(1861) + "Failed to parse JSON encoded FHIR content: " + e.getMessage(), e); + throw new DataFormatException( + Msg.code(1861) + "Failed to parse JSON encoded FHIR content: " + e.getMessage(), e); } } @@ -147,7 +154,10 @@ public class JacksonStructure implements JsonLikeStructure { throw new DataFormatException(Msg.code(1862) + "Content must be a valid JSON Object. 
It must start with '{'."); } - private enum ROOT_TYPE {OBJECT, ARRAY} + private enum ROOT_TYPE { + OBJECT, + ARRAY + } private static class JacksonJsonObject extends BaseJsonLikeObject { private final ObjectNode nativeObject; @@ -370,10 +380,7 @@ public class JacksonStructure implements JsonLikeStructure { } private static ObjectMapper createObjectMapper() { - ObjectMapper retVal = - JsonMapper - .builder() - .build(); + ObjectMapper retVal = JsonMapper.builder().build(); retVal = retVal.setNodeFactory(new JsonNodeFactory(true)); retVal = retVal.enable(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS); retVal = retVal.enable(DeserializationFeature.FAIL_ON_TRAILING_TOKENS); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/json/jackson/JacksonWriter.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/json/jackson/JacksonWriter.java index e43b12a0b25..d0101d0af7c 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/json/jackson/JacksonWriter.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/json/jackson/JacksonWriter.java @@ -40,8 +40,7 @@ public class JacksonWriter extends BaseJsonLikeWriter { setWriter(theWriter); } - public JacksonWriter() { - } + public JacksonWriter() {} @Override public BaseJsonLikeWriter init() { @@ -68,7 +67,6 @@ public class JacksonWriter extends BaseJsonLikeWriter { _objectFieldValueSeparatorWithSpaces = separators.getObjectFieldValueSeparator() + " "; return this; } - }; prettyPrinter = prettyPrinter.withObjectIndenter(new DefaultIndenter(" ", "\n")); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/path/EncodeContextPath.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/path/EncodeContextPath.java index 0174bdf8718..caed61a2065 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/path/EncodeContextPath.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/path/EncodeContextPath.java @@ -155,6 +155,4 @@ public class EncodeContextPath { EncodeContextPath parsedPath = new EncodeContextPath(thePath); return getPath().equals(parsedPath.getPath()); } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/path/EncodeContextPathElement.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/path/EncodeContextPathElement.java index fee8178e344..d1ae0abb7d1 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/path/EncodeContextPathElement.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/parser/path/EncodeContextPathElement.java @@ -24,79 +24,75 @@ import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; public class EncodeContextPathElement { - private final String myName; - private final boolean myResource; + private final String myName; + private final boolean myResource; - public EncodeContextPathElement(String theName, boolean theResource) { - Validate.notBlank(theName); - myName = theName; - myResource = theResource; - } + public EncodeContextPathElement(String theName, boolean theResource) { + Validate.notBlank(theName); + myName = theName; + myResource = theResource; + } + public boolean matches(EncodeContextPathElement theOther) { + if (myResource != theOther.isResource()) { + return false; + } + String otherName = theOther.getName(); + if (myName.equals(otherName)) { + return true; + } + /* + * This is here to handle situations where a path like + * Observation.valueQuantity has been specified as an include/exclude path, + * since we only know that path as + * Observation.value + * until we get to actually looking at the values 
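createObjectMapper() above enables exact-decimal handling and trailing-token detection, which matters for FHIR decimals such as 0.10 whose precision is significant. The same configuration as a standalone sketch (standard Jackson API):

import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.json.JsonMapper;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;

ObjectMapper mapper = JsonMapper.builder().build()
        .setNodeFactory(new JsonNodeFactory(true))                 // exact BigDecimal nodes
        .enable(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS) // keep 0.10, never round-trip through double
        .enable(DeserializationFeature.FAIL_ON_TRAILING_TOKENS);   // reject "{...}trailing garbage"

// mapper.readTree("{\"value\": 0.10}").get("value").decimalValue() -> 0.10 (readTree throws IOException on bad input)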
there. + */ + if (myName.length() > otherName.length() && myName.startsWith(otherName)) { + char ch = myName.charAt(otherName.length()); + if (Character.isUpperCase(ch)) { + return true; + } + } + return myName.equals("*") || otherName.equals("*"); + } - public boolean matches(EncodeContextPathElement theOther) { - if (myResource != theOther.isResource()) { - return false; - } - String otherName = theOther.getName(); - if (myName.equals(otherName)) { - return true; - } - /* - * This is here to handle situations where a path like - * Observation.valueQuantity has been specified as an include/exclude path, - * since we only know that path as - * Observation.value - * until we get to actually looking at the values there. - */ - if (myName.length() > otherName.length() && myName.startsWith(otherName)) { - char ch = myName.charAt(otherName.length()); - if (Character.isUpperCase(ch)) { - return true; - } - } - return myName.equals("*") || otherName.equals("*"); - } + @Override + public boolean equals(Object theO) { + if (this == theO) { + return true; + } - @Override - public boolean equals(Object theO) { - if (this == theO) { - return true; - } + if (theO == null || getClass() != theO.getClass()) { + return false; + } - if (theO == null || getClass() != theO.getClass()) { - return false; - } + EncodeContextPathElement that = (EncodeContextPathElement) theO; - EncodeContextPathElement that = (EncodeContextPathElement) theO; + return new EqualsBuilder() + .append(myResource, that.myResource) + .append(myName, that.myName) + .isEquals(); + } - return new EqualsBuilder() - .append(myResource, that.myResource) - .append(myName, that.myName) - .isEquals(); - } + @Override + public int hashCode() { + return new HashCodeBuilder(17, 37).append(myName).append(myResource).toHashCode(); + } - @Override - public int hashCode() { - return new HashCodeBuilder(17, 37) - .append(myName) - .append(myResource) - .toHashCode(); - } + @Override + public String toString() { + if (myResource) { + return myName + "(res)"; + } + return myName; + } - @Override - public String toString() { - if (myResource) { - return myName + "(res)"; - } - return myName; - } + public String getName() { + return myName; + } - public String getName() { - return myName; - } - - public boolean isResource() { - return myResource; - } + public boolean isResource() { + return myResource; + } } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/AddTags.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/AddTags.java index 61a0229bcbc..e92b00a0012 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/AddTags.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/AddTags.java @@ -19,17 +19,16 @@ */ package ca.uhn.fhir.rest.annotation; +import ca.uhn.fhir.model.api.IResource; +import ca.uhn.fhir.model.api.TagList; +import ca.uhn.fhir.model.primitive.IdDt; +import org.hl7.fhir.instance.model.api.IBaseResource; + import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import org.hl7.fhir.instance.model.api.IBaseResource; - -import ca.uhn.fhir.model.api.IResource; -import ca.uhn.fhir.model.api.TagList; -import ca.uhn.fhir.model.primitive.IdDt; - /** * RESTful method annotation to be used for the FHIR Tag @@ -39,7 +38,7 @@ import ca.uhn.fhir.model.primitive.IdDt; * To add tag(s) to the given resource * instance, this annotation should contain a {@link #type()} attribute * specifying the 
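matches() above lets an include/exclude path recorded as a choice stem (Observation.value) match the concrete element seen at encode time (valueQuantity), while rejecting names that merely share a lower-case prefix. The check in isolation:

static boolean choiceMatches(String actualName, String configuredName) {
    if (actualName.equals(configuredName)) {
        return true;
    }
    if (actualName.length() > configuredName.length()
            && actualName.startsWith(configuredName)
            && Character.isUpperCase(actualName.charAt(configuredName.length()))) {
        return true; // e.g. "valueQuantity" against the stem "value"
    }
    return actualName.equals("*") || configuredName.equals("*");
}
// choiceMatches("valueQuantity", "value") -> true
// choiceMatches("valueset", "value")      -> false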
resource type, and the method should have a parameter of type - * {@link IdDt} annotated with the {@link IdParam} annotation, as well as + * {@link IdDt} annotated with the {@link IdParam} annotation, as well as * a parameter of type {@link TagList}. Note that this {@link TagList} parameter * does not need to contain a complete list of tags for the resource, only a list * of tags to be added. Server implementations must not remove tags based on this @@ -55,7 +54,7 @@ import ca.uhn.fhir.model.primitive.IdDt; * attribute specifying the resource type, and the method should have a * parameter of type {@link IdDt} annotated with the {@link VersionIdParam} * annotation, and a parameter of type {@link IdDt} annotated with the - * {@link IdParam} annotation, as well as + * {@link IdParam} annotation, as well as * a parameter of type {@link TagList}. Note that this {@link TagList} parameter * does not need to contain a complete list of tags for the resource, only a list * of tags to be added. Server implementations must not remove tags based on this @@ -66,8 +65,8 @@ import ca.uhn.fhir.model.primitive.IdDt; * >resource provider, since the type is implied. * */ -@Target(value= ElementType.METHOD) -@Retention(value=RetentionPolicy.RUNTIME) +@Target(value = ElementType.METHOD) +@Retention(value = RetentionPolicy.RUNTIME) public @interface AddTags { /** diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/At.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/At.java index ce660ca3ff5..0d7d3e57d2c 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/At.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/At.java @@ -19,24 +19,24 @@ */ package ca.uhn.fhir.rest.annotation; +import ca.uhn.fhir.rest.param.DateParam; +import ca.uhn.fhir.rest.param.DateRangeParam; + import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import ca.uhn.fhir.rest.param.DateParam; -import ca.uhn.fhir.rest.param.DateRangeParam; - /** - * Parameter annotation for the _at parameter, which indicates to the - * server that only results dated since the given instant will be returned. + * Parameter annotation for the _at parameter, which indicates to the + * server that only results dated since the given instant will be returned. *

    - * Parameters with this annotation should be of type {@link DateParam} or {@link DateRangeParam} + * Parameters with this annotation should be of type {@link DateParam} or {@link DateRangeParam} *
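// A minimal usage sketch (not part of this commit): a server-level history method that
// accepts the _at window through @At. Assumes an enclosing plain provider and the usual
// ca.uhn.fhir.rest.annotation / ca.uhn.fhir.rest.param imports; the body is a stub.
@History
public List<IBaseResource> getServerHistory(@At DateRangeParam theAt) {
    // theAt may be null when the client did not supply _at; honour its bounds when present
    return new ArrayList<>();
}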

    * @see History */ -@Target(value=ElementType.PARAMETER) +@Target(value = ElementType.PARAMETER) @Retention(RetentionPolicy.RUNTIME) public @interface At { - //nothing + // nothing } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/ConditionalUrlParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/ConditionalUrlParam.java index 913d838ff86..776edc74abe 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/ConditionalUrlParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/ConditionalUrlParam.java @@ -26,7 +26,7 @@ import java.lang.annotation.Target; /** * On the {@link Update}, {@link Create} and {@link Delete} operation methods, this annotation - * can be used to mark a {@link String} parameter which will be populated with the + * can be used to mark a {@link String} parameter which will be populated with the * conditional "search" URL for the operation, if an incoming client invocation is * a conditional operation. For non-conditional invocations, the value will be set to * null so it is important to handle null. @@ -45,8 +45,7 @@ public @interface ConditionalUrlParam { *

    * Note that this flag is only a hint to the Conformance statement generator, * it does not actually affect how the server itself works. - *

    + *
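// A minimal usage sketch (not part of this commit): an update method that receives the
// conditional "search" URL when the client performs a conditional update. Assumes R4
// structures (org.hl7.fhir.r4.model) and an enclosing Patient provider; names are illustrative.
@Update
public MethodOutcome update(
        @IdParam IdType theId,
        @ResourceParam Patient thePatient,
        @ConditionalUrlParam String theConditionalUrl) {
    if (theConditionalUrl != null) {
        // e.g. "Patient?identifier=http://acme.org/mrn|12345" - resolve the criteria
        // to at most one existing resource before applying the update
    }
    MethodOutcome outcome = new MethodOutcome();
    // populate outcome.setId(...) with the id/version of the updated resource
    return outcome;
}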

    */ boolean supportsMultiple() default false; - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Count.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Count.java index 7d3aa867ae9..69f0a7abb14 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Count.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Count.java @@ -25,13 +25,13 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** - * Parameter annotation for the _count parameter, which indicates to the - * server the maximum number of desired results. - * + * Parameter annotation for the _count parameter, which indicates to the + * server the maximum number of desired results. + * * @see History */ -@Target(value=ElementType.PARAMETER) +@Target(value = ElementType.PARAMETER) @Retention(RetentionPolicy.RUNTIME) public @interface Count { - //nothing + // nothing } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Create.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Create.java index 1984f00c0a8..62bc77cdf28 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Create.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Create.java @@ -19,17 +19,17 @@ */ package ca.uhn.fhir.rest.annotation; +import org.hl7.fhir.instance.model.api.IBaseResource; + import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; -import org.hl7.fhir.instance.model.api.IBaseResource; - @Retention(RetentionPolicy.RUNTIME) /** * RESTful method annotation to be used for the FHIR * create method. - * + * *

    * Create is used to save a new resource, allowing the server to assign a new ID and version ID. *
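// A minimal usage sketch (not part of this commit): a create method that stores the posted
// resource and reports the server-assigned id back to the client. Assumes R4 structures;
// the persistence call is omitted.
@Create
public MethodOutcome create(@ResourceParam Patient thePatient) {
    // ... persist thePatient and obtain its newly assigned id ...
    MethodOutcome outcome = new MethodOutcome();
    outcome.setCreated(true);
    outcome.setId(new IdType("Patient/123")); // "Patient/123" stands in for the assigned id
    return outcome;
}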

    @@ -40,12 +40,11 @@ public @interface Create { * The return type for this search method. This generally does not need * to be populated for a server implementation, since servers will return * only one resource per class, but generally does need to be populated - * for client implementations. + * for client implementations. */ // NB: Read, Search (maybe others) share this annotation, so update the javadocs everywhere Class type() default IBaseResource.class; - /** * This method allows the return type for this method to be specified in a * non-type-specific way, using the text name of the resource, e.g. "Patient". diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Delete.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Delete.java index c3a57e638aa..afc84db1b30 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Delete.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Delete.java @@ -19,31 +19,31 @@ */ package ca.uhn.fhir.rest.annotation; +import org.hl7.fhir.instance.model.api.IBaseResource; + import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import org.hl7.fhir.instance.model.api.IBaseResource; - /** * RESTful method annotation to be used for the FHIR * delete method. - * + * *

    * Delete is used to remove an existing resource, meaning that any attempts to * do a non-version-specific read of that resource will fail. *
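// A minimal usage sketch (not part of this commit): a delete method keyed on the resource id.
// Assumes R4 structures; a real implementation would typically throw
// ResourceNotFoundException when the id is unknown.
@Delete
public MethodOutcome delete(@IdParam IdType theId) {
    // ... remove the resource so that later non-version-specific reads fail ...
    return new MethodOutcome();
}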

    */ @Retention(RetentionPolicy.RUNTIME) -@Target(value=ElementType.METHOD) +@Target(value = ElementType.METHOD) public @interface Delete { - + /** * The return type for this search method. This generally does not need * to be populated for a server implementation, since servers will return * only one resource per class, but generally does need to be populated - * for client implementations. + * for client implementations. */ // NB: Read, Search (maybe others) share this annotation, so update the javadocs everywhere Class type() default IBaseResource.class; @@ -57,5 +57,4 @@ public @interface Delete { * @since 5.4.0 */ String typeName() default ""; - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/DeleteTags.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/DeleteTags.java index ffaa0845f97..ba6ed10c30b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/DeleteTags.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/DeleteTags.java @@ -19,17 +19,16 @@ */ package ca.uhn.fhir.rest.annotation; +import ca.uhn.fhir.model.api.IResource; +import ca.uhn.fhir.model.api.TagList; +import ca.uhn.fhir.model.primitive.IdDt; +import org.hl7.fhir.instance.model.api.IBaseResource; + import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import org.hl7.fhir.instance.model.api.IBaseResource; - -import ca.uhn.fhir.model.api.IResource; -import ca.uhn.fhir.model.api.TagList; -import ca.uhn.fhir.model.primitive.IdDt; - /** * RESTful method annotation to be used for the FHIR Tag @@ -39,7 +38,7 @@ import ca.uhn.fhir.model.primitive.IdDt; * To delete tag(s) to the given resource * instance, this annotation should contain a {@link #type()} attribute * specifying the resource type, and the method should have a parameter of type - * {@link IdDt} annotated with the {@link IdParam} annotation, as well as + * {@link IdDt} annotated with the {@link IdParam} annotation, as well as * a parameter of type {@link TagList} which will contain the list of tags * to be deleted. * Note that for a @@ -53,7 +52,7 @@ import ca.uhn.fhir.model.primitive.IdDt; * attribute specifying the resource type, and the method should have a * parameter of type {@link IdDt} annotated with the {@link VersionIdParam} * annotation, and a parameter of type {@link IdDt} annotated with the - * {@link IdParam} annotation, as well as + * {@link IdParam} annotation, as well as * a parameter of type {@link TagList} which will contain the list of tags * to be deleted. * Note that for a server implementation, the @@ -62,8 +61,8 @@ import ca.uhn.fhir.model.primitive.IdDt; * >resource provider, since the type is implied. 
* */ -@Target(value= ElementType.METHOD) -@Retention(value=RetentionPolicy.RUNTIME) +@Target(value = ElementType.METHOD) +@Retention(value = RetentionPolicy.RUNTIME) public @interface DeleteTags { /** @@ -82,5 +81,4 @@ public @interface DeleteTags { * @since 5.4.0 */ String typeName() default ""; - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Destroy.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Destroy.java index a21a60a7f9f..d41bbe9caa4 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Destroy.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Destroy.java @@ -30,5 +30,4 @@ import java.lang.annotation.Target; */ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.METHOD) -public @interface Destroy { -} +public @interface Destroy {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Elements.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Elements.java index 44706ed5ab9..130ba429b3e 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Elements.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Elements.java @@ -27,7 +27,7 @@ import java.lang.annotation.Target; /** * On a method which returns resource(s), a parameter of type * Set<String> with this annotation will be passed the - * contents of the _elements parameter + * contents of the _elements parameter */ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.PARAMETER) diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/GetPage.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/GetPage.java index 893a7940ef4..af72766ae60 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/GetPage.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/GetPage.java @@ -27,13 +27,13 @@ import java.lang.annotation.Target; /** * INTERNAL API (do not use): REST method annotation for the method called when a client requests a page. *

    - * This annotation is currently intended as an internal part of HAPI's API. At some point we + * This annotation is currently intended as an internal part of HAPI's API. At some point we * will hopefully provide a way to create alternate implementations of the GetPage method. If * you would like to help out or have ideas, please get in touch! *

    */ -@Target(value= ElementType.METHOD) -@Retention(value=RetentionPolicy.RUNTIME) +@Target(value = ElementType.METHOD) +@Retention(value = RetentionPolicy.RUNTIME) public @interface GetPage { // nothing } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/GraphQL.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/GraphQL.java index 1e0fb7fe401..3e27f09ddc1 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/GraphQL.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/GraphQL.java @@ -20,7 +20,6 @@ package ca.uhn.fhir.rest.annotation; import ca.uhn.fhir.rest.api.RequestTypeEnum; -import org.hl7.fhir.instance.model.api.IBaseResource; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; @@ -32,7 +31,7 @@ import java.lang.annotation.Target; * method */ @Retention(RetentionPolicy.RUNTIME) -@Target(value= ElementType.METHOD) +@Target(value = ElementType.METHOD) public @interface GraphQL { RequestTypeEnum type() default RequestTypeEnum.GET; } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/History.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/History.java index 02eb3a77174..66120809dad 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/History.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/History.java @@ -19,23 +19,21 @@ */ package ca.uhn.fhir.rest.annotation; +import ca.uhn.fhir.model.api.IResource; +import ca.uhn.fhir.model.primitive.IdDt; +import org.hl7.fhir.instance.model.api.IBaseResource; + import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import org.hl7.fhir.instance.model.api.IBaseResource; - -import ca.uhn.fhir.model.api.IResource; -import ca.uhn.fhir.model.primitive.IdDt; -//import ca.uhn.fhir.testmodel.Patient; // TODO: qualify this correctly - /** * RESTful method annotation to be used for the FHIR * history method. - * + * *

    - * History returns a feed containing all versions (or a selected range of versions) of + * History returns a feed containing all versions (or a selected range of versions) of * a resource or a specific set of resources. *

    *

    @@ -44,39 +42,39 @@ import ca.uhn.fhir.model.primitive.IdDt; *

    *
      *
    • - * A search for the history of all resources on a server. In this case, {@link #type()} + * A search for the history of all resources on a server. In this case, {@link #type()} * should be set to {@link IResource} (as is the default) and the method should not have an ID parameter. *
      • * To invoke this pattern: GET [base]/_history{?[parameters]&_format=[mime-type]} *
      *
    • *
    • - * A search for the history of all instances of a specific resource type on a server. In this case, {@link #type()} + * A search for the history of all instances of a specific resource type on a server. In this case, {@link #type()} * should be set to the specific resource type (e.g. Patient.class) and the method should not have an ID parameter. *
      • * To invoke this pattern: GET [base]/[type]/_history{?[parameters]&_format=[mime-type]} *
      *
    • *
    • - * A search for the history of a specific instance of a specific resource type on a server. In this case, {@link #type()} - * should be set to the specific resource type (e.g. Patient.class) and the method should - * have one parameter of type {@link IdDt} annotated with the {@link IdParam} annotation. + * A search for the history of a specific instance of a specific resource type on a server. In this case, {@link #type()} + * should be set to the specific resource type (e.g. Patient.class) and the method should + * have one parameter of type {@link IdDt} annotated with the {@link IdParam} annotation. *
      • * To invoke this pattern: GET [base]/[type]/[id]/_history{?[parameters]&_format=[mime-type]} *
      *
    • *
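// A minimal usage sketch (not part of this commit) of the third pattern above: instance-level
// history for one Patient, also accepting _since and _count. Assumes R4 structures and an
// enclosing provider; the body is a stub.
@History(type = Patient.class)
public List<Patient> getInstanceHistory(
        @IdParam IdType theId, @Since DateRangeParam theSince, @Count Integer theCount) {
    // return every stored version of Patient/[id] that matches theSince, newest first
    return new ArrayList<>();
}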
    - * + * * @see Count * @see Since */ @Retention(RetentionPolicy.RUNTIME) -@Target(value=ElementType.METHOD) +@Target(value = ElementType.METHOD) public @interface History { - + /** * The resource type that this method applies to. See the {@link History History annotation type documentation} - * for information on usage patterns. + * for information on usage patterns. */ Class type() default IBaseResource.class; @@ -89,5 +87,4 @@ public @interface History { * @since 5.4.0 */ String typeName() default ""; - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/IdParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/IdParam.java index ad671af0a4a..2bcbdf9e436 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/IdParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/IdParam.java @@ -27,11 +27,10 @@ import java.lang.annotation.Target; @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.PARAMETER) public @interface IdParam { - + /** * For {@link Operation extended operations}, any parameter with this value set to true * (default is false) will also be invoked if the operation is invoked against the resource type. */ boolean optional() default false; - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/IncludeParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/IncludeParam.java index 80a555c5082..9b49b868f0b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/IncludeParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/IncludeParam.java @@ -19,35 +19,35 @@ */ package ca.uhn.fhir.rest.annotation; +import ca.uhn.fhir.model.api.Include; + import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import ca.uhn.fhir.model.api.Include; - /** * Method parameter which is used to indicate a parameter that will * be populated with the "_include" (or "_revinclude") values for a search param. * The parameter annotated with this annotation is used for either "_include" * or "_revinclude", depending on whether {@link #reverse()} has been * set to true (default is false). - * + * *

    * Only up to two parameters may be annotated with this annotation (one each * for reverse=false and reverse=true). That * parameter should be one of the following: - *

    + *

    *
      - *
    • Collection<Include>
    • - *
    • List<Include>
    • - *
    • Set<Include>
    • + *
    • Collection<Include>
    • + *
    • List<Include>
    • + *
    • Set<Include>
    • *
    - * + * * @see Include */ @Retention(RetentionPolicy.RUNTIME) -@Target(value= {ElementType.PARAMETER}) +@Target(value = {ElementType.PARAMETER}) public @interface IncludeParam { /** @@ -58,11 +58,11 @@ public @interface IncludeParam { * Values for this parameter take the form that the FHIR specification * defines for _include values, namely [Resource Name].[path]. * For example: "Patient.link.other" - * or "Encounter.partOf" + * or "Encounter.partOf" *

    *

    * You may also pass in a value of "*" which indicates that the - * client may request _include=*. This is a request to + * include all referenced resources as well as any resources referenced * by those resources, etc. *

    @@ -70,18 +70,17 @@ public @interface IncludeParam { * Leave this parameter empty if you do not want the server to declare or * restrict which includes are allowable. In this case, the client may add * any _include value they want, and that value will be accepted by the server - * and passed to the handling method. Note that this means that the server + * and passed to the handling method. Note that this means that the server * will not declare which _include values it supports in its conformance * statement. - *

    + *
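// A minimal usage sketch (not part of this commit): a search method that declares which
// _include values it accepts. Assumes R4 structures; "subject" and the allow list are
// illustrative only.
@Search
public List<DiagnosticReport> findReports(
        @RequiredParam(name = "subject") ReferenceParam theSubject,
        @IncludeParam(allow = {"DiagnosticReport.subject", "DiagnosticReport.result"})
                Set<Include> theIncludes) {
    // theIncludes holds the _include values the client actually asked for
    return new ArrayList<>();
}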

    */ String[] allow() default {}; - + /** * If set to true (default is false), the values * for this parameter correspond to the _revinclude parameter * instead of the _include parameter. */ boolean reverse() default false; - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Initialize.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Initialize.java index 0ae3d782caf..d4c99b225bd 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Initialize.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Initialize.java @@ -30,5 +30,4 @@ import java.lang.annotation.Target; */ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.METHOD) -public @interface Initialize { -} +public @interface Initialize {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Metadata.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Metadata.java index a0eeb398ea4..b02fa1dd051 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Metadata.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Metadata.java @@ -24,18 +24,16 @@ import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; - /** * RESTful method annotation used for a method which provides * the FHIR "conformance" method. - * + * * See the FHIR HTTP Conformance definition * fore more information. */ @Retention(RetentionPolicy.RUNTIME) -@Target(value=ElementType.METHOD) +@Target(value = ElementType.METHOD) public @interface Metadata { long cacheMillis() default 60 * 1000L; - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Offset.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Offset.java index 09c0f262ed6..ec851bd8b6b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Offset.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Offset.java @@ -27,11 +27,11 @@ import java.lang.annotation.Target; /** * Parameter annotation for the _offset parameter, which indicates to the * server the offset of desired results. - * + * * @see History */ -@Target(value=ElementType.PARAMETER) +@Target(value = ElementType.PARAMETER) @Retention(RetentionPolicy.RUNTIME) public @interface Offset { - //nothing + // nothing } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Operation.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Operation.java index 7d8fe7d0d05..83907544fc5 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Operation.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Operation.java @@ -158,5 +158,4 @@ public @interface Operation { *

    */ String canonicalUrl() default ""; - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/OperationParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/OperationParam.java index ee4956089fe..5276d46fb88 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/OperationParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/OperationParam.java @@ -19,20 +19,19 @@ */ package ca.uhn.fhir.rest.annotation; +import ca.uhn.fhir.model.primitive.StringDt; +import ca.uhn.fhir.rest.param.StringParam; +import org.hl7.fhir.instance.model.api.IBase; + import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import org.hl7.fhir.instance.model.api.IBase; - -import ca.uhn.fhir.model.primitive.StringDt; -import ca.uhn.fhir.rest.param.StringParam; - /** */ @Retention(RetentionPolicy.RUNTIME) -@Target(value=ElementType.PARAMETER) +@Target(value = ElementType.PARAMETER) public @interface OperationParam { /** @@ -40,12 +39,11 @@ public @interface OperationParam { */ int MAX_UNLIMITED = -1; - /** - * Value for {@link OperationParam#max()} indicating that the maximum will be inferred + * Value for {@link OperationParam#max()} indicating that the maximum will be inferred * from the type. If the type is a single parameter type (e.g. StringDt, * TokenParam, IBaseResource) the maximum will be - * 1. + * 1. *

    * If the type is a collection, e.g. * List<StringDt> or List<TokenOrListParam> @@ -53,16 +51,16 @@ public @interface OperationParam { * "and" type, such as TokenAndListParam the maximum will also be * set to * *
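// A minimal usage sketch (not part of this commit): an extended operation whose "codes"
// parameter repeats without limit while "since" is capped at one value. The operation name
// "$summarize" and both parameter names are invented for illustration; assumes R4 structures.
@Operation(name = "$summarize", idempotent = true)
public Bundle summarize(
        @IdParam IdType theId,
        @OperationParam(name = "codes", max = OperationParam.MAX_UNLIMITED) List<CodeType> theCodes,
        @OperationParam(name = "since", max = 1) DateTimeType theSince) {
    // build and return a Bundle summarising the requested codes
    return new Bundle();
}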

    - * + * * @since 1.5 */ int MAX_DEFAULT = -2; - + /** * The name of the parameter */ String name(); - + /** * The type of the parameter. This will only have effect on @OperationParam * annotations specified as values for {@link Operation#returnParameters()}, otherwise the @@ -82,7 +80,7 @@ public @interface OperationParam { * {@link @org.hl7.fhir.instance.model.api.ICompositeType}. */ String typeName() default ""; - + /** * The minimum number of repetitions allowed for this child (default is 0) */ @@ -95,6 +93,4 @@ public @interface OperationParam { * behaviour. */ int max() default MAX_DEFAULT; - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/OptionalParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/OptionalParam.java index 6ea9a0bd87c..3a0d0bfe561 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/OptionalParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/OptionalParam.java @@ -19,23 +19,21 @@ */ package ca.uhn.fhir.rest.annotation; -import java.lang.annotation.ElementType; - -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -import org.hl7.fhir.instance.model.api.IBaseResource; - import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.rest.param.CompositeParam; import ca.uhn.fhir.rest.param.ReferenceParam; +import org.hl7.fhir.instance.model.api.IBaseResource; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; /** * Parameter annotation which specifies a search parameter for a {@link Search} method. */ @Retention(RetentionPolicy.RUNTIME) -@Target(value=ElementType.PARAMETER) +@Target(value = ElementType.PARAMETER) public @interface OptionalParam { public static final String ALLOW_CHAIN_ANY = "*"; @@ -79,7 +77,7 @@ public @interface OptionalParam { * this value must not be populated. *

    */ - String[] chainWhitelist() default { ALLOW_CHAIN_ANY }; + String[] chainWhitelist() default {ALLOW_CHAIN_ANY}; /** * For composite parameters ({@link CompositeParam}) this value may be diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/PageIdParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/PageIdParam.java index d6e5e94f6ca..cb8ad4aa2fd 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/PageIdParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/PageIdParam.java @@ -26,7 +26,7 @@ import java.lang.annotation.Target; /** * Parameter annotation which specifies the parameter to receive the ID of the page - * being requested. Parameter must be of type {@link String} + * being requested. Parameter must be of type {@link String} */ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.PARAMETER) diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Patch.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Patch.java index 23a2ae6addb..d365388d121 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Patch.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Patch.java @@ -19,25 +19,24 @@ */ package ca.uhn.fhir.rest.annotation; -import java.lang.annotation.ElementType; +import org.hl7.fhir.instance.model.api.IBaseResource; +import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import org.hl7.fhir.instance.model.api.IBaseResource; - /** * RESTful method annotation to be used for the proposed FHIR * PATCH method - * + * *

    * Patch is used to apply a differential to a resource in either * XML or JSON format *
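// A minimal usage sketch (not part of this commit): a patch method. The raw patch document
// can be received as a @ResourceParam String, with its format reported via PatchTypeEnum.
// Assumes R4 structures; the patch application itself is omitted.
@Patch
public OperationOutcome patch(
        @IdParam IdType theId, PatchTypeEnum thePatchType, @ResourceParam String theBody) {
    if (thePatchType == PatchTypeEnum.JSON_PATCH) {
        // theBody contains the JSON Patch document to apply to Patient/[id]
    }
    return new OperationOutcome();
}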

    */ @Retention(RetentionPolicy.RUNTIME) -@Target({ ElementType.METHOD }) +@Target({ElementType.METHOD}) public @interface Patch { /** diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/RawParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/RawParam.java index 19b13783f75..34ea3e6e7f4 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/RawParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/RawParam.java @@ -25,15 +25,15 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** - * On a {@link Search} method, a parameter marked with this annotation - * will receive search parameters not captured by other parameters. + * On a {@link Search} method, a parameter marked with this annotation + * will receive search parameters not captured by other parameters. *

    * Parameters with this annotation must be of type * {@code Map>} *

    */ @Retention(RetentionPolicy.RUNTIME) -@Target(value=ElementType.PARAMETER) +@Target(value = ElementType.PARAMETER) public @interface RawParam { // nothing } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Read.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Read.java index 53dfd7c1f03..4276229f630 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Read.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Read.java @@ -19,17 +19,16 @@ */ package ca.uhn.fhir.rest.annotation; -import java.lang.annotation.*; - -import org.hl7.fhir.instance.model.api.IBaseResource; - import ca.uhn.fhir.rest.client.api.IBasicClient; import ca.uhn.fhir.rest.client.api.IRestfulClient; +import org.hl7.fhir.instance.model.api.IBaseResource; + +import java.lang.annotation.*; /** * RESTful method annotation to be used for the FHIR read and vread method. - * + * *

    * If this method has a parameter annotated with the {@link IdParam} annotation and a parameter annotated with the {@link VersionIdParam} annotation, the method will be treated as a vread method. If * the method has only a parameter annotated with the {@link IdParam} annotation, it will be treated as a read operation. diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/RequiredParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/RequiredParam.java index e36a63f937e..305e44dcd52 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/RequiredParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/RequiredParam.java @@ -19,23 +19,21 @@ */ package ca.uhn.fhir.rest.annotation; -import java.lang.annotation.ElementType; - -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -import org.hl7.fhir.instance.model.api.IBaseResource; - import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.rest.param.CompositeParam; import ca.uhn.fhir.rest.param.ReferenceParam; +import org.hl7.fhir.instance.model.api.IBaseResource; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; /** * Parameter annotation which specifies a search parameter for a {@link Search} method. */ @Retention(RetentionPolicy.RUNTIME) -@Target(value=ElementType.PARAMETER) +@Target(value = ElementType.PARAMETER) public @interface RequiredParam { /** @@ -72,7 +70,7 @@ public @interface RequiredParam { * this value must not be populated. *

    */ - String[] chainWhitelist() default { OptionalParam.ALLOW_CHAIN_ANY }; + String[] chainWhitelist() default {OptionalParam.ALLOW_CHAIN_ANY}; /** * For composite parameters ({@link CompositeParam}) this parameter may be used to indicate the parameter type(s) which may be referenced by this param. @@ -101,5 +99,4 @@ public @interface RequiredParam { *

    */ Class[] targetTypes() default {}; - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/ResourceParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/ResourceParam.java index 1b05ee5b72b..7d61535e4fc 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/ResourceParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/ResourceParam.java @@ -50,6 +50,4 @@ import java.lang.annotation.Target; */ @Target(value = ElementType.PARAMETER) @Retention(RetentionPolicy.RUNTIME) -public @interface ResourceParam { - -} +public @interface ResourceParam {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Search.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Search.java index b47ce6237d0..76c672f4771 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Search.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Search.java @@ -19,33 +19,31 @@ */ package ca.uhn.fhir.rest.annotation; +import ca.uhn.fhir.rest.client.api.IBasicClient; +import ca.uhn.fhir.rest.client.api.IRestfulClient; +import org.hl7.fhir.instance.model.api.IBaseResource; + import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import org.hl7.fhir.instance.model.api.IBaseResource; - -import ca.uhn.fhir.rest.client.api.IBasicClient; -import ca.uhn.fhir.rest.client.api.IRestfulClient; - - /** * RESTful method annotation used for a method which provides * the FHIR "search" method. - * + * * See the FHIR Search definition * for more information. */ @Retention(RetentionPolicy.RUNTIME) -@Target(value=ElementType.METHOD) +@Target(value = ElementType.METHOD) public @interface Search { - + /** * If specified, this the name for the Named Query - * + * *

    - * See the FHIR specification section on + * See the FHIR specification section on * named queries *

    */ @@ -53,9 +51,9 @@ public @interface Search { /** * If specified, this the name for the compartment - * + * *

    - * See the FHIR specification section on + * See the FHIR specification section on * compartments *

    */ @@ -63,15 +61,15 @@ public @interface Search { /** * The return type for this method. This generally does not need - * to be populated for IResourceProvider instances in a server implementation, + * to be populated for IResourceProvider instances in a server implementation, * but often does need to be populated in client implementations using {@link IBasicClient} or * {@link IRestfulClient}, or in plain providers on a server. *

    - * This value also does not need to be populated if the return type for a method annotated with - * this annotation is sufficient to determine the type of resource provided. E.g. if the - * method returns Patient or List<Patient>, the server/client + * This value also does not need to be populated if the return type for a method annotated with + * this annotation is sufficient to determine the type of resource provided. E.g. if the + * method returns Patient or List<Patient>, the server/client * will automatically determine that the Patient resource is the return type, and this value - * may be left blank. + * may be left blank. *
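// A minimal usage sketch (not part of this commit): a plain provider (not an
// IResourceProvider) whose return type alone does not reveal the resource, so type() is
// declared explicitly. Assumes R4 structures; "family" is the standard Patient search
// parameter name.
@Search(type = Patient.class)
public List<IBaseResource> findPatients(@RequiredParam(name = "family") StringParam theFamily) {
    return new ArrayList<>();
}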

    */ // NB: Read, Search (maybe others) share this annotation method, so update the javadocs everywhere @@ -88,7 +86,7 @@ public @interface Search { String typeName() default ""; /** - * In a REST server, should this method be invoked even if it does not have method parameters + * In a REST server, should this method be invoked even if it does not have method parameters * which correspond to all of the URL parameters passed in by the client (default is false). *

    * Use this method with caution: Methods marked with a value of true will @@ -100,5 +98,4 @@ public @interface Search { *

    */ boolean allowUnknownParams() default false; - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/ServerBase.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/ServerBase.java index 6dbb1cbc80b..48dccf4334b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/ServerBase.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/ServerBase.java @@ -33,7 +33,7 @@ import java.lang.annotation.Target; * indicating the server's base URL, with NO trailing '/'. *

    */ -@Target(value=ElementType.PARAMETER) +@Target(value = ElementType.PARAMETER) @Retention(RetentionPolicy.RUNTIME) public @interface ServerBase { // nothing diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Since.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Since.java index 446370a0457..505a92b9655 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Since.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Since.java @@ -19,25 +19,25 @@ */ package ca.uhn.fhir.rest.annotation; +import ca.uhn.fhir.rest.param.DateParam; +import ca.uhn.fhir.rest.param.DateRangeParam; + import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import ca.uhn.fhir.rest.param.DateParam; -import ca.uhn.fhir.rest.param.DateRangeParam; - /** - * Parameter annotation for the _since parameter, which indicates to the - * server that only results dated since the given instant will be returned. + * Parameter annotation for the _since parameter, which indicates to the + * server that only results dated since the given instant will be returned. *

    - * Parameters with this annotation should be of type {@link DateParam} or {@link DateRangeParam} + * Parameters with this annotation should be of type {@link DateParam} or {@link DateRangeParam} *

    - * + * * @see History */ -@Target(value=ElementType.PARAMETER) +@Target(value = ElementType.PARAMETER) @Retention(RetentionPolicy.RUNTIME) public @interface Since { - //nothing + // nothing } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Sort.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Sort.java index d62f8abe11e..531d1c86a0a 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Sort.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Sort.java @@ -19,26 +19,26 @@ */ package ca.uhn.fhir.rest.annotation; +import ca.uhn.fhir.rest.api.SortSpec; + import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import ca.uhn.fhir.rest.api.SortSpec; - /** - * For searches, a parameter may be annotated with the {@link Sort} annotation. The - * parameter should be of type {@link SortSpec}. - * + * For searches, a parameter may be annotated with the {@link Sort} annotation. The + * parameter should be of type {@link SortSpec}. + * *

    * Note that if you wish to chain * multiple sort parameters (i.e. a sub sort), you should use the {@link SortSpec#setChain(SortSpec)} * method. Multiple parameters should not be annotated with the Sort annotation. *
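// A minimal usage sketch (not part of this commit): reading the requested sort, including a
// chained sub-sort, inside a search method. Assumes R4 structures; the body is a stub.
@Search
public List<Patient> search(
        @OptionalParam(name = "name") StringParam theName, @Sort SortSpec theSort) {
    for (SortSpec sort = theSort; sort != null; sort = sort.getChain()) {
        String paramName = sort.getParamName(); // e.g. "family"
        SortOrderEnum order = sort.getOrder();  // may be null when no direction was given
        // ... apply this sort level ...
    }
    return new ArrayList<>();
}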

    - * + * * @see Search */ -@Target(value=ElementType.PARAMETER) +@Target(value = ElementType.PARAMETER) @Retention(RetentionPolicy.RUNTIME) public @interface Sort { // nothing diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Transaction.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Transaction.java index 4b13fe54e70..6aff5e7c553 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Transaction.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Transaction.java @@ -23,7 +23,7 @@ import java.lang.annotation.*; /** * RESTful method annotation to be used for the FHIR transaction method. - * + * *

    * This method should have a parameter of type Bundle annotated with the {@link TransactionParam} annotation. *
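// A minimal usage sketch (not part of this commit): a transaction method on a plain provider.
// Assumes the R4 Bundle class; entry processing is omitted.
@Transaction
public Bundle transaction(@TransactionParam Bundle theInput) {
    Bundle response = new Bundle();
    response.setType(Bundle.BundleType.TRANSACTIONRESPONSE);
    // ... process theInput.getEntry() and add one response entry per request entry ...
    return response;
}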

    diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/TransactionParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/TransactionParam.java index a086ac1a062..8e50b13c9bb 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/TransactionParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/TransactionParam.java @@ -24,11 +24,9 @@ import java.util.List; /** * Parameter annotation for the "transaction" operation. The parameter annotated with this - * annotation must be a Bundle resource type, or a type that implements + * annotation must be a Bundle resource type, or a type that implements * {@link List}<IBaseResource> */ -@Target(value=ElementType.PARAMETER) +@Target(value = ElementType.PARAMETER) @Retention(RetentionPolicy.RUNTIME) -public @interface TransactionParam { - -} +public @interface TransactionParam {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Update.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Update.java index 4f3cee44680..a9c937ff3cf 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Update.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Update.java @@ -19,24 +19,24 @@ */ package ca.uhn.fhir.rest.annotation; +import ca.uhn.fhir.model.api.IResource; + import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import ca.uhn.fhir.model.api.IResource; - /** * RESTful method annotation to be used for the FHIR * update method. - * + * *

    * Update is used to save an update to an existing resource (using its ID and optionally - * a version ID). It also may allow a client to save a new resource using an ID of its choosing. + * a version ID). It also may allow a client to save a new resource using an ID of its choosing. *

    */ @Retention(RetentionPolicy.RUNTIME) -@Target(value=ElementType.METHOD) +@Target(value = ElementType.METHOD) public @interface Update { /** @@ -57,5 +57,4 @@ public @interface Update { * @since 5.4.0 */ String typeName() default ""; - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Validate.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Validate.java index 4f1d3b56355..7826df8daee 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Validate.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/annotation/Validate.java @@ -19,36 +19,35 @@ */ package ca.uhn.fhir.rest.annotation; -import java.lang.annotation.*; - +import ca.uhn.fhir.rest.api.ValidationModeEnum; import org.hl7.fhir.instance.model.api.IBaseResource; -import ca.uhn.fhir.rest.api.ValidationModeEnum; +import java.lang.annotation.*; /** * RESTful method annotation to be used for the FHIR * validate method. - * + * *

    * Validate is used to accept a resource, and test whether it would be acceptable for - * storing (e.g. using an update or create method) + * storing (e.g. using an update or create method) *

    *

    * FHIR Version Note: The validate operation was defined as a type operation in DSTU1 * using a URL syntax like http://example.com/Patient/_validate. In DSTU2, validation - * has been switched to being an extended operation using a URL syntax like - * http://example.com/Patient/$validate, with a n + * has been switched to being an extended operation using a URL syntax like + * http://example.com/Patient/$validate, with a n *
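// A minimal usage sketch (not part of this commit): a validate method that also receives the
// optional mode and profile parameters. Assumes R4 structures; the validation logic is omitted.
@Validate
public MethodOutcome validate(
        @ResourceParam Patient thePatient,
        @Validate.Mode ValidationModeEnum theMode,
        @Validate.Profile String theProfile) {
    MethodOutcome outcome = new MethodOutcome();
    outcome.setOperationOutcome(new OperationOutcome()); // add issues here when validation fails
    return outcome;
}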

    */ @Retention(RetentionPolicy.RUNTIME) -@Target(value=ElementType.METHOD) +@Target(value = ElementType.METHOD) public @interface Validate { /** * The return type for this method. This generally does not need - * to be populated for a server implementation (using an IResourceProvider, - * since resource providers will return only one resource type per class, - * but generally does need to be populated for client implementations. + * to be populated for a server implementation (using an IResourceProvider, + * since resource providers will return only one resource type per class, + * but generally does need to be populated for client implementations. */ // NB: Read, Search (maybe others) share this annotation, so update the javadocs everywhere Class type() default IBaseResource.class; @@ -68,19 +67,18 @@ public @interface Validate { * in FHIR DSTU2+). Parameter must be of type {@link ValidationModeEnum}. */ @Retention(RetentionPolicy.RUNTIME) - @Target(value=ElementType.PARAMETER) + @Target(value = ElementType.PARAMETER) @interface Mode { // nothing } - + /** * Validation mode parameter annotation for the validation URI parameter (only supported * in FHIR DSTU2+). Parameter must be of type {@link String}. */ @Retention(RetentionPolicy.RUNTIME) - @Target(value=ElementType.PARAMETER) + @Target(value = ElementType.PARAMETER) @interface Profile { // nothing } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/BundleLinks.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/BundleLinks.java index 6cfe1ee5ebb..9a3611f6fe6 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/BundleLinks.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/BundleLinks.java @@ -38,7 +38,8 @@ public class BundleLinks { private String next; private String prev; - public BundleLinks(String theServerBase, Set theIncludes, boolean thePrettyPrint, BundleTypeEnum theBundleType) { + public BundleLinks( + String theServerBase, Set theIncludes, boolean thePrettyPrint, BundleTypeEnum theBundleType) { serverBase = theServerBase; includes = theIncludes == null ? 
null : new ArrayList<>(theIncludes); prettyPrint = thePrettyPrint; diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/CacheControlDirective.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/CacheControlDirective.java index d78454c7017..affa4169af4 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/CacheControlDirective.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/CacheControlDirective.java @@ -116,7 +116,6 @@ public class CacheControlDirective { } catch (NumberFormatException e) { ourLog.warn("Invalid {} value: {}", Constants.CACHE_CONTROL_MAX_RESULTS, valueString); } - } } } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/Constants.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/Constants.java index c4d88baafb8..1eacae7780e 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/Constants.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/Constants.java @@ -51,19 +51,21 @@ public class Constants { * would make a useful set for CORS AllowedMethod declarations */ public static final Set CORS_ALLWED_METHODS; + public static final String CT_FHIR_JSON = "application/json+fhir"; public static final String CT_RDF_TURTLE = "application/x-turtle"; /** * The FHIR MimeType for JSON encoding in FHIR DSTU3+ */ public static final String CT_FHIR_JSON_NEW = "application/fhir+json"; - public static final String CT_FHIR_XML = "application/xml+fhir"; + public static final String CT_FHIR_XML = "application/xml+fhir"; /** * The FHIR MimeType for XML encoding in FHIR DSTU3+ */ public static final String CT_FHIR_XML_NEW = "application/fhir+xml"; + public static final String CT_HTML = "text/html"; public static final String CT_HTML_WITH_UTF8 = "text/html" + CHARSET_UTF8_CTSUFFIX; public static final String CT_JSON = "application/json"; @@ -77,7 +79,7 @@ public class Constants { public static final String CT_XML = "application/xml"; public static final String CT_XML_PATCH = "application/xml-patch+xml"; public static final String ENCODING_GZIP = "gzip"; - public static final String EXTOP_PROCESS_MESSAGE = "$process-message"; //Used in messaging + public static final String EXTOP_PROCESS_MESSAGE = "$process-message"; // Used in messaging public static final String EXTOP_VALIDATE = "$validate"; public static final String EXTOP_VALIDATE_MODE = "mode"; public static final String EXTOP_VALIDATE_PROFILE = "profile"; @@ -89,20 +91,24 @@ public class Constants { public static final String CT_RDF_TURTLE_LEGACY = "text/turtle"; public static final String FORMAT_TURTLE = "ttl"; - /** * "text/html" and "html" */ public static final Set FORMATS_HTML; + public static final String FORMATS_HTML_JSON = "html/json"; public static final String FORMATS_HTML_XML = "html/xml"; public static final String FORMATS_HTML_TTL = "html/turtle"; public static final String HEADER_ACCEPT = "Accept"; public static final String HEADER_ACCEPT_ENCODING = "Accept-Encoding"; - public static final String HEADER_ACCEPT_VALUE_JSON_NON_LEGACY = CT_FHIR_JSON_NEW + ";q=1.0, " + CT_FHIR_JSON + ";q=0.9"; - public static final String HEADER_ACCEPT_VALUE_XML_NON_LEGACY = CT_FHIR_XML_NEW + ";q=1.0, " + CT_FHIR_XML + ";q=0.9"; - public static final String HEADER_ACCEPT_VALUE_XML_OR_JSON_LEGACY = CT_FHIR_XML + ";q=1.0, " + CT_FHIR_JSON + ";q=1.0"; - public static final String HEADER_ACCEPT_VALUE_XML_OR_JSON_NON_LEGACY = CT_FHIR_XML_NEW + ";q=1.0, " + CT_FHIR_JSON_NEW + ";q=1.0, " + HEADER_ACCEPT_VALUE_XML_OR_JSON_LEGACY.replace("1.0", "0.9"); + public static final String 
HEADER_ACCEPT_VALUE_JSON_NON_LEGACY = + CT_FHIR_JSON_NEW + ";q=1.0, " + CT_FHIR_JSON + ";q=0.9"; + public static final String HEADER_ACCEPT_VALUE_XML_NON_LEGACY = + CT_FHIR_XML_NEW + ";q=1.0, " + CT_FHIR_XML + ";q=0.9"; + public static final String HEADER_ACCEPT_VALUE_XML_OR_JSON_LEGACY = + CT_FHIR_XML + ";q=1.0, " + CT_FHIR_JSON + ";q=1.0"; + public static final String HEADER_ACCEPT_VALUE_XML_OR_JSON_NON_LEGACY = CT_FHIR_XML_NEW + ";q=1.0, " + + CT_FHIR_JSON_NEW + ";q=1.0, " + HEADER_ACCEPT_VALUE_XML_OR_JSON_LEGACY.replace("1.0", "0.9"); public static final String HEADER_ALLOW = "Allow"; public static final String HEADER_AUTHORIZATION = "Authorization"; public static final String HEADER_AUTHORIZATION_VALPREFIX_BASIC = "Basic "; @@ -155,13 +161,14 @@ public class Constants { public static final String LINK_PREVIOUS = "previous"; public static final String LINK_SELF = "self"; public static final String OPENSEARCH_NS_OLDER = "http://purl.org/atompub/tombstones/1.0"; - public static final String PARAM_ASYNC = "async"; //Used in messaging + public static final String PARAM_ASYNC = "async"; // Used in messaging public static final String PARAM_AT = "_at"; - public static final String PARAM_ID= "_id"; + public static final String PARAM_ID = "_id"; /** * Used in paging links */ public static final String PARAM_BUNDLETYPE = "_bundletype"; + public static final String PARAM_FILTER = "_filter"; public static final String PARAM_CONTAINED = "_contained"; public static final String PARAM_CONTAINED_TYPE = "_containedType"; @@ -188,7 +195,7 @@ public class Constants { public static final String PARAM_PRETTY_VALUE_TRUE = "true"; public static final String PARAM_PROFILE = "_profile"; public static final String PARAM_QUERY = "_query"; - public static final String PARAM_RESPONSE_URL = "response-url"; //Used in messaging + public static final String PARAM_RESPONSE_URL = "response-url"; // Used in messaging public static final String PARAM_REVINCLUDE = "_revinclude"; public static final String PARAM_REVINCLUDE_RECURSE = PARAM_REVINCLUDE + PARAM_INCLUDE_QUALIFIER_RECURSE; public static final String PARAM_REVINCLUDE_ITERATE = PARAM_REVINCLUDE + PARAM_INCLUDE_QUALIFIER_ITERATE; @@ -210,6 +217,7 @@ public class Constants { * $member-match operation */ public static final String PARAM_MEMBER_PATIENT = "MemberPatient"; + public static final String PARAM_MEMBER_IDENTIFIER = "MemberIdentifier"; public static final String PARAM_OLD_COVERAGE = "OldCoverage"; @@ -261,7 +269,8 @@ public class Constants { public static final String PARAM_PAGEID = "_pageId"; public static final String JAVA_VALIDATOR_DETAILS_SYSTEM = "http://hl7.org/fhir/java-core-messageId"; public static final String PARAM_SEARCH_TOTAL_MODE = "_total"; - public static final String CAPABILITYSTATEMENT_WEBSOCKET_URL = "http://hl7.org/fhir/StructureDefinition/capabilitystatement-websocket"; + public static final String CAPABILITYSTATEMENT_WEBSOCKET_URL = + "http://hl7.org/fhir/StructureDefinition/capabilitystatement-websocket"; public static final String PARAMETER_CASCADE_DELETE = "_cascade"; public static final String PARAMETER_CASCADE_DELETE_MAX_ROUNDS = "_maxRounds"; public static final String HEADER_CASCADE = "X-Cascade"; @@ -280,6 +289,7 @@ public class Constants { * Note that this constant is used in a number of places including DB column lengths! Be careful if you decide to change it. 
*/ public static final int REQUEST_ID_LENGTH = 16; + public static final int STATUS_HTTP_202_ACCEPTED = 202; public static final String HEADER_X_PROGRESS = "X-Progress"; public static final String HEADER_RETRY_AFTER = "Retry-After"; @@ -287,6 +297,7 @@ public class Constants { * Operation name for the $lastn operation */ public static final String OPERATION_LASTN = "$lastn"; + public static final String PARAM_FHIRPATH = "_fhirpath"; public static final String PARAM_TYPE = "_type"; @@ -296,13 +307,15 @@ public class Constants { * key will be of type {@link ca.uhn.fhir.interceptor.model.RequestPartitionId}. */ public static final String RESOURCE_PARTITION_ID = Constants.class.getName() + "_RESOURCE_PARTITION_ID"; + public static final String PARTITION_IDS = "partitionIds"; public static final String CT_APPLICATION_GZIP = "application/gzip"; public static final String[] EMPTY_STRING_ARRAY = new String[0]; public static final String SUBSCRIPTION_MULTITYPE_PREFIX = "["; public static final String SUBSCRIPTION_MULTITYPE_SUFFIX = "]"; public static final String SUBSCRIPTION_MULTITYPE_STAR = "*"; - public static final String SUBSCRIPTION_STAR_CRITERIA = SUBSCRIPTION_MULTITYPE_PREFIX + SUBSCRIPTION_MULTITYPE_STAR + SUBSCRIPTION_MULTITYPE_SUFFIX; + public static final String SUBSCRIPTION_STAR_CRITERIA = + SUBSCRIPTION_MULTITYPE_PREFIX + SUBSCRIPTION_MULTITYPE_STAR + SUBSCRIPTION_MULTITYPE_SUFFIX; public static final String INCLUDE_STAR = "*"; public static final String PARAMQUALIFIER_TOKEN_IN = ":in"; public static final String PARAMQUALIFIER_TOKEN_NOT_IN = ":not-in"; @@ -310,7 +323,7 @@ public class Constants { public static final String PARAMQUALIFIER_TOKEN_BELOW = ":below"; /** * The number of characters in a UUID (36) - */ + */ public static final int UUID_LENGTH = 36; /** @@ -318,7 +331,7 @@ public class Constants { */ public static final String HIBERNATE_INTEGRATION_ENVERS_ENABLED = "hibernate.integration.envers.enabled"; - static { + static { CHARSET_UTF8 = StandardCharsets.UTF_8; CHARSET_US_ASCII = StandardCharsets.ISO_8859_1; diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/DeleteCascadeModeEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/DeleteCascadeModeEnum.java index b8e3dd7b268..f8dfbe26c8b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/DeleteCascadeModeEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/DeleteCascadeModeEnum.java @@ -26,9 +26,7 @@ package ca.uhn.fhir.rest.api; *

    */ public enum DeleteCascadeModeEnum { - NONE, DELETE - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/EncodingEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/EncodingEnum.java index c7430132762..11b854013a4 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/EncodingEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/EncodingEnum.java @@ -30,7 +30,6 @@ import java.util.Map; import static org.apache.commons.lang3.StringUtils.isBlank; public enum EncodingEnum { - JSON(Constants.CT_FHIR_JSON, Constants.CT_FHIR_JSON_NEW, Constants.FORMAT_JSON) { @Override public IParser newParser(FhirContext theContext) { @@ -69,17 +68,16 @@ public enum EncodingEnum { */ public static final String RDF_PLAIN_STRING = "rdf"; - /** * "xml" */ public static final String XML_PLAIN_STRING = "xml"; - /** - * "ndjson" - */ - public static final String NDJSON_PLAIN_STRING = "ndjson"; - + /** + * "ndjson" + */ + public static final String NDJSON_PLAIN_STRING = "ndjson"; + private static Map ourContentTypeToEncoding; private static Map ourContentTypeToEncodingLegacy; private static Map ourContentTypeToEncodingStrict; @@ -99,7 +97,6 @@ public enum EncodingEnum { ourContentTypeToEncoding.put(next.myResourceContentTypeNonLegacy.replace('+', ' '), next); ourContentTypeToEncoding.put(next.myResourceContentTypeLegacy.replace('+', ' '), next); ourContentTypeToEncodingLegacy.put(next.myResourceContentTypeLegacy.replace('+', ' '), next); - } // Add before we add the lenient ones @@ -113,7 +110,7 @@ public enum EncodingEnum { ourContentTypeToEncoding.put("application/xml", XML); ourContentTypeToEncoding.put("application/fhir+turtle", RDF); ourContentTypeToEncoding.put("application/x-turtle", RDF); - ourContentTypeToEncoding.put("application/ndjson", NDJSON); + ourContentTypeToEncoding.put("application/ndjson", NDJSON); ourContentTypeToEncoding.put("text/json", JSON); ourContentTypeToEncoding.put("text/ndjson", NDJSON); ourContentTypeToEncoding.put("text/xml", XML); @@ -129,7 +126,6 @@ public enum EncodingEnum { ourContentTypeToEncoding.put(Constants.FORMAT_TURTLE, RDF); ourContentTypeToEncodingLegacy = Collections.unmodifiableMap(ourContentTypeToEncodingLegacy); - } private String myFormatContentType; @@ -202,11 +198,10 @@ public enum EncodingEnum { if (contentTypeSplitted == null) { return null; } else { - return ourContentTypeToEncoding.get(contentTypeSplitted ); + return ourContentTypeToEncoding.get(contentTypeSplitted); } } - /** * Returns the encoding for a given content type, or null if no encoding * is found. 
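// A minimal usage sketch (not part of this commit): resolving a parser from an incoming
// Content-Type header. EncodingEnum.forContentType is assumed to be the public lookup backed
// by the map populated above and to return null for unknown types; myFhirContext stands in
// for an application-owned FhirContext.
String header = "application/fhir+json; charset=utf-8";
String contentType = header.split(";")[0].trim();
EncodingEnum encoding = EncodingEnum.forContentType(contentType);
if (encoding == null) {
    encoding = EncodingEnum.JSON; // fall back to a sensible default
}
IParser parser = encoding.newParser(myFhirContext);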
@@ -268,6 +263,4 @@ public enum EncodingEnum { return ourContentTypeToEncodingLegacy.containsKey(contentTypeSplitted); } } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/IVersionSpecificBundleFactory.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/IVersionSpecificBundleFactory.java index 4851f912898..946c8943fbf 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/IVersionSpecificBundleFactory.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/IVersionSpecificBundleFactory.java @@ -25,21 +25,30 @@ import ca.uhn.fhir.model.valueset.BundleTypeEnum; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Set; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * This interface should be considered experimental and will likely change in future releases of HAPI. Use with caution! */ public interface IVersionSpecificBundleFactory { - void addResourcesToBundle(List theResult, BundleTypeEnum theBundleType, String theServerBase, @Nullable BundleInclusionRule theBundleInclusionRule, @Nullable Set theIncludes); + void addResourcesToBundle( + List theResult, + BundleTypeEnum theBundleType, + String theServerBase, + @Nullable BundleInclusionRule theBundleInclusionRule, + @Nullable Set theIncludes); - void addRootPropertiesToBundle(String theId, @Nonnull BundleLinks theBundleLinks, Integer theTotalResults, IPrimitiveType theLastUpdated); + void addRootPropertiesToBundle( + String theId, + @Nonnull BundleLinks theBundleLinks, + Integer theTotalResults, + IPrimitiveType theLastUpdated); IBaseResource getResourceBundle(); @@ -48,7 +57,13 @@ public interface IVersionSpecificBundleFactory { * and {@link #addResourcesToBundle(List, BundleTypeEnum, String, BundleInclusionRule, Set)} methods */ @Deprecated - default void initializeBundleFromResourceList(String theAuthor, List theResult, String theServerBase, String theCompleteUrl, int theTotalResults, BundleTypeEnum theBundleType) { + default void initializeBundleFromResourceList( + String theAuthor, + List theResult, + String theServerBase, + String theCompleteUrl, + int theTotalResults, + BundleTypeEnum theBundleType) { addTotalResultsToBundle(theResult.size(), theBundleType); addResourcesToBundle(new ArrayList<>(theResult), theBundleType, null, null, null); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/MethodOutcome.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/MethodOutcome.java index 5e50cee3d1c..c9bfa5aa2c9 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/MethodOutcome.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/MethodOutcome.java @@ -214,7 +214,6 @@ public class MethodOutcome { } } - /** * Registers a callback to be invoked before the resource in this object gets * returned to the client. Note that this is an experimental API and may change. 
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/PatchTypeEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/PatchTypeEnum.java index 6a913139b77..ea4c764e044 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/PatchTypeEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/PatchTypeEnum.java @@ -24,9 +24,9 @@ import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.annotation.Patch; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; -import javax.annotation.Nonnull; import java.util.HashMap; import java.util.Map; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.defaultString; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -35,7 +35,6 @@ import static org.apache.commons.lang3.StringUtils.isBlank; * Parameter type for methods annotated with {@link Patch} */ public enum PatchTypeEnum { - JSON_PATCH(Constants.CT_JSON_PATCH), XML_PATCH(Constants.CT_XML_PATCH), FHIR_PATCH_JSON(Constants.CT_FHIR_JSON_NEW), @@ -53,7 +52,8 @@ public enum PatchTypeEnum { } @Nonnull - public static PatchTypeEnum forContentTypeOrThrowInvalidRequestException(FhirContext theContext, String theContentType) { + public static PatchTypeEnum forContentTypeOrThrowInvalidRequestException( + FhirContext theContext, String theContentType) { String contentType = defaultString(theContentType); int semiColonIdx = contentType.indexOf(';'); if (semiColonIdx != -1) { @@ -61,7 +61,6 @@ public enum PatchTypeEnum { } contentType = contentType.trim(); - Map map = ourContentTypeToPatchType; if (map == null) { map = new HashMap<>(); @@ -78,7 +77,9 @@ public enum PatchTypeEnum { throw new InvalidRequestException(Msg.code(1964) + msg); } - String msg = theContext.getLocalizer().getMessageSanitized(PatchTypeEnum.class, "invalidPatchContentType", contentType); + String msg = theContext + .getLocalizer() + .getMessageSanitized(PatchTypeEnum.class, "invalidPatchContentType", contentType); throw new InvalidRequestException(Msg.code(1965) + msg); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/PreferHandlingEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/PreferHandlingEnum.java index 13932b7c552..1ff76244e75 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/PreferHandlingEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/PreferHandlingEnum.java @@ -25,8 +25,8 @@ import java.util.HashMap; * Represents values for "handling" value as provided in the the FHIR Search Spec. */ public enum PreferHandlingEnum { - - STRICT(Constants.HEADER_PREFER_HANDLING_STRICT), LENIENT(Constants.HEADER_PREFER_HANDLING_LENIENT); + STRICT(Constants.HEADER_PREFER_HANDLING_STRICT), + LENIENT(Constants.HEADER_PREFER_HANDLING_LENIENT); private static HashMap ourValues; private String myHeaderValue; @@ -49,5 +49,4 @@ public enum PreferHandlingEnum { } return ourValues.get(theHeaderValue); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/PreferReturnEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/PreferReturnEnum.java index 7b228c6b59c..07ab10f62c0 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/PreferReturnEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/PreferReturnEnum.java @@ -19,14 +19,13 @@ */ package ca.uhn.fhir.rest.api; -import javax.annotation.Nullable; import java.util.HashMap; +import javax.annotation.Nullable; /** * Represents values for "return" value as provided in the the HTTP Prefer header. 
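
The PatchTypeEnum change above only re-wraps forContentTypeOrThrowInvalidRequestException(FhirContext, String) and the localized error message. A short illustrative use of that lookup, with content types taken from the constants this enum already references:

    import ca.uhn.fhir.context.FhirContext;
    import ca.uhn.fhir.rest.api.PatchTypeEnum;
    import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;

    public class PatchTypeSketch {
        public static void main(String[] args) {
            FhirContext ctx = FhirContext.forR4();

            // Anything after ';' (e.g. a charset parameter) is stripped before the lookup.
            PatchTypeEnum patchType = PatchTypeEnum.forContentTypeOrThrowInvalidRequestException(
                    ctx, "application/json-patch+json; charset=UTF-8");
            System.out.println(patchType); // JSON_PATCH

            try {
                PatchTypeEnum.forContentTypeOrThrowInvalidRequestException(ctx, "text/plain");
            } catch (InvalidRequestException e) {
                // Unknown content types surface the localized "invalidPatchContentType" message.
                System.out.println(e.getMessage());
            }
        }
    }
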
*/ public enum PreferReturnEnum { - REPRESENTATION(Constants.HEADER_PREFER_RETURN_REPRESENTATION), MINIMAL(Constants.HEADER_PREFER_RETURN_MINIMAL), OPERATION_OUTCOME(Constants.HEADER_PREFER_RETURN_OPERATION_OUTCOME); @@ -53,5 +52,4 @@ public enum PreferReturnEnum { } return ourValues.get(theHeaderValue); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/QualifiedParamList.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/QualifiedParamList.java index 18247e7bcd0..b09fe23783b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/QualifiedParamList.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/QualifiedParamList.java @@ -104,7 +104,6 @@ public class QualifiedParamList extends ArrayList { retVal.add(str); prev = str; } - } // If no value was found, at least add that empty string as a value. It should get ignored later, but at @@ -131,5 +130,4 @@ public class QualifiedParamList extends ArrayList { } return retVal; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/RequestFormatParamStyleEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/RequestFormatParamStyleEnum.java index 10042a89b1c..c66342ff9c3 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/RequestFormatParamStyleEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/RequestFormatParamStyleEnum.java @@ -29,5 +29,4 @@ public enum RequestFormatParamStyleEnum { * "xml" or "json" */ SHORT - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/RequestTypeEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/RequestTypeEnum.java index c812dcae34b..8e33665ee67 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/RequestTypeEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/RequestTypeEnum.java @@ -20,5 +20,14 @@ package ca.uhn.fhir.rest.api; public enum RequestTypeEnum { - CONNECT, DELETE, GET, OPTIONS, PATCH, POST, PUT, TRACE, TRACK, HEAD + CONNECT, + DELETE, + GET, + OPTIONS, + PATCH, + POST, + PUT, + TRACE, + TRACK, + HEAD } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/RestOperationTypeEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/RestOperationTypeEnum.java index 9878de291de..9a37ca86aa2 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/RestOperationTypeEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/RestOperationTypeEnum.java @@ -22,13 +22,12 @@ package ca.uhn.fhir.rest.api; import ca.uhn.fhir.util.CoverageIgnore; import org.apache.commons.lang3.Validate; -import javax.annotation.Nonnull; import java.util.HashMap; import java.util.Map; +import javax.annotation.Nonnull; @CoverageIgnore public enum RestOperationTypeEnum { - BATCH("batch", true, false, false), ADD_TAGS("add-tags", false, false, true), @@ -126,7 +125,7 @@ public enum RestOperationTypeEnum { * Load the server's metadata */ METADATA("metadata", false, false, false), - + /** * $meta-add extended operation */ @@ -179,7 +178,8 @@ public enum RestOperationTypeEnum { /** * Constructor */ - RestOperationTypeEnum(@Nonnull String theCode, boolean theSystemLevel, boolean theTypeLevel, boolean theInstanceLevel) { + RestOperationTypeEnum( + @Nonnull String theCode, boolean theSystemLevel, boolean theTypeLevel, boolean theInstanceLevel) { myCode = theCode; mySystemLevel = theSystemLevel; myTypeLevel = theTypeLevel; diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/RestSearchParameterTypeEnum.java 
b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/RestSearchParameterTypeEnum.java index b6829c6ea5d..a81e523ac7a 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/RestSearchParameterTypeEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/RestSearchParameterTypeEnum.java @@ -19,11 +19,11 @@ */ package ca.uhn.fhir.rest.api; +import ca.uhn.fhir.model.api.IValueSetEnumBinder; + import java.util.HashMap; import java.util.Map; -import ca.uhn.fhir.model.api.IValueSetEnumBinder; - public enum RestSearchParameterTypeEnum { /** @@ -32,71 +32,69 @@ public enum RestSearchParameterTypeEnum { * Search parameter SHALL be a number (a whole number, or a decimal). */ NUMBER("number", "http://hl7.org/fhir/search-param-type"), - + /** * Code Value: date * * Search parameter is on a date/time. The date format is the standard XML format, though other formats may be supported. */ DATE("date", "http://hl7.org/fhir/search-param-type"), - + /** * Code Value: string * * Search parameter is a simple string, like a name part. Search is case-insensitive and accent-insensitive. May match just the start of a string. String parameters may contain spaces. */ STRING("string", "http://hl7.org/fhir/search-param-type"), - + /** * Code Value: token * * Search parameter on a coded element or identifier. May be used to search through the text, displayname, code and code/codesystem (for codes) and label, system and key (for identifier). Its value is either a string or a pair of namespace and value, separated by a "|", depending on the modifier used. */ TOKEN("token", "http://hl7.org/fhir/search-param-type"), - + /** * Code Value: reference * * A reference to another resource. */ REFERENCE("reference", "http://hl7.org/fhir/search-param-type"), - + /** * Code Value: composite * * A composite search parameter that combines a search on two values together. */ COMPOSITE("composite", "http://hl7.org/fhir/search-param-type"), - + /** * Code Value: quantity * * A search parameter that searches on a quantity. */ QUANTITY("quantity", "http://hl7.org/fhir/search-param-type"), - + /** * Code Value: quantity * * A search parameter that searches on a quantity. */ - URI("uri", "http://hl7.org/fhir/search-param-type"), - + URI("uri", "http://hl7.org/fhir/search-param-type"), + /** * _has parameter */ HAS("string", "http://hl7.org/fhir/search-param-type"), - /** + /** * Code Value: number * * Search parameter SHALL be a number (a whole number, or a decimal). 
*/ SPECIAL("special", "http://hl7.org/fhir/search-param-type"), - ; - /** * Identifier for this Value Set: * http://hl7.org/fhir/vs/search-param-type @@ -109,41 +107,43 @@ public enum RestSearchParameterTypeEnum { */ public static final String VALUESET_NAME = "SearchParamType"; - private static Map CODE_TO_ENUM = new HashMap(); - private static Map> SYSTEM_TO_CODE_TO_ENUM = new HashMap>(); - + private static Map CODE_TO_ENUM = + new HashMap(); + private static Map> SYSTEM_TO_CODE_TO_ENUM = + new HashMap>(); + private final String myCode; private final String mySystem; - + static { for (RestSearchParameterTypeEnum next : RestSearchParameterTypeEnum.values()) { if (next == HAS) { continue; } - + CODE_TO_ENUM.put(next.getCode(), next); - + if (!SYSTEM_TO_CODE_TO_ENUM.containsKey(next.getSystem())) { SYSTEM_TO_CODE_TO_ENUM.put(next.getSystem(), new HashMap()); } - SYSTEM_TO_CODE_TO_ENUM.get(next.getSystem()).put(next.getCode(), next); + SYSTEM_TO_CODE_TO_ENUM.get(next.getSystem()).put(next.getCode(), next); } } - + /** * Returns the code associated with this enumerated value */ public String getCode() { return myCode; } - + /** * Returns the code system associated with this enumerated value */ public String getSystem() { return mySystem; } - + /** * Returns the enumerated value associated with this code */ @@ -155,42 +155,40 @@ public enum RestSearchParameterTypeEnum { /** * Converts codes to their respective enumerated values */ - public static final IValueSetEnumBinder VALUESET_BINDER = new IValueSetEnumBinder() { - private static final long serialVersionUID = 1L; + public static final IValueSetEnumBinder VALUESET_BINDER = + new IValueSetEnumBinder() { + private static final long serialVersionUID = 1L; - @Override - public String toCodeString(RestSearchParameterTypeEnum theEnum) { - return theEnum.getCode(); - } + @Override + public String toCodeString(RestSearchParameterTypeEnum theEnum) { + return theEnum.getCode(); + } - @Override - public String toSystemString(RestSearchParameterTypeEnum theEnum) { - return theEnum.getSystem(); - } - - @Override - public RestSearchParameterTypeEnum fromCodeString(String theCodeString) { - return CODE_TO_ENUM.get(theCodeString); - } - - @Override - public RestSearchParameterTypeEnum fromCodeString(String theCodeString, String theSystemString) { - Map map = SYSTEM_TO_CODE_TO_ENUM.get(theSystemString); - if (map == null) { - return null; - } - return map.get(theCodeString); - } - - }; - - /** + @Override + public String toSystemString(RestSearchParameterTypeEnum theEnum) { + return theEnum.getSystem(); + } + + @Override + public RestSearchParameterTypeEnum fromCodeString(String theCodeString) { + return CODE_TO_ENUM.get(theCodeString); + } + + @Override + public RestSearchParameterTypeEnum fromCodeString(String theCodeString, String theSystemString) { + Map map = SYSTEM_TO_CODE_TO_ENUM.get(theSystemString); + if (map == null) { + return null; + } + return map.get(theCodeString); + } + }; + + /** * Constructor */ RestSearchParameterTypeEnum(String theCode, String theSystem) { myCode = theCode; mySystem = theSystem; } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SearchContainedModeEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SearchContainedModeEnum.java index 1284709ef0c..5b7102c9190 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SearchContainedModeEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SearchContainedModeEnum.java @@ -67,10 +67,10 @@ public enum SearchContainedModeEnum { 
SearchContainedModeEnum retVal = codeToEnum.get(theCode); if (retVal == null) { - throw new InvalidRequestException(Msg.code(1963) + "Invalid contained mode: " + UrlUtil.sanitizeUrlPart(theCode)); + throw new InvalidRequestException( + Msg.code(1963) + "Invalid contained mode: " + UrlUtil.sanitizeUrlPart(theCode)); } return retVal; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SearchStyleEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SearchStyleEnum.java index 19411a2fe9a..b4db90fc69b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SearchStyleEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SearchStyleEnum.java @@ -21,7 +21,7 @@ package ca.uhn.fhir.rest.api; /** * Enumerated type to represent the various allowable syntax for a search/query - * as described in the + * as described in the * FHIR Specification Section 2.1.11 */ public enum SearchStyleEnum { @@ -32,7 +32,7 @@ public enum SearchStyleEnum { * GET [base]/[resource type]?[params] */ GET, - + /** * Performs the search using the style below. Note that this style is less commonly supported * in servers so it should not be used unless there is a specific reason for needing to. @@ -49,5 +49,4 @@ public enum SearchStyleEnum { * and the params in a form encoded POST body. */ POST - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SearchTotalModeEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SearchTotalModeEnum.java index 5e4fafdc8c1..d42697b9363 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SearchTotalModeEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SearchTotalModeEnum.java @@ -23,7 +23,6 @@ import java.util.HashMap; import java.util.Map; public enum SearchTotalModeEnum { - NONE("none"), ESTIMATED("estimated"), ACCURATE("accurate"); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SortOrderEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SortOrderEnum.java index 295a256b3a6..74aed6b32aa 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SortOrderEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SortOrderEnum.java @@ -20,8 +20,6 @@ package ca.uhn.fhir.rest.api; public enum SortOrderEnum { - ASC, DESC - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SortSpec.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SortSpec.java index b6c4b9cf240..63cc867faa0 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SortSpec.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SortSpec.java @@ -30,7 +30,7 @@ import java.io.Serializable; public class SortSpec implements Serializable { private static final long serialVersionUID = 2866833099879713467L; - + private SortSpec myChain; private String myParamName; private SortOrderEnum myOrder; @@ -44,7 +44,7 @@ public class SortSpec implements Serializable { /** * Constructor - * + * * @param theParamName * The search name to sort on. See {@link #setParamName(String)} for more information. */ @@ -55,7 +55,7 @@ public class SortSpec implements Serializable { /** * Constructor - * + * * @param theParamName * The search name to sort on. See {@link #setParamName(String)} for more information. * @param theOrder @@ -69,7 +69,7 @@ public class SortSpec implements Serializable { /** * Constructor - * + * * @param theParamName * The search name to sort on. See {@link #setParamName(String)} for more information. 
* @param theOrder @@ -138,5 +138,4 @@ public class SortSpec implements Serializable { myOrder = theOrder; return this; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SummaryEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SummaryEnum.java index 3a45c94ae21..6ca6d680ad2 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SummaryEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SummaryEnum.java @@ -51,7 +51,7 @@ public enum SummaryEnum { * Return all parts of the resource(s) */ FALSE("false"); - + private String myCode; private static Map ourCodeToSummary = null; @@ -74,5 +74,4 @@ public enum SummaryEnum { } return c2s.get(theCode); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/ValidationModeEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/ValidationModeEnum.java index 9149f855584..9fd1c6ca6a7 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/ValidationModeEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/ValidationModeEnum.java @@ -19,19 +19,19 @@ */ package ca.uhn.fhir.rest.api; -import java.util.HashMap; - import org.apache.commons.lang3.Validate; +import java.util.HashMap; + /** - * Validation mode parameter for the $validate operation (DSTU2+ only) + * Validation mode parameter for the $validate operation (DSTU2+ only) */ public enum ValidationModeEnum { /** * The server checks the content, and then checks that the content would be acceptable as a create (e.g. that the content would not validate any uniqueness constraints) */ CREATE("create"), - + /** * The server checks the content, and then checks that it would accept it as an update against the nominated specific resource (e.g. that there are no changes to immutable fields the server does not allow to change, and checking version integrity if appropriate) */ @@ -51,12 +51,12 @@ public enum ValidationModeEnum { myCodeToValue.put(next.getCode(), next); } } - + public static ValidationModeEnum forCode(String theCode) { Validate.notBlank(theCode, "theCode must not be blank"); return myCodeToValue.get(theCode); } - + public String getCode() { return myCode; } @@ -64,9 +64,9 @@ public enum ValidationModeEnum { private ValidationModeEnum(String theCode) { myCode = theCode; } - -// @Override -// public boolean isEmpty() { -// return false; -// } + + // @Override + // public boolean isEmpty() { + // return false; + // } } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/BaseHttpRequest.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/BaseHttpRequest.java index 8e7a6633072..e01c85de8c3 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/BaseHttpRequest.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/BaseHttpRequest.java @@ -32,5 +32,4 @@ public abstract class BaseHttpRequest implements IHttpRequest { public void setUrlSource(UrlSourceEnum theUrlSource) { myUrlSource = theUrlSource; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/Header.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/Header.java index 6859345dc11..aeeb448f79a 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/Header.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/Header.java @@ -23,31 +23,30 @@ package ca.uhn.fhir.rest.client.api; * Represents an HTTP header field. 
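
(The SortSpec hunks above are whitespace-only.) As an illustration of the chained-sort model that class describes, assuming the usual getChain/getParamName/getOrder accessors that are not shown in this hunk:

    import ca.uhn.fhir.rest.api.SortOrderEnum;
    import ca.uhn.fhir.rest.api.SortSpec;

    public class SortSpecSketch {
        public static void main(String[] args) {
            // Primary sort on "family" ascending, secondary sort on "given" descending,
            // mirroring the myChain link shown in the SortSpec diff.
            SortSpec secondary = new SortSpec("given", SortOrderEnum.DESC);
            SortSpec sort = new SortSpec("family", SortOrderEnum.ASC);
            sort.setChain(secondary);

            for (SortSpec s = sort; s != null; s = s.getChain()) {
                System.out.println(s.getParamName() + " " + s.getOrder());
            }
        }
    }
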
*/ public class Header { - + public final String myName; public final String myValue; - public Header(String myName, String myValue) { + public Header(String myName, String myValue) { this.myName = myName; this.myValue = myValue; } /** - * Get the name of the Header. - * - * @return the name of the Header, never {@code null} - */ - public String getName() { - return myName; - } - - /** - * Get the value of the Header. - * - * @return the value of the Header, may be {@code null} - */ - public String getValue() { - return myValue; - } + * Get the name of the Header. + * + * @return the name of the Header, never {@code null} + */ + public String getName() { + return myName; + } + /** + * Get the value of the Header. + * + * @return the value of the Header, may be {@code null} + */ + public String getValue() { + return myValue; + } } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/HttpClientUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/HttpClientUtil.java index 4efd0459e61..6a868da6c36 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/HttpClientUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/HttpClientUtil.java @@ -37,5 +37,4 @@ public class HttpClientUtil { b.append(")"); return b.toString(); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IBasicClient.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IBasicClient.java index 82fdfb6a428..0f2c1151d7a 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IBasicClient.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IBasicClient.java @@ -19,9 +19,8 @@ */ package ca.uhn.fhir.rest.client.api; -import org.hl7.fhir.instance.model.api.IBaseConformance; - import ca.uhn.fhir.rest.annotation.Metadata; +import org.hl7.fhir.instance.model.api.IBaseConformance; /** * Base interface for a client supporting the mandatory operations as defined by @@ -31,11 +30,10 @@ public interface IBasicClient extends IRestfulClient { /** * Returns the server conformance statement - * + * * See the FHIR HTTP Conformance definition * for more information. */ @Metadata IBaseConformance getServerConformanceStatement(); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IClientInterceptor.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IClientInterceptor.java index 6367d9ef128..f42329c805a 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IClientInterceptor.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IClientInterceptor.java @@ -30,7 +30,7 @@ import java.io.IOException; *

    * See the HAPI Documentation Client Interceptor * page for more information on how to use this feature. - *

    + *

    */ public interface IClientInterceptor { @@ -39,11 +39,10 @@ public interface IClientInterceptor { */ @Hook(Pointcut.CLIENT_REQUEST) void interceptRequest(IHttpRequest theRequest); - + /** * Fired by the client upon receiving an HTTP response, prior to processing that response */ @Hook(Pointcut.CLIENT_RESPONSE) void interceptResponse(IHttpResponse theResponse) throws IOException; - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IGenericClient.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IGenericClient.java index e8b3a2c6db0..803c46e8d33 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IGenericClient.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IGenericClient.java @@ -59,14 +59,14 @@ public interface IGenericClient extends IRestfulClient { /** * Retrieves the server's conformance statement - * + * * @deprecated As of HAPI 3.0.0 this method has been deprecated, as the operation is now called "capabilities". Use {@link #capabilities()} instead */ IFetchConformanceUntyped fetchConformance(); /** * Force the client to fetch the server's conformance statement and validate that it is appropriate for this client. - * + * * @throws FhirClientConnectionException * if the conformance statement cannot be read, or if the client * @throws FhirClientInappropriateForServerException @@ -87,7 +87,7 @@ public interface IGenericClient extends IRestfulClient { /** * Fluent method for the "meta" operations, which can be used to get, add and remove tags and other * Meta elements from a resource or across the server. - * + * * @since 1.1 */ IMeta meta(); @@ -109,13 +109,13 @@ public interface IGenericClient extends IRestfulClient { /** * Implementation of the "instance read" method. - * + * * @param theType * The type of resource to load * @param theId * The ID to load * @return The resource - * + * * @deprecated Use {@link #read() read() fluent method} instead (deprecated in HAPI FHIR 3.0.0) */ @Deprecated @@ -123,7 +123,7 @@ public interface IGenericClient extends IRestfulClient { /** * Perform the "read" operation (retrieve the latest version of a resource instance by ID) using an absolute URL. - * + * * @param theType * The resource type that is being retrieved * @param theUrl @@ -136,7 +136,7 @@ public interface IGenericClient extends IRestfulClient { /** * Perform the "read" operation (retrieve the latest version of a resource instance by ID) using an absolute URL. - * + * * @param theUrl * The absolute URL, e.g. "http://example.com/fhir/Patient/123" * @return The returned resource from the server @@ -162,7 +162,7 @@ public interface IGenericClient extends IRestfulClient { /** * If set to true, the client will log all requests and all responses. This is probably not a good production setting since it will result in a lot of extra logging, but it can be * useful for troubleshooting. - * + * * @param theLogRequestAndResponse * Should requests and responses be logged * @deprecated Use LoggingInterceptor as a client interceptor registered to your @@ -190,7 +190,7 @@ public interface IGenericClient extends IRestfulClient { /** * Implementation of the "instance update" method. - * + * * @param theId * The ID to update * @param theResource @@ -236,7 +236,7 @@ public interface IGenericClient extends IRestfulClient { * Note that if an absolute resource ID is passed in (i.e. 
a URL containing a protocol and host as well as the resource type and ID) the server base for the client will be ignored, and the URL * passed in will be queried. *

    - * + * * @param theType * The type of resource to load * @param theId @@ -249,7 +249,7 @@ public interface IGenericClient extends IRestfulClient { /** * Implementation of the "instance vread" method. - * + * * @param theType * The type of resource to load * @param theId @@ -261,5 +261,4 @@ public interface IGenericClient extends IRestfulClient { */ @Deprecated T vread(Class theType, String theId, String theVersionId); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IHttpClient.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IHttpClient.java index 56df96d9874..c62b86afcca 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IHttpClient.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IHttpClient.java @@ -19,52 +19,52 @@ */ package ca.uhn.fhir.rest.client.api; -import java.util.List; -import java.util.Map; - -import org.hl7.fhir.instance.model.api.IBaseBinary; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.rest.api.EncodingEnum; +import org.hl7.fhir.instance.model.api.IBaseBinary; + +import java.util.List; +import java.util.Map; /** * A HTTP Client interface. */ public interface IHttpClient { - /** - * Create a byte request - * @param theContext TODO - * @param theContents the contents - * @param theContentType the contentType - * @param theEncoding the encoding - * @return the http request to be executed - */ - IHttpRequest createByteRequest(FhirContext theContext, String theContents, String theContentType, EncodingEnum theEncoding); + /** + * Create a byte request + * @param theContext TODO + * @param theContents the contents + * @param theContentType the contentType + * @param theEncoding the encoding + * @return the http request to be executed + */ + IHttpRequest createByteRequest( + FhirContext theContext, String theContents, String theContentType, EncodingEnum theEncoding); - /** - * Create a parameter request - * @param theContext TODO - * @param theParams the parameters - * @param theEncoding the encoding - * @return the http request to be executed - */ - IHttpRequest createParamRequest(FhirContext theContext, Map> theParams, EncodingEnum theEncoding); + /** + * Create a parameter request + * @param theContext TODO + * @param theParams the parameters + * @param theEncoding the encoding + * @return the http request to be executed + */ + IHttpRequest createParamRequest( + FhirContext theContext, Map> theParams, EncodingEnum theEncoding); - /** - * Create a binary request - * @param theContext TODO - * @param theBinary the binary - * @return the http request to be executed - */ - IHttpRequest createBinaryRequest(FhirContext theContext, IBaseBinary theBinary); - - /** - * Create a normal http get request - * @param theContext TODO - * @param theEncoding the request encoding - * @return the http request to be executed - */ - IHttpRequest createGetRequest(FhirContext theContext, EncodingEnum theEncoding); + /** + * Create a binary request + * @param theContext TODO + * @param theBinary the binary + * @return the http request to be executed + */ + IHttpRequest createBinaryRequest(FhirContext theContext, IBaseBinary theBinary); + /** + * Create a normal http get request + * @param theContext TODO + * @param theEncoding the request encoding + * @return the http request to be executed + */ + IHttpRequest createGetRequest(FhirContext theContext, EncodingEnum theEncoding); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IHttpRequest.java 
b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IHttpRequest.java index 6e91ad9d694..6790f1fe724 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IHttpRequest.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IHttpRequest.java @@ -96,5 +96,4 @@ public interface IHttpRequest { * @since 5.0.0 */ void setUrlSource(UrlSourceEnum theUrlSource); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IRestfulClientFactory.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IRestfulClientFactory.java index d67dd16f704..56c793d0934 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IRestfulClientFactory.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/IRestfulClientFactory.java @@ -46,17 +46,17 @@ public interface IRestfulClientFactory { * Default value for {@link #getSocketTimeout()} */ public static final int DEFAULT_SOCKET_TIMEOUT = 10000; - + /** * Default value for {@link #getPoolMaxTotal() ()} */ public static final int DEFAULT_POOL_MAX = 20; - + /** * Default value for {@link #getPoolMaxPerRoute() } */ public static final int DEFAULT_POOL_MAX_PER_ROUTE = DEFAULT_POOL_MAX; - + /** * Gets the connection request timeout, in milliseconds. This is the amount of time that the HTTPClient connection * pool may wait for an available connection before failing. This setting typically does not need to be adjusted. @@ -65,7 +65,7 @@ public interface IRestfulClientFactory { *

    */ int getConnectionRequestTimeout(); - + /** * Gets the connect timeout, in milliseconds. This is the amount of time that the initial connection attempt network * operation may block without failing. @@ -84,12 +84,17 @@ public interface IRestfulClientFactory { * @param theIfNoneExistString * The param for header "If-None-Exist" as a string * @param theRequestType - * the type of HTTP request (GET, DELETE, ..) + * the type of HTTP request (GET, DELETE, ..) * @param theHeaders * the headers to be sent together with the http request * @return the HTTP client instance */ - IHttpClient getHttpClient(StringBuilder theUrl, Map> theIfNoneExistParams, String theIfNoneExistString, RequestTypeEnum theRequestType, List
    theHeaders); + IHttpClient getHttpClient( + StringBuilder theUrl, + Map<String, List<String>> theIfNoneExistParams, + String theIfNoneExistString, + RequestTypeEnum theRequestType, + List<Header>
    theHeaders); /** * @deprecated Use {@link #getServerValidationMode()} instead (this method is a synonym for that method, but this method is poorly named and will be removed at some point) @@ -98,13 +103,13 @@ public interface IRestfulClientFactory { ServerValidationModeEnum getServerValidationModeEnum(); /** - * Gets the server validation mode for any clients created from this factory. Server + * Gets the server validation mode for any clients created from this factory. Server * validation involves the client requesting the server's conformance statement - * to determine whether the server is appropriate for the given client. + * to determine whether the server is appropriate for the given client. *

    * The default value for this setting is defined by {@link #DEFAULT_SERVER_VALIDATION_MODE} *

    - * + * * @since 1.0 */ ServerValidationModeEnum getServerValidationMode(); @@ -133,10 +138,10 @@ public interface IRestfulClientFactory { *

    */ int getPoolMaxPerRoute(); - + /** * Instantiates a new client instance - * + * * @param theClientType * The client type, which is an interface type to be instantiated * @param theServerBase @@ -149,7 +154,7 @@ public interface IRestfulClientFactory { /** * Instantiates a new generic client instance - * + * * @param theServerBase * The URL of the base for the restful FHIR server to connect to * @return A newly created client @@ -177,7 +182,7 @@ public interface IRestfulClientFactory { /** * Sets the Apache HTTP client instance to be used by any new restful clients created by this factory. If set to * null, a new HTTP client with default settings will be created. - * + * * @param theHttpClient * An HTTP client instance to use, or null */ @@ -185,7 +190,7 @@ public interface IRestfulClientFactory { /** * Sets the HTTP proxy to use for outgoing connections - * + * * @param theHost * The host (or null to disable proxying, as is the default) * @param thePort @@ -208,17 +213,17 @@ public interface IRestfulClientFactory { void setServerValidationModeEnum(ServerValidationModeEnum theServerValidationMode); /** - * Sets the server validation mode for any clients created from this factory. Server + * Sets the server validation mode for any clients created from this factory. Server * validation involves the client requesting the server's conformance statement - * to determine whether the server is appropriate for the given client. + * to determine whether the server is appropriate for the given client. *

    * This check is primarily to validate that the server supports an appropriate * version of FHIR - *

    + *

    *

    * The default value for this setting is defined by {@link #DEFAULT_SERVER_VALIDATION_MODE} *

    - * + * * @since 1.0 */ void setServerValidationMode(ServerValidationModeEnum theServerValidationMode); @@ -247,12 +252,12 @@ public interface IRestfulClientFactory { *

    */ void setPoolMaxPerRoute(int thePoolMaxPerRoute); - + void validateServerBase(String theServerBase, IHttpClient theHttpClient, IRestfulClient theClient); /** * This method is internal to HAPI - It may change in future versions, use with caution. - */ - void validateServerBaseIfConfiguredToDoSo(String theServerBase, IHttpClient theHttpClient, IRestfulClient theClient); - + */ + void validateServerBaseIfConfiguredToDoSo( + String theServerBase, IHttpClient theHttpClient, IRestfulClient theClient); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/ServerValidationModeEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/ServerValidationModeEnum.java index 1b29b88d837..ba8b03c8abe 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/ServerValidationModeEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/ServerValidationModeEnum.java @@ -24,15 +24,14 @@ import ca.uhn.fhir.context.FhirContext; public enum ServerValidationModeEnum { /** - * Do not validate the server's conformance statement before attempting to + * Do not validate the server's conformance statement before attempting to * call it. */ NEVER, - + /** * Validate the server's conformance statement once (per base URL) and cache the * results for the lifetime of the {@link FhirContext} */ ONCE - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/UrlSourceEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/UrlSourceEnum.java index feef25ff747..152a1cbf4a9 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/UrlSourceEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/api/UrlSourceEnum.java @@ -30,5 +30,4 @@ public enum UrlSourceEnum { * URL was supplied (i.e. 
it came from a paging link in a bundle) */ EXPLICIT - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/exceptions/FhirClientConnectionException.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/exceptions/FhirClientConnectionException.java index 41c7ac04eb1..a5c9a333c9c 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/exceptions/FhirClientConnectionException.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/exceptions/FhirClientConnectionException.java @@ -42,5 +42,4 @@ public class FhirClientConnectionException extends BaseServerResponseException { public FhirClientConnectionException(String theMessage) { super(500, theMessage); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/exceptions/FhirClientInappropriateForServerException.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/exceptions/FhirClientInappropriateForServerException.java index 7c4b214d0fd..24457e791a8 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/exceptions/FhirClientInappropriateForServerException.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/exceptions/FhirClientInappropriateForServerException.java @@ -43,5 +43,4 @@ public class FhirClientInappropriateForServerException extends BaseServerRespons public FhirClientInappropriateForServerException(String theMessage) { super(0, theMessage); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/exceptions/InvalidResponseException.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/exceptions/InvalidResponseException.java index 720a254de90..a5882c9aa49 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/exceptions/InvalidResponseException.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/exceptions/InvalidResponseException.java @@ -56,5 +56,4 @@ public class InvalidResponseException extends BaseServerResponseException { public InvalidResponseException(Throwable theCause, int theStatusCode) { super(theStatusCode, theCause.toString(), theCause); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/exceptions/NonFhirResponseException.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/exceptions/NonFhirResponseException.java index 9da0f04e01f..505777c758e 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/exceptions/NonFhirResponseException.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/client/exceptions/NonFhirResponseException.java @@ -46,7 +46,8 @@ public class NonFhirResponseException extends BaseServerResponseException { super(theStatusCode, theMessage); } - public static NonFhirResponseException newInstance(int theStatusCode, String theContentType, InputStream theInputStream) { + public static NonFhirResponseException newInstance( + int theStatusCode, String theContentType, InputStream theInputStream) { return newInstance(theStatusCode, theContentType, new InputStreamReader(theInputStream, Charsets.UTF_8)); } @@ -68,13 +69,15 @@ public class NonFhirResponseException extends BaseServerResponseException { if (isBlank(theContentType)) { retVal = new NonFhirResponseException(theStatusCode, "Response contains no Content-Type"); } else if (theContentType.contains("text")) { - retVal = new NonFhirResponseException(theStatusCode, "Response contains non FHIR Content-Type '" + theContentType + "' : " + responseBody); + retVal = new NonFhirResponseException( + theStatusCode, + "Response contains non FHIR Content-Type '" + theContentType + "' : " + 
responseBody); } else { - retVal = new NonFhirResponseException(theStatusCode, "Response contains non FHIR Content-Type '" + theContentType + "'"); + retVal = new NonFhirResponseException( + theStatusCode, "Response contains non FHIR Content-Type '" + theContentType + "'"); } retVal.setResponseBody(responseBody); return retVal; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/BaseClientParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/BaseClientParam.java index fd5fc1ae098..a11acd8b508 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/BaseClientParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/BaseClientParam.java @@ -26,14 +26,13 @@ abstract class BaseClientParam implements IParam { @Override public ICriterion isMissing(boolean theMissing) { - return new MissingCriterion(theMissing ? Constants.PARAMQUALIFIER_MISSING_TRUE : Constants.PARAMQUALIFIER_MISSING_FALSE); + return new MissingCriterion( + theMissing ? Constants.PARAMQUALIFIER_MISSING_TRUE : Constants.PARAMQUALIFIER_MISSING_FALSE); } - private class MissingCriterion implements ICriterion, ICriterionInternal - { + private class MissingCriterion implements ICriterion, ICriterionInternal { private String myParameterValue; - public MissingCriterion(String theParameterValue) { myParameterValue = theParameterValue; } @@ -47,7 +46,5 @@ abstract class BaseClientParam implements IParam { public String getParameterName() { return BaseClientParam.this.getParamName() + Constants.PARAMQUALIFIER_MISSING; } - } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/CompositeClientParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/CompositeClientParam.java index f36f729adb7..6f90ef620d7 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/CompositeClientParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/CompositeClientParam.java @@ -27,18 +27,15 @@ public class CompositeClientParam extends Ba private String myName; public CompositeClientParam(String theName) { - myName=theName; + myName = theName; } - @Override public String getParamName() { return myName; } - + public ICompositeWithLeft withLeft(ICriterion theLeft) { - return new CompositeCriterion(myName, theLeft); + return new CompositeCriterion(myName, theLeft); } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/CompositeCriterion.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/CompositeCriterion.java index 9425a3fa7b1..634cd115aa7 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/CompositeCriterion.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/CompositeCriterion.java @@ -19,11 +19,12 @@ */ package ca.uhn.fhir.rest.gclient; -import static org.apache.commons.lang3.StringUtils.defaultString; - import ca.uhn.fhir.context.FhirContext; -public class CompositeCriterion implements ICompositeWithLeft, ICriterion, ICriterionInternal { +import static org.apache.commons.lang3.StringUtils.defaultString; + +public class CompositeCriterion + implements ICompositeWithLeft, ICriterion, ICriterionInternal { private ICriterion myRight; private String myName; @@ -44,12 +45,13 @@ public class CompositeCriterion implements I public String getParameterValue(FhirContext theContext) { ICriterionInternal left = (ICriterionInternal) myLeft; ICriterionInternal right = (ICriterionInternal) myRight; - return defaultString(left.getParameterValue(theContext)) + '$' + 
defaultString(right.getParameterValue(theContext)); + return defaultString(left.getParameterValue(theContext)) + + '$' + + defaultString(right.getParameterValue(theContext)); } @Override public String getParameterName() { return myName; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/DateClientParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/DateClientParam.java index 058a6817b11..79273c1a4fe 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/DateClientParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/DateClientParam.java @@ -19,19 +19,19 @@ */ package ca.uhn.fhir.rest.gclient; -import static org.apache.commons.lang3.StringUtils.isNotBlank; - -import java.util.Date; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.model.api.TemporalPrecisionEnum; import ca.uhn.fhir.model.primitive.DateTimeDt; import ca.uhn.fhir.rest.param.ParamPrefixEnum; +import java.util.Date; + +import static org.apache.commons.lang3.StringUtils.isNotBlank; + /** * Date parameter type for use in fluent client interfaces */ -public class DateClientParam extends BaseClientParam implements IParam { +public class DateClientParam extends BaseClientParam implements IParam { private String myParamName; @@ -125,7 +125,6 @@ public class DateClientParam extends BaseClientParam implements IParam { public IDateSpecifier orExactly() { return new DateWithPrefix(ParamPrefixEnum.EQUAL, this); } - } private class DateWithPrefix implements IDateSpecifier { @@ -215,7 +214,6 @@ public class DateClientParam extends BaseClientParam implements IParam { IDateCriterion millis(Date theValue); IDateCriterion millis(String theValue); - } public interface IDateCriterion extends ICriterion { @@ -229,5 +227,4 @@ public class DateClientParam extends BaseClientParam implements IParam { IDateSpecifier orExactly(); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IBaseOn.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IBaseOn.java index 47fe8f0b514..57eb8ba221f 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IBaseOn.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IBaseOn.java @@ -28,7 +28,7 @@ public interface IBaseOn { * Perform the operation across all versions of all resources of all types on the server */ T onServer(); - + /** * Perform the operation across all versions of all resources of the given type on the server */ @@ -45,8 +45,8 @@ public interface IBaseOn { * Perform the operation across all versions of a specific resource (by ID and type) on the server. * Note that theId must be populated with both a resource type and a resource ID at * a minimum. - * - * @throws IllegalArgumentException If theId does not contain at least a resource type and ID + * + * @throws IllegalArgumentException If theId does not contain at least a resource type and ID */ T onInstance(IIdType theId); @@ -58,5 +58,4 @@ public interface IBaseOn { * @throws IllegalArgumentException If theId does not contain at least a resource type and ID */ T onInstance(String theId); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IBaseQuery.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IBaseQuery.java index 9758e315b5f..d5c0b8d2ed3 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IBaseQuery.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IBaseQuery.java @@ -56,5 +56,4 @@ public interface IBaseQuery> { *

    */ T whereMap(Map> theRawMap); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IClientExecutable.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IClientExecutable.java index a97c0a0afab..fd9ebb8e563 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IClientExecutable.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IClientExecutable.java @@ -27,7 +27,6 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import java.util.List; - public interface IClientExecutable, Y> { /** diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICompositeWithLeft.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICompositeWithLeft.java index b9e202b1a8e..57933dde538 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICompositeWithLeft.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICompositeWithLeft.java @@ -22,5 +22,4 @@ package ca.uhn.fhir.rest.gclient; public interface ICompositeWithLeft { ICriterion withRight(ICriterion theRight); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICreate.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICreate.java index 44cf19d870e..2e254069d28 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICreate.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICreate.java @@ -21,8 +21,8 @@ package ca.uhn.fhir.rest.gclient; import org.hl7.fhir.instance.model.api.IBaseResource; -public interface ICreate { +public interface ICreate { ICreateTyped resource(IBaseResource theResource); - + ICreateTyped resource(String theResourceAsText); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICreateTyped.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICreateTyped.java index 3d51604b5d8..b4c04b46b03 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICreateTyped.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICreateTyped.java @@ -23,7 +23,7 @@ import ca.uhn.fhir.rest.api.MethodOutcome; import ca.uhn.fhir.rest.api.PreferReturnEnum; public interface ICreateTyped extends IClientExecutable { - + /** * @since HAPI 0.9 / FHIR DSTU 2 */ @@ -40,12 +40,11 @@ public interface ICreateTyped extends IClientExecutablePrefer
    header to the request, which requests that the server include + * Add a Prefer header to the request, which requests that the server include * or suppress the resource body as a part of the result. If a resource is returned by the server * it will be parsed an accessible to the client via {@link MethodOutcome#getResource()} - * + * * @since HAPI 1.1 */ ICreateTyped prefer(PreferReturnEnum theReturn); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICreateWithQuery.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICreateWithQuery.java index b179c9c17e7..6b2cdaf439b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICreateWithQuery.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICreateWithQuery.java @@ -19,7 +19,4 @@ */ package ca.uhn.fhir.rest.gclient; - -public interface ICreateWithQuery extends IBaseQuery { - -} +public interface ICreateWithQuery extends IBaseQuery {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICreateWithQueryTyped.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICreateWithQueryTyped.java index 16fe644b9c5..2ca9a9ef6b7 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICreateWithQueryTyped.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICreateWithQueryTyped.java @@ -19,6 +19,4 @@ */ package ca.uhn.fhir.rest.gclient; -public interface ICreateWithQueryTyped extends ICreateTyped, ICreateWithQuery { - -} +public interface ICreateWithQueryTyped extends ICreateTyped, ICreateWithQuery {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICriterion.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICriterion.java index fb793070ea9..5402aa2a032 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICriterion.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICriterion.java @@ -19,6 +19,4 @@ */ package ca.uhn.fhir.rest.gclient; -public interface ICriterion { - -} +public interface ICriterion {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICriterionInternal.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICriterionInternal.java index 106e76c88e3..731d06e0770 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICriterionInternal.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ICriterionInternal.java @@ -26,5 +26,4 @@ public interface ICriterionInternal { String getParameterValue(FhirContext theContext); String getParameterName(); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IDelete.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IDelete.java index c857e30cfbb..7c8eb478dc7 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IDelete.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IDelete.java @@ -25,9 +25,9 @@ import org.hl7.fhir.instance.model.api.IIdType; public interface IDelete { IDeleteTyped resource(IBaseResource theResource); - + IDeleteTyped resourceById(IIdType theId); - + IDeleteTyped resourceById(String theResourceType, String theLogicalId); /** @@ -51,5 +51,4 @@ public interface IDelete { * @since HAPI 1.3 */ IDeleteWithQuery resourceConditionalByType(Class theResourceType); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IDeleteTyped.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IDeleteTyped.java index cc7273c3c1a..0f1f38ebf9c 100644 --- 
a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IDeleteTyped.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IDeleteTyped.java @@ -21,7 +21,6 @@ package ca.uhn.fhir.rest.gclient; import ca.uhn.fhir.rest.api.DeleteCascadeModeEnum; import ca.uhn.fhir.rest.api.MethodOutcome; -import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; public interface IDeleteTyped extends IClientExecutable { @@ -29,5 +28,4 @@ public interface IDeleteTyped extends IClientExecutable { - -} +public interface IDeleteWithQuery extends IBaseQuery {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IDeleteWithQueryTyped.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IDeleteWithQueryTyped.java index 7b9309d3533..8016fb85238 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IDeleteWithQueryTyped.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IDeleteWithQueryTyped.java @@ -19,6 +19,4 @@ */ package ca.uhn.fhir.rest.gclient; -public interface IDeleteWithQueryTyped extends IDeleteTyped, IDeleteWithQuery { - -} +public interface IDeleteWithQueryTyped extends IDeleteTyped, IDeleteWithQuery {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IFetchConformanceTyped.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IFetchConformanceTyped.java index 518d1d99c22..981c70e9e44 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IFetchConformanceTyped.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IFetchConformanceTyped.java @@ -21,6 +21,5 @@ package ca.uhn.fhir.rest.gclient; import org.hl7.fhir.instance.model.api.IBaseConformance; -public interface IFetchConformanceTyped extends IClientExecutable, T> { - -} +public interface IFetchConformanceTyped + extends IClientExecutable, T> {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IFetchConformanceUntyped.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IFetchConformanceUntyped.java index 5f4e5b5c140..1186e57b967 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IFetchConformanceUntyped.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IFetchConformanceUntyped.java @@ -27,5 +27,4 @@ public interface IFetchConformanceUntyped { * Retrieve the conformance statement using the given model type */ IFetchConformanceTyped ofType(Class theType); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IGetPage.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IGetPage.java index b2282f4b931..754a4f82827 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IGetPage.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IGetPage.java @@ -26,7 +26,7 @@ public interface IGetPage { /** * Load the next page of results using the link with relation "next" in the bundle. This * method accepts a DSTU2 Bundle resource - * + * * @since 1.1 */ IGetPageTyped next(T theBundle); @@ -34,7 +34,7 @@ public interface IGetPage { /** * Load the previous page of results using the link with relation "prev" in the bundle. 
This * method accepts a DSTU2+ Bundle resource - * + * * @since 1.1 */ IGetPageTyped previous(T theBundle); @@ -43,5 +43,4 @@ public interface IGetPage { * Load a page of results using the a given URL and return a DSTU1 Atom bundle */ IGetPageUntyped byUrl(String thePageUrl); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IGetPageTyped.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IGetPageTyped.java index c5bf36111ad..5dd5d49a20c 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IGetPageTyped.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IGetPageTyped.java @@ -19,11 +19,9 @@ */ package ca.uhn.fhir.rest.gclient; - import ca.uhn.fhir.rest.api.PagingHttpMethodEnum; public interface IGetPageTyped extends IClientExecutable, T> { IGetPageTyped usingMethod(PagingHttpMethodEnum thePagingHttpMethod); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IGetPageUntyped.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IGetPageUntyped.java index 8b38962b7ce..fa2941e8a72 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IGetPageUntyped.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IGetPageUntyped.java @@ -22,11 +22,9 @@ package ca.uhn.fhir.rest.gclient; import org.hl7.fhir.instance.model.api.IBaseBundle; public interface IGetPageUntyped { - + /** * Return a Bundle resource of the given type */ IGetPageTyped andReturnBundle(Class theBundleType); - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IHistory.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IHistory.java index 2b49cbd4de9..a59e351c146 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IHistory.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IHistory.java @@ -19,6 +19,4 @@ */ package ca.uhn.fhir.rest.gclient; -public interface IHistory extends IBaseOn { - -} +public interface IHistory extends IBaseOn {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IHistoryTyped.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IHistoryTyped.java index f2baf55b7f3..6fefe04dca2 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IHistoryTyped.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IHistoryTyped.java @@ -50,6 +50,4 @@ public interface IHistoryTyped extends IClientExecutable, T> * Request that the server return only resource versions that were created at or after the given time (inclusive) */ IHistoryTyped since(Date theCutoff); - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IHistoryUntyped.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IHistoryUntyped.java index faa1e9099cb..07cf6f89f43 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IHistoryUntyped.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IHistoryUntyped.java @@ -37,5 +37,4 @@ public interface IHistoryUntyped { * @since 4.0.0 */ IHistoryTyped returnBundle(Class theType); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IMeta.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IMeta.java index 48e9959374e..0c27fdf88bb 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IMeta.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IMeta.java @@ -22,10 +22,10 @@ package ca.uhn.fhir.rest.gclient; import org.hl7.fhir.instance.model.api.IBaseMetaType; public interface IMeta { - + /** * Fetch the current 
metadata - * + * * @param theMetaType The type of the meta datatype for the given FHIR model version (should be MetaDt.class or MetaType.class) */ IMetaGetUnsourced get(Class theMetaType); @@ -36,8 +36,7 @@ public interface IMeta { IMetaAddOrDeleteUnsourced add(); /** - * Delete the elements in the given metadata to the + * Delete the elements in the given metadata to the */ IMetaAddOrDeleteUnsourced delete(); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IMetaAddOrDeleteUnsourced.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IMetaAddOrDeleteUnsourced.java index c4b697a1f57..ee93a1d03b4 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IMetaAddOrDeleteUnsourced.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IMetaAddOrDeleteUnsourced.java @@ -24,5 +24,4 @@ import org.hl7.fhir.instance.model.api.IIdType; public interface IMetaAddOrDeleteUnsourced { IMetaAddOrDeleteSourced onResource(IIdType theId); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IMetaGetUnsourced.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IMetaGetUnsourced.java index f6a5e4f6326..17c3eed1aa4 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IMetaGetUnsourced.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IMetaGetUnsourced.java @@ -30,10 +30,9 @@ public interface IMetaGetUnsourced { /** * Get the meta from a resource instance by ID. - * + * * @param theId * The ID. Must contain both a resource type and an ID part */ IClientExecutable, T> fromResource(IIdType theId); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperation.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperation.java index 0607930db6e..4e093aea7a7 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperation.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperation.java @@ -61,5 +61,4 @@ public interface IOperation extends IBaseOn { * using FHIR Resources */ IOperationProcessMsg processMessage(); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationOn.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationOn.java index a11ce5a93ae..ef81fd8d186 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationOn.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationOn.java @@ -19,8 +19,4 @@ */ package ca.uhn.fhir.rest.gclient; -public interface IOperationOn extends IBaseOn { - - - -} +public interface IOperationOn extends IBaseOn {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationProcessMsg.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationProcessMsg.java index 79df7257018..c814852ad95 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationProcessMsg.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationProcessMsg.java @@ -31,23 +31,23 @@ import org.hl7.fhir.instance.model.api.IBaseResource; * * @author HGS */ -public interface IOperationProcessMsg{ +public interface IOperationProcessMsg { - /** - * Set the Message Bundle to POST to the messaging server.
    - * After this call you must choose either the method synchronous or asynchronous to set the processing mode. - * - * @param - * @param theMsgBundle A Bundle of type message - * @return - */ - IOperationProcessMsgMode setMessageBundle(IBaseBundle theMsgBundle); + /** + * Set the Message Bundle to POST to the messaging server.
    + * After this call you must choose either the method synchronous or asynchronous to set the processing mode. + * + * @param + * @param theMsgBundle A Bundle of type message + * @return + */ + IOperationProcessMsgMode setMessageBundle(IBaseBundle theMsgBundle); - /** - * An optional query parameter indicating that responses from the receiving server should be sent to this url - * - * @param respondToUri The receiving endpoint to witch server response messages should be sent. - * @return - */ - IOperationProcessMsg setResponseUrlParam(String respondToUri); + /** + * An optional query parameter indicating that responses from the receiving server should be sent to this url + * + * @param respondToUri The receiving endpoint to witch server response messages should be sent. + * @return + */ + IOperationProcessMsg setResponseUrlParam(String respondToUri); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationProcessMsgMode.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationProcessMsgMode.java index 561352683e7..e782a3a4795 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationProcessMsgMode.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationProcessMsgMode.java @@ -26,9 +26,10 @@ package ca.uhn.fhir.rest.gclient; import org.hl7.fhir.instance.model.api.IBaseResource; -public interface IOperationProcessMsgMode extends IClientExecutable, T> { - - IOperationProcessMsgMode asynchronous(Class theResponseClass); - - IOperationProcessMsgMode synchronous(Class theResponseClass); +public interface IOperationProcessMsgMode + extends IClientExecutable, T> { + + IOperationProcessMsgMode asynchronous(Class theResponseClass); + + IOperationProcessMsgMode synchronous(Class theResponseClass); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationTyped.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationTyped.java index 18d6d5bc821..88c6cfbb064 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationTyped.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationTyped.java @@ -19,8 +19,4 @@ */ package ca.uhn.fhir.rest.gclient; -public interface IOperationTyped { - - - -} +public interface IOperationTyped {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationUnnamed.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationUnnamed.java index 0af8dd569b2..56221a29b4b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationUnnamed.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationUnnamed.java @@ -19,7 +19,6 @@ */ package ca.uhn.fhir.rest.gclient; - public interface IOperationUnnamed { IOperationUntyped named(String theName); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationUntyped.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationUntyped.java index 67ae039f98c..50876f60e31 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationUntyped.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationUntyped.java @@ -19,17 +19,15 @@ */ package ca.uhn.fhir.rest.gclient; -import org.hl7.fhir.instance.model.api.IBase; - -import org.hl7.fhir.instance.model.api.IBaseParameters; - import ca.uhn.fhir.model.api.IQueryParameterType; +import org.hl7.fhir.instance.model.api.IBase; +import org.hl7.fhir.instance.model.api.IBaseParameters; public interface IOperationUntyped { /** * Use the given 
parameters resource as the input to the operation - * + * * @param theParameters The parameters to use as input. May also be null if the operation * does not require any input parameters. */ @@ -37,7 +35,7 @@ public interface IOperationUntyped { /** * The operation does not require any input parameters - * + * * @param theOutputParameterType The type to use for the output parameters (this should be set to * Parameters.class drawn from the version of the FHIR structures you are using) */ @@ -58,24 +56,25 @@ public interface IOperationUntyped { * .execute(); * *
    - * + * * @param theParameterType The type to use for the output parameters (this should be set to * Parameters.class drawn from the version of the FHIR structures you are using) * @param theName The first parameter name * @param theValue The first parameter value */ - IOperationUntypedWithInputAndPartialOutput withParameter(Class theParameterType, String theName, IBase theValue); + IOperationUntypedWithInputAndPartialOutput withParameter( + Class theParameterType, String theName, IBase theValue); /** * Use chained method calls to construct a Parameters input. This form is a convenience * in order to allow simple method chaining to be used to build up a parameters * resource for the input of an operation without needing to manually construct one. - * + * * @param theParameterType The type to use for the output parameters (this should be set to * Parameters.class drawn from the version of the FHIR structures you are using) * @param theName The first parameter name * @param theValue The first parameter value */ - IOperationUntypedWithInputAndPartialOutput withSearchParameter(Class theParameterType, String theName, IQueryParameterType theValue); - + IOperationUntypedWithInputAndPartialOutput withSearchParameter( + Class theParameterType, String theName, IQueryParameterType theValue); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationUntypedWithInput.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationUntypedWithInput.java index 5786456bbd0..d1293db973f 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationUntypedWithInput.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationUntypedWithInput.java @@ -28,7 +28,7 @@ public interface IOperationUntypedWithInput extends IClientExecutable * If you have a specific reason for needing to use a GET however, this method will enable it. *
    diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationUntypedWithInputAndPartialOutput.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationUntypedWithInputAndPartialOutput.java index 35af4b9da83..fd612982a41 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationUntypedWithInputAndPartialOutput.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IOperationUntypedWithInputAndPartialOutput.java @@ -19,18 +19,18 @@ */ package ca.uhn.fhir.rest.gclient; +import ca.uhn.fhir.model.api.IQueryParameterType; import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseParameters; -import ca.uhn.fhir.model.api.IQueryParameterType; - -public interface IOperationUntypedWithInputAndPartialOutput extends IOperationUntypedWithInput { +public interface IOperationUntypedWithInputAndPartialOutput + extends IOperationUntypedWithInput { /** * Use chained method calls to construct a Parameters input. This form is a convenience * in order to allow simple method chaining to be used to build up a parameters * resource for the input of an operation without needing to manually construct one. - * + * * @param theName The first parameter name * @param theValue The first parameter value */ @@ -42,10 +42,9 @@ public interface IOperationUntypedWithInputAndPartialOutput andSearchParameter(String theName, IQueryParameterType theValue); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IParam.java index 3726a1d0b9f..a3fd88a2c67 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IParam.java @@ -32,5 +32,4 @@ public interface IParam { * false to indicate that the server should return resources with this value missing. */ ICriterion isMissing(boolean theMissing); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IPatchExecutable.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IPatchExecutable.java index 5990fe0d918..345742096b6 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IPatchExecutable.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IPatchExecutable.java @@ -22,15 +22,14 @@ package ca.uhn.fhir.rest.gclient; import ca.uhn.fhir.rest.api.MethodOutcome; import ca.uhn.fhir.rest.api.PreferReturnEnum; -public interface IPatchExecutable extends IClientExecutable{ +public interface IPatchExecutable extends IClientExecutable { /** - * Add a Prefer header to the request, which requests that the server include + * Add a Prefer header to the request, which requests that the server include * or suppress the resource body as a part of the result. 
If a resource is returned by the server * it will be parsed an accessible to the client via {@link MethodOutcome#getResource()} - * + * * @since HAPI 1.1 */ IPatchExecutable prefer(PreferReturnEnum theReturn); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IPatchWithBody.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IPatchWithBody.java index 8d7cbf75072..2c8199d9207 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IPatchWithBody.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IPatchWithBody.java @@ -20,7 +20,6 @@ package ca.uhn.fhir.rest.gclient; import org.hl7.fhir.instance.model.api.IBaseResource; - import org.hl7.fhir.instance.model.api.IIdType; public interface IPatchWithBody extends IPatchExecutable { @@ -35,7 +34,7 @@ public interface IPatchWithBody extends IPatchExecutable { /** * Build a conditional URL using fluent constants on resource types - * + * * @param theResourceType * The resource type to patch (e.g. "Patient") */ @@ -60,5 +59,4 @@ public interface IPatchWithBody extends IPatchExecutable { * The resource ID to patch (must include both a resource type and an ID, e.g. Patient/123) */ IPatchExecutable withId(String theId); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IPatchWithQuery.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IPatchWithQuery.java index 13a35c2698d..625874c1253 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IPatchWithQuery.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IPatchWithQuery.java @@ -19,7 +19,4 @@ */ package ca.uhn.fhir.rest.gclient; - -public interface IPatchWithQuery extends IBaseQuery { - -} +public interface IPatchWithQuery extends IBaseQuery {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IPatchWithQueryTyped.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IPatchWithQueryTyped.java index e28bbd04ce7..52cbb90319f 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IPatchWithQueryTyped.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IPatchWithQueryTyped.java @@ -19,6 +19,4 @@ */ package ca.uhn.fhir.rest.gclient; -public interface IPatchWithQueryTyped extends IPatchWithBody, IPatchWithQuery { - -} +public interface IPatchWithQueryTyped extends IPatchWithBody, IPatchWithQuery {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IQuery.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IQuery.java index 15ca5fbcaab..e84e610e386 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IQuery.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IQuery.java @@ -23,13 +23,10 @@ import ca.uhn.fhir.model.api.Include; import ca.uhn.fhir.rest.api.SearchStyleEnum; import ca.uhn.fhir.rest.api.SearchTotalModeEnum; import ca.uhn.fhir.rest.api.SortSpec; -import ca.uhn.fhir.rest.api.SummaryEnum; import ca.uhn.fhir.rest.param.DateRangeParam; import org.hl7.fhir.instance.model.api.IBaseBundle; import java.util.Collection; -import java.util.List; -import java.util.Map; public interface IQuery extends IBaseQuery>, IClientExecutable, Y> { @@ -176,6 +173,6 @@ public interface IQuery extends IBaseQuery>, IClientExecutable withTag(String theSystem, String theCode); -// Y execute(); + // Y execute(); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IReadExecutable.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IReadExecutable.java index 3f38e73f5ae..4a779ceb1a7 100644 --- 
a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IReadExecutable.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IReadExecutable.java @@ -21,7 +21,7 @@ package ca.uhn.fhir.rest.gclient; import org.hl7.fhir.instance.model.api.IBaseResource; -public interface IReadExecutable extends IClientExecutable, T>{ +public interface IReadExecutable extends IClientExecutable, T> { /** * Send an "If-None-Match" header containing theVersion, which requests @@ -32,5 +32,4 @@ public interface IReadExecutable extends IClientExecuta * @param theVersion The version ID (e.g. "123") */ IReadIfNoneMatch ifVersionMatches(String theVersion); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IReadIfNoneMatch.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IReadIfNoneMatch.java index e26a3e8d67c..db5e635c5bd 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IReadIfNoneMatch.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IReadIfNoneMatch.java @@ -19,9 +19,8 @@ */ package ca.uhn.fhir.rest.gclient; -import org.hl7.fhir.instance.model.api.IBaseResource; - import ca.uhn.fhir.rest.server.exceptions.NotModifiedException; +import org.hl7.fhir.instance.model.api.IBaseResource; public interface IReadIfNoneMatch { @@ -30,17 +29,16 @@ public interface IReadIfNoneMatch { * return the given instance. */ IReadExecutable returnResource(T theInstance); - + /** * If the server responds with an HTTP 301 Not Modified, * return null. */ IReadExecutable returnNull(); - + /** * If the server responds with an HTTP 301 Not Modified, * throw a {@link NotModifiedException}. */ IReadExecutable throwNotModifiedException(); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IReadTyped.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IReadTyped.java index 92b37a5aefa..5b02b156409 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IReadTyped.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IReadTyped.java @@ -26,14 +26,14 @@ public interface IReadTyped { /** * Perform a search by resource ID - * + * * @param theId The resource ID, e.g. "123" */ IReadExecutable withId(String theId); /** * Perform a search by resource ID and version - * + * * @param theId The resource ID, e.g. "123" * @param theVersion The resource version, eg. "5" */ @@ -41,14 +41,14 @@ public interface IReadTyped { /** * Perform a search by resource ID - * + * * @param theId The resource ID, e.g. "123" */ IReadExecutable withId(Long theId); - + /** * Search using an ID. Note that even if theId contains a base URL it will be - * ignored in favour of the base url for the given client. If you want to specify + * ignored in favour of the base url for the given client. If you want to specify * an absolute URL including a base and have that base used instead, use * {@link #withUrl(IIdType)} */ @@ -57,5 +57,4 @@ public interface IReadTyped { IReadExecutable withUrl(String theUrl); IReadExecutable withUrl(IIdType theUrl); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ISort.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ISort.java index c8de0bbed63..45cba02bcc3 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ISort.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ISort.java @@ -63,5 +63,4 @@ public interface ISort { * @param theParam The param name, e.g. 
"address" */ IQuery descending(String theParam); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ITransaction.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ITransaction.java index e20facda7cd..cd27b6d7981 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ITransaction.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ITransaction.java @@ -19,18 +19,18 @@ */ package ca.uhn.fhir.rest.gclient; -import java.util.List; - import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.instance.model.api.IBaseResource; +import java.util.List; + public interface ITransaction { /** * Use a list of resources as the transaction input */ ITransactionTyped> withResources(List theResources); - + /** * Use the given Bundle resource as the transaction input */ @@ -40,5 +40,4 @@ public interface ITransaction { * Use the given raw text (should be a Bundle resource) as the transaction input */ ITransactionTyped withBundle(String theBundle); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ITransactionTyped.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ITransactionTyped.java index afc2d381133..beaeb62d1e1 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ITransactionTyped.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ITransactionTyped.java @@ -19,9 +19,8 @@ */ package ca.uhn.fhir.rest.gclient; - public interface ITransactionTyped extends IClientExecutable, T> { - + // nothing for now } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IUntypedQuery.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IUntypedQuery.java index 47eb32588cb..56cc4705d94 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IUntypedQuery.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IUntypedQuery.java @@ -32,11 +32,10 @@ public interface IUntypedQuery { /** * Perform a search directly by URL. It is usually better to construct the URL using the {@link #forAllResources()}, {@link #forResource(Class)} etc, but sometimes it is useful to simply search by * entering a search URL directly. - * + * * @param theSearchUrl * The URL to search for. Note that this URL may be complete (e.g. "http://example.com/base/Patient?name=foo") in which case the client's base URL will be ignored. Or it can be relative * (e.g. "Patient?name=foo") in which case the client's base URL will be used. 
*/ IQuery byUrl(String theSearchUrl); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IUpdate.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IUpdate.java index b80bad639cb..489c09f29ba 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IUpdate.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IUpdate.java @@ -26,5 +26,4 @@ public interface IUpdate { IUpdateTyped resource(IBaseResource theResource); IUpdateTyped resource(String theResourceBody); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IUpdateExecutable.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IUpdateExecutable.java index f5e4bf4e7fa..0a56486893e 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IUpdateExecutable.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IUpdateExecutable.java @@ -22,15 +22,14 @@ package ca.uhn.fhir.rest.gclient; import ca.uhn.fhir.rest.api.MethodOutcome; import ca.uhn.fhir.rest.api.PreferReturnEnum; -public interface IUpdateExecutable extends IClientExecutable{ +public interface IUpdateExecutable extends IClientExecutable { /** - * Add a Prefer header to the request, which requests that the server include + * Add a Prefer header to the request, which requests that the server include * or suppress the resource body as a part of the result. If a resource is returned by the server * it will be parsed an accessible to the client via {@link MethodOutcome#getResource()} - * + * * @since HAPI 1.1 */ IUpdateExecutable prefer(PreferReturnEnum theReturn); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IUpdateWithQuery.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IUpdateWithQuery.java index d76caeeeec1..06cfb4e041b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IUpdateWithQuery.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IUpdateWithQuery.java @@ -19,7 +19,4 @@ */ package ca.uhn.fhir.rest.gclient; - -public interface IUpdateWithQuery extends IBaseQuery { - -} +public interface IUpdateWithQuery extends IBaseQuery {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IUpdateWithQueryTyped.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IUpdateWithQueryTyped.java index 37a0408a828..19263ac53a5 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IUpdateWithQueryTyped.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IUpdateWithQueryTyped.java @@ -19,6 +19,4 @@ */ package ca.uhn.fhir.rest.gclient; -public interface IUpdateWithQueryTyped extends IUpdateTyped, IUpdateWithQuery { - -} +public interface IUpdateWithQueryTyped extends IUpdateTyped, IUpdateWithQuery {} diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IValidate.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IValidate.java index 000325d270f..20a2fd838d9 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IValidate.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IValidate.java @@ -27,10 +27,9 @@ public interface IValidate { * Use a resource as validate input */ IValidateUntyped resource(IBaseResource theResource); - + /** * Use a raw resource as validate input */ IValidateUntyped resource(String theRawResource); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IValidateUntyped.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IValidateUntyped.java index 317b1477364..3a9ba9c3454 100644 --- 
a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IValidateUntyped.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/IValidateUntyped.java @@ -21,9 +21,8 @@ package ca.uhn.fhir.rest.gclient; import ca.uhn.fhir.rest.api.MethodOutcome; - public interface IValidateUntyped extends IClientExecutable { - + // nothing for now } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/NumberClientParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/NumberClientParam.java index c81cbce0b6d..9e17ba2ecf8 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/NumberClientParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/NumberClientParam.java @@ -24,7 +24,7 @@ import ca.uhn.fhir.rest.param.ParamPrefixEnum; /** * Token parameter type for use in fluent client interfaces */ -public class NumberClientParam extends BaseClientParam implements IParam { +public class NumberClientParam extends BaseClientParam implements IParam { private final String myParamName; @@ -69,7 +69,8 @@ public class NumberClientParam extends BaseClientParam implements IParam { return new IMatches>() { @Override public ICriterion number(long theNumber) { - return new StringCriterion<>(getParamName(), ParamPrefixEnum.GREATERTHAN_OR_EQUALS, Long.toString(theNumber)); + return new StringCriterion<>( + getParamName(), ParamPrefixEnum.GREATERTHAN_OR_EQUALS, Long.toString(theNumber)); } @Override @@ -97,7 +98,8 @@ public class NumberClientParam extends BaseClientParam implements IParam { return new IMatches>() { @Override public ICriterion number(long theNumber) { - return new StringCriterion<>(getParamName(), ParamPrefixEnum.LESSTHAN_OR_EQUALS, Long.toString(theNumber)); + return new StringCriterion<>( + getParamName(), ParamPrefixEnum.LESSTHAN_OR_EQUALS, Long.toString(theNumber)); } @Override @@ -124,7 +126,7 @@ public class NumberClientParam extends BaseClientParam implements IParam { public interface IMatches { /** * Creates a search criterion that matches against the given number - * + * * @param theNumber * The number * @return A criterion @@ -133,12 +135,11 @@ public class NumberClientParam extends BaseClientParam implements IParam { /** * Creates a search criterion that matches against the given number - * + * * @param theNumber * The number * @return A criterion */ T number(String theNumber); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/QuantityClientParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/QuantityClientParam.java index 8ab9263a0a8..83784c0f852 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/QuantityClientParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/QuantityClientParam.java @@ -19,16 +19,16 @@ */ package ca.uhn.fhir.rest.gclient; -import static org.apache.commons.lang3.StringUtils.defaultString; - import ca.uhn.fhir.rest.gclient.NumberClientParam.IMatches; import ca.uhn.fhir.rest.param.ParamPrefixEnum; +import static org.apache.commons.lang3.StringUtils.defaultString; + /** * Quantity parameter type for use in fluent client interfaces */ @SuppressWarnings("deprecation") -public class QuantityClientParam extends BaseClientParam implements IParam { +public class QuantityClientParam extends BaseClientParam implements IParam { private String myParamName; @@ -127,7 +127,7 @@ public class QuantityClientParam extends BaseClientParam implements IParam { /** * Use the given quantity prefix - * + * * @param thePrefix The prefix, or null for no prefix */ public 
IMatches withPrefix(final ParamPrefixEnum thePrefix) { @@ -175,9 +175,8 @@ public class QuantityClientParam extends BaseClientParam implements IParam { @Override public ICriterion andUnits(String theSystem, String theUnits) { - return new QuantityCriterion(getParamName(), myPrefix, myValue , defaultString(theSystem) , defaultString(theUnits)); + return new QuantityCriterion( + getParamName(), myPrefix, myValue, defaultString(theSystem), defaultString(theUnits)); } - } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/QuantityCriterion.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/QuantityCriterion.java index 26ff6e9f2e2..e3d8bad5ee3 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/QuantityCriterion.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/QuantityCriterion.java @@ -19,12 +19,12 @@ */ package ca.uhn.fhir.rest.gclient; -import static org.apache.commons.lang3.StringUtils.isNotBlank; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.rest.param.ParamPrefixEnum; import ca.uhn.fhir.rest.param.ParameterUtil; +import static org.apache.commons.lang3.StringUtils.isNotBlank; + class QuantityCriterion implements ICriterion, ICriterionInternal { private String myValue; @@ -33,7 +33,8 @@ class QuantityCriterion implements ICriterion, ICriterionIn private String myUnits; private ParamPrefixEnum myPrefix; - public QuantityCriterion(String theParamName, ParamPrefixEnum thePrefix, String theValue, String theSystem, String theUnits) { + public QuantityCriterion( + String theParamName, ParamPrefixEnum thePrefix, String theValue, String theSystem, String theUnits) { myValue = theValue; myPrefix = thePrefix; myName = theParamName; @@ -61,5 +62,4 @@ class QuantityCriterion implements ICriterion, ICriterionIn } return b.toString(); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ReferenceClientParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ReferenceClientParam.java index ebf2ce2a58a..fe44e7d24cf 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ReferenceClientParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/ReferenceClientParam.java @@ -28,8 +28,7 @@ import java.util.Collection; import static org.apache.commons.lang3.StringUtils.isNotBlank; - -public class ReferenceClientParam extends BaseClientParam implements IParam { +public class ReferenceClientParam extends BaseClientParam implements IParam { private String myName; @@ -136,7 +135,5 @@ public class ReferenceClientParam extends BaseClientParam implements IParam { public String getParameterValue(FhirContext theContext) { return myWrappedCriterion.getParameterValue(theContext); } - } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/SpecialClientParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/SpecialClientParam.java index 365d5c99986..f258bac66da 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/SpecialClientParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/SpecialClientParam.java @@ -19,14 +19,8 @@ */ package ca.uhn.fhir.rest.gclient; -import ca.uhn.fhir.rest.api.Constants; -import org.hl7.fhir.instance.model.api.IPrimitiveType; - -import java.util.Arrays; -import java.util.List; - /** - * + * */ public class SpecialClientParam extends BaseClientParam implements IParam { @@ -40,6 +34,4 @@ public class SpecialClientParam extends BaseClientParam implements IParam { public String getParamName() { return myParamName; } 
- - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/StringClientParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/StringClientParam.java index 382a9385d33..be956cfb4c5 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/StringClientParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/StringClientParam.java @@ -26,11 +26,11 @@ import java.util.Arrays; import java.util.List; /** - * + * * @author james * */ -public class StringClientParam extends BaseClientParam implements IParam { +public class StringClientParam extends BaseClientParam implements IParam { private final String myParamName; @@ -90,7 +90,6 @@ public class StringClientParam extends BaseClientParam implements IParam { * parameter. */ ICriterion values(String... theValues); - } private class StringExactly implements IStringMatch { @@ -111,7 +110,8 @@ public class StringClientParam extends BaseClientParam implements IParam { @Override public ICriterion values(String... theValues) { - return new StringCriterion(getParamName() + Constants.PARAMQUALIFIER_STRING_EXACT, Arrays.asList(theValues)); + return new StringCriterion( + getParamName() + Constants.PARAMQUALIFIER_STRING_EXACT, Arrays.asList(theValues)); } } @@ -123,7 +123,8 @@ public class StringClientParam extends BaseClientParam implements IParam { @Override public ICriterion value(IPrimitiveType theValue) { - return new StringCriterion<>(getParamName() + Constants.PARAMQUALIFIER_STRING_CONTAINS, theValue.getValue()); + return new StringCriterion<>( + getParamName() + Constants.PARAMQUALIFIER_STRING_CONTAINS, theValue.getValue()); } @Override @@ -133,7 +134,8 @@ public class StringClientParam extends BaseClientParam implements IParam { @Override public ICriterion values(String... theValues) { - return new StringCriterion(getParamName() + Constants.PARAMQUALIFIER_STRING_CONTAINS, Arrays.asList(theValues)); + return new StringCriterion( + getParamName() + Constants.PARAMQUALIFIER_STRING_CONTAINS, Arrays.asList(theValues)); } } @@ -143,7 +145,6 @@ public class StringClientParam extends BaseClientParam implements IParam { return new StringCriterion<>(getParamName(), theValue); } - @Override public ICriterion value(IPrimitiveType theValue) { return new StringCriterion<>(getParamName(), theValue.getValue()); @@ -158,7 +159,5 @@ public class StringClientParam extends BaseClientParam implements IParam { public ICriterion values(String... 
theValues) { return new StringCriterion(getParamName(), Arrays.asList(theValues)); } - } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/StringCriterion.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/StringCriterion.java index f783e3bb9ce..a3aa39a9096 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/StringCriterion.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/StringCriterion.java @@ -19,15 +19,14 @@ */ package ca.uhn.fhir.rest.gclient; -import static org.apache.commons.lang3.StringUtils.isBlank; - -import java.util.Collection; - -import org.apache.commons.lang3.StringUtils; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.rest.param.ParamPrefixEnum; import ca.uhn.fhir.rest.param.ParameterUtil; +import org.apache.commons.lang3.StringUtils; + +import java.util.Collection; + +import static org.apache.commons.lang3.StringUtils.isBlank; class StringCriterion implements ICriterion, ICriterionInternal { @@ -36,18 +35,18 @@ class StringCriterion implements ICriterion, ICriterionInte private ParamPrefixEnum myPrefix; public StringCriterion(String theName, String theValue) { - myName=theName; + myName = theName; myValue = ParameterUtil.escapeWithDefault(theValue); } public StringCriterion(String theName, ParamPrefixEnum thePrefix, String theValue) { - myName=theName; + myName = theName; myPrefix = thePrefix; myValue = ParameterUtil.escapeWithDefault(theValue); } public StringCriterion(String theName, Collection theValue) { - myName=theName; + myName = theName; StringBuilder b = new StringBuilder(); for (String next : theValue) { if (StringUtils.isBlank(next)) { @@ -76,5 +75,4 @@ class StringCriterion implements ICriterion, ICriterionInte } return myValue; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/TokenClientParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/TokenClientParam.java index 82430ced34c..2f8a91f22f4 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/TokenClientParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/TokenClientParam.java @@ -19,14 +19,13 @@ */ package ca.uhn.fhir.rest.gclient; -import static org.apache.commons.lang3.StringUtils.defaultString; - -import java.util.*; - +import ca.uhn.fhir.model.base.composite.BaseIdentifierDt; import org.apache.commons.lang3.ObjectUtils; import org.hl7.fhir.instance.model.api.IBaseCoding; -import ca.uhn.fhir.model.base.composite.BaseIdentifierDt; +import java.util.*; + +import static org.apache.commons.lang3.StringUtils.defaultString; /** * Token parameter type for use in fluent client interfaces @@ -65,7 +64,10 @@ public class TokenClientParam extends BaseClientParam implements IParam { @Override public ICriterion identifier(BaseIdentifierDt theIdentifier) { - return new TokenCriterion(getParamName(), theIdentifier.getSystemElement().getValueAsString(), theIdentifier.getValueElement().getValue()); + return new TokenCriterion( + getParamName(), + theIdentifier.getSystemElement().getValueAsString(), + theIdentifier.getValueElement().getValue()); } @Override @@ -130,7 +132,7 @@ public class TokenClientParam extends BaseClientParam implements IParam { public interface IMatches { /** * Creates a search criterion that matches against the given code, with no code system specified - * + * * @param theIdentifier * The identifier * @return A criterion @@ -142,7 +144,7 @@ public class TokenClientParam extends BaseClientParam implements IParam { * codes (this will be used to form a 
comma-separated OR query) with any system value. * The URL form of this method will create a parameter like * parameter=code1,code2 - * + * * @param theCodes * The codes */ @@ -153,7 +155,7 @@ public class TokenClientParam extends BaseClientParam implements IParam { * codes (this will be used to form a comma-separated OR query) with any system value. * The URL form of this method will create a parameter like * parameter=code1,code2 - * + * * @param theCodes * The codes */ @@ -167,7 +169,7 @@ public class TokenClientParam extends BaseClientParam implements IParam { * The URL form of this method will create a parameter like * parameter=system1|code1,system2|code2 *
    - * + * * @param theCodings * The codings */ @@ -175,7 +177,7 @@ public class TokenClientParam extends BaseClientParam implements IParam { /** * Creates a search criterion that matches against the given identifier (system and code if both are present, or whatever is present) - * + * * @param theIdentifier * The identifier * @return A criterion @@ -184,7 +186,7 @@ public class TokenClientParam extends BaseClientParam implements IParam { /** * Creates a search criterion that matches against the given identifier, with no system specified - * + * * @param theIdentifier * The identifier * @return A criterion @@ -194,7 +196,7 @@ public class TokenClientParam extends BaseClientParam implements IParam { /** * Creates a search criterion that matches against the given collection of identifiers (system and code if both are present, or whatever is present). * In the query URL that is generated, identifiers will be joined with a ',' to create an OR query. - * + * * @param theIdentifiers * The identifier * @return A criterion @@ -204,7 +206,7 @@ public class TokenClientParam extends BaseClientParam implements IParam { /** * Creates a search criterion that matches against the given collection of identifiers (system and code if both are present, or whatever is present). * In the query URL that is generated, identifiers will be joined with a ',' to create an OR query. - * + * * @param theIdentifiers * The identifier * @return A criterion @@ -213,7 +215,7 @@ public class TokenClientParam extends BaseClientParam implements IParam { /** * Creates a search criterion that matches against the given code system and code - * + * * @param theSystem * The code system (should be a URI) * @param theCode @@ -224,7 +226,7 @@ public class TokenClientParam extends BaseClientParam implements IParam { /** * Creates a search criterion that matches against the given system and identifier - * + * * @param theSystem * The code system (should be a URI) * @param theIdentifier @@ -236,7 +238,7 @@ public class TokenClientParam extends BaseClientParam implements IParam { /** * Creates a search criterion that matches a given system with a collection of possible * values (this will be used to form a comma-separated OR query) - * + * * @param theSystem * The system, which will be used with each value * @param theValues @@ -247,14 +249,12 @@ public class TokenClientParam extends BaseClientParam implements IParam { /** * Creates a search criterion that matches a given system with a collection of possible * values (this will be used to form a comma-separated OR query) - * + * * @param theSystem * The system, which will be used with each value * @param theValues * The values */ ICriterion systemAndValues(String theSystem, String... 
theValues); - } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/TokenCriterion.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/TokenCriterion.java index f4855c05959..bf3b5531ad6 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/TokenCriterion.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/TokenCriterion.java @@ -19,19 +19,17 @@ */ package ca.uhn.fhir.rest.gclient; -import static org.apache.commons.lang3.StringUtils.isBlank; -import static org.apache.commons.lang3.StringUtils.isNotBlank; - -import java.util.Collection; - -import java.util.List; - -import org.apache.commons.lang3.StringUtils; -import org.hl7.fhir.instance.model.api.IBaseCoding; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.model.base.composite.BaseIdentifierDt; import ca.uhn.fhir.rest.param.ParameterUtil; +import org.apache.commons.lang3.StringUtils; +import org.hl7.fhir.instance.model.api.IBaseCoding; + +import java.util.Collection; +import java.util.List; + +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.apache.commons.lang3.StringUtils.isNotBlank; class TokenCriterion implements ICriterion, ICriterionInternal { @@ -40,7 +38,7 @@ class TokenCriterion implements ICriterion, ICriterionInternal public TokenCriterion(String theName, String theSystem, String theCode) { myName = theName; - myValue=toValue(theSystem, theCode); + myValue = toValue(theSystem, theCode); } private String toValue(String theSystem, String theCode) { @@ -58,7 +56,7 @@ class TokenCriterion implements ICriterion, ICriterionInternal } public TokenCriterion(String theParamName, List theValue) { - myName=theParamName; + myName = theParamName; StringBuilder b = new StringBuilder(); for (BaseIdentifierDt next : theValue) { if (next.getSystemElement().isEmpty() && next.getValueElement().isEmpty()) { @@ -67,7 +65,9 @@ class TokenCriterion implements ICriterion, ICriterionInternal if (b.length() > 0) { b.append(','); } - b.append(toValue(next.getSystemElement().getValueAsString(), next.getValueElement().getValue())); + b.append(toValue( + next.getSystemElement().getValueAsString(), + next.getValueElement().getValue())); } myValue = b.toString(); } @@ -91,18 +91,18 @@ class TokenCriterion implements ICriterion, ICriterionInternal } public TokenCriterion(String theParamName, IBaseCoding... 
theCodings) { - myName=theParamName; + myName = theParamName; StringBuilder b = new StringBuilder(); if (theCodings != null) { - for (IBaseCoding next : theCodings) { - if (isBlank(next.getSystem()) && isBlank(next.getCode())) { - continue; + for (IBaseCoding next : theCodings) { + if (isBlank(next.getSystem()) && isBlank(next.getCode())) { + continue; + } + if (b.length() > 0) { + b.append(','); + } + b.append(toValue(next.getSystem(), next.getCode())); } - if (b.length() > 0) { - b.append(','); - } - b.append(toValue(next.getSystem(), next.getCode())); - } } myValue = b.toString(); } @@ -116,5 +116,4 @@ class TokenCriterion implements ICriterion, ICriterionInternal public String getParameterName() { return myName; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/UriClientParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/UriClientParam.java index 5e722ccb29c..397c3423de6 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/UriClientParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/UriClientParam.java @@ -19,19 +19,19 @@ */ package ca.uhn.fhir.rest.gclient; -import java.util.Arrays; -import java.util.List; - import ca.uhn.fhir.model.primitive.StringDt; import ca.uhn.fhir.util.CoverageIgnore; +import java.util.Arrays; +import java.util.List; + /** * */ -public class UriClientParam extends BaseClientParam implements IParam { +public class UriClientParam extends BaseClientParam implements IParam { + + // TODO: handle :above and :below - //TODO: handle :above and :below - private final String myParamName; public UriClientParam(String theParamName) { @@ -46,7 +46,7 @@ public class UriClientParam extends BaseClientParam implements IParam { /** * The string matches the given value (servers will often, but are not required to) implement this as a left match, meaning that a value of "smi" would match "smi" and "smith". * @param theValue THIS PARAMETER DOES NOT DO ANYTHING - This method was added by accident - * + * * @deprecated theValue does not do anything, use {@link #matches()} instead */ @CoverageIgnore @@ -85,7 +85,6 @@ public class UriClientParam extends BaseClientParam implements IParam { * where} criteria with the same parameter. */ ICriterion values(String... theValues); - } private class UriMatches implements IUriMatch { @@ -108,7 +107,5 @@ public class UriClientParam extends BaseClientParam implements IParam { public ICriterion values(String... 
theValues) { return new StringCriterion(getParamName(), Arrays.asList(theValues)); } - } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/BaseAndListParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/BaseAndListParam.java index 7cf9758514d..52d556bb3cf 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/BaseAndListParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/BaseAndListParam.java @@ -47,7 +47,9 @@ public abstract class BaseAndListParam> implement abstract T newInstance(); @Override - public void setValuesAsQueryTokens(FhirContext theContext, String theParamName, List theParameters) throws InvalidRequestException { + public void setValuesAsQueryTokens( + FhirContext theContext, String theParamName, List theParameters) + throws InvalidRequestException { myValues.clear(); for (QualifiedParamList nextParam : theParameters) { T nextList = newInstance(); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/BaseOrListParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/BaseOrListParam.java index 53e65ec76b7..61720a6b48a 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/BaseOrListParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/BaseOrListParam.java @@ -27,7 +27,8 @@ import ca.uhn.fhir.rest.api.QualifiedParamList; import java.util.ArrayList; import java.util.List; -public abstract class BaseOrListParam, PT extends IQueryParameterType> implements IQueryParameterOr { +public abstract class BaseOrListParam, PT extends IQueryParameterType> + implements IQueryParameterOr { private List myList = new ArrayList<>(); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/BaseParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/BaseParam.java index 6d8693f758e..2b091e7be34 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/BaseParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/BaseParam.java @@ -38,7 +38,8 @@ public abstract class BaseParam implements IQueryParameterType { abstract String doGetValueAsQueryToken(FhirContext theContext); - abstract void doSetValueAsQueryToken(FhirContext theContext, String theParamName, String theQualifier, String theValue); + abstract void doSetValueAsQueryToken( + FhirContext theContext, String theParamName, String theQualifier, String theValue); /** * If set to non-null value, indicates that this parameter has been populated with a "[name]:missing=true" or "[name]:missing=false" vale instead of a normal value @@ -75,7 +76,7 @@ public abstract class BaseParam implements IQueryParameterType { * If set to non-null value, indicates that this parameter has been populated * with a "[name]:missing=true" or "[name]:missing=false" value instead of a * normal value - * + * * @return Returns a reference to this for easier method chaining */ @Override @@ -85,14 +86,18 @@ public abstract class BaseParam implements IQueryParameterType { } @Override - public final void setValueAsQueryToken(FhirContext theContext, String theParamName, String theQualifier, String theValue) { + public final void setValueAsQueryToken( + FhirContext theContext, String theParamName, String theQualifier, String theValue) { if (Constants.PARAMQUALIFIER_MISSING.equals(theQualifier)) { myMissing = "true".equals(theValue); doSetValueAsQueryToken(theContext, theParamName, null, null); } else { if (isNotBlank(theQualifier) && theQualifier.charAt(0) == '.') { if (!isSupportsChain()) { - String msg = 
theContext.getLocalizer().getMessage(BaseParam.class, "chainNotSupported", theParamName + theQualifier, theQualifier); + String msg = theContext + .getLocalizer() + .getMessage( + BaseParam.class, "chainNotSupported", theParamName + theQualifier, theQualifier); throw new InvalidRequestException(Msg.code(1935) + msg); } } @@ -101,5 +106,4 @@ public abstract class BaseParam implements IQueryParameterType { doSetValueAsQueryToken(theContext, theParamName, theQualifier, theValue); } } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/BaseParamWithPrefix.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/BaseParamWithPrefix.java index d238e9691b4..44ac2c8a596 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/BaseParamWithPrefix.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/BaseParamWithPrefix.java @@ -29,7 +29,6 @@ public abstract class BaseParamWithPrefix extends BaseParam private static final long serialVersionUID = 1L; private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseParamWithPrefix.class); - public static final String MSG_PREFIX_INVALID_FORMAT = "Invalid date/time/quantity format: "; private ParamPrefixEnum myPrefix; @@ -65,38 +64,40 @@ public abstract class BaseParamWithPrefix extends BaseParam String prefix = theString.substring(0, offset); if (!isBlank(prefix)) { - + myPrefix = ParamPrefixEnum.forValue(prefix); if (myPrefix == null) { // prefix doesn't match standard values. Try legacy values switch (prefix) { - case ">=": - myPrefix = ParamPrefixEnum.GREATERTHAN_OR_EQUALS; - break; - case ">": - myPrefix = ParamPrefixEnum.GREATERTHAN; - break; - case "<=": - myPrefix = ParamPrefixEnum.LESSTHAN_OR_EQUALS; - break; - case "<": - myPrefix = ParamPrefixEnum.LESSTHAN; - break; - case "~": - myPrefix = ParamPrefixEnum.APPROXIMATE; - break; - case "=": - myPrefix = ParamPrefixEnum.EQUAL; - break; - default : - throw new DataFormatException(Msg.code(1941) + "Invalid prefix: \"" + prefix + "\""); + case ">=": + myPrefix = ParamPrefixEnum.GREATERTHAN_OR_EQUALS; + break; + case ">": + myPrefix = ParamPrefixEnum.GREATERTHAN; + break; + case "<=": + myPrefix = ParamPrefixEnum.LESSTHAN_OR_EQUALS; + break; + case "<": + myPrefix = ParamPrefixEnum.LESSTHAN; + break; + case "~": + myPrefix = ParamPrefixEnum.APPROXIMATE; + break; + case "=": + myPrefix = ParamPrefixEnum.EQUAL; + break; + default: + throw new DataFormatException(Msg.code(1941) + "Invalid prefix: \"" + prefix + "\""); } - ourLog.warn("Date parameter has legacy prefix '{}' which has been removed from FHIR. This should be replaced with '{}'", prefix, myPrefix.getValue()); + ourLog.warn( + "Date parameter has legacy prefix '{}' which has been removed from FHIR. This should be replaced with '{}'", + prefix, + myPrefix.getValue()); } - } - + return theString.substring(offset); } @@ -115,5 +116,4 @@ public abstract class BaseParamWithPrefix extends BaseParam myPrefix = thePrefix; return (T) this; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/CompositeAndListParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/CompositeAndListParam.java index 952a30c88c6..3de998b2ab2 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/CompositeAndListParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/CompositeAndListParam.java @@ -22,8 +22,8 @@ package ca.uhn.fhir.rest.param; import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.util.CoverageIgnore; - -public class CompositeAndListParam
    extends BaseAndListParam> { +public class CompositeAndListParam + extends BaseAndListParam> { private Class myLeftType; private Class myRightType; @@ -35,8 +35,8 @@ public class CompositeAndListParam newInstance() { - return new CompositeOrListParam(myLeftType, myRightType); + CompositeOrListParam newInstance() { + return new CompositeOrListParam(myLeftType, myRightType); } @CoverageIgnore @@ -45,6 +45,4 @@ public class CompositeAndListParam extends BaseOrListParam, CompositeParam> { +public class CompositeOrListParam + extends BaseOrListParam, CompositeParam> { private Class myLeftType; private Class myRightType; @@ -44,8 +44,8 @@ public class CompositeOrListParam newInstance() { - return new CompositeParam(myLeftType, myRightType); + CompositeParam newInstance() { + return new CompositeParam(myLeftType, myRightType); } @CoverageIgnore @@ -54,6 +54,4 @@ public class CompositeOrListParam extends BaseParam implements IQueryParameterType { +public class CompositeParam extends BaseParam + implements IQueryParameterType { private A myLeftType; private B myRightType; @@ -89,7 +90,9 @@ public class CompositeParam parts = ParameterUtil.splitParameterString(theValue, '$', false); if (parts.size() > 2) { - throw new InvalidRequestException(Msg.code(1947) + "Invalid value for composite parameter (only one '$' is valid for this parameter, others must be escaped). Value was: " + theValue); + throw new InvalidRequestException(Msg.code(1947) + + "Invalid value for composite parameter (only one '$' is valid for this parameter, others must be escaped). Value was: " + + theValue); } myLeftType.setValueAsQueryToken(theContext, theParamName, theQualifier, parts.get(0)); if (parts.size() > 1) { @@ -126,5 +129,4 @@ public class CompositeParam { @Override DateOrListParam newInstance() { return new DateOrListParam(); } - + @CoverageIgnore @Override public DateAndListParam addAnd(DateOrListParam theValue) { addValue(theValue); return this; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/DateOrListParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/DateOrListParam.java index 506315f976d..1c18c12af65 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/DateOrListParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/DateOrListParam.java @@ -21,19 +21,17 @@ package ca.uhn.fhir.rest.param; import ca.uhn.fhir.util.CoverageIgnore; - -public class DateOrListParam extends BaseOrListParam { +public class DateOrListParam extends BaseOrListParam { @Override DateParam newInstance() { return new DateParam(); } - + @CoverageIgnore @Override public DateOrListParam addOr(DateParam theParameter) { add(theParameter); return this; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/DateParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/DateParam.java index a6806ae35f2..3394624472f 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/DateParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/DateParam.java @@ -40,7 +40,8 @@ import java.util.Objects; import static org.apache.commons.lang3.StringUtils.isNotBlank; -public class DateParam extends BaseParamWithPrefix implements /*IQueryParameterType , */IQueryParameterOr { +public class DateParam extends BaseParamWithPrefix + implements /*IQueryParameterType , */ IQueryParameterOr { private static final long serialVersionUID = 1L; @@ -94,10 +95,9 @@ public class DateParam extends BaseParamWithPrefix implements /*IQuer setValueAsString(theDate); } - /** * 
Constructor which takes a complete [qualifier]{date} string. - * + * * @param theString * The string */ @@ -116,7 +116,7 @@ public class DateParam extends BaseParamWithPrefix implements /*IQuer if (getPrefix() != null) { b.append(ParameterUtil.escapeWithDefault(getPrefix().getValue())); } - + b.append(ParameterUtil.escapeWithDefault(myValue.getValueAsString())); return b.toString(); @@ -128,15 +128,15 @@ public class DateParam extends BaseParamWithPrefix implements /*IQuer } public TemporalPrecisionEnum getPrecision() { - return myValue.getPrecision(); + return myValue.getPrecision(); } public Date getValue() { - return myValue.getValue(); + return myValue.getValue(); } public String getValueAsString() { - return myValue.getValueAsString(); + return myValue.getValueAsString(); } @Override @@ -189,17 +189,17 @@ public class DateParam extends BaseParamWithPrefix implements /*IQuer } @Override - public void setValuesAsQueryTokens(FhirContext theContext, String theParamName, QualifiedParamList theParameters) { + public void setValuesAsQueryTokens(FhirContext theContext, String theParamName, QualifiedParamList theParameters) { setMissing(null); setPrefix(null); setValueAsString(null); - + if (theParameters.size() == 1) { setValueAsString(theParameters.get(0)); } else if (theParameters.size() > 1) { - throw new InvalidRequestException(Msg.code(1939) + "This server does not support multi-valued dates for this parameter: " + theParameters); + throw new InvalidRequestException(Msg.code(1939) + + "This server does not support multi-valued dates for this parameter: " + theParameters); } - } @Override @@ -211,8 +211,7 @@ public class DateParam extends BaseParamWithPrefix implements /*IQuer return false; } DateParam other = (DateParam) obj; - return Objects.equals(getValue(), other.getValue()) && - Objects.equals(getPrefix(), other.getPrefix()); + return Objects.equals(getValue(), other.getValue()) && Objects.equals(getPrefix(), other.getPrefix()); } @Override diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/DateRangeParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/DateRangeParam.java index 9f882665232..c1192fe5f70 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/DateRangeParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/DateRangeParam.java @@ -30,12 +30,11 @@ import ca.uhn.fhir.util.DateUtils; import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Objects; +import javax.annotation.Nonnull; import static ca.uhn.fhir.rest.param.ParamPrefixEnum.EQUAL; import static ca.uhn.fhir.rest.param.ParamPrefixEnum.GREATERTHAN_OR_EQUALS; @@ -110,7 +109,8 @@ public class DateRangeParam implements IQueryParameterAnd { case GREATERTHAN: case GREATERTHAN_OR_EQUALS: if (theDateParam.getPrecision().ordinal() <= TemporalPrecisionEnum.MONTH.ordinal()) { - theDateParam.setValueAsString(DateUtils.getCompletedDate(theDateParam.getValueAsString()).getRight()); + theDateParam.setValueAsString(DateUtils.getCompletedDate(theDateParam.getValueAsString()) + .getRight()); } validateAndSet(theDateParam, null); break; @@ -118,13 +118,15 @@ public class DateRangeParam implements IQueryParameterAnd { case LESSTHAN: case LESSTHAN_OR_EQUALS: if (theDateParam.getPrecision().ordinal() <= TemporalPrecisionEnum.MONTH.ordinal()) { - 
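Reviewer note: as the constructor Javadoc above says, DateParam accepts a complete [qualifier]{date} string, so prefix and value can be set in one step:

    DateParam from = new DateParam("ge2021-01-15");
    // from.getPrefix()        -> ParamPrefixEnum.GREATERTHAN_OR_EQUALS
    // from.getValueAsString() -> "2021-01-15"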
theDateParam.setValueAsString(DateUtils.getCompletedDate(theDateParam.getValueAsString()).getLeft()); + theDateParam.setValueAsString(DateUtils.getCompletedDate(theDateParam.getValueAsString()) + .getLeft()); } validateAndSet(null, theDateParam); break; default: // Should not happen - throw new InvalidRequestException(Msg.code(1921) + "Invalid comparator for date range parameter:" + theDateParam.getPrefix() + ". This is a bug."); + throw new InvalidRequestException(Msg.code(1921) + "Invalid comparator for date range parameter:" + + theDateParam.getPrefix() + ". This is a bug."); } } } @@ -175,7 +177,7 @@ public class DateRangeParam implements IQueryParameterAnd { } private void addParam(DateParam theParsed) throws InvalidRequestException { - if (theParsed.getPrefix() == null){ + if (theParsed.getPrefix() == null) { theParsed.setPrefix(EQUAL); } @@ -183,7 +185,8 @@ public class DateRangeParam implements IQueryParameterAnd { case NOT_EQUAL: case EQUAL: if (myLowerBound != null || myUpperBound != null) { - throw new InvalidRequestException(Msg.code(1922) + "Can not have multiple date range parameters for the same param without a qualifier"); + throw new InvalidRequestException(Msg.code(1922) + + "Can not have multiple date range parameters for the same param without a qualifier"); } if (theParsed.getMissing() != null) { myLowerBound = theParsed; @@ -197,7 +200,9 @@ public class DateRangeParam implements IQueryParameterAnd { case GREATERTHAN_OR_EQUALS: case STARTS_AFTER: if (myLowerBound != null) { - throw new InvalidRequestException(Msg.code(1923) + "Can not have multiple date range parameters for the same param that specify a lower bound"); + throw new InvalidRequestException( + Msg.code(1923) + + "Can not have multiple date range parameters for the same param that specify a lower bound"); } myLowerBound = theParsed; break; @@ -205,14 +210,15 @@ public class DateRangeParam implements IQueryParameterAnd { case LESSTHAN_OR_EQUALS: case ENDS_BEFORE: if (myUpperBound != null) { - throw new InvalidRequestException(Msg.code(1924) + "Can not have multiple date range parameters for the same param that specify an upper bound"); + throw new InvalidRequestException( + Msg.code(1924) + + "Can not have multiple date range parameters for the same param that specify an upper bound"); } myUpperBound = theParsed; break; default: throw new InvalidRequestException(Msg.code(1925) + "Unknown comparator: " + theParsed.getPrefix()); } - } @Override @@ -224,8 +230,7 @@ public class DateRangeParam implements IQueryParameterAnd { return false; } DateRangeParam other = (DateRangeParam) obj; - return Objects.equals(myLowerBound, other.myLowerBound) && - Objects.equals(myUpperBound, other.myUpperBound); + return Objects.equals(myLowerBound, other.myLowerBound) && Objects.equals(myUpperBound, other.myUpperBound); } public DateParam getLowerBound() { @@ -275,7 +280,6 @@ public class DateRangeParam implements IQueryParameterAnd { return this; } - /** * Sets the lower bound to be greaterthan to the given date */ @@ -317,7 +321,8 @@ public class DateRangeParam implements IQueryParameterAnd { case APPROXIMATE: case LESSTHAN_OR_EQUALS: case ENDS_BEFORE: - throw new IllegalStateException(Msg.code(1926) + "Invalid lower bound comparator: " + myLowerBound.getPrefix()); + throw new IllegalStateException( + Msg.code(1926) + "Invalid lower bound comparator: " + myLowerBound.getPrefix()); } } return retVal; @@ -347,7 +352,8 @@ public class DateRangeParam implements IQueryParameterAnd { case GREATERTHAN: case APPROXIMATE: case 
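Reviewer note: the addParam() branches above route each qualified DateParam to the lower or upper bound and reject duplicates. A sketch, assuming the usual two-argument DateRangeParam constructor (not shown in this hunk):

    DateRangeParam range =
            new DateRangeParam(new DateParam("ge2021-01-01"), new DateParam("lt2022-01-01"));
    // a second lower bound for the same parameter would fail with Msg.code(1923)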
STARTS_AFTER: - throw new IllegalStateException(Msg.code(1927) + "Invalid upper bound comparator: " + myUpperBound.getPrefix()); + throw new IllegalStateException( + Msg.code(1927) + "Invalid upper bound comparator: " + myUpperBound.getPrefix()); } } return retVal; @@ -381,7 +387,8 @@ public class DateRangeParam implements IQueryParameterAnd { case APPROXIMATE: case LESSTHAN_OR_EQUALS: case ENDS_BEFORE: - throw new IllegalStateException(Msg.code(1928) + "Invalid lower bound comparator: " + theLowerBound.getPrefix()); + throw new IllegalStateException( + Msg.code(1928) + "Invalid lower bound comparator: " + theLowerBound.getPrefix()); } } return retVal; @@ -450,7 +457,8 @@ public class DateRangeParam implements IQueryParameterAnd { case GREATERTHAN: case APPROXIMATE: case STARTS_AFTER: - throw new IllegalStateException(Msg.code(1929) + "Invalid upper bound comparator: " + theUpperBound.getPrefix()); + throw new IllegalStateException( + Msg.code(1929) + "Invalid upper bound comparator: " + theUpperBound.getPrefix()); } } return retVal; @@ -496,10 +504,8 @@ public class DateRangeParam implements IQueryParameterAnd { * theUpperBound may both be populated, or one may be null, but it is not valid for both to be null. */ public void setRangeFromDatesInclusive(Date theLowerBound, Date theUpperBound) { - DateParam lowerBound = theLowerBound != null - ? new DateParam(GREATERTHAN_OR_EQUALS, theLowerBound) : null; - DateParam upperBound = theUpperBound != null - ? new DateParam(LESSTHAN_OR_EQUALS, theUpperBound) : null; + DateParam lowerBound = theLowerBound != null ? new DateParam(GREATERTHAN_OR_EQUALS, theLowerBound) : null; + DateParam upperBound = theUpperBound != null ? new DateParam(LESSTHAN_OR_EQUALS, theUpperBound) : null; validateAndSet(lowerBound, upperBound); } @@ -532,7 +538,10 @@ public class DateRangeParam implements IQueryParameterAnd { public void setRangeFromDatesInclusive(IPrimitiveType theLowerBound, IPrimitiveType theUpperBound) { IPrimitiveType lowerBound = theLowerBound; IPrimitiveType upperBound = theUpperBound; - if (lowerBound != null && lowerBound.getValue() != null && upperBound != null && upperBound.getValue() != null) { + if (lowerBound != null + && lowerBound.getValue() != null + && upperBound != null + && upperBound.getValue() != null) { if (lowerBound.getValue().after(upperBound.getValue())) { IPrimitiveType temp = lowerBound; lowerBound = upperBound; @@ -540,8 +549,8 @@ public class DateRangeParam implements IQueryParameterAnd { } } validateAndSet( - lowerBound != null ? new DateParam(GREATERTHAN_OR_EQUALS, lowerBound) : null, - upperBound != null ? new DateParam(LESSTHAN_OR_EQUALS, upperBound) : null); + lowerBound != null ? new DateParam(GREATERTHAN_OR_EQUALS, lowerBound) : null, + upperBound != null ? new DateParam(LESSTHAN_OR_EQUALS, upperBound) : null); } /** @@ -555,12 +564,8 @@ public class DateRangeParam implements IQueryParameterAnd { * theUpperBound may both be populated, or one may be null, but it is not valid for both to be null. */ public void setRangeFromDatesInclusive(String theLowerBound, String theUpperBound) { - DateParam lowerBound = theLowerBound != null - ? new DateParam(GREATERTHAN_OR_EQUALS, theLowerBound) - : null; - DateParam upperBound = theUpperBound != null - ? new DateParam(LESSTHAN_OR_EQUALS, theUpperBound) - : null; + DateParam lowerBound = theLowerBound != null ? new DateParam(GREATERTHAN_OR_EQUALS, theLowerBound) : null; + DateParam upperBound = theUpperBound != null ? 
new DateParam(LESSTHAN_OR_EQUALS, theUpperBound) : null; if (isNotBlank(theLowerBound) && isNotBlank(theUpperBound) && theLowerBound.equals(theUpperBound)) { lowerBound.setPrefix(EQUAL); upperBound.setPrefix(EQUAL); @@ -569,8 +574,9 @@ public class DateRangeParam implements IQueryParameterAnd { } @Override - public void setValuesAsQueryTokens(FhirContext theContext, String theParamName, List theParameters) - throws InvalidRequestException { + public void setValuesAsQueryTokens( + FhirContext theContext, String theParamName, List theParameters) + throws InvalidRequestException { boolean haveHadUnqualifiedParameter = false; for (QualifiedParamList paramList : theParameters) { @@ -594,13 +600,13 @@ public class DateRangeParam implements IQueryParameterAnd { if (parsed.getPrefix() == null) { if (haveHadUnqualifiedParameter) { - throw new InvalidRequestException(Msg.code(1931) + "Multiple date parameters with the same name and no qualifier (>, <, etc.) is not supported"); + throw new InvalidRequestException( + Msg.code(1931) + + "Multiple date parameters with the same name and no qualifier (>, <, etc.) is not supported"); } haveHadUnqualifiedParameter = true; } - } - } @Override @@ -645,9 +651,10 @@ public class DateRangeParam implements IQueryParameterAnd { Date lowerBoundAsInstant = getLowerBoundAsInstant(lowerBound); Date upperBoundAsInstant = getUpperBoundAsInstant(upperBound); if (lowerBoundAsInstant.after(upperBoundAsInstant)) { - throw new DataFormatException(Msg.code(1932) + format( - "Lower bound of %s is after upper bound of %s", - lowerBound.getValueAsString(), upperBound.getValueAsString())); + throw new DataFormatException(Msg.code(1932) + + format( + "Lower bound of %s is after upper bound of %s", + lowerBound.getValueAsString(), upperBound.getValueAsString())); } } @@ -662,7 +669,8 @@ public class DateRangeParam implements IQueryParameterAnd { break; case LESSTHAN: case LESSTHAN_OR_EQUALS: - throw new DataFormatException(Msg.code(1933) + "Lower bound comparator must be > or >=, can not be " + lowerBound.getPrefix().getValue()); + throw new DataFormatException(Msg.code(1933) + "Lower bound comparator must be > or >=, can not be " + + lowerBound.getPrefix().getValue()); } } @@ -677,12 +685,12 @@ public class DateRangeParam implements IQueryParameterAnd { break; case GREATERTHAN: case GREATERTHAN_OR_EQUALS: - throw new DataFormatException(Msg.code(1934) + "Upper bound comparator must be < or <=, can not be " + upperBound.getPrefix().getValue()); + throw new DataFormatException(Msg.code(1934) + "Upper bound comparator must be < or <=, can not be " + + upperBound.getPrefix().getValue()); } } myLowerBound = lowerBound; myUpperBound = upperBound; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HasAndListParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HasAndListParam.java index f0a838f1da6..f6eb311bf7f 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HasAndListParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HasAndListParam.java @@ -21,7 +21,6 @@ package ca.uhn.fhir.rest.param; import ca.uhn.fhir.util.CoverageIgnore; - public class HasAndListParam extends BaseAndListParam { @Override @@ -35,6 +34,4 @@ public class HasAndListParam extends BaseAndListParam { addValue(theValue); return this; } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HasOrListParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HasOrListParam.java index 65da4de8564..80617d3ed43 100644 --- 
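Reviewer note: setRangeFromDatesInclusive(String, String) above builds ge/le bounds and, per the branch shown, collapses both to eq when the two strings are equal:

    DateRangeParam day = new DateRangeParam();
    day.setRangeFromDatesInclusive("2021-05-10", "2021-05-10");
    // both bounds now carry ParamPrefixEnum.EQUAL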
a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HasOrListParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HasOrListParam.java @@ -21,7 +21,6 @@ package ca.uhn.fhir.rest.param; import ca.uhn.fhir.util.CoverageIgnore; - public class HasOrListParam extends BaseOrListParam { @CoverageIgnore @@ -36,5 +35,4 @@ public class HasOrListParam extends BaseOrListParam { add(theParameter); return this; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HasParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HasParam.java index 9b73af22997..1efdffc3489 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HasParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HasParam.java @@ -43,8 +43,11 @@ public class HasParam extends BaseParam implements IQueryParameterType { super(); } - - public HasParam(String theTargetResourceType, String theReferenceFieldName, String theParameterName, String theParameterValue) { + public HasParam( + String theTargetResourceType, + String theReferenceFieldName, + String theParameterName, + String theParameterValue) { this(); myTargetResourceType = theTargetResourceType; myReferenceFieldName = theReferenceFieldName; @@ -52,12 +55,11 @@ public class HasParam extends BaseParam implements IQueryParameterType { myParameterValue = theParameterValue; } - @Override String doGetQueryParameterQualifier() { return ':' + myTargetResourceType + ':' + myReferenceFieldName + ':' + myParameterName; } - + @Override String doGetValueAsQueryToken(FhirContext theContext) { return myParameterValue; @@ -73,7 +75,7 @@ public class HasParam extends BaseParam implements IQueryParameterType { validateColon(qualifier, colonIndex0); int colonIndex1 = qualifier.indexOf(':', colonIndex0 + 1); validateColon(qualifier, colonIndex1); - + myTargetResourceType = qualifier.substring(1, colonIndex0); myReferenceFieldName = qualifier.substring(colonIndex0 + 1, colonIndex1); myParameterName = qualifier.substring(colonIndex1 + 1); @@ -102,9 +104,7 @@ public class HasParam extends BaseParam implements IQueryParameterType { } } - private static void throwInvalidSyntaxException(String theParameterName) { throw new InvalidRequestException(Msg.code(1942) + "Invalid _has parameter syntax: " + theParameterName); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HistorySearchDateRangeParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HistorySearchDateRangeParam.java index 86ef7fd952c..0d6eeb4324c 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HistorySearchDateRangeParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HistorySearchDateRangeParam.java @@ -22,15 +22,20 @@ package ca.uhn.fhir.rest.param; import java.util.Map; public class HistorySearchDateRangeParam extends DateRangeParam { - public HistorySearchDateRangeParam(Map theParameters, DateRangeParam theDateRange, - Integer theOffset){ + public HistorySearchDateRangeParam( + Map theParameters, DateRangeParam theDateRange, Integer theOffset) { super(theDateRange); this.myOffset = theOffset; - this.myHistorySearchType = theParameters == null? null - : theParameters.keySet().stream().map(key -> HistorySearchStyleEnum.parse(key)) - .filter(type -> type != null).findAny().orElse(null); + this.myHistorySearchType = theParameters == null + ? 
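Reviewer note: the reflowed HasParam constructor above takes the four pieces of a _has expression, and the qualifier it emits is the colon-joined form built in doGetQueryParameterQualifier():

    HasParam has = new HasParam("Observation", "subject", "code", "1234-5");
    // has.getQueryParameterQualifier() -> ":Observation:subject:code"
    // the value token is "1234-5"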
null + : theParameters.keySet().stream() + .map(key -> HistorySearchStyleEnum.parse(key)) + .filter(type -> type != null) + .findAny() + .orElse(null); } + private HistorySearchStyleEnum myHistorySearchType; private Integer myOffset; diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HistorySearchStyleEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HistorySearchStyleEnum.java index 2ee4cb05bcf..2de15621957 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HistorySearchStyleEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/HistorySearchStyleEnum.java @@ -38,12 +38,14 @@ public enum HistorySearchStyleEnum { this.myValue = theValue; } - public static HistorySearchStyleEnum parse(String value){ + public static HistorySearchStyleEnum parse(String value) { return Arrays.stream(HistorySearchStyleEnum.values()) - .filter(type -> type.myValue.equals(value)).findAny().orElse(null); + .filter(type -> type.myValue.equals(value)) + .findAny() + .orElse(null); } - public boolean isAt(){ + public boolean isAt() { return this == HistorySearchStyleEnum.AT; } } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/InternalCodingDt.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/InternalCodingDt.java index 3925c4e1f9c..49be6fd8040 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/InternalCodingDt.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/InternalCodingDt.java @@ -56,34 +56,51 @@ public class InternalCodingDt extends BaseCodingDt implements ICompositeDatatype } @Child(name = "system", type = UriDt.class, order = 0, min = 0, max = 1) - @Description(shortDefinition = "Identity of the terminology system", formalDefinition = "The identification of the code system that defines the meaning of the symbol in the code.") + @Description( + shortDefinition = "Identity of the terminology system", + formalDefinition = + "The identification of the code system that defines the meaning of the symbol in the code.") private UriDt mySystem; @Child(name = "version", type = StringDt.class, order = 1, min = 0, max = 1) - @Description(shortDefinition = "Version of the system - if relevant", formalDefinition = "The version of the code system which was used when choosing this code. Note that a well-maintained code system does not need the version reported, because the meaning of codes is consistent across versions. However this cannot consistently be assured. and When the meaning is not guaranteed to be consistent, the version SHOULD be exchanged") + @Description( + shortDefinition = "Version of the system - if relevant", + formalDefinition = + "The version of the code system which was used when choosing this code. Note that a well-maintained code system does not need the version reported, because the meaning of codes is consistent across versions. However this cannot consistently be assured. and When the meaning is not guaranteed to be consistent, the version SHOULD be exchanged") private StringDt myVersion; @Child(name = "code", type = CodeDt.class, order = 2, min = 0, max = 1) - @Description(shortDefinition = "Symbol in syntax defined by the system", formalDefinition = "A symbol in syntax defined by the system. The symbol may be a predefined code or an expression in a syntax defined by the coding system (e.g. post-coordination)") + @Description( + shortDefinition = "Symbol in syntax defined by the system", + formalDefinition = + "A symbol in syntax defined by the system. 
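Reviewer note: HistorySearchStyleEnum.parse() (reflowed just below) returns the matching constant or null, which is exactly what the constructor above relies on when scanning request parameters. Sketch, where requestParamKey is whatever key the client sent:

    HistorySearchStyleEnum style = HistorySearchStyleEnum.parse(requestParamKey);
    if (style != null && style.isAt()) {
        // the client asked for the AT-style history search
    }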
The symbol may be a predefined code or an expression in a syntax defined by the coding system (e.g. post-coordination)") private CodeDt myCode; @Child(name = "display", type = StringDt.class, order = 3, min = 0, max = 1) - @Description(shortDefinition = "Representation defined by the system", formalDefinition = "A representation of the meaning of the code in the system, following the rules of the system.") + @Description( + shortDefinition = "Representation defined by the system", + formalDefinition = + "A representation of the meaning of the code in the system, following the rules of the system.") private StringDt myDisplay; @Child(name = "primary", type = BooleanDt.class, order = 4, min = 0, max = 1) - @Description(shortDefinition = "If this code was chosen directly by the user", formalDefinition = "Indicates that this code was chosen by a user directly - i.e. off a pick list of available items (codes or displays)") + @Description( + shortDefinition = "If this code was chosen directly by the user", + formalDefinition = + "Indicates that this code was chosen by a user directly - i.e. off a pick list of available items (codes or displays)") private BooleanDt myPrimary; @Override public boolean isEmpty() { - return super.isBaseEmpty() && ca.uhn.fhir.util.ElementUtil.isEmpty(mySystem, myVersion, myCode, myDisplay, myPrimary); + return super.isBaseEmpty() + && ca.uhn.fhir.util.ElementUtil.isEmpty(mySystem, myVersion, myCode, myDisplay, myPrimary); } - @Deprecated //override deprecated method + @Deprecated // override deprecated method @Override public List getAllPopulatedChildElementsOfType(Class theType) { - return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements(theType, mySystem, myVersion, myCode, myDisplay, myPrimary); + return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements( + theType, mySystem, myVersion, myCode, myDisplay, myPrimary); } /** @@ -300,16 +317,15 @@ public class InternalCodingDt extends BaseCodingDt implements ICompositeDatatype return getDisplay(); } - @Deprecated //override deprecated method + @Deprecated // override deprecated method @Override public Boolean getMissing() { throw new UnsupportedOperationException(Msg.code(1950)); } - @Deprecated //override deprecated method + @Deprecated // override deprecated method @Override public IQueryParameterType setMissing(Boolean theMissing) { throw new UnsupportedOperationException(Msg.code(1951)); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/NumberAndListParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/NumberAndListParam.java index 2ac358a2771..3ac3cc3f31b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/NumberAndListParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/NumberAndListParam.java @@ -21,8 +21,7 @@ package ca.uhn.fhir.rest.param; import ca.uhn.fhir.util.CoverageIgnore; - -public class NumberAndListParam extends BaseAndListParam { +public class NumberAndListParam extends BaseAndListParam { @Override NumberOrListParam newInstance() { @@ -35,6 +34,4 @@ public class NumberAndListParam extends BaseAndListParam { addValue(theValue); return this; } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/NumberOrListParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/NumberOrListParam.java index 251b5301473..e4348415712 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/NumberOrListParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/NumberOrListParam.java @@ -21,22 +21,17 @@ 
package ca.uhn.fhir.rest.param; import ca.uhn.fhir.util.CoverageIgnore; - public class NumberOrListParam extends BaseOrListParam { @Override NumberParam newInstance() { return new NumberParam(); } - + @CoverageIgnore @Override public NumberOrListParam addOr(NumberParam theParameter) { add(theParameter); return this; } - - - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/NumberParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/NumberParam.java index be0eb7c2c4a..8b5fbf2930a 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/NumberParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/NumberParam.java @@ -19,16 +19,15 @@ */ package ca.uhn.fhir.rest.param; -import static org.apache.commons.lang3.StringUtils.isBlank; -import static org.apache.commons.lang3.StringUtils.isNotBlank; - -import java.math.BigDecimal; - +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.model.api.IQueryParameterType; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.model.api.IQueryParameterType; +import java.math.BigDecimal; + +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.apache.commons.lang3.StringUtils.isNotBlank; public class NumberParam extends BaseParamWithPrefix implements IQueryParameterType { @@ -44,7 +43,7 @@ public class NumberParam extends BaseParamWithPrefix implements IQu /** * Constructor - * + * * @param theValue * A value, e.g. "10" */ @@ -54,7 +53,7 @@ public class NumberParam extends BaseParamWithPrefix implements IQu /** * Constructor - * + * * @param theValue * A string value, e.g. "gt5.0" */ @@ -76,7 +75,7 @@ public class NumberParam extends BaseParamWithPrefix implements IQu b.append(ParameterUtil.escapeWithDefault(myQuantity.toPlainString())); return b.toString(); } - + @Override void doSetValueAsQueryToken(FhirContext theContext, String theParamName, String theQualifier, String theValue) { if (getMissing() != null && isBlank(theValue)) { @@ -88,8 +87,7 @@ public class NumberParam extends BaseParamWithPrefix implements IQu myQuantity = new BigDecimal(value); } } - - + public BigDecimal getValue() { return myQuantity; } @@ -98,7 +96,7 @@ public class NumberParam extends BaseParamWithPrefix implements IQu myQuantity = theValue; return this; } - + @Override public String toString() { ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SIMPLE_STYLE); @@ -106,5 +104,4 @@ public class NumberParam extends BaseParamWithPrefix implements IQu b.append("value", myQuantity); return b.build(); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ParamPrefixEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ParamPrefixEnum.java index 2a3032ead59..3763c5292ae 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ParamPrefixEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ParamPrefixEnum.java @@ -24,7 +24,7 @@ import java.util.*; /** * Comparator/qualifier for values used in REST params, such as {@link DateParam}, {@link NumberParam}, and * {@link QuantityParam} - * + * * @since 1.5 */ public enum ParamPrefixEnum { @@ -76,22 +76,22 @@ public enum ParamPrefixEnum { * * The actual value is less than or equal to the given value. 
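Reviewer note: NumberParam above accepts either a bare number or a prefixed string, as its constructor Javadoc notes:

    NumberParam threshold = new NumberParam("gt5.0");
    // threshold.getPrefix() -> ParamPrefixEnum.GREATERTHAN
    // threshold.getValue()  -> new BigDecimal("5.0")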
*/ - LESSTHAN_OR_EQUALS("le"), - + LESSTHAN_OR_EQUALS("le"), + /** * Code Value: ne * * The actual value is not equal to the given value */ NOT_EQUAL("ne"), - + /** * Code Value: sa * * The range of the search value does not overlap with the range of the target value, and the range below the search value contains the range of the target value */ STARTS_AFTER("sa"); - + private static final Map VALUE_TO_PREFIX; static { @@ -118,7 +118,7 @@ public enum ParamPrefixEnum { /** * Returns the prefix associated with a given DSTU2+ value (e.g. lt or eq) - * + * * @param theValue * e.g. < or ~ * @return The prefix, or null if no prefix matches the value diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ParameterUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ParameterUtil.java index e629ab26891..c0a8dc1306a 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ParameterUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ParameterUtil.java @@ -123,7 +123,9 @@ public class ParameterUtil { boolean isRi = theContext.getVersion().getVersion().isRi(); boolean usesHapiId = IdDt.class.equals(paramType); if (isRi == usesHapiId) { - throw new ConfigurationException(Msg.code(1936) + "Method uses the wrong Id datatype (IdDt / IdType) for the given context FHIR version: " + theMethod.toString()); + throw new ConfigurationException(Msg.code(1936) + + "Method uses the wrong Id datatype (IdDt / IdType) for the given context FHIR version: " + + theMethod.toString()); } } return index; @@ -160,15 +162,11 @@ public class ParameterUtil { } public static boolean isBindableIntegerType(Class theClass) { - return Integer.class.isAssignableFrom(theClass) - || IPrimitiveType.class.isAssignableFrom(theClass); + return Integer.class.isAssignableFrom(theClass) || IPrimitiveType.class.isAssignableFrom(theClass); } public static String escapeAndJoinOrList(Collection theValues) { - return theValues - .stream() - .map(ParameterUtil::escape) - .collect(Collectors.joining(",")); + return theValues.stream().map(ParameterUtil::escape).collect(Collectors.joining(",")); } public static int nonEscapedIndexOf(String theString, char theCharacter) { @@ -189,8 +187,10 @@ public class ParameterUtil { if (value.charAt(value.length() - 1) == '"') { if (value.charAt(0) == '"') { eTagVersion = value.substring(1, value.length() - 1); - } else if (value.length() > 3 && value.charAt(0) == 'W' && value.charAt(1) == '/' - && value.charAt(2) == '"') { + } else if (value.length() > 3 + && value.charAt(0) == 'W' + && value.charAt(1) == '/' + && value.charAt(2) == '"') { eTagVersion = value.substring(3, value.length() - 1); } else { eTagVersion = value; @@ -215,16 +215,17 @@ public class ParameterUtil { } @Override - public void setValuesAsQueryTokens(FhirContext theContext, String theParamName, - QualifiedParamList theParameters) { + public void setValuesAsQueryTokens( + FhirContext theContext, String theParamName, QualifiedParamList theParameters) { if (theParameters.isEmpty()) { return; } if (theParameters.size() > 1) { - throw new IllegalArgumentException(Msg.code(1937) + "Type " + theParam.getClass().getCanonicalName() + " does not support multiple values"); + throw new IllegalArgumentException(Msg.code(1937) + "Type " + + theParam.getClass().getCanonicalName() + " does not support multiple values"); } - theParam.setValueAsQueryToken(theContext, theParamName, theParameters.getQualifier(), - theParameters.get(0)); + theParam.setValueAsQueryToken( + theContext, theParamName, 
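Reviewer note: ParamPrefixEnum.forValue() above resolves DSTU2+ prefix codes, and ParameterUtil.escapeAndJoinOrList() (also reflowed above) produces an escaped OR list. Sketch; the expected output is my reading of the code, and commas inside values would be backslash-escaped:

    ParamPrefixEnum le = ParamPrefixEnum.forValue("le"); // LESSTHAN_OR_EQUALS
    String orList = ParameterUtil.escapeAndJoinOrList(Arrays.asList("male", "female"));
    // orList -> "male,female"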
theParameters.getQualifier(), theParameters.get(0)); } }; } @@ -325,6 +326,7 @@ public class ParameterUtil { * Returns true if the value is :iterate or :recurse (the former name of :iterate) for an _include parameter */ public static boolean isIncludeIterate(String theQualifier) { - return Constants.PARAM_INCLUDE_QUALIFIER_RECURSE.equals(theQualifier) || Constants.PARAM_INCLUDE_QUALIFIER_ITERATE.equals(theQualifier); + return Constants.PARAM_INCLUDE_QUALIFIER_RECURSE.equals(theQualifier) + || Constants.PARAM_INCLUDE_QUALIFIER_ITERATE.equals(theQualifier); } } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/QualifierDetails.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/QualifierDetails.java index d35d3113ff8..21ddd946e4c 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/QualifierDetails.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/QualifierDetails.java @@ -82,7 +82,6 @@ public class QualifierDetails { myWholeQualifier = theWholeQualifier; } - public static QualifierDetails extractQualifiersFromParameterName(String theParamName) { QualifierDetails retVal = new QualifierDetails(); if (theParamName == null || theParamName.length() == 0) { @@ -129,7 +128,4 @@ public class QualifierDetails { return retVal; } - - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/QuantityAndListParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/QuantityAndListParam.java index c1300ad8cb4..f23d0050967 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/QuantityAndListParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/QuantityAndListParam.java @@ -21,8 +21,7 @@ package ca.uhn.fhir.rest.param; import ca.uhn.fhir.util.CoverageIgnore; - -public class QuantityAndListParam extends BaseAndListParam { +public class QuantityAndListParam extends BaseAndListParam { @Override QuantityOrListParam newInstance() { @@ -35,5 +34,4 @@ public class QuantityAndListParam extends BaseAndListParam addValue(theValue); return this; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/QuantityOrListParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/QuantityOrListParam.java index d1f62416fc8..0c5cbb3a8d3 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/QuantityOrListParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/QuantityOrListParam.java @@ -21,7 +21,6 @@ package ca.uhn.fhir.rest.param; import ca.uhn.fhir.util.CoverageIgnore; - public class QuantityOrListParam extends BaseOrListParam { @Override @@ -35,7 +34,4 @@ public class QuantityOrListParam extends BaseOrListParam implements /** * Constructor - * + * * @param thePrefix * The comparator, or null for an equals comparator * @param theValue @@ -69,7 +69,7 @@ public class QuantityParam extends BaseParamWithPrefix implements /** * Constructor - * + * * @param thePrefix * The comparator, or null for an equals comparator * @param theValue @@ -88,7 +88,7 @@ public class QuantityParam extends BaseParamWithPrefix implements /** * Constructor - * + * * @param thePrefix * The comparator, or null for an equals comparator * @param theValue @@ -107,7 +107,7 @@ public class QuantityParam extends BaseParamWithPrefix implements /** * Constructor - * + * * @param theQuantity * A quantity value (with no system or units), such as "100.0" or "gt4" */ @@ -117,7 +117,7 @@ public class QuantityParam extends BaseParamWithPrefix implements /** * Constructor - * + * * @param theQuantity * A quantity value (with no system 
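Reviewer note: isIncludeIterate() and extractQualifiersFromParameterName() above are small helpers worth a usage sketch (getWholeQualifier() is the getter I would expect alongside the setter shown; treat it as an assumption):

    // both ":iterate" and the legacy ":recurse" qualifier are accepted for _include
    boolean iterate = ParameterUtil.isIncludeIterate(includeQualifier);
    QualifierDetails details = QualifierDetails.extractQualifiersFromParameterName("name:exact");
    // details.getWholeQualifier() -> ":exact"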
or units), such as 100 */ @@ -127,7 +127,7 @@ public class QuantityParam extends BaseParamWithPrefix implements /** * Constructor - * + * * @param theQuantity * A quantity value (with no system or units), such as "100.0" or "<=4" * @param theSystem @@ -195,7 +195,6 @@ public class QuantityParam extends BaseParamWithPrefix implements if (parts.size() > 2 && StringUtils.isNotBlank(parts.get(2))) { setUnits(parts.get(2)); } - } /** @@ -203,7 +202,7 @@ public class QuantityParam extends BaseParamWithPrefix implements *

    * Note that prior to HAPI FHIR 1.5, this method returned a {@link UriDt} *

    - * + * * @since 1.5 */ public String getSystem() { @@ -228,7 +227,7 @@ public class QuantityParam extends BaseParamWithPrefix implements *

    * Note that prior to HAPI FHIR 1.5, this method returned a {@link DecimalDt} *

    - * + * * @since 1.5 */ public BigDecimal getValue() { @@ -301,22 +300,23 @@ public class QuantityParam extends BaseParamWithPrefix implements return b.toString(); } - public static QuantityParam toQuantityParam(IQueryParameterType theParam) { - if (theParam instanceof BaseQuantityDt) { - BaseQuantityDt param = (BaseQuantityDt) theParam; - String systemValue = param.getSystemElement().getValueAsString(); - String unitsValue = param.getUnitsElement().getValueAsString(); - ParamPrefixEnum cmpValue = ParamPrefixEnum.forValue(param.getComparatorElement().getValueAsString()); - BigDecimal valueValue = param.getValueElement().getValue(); - return new QuantityParam() - .setSystem(systemValue) - .setUnits(unitsValue) - .setPrefix(cmpValue) - .setValue(valueValue); - } else if (theParam instanceof QuantityParam) { - return (QuantityParam) theParam; - } else { - throw new IllegalArgumentException(Msg.code(1948) + "Invalid quantity type: " + theParam.getClass()); - } - } + public static QuantityParam toQuantityParam(IQueryParameterType theParam) { + if (theParam instanceof BaseQuantityDt) { + BaseQuantityDt param = (BaseQuantityDt) theParam; + String systemValue = param.getSystemElement().getValueAsString(); + String unitsValue = param.getUnitsElement().getValueAsString(); + ParamPrefixEnum cmpValue = + ParamPrefixEnum.forValue(param.getComparatorElement().getValueAsString()); + BigDecimal valueValue = param.getValueElement().getValue(); + return new QuantityParam() + .setSystem(systemValue) + .setUnits(unitsValue) + .setPrefix(cmpValue) + .setValue(valueValue); + } else if (theParam instanceof QuantityParam) { + return (QuantityParam) theParam; + } else { + throw new IllegalArgumentException(Msg.code(1948) + "Invalid quantity type: " + theParam.getClass()); + } + } } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceAndListParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceAndListParam.java index a6fc2eb52a6..c1a50e1727b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceAndListParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceAndListParam.java @@ -21,20 +21,17 @@ package ca.uhn.fhir.rest.param; import ca.uhn.fhir.util.CoverageIgnore; - public class ReferenceAndListParam extends BaseAndListParam { @Override ReferenceOrListParam newInstance() { return new ReferenceOrListParam(); } - + @CoverageIgnore @Override public ReferenceAndListParam addAnd(ReferenceOrListParam theValue) { addValue(theValue); return this; } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceOrListParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceOrListParam.java index 9c3a2ad5e85..48d3b0163ed 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceOrListParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/ReferenceOrListParam.java @@ -21,8 +21,7 @@ package ca.uhn.fhir.rest.param; import ca.uhn.fhir.util.CoverageIgnore; - -public class ReferenceOrListParam extends BaseOrListParam { +public class ReferenceOrListParam extends BaseOrListParam { @CoverageIgnore @Override @@ -36,5 +35,4 @@ public class ReferenceOrListParam extends BaseOrListParam { @Override SpecialOrListParam newInstance() { return new SpecialOrListParam(); } - + @CoverageIgnore @Override public SpecialAndListParam addAnd(SpecialOrListParam theValue) { addValue(theValue); return this; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/SpecialOrListParam.java 
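Reviewer note: toQuantityParam() above relies on the fluent setters, which also cover building a quantity token by hand (system URL and units are illustrative):

    QuantityParam qty = new QuantityParam("gt5.4")
            .setSystem("http://unitsofmeasure.org")
            .setUnits("mg");
    // encodes as roughly "gt5.4|http://unitsofmeasure.org|mg"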
b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/SpecialOrListParam.java index de102a63910..8a32a3096bc 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/SpecialOrListParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/SpecialOrListParam.java @@ -21,7 +21,6 @@ package ca.uhn.fhir.rest.param; import ca.uhn.fhir.util.CoverageIgnore; - public class SpecialOrListParam extends BaseOrListParam { @CoverageIgnore @@ -36,5 +35,4 @@ public class SpecialOrListParam extends BaseOrListParam { +public class StringAndListParam extends BaseAndListParam { @Override StringOrListParam newInstance() { diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/StringOrListParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/StringOrListParam.java index 8adc230ab47..c9bbcacebcd 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/StringOrListParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/StringOrListParam.java @@ -21,8 +21,7 @@ package ca.uhn.fhir.rest.param; import ca.uhn.fhir.util.CoverageIgnore; - -public class StringOrListParam extends BaseOrListParam { +public class StringOrListParam extends BaseOrListParam { @CoverageIgnore @Override @@ -36,5 +35,4 @@ public class StringOrListParam extends BaseOrListParam { @Override TokenOrListParam newInstance() { return new TokenOrListParam(); } - + @Override public TokenAndListParam addAnd(TokenOrListParam theValue) { addValue(theValue); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/TokenOrListParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/TokenOrListParam.java index b2e225f3e80..c2591bb9bc5 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/TokenOrListParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/TokenOrListParam.java @@ -19,15 +19,15 @@ */ package ca.uhn.fhir.rest.param; -import java.util.ArrayList; -import java.util.List; - import ca.uhn.fhir.model.base.composite.BaseCodingDt; import ca.uhn.fhir.model.base.composite.BaseIdentifierDt; import ca.uhn.fhir.util.CoverageIgnore; +import java.util.ArrayList; +import java.util.List; + /** - * This class represents a restful search operation parameter for an "OR list" of tokens (in other words, a + * This class represents a restful search operation parameter for an "OR list" of tokens (in other words, a * list which can contain one-or-more tokens, where the server should return results matching any of the tokens) */ public class TokenOrListParam extends BaseOrListParam { @@ -35,12 +35,11 @@ public class TokenOrListParam extends BaseOrListParam { @Override UriOrListParam newInstance() { return new UriOrListParam(); } - + @CoverageIgnore @Override public UriAndListParam addAnd(UriOrListParam theValue) { addValue(theValue); return this; } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/UriOrListParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/UriOrListParam.java index 039b7d579c3..b5539b6270d 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/UriOrListParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/UriOrListParam.java @@ -21,21 +21,18 @@ package ca.uhn.fhir.rest.param; import ca.uhn.fhir.util.CoverageIgnore; - -public class UriOrListParam extends BaseOrListParam { +public class UriOrListParam extends BaseOrListParam { @CoverageIgnore @Override UriParam newInstance() { return new UriParam(); } - + @CoverageIgnore @Override public UriOrListParam addOr(UriParam theParameter) { add(theParameter); 
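Reviewer note: the OR-list classes in this stretch of the patch all follow the same addOr()/addAnd() pattern; a token example:

    TokenOrListParam codes = new TokenOrListParam();
    codes.addOr(new TokenParam("http://loinc.org", "8480-6"));
    codes.addOr(new TokenParam("http://loinc.org", "8462-4"));
    // matches resources carrying either coding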
return this; } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/UriParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/UriParam.java index c1e7a9e3adb..04adf83c891 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/UriParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/UriParam.java @@ -19,16 +19,15 @@ */ package ca.uhn.fhir.rest.param; -import static org.apache.commons.lang3.StringUtils.defaultString; - -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.builder.ToStringBuilder; -import org.apache.commons.lang3.builder.ToStringStyle; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.model.primitive.StringDt; import ca.uhn.fhir.model.primitive.UriDt; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.builder.ToStringBuilder; +import org.apache.commons.lang3.builder.ToStringStyle; + +import static org.apache.commons.lang3.StringUtils.defaultString; public class UriParam extends BaseParam implements IQueryParameterType { @@ -91,7 +90,7 @@ public class UriParam extends BaseParam implements IQueryParameterType { /** * Sets the qualifier for this param (may be null and generally will be) - * + * * @return Returns a reference to this for easy method chanining */ public UriParam setQualifier(UriParamQualifierEnum theQualifier) { @@ -101,7 +100,7 @@ public class UriParam extends BaseParam implements IQueryParameterType { /** * Sets the value for this param - * + * * @return Returns a reference to this for easy method chanining */ public UriParam setValue(String theValue) { @@ -115,5 +114,4 @@ public class UriParam extends BaseParam implements IQueryParameterType { builder.append("value", getValue()); return builder.toString(); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/UriParamQualifierEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/UriParamQualifierEnum.java index dcf01f1bd80..e697f53f2a6 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/UriParamQualifierEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/UriParamQualifierEnum.java @@ -29,27 +29,27 @@ import java.util.Map; public enum UriParamQualifierEnum { /** - * The search parameter is a concept with the form [system]|[code], - * and the search parameter tests whether the coding in a resource subsumes the - * specified search code. For example, the search concept has an is-a relationship + * The search parameter is a concept with the form [system]|[code], + * and the search parameter tests whether the coding in a resource subsumes the + * specified search code. For example, the search concept has an is-a relationship * with the coding in the resource, and this includes the coding itself. *

    * Value :above - *

    + *

    */ ABOVE(":above"), - + /** - * The search parameter is a concept with the form [system]|[code], - * and the search parameter tests whether the coding in a resource subsumes the - * specified search code. For example, the search concept has an is-a relationship + * The search parameter is a concept with the form [system]|[code], + * and the search parameter tests whether the coding in a resource subsumes the + * specified search code. For example, the search concept has an is-a relationship * with the coding in the resource, and this includes the coding itself. *

    * Value :below - *

    + *

    */ BELOW(":below"); - + private static final Map KEY_TO_VALUE; static { @@ -61,17 +61,18 @@ public enum UriParamQualifierEnum { } private final String myValue; + private UriParamQualifierEnum(String theValue) { myValue = theValue; } - + /** * Returns the qualifier value, e.g. :below */ public String getValue() { return myValue; } - + /** * Returns the {@link UriParamQualifierEnum} matching the given qualifier value, such as :below, * or null @@ -79,5 +80,4 @@ public enum UriParamQualifierEnum { public static UriParamQualifierEnum forValue(String theValue) { return KEY_TO_VALUE.get(theValue); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/BaseBinder.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/BaseBinder.java index 91a66aaf126..784bb960d97 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/BaseBinder.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/BaseBinder.java @@ -36,14 +36,15 @@ abstract class BaseBinder { public BaseBinder(Class theType, List> theCompositeTypes) { myType = theType; myCompositeTypes = theCompositeTypes; - - + if (myType.equals(CompositeParam.class)) { if (myCompositeTypes.size() != 2) { - throw new ConfigurationException(Msg.code(1959) + "Search parameter of type " + myType.getName() + " must have 2 composite types declared in parameter annotation, found " + theCompositeTypes.size()); + throw new ConfigurationException(Msg.code(1959) + "Search parameter of type " + myType.getName() + + " must have 2 composite types declared in parameter annotation, found " + + theCompositeTypes.size()); } } - + try { Class[] types = new Class[myCompositeTypes.size()]; for (int i = 0; i < myCompositeTypes.size(); i++) { @@ -51,22 +52,22 @@ abstract class BaseBinder { } myConstructor = myType.getConstructor(types); } catch (NoSuchMethodException e) { - throw new ConfigurationException(Msg.code(1960) + "Query parameter type " + theType.getName() + " has no constructor with types " + theCompositeTypes); + throw new ConfigurationException(Msg.code(1960) + "Query parameter type " + theType.getName() + + " has no constructor with types " + theCompositeTypes); } } public T newInstance() { try { final Object[] args = new Object[myCompositeTypes.size()]; - for (int i = 0; i < myCompositeTypes.size();i++) { - args[i] = myCompositeTypes.get(i);//.newInstance(); + for (int i = 0; i < myCompositeTypes.size(); i++) { + args[i] = myCompositeTypes.get(i); // .newInstance(); } - + T dt = myConstructor.newInstance(args); return dt; } catch (final Exception e) { throw new InternalErrorException(Msg.code(1961) + e); } } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/BaseJavaPrimitiveBinder.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/BaseJavaPrimitiveBinder.java index 940edc40df5..8cb9fb9ccc7 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/BaseJavaPrimitiveBinder.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/BaseJavaPrimitiveBinder.java @@ -33,7 +33,7 @@ import java.util.List; import static org.apache.commons.lang3.StringUtils.isBlank; -abstract class BaseJavaPrimitiveBinderimplements IParamBinder { +abstract class BaseJavaPrimitiveBinder implements IParamBinder { public BaseJavaPrimitiveBinder() { super(); @@ -55,16 +55,17 @@ abstract class BaseJavaPrimitiveBinderimplements IParamBinder { } @Override - public T parse(FhirContext theContext, String theName, List theParams) throws InternalErrorException, 
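Reviewer note: earlier in this hunk UriParam also only gained formatting changes; combined with UriParamQualifierEnum.forValue() shown above, hierarchical URI matching looks roughly like this (the ValueSet URL is illustrative):

    UriParam valueSet = new UriParam("http://acme.org/fhir/ValueSet/123")
            .setQualifier(UriParamQualifierEnum.BELOW);
    // UriParamQualifierEnum.forValue(":below") == UriParamQualifierEnum.BELOW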
InvalidRequestException { + public T parse(FhirContext theContext, String theName, List theParams) + throws InternalErrorException, InvalidRequestException { if (theParams.size() == 0 || theParams.get(0).size() == 0) { return null; } if (theParams.size() > 1 || theParams.get(0).size() > 1) { - throw new InvalidRequestException(Msg.code(1955) + "Multiple values detected for non-repeatable parameter '" + theName + "'. This server is not configured to allow multiple (AND) values for this param."); + throw new InvalidRequestException(Msg.code(1955) + "Multiple values detected for non-repeatable parameter '" + + theName + "'. This server is not configured to allow multiple (AND) values for this param."); } - + T value = doParse(theParams.get(0).get(0)); return value; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/CalendarBinder.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/CalendarBinder.java index c47d08fa8c9..4c40e95c43f 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/CalendarBinder.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/CalendarBinder.java @@ -19,13 +19,12 @@ */ package ca.uhn.fhir.rest.param.binder; -import java.util.Calendar; - import ca.uhn.fhir.model.primitive.InstantDt; +import java.util.Calendar; + public final class CalendarBinder extends BaseJavaPrimitiveBinder { - public CalendarBinder() { - } + public CalendarBinder() {} @Override protected String doEncode(Calendar theString) { @@ -36,6 +35,4 @@ public final class CalendarBinder extends BaseJavaPrimitiveBinder { protected Calendar doParse(String theString) { return new InstantDt(theString).getValueAsCalendar(); } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/CollectionBinder.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/CollectionBinder.java index 4414d6b02c7..705f330360c 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/CollectionBinder.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/CollectionBinder.java @@ -35,16 +35,18 @@ public class CollectionBinder /** * @param thePositionDescription Just used in exceptions if theCollectionType is invalid */ - @SuppressWarnings({ "rawtypes", "cast" }) - public static Class getInstantiableCollectionType(Class> theCollectionType, String thePositionDescription) { - if (theCollectionType.equals(List.class) || theCollectionType .equals(ArrayList.class)) { + @SuppressWarnings({"rawtypes", "cast"}) + public static Class getInstantiableCollectionType( + Class> theCollectionType, String thePositionDescription) { + if (theCollectionType.equals(List.class) || theCollectionType.equals(ArrayList.class)) { return (Class) ArrayList.class; - } else if (theCollectionType .equals( Set.class )|| theCollectionType .equals( HashSet.class)) { + } else if (theCollectionType.equals(Set.class) || theCollectionType.equals(HashSet.class)) { return (Class) HashSet.class; } else if (theCollectionType.equals(Collection.class)) { return (Class) ArrayList.class; } else { - throw new ConfigurationException(Msg.code(1956) + "Unsupported binding collection type '" + theCollectionType.getCanonicalName() + "' for " + thePositionDescription); + throw new ConfigurationException(Msg.code(1956) + "Unsupported binding collection type '" + + theCollectionType.getCanonicalName() + "' for " + thePositionDescription); } } @@ -60,7 +62,8 @@ public class CollectionBinder // } else if (theCollectionType == Collection.class) { // 
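Reviewer note: getInstantiableCollectionType() above maps the declared collection interface to a concrete class and fails fast otherwise. Sketch; the exact generics are elided in this hunk, so treat the signature as approximate:

    // List/ArrayList and Collection bind to ArrayList, Set/HashSet bind to HashSet,
    // anything else throws ConfigurationException (Msg 1956)
    Class<?> impl = CollectionBinder.getInstantiableCollectionType(Set.class, "parameter 2 of myMethod()");
    // impl == HashSet.class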
myCollectionType = ArrayList.class; // } else { - // throw new ConfigurationException(Msg.code(1957) + "Unsupported binding collection type: " + theCollectionType.getCanonicalName()); + // throw new ConfigurationException(Msg.code(1957) + "Unsupported binding collection type: " + + // theCollectionType.getCanonicalName()); // } // } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/DateBinder.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/DateBinder.java index cb36494f9a8..acd4c80301d 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/DateBinder.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/DateBinder.java @@ -19,13 +19,12 @@ */ package ca.uhn.fhir.rest.param.binder; -import java.util.Date; - import ca.uhn.fhir.model.primitive.InstantDt; +import java.util.Date; + public final class DateBinder extends BaseJavaPrimitiveBinder { - public DateBinder() { - } + public DateBinder() {} @Override protected String doEncode(Date theString) { @@ -36,6 +35,4 @@ public final class DateBinder extends BaseJavaPrimitiveBinder { protected Date doParse(String theString) { return new InstantDt(theString).getValue(); } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/FhirPrimitiveBinder.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/FhirPrimitiveBinder.java index 67da7de9105..0085b55df8f 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/FhirPrimitiveBinder.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/FhirPrimitiveBinder.java @@ -19,12 +19,11 @@ */ package ca.uhn.fhir.rest.param.binder; +import ca.uhn.fhir.util.ReflectionUtil; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import ca.uhn.fhir.util.ReflectionUtil; - public final class FhirPrimitiveBinder extends BaseJavaPrimitiveBinder> { - + private Class> myType; public FhirPrimitiveBinder(Class> theType) { @@ -42,6 +41,4 @@ public final class FhirPrimitiveBinder extends BaseJavaPrimitiveBinder { - + List> encode(FhirContext theContext, T theString) throws InternalErrorException; - T parse(FhirContext theContext, String theName, List theList) throws InternalErrorException, InvalidRequestException; - + T parse(FhirContext theContext, String theName, List theList) + throws InternalErrorException, InvalidRequestException; } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/QueryParameterAndBinder.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/QueryParameterAndBinder.java index eff86a5dbae..9eb2e307922 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/QueryParameterAndBinder.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/QueryParameterAndBinder.java @@ -30,21 +30,27 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import java.util.List; -public final class QueryParameterAndBinder extends BaseBinder> implements IParamBinder> { +public final class QueryParameterAndBinder extends BaseBinder> + implements IParamBinder> { - public QueryParameterAndBinder(Class> theType, List> theCompositeTypes) { + public QueryParameterAndBinder( + Class> theType, + List> theCompositeTypes) { super(theType, theCompositeTypes); } @SuppressWarnings("unchecked") @Override - public List> encode(FhirContext theContext, IQueryParameterAnd theString) throws InternalErrorException { - List> retVal = (List>) ((IQueryParameterAnd) theString).getValuesAsQueryTokens(); + public List> encode(FhirContext 
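Reviewer note: DateBinder and CalendarBinder above both round-trip through InstantDt, so the conversion they perform amounts to:

    String wire = new InstantDt(new Date()).getValueAsString(); // encode as a FHIR instant string
    Date back = new InstantDt(wire).getValue();                 // parse it back to a java.util.Date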
theContext, IQueryParameterAnd theString) + throws InternalErrorException { + List> retVal = + (List>) ((IQueryParameterAnd) theString).getValuesAsQueryTokens(); return retVal; } @Override - public IQueryParameterAnd parse(FhirContext theContext, String theParamName, List theString) throws InternalErrorException, InvalidRequestException { + public IQueryParameterAnd parse(FhirContext theContext, String theParamName, List theString) + throws InternalErrorException, InvalidRequestException { IQueryParameterAnd dt; try { dt = newInstance(); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/QueryParameterOrBinder.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/QueryParameterOrBinder.java index 67a97f6e16a..eff2f136d3c 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/QueryParameterOrBinder.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/QueryParameterOrBinder.java @@ -30,22 +30,27 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import java.util.Collections; import java.util.List; -public final class QueryParameterOrBinder extends BaseBinder> implements IParamBinder> { +public final class QueryParameterOrBinder extends BaseBinder> + implements IParamBinder> { - public QueryParameterOrBinder(Class> theType, List> theCompositeTypes) { + public QueryParameterOrBinder( + Class> theType, + List> theCompositeTypes) { super(theType, theCompositeTypes); } @SuppressWarnings("unchecked") @Override - public List> encode(FhirContext theContext, IQueryParameterOr theValue) throws InternalErrorException { + public List> encode(FhirContext theContext, IQueryParameterOr theValue) + throws InternalErrorException { IQueryParameterOr retVal = (theValue); - List retVal2 = Collections.singletonList((IQueryParameterOr)retVal); + List retVal2 = Collections.singletonList((IQueryParameterOr) retVal); return (List>) retVal2; } @Override - public IQueryParameterOr parse(FhirContext theContext, String theParamName, List theString) throws InternalErrorException, InvalidRequestException { + public IQueryParameterOr parse(FhirContext theContext, String theParamName, List theString) + throws InternalErrorException, InvalidRequestException { IQueryParameterOr dt; try { dt = newInstance(); @@ -53,9 +58,11 @@ public final class QueryParameterOrBinder extends BaseBinder 1) { - throw new InvalidRequestException(Msg.code(1953) + "Multiple values detected for non-repeatable parameter '" + theParamName + "'. This server is not configured to allow multiple (AND/OR) values for this param."); + throw new InvalidRequestException( + Msg.code(1953) + "Multiple values detected for non-repeatable parameter '" + theParamName + + "'. 
This server is not configured to allow multiple (AND/OR) values for this param."); } - + dt.setValuesAsQueryTokens(theContext, theParamName, theString.get(0)); } catch (SecurityException e) { throw new InternalErrorException(Msg.code(1954) + e); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/QueryParameterTypeBinder.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/QueryParameterTypeBinder.java index 48e9117cd78..e8ea5e99c90 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/QueryParameterTypeBinder.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/QueryParameterTypeBinder.java @@ -32,38 +32,44 @@ import org.apache.commons.lang3.StringUtils; import java.util.Collections; import java.util.List; -public final class QueryParameterTypeBinder extends BaseBinder implements IParamBinder { +public final class QueryParameterTypeBinder extends BaseBinder + implements IParamBinder { - public QueryParameterTypeBinder(Class theType, List> theCompositeTypes) { + public QueryParameterTypeBinder( + Class theType, + List> theCompositeTypes) { super(theType, theCompositeTypes); } @SuppressWarnings("unchecked") @Override - public List> encode(FhirContext theContext, IQueryParameterType theValue) throws InternalErrorException { + public List> encode(FhirContext theContext, IQueryParameterType theValue) + throws InternalErrorException { IQueryParameterType param = theValue; List retVal = Collections.singletonList(ParameterUtil.singleton(param, null)); return (List>) retVal; } @Override - public IQueryParameterType parse(FhirContext theContext, String theParamName, List theParams) throws InternalErrorException, InvalidRequestException { + public IQueryParameterType parse(FhirContext theContext, String theParamName, List theParams) + throws InternalErrorException, InvalidRequestException { String value = theParams.get(0).get(0); if (StringUtils.isBlank(value)) { return null; } - + IQueryParameterType dt = super.newInstance(); if (theParams.size() == 0 || theParams.get(0).size() == 0) { return dt; } if (theParams.size() > 1 || theParams.get(0).size() > 1) { - throw new InvalidRequestException(Msg.code(1962) + "Multiple values detected for non-repeatable parameter '" + theParamName + "'. This server is not configured to allow multiple (AND/OR) values for this param."); + throw new InvalidRequestException( + Msg.code(1962) + "Multiple values detected for non-repeatable parameter '" + theParamName + + "'. 
This server is not configured to allow multiple (AND/OR) values for this param."); } dt.setValueAsQueryToken(theContext, theParamName, theParams.get(0).getQualifier(), value); return dt; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/StringBinder.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/StringBinder.java index 3bdf1e47e37..b908020a53f 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/StringBinder.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/binder/StringBinder.java @@ -20,8 +20,7 @@ package ca.uhn.fhir.rest.param.binder; public final class StringBinder extends BaseJavaPrimitiveBinder { - public StringBinder() { - } + public StringBinder() {} @Override protected String doEncode(String theString) { @@ -32,6 +31,4 @@ public final class StringBinder extends BaseJavaPrimitiveBinder { protected String doParse(String theString) { return theString; } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/AuthenticationException.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/AuthenticationException.java index feddcdd1d99..e1a206118aa 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/AuthenticationException.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/AuthenticationException.java @@ -23,15 +23,15 @@ import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.util.CoverageIgnore; /** - * Represents an HTTP 401 Client Unauthorized response, which - * means that the client needs to provide credentials, or has + * Represents an HTTP 401 Client Unauthorized response, which + * means that the client needs to provide credentials, or has * provided invalid credentials. *

    * For security failures, you should use * {@link AuthenticationException} if you want to indicate that the - * user could not be authenticated (e.g. credential failures), also - * known as an authentication failure. - * You should use {@link ForbiddenOperationException} if you want to + * user could not be authenticated (e.g. credential failures), also + * known as an authentication failure. + * You should use {@link ForbiddenOperationException} if you want to * indicate that the authenticated user does not have permission to * perform the requested operation, also known as an authorization * failure. @@ -40,7 +40,7 @@ import ca.uhn.fhir.util.CoverageIgnore; * Note that a complete list of RESTful exceptions is available in the Package * Summary. *

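As a usage sketch of the 401-versus-403 distinction described above (the boolean checks are hypothetical placeholders; the exception constructors and addAuthenticateHeaderForRealm are assumed from this hunk and the ForbiddenOperationException hunk later in this patch):

    // Authentication failure (credentials missing or wrong) -> HTTP 401
    if (!credentialsAreValid) {
        throw new AuthenticationException("Invalid username or password")
                .addAuthenticateHeaderForRealm("fhir");
    }
    // Authorization failure (authenticated user lacks permission) -> HTTP 403
    if (!userIsPermitted) {
        throw new ForbiddenOperationException("User is not permitted to perform this operation");
    }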
    - + * */ @CoverageIgnore public class AuthenticationException extends BaseServerResponseException { @@ -60,16 +60,15 @@ public class AuthenticationException extends BaseServerResponseException { public AuthenticationException(String theMessage, Throwable theCause) { super(STATUS_CODE, theMessage, theCause); } - + /** * Adds a WWW-Authenticate header to the response, of the form:
    - * WWW-Authenticate: Basic realm="theRealm" - * + * WWW-Authenticate: Basic realm="theRealm" + * * @return Returns a reference to this for easy method chaining */ public AuthenticationException addAuthenticateHeaderForRealm(String theRealm) { addResponseHeader("WWW-Authenticate", "Basic realm=\"" + theRealm + "\""); return this; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/BaseServerResponseException.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/BaseServerResponseException.java index bc4c59b7398..511ea85f92f 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/BaseServerResponseException.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/BaseServerResponseException.java @@ -30,7 +30,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; - /** * Base class for RESTful client and server exceptions. RESTful client methods will only throw exceptions which are subclasses of this exception type, and RESTful server methods should also only call * subclasses of this exception type. @@ -49,7 +48,8 @@ import java.util.Map; */ public abstract class BaseServerResponseException extends RuntimeException { - private static final Map> ourStatusCodeToExceptionType = new HashMap>(); + private static final Map> ourStatusCodeToExceptionType = + new HashMap>(); private static final long serialVersionUID = 1L; static { @@ -82,7 +82,8 @@ public abstract class BaseServerResponseException extends RuntimeException { * @param theStatusCode The HTTP status code corresponding to this problem * @param theMessage The message */ - public /** + public + /** * Interceptor hook method. This method should not be called directly. */ BaseServerResponseException(int theStatusCode, String theMessage) { @@ -102,7 +103,8 @@ public abstract class BaseServerResponseException extends RuntimeException { myStatusCode = theStatusCode; myBaseOperationOutcome = null; if (theMessages != null && theMessages.length > 1) { - myAdditionalMessages = Arrays.asList(Arrays.copyOfRange(theMessages, 1, theMessages.length, String[].class)); + myAdditionalMessages = + Arrays.asList(Arrays.copyOfRange(theMessages, 1, theMessages.length, String[].class)); } } @@ -113,7 +115,8 @@ public abstract class BaseServerResponseException extends RuntimeException { * @param theMessage The message * @param theBaseOperationOutcome An BaseOperationOutcome resource to return to the calling client (in a server) or the BaseOperationOutcome that was returned from the server (in a client) */ - public BaseServerResponseException(int theStatusCode, String theMessage, IBaseOperationOutcome theBaseOperationOutcome) { + public BaseServerResponseException( + int theStatusCode, String theMessage, IBaseOperationOutcome theBaseOperationOutcome) { super(theMessage); myStatusCode = theStatusCode; myBaseOperationOutcome = theBaseOperationOutcome; @@ -140,7 +143,8 @@ public abstract class BaseServerResponseException extends RuntimeException { * @param theCause The underlying cause exception * @param theBaseOperationOutcome An BaseOperationOutcome resource to return to the calling client (in a server) or the BaseOperationOutcome that was returned from the server (in a client) */ - public BaseServerResponseException(int theStatusCode, String theMessage, Throwable theCause, IBaseOperationOutcome theBaseOperationOutcome) { + public BaseServerResponseException( + int theStatusCode, String theMessage, Throwable theCause, IBaseOperationOutcome 
theBaseOperationOutcome) { super(theMessage, theCause); myStatusCode = theStatusCode; myBaseOperationOutcome = theBaseOperationOutcome; @@ -165,7 +169,8 @@ public abstract class BaseServerResponseException extends RuntimeException { * @param theCause The underlying cause exception * @param theBaseOperationOutcome An BaseOperationOutcome resource to return to the calling client (in a server) or the BaseOperationOutcome that was returned from the server (in a client) */ - public BaseServerResponseException(int theStatusCode, Throwable theCause, IBaseOperationOutcome theBaseOperationOutcome) { + public BaseServerResponseException( + int theStatusCode, Throwable theCause, IBaseOperationOutcome theBaseOperationOutcome) { super(theCause.toString(), theCause); myStatusCode = theStatusCode; myBaseOperationOutcome = theBaseOperationOutcome; @@ -306,7 +311,10 @@ public abstract class BaseServerResponseException extends RuntimeException { public static BaseServerResponseException newInstance(int theStatusCode, String theMessage) { if (ourStatusCodeToExceptionType.containsKey(theStatusCode)) { try { - return ourStatusCodeToExceptionType.get(theStatusCode).getConstructor(new Class[]{String.class}).newInstance(theMessage); + return ourStatusCodeToExceptionType + .get(theStatusCode) + .getConstructor(new Class[] {String.class}) + .newInstance(theMessage); } catch (InstantiationException e) { throw new InternalErrorException(Msg.code(1912) + e); } catch (IllegalAccessException e) { @@ -326,9 +334,9 @@ public abstract class BaseServerResponseException extends RuntimeException { static void registerExceptionType(int theStatusCode, Class theType) { if (ourStatusCodeToExceptionType.containsKey(theStatusCode)) { - throw new Error(Msg.code(1918) + "Can not register " + theType + " to status code " + theStatusCode + " because " + ourStatusCodeToExceptionType.get(theStatusCode) + " already registers that code"); + throw new Error(Msg.code(1918) + "Can not register " + theType + " to status code " + theStatusCode + + " because " + ourStatusCodeToExceptionType.get(theStatusCode) + " already registers that code"); } ourStatusCodeToExceptionType.put(theStatusCode, theType); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/ForbiddenOperationException.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/ForbiddenOperationException.java index ecae1d6e59a..5ca70e07b45 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/ForbiddenOperationException.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/ForbiddenOperationException.java @@ -19,10 +19,9 @@ */ package ca.uhn.fhir.rest.server.exceptions; -import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; - import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.util.CoverageIgnore; +import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; /** * This Represents an HTTP 403 Forbidden response, which generally indicates one of two conditions: @@ -30,13 +29,13 @@ import ca.uhn.fhir.util.CoverageIgnore; *
  • Authentication was provided, but the authenticated user is not permitted to perform the requested operation.
  • The operation is forbidden to all users. Repeating the request with authentication would serve no purpose.
 * - * + * *

    * For security failures, you should use * {@link AuthenticationException} if you want to indicate that the - * user could not be authenticated (e.g. credential failures), also - * known as an authentication failure. - * You should use {@link ForbiddenOperationException} if you want to + * user could not be authenticated (e.g. credential failures), also + * known as an authentication failure. + * You should use {@link ForbiddenOperationException} if you want to * indicate that the authenticated user does not have permission to * perform the requested operation, also known as an authorization * failure. @@ -58,7 +57,7 @@ public class ForbiddenOperationException extends BaseServerResponseException { /** * Constructor - * + * * @param theMessage * The message * @param theOperationOutcome @@ -67,5 +66,4 @@ public class ForbiddenOperationException extends BaseServerResponseException { public ForbiddenOperationException(String theMessage, IBaseOperationOutcome theOperationOutcome) { super(STATUS_CODE, theMessage, theOperationOutcome); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/InternalErrorException.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/InternalErrorException.java index 7d1b4842dee..c7110836933 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/InternalErrorException.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/InternalErrorException.java @@ -19,10 +19,9 @@ */ package ca.uhn.fhir.rest.server.exceptions; -import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; - import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.util.CoverageIgnore; +import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; /** * Represents an HTTP 500 Internal Error response. @@ -32,24 +31,24 @@ import ca.uhn.fhir.util.CoverageIgnore; * send this status code in the case of a bad request message (although it * should not do this; an HTTP 4xx response is more appropriate in that * situation). - * + * *

    * Note that a complete list of RESTful exceptions is available in the * Package Summary. *

    - * + * * @see UnprocessableEntityException Which should be used for business level validation failures */ @CoverageIgnore public class InternalErrorException extends BaseServerResponseException { public static final int STATUS_CODE = Constants.STATUS_HTTP_500_INTERNAL_ERROR; - + private static final long serialVersionUID = 1L; /** * Constructor - * + * * @param theMessage * The message * @param theOperationOutcome The OperationOutcome resource to return to the client @@ -69,5 +68,4 @@ public class InternalErrorException extends BaseServerResponseException { public InternalErrorException(Throwable theCause) { super(STATUS_CODE, theCause); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/InvalidRequestException.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/InvalidRequestException.java index 5bb9316c4e9..256b98e7067 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/InvalidRequestException.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/InvalidRequestException.java @@ -19,22 +19,21 @@ */ package ca.uhn.fhir.rest.server.exceptions; -import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; - import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.util.CoverageIgnore; +import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; /** * Represents an HTTP 400 Bad Request response. * This status indicates that the client's message was invalid (e.g. not a valid FHIR Resource * per the specifications), as opposed to the {@link UnprocessableEntityException} which indicates * that data does not pass business rule validation on the server. - * + * *

    * Note that a complete list of RESTful exceptions is available in the * Package Summary. *

    - * + * * @see UnprocessableEntityException Which should be used for business level validation failures */ @CoverageIgnore @@ -63,10 +62,10 @@ public class InvalidRequestException extends BaseServerResponseException { public InvalidRequestException(Throwable theCause) { super(STATUS_CODE, theCause); } - + /** * Constructor - * + * * @param theMessage * The message * @param theOperationOutcome The OperationOutcome resource to return to the client @@ -74,6 +73,4 @@ public class InvalidRequestException extends BaseServerResponseException { public InvalidRequestException(String theMessage, IBaseOperationOutcome theOperationOutcome) { super(STATUS_CODE, theMessage, theOperationOutcome); } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/MethodNotAllowedException.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/MethodNotAllowedException.java index ce292de869e..390f2e3f0a3 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/MethodNotAllowedException.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/MethodNotAllowedException.java @@ -19,21 +19,20 @@ */ package ca.uhn.fhir.rest.server.exceptions; +import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.rest.api.RequestTypeEnum; +import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; + import java.util.LinkedHashSet; import java.util.Set; -import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; - -import ca.uhn.fhir.rest.api.Constants; -import ca.uhn.fhir.rest.api.RequestTypeEnum; - /** * Represents an HTTP 405 Method Not Allowed response. - * + * *

    * Note that a complete list of RESTful exceptions is available in the Package Summary. *

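A brief sketch of how the 405 response described here is typically raised (assuming the constructor variant taking a message plus allowed methods, whose Javadoc appears further down in this hunk; RequestTypeEnum comes from ca.uhn.fhir.rest.api):

    // Reject an unsupported HTTP verb and advertise the verbs that are allowed
    throw new MethodNotAllowedException(
            "DELETE is not supported on this endpoint", RequestTypeEnum.GET, RequestTypeEnum.POST);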
    - * + * * @see UnprocessableEntityException Which should be used for business level validation failures */ public class MethodNotAllowedException extends BaseServerResponseException { @@ -43,7 +42,7 @@ public class MethodNotAllowedException extends BaseServerResponseException { /** * Constructor - * + * * @param theMessage * The message * @param theOperationOutcome @@ -51,14 +50,15 @@ public class MethodNotAllowedException extends BaseServerResponseException { * @param theAllowedMethods * A list of allowed methods (see {@link #setAllowedMethods(RequestTypeEnum...)} ) */ - public MethodNotAllowedException(String theMessage, IBaseOperationOutcome theOperationOutcome, RequestTypeEnum... theAllowedMethods) { + public MethodNotAllowedException( + String theMessage, IBaseOperationOutcome theOperationOutcome, RequestTypeEnum... theAllowedMethods) { super(STATUS_CODE, theMessage, theOperationOutcome); setAllowedMethods(theAllowedMethods); } /** * Constructor - * + * * @param theMessage * The message * @param theAllowedMethods @@ -71,7 +71,7 @@ public class MethodNotAllowedException extends BaseServerResponseException { /** * Constructor - * + * * @param theMessage * The message * @param theOperationOutcome @@ -83,7 +83,7 @@ public class MethodNotAllowedException extends BaseServerResponseException { /** * Constructor - * + * * @param theMessage * The message */ @@ -133,5 +133,4 @@ public class MethodNotAllowedException extends BaseServerResponseException { } addResponseHeader(Constants.HEADER_ALLOW, b.toString()); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/NotImplementedOperationException.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/NotImplementedOperationException.java index 86f4533f22d..05dae12b79c 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/NotImplementedOperationException.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/NotImplementedOperationException.java @@ -19,15 +19,14 @@ */ package ca.uhn.fhir.rest.server.exceptions; -import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; - import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.util.CoverageIgnore; +import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; /** * This Represents an HTTP 501 Not Implemented response, which means the resource provider currently lacks the * ability to fullfill the request. - * + * *

    * Note that a complete list of RESTful exceptions is available in the Package * Summary. @@ -45,7 +44,7 @@ public class NotImplementedOperationException extends BaseServerResponseExceptio /** * Constructor - * + * * @param theMessage * The message * @param theOperationOutcome @@ -54,5 +53,4 @@ public class NotImplementedOperationException extends BaseServerResponseExceptio public NotImplementedOperationException(String theMessage, IBaseOperationOutcome theOperationOutcome) { super(STATUS_CODE, theMessage, theOperationOutcome); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/NotModifiedException.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/NotModifiedException.java index ad1a1396f3c..ad0ae2726c3 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/NotModifiedException.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/NotModifiedException.java @@ -19,16 +19,15 @@ */ package ca.uhn.fhir.rest.server.exceptions; -import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; - import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.util.CoverageIgnore; +import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; /** * This Represents an HTTP 301 Not Modified response, which means the resource has not * changed since the last version the client retrieved. This exception should only be used - * as a part of the ETag workflow. - * + * as a part of the ETag workflow. + * *

    * Note that a complete list of RESTful exceptions is available in the Package * Summary. @@ -46,7 +45,7 @@ public class NotModifiedException extends BaseServerResponseException { /** * Constructor - * + * * @param theMessage * The message * @param theOperationOutcome @@ -55,5 +54,4 @@ public class NotModifiedException extends BaseServerResponseException { public NotModifiedException(String theMessage, IBaseOperationOutcome theOperationOutcome) { super(STATUS_CODE, theMessage, theOperationOutcome); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/PayloadTooLargeException.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/PayloadTooLargeException.java index 83eeebdcc03..3dab9779c32 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/PayloadTooLargeException.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/PayloadTooLargeException.java @@ -26,7 +26,7 @@ import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; /** * This Represents an HTTP 413 Payload Too Large response, which means the request body * was too big for the server to accept - * + * *

    */ - public static final String EXT_SUBSCRIPTION_SUBJECT_TEMPLATE = "http://hapifhir.io/fhir/StructureDefinition/subscription-email-subject-template"; + public static final String EXT_SUBSCRIPTION_SUBJECT_TEMPLATE = + "http://hapifhir.io/fhir/StructureDefinition/subscription-email-subject-template"; /** * This extension URL indicates whether a REST HOOK delivery should @@ -39,7 +38,8 @@ public class HapiExtensions { * placed on the Subscription.channel element. *

    */ - public static final String EXT_SUBSCRIPTION_RESTHOOK_STRIP_VERSION_IDS = "http://hapifhir.io/fhir/StructureDefinition/subscription-resthook-strip-version-ids"; + public static final String EXT_SUBSCRIPTION_RESTHOOK_STRIP_VERSION_IDS = + "http://hapifhir.io/fhir/StructureDefinition/subscription-resthook-strip-version-ids"; /** * This extension URL indicates whether a REST HOOK delivery should @@ -57,12 +57,14 @@ public class HapiExtensions { * placed on the Subscription.channel element. *

    */ - public static final String EXT_SUBSCRIPTION_RESTHOOK_DELIVER_LATEST_VERSION = "http://hapifhir.io/fhir/StructureDefinition/subscription-resthook-deliver-latest-version"; + public static final String EXT_SUBSCRIPTION_RESTHOOK_DELIVER_LATEST_VERSION = + "http://hapifhir.io/fhir/StructureDefinition/subscription-resthook-deliver-latest-version"; /** * Indicate which strategy will be used to match this subscription */ - public static final String EXT_SUBSCRIPTION_MATCHING_STRATEGY = "http://hapifhir.io/fhir/StructureDefinition/subscription-matching-strategy"; + public static final String EXT_SUBSCRIPTION_MATCHING_STRATEGY = + "http://hapifhir.io/fhir/StructureDefinition/subscription-matching-strategy"; /** *

    @@ -70,27 +72,32 @@ public class HapiExtensions { * placed on the Subscription.channel element *

    */ - public static final String EXT_SUBSCRIPTION_EMAIL_FROM = "http://hapifhir.io/fhir/StructureDefinition/subscription-email-from"; + public static final String EXT_SUBSCRIPTION_EMAIL_FROM = + "http://hapifhir.io/fhir/StructureDefinition/subscription-email-from"; /** * Extension ID for external binary references */ - public static final String EXT_EXTERNALIZED_BINARY_ID = "http://hapifhir.io/fhir/StructureDefinition/externalized-binary-id"; + public static final String EXT_EXTERNALIZED_BINARY_ID = + "http://hapifhir.io/fhir/StructureDefinition/externalized-binary-id"; /** * For subscription, deliver a bundle containing a search result instead of just a single resource */ - public static final String EXT_SUBSCRIPTION_PAYLOAD_SEARCH_CRITERIA = "http://hapifhir.io/fhir/StructureDefinition/subscription-payload-search-criteria"; + public static final String EXT_SUBSCRIPTION_PAYLOAD_SEARCH_CRITERIA = + "http://hapifhir.io/fhir/StructureDefinition/subscription-payload-search-criteria"; /** * Message added to expansion valueset */ - public static final String EXT_VALUESET_EXPANSION_MESSAGE = "http://hapifhir.io/fhir/StructureDefinition/valueset-expansion-message"; + public static final String EXT_VALUESET_EXPANSION_MESSAGE = + "http://hapifhir.io/fhir/StructureDefinition/valueset-expansion-message"; /** * Extension URL for extension on a SearchParameter indicating that text values should not be indexed */ - public static final String EXT_SEARCHPARAM_TOKEN_SUPPRESS_TEXT_INDEXING = "http://hapifhir.io/fhir/StructureDefinition/searchparameter-token-suppress-text-index"; + public static final String EXT_SEARCHPARAM_TOKEN_SUPPRESS_TEXT_INDEXING = + "http://hapifhir.io/fhir/StructureDefinition/searchparameter-token-suppress-text-index"; /** *

    * This extension represents the equivalent of the @@ -100,28 +107,33 @@ public class HapiExtensions { *

    */ public static final String EXT_META_SOURCE = "http://hapifhir.io/fhir/StructureDefinition/resource-meta-source"; + public static final String EXT_SP_UNIQUE = "http://hapifhir.io/fhir/StructureDefinition/sp-unique"; /** * URL for extension on a Phonetic String SearchParameter indicating that text values should be phonetically indexed with the named encoder */ - public static final String EXT_SEARCHPARAM_PHONETIC_ENCODER = "http://hapifhir.io/fhir/StructureDefinition/searchparameter-phonetic-encoder"; + public static final String EXT_SEARCHPARAM_PHONETIC_ENCODER = + "http://hapifhir.io/fhir/StructureDefinition/searchparameter-phonetic-encoder"; /** * URL for boolean extension added to all placeholder resources */ - public static final String EXT_RESOURCE_PLACEHOLDER = "http://hapifhir.io/fhir/StructureDefinition/resource-placeholder"; + public static final String EXT_RESOURCE_PLACEHOLDER = + "http://hapifhir.io/fhir/StructureDefinition/resource-placeholder"; /** * URL for extension in a Group Bulk Export which identifies the golden patient of a given exported resource. */ - public static final String ASSOCIATED_GOLDEN_RESOURCE_EXTENSION_URL = "https://hapifhir.org/associated-patient-golden-resource/"; + public static final String ASSOCIATED_GOLDEN_RESOURCE_EXTENSION_URL = + "https://hapifhir.org/associated-patient-golden-resource/"; /** * This extension provides an example value for a parameter value for * a REST operation (eg for an OperationDefinition) */ - public static final String EXT_OP_PARAMETER_EXAMPLE_VALUE = "http://hapifhir.io/fhir/StructureDefinition/op-parameter-example-value"; + public static final String EXT_OP_PARAMETER_EXAMPLE_VALUE = + "http://hapifhir.io/fhir/StructureDefinition/op-parameter-example-value"; /** * This extension provides a way for subscribers to provide @@ -129,23 +141,27 @@ public class HapiExtensions { * If provided, subscriptions will be retried this many times * (to a total of retry-count + 1 (for original attempt) */ - public static final String EX_RETRY_COUNT = "http://hapifhir.io/fhir/StructureDefinition/subscription-delivery-retry-count"; + public static final String EX_RETRY_COUNT = + "http://hapifhir.io/fhir/StructureDefinition/subscription-delivery-retry-count"; /** * This extension provides a way for subscribers to indicate if DELETE messages must be sent (default is ignoring them) */ - public static final String EX_SEND_DELETE_MESSAGES = "http://hapifhir.io/fhir/StructureDefinition/subscription-send-delete-messages"; + public static final String EX_SEND_DELETE_MESSAGES = + "http://hapifhir.io/fhir/StructureDefinition/subscription-send-delete-messages"; /** * This entension allows subscriptions to be marked as cross partition and with correct settings, listen to incoming resources from all partitions. */ - public static final String EXTENSION_SUBSCRIPTION_CROSS_PARTITION = "https://smilecdr.com/fhir/ns/StructureDefinition/subscription-cross-partition"; + public static final String EXTENSION_SUBSCRIPTION_CROSS_PARTITION = + "https://smilecdr.com/fhir/ns/StructureDefinition/subscription-cross-partition"; /** * This extension is used for "uplifted refchains" on search parameters. See the * HAPI FHIR documentation for an explanation of how these work. 
*/ - public static final String EXTENSION_SEARCHPARAM_UPLIFT_REFCHAIN = "https://smilecdr.com/fhir/ns/StructureDefinition/searchparameter-uplift-refchain"; + public static final String EXTENSION_SEARCHPARAM_UPLIFT_REFCHAIN = + "https://smilecdr.com/fhir/ns/StructureDefinition/searchparameter-uplift-refchain"; /** * This extension is used for "uplifted refchains" on search parameters. See the * HAPI FHIR documentation for an explanation of how these work. @@ -157,14 +173,13 @@ public class HapiExtensions { */ public static final String EXTENSION_SEARCHPARAM_UPLIFT_REFCHAIN_ELEMENT_NAME = "element-name"; - public static final String EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE = "http://hl7.org/fhir/tools/CustomBaseResource"; - public static final String EXTENSION_SEARCHPARAM_CUSTOM_TARGET_RESOURCE = "http://hl7.org/fhir/tools/CustomTargetResource"; + public static final String EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE = + "http://hl7.org/fhir/tools/CustomBaseResource"; + public static final String EXTENSION_SEARCHPARAM_CUSTOM_TARGET_RESOURCE = + "http://hl7.org/fhir/tools/CustomTargetResource"; /** * Non instantiable */ - private HapiExtensions() { - } - - + private HapiExtensions() {} } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ICallable.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ICallable.java index 9155d1d869e..98ffe6c9f65 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ICallable.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ICallable.java @@ -22,5 +22,4 @@ package ca.uhn.fhir.util; public interface ICallable { T call(); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ILockable.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ILockable.java index 91846895075..78cd36be93c 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ILockable.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ILockable.java @@ -22,5 +22,4 @@ package ca.uhn.fhir.util; public interface ILockable { void lock(); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/IModelVisitor.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/IModelVisitor.java index b4c1eaed1c8..11c89c4a8e4 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/IModelVisitor.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/IModelVisitor.java @@ -19,13 +19,12 @@ */ package ca.uhn.fhir.util; -import java.util.List; - +import ca.uhn.fhir.context.BaseRuntimeChildDefinition; +import ca.uhn.fhir.context.BaseRuntimeElementDefinition; import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseResource; -import ca.uhn.fhir.context.BaseRuntimeChildDefinition; -import ca.uhn.fhir.context.BaseRuntimeElementDefinition; +import java.util.List; /** * @see FhirTerser#visit(IBaseResource, IModelVisitor) @@ -42,6 +41,10 @@ public interface IModelVisitor { * May be null if this is a root element * @param theDefinition */ - void acceptElement(IBaseResource theResource, IBase theElement, List thePathToElement, BaseRuntimeChildDefinition theChildDefinition, BaseRuntimeElementDefinition theDefinition); - + void acceptElement( + IBaseResource theResource, + IBase theElement, + List thePathToElement, + BaseRuntimeChildDefinition theChildDefinition, + BaseRuntimeElementDefinition theDefinition); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/IModelVisitor2.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/IModelVisitor2.java index 87338b4ff32..8866eb5c6c9 100644 --- 
a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/IModelVisitor2.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/IModelVisitor2.java @@ -23,7 +23,6 @@ import ca.uhn.fhir.context.BaseRuntimeChildDefinition; import ca.uhn.fhir.context.BaseRuntimeElementDefinition; import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseExtension; -import org.hl7.fhir.instance.model.api.IBaseResource; import java.util.List; @@ -39,12 +38,20 @@ public interface IModelVisitor2 { * @param theElement The element being visited * @param theContainingElementPath The elements in the path leading up to the actual element being accepted. The first element in this path will be the outer resource being visited, and the last element will be the saem object as the object passed as theElement */ - boolean acceptElement(IBase theElement, List theContainingElementPath, List theChildDefinitionPath, List> theElementDefinitionPath); + boolean acceptElement( + IBase theElement, + List theContainingElementPath, + List theChildDefinitionPath, + List> theElementDefinitionPath); /** * */ - default boolean acceptUndeclaredExtension(IBaseExtension theNextExt, List theContainingElementPath, List theChildDefinitionPath, List> theElementDefinitionPath) { return true; } - - + default boolean acceptUndeclaredExtension( + IBaseExtension theNextExt, + List theContainingElementPath, + List theChildDefinitionPath, + List> theElementDefinitionPath) { + return true; + } } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/IoUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/IoUtil.java index 6be207770ec..628c5424f78 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/IoUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/IoUtil.java @@ -34,5 +34,4 @@ public class IoUtil { // ignore } } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/JsonUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/JsonUtil.java index aa70522de2d..f02d60eecf4 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/JsonUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/JsonUtil.java @@ -28,11 +28,11 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; -import javax.annotation.Nonnull; import java.io.IOException; import java.io.StringWriter; import java.io.Writer; import java.util.List; +import javax.annotation.Nonnull; public class JsonUtil { diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/LogUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/LogUtil.java index 4131bd6be5b..587feb44afd 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/LogUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/LogUtil.java @@ -50,5 +50,4 @@ public class LogUtil { break; } } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/Logs.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/Logs.java index 5ff65d1a394..99a9cf6dee2 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/Logs.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/Logs.java @@ -23,12 +23,16 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class Logs { - private static final Logger ourBatchTroubleshootingLog = LoggerFactory.getLogger("ca.uhn.fhir.log.batch_troubleshooting"); - private static final Logger ourNarrativeGenerationTroubleshootingLog = LoggerFactory.getLogger("ca.uhn.fhir.log.narrative_generation_troubleshooting"); + 
private static final Logger ourBatchTroubleshootingLog = + LoggerFactory.getLogger("ca.uhn.fhir.log.batch_troubleshooting"); + private static final Logger ourNarrativeGenerationTroubleshootingLog = + LoggerFactory.getLogger("ca.uhn.fhir.log.narrative_generation_troubleshooting"); - private static final Logger ourSubscriptionTroubleshootingLog = LoggerFactory.getLogger("ca.cdr.log.subscription_troubleshooting"); + private static final Logger ourSubscriptionTroubleshootingLog = + LoggerFactory.getLogger("ca.cdr.log.subscription_troubleshooting"); - private static final Logger ourSubscriptionTopicLog = LoggerFactory.getLogger("ca.uhn.fhir.log.subscription_topic_troubleshooting"); + private static final Logger ourSubscriptionTopicLog = + LoggerFactory.getLogger("ca.uhn.fhir.log.subscription_topic_troubleshooting"); public static Logger getBatchTroubleshootingLog() { return ourBatchTroubleshootingLog; diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/MessageSupplier.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/MessageSupplier.java index c622c1abc09..c83192ca760 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/MessageSupplier.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/MessageSupplier.java @@ -26,18 +26,18 @@ import java.util.function.Supplier; * a future version will allow lambda params */ public class MessageSupplier { - private Supplier supplier; + private Supplier supplier; - public MessageSupplier(Supplier supplier) { - this.supplier = supplier; - } + public MessageSupplier(Supplier supplier) { + this.supplier = supplier; + } - @Override - public String toString() { - return supplier.get().toString(); - } + @Override + public String toString() { + return supplier.get().toString(); + } - public static MessageSupplier msg(Supplier supplier) { - return new MessageSupplier(supplier); - } + public static MessageSupplier msg(Supplier supplier) { + return new MessageSupplier(supplier); + } } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/MetaUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/MetaUtil.java index 476b36bd283..c3bcc5d801d 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/MetaUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/MetaUtil.java @@ -39,7 +39,6 @@ import java.util.List; import static org.apache.commons.lang3.StringUtils.defaultString; import static org.apache.commons.lang3.StringUtils.isNotBlank; - public class MetaUtil { private static final Logger ourLog = LoggerFactory.getLogger(MetaUtil.class); @@ -58,7 +57,9 @@ public class MetaUtil { } else if (theContext.getVersion().getVersion().equals(FhirVersionEnum.DSTU3)) { return getSourceDstu3((IBaseHasExtensions) theMeta); } else { - throw new UnsupportedOperationException(Msg.code(1782) + MetaUtil.class.getSimpleName() + ".getSource() not supported on FHIR Version " + theContext.getVersion().getVersion()); + throw new UnsupportedOperationException( + Msg.code(1782) + MetaUtil.class.getSimpleName() + ".getSource() not supported on FHIR Version " + + theContext.getVersion().getVersion()); } } @@ -75,7 +76,8 @@ public class MetaUtil { } private static String getSourceR4Plus(FhirContext theFhirContext, IBaseMetaType theMeta) { - BaseRuntimeElementCompositeDefinition elementDef = (BaseRuntimeElementCompositeDefinition) theFhirContext.getElementDefinition(theMeta.getClass()); + BaseRuntimeElementCompositeDefinition elementDef = + (BaseRuntimeElementCompositeDefinition) theFhirContext.getElementDefinition(theMeta.getClass()); BaseRuntimeChildDefinition 
sourceChild = elementDef.getChildByName("source"); if (sourceChild == null) { return null; @@ -88,13 +90,14 @@ public class MetaUtil { return retVal; } - public static void populateResourceSource(FhirContext theFhirContext, String theProvenanceSourceUri, String theProvenanceRequestId, R theRetVal) { + public static void populateResourceSource( + FhirContext theFhirContext, String theProvenanceSourceUri, String theProvenanceRequestId, R theRetVal) { String sourceString = cleanProvenanceSourceUriOrEmpty(theProvenanceSourceUri); - if (isNotBlank(theProvenanceRequestId)){ + if (isNotBlank(theProvenanceRequestId)) { sourceString = sourceString + "#" + theProvenanceRequestId; } - if (isNotBlank(sourceString)){ + if (isNotBlank(sourceString)) { setSource(theFhirContext, theRetVal, sourceString); } } @@ -116,27 +119,29 @@ public class MetaUtil { } else if (theContext.getVersion().getVersion().equals(FhirVersionEnum.DSTU3)) { IBaseExtension sourceExtension = ((IBaseHasExtensions) theResource.getMeta()).addExtension(); sourceExtension.setUrl(HapiExtensions.EXT_META_SOURCE); - IPrimitiveType value = (IPrimitiveType) theContext.getElementDefinition("uri").newInstance(); + IPrimitiveType value = (IPrimitiveType) + theContext.getElementDefinition("uri").newInstance(); value.setValue(theValue); sourceExtension.setValue(value); } else { - ourLog.debug(MetaUtil.class.getSimpleName() + ".setSource() not supported on FHIR Version " + theContext.getVersion().getVersion()); + ourLog.debug(MetaUtil.class.getSimpleName() + ".setSource() not supported on FHIR Version " + + theContext.getVersion().getVersion()); } } public static void setSource(FhirContext theContext, IBaseMetaType theMeta, String theValue) { - BaseRuntimeElementCompositeDefinition elementDef = (BaseRuntimeElementCompositeDefinition) theContext.getElementDefinition(theMeta.getClass()); + BaseRuntimeElementCompositeDefinition elementDef = + (BaseRuntimeElementCompositeDefinition) theContext.getElementDefinition(theMeta.getClass()); BaseRuntimeChildDefinition sourceChild = elementDef.getChildByName("source"); List sourceValues = sourceChild.getAccessor().getValues(theMeta); IPrimitiveType sourceElement; if (sourceValues.size() > 0) { sourceElement = ((IPrimitiveType) sourceValues.get(0)); } else { - sourceElement = (IPrimitiveType) theContext.getElementDefinition("uri").newInstance(); + sourceElement = + (IPrimitiveType) theContext.getElementDefinition("uri").newInstance(); sourceChild.getMutator().setValue(theMeta, sourceElement); } sourceElement.setValueAsString(theValue); } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/MultimapCollector.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/MultimapCollector.java index 1682aa8c661..a86577454fb 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/MultimapCollector.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/MultimapCollector.java @@ -33,8 +33,7 @@ import java.util.stream.Collector; /** * Copied from https://stackoverflow.com/questions/23003542/cleanest-way-to-create-a-guava-multimap-from-a-java-8-stream */ -public class MultimapCollector implements - Collector, ListMultimap> { +public class MultimapCollector implements Collector, ListMultimap> { private final Function keyGetter; private final Function valueGetter; @@ -44,7 +43,8 @@ public class MultimapCollector implements this.valueGetter = valueGetter; } - public static MultimapCollector toMultimap(Function keyGetter, Function valueGetter) { + public static MultimapCollector toMultimap( + Function keyGetter, 
Function valueGetter) { return new MultimapCollector<>(keyGetter, valueGetter); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/NonPrettyPrintWriterWrapper.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/NonPrettyPrintWriterWrapper.java index f1a73af0ada..5f2ae793da8 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/NonPrettyPrintWriterWrapper.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/NonPrettyPrintWriterWrapper.java @@ -90,7 +90,8 @@ public class NonPrettyPrintWriterWrapper implements XMLStreamWriter { } @Override - public void writeStartElement(String thePrefix, String theLocalName, String theNamespaceURI) throws XMLStreamException { + public void writeStartElement(String thePrefix, String theLocalName, String theNamespaceURI) + throws XMLStreamException { if (PRE.equals(theLocalName) || myInsidePre > 0) { myInsidePre++; } @@ -105,7 +106,8 @@ public class NonPrettyPrintWriterWrapper implements XMLStreamWriter { @Override @CoverageIgnore - public void writeEmptyElement(String thePrefix, String theLocalName, String theNamespaceURI) throws XMLStreamException { + public void writeEmptyElement(String thePrefix, String theLocalName, String theNamespaceURI) + throws XMLStreamException { myTarget.writeEmptyElement(thePrefix, theLocalName, theNamespaceURI); } @@ -135,7 +137,8 @@ public class NonPrettyPrintWriterWrapper implements XMLStreamWriter { @Override @CoverageIgnore - public void writeAttribute(String thePrefix, String theNamespaceURI, String theLocalName, String theValue) throws XMLStreamException { + public void writeAttribute(String thePrefix, String theNamespaceURI, String theLocalName, String theValue) + throws XMLStreamException { myTarget.writeAttribute(thePrefix, theNamespaceURI, theLocalName, theValue); } @@ -221,7 +224,8 @@ public class NonPrettyPrintWriterWrapper implements XMLStreamWriter { writeCharacters(theText, theStart, theLen, myTarget, myInsidePre); } - static void writeCharacters(char[] theText, int theStart, int theLen, XMLStreamWriter target, int insidePre) throws XMLStreamException { + static void writeCharacters(char[] theText, int theStart, int theLen, XMLStreamWriter target, int insidePre) + throws XMLStreamException { if (theLen > 0) { if (insidePre > 0) { target.writeCharacters(theText, theStart, theLen); @@ -248,7 +252,6 @@ public class NonPrettyPrintWriterWrapper implements XMLStreamWriter { if (end < initialEnd) { target.writeCharacters(" "); } - } } } @@ -258,5 +261,4 @@ public class NonPrettyPrintWriterWrapper implements XMLStreamWriter { public Object getProperty(String theName) throws IllegalArgumentException { return myTarget.getProperty(theName); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/NumericParamRangeUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/NumericParamRangeUtil.java index 3a8975f7619..bbdc0d99769 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/NumericParamRangeUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/NumericParamRangeUtil.java @@ -34,9 +34,7 @@ public class NumericParamRangeUtil { * @return a Pair of BigDecimal(s) with the low and high range boundaries */ public static Pair getRange(BigDecimal theNumber) { - BigDecimal halfRange = BigDecimal.valueOf(.5).movePointLeft( theNumber.scale() ); + BigDecimal halfRange = BigDecimal.valueOf(.5).movePointLeft(theNumber.scale()); return Pair.of(theNumber.subtract(halfRange), theNumber.add(halfRange)); } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ObjectUtil.java 
b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ObjectUtil.java index eae12d73aea..afefe7bc2ce 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ObjectUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ObjectUtil.java @@ -37,12 +37,11 @@ public class ObjectUtil { public static boolean equals(Object object1, Object object2) { return Objects.equals(object1, object2); } - + public static T requireNonNull(T obj, String message) { - if (obj == null) - throw new NullPointerException(Msg.code(1776) + message); - return obj; - } + if (obj == null) throw new NullPointerException(Msg.code(1776) + message); + return obj; + } public static void requireNotEmpty(String str, String message) { if (StringUtils.isBlank(str)) { @@ -65,5 +64,4 @@ public class ObjectUtil { return Optional.empty(); } } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/OperationOutcomeUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/OperationOutcomeUtil.java index 422a90d1ea1..ccb2b588169 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/OperationOutcomeUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/OperationOutcomeUtil.java @@ -34,8 +34,8 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.ICompositeType; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nullable; import java.util.List; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -53,20 +53,45 @@ public class OperationOutcomeUtil { * @param theCode * @return Returns the newly added issue */ - public static IBase addIssue(FhirContext theCtx, IBaseOperationOutcome theOperationOutcome, String theSeverity, String theDetails, String theLocation, String theCode) { + public static IBase addIssue( + FhirContext theCtx, + IBaseOperationOutcome theOperationOutcome, + String theSeverity, + String theDetails, + String theLocation, + String theCode) { return addIssue(theCtx, theOperationOutcome, theSeverity, theDetails, theLocation, theCode, null, null, null); } - public static IBase addIssue(FhirContext theCtx, IBaseOperationOutcome theOperationOutcome, String theSeverity, String theDetails, String theLocation, String theCode, @Nullable String theDetailSystem, @Nullable String theDetailCode, @Nullable String theDetailDescription) { + public static IBase addIssue( + FhirContext theCtx, + IBaseOperationOutcome theOperationOutcome, + String theSeverity, + String theDetails, + String theLocation, + String theCode, + @Nullable String theDetailSystem, + @Nullable String theDetailCode, + @Nullable String theDetailDescription) { IBase issue = createIssue(theCtx, theOperationOutcome); - populateDetails(theCtx, issue, theSeverity, theDetails, theLocation, theCode, theDetailSystem, theDetailCode, theDetailDescription); + populateDetails( + theCtx, + issue, + theSeverity, + theDetails, + theLocation, + theCode, + theDetailSystem, + theDetailCode, + theDetailDescription); return issue; } private static IBase createIssue(FhirContext theCtx, IBaseResource theOutcome) { RuntimeResourceDefinition ooDef = theCtx.getResourceDefinition(theOutcome); BaseRuntimeChildDefinition issueChild = ooDef.getChildByName("issue"); - BaseRuntimeElementCompositeDefinition issueElement = (BaseRuntimeElementCompositeDefinition) issueChild.getChildByName("issue"); + BaseRuntimeElementCompositeDefinition issueElement = + (BaseRuntimeElementCompositeDefinition) issueChild.getChildByName("issue"); IBase issue = 
issueElement.newInstance(); issueChild.getMutator().addValue(theOutcome, issue); @@ -95,7 +120,8 @@ public class OperationOutcomeUtil { } IBase issue = issues.get(0); - BaseRuntimeElementCompositeDefinition issueElement = (BaseRuntimeElementCompositeDefinition) theCtx.getElementDefinition(issue.getClass()); + BaseRuntimeElementCompositeDefinition issueElement = + (BaseRuntimeElementCompositeDefinition) theCtx.getElementDefinition(issue.getClass()); BaseRuntimeChildDefinition detailsChild = issueElement.getChildByName(name); List details = detailsChild.getAccessor().getValues(issue); @@ -121,24 +147,26 @@ public class OperationOutcomeUtil { return issueChild.getAccessor().getValues(theOutcome).size(); } - public static boolean hasIssuesOfSeverity(FhirContext theCtx, IBaseOperationOutcome theOutcome, String theSeverity) { + public static boolean hasIssuesOfSeverity( + FhirContext theCtx, IBaseOperationOutcome theOutcome, String theSeverity) { RuntimeResourceDefinition ooDef = theCtx.getResourceDefinition(theOutcome); BaseRuntimeChildDefinition issueChild = ooDef.getChildByName("issue"); List issues = issueChild.getAccessor().getValues(theOutcome); if (issues.isEmpty()) { - return false; // if there are no issues at all, there are no issues of the required severity + return false; // if there are no issues at all, there are no issues of the required severity } IBase firstIssue = issues.get(0); - BaseRuntimeElementCompositeDefinition issueElement = (BaseRuntimeElementCompositeDefinition) theCtx.getElementDefinition(firstIssue.getClass()); + BaseRuntimeElementCompositeDefinition issueElement = + (BaseRuntimeElementCompositeDefinition) theCtx.getElementDefinition(firstIssue.getClass()); BaseRuntimeChildDefinition severityChild = issueElement.getChildByName("severity"); return issues.stream() - .flatMap(t -> severityChild.getAccessor().getValues(t).stream()) - .map(t -> (IPrimitiveType) t) - .map(IPrimitiveType::getValueAsString) - .anyMatch(theSeverity::equals); + .flatMap(t -> severityChild.getAccessor().getValues(t).stream()) + .map(t -> (IPrimitiveType) t) + .map(IPrimitiveType::getValueAsString) + .anyMatch(theSeverity::equals); } public static IBaseOperationOutcome newInstance(FhirContext theCtx) { @@ -152,20 +180,32 @@ public class OperationOutcomeUtil { } } - private static void populateDetails(FhirContext theCtx, IBase theIssue, String theSeverity, String theDetails, String theLocation, String theCode, String theDetailSystem, String theDetailCode, String theDetailDescription) { - BaseRuntimeElementCompositeDefinition issueElement = (BaseRuntimeElementCompositeDefinition) theCtx.getElementDefinition(theIssue.getClass()); + private static void populateDetails( + FhirContext theCtx, + IBase theIssue, + String theSeverity, + String theDetails, + String theLocation, + String theCode, + String theDetailSystem, + String theDetailCode, + String theDetailDescription) { + BaseRuntimeElementCompositeDefinition issueElement = + (BaseRuntimeElementCompositeDefinition) theCtx.getElementDefinition(theIssue.getClass()); BaseRuntimeChildDefinition diagnosticsChild; diagnosticsChild = issueElement.getChildByName("diagnostics"); BaseRuntimeChildDefinition codeChild = issueElement.getChildByName("code"); - IPrimitiveType codeElem = (IPrimitiveType) codeChild.getChildByName("code").newInstance(codeChild.getInstanceConstructorArguments()); + IPrimitiveType codeElem = (IPrimitiveType) + codeChild.getChildByName("code").newInstance(codeChild.getInstanceConstructorArguments()); 
codeElem.setValueAsString(theCode); codeChild.getMutator().addValue(theIssue, codeElem); BaseRuntimeElementDefinition stringDef = diagnosticsChild.getChildByName(diagnosticsChild.getElementName()); BaseRuntimeChildDefinition severityChild = issueElement.getChildByName("severity"); - IPrimitiveType severityElem = (IPrimitiveType) severityChild.getChildByName("severity").newInstance(severityChild.getInstanceConstructorArguments()); + IPrimitiveType severityElem = (IPrimitiveType) + severityChild.getChildByName("severity").newInstance(severityChild.getInstanceConstructorArguments()); severityElem.setValueAsString(theSeverity); severityChild.getMutator().addValue(theIssue, severityElem); @@ -196,29 +236,44 @@ public class OperationOutcomeUtil { public static void addLocationToIssue(FhirContext theContext, IBase theIssue, String theLocation) { if (isNotBlank(theLocation)) { - BaseRuntimeElementCompositeDefinition issueElement = (BaseRuntimeElementCompositeDefinition) theContext.getElementDefinition(theIssue.getClass()); + BaseRuntimeElementCompositeDefinition issueElement = + (BaseRuntimeElementCompositeDefinition) theContext.getElementDefinition(theIssue.getClass()); BaseRuntimeChildDefinition locationChild = issueElement.getChildByName("location"); - IPrimitiveType locationElem = (IPrimitiveType) locationChild.getChildByName("location").newInstance(locationChild.getInstanceConstructorArguments()); + IPrimitiveType locationElem = (IPrimitiveType) locationChild + .getChildByName("location") + .newInstance(locationChild.getInstanceConstructorArguments()); locationElem.setValueAsString(theLocation); locationChild.getMutator().addValue(theIssue, locationElem); } } - public static IBase addIssueWithMessageId(FhirContext myCtx, IBaseOperationOutcome theOperationOutcome, String severity, String message, String messageId, String location, String theCode) { + public static IBase addIssueWithMessageId( + FhirContext myCtx, + IBaseOperationOutcome theOperationOutcome, + String severity, + String message, + String messageId, + String location, + String theCode) { IBase issue = addIssue(myCtx, theOperationOutcome, severity, message, location, theCode); - BaseRuntimeElementCompositeDefinition issueElement = (BaseRuntimeElementCompositeDefinition) myCtx.getElementDefinition(issue.getClass()); + BaseRuntimeElementCompositeDefinition issueElement = + (BaseRuntimeElementCompositeDefinition) myCtx.getElementDefinition(issue.getClass()); BaseRuntimeChildDefinition detailsChildDef = issueElement.getChildByName("details"); - IPrimitiveType system = (IPrimitiveType) myCtx.getElementDefinition("uri").newInstance(); + IPrimitiveType system = + (IPrimitiveType) myCtx.getElementDefinition("uri").newInstance(); system.setValueAsString(Constants.JAVA_VALIDATOR_DETAILS_SYSTEM); - IPrimitiveType code = (IPrimitiveType) myCtx.getElementDefinition("code").newInstance(); + IPrimitiveType code = + (IPrimitiveType) myCtx.getElementDefinition("code").newInstance(); code.setValueAsString(messageId); - BaseRuntimeElementCompositeDefinition codingDef = (BaseRuntimeElementCompositeDefinition) myCtx.getElementDefinition("Coding"); + BaseRuntimeElementCompositeDefinition codingDef = + (BaseRuntimeElementCompositeDefinition) myCtx.getElementDefinition("Coding"); ICompositeType coding = (ICompositeType) codingDef.newInstance(); codingDef.getChildByName("system").getMutator().addValue(coding, system); codingDef.getChildByName("code").getMutator().addValue(coding, code); - BaseRuntimeElementCompositeDefinition ccDef = 
(BaseRuntimeElementCompositeDefinition) myCtx.getElementDefinition("CodeableConcept"); + BaseRuntimeElementCompositeDefinition ccDef = + (BaseRuntimeElementCompositeDefinition) myCtx.getElementDefinition("CodeableConcept"); ICompositeType codeableConcept = (ICompositeType) ccDef.newInstance(); ccDef.getChildByName("coding").getMutator().addValue(codeableConcept, coding); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ParametersUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ParametersUtil.java index a82efced19e..3b091f9ff08 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ParametersUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ParametersUtil.java @@ -35,7 +35,6 @@ import org.hl7.fhir.instance.model.api.IBaseReference; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nullable; import java.lang.annotation.Annotation; import java.lang.reflect.AnnotatedElement; import java.math.BigDecimal; @@ -46,6 +45,7 @@ import java.util.List; import java.util.Optional; import java.util.function.Function; import java.util.stream.Collectors; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.defaultIfBlank; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -55,54 +55,63 @@ import static org.apache.commons.lang3.StringUtils.isBlank; */ public class ParametersUtil { - public static Optional getNamedParameterValueAsString(FhirContext theCtx, IBaseParameters theParameters, String theParameterName) { + public static Optional getNamedParameterValueAsString( + FhirContext theCtx, IBaseParameters theParameters, String theParameterName) { Function, String> mapper = t -> defaultIfBlank(t.getValueAsString(), null); - return extractNamedParameters(theCtx, theParameters, theParameterName, mapper).stream().findFirst(); + return extractNamedParameters(theCtx, theParameters, theParameterName, mapper).stream() + .findFirst(); } - public static List getNamedParameterValuesAsString(FhirContext theCtx, IBaseParameters theParameters, String theParameterName) { + public static List getNamedParameterValuesAsString( + FhirContext theCtx, IBaseParameters theParameters, String theParameterName) { Function, String> mapper = t -> defaultIfBlank(t.getValueAsString(), null); return extractNamedParameters(theCtx, theParameters, theParameterName, mapper); } - public static List getNamedParameterValuesAsInteger(FhirContext theCtx, IBaseParameters theParameters, String theParameterName) { + public static List getNamedParameterValuesAsInteger( + FhirContext theCtx, IBaseParameters theParameters, String theParameterName) { Function, Integer> mapper = t -> (Integer) t.getValue(); return extractNamedParameters(theCtx, theParameters, theParameterName, mapper); } - public static Optional getNamedParameterValueAsInteger(FhirContext theCtx, IBaseParameters theParameters, String theParameterName) { - return getNamedParameterValuesAsInteger(theCtx, theParameters, theParameterName).stream().findFirst(); + public static Optional getNamedParameterValueAsInteger( + FhirContext theCtx, IBaseParameters theParameters, String theParameterName) { + return getNamedParameterValuesAsInteger(theCtx, theParameters, theParameterName).stream() + .findFirst(); } - public static Optional getNamedParameter(FhirContext theCtx, IBaseResource theParameters, String theParameterName) { - return getNamedParameters(theCtx, theParameters, theParameterName).stream().findFirst(); + public static 
Optional getNamedParameter( + FhirContext theCtx, IBaseResource theParameters, String theParameterName) { + return getNamedParameters(theCtx, theParameters, theParameterName).stream() + .findFirst(); } - public static List getNamedParameters(FhirContext theCtx, IBaseResource theParameters, String theParameterName) { + public static List getNamedParameters( + FhirContext theCtx, IBaseResource theParameters, String theParameterName) { Validate.notNull(theParameters, "theParameters must not be null"); RuntimeResourceDefinition resDef = theCtx.getResourceDefinition(theParameters.getClass()); BaseRuntimeChildDefinition parameterChild = resDef.getChildByName("parameter"); List parameterReps = parameterChild.getAccessor().getValues(theParameters); - return parameterReps - .stream() - .filter(param -> { - BaseRuntimeElementCompositeDefinition nextParameterDef = (BaseRuntimeElementCompositeDefinition) theCtx.getElementDefinition(param.getClass()); - BaseRuntimeChildDefinition nameChild = nextParameterDef.getChildByName("name"); - List nameValues = nameChild.getAccessor().getValues(param); - Optional> nameValue = nameValues - .stream() - .filter(t -> t instanceof IPrimitiveType) - .map(t -> ((IPrimitiveType) t)) - .findFirst(); - return nameValue.isPresent() && theParameterName.equals(nameValue.get().getValueAsString()); - }) - .collect(Collectors.toList()); - + return parameterReps.stream() + .filter(param -> { + BaseRuntimeElementCompositeDefinition nextParameterDef = + (BaseRuntimeElementCompositeDefinition) theCtx.getElementDefinition(param.getClass()); + BaseRuntimeChildDefinition nameChild = nextParameterDef.getChildByName("name"); + List nameValues = nameChild.getAccessor().getValues(param); + Optional> nameValue = nameValues.stream() + .filter(t -> t instanceof IPrimitiveType) + .map(t -> ((IPrimitiveType) t)) + .findFirst(); + return nameValue.isPresent() + && theParameterName.equals(nameValue.get().getValueAsString()); + }) + .collect(Collectors.toList()); } public static Optional getParameterPart(FhirContext theCtx, IBase theParameter, String theParameterName) { - BaseRuntimeElementCompositeDefinition nextParameterDef = (BaseRuntimeElementCompositeDefinition) theCtx.getElementDefinition(theParameter.getClass()); + BaseRuntimeElementCompositeDefinition nextParameterDef = + (BaseRuntimeElementCompositeDefinition) theCtx.getElementDefinition(theParameter.getClass()); BaseRuntimeChildDefinition valueChild = nextParameterDef.getChildByName("part"); List parts = valueChild.getAccessor().getValues(theParameter); @@ -116,7 +125,8 @@ public class ParametersUtil { return Optional.empty(); } - public static Optional getParameterPartValue(FhirContext theCtx, IBase theParameter, String theParameterName) { + public static Optional getParameterPartValue( + FhirContext theCtx, IBase theParameter, String theParameterName) { Optional part = getParameterPart(theCtx, theParameter, theParameterName); if (part.isPresent()) { return theCtx.newTerser().getSingleValue(part.get(), "value[x]", IBase.class); @@ -125,47 +135,63 @@ public class ParametersUtil { } } - public static String getParameterPartValueAsString(FhirContext theCtx, IBase theParameter, String theParameterName) { - return getParameterPartValue(theCtx, theParameter, theParameterName).map(t -> (IPrimitiveType) t).map(t -> t.getValueAsString()).orElse(null); - } - - public static Optional getParameterPartValueAsInteger(FhirContext theCtx, IBase theParameter, String theParameterName) { + public static String getParameterPartValueAsString( + FhirContext 
theCtx, IBase theParameter, String theParameterName) { return getParameterPartValue(theCtx, theParameter, theParameterName) - .filter(t -> IPrimitiveType.class.isAssignableFrom(t.getClass())) - .map(t -> (IPrimitiveType) t) - .map(IPrimitiveType::getValue) - .filter(t -> Integer.class.isAssignableFrom(t.getClass())) - .map(t -> (Integer) t); + .map(t -> (IPrimitiveType) t) + .map(t -> t.getValueAsString()) + .orElse(null); } - private static List extractNamedParameters(FhirContext theCtx, IBaseParameters theParameters, String theParameterName, Function, T> theMapper) { + public static Optional getParameterPartValueAsInteger( + FhirContext theCtx, IBase theParameter, String theParameterName) { + return getParameterPartValue(theCtx, theParameter, theParameterName) + .filter(t -> IPrimitiveType.class.isAssignableFrom(t.getClass())) + .map(t -> (IPrimitiveType) t) + .map(IPrimitiveType::getValue) + .filter(t -> Integer.class.isAssignableFrom(t.getClass())) + .map(t -> (Integer) t); + } + + private static List extractNamedParameters( + FhirContext theCtx, + IBaseParameters theParameters, + String theParameterName, + Function, T> theMapper) { List retVal = new ArrayList<>(); List namedParameters = getNamedParameters(theCtx, theParameters, theParameterName); for (IBase nextParameter : namedParameters) { - BaseRuntimeElementCompositeDefinition nextParameterDef = (BaseRuntimeElementCompositeDefinition) theCtx.getElementDefinition(nextParameter.getClass()); + BaseRuntimeElementCompositeDefinition nextParameterDef = + (BaseRuntimeElementCompositeDefinition) theCtx.getElementDefinition(nextParameter.getClass()); BaseRuntimeChildDefinition valueChild = nextParameterDef.getChildByName("value[x]"); List valueValues = valueChild.getAccessor().getValues(nextParameter); - valueValues - .stream() - .filter(t -> t instanceof IPrimitiveType) - .map(t -> ((IPrimitiveType) t)) - .map(theMapper) - .filter(t -> t != null) - .forEach(retVal::add); - + valueValues.stream() + .filter(t -> t instanceof IPrimitiveType) + .map(t -> ((IPrimitiveType) t)) + .map(theMapper) + .filter(t -> t != null) + .forEach(retVal::add); } return retVal; } - private static void addClientParameter(FhirContext theContext, Object theValue, IBaseResource theTargetResource, BaseRuntimeChildDefinition paramChild, BaseRuntimeElementCompositeDefinition paramChildElem, String theName) { + private static void addClientParameter( + FhirContext theContext, + Object theValue, + IBaseResource theTargetResource, + BaseRuntimeChildDefinition paramChild, + BaseRuntimeElementCompositeDefinition paramChildElem, + String theName) { Validate.notNull(theValue, "theValue must not be null"); if (theValue instanceof IBaseResource) { - IBase parameter = createParameterRepetition(theContext, theTargetResource, paramChild, paramChildElem, theName); + IBase parameter = + createParameterRepetition(theContext, theTargetResource, paramChild, paramChildElem, theName); paramChildElem.getChildByName("resource").getMutator().addValue(parameter, (IBaseResource) theValue); } else if (theValue instanceof IBaseDatatype) { - IBase parameter = createParameterRepetition(theContext, theTargetResource, paramChild, paramChildElem, theName); + IBase parameter = + createParameterRepetition(theContext, theTargetResource, paramChild, paramChildElem, theName); paramChildElem.getChildByName("value[x]").getMutator().addValue(parameter, (IBaseDatatype) theValue); } else if (theValue instanceof Collection) { Collection collection = (Collection) theValue; @@ -173,7 +199,8 @@ public class 
ParametersUtil { addClientParameter(theContext, next, theTargetResource, paramChild, paramChildElem, theName); } } else { - throw new IllegalArgumentException(Msg.code(1806) + "Don't know how to handle value of type " + theValue.getClass() + " for parameter " + theName); + throw new IllegalArgumentException(Msg.code(1806) + "Don't know how to handle value of type " + + theValue.getClass() + " for parameter " + theName); } } @@ -185,10 +212,12 @@ public class ParametersUtil { * @param theName The parametr name * @param theValue The parameter value (can be a {@link IBaseResource resource} or a {@link IBaseDatatype datatype}) */ - public static void addParameterToParameters(FhirContext theContext, IBaseParameters theParameters, String theName, Object theValue) { + public static void addParameterToParameters( + FhirContext theContext, IBaseParameters theParameters, String theName, Object theValue) { RuntimeResourceDefinition def = theContext.getResourceDefinition(theParameters); BaseRuntimeChildDefinition paramChild = def.getChildByName("parameter"); - BaseRuntimeElementCompositeDefinition paramChildElem = (BaseRuntimeElementCompositeDefinition) paramChild.getChildByName("parameter"); + BaseRuntimeElementCompositeDefinition paramChildElem = + (BaseRuntimeElementCompositeDefinition) paramChild.getChildByName("parameter"); addClientParameter(theContext, theValue, theParameters, paramChild, paramChildElem, theName); } @@ -202,7 +231,12 @@ public class ParametersUtil { * @param thePrimitiveDatatype The datatype, e.g. "string", or "uri" * @param theValue The value */ - public static void addParameterToParameters(FhirContext theContext, IBaseParameters theParameters, String theName, String thePrimitiveDatatype, String theValue) { + public static void addParameterToParameters( + FhirContext theContext, + IBaseParameters theParameters, + String theName, + String thePrimitiveDatatype, + String theValue) { Validate.notBlank(thePrimitiveDatatype, "thePrimitiveDatatype must not be null or empty"); BaseRuntimeElementDefinition datatypeDef = theContext.getElementDefinition(thePrimitiveDatatype); @@ -212,7 +246,12 @@ public class ParametersUtil { addParameterToParameters(theContext, theParameters, theName, value); } - private static IBase createParameterRepetition(FhirContext theContext, IBaseResource theTargetResource, BaseRuntimeChildDefinition paramChild, BaseRuntimeElementCompositeDefinition paramChildElem, String theName) { + private static IBase createParameterRepetition( + FhirContext theContext, + IBaseResource theTargetResource, + BaseRuntimeChildDefinition paramChild, + BaseRuntimeElementCompositeDefinition paramChildElem, + String theName) { IBase parameter = paramChildElem.newInstance(); paramChild.getMutator().addValue(theTargetResource, parameter); IPrimitiveType value; @@ -224,7 +263,8 @@ public class ParametersUtil { public static IPrimitiveType createString(FhirContext theContext, String theValue) { IPrimitiveType value; if (theContext.getVersion().getVersion().isRi()) { - value = (IPrimitiveType) theContext.getElementDefinition("string").newInstance(theValue); + value = (IPrimitiveType) + theContext.getElementDefinition("string").newInstance(theValue); } else { value = new StringDt(theValue); } @@ -232,12 +272,14 @@ public class ParametersUtil { } public static IPrimitiveType createUri(FhirContext theContext, String theValue) { - IPrimitiveType value = (IPrimitiveType) theContext.getElementDefinition("uri").newInstance(theValue); + IPrimitiveType value = + (IPrimitiveType) 
theContext.getElementDefinition("uri").newInstance(theValue); return value; } public static IPrimitiveType createCode(FhirContext theContext, String theValue) { - IPrimitiveType value = (IPrimitiveType) theContext.getElementDefinition("code").newInstance(theValue); + IPrimitiveType value = + (IPrimitiveType) theContext.getElementDefinition("code").newInstance(theValue); return value; } @@ -247,89 +289,108 @@ public class ParametersUtil { } @SuppressWarnings("unchecked") - public static void addParameterToParametersBoolean(FhirContext theCtx, IBaseParameters theParameters, String theName, boolean theValue) { + public static void addParameterToParametersBoolean( + FhirContext theCtx, IBaseParameters theParameters, String theName, boolean theValue) { addParameterToParameters(theCtx, theParameters, theName, theCtx.getPrimitiveBoolean(theValue)); } @SuppressWarnings("unchecked") - public static void addParameterToParametersCode(FhirContext theCtx, IBaseParameters theParameters, String theName, String theValue) { - IPrimitiveType value = (IPrimitiveType) theCtx.getElementDefinition("code").newInstance(); + public static void addParameterToParametersCode( + FhirContext theCtx, IBaseParameters theParameters, String theName, String theValue) { + IPrimitiveType value = + (IPrimitiveType) theCtx.getElementDefinition("code").newInstance(); value.setValue(theValue); addParameterToParameters(theCtx, theParameters, theName, value); } @SuppressWarnings("unchecked") - public static void addParameterToParametersInteger(FhirContext theCtx, IBaseParameters theParameters, String theName, int theValue) { - IPrimitiveType count = (IPrimitiveType) theCtx.getElementDefinition("integer").newInstance(); + public static void addParameterToParametersInteger( + FhirContext theCtx, IBaseParameters theParameters, String theName, int theValue) { + IPrimitiveType count = + (IPrimitiveType) theCtx.getElementDefinition("integer").newInstance(); count.setValue(theValue); addParameterToParameters(theCtx, theParameters, theName, count); } - public static void addParameterToParametersLong(FhirContext theCtx, IBaseParameters theParameters, String theName, long theValue) { + public static void addParameterToParametersLong( + FhirContext theCtx, IBaseParameters theParameters, String theName, long theValue) { addParameterToParametersDecimal(theCtx, theParameters, theName, BigDecimal.valueOf(theValue)); } - public static void addParameterToParametersDecimal(FhirContext theCtx, IBaseParameters theParameters, String theName, BigDecimal theValue) { - IPrimitiveType count = (IPrimitiveType) theCtx.getElementDefinition("decimal").newInstance(); + public static void addParameterToParametersDecimal( + FhirContext theCtx, IBaseParameters theParameters, String theName, BigDecimal theValue) { + IPrimitiveType count = (IPrimitiveType) + theCtx.getElementDefinition("decimal").newInstance(); count.setValue(theValue); addParameterToParameters(theCtx, theParameters, theName, count); } - public static void addParameterToParametersReference(FhirContext theCtx, IBaseParameters theParameters, String theName, String theReference) { - IBaseReference target = (IBaseReference) theCtx.getElementDefinition("reference").newInstance(); + public static void addParameterToParametersReference( + FhirContext theCtx, IBaseParameters theParameters, String theName, String theReference) { + IBaseReference target = + (IBaseReference) theCtx.getElementDefinition("reference").newInstance(); target.setReference(theReference); addParameterToParameters(theCtx, 
theParameters, theName, target); } @SuppressWarnings("unchecked") - public static void addParameterToParametersString(FhirContext theCtx, IBaseParameters theParameters, String theName, String theValue) { - IPrimitiveType value = (IPrimitiveType) theCtx.getElementDefinition("string").newInstance(); + public static void addParameterToParametersString( + FhirContext theCtx, IBaseParameters theParameters, String theName, String theValue) { + IPrimitiveType value = + (IPrimitiveType) theCtx.getElementDefinition("string").newInstance(); value.setValue(theValue); addParameterToParameters(theCtx, theParameters, theName, value); } @SuppressWarnings("unchecked") - public static void addParameterToParametersUri(FhirContext theCtx, IBaseParameters theParameters, String theName, String theValue) { - IPrimitiveType value = (IPrimitiveType) theCtx.getElementDefinition("uri").newInstance(); + public static void addParameterToParametersUri( + FhirContext theCtx, IBaseParameters theParameters, String theName, String theValue) { + IPrimitiveType value = + (IPrimitiveType) theCtx.getElementDefinition("uri").newInstance(); value.setValue(theValue); addParameterToParameters(theCtx, theParameters, theName, value); - } /** * Add a parameter with no value (typically because we'll be adding sub-parameters) */ - public static IBase addParameterToParameters(FhirContext theContext, IBaseParameters theParameters, String theName) { + public static IBase addParameterToParameters( + FhirContext theContext, IBaseParameters theParameters, String theName) { RuntimeResourceDefinition def = theContext.getResourceDefinition(theParameters); BaseRuntimeChildDefinition paramChild = def.getChildByName("parameter"); - BaseRuntimeElementCompositeDefinition paramChildElem = (BaseRuntimeElementCompositeDefinition) paramChild.getChildByName("parameter"); + BaseRuntimeElementCompositeDefinition paramChildElem = + (BaseRuntimeElementCompositeDefinition) paramChild.getChildByName("parameter"); return createParameterRepetition(theContext, theParameters, paramChild, paramChildElem, theName); } public static void addPartCode(FhirContext theContext, IBase theParameter, String theName, String theCode) { - IPrimitiveType value = (IPrimitiveType) theContext.getElementDefinition("code").newInstance(); + IPrimitiveType value = + (IPrimitiveType) theContext.getElementDefinition("code").newInstance(); value.setValue(theCode); addPart(theContext, theParameter, theName, value); } public static void addPartInteger(FhirContext theContext, IBase theParameter, String theName, Integer theInteger) { - IPrimitiveType value = (IPrimitiveType) theContext.getElementDefinition("integer").newInstance(); + IPrimitiveType value = (IPrimitiveType) + theContext.getElementDefinition("integer").newInstance(); value.setValue(theInteger); addPart(theContext, theParameter, theName, value); } public static void addPartString(FhirContext theContext, IBase theParameter, String theName, String theValue) { - IPrimitiveType value = (IPrimitiveType) theContext.getElementDefinition("string").newInstance(); + IPrimitiveType value = (IPrimitiveType) + theContext.getElementDefinition("string").newInstance(); value.setValue(theValue); addPart(theContext, theParameter, theName, value); } public static void addPartUrl(FhirContext theContext, IBase theParameter, String theName, String theCode) { - IPrimitiveType value = (IPrimitiveType) theContext.getElementDefinition("url").newInstance(); + IPrimitiveType value = + (IPrimitiveType) theContext.getElementDefinition("url").newInstance(); 
value.setValue(theCode); addPart(theContext, theParameter, theName, value); @@ -340,16 +401,24 @@ public class ParametersUtil { } public static void addPartDecimal(FhirContext theContext, IBase theParameter, String theName, Double theValue) { - IPrimitiveType value = (IPrimitiveType) theContext.getElementDefinition("decimal").newInstance(); + IPrimitiveType value = (IPrimitiveType) + theContext.getElementDefinition("decimal").newInstance(); value.setValue(theValue == null ? null : new BigDecimal(theValue)); addPart(theContext, theParameter, theName, value); } - public static void addPartCoding(FhirContext theContext, IBase theParameter, String theName, String theSystem, String theCode, String theDisplay) { + public static void addPartCoding( + FhirContext theContext, + IBase theParameter, + String theName, + String theSystem, + String theCode, + String theDisplay) { IBase coding = theContext.getElementDefinition("coding").newInstance(); - BaseRuntimeElementCompositeDefinition codingDef = (BaseRuntimeElementCompositeDefinition) theContext.getElementDefinition(coding.getClass()); + BaseRuntimeElementCompositeDefinition codingDef = + (BaseRuntimeElementCompositeDefinition) theContext.getElementDefinition(coding.getClass()); codingDef.getChildByName("system").getMutator().addValue(coding, createUri(theContext, theSystem)); codingDef.getChildByName("code").getMutator().addValue(coding, createCode(theContext, theCode)); codingDef.getChildByName("display").getMutator().addValue(coding, createString(theContext, theDisplay)); @@ -358,14 +427,17 @@ public class ParametersUtil { } public static void addPart(FhirContext theContext, IBase theParameter, String theName, IBase theValue) { - BaseRuntimeElementCompositeDefinition def = (BaseRuntimeElementCompositeDefinition) theContext.getElementDefinition(theParameter.getClass()); + BaseRuntimeElementCompositeDefinition def = + (BaseRuntimeElementCompositeDefinition) theContext.getElementDefinition(theParameter.getClass()); BaseRuntimeChildDefinition partChild = def.getChildByName("part"); - BaseRuntimeElementCompositeDefinition partChildElem = (BaseRuntimeElementCompositeDefinition) partChild.getChildByName("part"); + BaseRuntimeElementCompositeDefinition partChildElem = + (BaseRuntimeElementCompositeDefinition) partChild.getChildByName("part"); IBase part = partChildElem.newInstance(); partChild.getMutator().addValue(theParameter, part); - IPrimitiveType name = (IPrimitiveType) theContext.getElementDefinition("string").newInstance(); + IPrimitiveType name = (IPrimitiveType) + theContext.getElementDefinition("string").newInstance(); name.setValue(theName); partChildElem.getChildByName("name").getMutator().addValue(part, name); @@ -376,33 +448,40 @@ public class ParametersUtil { } } - public static void addPartResource(FhirContext theContext, IBase theParameter, String theName, IBaseResource theValue) { - BaseRuntimeElementCompositeDefinition def = (BaseRuntimeElementCompositeDefinition) theContext.getElementDefinition(theParameter.getClass()); + public static void addPartResource( + FhirContext theContext, IBase theParameter, String theName, IBaseResource theValue) { + BaseRuntimeElementCompositeDefinition def = + (BaseRuntimeElementCompositeDefinition) theContext.getElementDefinition(theParameter.getClass()); BaseRuntimeChildDefinition partChild = def.getChildByName("part"); - BaseRuntimeElementCompositeDefinition partChildElem = (BaseRuntimeElementCompositeDefinition) partChild.getChildByName("part"); + BaseRuntimeElementCompositeDefinition 
partChildElem = + (BaseRuntimeElementCompositeDefinition) partChild.getChildByName("part"); IBase part = partChildElem.newInstance(); partChild.getMutator().addValue(theParameter, part); - IPrimitiveType name = (IPrimitiveType) theContext.getElementDefinition("string").newInstance(); + IPrimitiveType name = (IPrimitiveType) + theContext.getElementDefinition("string").newInstance(); name.setValue(theName); partChildElem.getChildByName("name").getMutator().addValue(part, name); partChildElem.getChildByName("resource").getMutator().addValue(part, theValue); } - public static List getNamedParameterPartAsString(FhirContext theCtx, IBaseParameters theParameters, String thePartName, String theParameterName) { + public static List getNamedParameterPartAsString( + FhirContext theCtx, IBaseParameters theParameters, String thePartName, String theParameterName) { return extractNamedParameterPartsAsString(theCtx, theParameters, thePartName, theParameterName); } // TODO KHS need to consolidate duplicated functionality that came in from different branches - private static List extractNamedParameterPartsAsString(FhirContext theCtx, IBaseParameters theParameters, String thePartName, String theParameterName) { + private static List extractNamedParameterPartsAsString( + FhirContext theCtx, IBaseParameters theParameters, String thePartName, String theParameterName) { List parameterReps = getParameterReps(theCtx, theParameters); List retVal = new ArrayList<>(); for (IBase nextParameter : parameterReps) { - BaseRuntimeElementCompositeDefinition nextParameterDef = (BaseRuntimeElementCompositeDefinition) theCtx.getElementDefinition(nextParameter.getClass()); + BaseRuntimeElementCompositeDefinition nextParameterDef = + (BaseRuntimeElementCompositeDefinition) theCtx.getElementDefinition(nextParameter.getClass()); Optional> nameValue = getNameValue(nextParameter, nextParameterDef); if (!nameValue.isPresent() || !thePartName.equals(nameValue.get().getValueAsString())) { continue; @@ -411,21 +490,21 @@ public class ParametersUtil { BaseRuntimeChildDefinition partChild = nextParameterDef.getChildByName("part"); List partValues = partChild.getAccessor().getValues(nextParameter); for (IBase partValue : partValues) { - BaseRuntimeElementCompositeDefinition partParameterDef = (BaseRuntimeElementCompositeDefinition) theCtx.getElementDefinition(partValue.getClass()); + BaseRuntimeElementCompositeDefinition partParameterDef = + (BaseRuntimeElementCompositeDefinition) theCtx.getElementDefinition(partValue.getClass()); Optional> partNameValue = getNameValue(partValue, partParameterDef); - if (!partNameValue.isPresent() || !theParameterName.equals(partNameValue.get().getValueAsString())) { + if (!partNameValue.isPresent() + || !theParameterName.equals(partNameValue.get().getValueAsString())) { continue; } BaseRuntimeChildDefinition valueChild = partParameterDef.getChildByName("value[x]"); List valueValues = valueChild.getAccessor().getValues(partValue); - valueValues - .stream() - .filter(t -> t instanceof IPrimitiveType) - .map(t -> ((IPrimitiveType) t)) - .map(t -> defaultIfBlank(t.getValueAsString(), null)) - .filter(t -> t != null) - .forEach(retVal::add); - + valueValues.stream() + .filter(t -> t instanceof IPrimitiveType) + .map(t -> ((IPrimitiveType) t)) + .map(t -> defaultIfBlank(t.getValueAsString(), null)) + .filter(t -> t != null) + .forEach(retVal::add); } } return retVal; @@ -438,14 +517,14 @@ public class ParametersUtil { return parameterChild.getAccessor().getValues(theParameters); } - private static Optional> 
getNameValue(IBase nextParameter, BaseRuntimeElementCompositeDefinition theNextParameterDef) { + private static Optional> getNameValue( + IBase nextParameter, BaseRuntimeElementCompositeDefinition theNextParameterDef) { BaseRuntimeChildDefinition nameChild = theNextParameterDef.getChildByName("name"); List nameValues = nameChild.getAccessor().getValues(nextParameter); - return nameValues - .stream() - .filter(t -> t instanceof IPrimitiveType) - .map(t -> ((IPrimitiveType) t)) - .findFirst(); + return nameValues.stream() + .filter(t -> t instanceof IPrimitiveType) + .map(t -> ((IPrimitiveType) t)) + .findFirst(); } @Nullable @@ -483,7 +562,7 @@ public class ParametersUtil { public static String extractDescription(Annotation[] theParameterAnnotations) { for (Annotation next : theParameterAnnotations) { if (next instanceof Description) { - return extractDescription((Description)next); + return extractDescription((Description) next); } } return null; diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/PhoneticEncoderUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/PhoneticEncoderUtil.java index 57825d9ab48..27d33bf4123 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/PhoneticEncoderUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/PhoneticEncoderUtil.java @@ -59,8 +59,7 @@ public final class PhoneticEncoderUtil { private static final Logger ourLog = LoggerFactory.getLogger(PhoneticEncoderUtil.class); - private PhoneticEncoderUtil() { - } + private PhoneticEncoderUtil() {} /** * Creates the phonetic encoder wrapper from @@ -82,8 +81,7 @@ public final class PhoneticEncoderUtil { IPhoneticEncoder encoder = getEncoderFromString(encoderType, encoderMaxString); if (encoder != null) { return encoder; - } - else { + } else { ourLog.warn("Invalid phonetic param string " + theString); return null; } @@ -105,16 +103,14 @@ public final class PhoneticEncoderUtil { // invalid number parse error } - if (encoderMaxString == null - || encoderMaxString < 0) { + if (encoderMaxString == null || encoderMaxString < 0) { // parse error ourLog.error("Invalid encoder max character length: " + num); encoderType = null; } } // else - parse error - } - else { + } else { encoderType = theString; } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/PrettyPrintWriterWrapper.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/PrettyPrintWriterWrapper.java index 947040f73cc..b4f28dbed3d 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/PrettyPrintWriterWrapper.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/PrettyPrintWriterWrapper.java @@ -19,15 +19,14 @@ */ package ca.uhn.fhir.util; +import org.apache.commons.lang3.StringUtils; + import java.util.HashMap; import java.util.Map; - import javax.xml.namespace.NamespaceContext; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamWriter; -import org.apache.commons.lang3.StringUtils; - public class PrettyPrintWriterWrapper implements XMLStreamWriter { private static final String INDENT_CHAR = " "; @@ -38,7 +37,7 @@ public class PrettyPrintWriterWrapper implements XMLStreamWriter { private int myInsidePre = 0; private XMLStreamWriter myTarget; - private boolean myFirstIndent=true; + private boolean myFirstIndent = true; public PrettyPrintWriterWrapper(XMLStreamWriter target) { myTarget = target; @@ -103,7 +102,8 @@ public class PrettyPrintWriterWrapper implements XMLStreamWriter { @CoverageIgnore @Override - public void writeAttribute(String thePrefix, String theNamespaceURI, String 
theLocalName, String theValue) throws XMLStreamException { + public void writeAttribute(String thePrefix, String theNamespaceURI, String theLocalName, String theValue) + throws XMLStreamException { myTarget.writeAttribute(thePrefix, theNamespaceURI, theLocalName, theValue); } @@ -160,7 +160,8 @@ public class PrettyPrintWriterWrapper implements XMLStreamWriter { @CoverageIgnore @Override - public void writeEmptyElement(String thePrefix, String theLocalName, String theNamespaceURI) throws XMLStreamException { + public void writeEmptyElement(String thePrefix, String theLocalName, String theNamespaceURI) + throws XMLStreamException { indent(); myTarget.writeEmptyElement(thePrefix, theLocalName, theNamespaceURI); } @@ -180,7 +181,6 @@ public class PrettyPrintWriterWrapper implements XMLStreamWriter { decrementAndIndent(); myTarget.writeEndElement(); - } @CoverageIgnore @@ -208,19 +208,19 @@ public class PrettyPrintWriterWrapper implements XMLStreamWriter { @Override public void writeStartDocument() throws XMLStreamException { - myFirstIndent=true; + myFirstIndent = true; myTarget.writeStartDocument(); } @Override public void writeStartDocument(String theVersion) throws XMLStreamException { - myFirstIndent=true; + myFirstIndent = true; myTarget.writeStartDocument(theVersion); } @Override public void writeStartDocument(String theEncoding, String theVersion) throws XMLStreamException { - myFirstIndent=true; + myFirstIndent = true; myTarget.writeStartDocument(theEncoding, theVersion); } @@ -243,7 +243,8 @@ public class PrettyPrintWriterWrapper implements XMLStreamWriter { } @Override - public void writeStartElement(String thePrefix, String theLocalName, String theNamespaceURI) throws XMLStreamException { + public void writeStartElement(String thePrefix, String theLocalName, String theNamespaceURI) + throws XMLStreamException { indentAndAdd(); myTarget.writeStartElement(thePrefix, theLocalName, theNamespaceURI); if (PRE.equals(theLocalName) || myInsidePre > 0) { @@ -291,5 +292,4 @@ public class PrettyPrintWriterWrapper implements XMLStreamWriter { private String repeat(int d, String s) { return StringUtils.repeat(s, d * 3); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/PropertyModifyingHelper.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/PropertyModifyingHelper.java index 2a98697b842..c501d6b25ac 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/PropertyModifyingHelper.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/PropertyModifyingHelper.java @@ -53,7 +53,8 @@ public class PropertyModifyingHelper { */ public PropertyModifyingHelper(FhirContext theFhirContext, IBase theBase) { if (findGetPropertyMethod(theBase) == null) { - throw new IllegalArgumentException(Msg.code(1771) + "Specified base instance does not support property retrieval."); + throw new IllegalArgumentException( + Msg.code(1771) + "Specified base instance does not support property retrieval."); } myBase = theBase; myFhirContext = theFhirContext; @@ -95,9 +96,9 @@ public class PropertyModifyingHelper { */ public String getFields(String... theFiledNames) { return Arrays.stream(theFiledNames) - .map(this::get) - .filter(s -> !StringUtils.isBlank(s)) - .collect(Collectors.joining(getDelimiter())); + .map(this::get) + .filter(s -> !StringUtils.isBlank(s)) + .collect(Collectors.joining(getDelimiter())); } /** @@ -108,9 +109,7 @@ public class PropertyModifyingHelper { * specified delimiter. 
*/ public String get(String thePropertyName) { - return getMultiple(thePropertyName) - .stream() - .collect(Collectors.joining(getDelimiter())); + return getMultiple(thePropertyName).stream().collect(Collectors.joining(getDelimiter())); } /** @@ -129,7 +128,8 @@ public class PropertyModifyingHelper { int hashCode = thePropertyName.hashCode(); setPropertyMethod.invoke(myBase, hashCode, thePropertyName, value); } catch (Exception e) { - throw new IllegalStateException(Msg.code(1772) + String.format("Unable to set property %s on %s", thePropertyName, myBase), e); + throw new IllegalStateException( + Msg.code(1772) + String.format("Unable to set property %s on %s", thePropertyName, myBase), e); } } @@ -145,13 +145,15 @@ public class PropertyModifyingHelper { try { values = (Object[]) getPropertyMethod.invoke(myBase, thePropertyName.hashCode(), thePropertyName, true); } catch (Exception e) { - throw new IllegalStateException(Msg.code(1773) + String.format("Instance %s does not supply property %s", myBase, thePropertyName), e); + throw new IllegalStateException( + Msg.code(1773) + String.format("Instance %s does not supply property %s", myBase, thePropertyName), + e); } return Arrays.stream(values) - .map(String::valueOf) - .filter(s -> !StringUtils.isEmpty(s)) - .collect(Collectors.toList()); + .map(String::valueOf) + .filter(s -> !StringUtils.isEmpty(s)) + .collect(Collectors.toList()); } private Method findGetPropertyMethod(IBase theAddress) { diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ProxyUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ProxyUtil.java index 1470deb778b..24de130bac8 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ProxyUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ProxyUtil.java @@ -39,7 +39,7 @@ public class ProxyUtil { public static T synchronizedProxy(Class theClass, T theInstance) { Validate.isTrue(theClass.isInterface(), "%s is not an interface", theClass); InvocationHandler handler = new SynchronizedHandler(theInstance); - Object object = Proxy.newProxyInstance(theClass.getClassLoader(), new Class[] { theClass }, handler); + Object object = Proxy.newProxyInstance(theClass.getClassLoader(), new Class[] {theClass}, handler); return theClass.cast(object); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ReflectionUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ReflectionUtil.java index ba7bfc28e69..0da15e2c571 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ReflectionUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ReflectionUtil.java @@ -24,7 +24,6 @@ import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import org.apache.commons.lang3.Validate; -import javax.annotation.Nonnull; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; @@ -40,6 +39,7 @@ import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.concurrent.ConcurrentHashMap; +import javax.annotation.Nonnull; public class ReflectionUtil { @@ -81,13 +81,14 @@ public class ReflectionUtil { return retVal; } - private static void populateDeclaredMethodsMap(Class theClazz, HashMap foundMethods, boolean theIncludeMethodsFromSuperclasses) { + private static void populateDeclaredMethodsMap( + Class theClazz, HashMap foundMethods, boolean theIncludeMethodsFromSuperclasses) { Method[] declaredMethods = theClazz.getDeclaredMethods(); for (Method next : declaredMethods) { - if 
(Modifier.isAbstract(next.getModifiers()) || - Modifier.isStatic(next.getModifiers()) || - Modifier.isPrivate(next.getModifiers())) { + if (Modifier.isAbstract(next.getModifiers()) + || Modifier.isStatic(next.getModifiers()) + || Modifier.isPrivate(next.getModifiers())) { continue; } @@ -218,12 +219,19 @@ public class ReflectionUtil { } public static Object newInstanceOfFhirServerType(String theType) { - String errorMessage = "Unable to instantiate server framework. Please make sure that hapi-fhir-server library is on your classpath!"; + String errorMessage = + "Unable to instantiate server framework. Please make sure that hapi-fhir-server library is on your classpath!"; String wantedType = "ca.uhn.fhir.rest.api.server.IFhirVersionServer"; return newInstanceOfType(theType, theType, errorMessage, wantedType, new Class[0], new Object[0]); } - private static Object newInstanceOfType(String theKey, String theType, String errorMessage, String wantedType, Class[] theParameterArgTypes, Object[] theConstructorArgs) { + private static Object newInstanceOfType( + String theKey, + String theType, + String errorMessage, + String wantedType, + Class[] theParameterArgTypes, + Object[] theConstructorArgs) { Object fhirServerVersion = ourFhirServerVersions.get(theKey); if (fhirServerVersion == null) { try { @@ -245,7 +253,8 @@ public class ReflectionUtil { } @SuppressWarnings("unchecked") - public static T newInstanceOrReturnNull(String theClassName, Class theType, Class[] theArgTypes, Object[] theArgs) { + public static T newInstanceOrReturnNull( + String theClassName, Class theType, Class[] theArgTypes, Object[] theArgs) { try { return newInstance(theClassName, theType, theArgTypes, theArgs); } catch (ConfigurationException e) { @@ -264,8 +273,11 @@ public class ReflectionUtil { throw new ConfigurationException(Msg.code(1787) + theClassName + " is not assignable to " + theType); } return (T) clazz.getConstructor(theArgTypes).newInstance(theArgs); - } catch (ClassNotFoundException | NoSuchMethodException | InstantiationException | IllegalAccessException | - InvocationTargetException e) { + } catch (ClassNotFoundException + | NoSuchMethodException + | InstantiationException + | IllegalAccessException + | InvocationTargetException e) { throw new InternalErrorException(Msg.code(2330) + e.getMessage(), e); } } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ResourceReferenceInfo.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ResourceReferenceInfo.java index f759ab16817..cbba0cc286f 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ResourceReferenceInfo.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ResourceReferenceInfo.java @@ -19,19 +19,18 @@ */ package ca.uhn.fhir.util; -import java.util.Iterator; -import java.util.List; -import java.util.Set; - +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.RuntimeResourceDefinition; +import ca.uhn.fhir.context.RuntimeSearchParam; +import ca.uhn.fhir.model.api.Include; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; import org.hl7.fhir.instance.model.api.IBaseReference; import org.hl7.fhir.instance.model.api.IBaseResource; -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.context.RuntimeResourceDefinition; -import ca.uhn.fhir.context.RuntimeSearchParam; -import ca.uhn.fhir.model.api.Include; +import java.util.Iterator; +import java.util.List; +import java.util.Set; /** * Created by Bill de Beaubien on 2/26/2015. 
@@ -42,17 +41,20 @@ public class ResourceReferenceInfo { private IBaseReference myResource; private FhirContext myContext; - public ResourceReferenceInfo(FhirContext theContext, IBaseResource theOwningResource, List thePathToElement, IBaseReference theElement) { + public ResourceReferenceInfo( + FhirContext theContext, + IBaseResource theOwningResource, + List thePathToElement, + IBaseReference theElement) { myContext = theContext; myOwningResource = theContext.getResourceType(theOwningResource); myResource = theElement; if (thePathToElement != null && !thePathToElement.isEmpty()) { StringBuilder sb = new StringBuilder(); - for (Iterator iterator = thePathToElement.iterator(); iterator.hasNext();) { + for (Iterator iterator = thePathToElement.iterator(); iterator.hasNext(); ) { sb.append(iterator.next()); - if (iterator.hasNext()) - sb.append("."); + if (iterator.hasNext()) sb.append("."); } myName = sb.toString(); } else { @@ -77,11 +79,9 @@ public class ResourceReferenceInfo { } public boolean matchesIncludeSet(Set theIncludes) { - if (theIncludes == null) - return false; + if (theIncludes == null) return false; for (Include include : theIncludes) { - if (matchesInclude(include)) - return true; + if (matchesInclude(include)) return true; } return false; } @@ -98,13 +98,13 @@ public class ResourceReferenceInfo { RuntimeResourceDefinition resourceDef = myContext.getResourceDefinition(resourceName); if (resourceDef != null) { RuntimeSearchParam searchParamDef = resourceDef.getSearchParam(paramName); - if (searchParamDef!=null) { + if (searchParamDef != null) { final String completeName = myOwningResource + "." + myName; boolean matched = false; for (String s : searchParamDef.getPathsSplit()) { - if (s.equals(completeName) || - s.startsWith(completeName + ".")) { - matched = true; break; + if (s.equals(completeName) || s.startsWith(completeName + ".")) { + matched = true; + break; } } return matched; diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/SearchParameterUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/SearchParameterUtil.java index 7aa8733742e..9f5f9c0c32e 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/SearchParameterUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/SearchParameterUtil.java @@ -29,12 +29,12 @@ import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nullable; public class SearchParameterUtil { @@ -62,7 +62,8 @@ public class SearchParameterUtil { * 3.1 If that returns >1 result, throw an error * 3.2 If that returns 1 result, return it */ - public static Optional getOnlyPatientSearchParamForResourceType(FhirContext theFhirContext, String theResourceType) { + public static Optional getOnlyPatientSearchParamForResourceType( + FhirContext theFhirContext, String theResourceType) { RuntimeSearchParam myPatientSearchParam = null; RuntimeResourceDefinition runtimeResourceDefinition = theFhirContext.getResourceDefinition(theResourceType); myPatientSearchParam = runtimeResourceDefinition.getSearchParam("patient"); @@ -78,10 +79,12 @@ public class SearchParameterUtil { /** * Given the resource type, fetch all its patient-based search parameter name that's available */ - public static Set getPatientSearchParamsForResourceType(FhirContext 
theFhirContext, String theResourceType) { + public static Set getPatientSearchParamsForResourceType( + FhirContext theFhirContext, String theResourceType) { RuntimeResourceDefinition runtimeResourceDefinition = theFhirContext.getResourceDefinition(theResourceType); - List searchParams = new ArrayList<>(runtimeResourceDefinition.getSearchParamsForCompartmentName("Patient")); + List searchParams = + new ArrayList<>(runtimeResourceDefinition.getSearchParamsForCompartmentName("Patient")); // add patient search parameter for resources that's not in the compartment RuntimeSearchParam myPatientSearchParam = runtimeResourceDefinition.getSearchParam("patient"); if (myPatientSearchParam != null) { @@ -92,7 +95,9 @@ public class SearchParameterUtil { searchParams.add(mySubjectSearchParam); } if (searchParams == null || searchParams.size() == 0) { - String errorMessage = String.format("Resource type [%s] is not eligible for this type of export, as it contains no Patient compartment, and no `patient` or `subject` search parameter", runtimeResourceDefinition.getId()); + String errorMessage = String.format( + "Resource type [%s] is not eligible for this type of export, as it contains no Patient compartment, and no `patient` or `subject` search parameter", + runtimeResourceDefinition.getId()); throw new IllegalArgumentException(Msg.code(2222) + errorMessage); } // deduplicate list of searchParams and get their names @@ -102,60 +107,69 @@ public class SearchParameterUtil { /** * Search the resource definition for a compartment named 'patient' and return its related Search Parameter. */ - public static RuntimeSearchParam getOnlyPatientCompartmentRuntimeSearchParam(FhirContext theFhirContext, String theResourceType) { + public static RuntimeSearchParam getOnlyPatientCompartmentRuntimeSearchParam( + FhirContext theFhirContext, String theResourceType) { RuntimeResourceDefinition resourceDefinition = theFhirContext.getResourceDefinition(theResourceType); return getOnlyPatientCompartmentRuntimeSearchParam(resourceDefinition); } - public static RuntimeSearchParam getOnlyPatientCompartmentRuntimeSearchParam(RuntimeResourceDefinition runtimeResourceDefinition) { + public static RuntimeSearchParam getOnlyPatientCompartmentRuntimeSearchParam( + RuntimeResourceDefinition runtimeResourceDefinition) { RuntimeSearchParam patientSearchParam; List searchParams = runtimeResourceDefinition.getSearchParamsForCompartmentName("Patient"); if (searchParams == null || searchParams.size() == 0) { - String errorMessage = String.format("Resource type [%s] is not eligible for this type of export, as it contains no Patient compartment, and no `patient` or `subject` search parameter", runtimeResourceDefinition.getId()); + String errorMessage = String.format( + "Resource type [%s] is not eligible for this type of export, as it contains no Patient compartment, and no `patient` or `subject` search parameter", + runtimeResourceDefinition.getId()); throw new IllegalArgumentException(Msg.code(1774) + errorMessage); } else if (searchParams.size() == 1) { patientSearchParam = searchParams.get(0); } else { - String errorMessage = String.format("Resource type %s has more than one Search Param which references a patient compartment. We are unable to disambiguate which patient search parameter we should be searching by.", runtimeResourceDefinition.getId()); + String errorMessage = String.format( + "Resource type %s has more than one Search Param which references a patient compartment. 
We are unable to disambiguate which patient search parameter we should be searching by.", + runtimeResourceDefinition.getId()); throw new IllegalArgumentException(Msg.code(1775) + errorMessage); } return patientSearchParam; } - public static List getAllPatientCompartmentRuntimeSearchParamsForResourceType(FhirContext theFhirContext, String theResourceType) { + public static List getAllPatientCompartmentRuntimeSearchParamsForResourceType( + FhirContext theFhirContext, String theResourceType) { RuntimeResourceDefinition runtimeResourceDefinition = theFhirContext.getResourceDefinition(theResourceType); return getAllPatientCompartmentRuntimeSearchParams(runtimeResourceDefinition); } public static List getAllPatientCompartmenRuntimeSearchParams(FhirContext theFhirContext) { - return theFhirContext.getResourceTypes() - .stream() - .flatMap(type -> getAllPatientCompartmentRuntimeSearchParamsForResourceType(theFhirContext, type).stream()) - .collect(Collectors.toList()); + return theFhirContext.getResourceTypes().stream() + .flatMap(type -> + getAllPatientCompartmentRuntimeSearchParamsForResourceType(theFhirContext, type).stream()) + .collect(Collectors.toList()); } public static Set getAllResourceTypesThatAreInPatientCompartment(FhirContext theFhirContext) { return theFhirContext.getResourceTypes().stream() - .filter(type -> getAllPatientCompartmentRuntimeSearchParamsForResourceType(theFhirContext, type).size() > 0) - .collect(Collectors.toSet()); - + .filter(type -> getAllPatientCompartmentRuntimeSearchParamsForResourceType(theFhirContext, type) + .size() + > 0) + .collect(Collectors.toSet()); } - private static List getAllPatientCompartmentRuntimeSearchParams(RuntimeResourceDefinition theRuntimeResourceDefinition) { + private static List getAllPatientCompartmentRuntimeSearchParams( + RuntimeResourceDefinition theRuntimeResourceDefinition) { List patient = theRuntimeResourceDefinition.getSearchParamsForCompartmentName("Patient"); return patient; } - /** * Return true if any search parameter in the resource can point at a patient, false otherwise */ public static boolean isResourceTypeInPatientCompartment(FhirContext theFhirContext, String theResourceType) { RuntimeResourceDefinition runtimeResourceDefinition = theFhirContext.getResourceDefinition(theResourceType); - return getAllPatientCompartmentRuntimeSearchParams(runtimeResourceDefinition).size() > 0; + return getAllPatientCompartmentRuntimeSearchParams(runtimeResourceDefinition) + .size() + > 0; } - @Nullable public static String getCode(FhirContext theContext, IBaseResource theResource) { return getStringChild(theContext, theResource, "code"); @@ -177,12 +191,11 @@ public class SearchParameterUtil { RuntimeResourceDefinition def = theFhirContext.getResourceDefinition(theResource); BaseRuntimeChildDefinition base = def.getChildByName(theChildName); - return base - .getAccessor() - .getFirstValueOrNull(theResource) - .map(t -> ((IPrimitiveType) t)) - .map(t -> t.getValueAsString()) - .orElse(null); + return base.getAccessor() + .getFirstValueOrNull(theResource) + .map(t -> ((IPrimitiveType) t)) + .map(t -> t.getValueAsString()) + .orElse(null); } public static String stripModifier(String theSearchParam) { diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/StopWatch.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/StopWatch.java index fd8fdcbb223..618207d0f0c 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/StopWatch.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/StopWatch.java @@ -266,9 +266,7 @@ public class 
StopWatch { public void startTask(String theTaskName) { endCurrentTask(); Validate.notBlank(theTaskName, "Task name must not be blank"); - myCurrentTask = new TaskTiming() - .setTaskName(theTaskName) - .setStart(now()); + myCurrentTask = new TaskTiming().setTaskName(theTaskName).setStart(now()); myTasks.add(myCurrentTask); } @@ -329,7 +327,8 @@ public class StopWatch { /** * Append a right-aligned and zero-padded numeric value to a `StringBuilder`. */ - static void appendRightAlignedNumber(StringBuilder theStringBuilder, String thePrefix, int theNumberOfDigits, long theValueToAppend) { + static void appendRightAlignedNumber( + StringBuilder theStringBuilder, String thePrefix, int theNumberOfDigits, long theValueToAppend) { theStringBuilder.append(thePrefix); if (theNumberOfDigits > 1) { int pad = (theNumberOfDigits - 1); @@ -397,9 +396,12 @@ public class StopWatch { } } else { long millisAsLong = (long) theMillis; - appendRightAlignedNumber(buf, "", 2, ((millisAsLong % DateUtils.MILLIS_PER_DAY) / DateUtils.MILLIS_PER_HOUR)); - appendRightAlignedNumber(buf, ":", 2, ((millisAsLong % DateUtils.MILLIS_PER_HOUR) / DateUtils.MILLIS_PER_MINUTE)); - appendRightAlignedNumber(buf, ":", 2, ((millisAsLong % DateUtils.MILLIS_PER_MINUTE) / DateUtils.MILLIS_PER_SECOND)); + appendRightAlignedNumber( + buf, "", 2, ((millisAsLong % DateUtils.MILLIS_PER_DAY) / DateUtils.MILLIS_PER_HOUR)); + appendRightAlignedNumber( + buf, ":", 2, ((millisAsLong % DateUtils.MILLIS_PER_HOUR) / DateUtils.MILLIS_PER_MINUTE)); + appendRightAlignedNumber( + buf, ":", 2, ((millisAsLong % DateUtils.MILLIS_PER_MINUTE) / DateUtils.MILLIS_PER_SECOND)); if (theMillis <= DateUtils.MILLIS_PER_MINUTE) { appendRightAlignedNumber(buf, ".", 3, (millisAsLong % DateUtils.MILLIS_PER_SECOND)); } @@ -415,7 +417,7 @@ public class StopWatch { } @VisibleForTesting - static public void setNowForUnitTest(Long theNowForUnitTest) { + public static void setNowForUnitTest(Long theNowForUnitTest) { ourNowForUnitTest = theNowForUnitTest; } @@ -458,5 +460,4 @@ public class StopWatch { return this; } } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/StringUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/StringUtil.java index 6daa93680f9..34b092ced88 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/StringUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/StringUtil.java @@ -19,11 +19,11 @@ */ package ca.uhn.fhir.util; -import javax.annotation.Nonnull; import java.io.CharArrayWriter; import java.nio.charset.StandardCharsets; import java.text.Normalizer; import java.util.Arrays; +import javax.annotation.Nonnull; public class StringUtil { @@ -110,7 +110,11 @@ public class StringUtil { StringBuilder schemaOutput = new StringBuilder(); int index = 0; for (String next : theInput.split("\\n")) { - schemaOutput.append(index++).append(": ").append(next.replace("\r", "")).append("\n"); + schemaOutput + .append(index++) + .append(": ") + .append(next.replace("\r", "")) + .append("\n"); } return schemaOutput.toString(); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/SubscriptionUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/SubscriptionUtil.java index 67ec84a5d03..f22fc5552a7 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/SubscriptionUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/SubscriptionUtil.java @@ -39,7 +39,8 @@ import java.util.Objects; */ public class SubscriptionUtil { - private static void populatePrimitiveValue(FhirContext theContext, IBaseResource theSubscription, 
String theChildName, String theValue) { + private static void populatePrimitiveValue( + FhirContext theContext, IBaseResource theSubscription, String theChildName, String theValue) { RuntimeResourceDefinition def = theContext.getResourceDefinition(theSubscription); Validate.isTrue(def.getName().equals("Subscription"), "theResource is not a subscription"); BaseRuntimeChildDefinition statusChild = def.getChildByName(theChildName); @@ -66,7 +67,8 @@ public class SubscriptionUtil { public static boolean isCrossPartition(IBaseResource theSubscription) { if (theSubscription instanceof IBaseHasExtensions) { - IBaseExtension extension = ExtensionUtil.getExtensionByUrl(theSubscription, HapiExtensions.EXTENSION_SUBSCRIPTION_CROSS_PARTITION); + IBaseExtension extension = ExtensionUtil.getExtensionByUrl( + theSubscription, HapiExtensions.EXTENSION_SUBSCRIPTION_CROSS_PARTITION); if (Objects.nonNull(extension)) { try { IBaseBooleanDatatype booleanDatatype = (IBaseBooleanDatatype) (extension.getValue()); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TaskChunker.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TaskChunker.java index d10ad74594e..49799cc2f6e 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TaskChunker.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TaskChunker.java @@ -49,5 +49,4 @@ public class TaskChunker { theBatchConsumer.accept(batch); } } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TerserUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TerserUtil.java index f686b4e4c24..11b416fdbbf 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TerserUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TerserUtil.java @@ -52,7 +52,7 @@ public final class TerserUtil { * Exclude for id, identifier and meta fields of a resource. */ public static final Collection IDS_AND_META_EXCLUDES = - Collections.unmodifiableSet(Stream.of("id", "identifier", "meta").collect(Collectors.toSet())); + Collections.unmodifiableSet(Stream.of("id", "identifier", "meta").collect(Collectors.toSet())); /** * Exclusion predicate for id, identifier, meta fields. */ @@ -66,17 +66,21 @@ public final class TerserUtil { * Exclusion predicate for id/identifier, meta and fields with empty values. This ensures that source / target resources, * empty source fields will not results in erasure of target fields. */ - public static final Predicate> EXCLUDE_IDS_META_AND_EMPTY = new Predicate>() { - @Override - public boolean test(Triple theTriple) { - if (!EXCLUDE_IDS_AND_META.test(theTriple.getLeft().getElementName())) { - return false; - } - BaseRuntimeChildDefinition childDefinition = theTriple.getLeft(); - boolean isSourceFieldEmpty = childDefinition.getAccessor().getValues(theTriple.getMiddle()).isEmpty(); - return !isSourceFieldEmpty; - } - }; + public static final Predicate> EXCLUDE_IDS_META_AND_EMPTY = + new Predicate>() { + @Override + public boolean test(Triple theTriple) { + if (!EXCLUDE_IDS_AND_META.test(theTriple.getLeft().getElementName())) { + return false; + } + BaseRuntimeChildDefinition childDefinition = theTriple.getLeft(); + boolean isSourceFieldEmpty = childDefinition + .getAccessor() + .getValues(theTriple.getMiddle()) + .isEmpty(); + return !isSourceFieldEmpty; + } + }; /** * Exclusion predicate for keeping all fields. 
*/ @@ -86,18 +90,23 @@ public final class TerserUtil { return true; } }; + private static final Logger ourLog = getLogger(TerserUtil.class); private static final String EQUALS_DEEP = "equalsDeep"; - private TerserUtil() { - } + private TerserUtil() {} /** * Given an Child Definition of `identifier`, a R4/DSTU3 EID Identifier, and a new resource, clone the EID into that resources' identifier list. */ - public static void cloneEidIntoResource(FhirContext theFhirContext, BaseRuntimeChildDefinition theIdentifierDefinition, IBase theEid, IBase theResourceToCloneEidInto) { + public static void cloneEidIntoResource( + FhirContext theFhirContext, + BaseRuntimeChildDefinition theIdentifierDefinition, + IBase theEid, + IBase theResourceToCloneEidInto) { // FHIR choice types - fields within fhir where we have a choice of ids - BaseRuntimeElementCompositeDefinition childIdentifier = (BaseRuntimeElementCompositeDefinition) theIdentifierDefinition.getChildByName(FIELD_NAME_IDENTIFIER); + BaseRuntimeElementCompositeDefinition childIdentifier = (BaseRuntimeElementCompositeDefinition) + theIdentifierDefinition.getChildByName(FIELD_NAME_IDENTIFIER); IBase resourceNewIdentifier = childIdentifier.newInstance(); FhirTerser terser = theFhirContext.newTerser(); @@ -165,7 +174,8 @@ public final class TerserUtil { * @param theTo Resource to clone the specified field to * @param theField Field name to be copied */ - public static void cloneCompositeField(FhirContext theFhirContext, IBaseResource theFrom, IBaseResource theTo, String theField) { + public static void cloneCompositeField( + FhirContext theFhirContext, IBaseResource theFrom, IBaseResource theTo, String theField) { FhirTerser terser = theFhirContext.newTerser(); RuntimeResourceDefinition definition = theFhirContext.getResourceDefinition(theFrom); @@ -231,7 +241,8 @@ public final class TerserUtil { try { return (Boolean) theMethod.invoke(theItem1, theItem2); } catch (Exception e) { - throw new RuntimeException(Msg.code(1746) + String.format("Unable to compare equality via %s", EQUALS_DEEP), e); + throw new RuntimeException( + Msg.code(1746) + String.format("Unable to compare equality via %s", EQUALS_DEEP), e); } } return theItem1.equals(theItem2); @@ -263,9 +274,13 @@ public final class TerserUtil { * @param theTo The resource to merge the fields into * @param theFieldNameInclusion Inclusion strategy that checks if a given field should be replaced */ - public static void replaceFields(FhirContext theFhirContext, IBaseResource theFrom, IBaseResource theTo, Predicate theFieldNameInclusion) { - Predicate> predicate - = (t) -> theFieldNameInclusion.test(t.getLeft().getElementName()); + public static void replaceFields( + FhirContext theFhirContext, + IBaseResource theFrom, + IBaseResource theTo, + Predicate theFieldNameInclusion) { + Predicate> predicate = + (t) -> theFieldNameInclusion.test(t.getLeft().getElementName()); replaceFieldsByPredicate(theFhirContext, theFrom, theTo, predicate); } @@ -278,7 +293,11 @@ public final class TerserUtil { * @param theTo The resource to merge the fields into * @param thePredicate Predicate that checks if a given field should be replaced */ - public static void replaceFieldsByPredicate(FhirContext theFhirContext, IBaseResource theFrom, IBaseResource theTo, Predicate> thePredicate) { + public static void replaceFieldsByPredicate( + FhirContext theFhirContext, + IBaseResource theFrom, + IBaseResource theTo, + Predicate> thePredicate) { RuntimeResourceDefinition definition = theFhirContext.getResourceDefinition(theFrom); 
FhirTerser terser = theFhirContext.newTerser(); for (BaseRuntimeChildDefinition childDefinition : definition.getChildrenAndExtension()) { @@ -308,10 +327,15 @@ public final class TerserUtil { * @param theFrom The resource to replace the field from * @param theTo The resource to replace the field on */ - public static void replaceField(FhirContext theFhirContext, String theFieldName, IBaseResource theFrom, IBaseResource theTo) { + public static void replaceField( + FhirContext theFhirContext, String theFieldName, IBaseResource theFrom, IBaseResource theTo) { RuntimeResourceDefinition definition = theFhirContext.getResourceDefinition(theFrom); Validate.notNull(definition); - replaceField(theFhirContext.newTerser(), theFrom, theTo, theFhirContext.getResourceDefinition(theFrom).getChildByName(theFieldName)); + replaceField( + theFhirContext.newTerser(), + theFrom, + theTo, + theFhirContext.getResourceDefinition(theFrom).getChildByName(theFieldName)); } /** @@ -322,7 +346,8 @@ public final class TerserUtil { * @param theFieldName */ public static void clearField(FhirContext theFhirContext, IBaseResource theResource, String theFieldName) { - BaseRuntimeChildDefinition childDefinition = getBaseRuntimeChildDefinition(theFhirContext, theFieldName, theResource); + BaseRuntimeChildDefinition childDefinition = + getBaseRuntimeChildDefinition(theFhirContext, theFieldName, theResource); clear(childDefinition.getAccessor().getValues(theResource)); } @@ -350,7 +375,8 @@ public final class TerserUtil { * @param theResource The resource to set the values on * @param theValues The values to set on the resource child field name */ - public static void setField(FhirContext theFhirContext, String theFieldName, IBaseResource theResource, IBase... theValues) { + public static void setField( + FhirContext theFhirContext, String theFieldName, IBaseResource theResource, IBase... theValues) { setField(theFhirContext, theFhirContext.newTerser(), theFieldName, theResource, theValues); } @@ -365,15 +391,24 @@ public final class TerserUtil { * @param theResource The resource to set the values on * @param theValues The values to set on the resource child field name */ - public static void setField(FhirContext theFhirContext, FhirTerser theTerser, String theFieldName, IBaseResource theResource, IBase... theValues) { - BaseRuntimeChildDefinition childDefinition = getBaseRuntimeChildDefinition(theFhirContext, theFieldName, theResource); + public static void setField( + FhirContext theFhirContext, + FhirTerser theTerser, + String theFieldName, + IBaseResource theResource, + IBase... theValues) { + BaseRuntimeChildDefinition childDefinition = + getBaseRuntimeChildDefinition(theFhirContext, theFieldName, theResource); List theFromFieldValues = childDefinition.getAccessor().getValues(theResource); if (theFromFieldValues.isEmpty()) { for (IBase value : theValues) { try { childDefinition.getMutator().addValue(theResource, value); } catch (UnsupportedOperationException e) { - ourLog.warn("Resource {} does not support multiple values, but an attempt to set {} was made. Setting the first item only", theResource, theValues); + ourLog.warn( + "Resource {} does not support multiple values, but an attempt to set {} was made. 
Setting the first item only", + theResource, + theValues); childDefinition.getMutator().setValue(theResource, value); break; } @@ -392,7 +427,8 @@ public final class TerserUtil { * @param theResource The resource on which the value should be set * @param theValue The value to set */ - public static void setFieldByFhirPath(FhirTerser theTerser, String theFhirPath, IBaseResource theResource, IBase theValue) { + public static void setFieldByFhirPath( + FhirTerser theTerser, String theFhirPath, IBaseResource theResource, IBase theValue) { List theFromFieldValues = theTerser.getValues(theResource, theFhirPath, true, false); for (IBase theFromFieldValue : theFromFieldValues) { theTerser.cloneInto(theValue, theFromFieldValue, true); @@ -407,7 +443,8 @@ public final class TerserUtil { * @param theResource The resource on which the value should be set * @param theValue The value to set */ - public static void setFieldByFhirPath(FhirContext theFhirContext, String theFhirPath, IBaseResource theResource, IBase theValue) { + public static void setFieldByFhirPath( + FhirContext theFhirContext, String theFhirPath, IBaseResource theResource, IBase theValue) { setFieldByFhirPath(theFhirContext.newTerser(), theFhirPath, theResource, theValue); } @@ -439,7 +476,11 @@ public final class TerserUtil { return values.get(0); } - private static void replaceField(FhirTerser theTerser, IBaseResource theFrom, IBaseResource theTo, BaseRuntimeChildDefinition childDefinition) { + private static void replaceField( + FhirTerser theTerser, + IBaseResource theFrom, + IBaseResource theTo, + BaseRuntimeChildDefinition childDefinition) { List fromValues = childDefinition.getAccessor().getValues(theFrom); List toValues = childDefinition.getAccessor().getValues(theTo); @@ -461,7 +502,8 @@ public final class TerserUtil { * @param theFrom Resource to merge the specified field from * @param theTo Resource to merge the specified field into */ - public static void mergeFieldsExceptIdAndMeta(FhirContext theFhirContext, IBaseResource theFrom, IBaseResource theTo) { + public static void mergeFieldsExceptIdAndMeta( + FhirContext theFhirContext, IBaseResource theFrom, IBaseResource theTo) { mergeFields(theFhirContext, theFrom, theTo, EXCLUDE_IDS_AND_META); } @@ -474,7 +516,11 @@ public final class TerserUtil { * @param theTo Resource to merge the specified field into * @param inclusionStrategy Predicate to test which fields should be merged */ - public static void mergeFields(FhirContext theFhirContext, IBaseResource theFrom, IBaseResource theTo, Predicate inclusionStrategy) { + public static void mergeFields( + FhirContext theFhirContext, + IBaseResource theFrom, + IBaseResource theTo, + Predicate inclusionStrategy) { FhirTerser terser = theFhirContext.newTerser(); RuntimeResourceDefinition definition = theFhirContext.getResourceDefinition(theFrom); @@ -499,7 +545,8 @@ public final class TerserUtil { * @param theFrom Resource to merge the specified field from * @param theTo Resource to merge the specified field into */ - public static void mergeField(FhirContext theFhirContext, String theFieldName, IBaseResource theFrom, IBaseResource theTo) { + public static void mergeField( + FhirContext theFhirContext, String theFieldName, IBaseResource theFrom, IBaseResource theTo) { mergeField(theFhirContext, theFhirContext.newTerser(), theFieldName, theFrom, theTo); } @@ -513,8 +560,14 @@ public final class TerserUtil { * @param theFrom Resource to merge the specified field from * @param theTo Resource to merge the specified field into */ - public 
static void mergeField(FhirContext theFhirContext, FhirTerser theTerser, String theFieldName, IBaseResource theFrom, IBaseResource theTo) { - BaseRuntimeChildDefinition childDefinition = getBaseRuntimeChildDefinition(theFhirContext, theFieldName, theFrom); + public static void mergeField( + FhirContext theFhirContext, + FhirTerser theTerser, + String theFieldName, + IBaseResource theFrom, + IBaseResource theTo) { + BaseRuntimeChildDefinition childDefinition = + getBaseRuntimeChildDefinition(theFhirContext, theFieldName, theFrom); List theFromFieldValues = childDefinition.getAccessor().getValues(theFrom); List theToFieldValues = childDefinition.getAccessor().getValues(theTo); @@ -522,7 +575,8 @@ public final class TerserUtil { mergeFields(theTerser, theTo, childDefinition, theFromFieldValues, theToFieldValues); } - private static BaseRuntimeChildDefinition getBaseRuntimeChildDefinition(FhirContext theFhirContext, String theFieldName, IBaseResource theFrom) { + private static BaseRuntimeChildDefinition getBaseRuntimeChildDefinition( + FhirContext theFhirContext, String theFieldName, IBaseResource theFrom) { RuntimeResourceDefinition definition = theFhirContext.getResourceDefinition(theFrom); BaseRuntimeChildDefinition childDefinition = definition.getChildByName(theFieldName); Validate.notNull(childDefinition); @@ -539,10 +593,15 @@ public final class TerserUtil { * @param theConstructorParam Optional constructor param * @return Returns the new element with the given value if configured */ - private static IBase newElement(FhirTerser theFhirTerser, BaseRuntimeChildDefinition theChildDefinition, IBase theFromFieldValue, Object theConstructorParam) { + private static IBase newElement( + FhirTerser theFhirTerser, + BaseRuntimeChildDefinition theChildDefinition, + IBase theFromFieldValue, + Object theConstructorParam) { BaseRuntimeElementDefinition runtimeElementDefinition; if (theChildDefinition instanceof RuntimeChildChoiceDefinition) { - runtimeElementDefinition = theChildDefinition.getChildElementDefinitionByDatatype(theFromFieldValue.getClass()); + runtimeElementDefinition = + theChildDefinition.getChildElementDefinitionByDatatype(theFromFieldValue.getClass()); } else { runtimeElementDefinition = theChildDefinition.getChildByName(theChildDefinition.getElementName()); } @@ -556,7 +615,12 @@ public final class TerserUtil { } } - private static void mergeFields(FhirTerser theTerser, IBaseResource theTo, BaseRuntimeChildDefinition childDefinition, List theFromFieldValues, List theToFieldValues) { + private static void mergeFields( + FhirTerser theTerser, + IBaseResource theTo, + BaseRuntimeChildDefinition childDefinition, + List theFromFieldValues, + List theToFieldValues) { for (IBase theFromFieldValue : theFromFieldValues) { if (contains(theFromFieldValue, theToFieldValues)) { continue; @@ -567,10 +631,11 @@ public final class TerserUtil { try { Method copyMethod = getMethod(theFromFieldValue, "copy"); if (copyMethod != null) { - newFieldValue = (IBase) copyMethod.invoke(theFromFieldValue, new Object[]{}); + newFieldValue = (IBase) copyMethod.invoke(theFromFieldValue, new Object[] {}); } } catch (Throwable t) { - ((IPrimitiveType) newFieldValue).setValueAsString(((IPrimitiveType) theFromFieldValue).getValueAsString()); + ((IPrimitiveType) newFieldValue) + .setValueAsString(((IPrimitiveType) theFromFieldValue).getValueAsString()); } } else { theTerser.cloneInto(theFromFieldValue, newFieldValue, true); @@ -624,7 +689,8 @@ public final class TerserUtil { * @param Base element type * @return 
Returns a new instance of the element with the specified initial value */ - public static T newElement(FhirContext theFhirContext, String theElementType, Object theConstructorParam) { + public static T newElement( + FhirContext theFhirContext, String theElementType, Object theConstructorParam) { BaseRuntimeElementDefinition def = theFhirContext.getElementDefinition(theElementType); Validate.notNull(def); return (T) def.newInstance(theConstructorParam); @@ -652,7 +718,8 @@ public final class TerserUtil { * @param Type of the resource * @return Returns a new instance of the resource */ - public static T newResource(FhirContext theFhirContext, String theResourceName, Object theConstructorParam) { + public static T newResource( + FhirContext theFhirContext, String theResourceName, Object theConstructorParam) { RuntimeResourceDefinition def = theFhirContext.getResourceDefinition(theResourceName); return (T) def.newInstance(theConstructorParam); } @@ -665,10 +732,13 @@ public final class TerserUtil { * @param theTargetFieldName Name of the backbone element in the resource * @return Returns a new instance of the element */ - public static IBaseBackboneElement instantiateBackboneElement(FhirContext theFhirContext, String theTargetResourceName, String theTargetFieldName) { - BaseRuntimeElementDefinition targetParentElementDefinition = theFhirContext.getResourceDefinition(theTargetResourceName); + public static IBaseBackboneElement instantiateBackboneElement( + FhirContext theFhirContext, String theTargetResourceName, String theTargetFieldName) { + BaseRuntimeElementDefinition targetParentElementDefinition = + theFhirContext.getResourceDefinition(theTargetResourceName); BaseRuntimeChildDefinition childDefinition = targetParentElementDefinition.getChildByName(theTargetFieldName); - return (IBaseBackboneElement) childDefinition.getChildByName(theTargetFieldName).newInstance(); + return (IBaseBackboneElement) + childDefinition.getChildByName(theTargetFieldName).newInstance(); } private static void clear(List values) { @@ -682,5 +752,4 @@ public final class TerserUtil { ourLog.debug("Unable to clear values " + String.valueOf(values), t); } } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TerserUtilHelper.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TerserUtilHelper.java index 5c40e3f0d7f..4469ac52a75 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TerserUtilHelper.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TerserUtilHelper.java @@ -200,5 +200,4 @@ public class TerserUtilHelper { public FhirContext getContext() { return myContext; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TestUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TestUtil.java index d0bd276488a..36ff4c3f536 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TestUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TestUtil.java @@ -56,7 +56,7 @@ public class TestUtil { * environment */ public static void doRandomizeLocaleAndTimezone() { -// Locale[] availableLocales = {Locale.CANADA, Locale.GERMANY, Locale.TAIWAN}; + // Locale[] availableLocales = {Locale.CANADA, Locale.GERMANY, Locale.TAIWAN}; Locale[] availableLocales = {Locale.US}; Locale.setDefault(availableLocales[(int) (Math.random() * availableLocales.length)]); ourLog.info("Tests are running in locale: " + Locale.getDefault().getDisplayName()); @@ -79,7 +79,6 @@ public class TestUtil { ourLog.info("Tests are using time zone: {}", TimeZone.getDefault().getID()); } - /** * THIS IS FOR UNIT 
TESTS ONLY - DO NOT CALL THIS METHOD FROM USER CODE *

    @@ -140,7 +139,6 @@ public class TestUtil { sleepAtLeast(theMillis, true); } - @SuppressWarnings("BusyWait") public static void sleepAtLeast(long theMillis, boolean theLogProgress) { long start = System.currentTimeMillis(); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TimeoutManager.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TimeoutManager.java index a7bd39d9a37..878aaa1d187 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TimeoutManager.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TimeoutManager.java @@ -57,7 +57,8 @@ public class TimeoutManager { } if (myStopWatch.getMillis() > myErrorTimeout.toMillis() && !errored) { if (HapiSystemProperties.isUnitTestModeEnabled()) { - throw new TimeoutException(Msg.code(2133) + myServiceName + " timed out after running for " + myStopWatch); + throw new TimeoutException( + Msg.code(2133) + myServiceName + " timed out after running for " + myStopWatch); } else { ourLog.error(myServiceName + " has run for {}", myStopWatch); errored = true; diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/UrlPathTokenizer.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/UrlPathTokenizer.java index 261c869b1ad..f94e7443c75 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/UrlPathTokenizer.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/UrlPathTokenizer.java @@ -45,5 +45,4 @@ public class UrlPathTokenizer { public String nextTokenUnescapedAndSanitized() { return UrlUtil.sanitizeUrlPart(UrlUtil.unescape(myTok.nextToken())); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/UrlUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/UrlUtil.java index 5c4d6f5eb24..9d35a50bf18 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/UrlUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/UrlUtil.java @@ -34,8 +34,6 @@ import org.apache.http.client.utils.URLEncodedUtils; import org.apache.http.message.BasicNameValuePair; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.io.UnsupportedEncodingException; import java.net.MalformedURLException; import java.net.URI; @@ -50,6 +48,8 @@ import java.util.Map; import java.util.Map.Entry; import java.util.StringTokenizer; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.defaultIfBlank; import static org.apache.commons.lang3.StringUtils.defaultString; @@ -67,8 +67,7 @@ public class UrlUtil { /** * Non instantiable */ - private UrlUtil() { - } + private UrlUtil() {} /** * Cleans up a value that will be serialized as an HTTP header. 
This method: @@ -102,7 +101,8 @@ public class UrlUtil { try { return new URL(new URL(theBase), theEndpoint).toString(); } catch (MalformedURLException e) { - ourLog.warn("Failed to resolve relative URL[" + theEndpoint + "] against absolute base[" + theBase + "]", e); + ourLog.warn( + "Failed to resolve relative URL[" + theEndpoint + "] against absolute base[" + theBase + "]", e); return theEndpoint; } } @@ -126,7 +126,9 @@ public class UrlUtil { return theExtensionUrl; } - if (!theParentExtensionUrl.substring(0, parentLastSlashIdx).equals(theExtensionUrl.substring(0, parentLastSlashIdx))) { + if (!theParentExtensionUrl + .substring(0, parentLastSlashIdx) + .equals(theExtensionUrl.substring(0, parentLastSlashIdx))) { return theExtensionUrl; } @@ -188,7 +190,6 @@ public class UrlUtil { if (slashIdx != -1) { resourceType = new IdDt(resourceType).getResourceType(); } - } try { @@ -202,7 +203,6 @@ public class UrlUtil { return resourceType; } - /** * URL encode a value according to RFC 3986 *

    @@ -226,10 +226,7 @@ public class UrlUtil { * values in a collection */ public static List escapeUrlParams(@Nonnull Collection theUnescaped) { - return theUnescaped - .stream() - .map(t -> PARAMETER_ESCAPER.escape(t)) - .collect(Collectors.toList()); + return theUnescaped.stream().map(t -> PARAMETER_ESCAPER.escape(t)).collect(Collectors.toList()); } public static boolean isAbsolute(String theValue) { @@ -298,7 +295,8 @@ public class UrlUtil { return true; } - public static RuntimeResourceDefinition parseUrlResourceType(FhirContext theCtx, String theUrl) throws DataFormatException { + public static RuntimeResourceDefinition parseUrlResourceType(FhirContext theCtx, String theUrl) + throws DataFormatException { String url = theUrl; int paramIndex = url.indexOf('?'); @@ -327,7 +325,6 @@ public class UrlUtil { query = query.substring(1); } - StringTokenizer tok = new StringTokenizer(query, "&"); while (tok.hasMoreTokens()) { String nextToken = tok.nextToken(); @@ -450,7 +447,6 @@ public class UrlUtil { } return retVal; - } /** @@ -492,10 +488,10 @@ public class UrlUtil { char nextChar = theString.charAt(j); switch (nextChar) { - /* - * NB: If you add a constant here, you also need to add it - * to isNeedsSanitization()!! - */ + /* + * NB: If you add a constant here, you also need to add it + * to isNeedsSanitization()!! + */ case '\'': buffer.append("'"); break; @@ -520,7 +516,6 @@ public class UrlUtil { } break; } - } // for build escaped string return buffer.toString(); @@ -582,25 +577,12 @@ public class UrlUtil { matchUrl = matchUrl.substring(questionMarkIndex + 1); } - final String[] searchList = new String[]{ - "+", - "|", - "=>=", - "=<=", - "=>", - "=<" - }; - final String[] replacementList = new String[]{ - "%2B", - "%7C", - "=%3E%3D", - "=%3C%3D", - "=%3E", - "=%3C" - }; + final String[] searchList = new String[] {"+", "|", "=>=", "=<=", "=>", "=<"}; + final String[] replacementList = new String[] {"%2B", "%7C", "=%3E%3D", "=%3C%3D", "=%3E", "=%3C"}; matchUrl = StringUtils.replaceEach(matchUrl, searchList, replacementList); if (matchUrl.contains(" ")) { - throw new InvalidRequestException(Msg.code(1744) + "Failed to parse match URL[" + theMatchUrl + "] - URL is invalid (must not contain spaces)"); + throw new InvalidRequestException(Msg.code(1744) + "Failed to parse match URL[" + theMatchUrl + + "] - URL is invalid (must not contain spaces)"); } parameters = URLEncodedUtils.parse((matchUrl), Constants.CHARSET_UTF8, '&'); @@ -611,7 +593,8 @@ public class UrlUtil { for (int i = 0; i < parameters.size(); i++) { NameValuePair next = parameters.get(i); if (next.getName().equals("email") && next.getValue().contains(" ")) { - BasicNameValuePair newPair = new BasicNameValuePair(next.getName(), next.getValue().replace(' ', '+')); + BasicNameValuePair newPair = + new BasicNameValuePair(next.getName(), next.getValue().replace(' ', '+')); parameters.set(i, newPair); } } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ValidateUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ValidateUtil.java index 324a0399c6e..2c403f551d4 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ValidateUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ValidateUtil.java @@ -93,5 +93,4 @@ public class ValidateUtil { throw new InvalidRequestException(Msg.code(1770) + theMessage); } } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java index 9a80311f3ed..8dd3b16225c 100644 --- 
a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java @@ -118,8 +118,7 @@ public enum VersionEnum { V6_5_0, V6_6_0, V6_7_0, - V6_8_0 - ; + V6_8_0; public static VersionEnum latestVersion() { VersionEnum[] values = VersionEnum.values(); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionUtil.java index 722a71ee4dc..74a38a9bbb8 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionUtil.java @@ -92,5 +92,4 @@ public class VersionUtil { public static String getBuildDate() { return ourBuildTime.substring(0, 10); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/XmlDetectionUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/XmlDetectionUtil.java index d7e77157956..bee58b39999 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/XmlDetectionUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/XmlDetectionUtil.java @@ -51,6 +51,4 @@ public class XmlDetectionUtil { } return retVal; } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/XmlUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/XmlUtil.java index 64c337593a4..e256e282980 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/XmlUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/XmlUtil.java @@ -36,6 +36,19 @@ import org.w3c.dom.Node; import org.xml.sax.InputSource; import org.xml.sax.SAXException; +import java.io.IOException; +import java.io.OutputStream; +import java.io.OutputStreamWriter; +import java.io.Reader; +import java.io.StringReader; +import java.io.StringWriter; +import java.io.UnsupportedEncodingException; +import java.io.Writer; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; @@ -54,19 +67,6 @@ import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; -import java.io.IOException; -import java.io.OutputStream; -import java.io.OutputStreamWriter; -import java.io.Reader; -import java.io.StringReader; -import java.io.StringWriter; -import java.io.UnsupportedEncodingException; -import java.io.Writer; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -1542,8 +1542,7 @@ public class XmlUtil { /** * Non-instantiable */ - private XmlUtil() { - } + private XmlUtil() {} private static final class ExtendedEntityReplacingXmlResolver implements XMLResolver { @Override @@ -1606,14 +1605,15 @@ public class XmlUtil { } }; } - } private static XMLOutputFactory createOutputFactory() throws FactoryConfigurationError { try { // Detect if we're running with the Android lib, and force repackaged Woodstox to be used Class.forName("ca.uhn.fhir.repackage.javax.xml.stream.XMLOutputFactory"); - System.setProperty(javax.xml.stream.XMLOutputFactory.class.getName(), com.ctc.wstx.stax.WstxOutputFactory.class.getName()); + System.setProperty( + javax.xml.stream.XMLOutputFactory.class.getName(), + com.ctc.wstx.stax.WstxOutputFactory.class.getName()); } 
catch (ClassNotFoundException e) { // ok } @@ -1631,7 +1631,7 @@ public class XmlUtil { try { Class.forName("com.ctc.wstx.stax.WstxOutputFactory"); if (outputFactory instanceof WstxOutputFactory) { -// ((WstxOutputFactory)outputFactory).getConfig().setAttrValueEscaperFactory(new MyEscaper()); + // ((WstxOutputFactory)outputFactory).getConfig().setAttrValueEscaperFactory(new MyEscaper()); outputFactory.setProperty(XMLOutputFactory2.P_TEXT_ESCAPER, new MyEscaper()); } } catch (ClassNotFoundException e) { @@ -1640,7 +1640,8 @@ public class XmlUtil { return outputFactory; } - private static XMLEventWriter createXmlFragmentWriter(Writer theWriter) throws FactoryConfigurationError, XMLStreamException { + private static XMLEventWriter createXmlFragmentWriter(Writer theWriter) + throws FactoryConfigurationError, XMLStreamException { XMLOutputFactory outputFactory = getOrCreateFragmentOutputFactory(); return outputFactory.createXMLEventWriter(theWriter); } @@ -1654,14 +1655,16 @@ public class XmlUtil { return inputFactory.createXMLEventReader(reader); } - public static XMLStreamWriter createXmlStreamWriter(Writer theWriter) throws FactoryConfigurationError, XMLStreamException { + public static XMLStreamWriter createXmlStreamWriter(Writer theWriter) + throws FactoryConfigurationError, XMLStreamException { throwUnitTestExceptionIfConfiguredToDoSo(); XMLOutputFactory outputFactory = getOrCreateOutputFactory(); return outputFactory.createXMLStreamWriter(theWriter); } - public static XMLEventWriter createXmlWriter(Writer theWriter) throws FactoryConfigurationError, XMLStreamException { + public static XMLEventWriter createXmlWriter(Writer theWriter) + throws FactoryConfigurationError, XMLStreamException { XMLOutputFactory outputFactory = getOrCreateOutputFactory(); return outputFactory.createXMLEventWriter(theWriter); } @@ -1707,7 +1710,9 @@ public class XmlUtil { try { // Detect if we're running with the Android lib, and force repackaged Woodstox to be used Class.forName("ca.uhn.fhir.repackage.javax.xml.stream.XMLInputFactory"); - System.setProperty(javax.xml.stream.XMLInputFactory.class.getName(), com.ctc.wstx.stax.WstxInputFactory.class.getName()); + System.setProperty( + javax.xml.stream.XMLInputFactory.class.getName(), + com.ctc.wstx.stax.WstxInputFactory.class.getName()); } catch (ClassNotFoundException e) { // ok } @@ -1725,9 +1730,10 @@ public class XmlUtil { * See https://github.com/hapifhir/hapi-fhir/issues/339 * https://www.owasp.org/index.php/XML_External_Entity_%28XXE%29_Processing */ - inputFactory.setProperty(XMLInputFactory.SUPPORT_DTD, false); // This disables DTDs entirely for that factory - inputFactory.setProperty("javax.xml.stream.isSupportingExternalEntities", false); // disable external entities - + inputFactory.setProperty( + XMLInputFactory.SUPPORT_DTD, false); // This disables DTDs entirely for that factory + inputFactory.setProperty( + "javax.xml.stream.isSupportingExternalEntities", false); // disable external entities /* * In the following few lines, you can uncomment the first and comment the second to disable automatic @@ -1740,7 +1746,8 @@ public class XmlUtil { Class.forName("com.ctc.wstx.stax.WstxInputFactory"); boolean isWoodstox = inputFactory instanceof com.ctc.wstx.stax.WstxInputFactory; if (!isWoodstox) { - // Check if implementation is woodstox by property since instanceof check does not work if running in JBoss + // Check if implementation is woodstox by property since instanceof check does not work if running + // in JBoss try { isWoodstox = 
inputFactory.getProperty("org.codehaus.stax2.implVersion") != null; } catch (Exception e) { @@ -1788,7 +1795,8 @@ public class XmlUtil { } throwUnitTestExceptionIfConfiguredToDoSo(); } catch (Throwable e) { - throw new ConfigurationException(Msg.code(1753) + "Unable to initialize StAX - XML processing is disabled", e); + throw new ConfigurationException( + Msg.code(1753) + "Unable to initialize StAX - XML processing is disabled", e); } return inputFactory; } @@ -1799,7 +1807,8 @@ public class XmlUtil { outputFactory = XMLOutputFactory.newInstance(); throwUnitTestExceptionIfConfiguredToDoSo(); } catch (Throwable e) { - throw new ConfigurationException(Msg.code(1754) + "Unable to initialize StAX - XML processing is disabled", e); + throw new ConfigurationException( + Msg.code(1754) + "Unable to initialize StAX - XML processing is disabled", e); } return outputFactory; } @@ -1821,7 +1830,6 @@ public class XmlUtil { return null; } - try { ArrayList value = new ArrayList<>(); StringReader reader = new StringReader(val); @@ -1841,7 +1849,10 @@ public class XmlUtil { return value; } catch (XMLStreamException e) { - throw new DataFormatException(Msg.code(1755) + "String does not appear to be valid XML/XHTML (error is \"" + e.getMessage() + "\"): " + theValue, e); + throw new DataFormatException( + Msg.code(1755) + "String does not appear to be valid XML/XHTML (error is \"" + e.getMessage() + + "\"): " + theValue, + e); } catch (FactoryConfigurationError e) { throw new ConfigurationException(Msg.code(1756) + e); } @@ -1854,7 +1865,8 @@ public class XmlUtil { ourNextException = theException; } - private static void throwUnitTestExceptionIfConfiguredToDoSo() throws FactoryConfigurationError, XMLStreamException { + private static void throwUnitTestExceptionIfConfiguredToDoSo() + throws FactoryConfigurationError, XMLStreamException { if (ourNextException != null) { if (ourNextException instanceof javax.xml.stream.FactoryConfigurationError) { throw ((javax.xml.stream.FactoryConfigurationError) ourNextException); @@ -1872,7 +1884,8 @@ public class XmlUtil { return parseDocument(reader, true, false); } - public static Document parseDocument(Reader theReader, boolean theNamespaceAware, boolean allowDoctypeDeclaration) throws SAXException, IOException { + public static Document parseDocument(Reader theReader, boolean theNamespaceAware, boolean allowDoctypeDeclaration) + throws SAXException, IOException { DocumentBuilder builder; try { DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance(); @@ -1880,7 +1893,8 @@ public class XmlUtil { docBuilderFactory.setXIncludeAware(false); docBuilderFactory.setExpandEntityReferences(false); try { - docBuilderFactory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", !allowDoctypeDeclaration); + docBuilderFactory.setFeature( + "http://apache.org/xml/features/disallow-doctype-decl", !allowDoctypeDeclaration); docBuilderFactory.setFeature("http://xml.org/sax/features/external-general-entities", false); docBuilderFactory.setFeature("http://apache.org/xml/features/nonvalidating/load-dtd-grammar", false); docBuilderFactory.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false); @@ -1899,7 +1913,6 @@ public class XmlUtil { return builder.parse(src); } - public static List getChildrenByTagName(Element theParent, String theName) { List nodeList = new ArrayList(); for (Node child = theParent.getFirstChild(); child != null; child = child.getNextSibling()) { @@ -1911,7 +1924,6 @@ public class XmlUtil { return 
nodeList; } - public static String encodeDocument(Node theElement) throws TransformerException { return encodeDocument(theElement, false); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/bundle/BundleEntryMutator.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/bundle/BundleEntryMutator.java index 4c0b8b16dba..0c7f1877395 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/bundle/BundleEntryMutator.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/bundle/BundleEntryMutator.java @@ -34,7 +34,12 @@ public class BundleEntryMutator { private final FhirContext myFhirContext; private final BaseRuntimeElementCompositeDefinition myEntryDefinition; - public BundleEntryMutator(FhirContext theFhirContext, IBase theEntry, BaseRuntimeChildDefinition theRequestChildDef, BaseRuntimeElementCompositeDefinition theChildContentsDef, BaseRuntimeElementCompositeDefinition theEntryDefinition) { + public BundleEntryMutator( + FhirContext theFhirContext, + IBase theEntry, + BaseRuntimeChildDefinition theRequestChildDef, + BaseRuntimeElementCompositeDefinition theChildContentsDef, + BaseRuntimeElementCompositeDefinition theEntryDefinition) { myFhirContext = theFhirContext; myEntry = theEntry; myRequestChildDef = theRequestChildDef; @@ -52,7 +57,8 @@ public class BundleEntryMutator { @SuppressWarnings("unchecked") public void setFullUrl(String theFullUrl) { - IPrimitiveType value = (IPrimitiveType) myFhirContext.getElementDefinition("uri").newInstance(); + IPrimitiveType value = (IPrimitiveType) + myFhirContext.getElementDefinition("uri").newInstance(); value.setValue(theFullUrl); BaseRuntimeChildDefinition fullUrlChild = myEntryDefinition.getChildByName("fullUrl"); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/bundle/BundleEntryParts.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/bundle/BundleEntryParts.java index a40577db637..722e5129dff 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/bundle/BundleEntryParts.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/bundle/BundleEntryParts.java @@ -29,7 +29,12 @@ public class BundleEntryParts { private final String myConditionalUrl; private final String myFullUrl; - public BundleEntryParts(String theFullUrl, RequestTypeEnum theRequestType, String theUrl, IBaseResource theResource, String theConditionalUrl) { + public BundleEntryParts( + String theFullUrl, + RequestTypeEnum theRequestType, + String theUrl, + IBaseResource theResource, + String theConditionalUrl) { super(); myFullUrl = theFullUrl; myRequestType = theRequestType; diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/bundle/SearchBundleEntryParts.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/bundle/SearchBundleEntryParts.java index 70eb0b63f69..58e7e694eeb 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/bundle/SearchBundleEntryParts.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/bundle/SearchBundleEntryParts.java @@ -48,6 +48,4 @@ public class SearchBundleEntryParts { public BundleEntrySearchModeEnum getSearchMode() { return mySearchMode; } - - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/jar/DependencyLogFactory.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/jar/DependencyLogFactory.java index 5046d04437c..bf872da4e39 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/jar/DependencyLogFactory.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/jar/DependencyLogFactory.java @@ -23,7 +23,7 @@ import ca.uhn.fhir.util.CoverageIgnore; import ca.uhn.fhir.util.ReflectionUtil; 
public class DependencyLogFactory { - + /** * Non instantiable */ @@ -31,7 +31,7 @@ public class DependencyLogFactory { private DependencyLogFactory() { // nothing } - + public static IDependencyLog createJarLogger() { return ReflectionUtil.newInstanceOrReturnNull("ca.uhn.fhir.util.jar.DependencyLogImpl", IDependencyLog.class); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/jar/DependencyLogImpl.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/jar/DependencyLogImpl.java index 181e6868c8b..1d3530988f7 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/jar/DependencyLogImpl.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/jar/DependencyLogImpl.java @@ -19,14 +19,14 @@ */ package ca.uhn.fhir.util.jar; +import ca.uhn.fhir.util.XmlUtil; + import java.io.InputStream; import java.net.MalformedURLException; import java.net.URL; import java.util.jar.Attributes; import java.util.jar.Manifest; -import ca.uhn.fhir.util.XmlUtil; - public class DependencyLogImpl implements IDependencyLog { private static final Attributes.Name BUNDLE_SYMBOLIC_NAME = new Attributes.Name("Bundle-SymbolicName"); private static final Attributes.Name BUNDLE_VENDOR = new Attributes.Name("Bundle-Vendor"); @@ -35,7 +35,7 @@ public class DependencyLogImpl implements IDependencyLog { private static final Attributes.Name IMPLEMENTATION_VENDOR = new Attributes.Name("Implementation-Vendor"); private static final Attributes.Name IMPLEMENTATION_VERSION = new Attributes.Name("Implementation-Version"); private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(XmlUtil.class); - + @Override public void logStaxImplementation(Class theClass) { try { @@ -69,7 +69,9 @@ public class DependencyLogImpl implements IDependencyLog { version = attrs.getValue(BUNDLE_VERSION); } if (ourLog.isDebugEnabled()) { - ourLog.debug("FHIR XML procesing will use StAX implementation at {}\n Title: {}\n Symbolic name: {}\n Vendor: {}\n Version: {}", new Object[] { rootUrl, title, symbolicName, vendor, version } ); + ourLog.debug( + "FHIR XML procesing will use StAX implementation at {}\n Title: {}\n Symbolic name: {}\n Vendor: {}\n Version: {}", + new Object[] {rootUrl, title, symbolicName, vendor, version}); } else { ourLog.info("FHIR XML procesing will use StAX implementation '{}' version '{}'", title, version); } @@ -78,7 +80,7 @@ public class DependencyLogImpl implements IDependencyLog { ourLog.info("Unable to determine StAX implementation: " + e.getMessage()); } } - + private static URL getRootUrlForClass(Class cls) { ClassLoader classLoader = cls.getClassLoader(); String resource = cls.getName().replace('.', '/') + ".class"; @@ -95,7 +97,11 @@ public class DependencyLogImpl implements IDependencyLog { String file = url.getFile(); if (file.endsWith(resource)) { try { - return new URL(url.getProtocol(), url.getHost(), url.getPort(), file.substring(0, file.length() - resource.length())); + return new URL( + url.getProtocol(), + url.getHost(), + url.getPort(), + file.substring(0, file.length() - resource.length())); } catch (MalformedURLException ex) { return null; } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/jar/IDependencyLog.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/jar/IDependencyLog.java index 30c639ad409..2417fed62a9 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/jar/IDependencyLog.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/jar/IDependencyLog.java @@ -22,5 +22,4 @@ package ca.uhn.fhir.util.jar; public interface IDependencyLog { void 
logStaxImplementation(Class theClass); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/BaseValidationContext.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/BaseValidationContext.java index 498493ecbc5..9018f799def 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/BaseValidationContext.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/BaseValidationContext.java @@ -19,12 +19,12 @@ */ package ca.uhn.fhir.validation; -import java.util.ArrayList; -import java.util.List; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.util.ObjectUtil; +import java.util.ArrayList; +import java.util.List; + abstract class BaseValidationContext implements IValidationContext { protected final FhirContext myFhirContext; @@ -66,5 +66,4 @@ abstract class BaseValidationContext implements IValidationContext { public ValidationResult toResult() { return new ValidationResult(myFhirContext, myMessages); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/FhirValidator.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/FhirValidator.java index dc00d9e2fc6..d9e3cbe7e12 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/FhirValidator.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/FhirValidator.java @@ -46,7 +46,6 @@ import java.util.stream.IntStream; import static org.apache.commons.lang3.StringUtils.isBlank; - /** * Resource validator, which checks resources for compliance against various validation schemes (schemas, schematrons, profiles, etc.) * @@ -84,7 +83,10 @@ public class FhirValidator { } } - private void addOrRemoveValidator(boolean theValidateAgainstStandardSchema, Class type, IValidatorModule theInstance) { + private void addOrRemoveValidator( + boolean theValidateAgainstStandardSchema, + Class type, + IValidatorModule theInstance) { if (theValidateAgainstStandardSchema) { boolean found = haveValidatorOfType(type); if (!found) { @@ -123,7 +125,8 @@ public class FhirValidator { * @return Returns a referens to this for method chaining */ public synchronized FhirValidator setValidateAgainstStandardSchema(boolean theValidateAgainstStandardSchema) { - addOrRemoveValidator(theValidateAgainstStandardSchema, SchemaBaseValidator.class, new SchemaBaseValidator(myContext)); + addOrRemoveValidator( + theValidateAgainstStandardSchema, SchemaBaseValidator.class, new SchemaBaseValidator(myContext)); return this; } @@ -145,9 +148,11 @@ public class FhirValidator { * * @return Returns a referens to this for method chaining */ - public synchronized FhirValidator setValidateAgainstStandardSchematron(boolean theValidateAgainstStandardSchematron) { + public synchronized FhirValidator setValidateAgainstStandardSchematron( + boolean theValidateAgainstStandardSchematron) { if (theValidateAgainstStandardSchematron && !ourPhPresentOnClasspath) { - throw new IllegalArgumentException(Msg.code(1970) + myContext.getLocalizer().getMessage(I18N_KEY_NO_PH_ERROR)); + throw new IllegalArgumentException( + Msg.code(1970) + myContext.getLocalizer().getMessage(I18N_KEY_NO_PH_ERROR)); } if (!theValidateAgainstStandardSchematron && !ourPhPresentOnClasspath) { return this; @@ -188,7 +193,6 @@ public class FhirValidator { myValidators = newValidators; } - private void applyDefaultValidators() { if (myValidators.isEmpty()) { setValidateAgainstStandardSchema(true); @@ -198,7 +202,6 @@ public class FhirValidator { } } - /** * Validates a resource instance returning a {@link ValidationResult} which contains the results. 
* @@ -231,8 +234,10 @@ public class FhirValidator { */ public ValidationResult validateWithResult(String theResource, ValidationOptions theOptions) { Validate.notNull(theResource, "theResource must not be null"); - IValidationContext validationContext = ValidationContext.forText(myContext, theResource, theOptions); - Function callback = result -> invokeValidationCompletedHooks(null, theResource, result); + IValidationContext validationContext = + ValidationContext.forText(myContext, theResource, theOptions); + Function callback = + result -> invokeValidationCompletedHooks(null, theResource, result); return doValidate(validationContext, theOptions, callback); } @@ -246,18 +251,23 @@ public class FhirValidator { */ public ValidationResult validateWithResult(IBaseResource theResource, ValidationOptions theOptions) { Validate.notNull(theResource, "theResource must not be null"); - IValidationContext validationContext = ValidationContext.forResource(myContext, theResource, theOptions); - Function callback = result -> invokeValidationCompletedHooks(theResource, null, result); + IValidationContext validationContext = + ValidationContext.forResource(myContext, theResource, theOptions); + Function callback = + result -> invokeValidationCompletedHooks(theResource, null, result); return doValidate(validationContext, theOptions, callback); } - private ValidationResult doValidate(IValidationContext theValidationContext, ValidationOptions theOptions, - Function theValidationCompletionCallback) { + private ValidationResult doValidate( + IValidationContext theValidationContext, + ValidationOptions theOptions, + Function theValidationCompletionCallback) { applyDefaultValidators(); ValidationResult result; - if (myConcurrentBundleValidation && theValidationContext.getResource() instanceof IBaseBundle - && myExecutorService != null) { + if (myConcurrentBundleValidation + && theValidationContext.getResource() instanceof IBaseBundle + && myExecutorService != null) { result = validateBundleEntriesConcurrently(theValidationContext, theOptions); } else { result = validateResource(theValidationContext); @@ -266,27 +276,32 @@ public class FhirValidator { return theValidationCompletionCallback.apply(result); } - private ValidationResult validateBundleEntriesConcurrently(IValidationContext theValidationContext, ValidationOptions theOptions) { - List entries = BundleUtil.toListOfResources(myContext, (IBaseBundle) theValidationContext.getResource()); + private ValidationResult validateBundleEntriesConcurrently( + IValidationContext theValidationContext, ValidationOptions theOptions) { + List entries = + BundleUtil.toListOfResources(myContext, (IBaseBundle) theValidationContext.getResource()); // Async validation tasks List validationTasks = IntStream.range(0, entries.size()) - .mapToObj(index -> { - IBaseResource resourceToValidate; - IBaseResource entry = entries.get(index); + .mapToObj(index -> { + IBaseResource resourceToValidate; + IBaseResource entry = entries.get(index); - if (mySkipContainedReferenceValidation) { - resourceToValidate = withoutContainedResources(entry); - } else { - resourceToValidate = entry; - } + if (mySkipContainedReferenceValidation) { + resourceToValidate = withoutContainedResources(entry); + } else { + resourceToValidate = entry; + } - String entryPathPrefix = String.format("Bundle.entry[%d].resource.ofType(%s)", index, resourceToValidate.fhirType()); - Future future = myExecutorService.submit(() -> { - IValidationContext entryValidationContext = 
ValidationContext.forResource(theValidationContext.getFhirContext(), resourceToValidate, theOptions); - return validateResource(entryValidationContext); - }); - return new ConcurrentValidationTask(entryPathPrefix, future); - }).collect(Collectors.toList()); + String entryPathPrefix = + String.format("Bundle.entry[%d].resource.ofType(%s)", index, resourceToValidate.fhirType()); + Future future = myExecutorService.submit(() -> { + IValidationContext entryValidationContext = ValidationContext.forResource( + theValidationContext.getFhirContext(), resourceToValidate, theOptions); + return validateResource(entryValidationContext); + }); + return new ConcurrentValidationTask(entryPathPrefix, future); + }) + .collect(Collectors.toList()); List validationMessages = buildValidationMessages(validationTasks); return new ValidationResult(myContext, validationMessages); @@ -309,26 +324,26 @@ public class FhirValidator { ValidationResult result = validationTask.getFuture().get(); final String bundleEntryPathPrefix = validationTask.getResourcePathPrefix(); List messages = result.getMessages().stream() - .map(message -> { - String currentPath; + .map(message -> { + String currentPath; - String locationString = StringUtils.defaultIfEmpty(message.getLocationString(), ""); + String locationString = StringUtils.defaultIfEmpty(message.getLocationString(), ""); - int dotIndex = locationString.indexOf('.'); - if (dotIndex >= 0) { - currentPath = locationString.substring(dotIndex); - } else { - if (isBlank(bundleEntryPathPrefix) || isBlank(locationString)) { - currentPath = locationString; + int dotIndex = locationString.indexOf('.'); + if (dotIndex >= 0) { + currentPath = locationString.substring(dotIndex); } else { - currentPath = "." + locationString; + if (isBlank(bundleEntryPathPrefix) || isBlank(locationString)) { + currentPath = locationString; + } else { + currentPath = "." + locationString; + } } - } - message.setLocationString(bundleEntryPathPrefix + currentPath); - return message; - }) - .collect(Collectors.toList()); + message.setLocationString(bundleEntryPathPrefix + currentPath); + return message; + }) + .collect(Collectors.toList()); retval.addAll(messages); } } catch (InterruptedException | ExecutionException exp) { @@ -344,14 +359,16 @@ public class FhirValidator { return theValidationContext.toResult(); } - private ValidationResult invokeValidationCompletedHooks(IBaseResource theResourceParsed, String theResourceRaw, ValidationResult theValidationResult) { + private ValidationResult invokeValidationCompletedHooks( + IBaseResource theResourceParsed, String theResourceRaw, ValidationResult theValidationResult) { if (myInterceptorBroadcaster != null) { if (myInterceptorBroadcaster.hasHooks(Pointcut.VALIDATION_COMPLETED)) { HookParams params = new HookParams() - .add(IBaseResource.class, theResourceParsed) - .add(String.class, theResourceRaw) - .add(ValidationResult.class, theValidationResult); - Object newResult = myInterceptorBroadcaster.callHooksAndReturnObject(Pointcut.VALIDATION_COMPLETED, params); + .add(IBaseResource.class, theResourceParsed) + .add(String.class, theResourceRaw) + .add(ValidationResult.class, theValidationResult); + Object newResult = + myInterceptorBroadcaster.callHooksAndReturnObject(Pointcut.VALIDATION_COMPLETED, params); if (newResult != null) { theValidationResult = (ValidationResult) newResult; } @@ -378,7 +395,6 @@ public class FhirValidator { * If this is true, bundles will be validated in parallel threads. 
The bundle structure itself will not be validated, * only the resources in its entries. */ - public boolean isConcurrentBundleValidation() { return myConcurrentBundleValidation; } @@ -427,5 +443,4 @@ public class FhirValidator { return myFuture; } } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/IResourceLoader.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/IResourceLoader.java index fabde11965d..81c49b72512 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/IResourceLoader.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/IResourceLoader.java @@ -19,16 +19,15 @@ */ package ca.uhn.fhir.validation; +import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; -import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; - public interface IResourceLoader { /** * Load the latest version of a given resource - * + * * @param theType * The type of the resource to load * @param theId @@ -37,5 +36,4 @@ public interface IResourceLoader { * If the resource is not known */ public T load(Class theType, IIdType theId) throws ResourceNotFoundException; - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/IValidationContext.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/IValidationContext.java index 482fdea5c09..e24720f8ede 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/IValidationContext.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/IValidationContext.java @@ -19,11 +19,10 @@ */ package ca.uhn.fhir.validation; -import java.util.List; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.rest.api.EncodingEnum; +import java.util.List; import javax.annotation.Nonnull; public interface IValidationContext { @@ -39,10 +38,9 @@ public interface IValidationContext { void addValidationMessage(SingleValidationMessage theMessage); List getMessages(); - + ValidationResult toResult(); @Nonnull ValidationOptions getOptions(); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/IValidatorModule.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/IValidatorModule.java index 214963c4914..b2e541a60e4 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/IValidatorModule.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/IValidatorModule.java @@ -21,11 +21,10 @@ package ca.uhn.fhir.validation; import org.hl7.fhir.instance.model.api.IBaseResource; - /** * An individual validation module, which applies validation rules against * resources and adds failure/informational messages as it goes. - * + * * See Validation * for a list of available modules. You may also create your own. */ @@ -33,10 +32,9 @@ public interface IValidatorModule { /** * Validate the actual resource. - * + * * The {@link IValidationContext} can be used to access the resource being validated, * and is populated with the results. 
*/ void validateResource(IValidationContext theCtx); - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/LSInputImpl.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/LSInputImpl.java index 4efbe4a3606..e7f8e5350f8 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/LSInputImpl.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/LSInputImpl.java @@ -19,11 +19,11 @@ */ package ca.uhn.fhir.validation; +import org.w3c.dom.ls.LSInput; + import java.io.InputStream; import java.io.Reader; -import org.w3c.dom.ls.LSInput; - class LSInputImpl implements LSInput { private Reader myCharacterStream; @@ -42,7 +42,7 @@ class LSInputImpl implements LSInput { @Override public void setCharacterStream(Reader theCharacterStream) { - myCharacterStream=theCharacterStream; + myCharacterStream = theCharacterStream; } @Override @@ -52,7 +52,7 @@ class LSInputImpl implements LSInput { @Override public void setByteStream(InputStream theByteStream) { - myByteStream=theByteStream; + myByteStream = theByteStream; } @Override @@ -62,7 +62,7 @@ class LSInputImpl implements LSInput { @Override public void setStringData(String theStringData) { - myStringData=theStringData; + myStringData = theStringData; } @Override @@ -72,7 +72,7 @@ class LSInputImpl implements LSInput { @Override public void setSystemId(String theSystemId) { - mySystemId=theSystemId; + mySystemId = theSystemId; } @Override @@ -82,7 +82,7 @@ class LSInputImpl implements LSInput { @Override public void setPublicId(String thePublicId) { - myPublicId=thePublicId; + myPublicId = thePublicId; } @Override @@ -92,7 +92,7 @@ class LSInputImpl implements LSInput { @Override public void setBaseURI(String theBaseURI) { - myBaseURI=theBaseURI; + myBaseURI = theBaseURI; } @Override @@ -102,7 +102,7 @@ class LSInputImpl implements LSInput { @Override public void setEncoding(String theEncoding) { - myEncoding=theEncoding; + myEncoding = theEncoding; } @Override @@ -112,7 +112,6 @@ class LSInputImpl implements LSInput { @Override public void setCertifiedText(boolean theCertifiedText) { - myCertifiedText=theCertifiedText; + myCertifiedText = theCertifiedText; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/ResultSeverityEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/ResultSeverityEnum.java index 138918f0f4e..2143ba9b24c 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/ResultSeverityEnum.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/ResultSeverityEnum.java @@ -29,12 +29,12 @@ public enum ResultSeverityEnum { * The issue has no relation to the degree of success of the action */ INFORMATION("information"), - + /** * The issue is not important enough to cause the action to fail, but may cause it to be performed suboptimally or in a way that is not as desired */ WARNING("warning"), - + /** * The issue is sufficiently important to cause the action to fail */ diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/SchemaBaseValidator.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/SchemaBaseValidator.java index f9cadac52e8..a50728b8057 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/SchemaBaseValidator.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/SchemaBaseValidator.java @@ -31,12 +31,6 @@ import org.xml.sax.SAXException; import org.xml.sax.SAXNotRecognizedException; import org.xml.sax.SAXParseException; -import javax.xml.XMLConstants; -import javax.xml.transform.Source; -import 
javax.xml.transform.stream.StreamSource; -import javax.xml.validation.Schema; -import javax.xml.validation.SchemaFactory; -import javax.xml.validation.Validator; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.StringReader; @@ -45,9 +39,16 @@ import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; +import javax.xml.XMLConstants; +import javax.xml.transform.Source; +import javax.xml.transform.stream.StreamSource; +import javax.xml.validation.Schema; +import javax.xml.validation.SchemaFactory; +import javax.xml.validation.Validator; public class SchemaBaseValidator implements IValidatorModule { - public static final String RESOURCES_JAR_NOTE = "Note that as of HAPI FHIR 1.2, DSTU2 validation files are kept in a separate JAR (hapi-fhir-validation-resources-XXX.jar) which must be added to your classpath. See the HAPI FHIR download page for more information."; + public static final String RESOURCES_JAR_NOTE = + "Note that as of HAPI FHIR 1.2, DSTU2 validation files are kept in a separate JAR (hapi-fhir-validation-resources-XXX.jar) which must be added to your classpath. See the HAPI FHIR download page for more information."; private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SchemaBaseValidator.class); private static final Set SCHEMA_NAMES; @@ -84,7 +85,8 @@ public class SchemaBaseValidator implements IValidatorModule { if (theContext.getResourceAsStringEncoding() == EncodingEnum.XML) { encodedResource = theContext.getResourceAsString(); } else { - encodedResource = theContext.getFhirContext().newXmlParser().encodeResourceToString((IBaseResource) theContext.getResource()); + encodedResource = theContext.getFhirContext().newXmlParser().encodeResourceToString((IBaseResource) + theContext.getResource()); } try { @@ -138,9 +140,10 @@ public class SchemaBaseValidator implements IValidatorModule { ourJaxp15Supported = false; ourLog.warn("Jaxp 1.5 Support not found.", e); } - schema = schemaFactory.newSchema(new Source[]{baseSource}); + schema = schemaFactory.newSchema(new Source[] {baseSource}); } catch (SAXException e) { - throw new ConfigurationException(Msg.code(1968) + "Could not load/parse schema file: " + "fhir-single.xsd", e); + throw new ConfigurationException( + Msg.code(1968) + "Could not load/parse schema file: " + "fhir-single.xsd", e); } myKeyToSchema.put(key, schema); return schema; @@ -161,11 +164,11 @@ public class SchemaBaseValidator implements IValidatorModule { } private final class MyResourceResolver implements LSResourceResolver { - private MyResourceResolver() { - } + private MyResourceResolver() {} @Override - public LSInput resolveResource(String theType, String theNamespaceURI, String thePublicId, String theSystemId, String theBaseURI) { + public LSInput resolveResource( + String theType, String theNamespaceURI, String thePublicId, String theSystemId, String theBaseURI) { if (theSystemId != null && SCHEMA_NAMES.contains(theSystemId)) { LSInputImpl input = new LSInputImpl(); input.setPublicId(thePublicId); @@ -178,7 +181,6 @@ public class SchemaBaseValidator implements IValidatorModule { byte[] bytes = ClasspathUtil.loadResourceAsByteArray(pathToBase); input.setByteStream(new ByteArrayInputStream(bytes)); return input; - } throw new ConfigurationException(Msg.code(1969) + "Unknown schema: " + theSystemId); @@ -216,11 +218,9 @@ public class SchemaBaseValidator implements IValidatorModule { public void warning(SAXParseException theException) { addIssue(theException, 
ResultSeverityEnum.WARNING); } - } public static boolean isJaxp15Supported() { return ourJaxp15Supported; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/SingleValidationMessage.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/SingleValidationMessage.java index c22465a21ff..7d9e827d493 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/SingleValidationMessage.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/SingleValidationMessage.java @@ -139,5 +139,4 @@ public class SingleValidationMessage { } return b.toString(); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/ValidationContext.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/ValidationContext.java index 5edaa77ed6c..1756e7fd124 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/ValidationContext.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/ValidationContext.java @@ -28,9 +28,9 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.util.ObjectUtil; import org.hl7.fhir.instance.model.api.IBaseResource; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; @@ -42,11 +42,17 @@ public class ValidationContext extends BaseValidationContext implements IV private final ValidationOptions myOptions; private String myResourceAsString; - private ValidationContext(FhirContext theContext, T theResource, IEncoder theEncoder, ValidationOptions theOptions) { + private ValidationContext( + FhirContext theContext, T theResource, IEncoder theEncoder, ValidationOptions theOptions) { this(theContext, theResource, theEncoder, new ArrayList<>(), theOptions); } - private ValidationContext(FhirContext theContext, T theResource, IEncoder theEncoder, List theMessages, ValidationOptions theOptions) { + private ValidationContext( + FhirContext theContext, + T theResource, + IEncoder theEncoder, + List theMessages, + ValidationOptions theOptions) { super(theContext, theMessages); myResource = theResource; myEncoder = theEncoder; @@ -88,7 +94,8 @@ public class ValidationContext extends BaseValidationContext implements IV EncodingEnum getEncoding(); } - public static IValidationContext forResource(final FhirContext theContext, final T theResource, ValidationOptions theOptions) { + public static IValidationContext forResource( + final FhirContext theContext, final T theResource, ValidationOptions theOptions) { ObjectUtil.requireNonNull(theContext, "theContext can not be null"); ObjectUtil.requireNonNull(theResource, "theResource can not be null"); ValidationOptions options = defaultIfNull(theOptions, ValidationOptions.empty()); @@ -107,7 +114,8 @@ public class ValidationContext extends BaseValidationContext implements IV return new ValidationContext<>(theContext, theResource, encoder, options); } - public static IValidationContext forText(final FhirContext theContext, final String theResourceBody, final ValidationOptions theOptions) { + public static IValidationContext forText( + final FhirContext theContext, final String theResourceBody, final ValidationOptions theOptions) { ObjectUtil.requireNonNull(theContext, "theContext can not be null"); ObjectUtil.requireNotEmpty(theResourceBody, "theResourceBody can not be null or empty"); ValidationOptions options = defaultIfNull(theOptions, ValidationOptions.empty()); @@ -139,7 +147,10 @@ public class ValidationContext extends 
BaseValidationContext implements IV if (myEncoding == null) { myEncoding = EncodingEnum.detectEncodingNoDefault(theResourceBody); if (myEncoding == null) { - throw new InvalidRequestException(Msg.code(1971) + theContext.getLocalizer().getMessage(ValidationContext.class, "unableToDetermineEncoding")); + throw new InvalidRequestException(Msg.code(1971) + + theContext + .getLocalizer() + .getMessage(ValidationContext.class, "unableToDetermineEncoding")); } } return myEncoding; @@ -150,21 +161,28 @@ public class ValidationContext extends BaseValidationContext implements IV public ValidationOptions getOptions() { return options; } - }; } - public static IValidationContext subContext(final IValidationContext theCtx, final IBaseResource theResource, ValidationOptions theOptions) { - return new ValidationContext<>(theCtx.getFhirContext(), theResource, new IEncoder() { - @Override - public String encode() { - return theCtx.getFhirContext().newXmlParser().encodeResourceToString(theResource); - } + public static IValidationContext subContext( + final IValidationContext theCtx, + final IBaseResource theResource, + ValidationOptions theOptions) { + return new ValidationContext<>( + theCtx.getFhirContext(), + theResource, + new IEncoder() { + @Override + public String encode() { + return theCtx.getFhirContext().newXmlParser().encodeResourceToString(theResource); + } - @Override - public EncodingEnum getEncoding() { - return EncodingEnum.XML; - } - }, theCtx.getMessages(), theOptions); + @Override + public EncodingEnum getEncoding() { + return EncodingEnum.XML; + } + }, + theCtx.getMessages(), + theOptions); } } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/ValidationFailureException.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/ValidationFailureException.java index 5f1cecf9a44..d3fa479f43c 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/ValidationFailureException.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/ValidationFailureException.java @@ -19,37 +19,36 @@ */ package ca.uhn.fhir.validation; -import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.util.OperationOutcomeUtil; +import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; public class ValidationFailureException extends RuntimeException { private static final long serialVersionUID = 1L; private IBaseOperationOutcome myOperationOutcome; -// public ValidationFailureException(String theProblem) { -// this(theProblem, IssueSeverityEnum.FATAL, null); -// } + // public ValidationFailureException(String theProblem) { + // this(theProblem, IssueSeverityEnum.FATAL, null); + // } private static String toDescription(FhirContext theCtx, IBaseOperationOutcome theOo) { StringBuilder b = new StringBuilder(); b.append(OperationOutcomeUtil.getFirstIssueDetails(theCtx, theOo)); -// b.append(" - "); -// b.append(theOo.getIssueFirstRep().getLocationFirstRep().getValue()); + // b.append(" - "); + // b.append(theOo.getIssueFirstRep().getLocationFirstRep().getValue()); return b.toString(); } -// public ValidationFailureException(String theProblem, Exception theCause) { -// this(theProblem, IssueSeverityEnum.FATAL, theCause); -// } + // public ValidationFailureException(String theProblem, Exception theCause) { + // this(theProblem, IssueSeverityEnum.FATAL, theCause); + // } -// public ValidationFailureException(String theProblem, IssueSeverityEnum theSeverity, Exception theCause) { -// super(theProblem, theCause); -// 
myOperationOutcome = new OperationOutcome(); -// myOperationOutcome.addIssue().setSeverity(theSeverity).setDetails(theProblem); -// } + // public ValidationFailureException(String theProblem, IssueSeverityEnum theSeverity, Exception theCause) { + // super(theProblem, theCause); + // myOperationOutcome = new OperationOutcome(); + // myOperationOutcome.addIssue().setSeverity(theSeverity).setDetails(theProblem); + // } public ValidationFailureException(FhirContext theCtx, IBaseOperationOutcome theOperationOutcome) { super(toDescription(theCtx, theOperationOutcome)); @@ -59,5 +58,4 @@ public class ValidationFailureException extends RuntimeException { public IBaseOperationOutcome getOperationOutcome() { return myOperationOutcome; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/ValidationOptions.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/ValidationOptions.java index 40ee00e383d..50cdbc1ef73 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/ValidationOptions.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/ValidationOptions.java @@ -32,8 +32,7 @@ public class ValidationOptions { private static ValidationOptions ourEmpty; private Set myProfiles; - public ValidationOptions() { - } + public ValidationOptions() {} public Set getProfiles() { return myProfiles != null ? Collections.unmodifiableSet(myProfiles) : Collections.emptySet(); @@ -65,5 +64,4 @@ public class ValidationOptions { } return retVal; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/ValidationResult.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/ValidationResult.java index 7b164103bc1..0eca0cc8a61 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/ValidationResult.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/ValidationResult.java @@ -67,14 +67,13 @@ public class ValidationResult { * severity {@link ResultSeverityEnum#ERROR} or {@link ResultSeverityEnum#FATAL}. A validation * is still considered successful if it only has issues at level {@link ResultSeverityEnum#WARNING} or * lower. - * + * * @return true if the validation was successful */ public boolean isSuccessful() { return myIsSuccessful; } - private String toDescription() { if (myMessages.isEmpty()) { return "No issues"; @@ -84,8 +83,12 @@ public class ValidationResult { int shownMsgQty = Math.min(myErrorDisplayLimit, myMessages.size()); if (shownMsgQty < myMessages.size()) { - b.append("(showing first ").append(shownMsgQty).append(" messages out of ") - .append(myMessages.size()).append(" total)").append(ourNewLine); + b.append("(showing first ") + .append(shownMsgQty) + .append(" messages out of ") + .append(myMessages.size()) + .append(" total)") + .append(ourNewLine); } for (int i = 0; i < shownMsgQty; i++) { @@ -103,7 +106,6 @@ public class ValidationResult { return b.toString(); } - /** * @deprecated Use {@link #toOperationOutcome()} instead since this method returns a view. 
* {@link #toOperationOutcome()} is identical to this method, but has a more suitable name so this method @@ -118,13 +120,14 @@ public class ValidationResult { * Create an OperationOutcome resource which contains all of the messages found as a result of this validation */ public IBaseOperationOutcome toOperationOutcome() { - IBaseOperationOutcome oo = (IBaseOperationOutcome) myCtx.getResourceDefinition("OperationOutcome").newInstance(); + IBaseOperationOutcome oo = (IBaseOperationOutcome) + myCtx.getResourceDefinition("OperationOutcome").newInstance(); populateOperationOutcome(oo); return oo; } /** - * Populate an operation outcome with the results of the validation + * Populate an operation outcome with the results of the validation */ public void populateOperationOutcome(IBaseOperationOutcome theOperationOutcome) { for (SingleValidationMessage next : myMessages) { @@ -137,8 +140,15 @@ public class ValidationResult { location = null; } String severity = next.getSeverity() != null ? next.getSeverity().getCode() : null; - IBase issue = OperationOutcomeUtil.addIssueWithMessageId(myCtx, theOperationOutcome, severity, next.getMessage(), next.getMessageId(), location, Constants.OO_INFOSTATUS_PROCESSING); - + IBase issue = OperationOutcomeUtil.addIssueWithMessageId( + myCtx, + theOperationOutcome, + severity, + next.getMessage(), + next.getMessageId(), + location, + Constants.OO_INFOSTATUS_PROCESSING); + if (next.getLocationLine() != null || next.getLocationCol() != null) { String unknown = "(unknown)"; String line = unknown; @@ -163,7 +173,8 @@ public class ValidationResult { @Override public String toString() { - return "ValidationResult{" + "messageCount=" + myMessages.size() + ", isSuccessful=" + myIsSuccessful + ", description='" + toDescription() + '\'' + '}'; + return "ValidationResult{" + "messageCount=" + myMessages.size() + ", isSuccessful=" + myIsSuccessful + + ", description='" + toDescription() + '\'' + '}'; } /** @@ -173,10 +184,13 @@ public class ValidationResult { return myCtx; } - public int getErrorDisplayLimit() { return myErrorDisplayLimit; } + public int getErrorDisplayLimit() { + return myErrorDisplayLimit; + } - public void setErrorDisplayLimit(int theErrorDisplayLimit) { myErrorDisplayLimit = theErrorDisplayLimit; } + public void setErrorDisplayLimit(int theErrorDisplayLimit) { + myErrorDisplayLimit = theErrorDisplayLimit; + } - - private static final String ourNewLine = System.getProperty("line.separator"); + private static final String ourNewLine = System.getProperty("line.separator"); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/schematron/SchematronBaseValidator.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/schematron/SchematronBaseValidator.java index aeae05c508a..9355952873b 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/schematron/SchematronBaseValidator.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/schematron/SchematronBaseValidator.java @@ -44,7 +44,6 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.xml.transform.stream.StreamSource; import java.io.IOException; import java.io.InputStream; import java.io.StringReader; @@ -52,6 +51,7 @@ import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; +import javax.xml.transform.stream.StreamSource; /** * This class is only used using reflection from {@link SchematronProvider} in order @@ -95,7 +95,12 @@ public class SchematronBaseValidator 
implements IValidatorModule { return; } - IErrorList errors = SchematronHelper.convertToErrorList(results, theCtx.getFhirContext().getResourceDefinition(theCtx.getResource()).getBaseDefinition().getName()); + IErrorList errors = SchematronHelper.convertToErrorList( + results, + theCtx.getFhirContext() + .getResourceDefinition(theCtx.getResource()) + .getBaseDefinition() + .getName()); if (errors.getAllErrors().containsOnlySuccess()) { return; @@ -123,39 +128,52 @@ public class SchematronBaseValidator implements IValidatorModule { message.setSeverity(severity); theCtx.addValidationMessage(message); } - } private ISchematronResource getSchematron(IValidationContext theCtx) { Class resource = theCtx.getResource().getClass(); - Class baseResourceClass = theCtx.getFhirContext().getResourceDefinition(resource).getBaseDefinition().getImplementingClass(); + Class baseResourceClass = theCtx.getFhirContext() + .getResourceDefinition(resource) + .getBaseDefinition() + .getImplementingClass(); return getSchematronAndCache(theCtx, baseResourceClass); } - private ISchematronResource getSchematronAndCache(IValidationContext theCtx, Class theClass) { + private ISchematronResource getSchematronAndCache( + IValidationContext theCtx, Class theClass) { synchronized (myClassToSchematron) { ISchematronResource retVal = myClassToSchematron.get(theClass); if (retVal != null) { return retVal; } - String pathToBase = myCtx.getVersion().getPathToSchemaDefinitions() + '/' + theCtx.getFhirContext().getResourceDefinition(theCtx.getResource()).getBaseDefinition().getName().toLowerCase() - + ".sch"; + String pathToBase = myCtx.getVersion().getPathToSchemaDefinitions() + '/' + + theCtx.getFhirContext() + .getResourceDefinition(theCtx.getResource()) + .getBaseDefinition() + .getName() + .toLowerCase() + + ".sch"; try (InputStream baseIs = FhirValidator.class.getResourceAsStream(pathToBase)) { if (baseIs == null) { - throw new InternalErrorException(Msg.code(1972) + "Failed to load schematron for resource '" + theCtx.getFhirContext().getResourceDefinition(theCtx.getResource()).getBaseDefinition().getName() + "'. " - + SchemaBaseValidator.RESOURCES_JAR_NOTE); + throw new InternalErrorException(Msg.code(1972) + "Failed to load schematron for resource '" + + theCtx.getFhirContext() + .getResourceDefinition(theCtx.getResource()) + .getBaseDefinition() + .getName() + + "'. " + SchemaBaseValidator.RESOURCES_JAR_NOTE); } } catch (IOException e) { ourLog.error("Failed to close stream", e); } - // Allow Schematron to load SCH files from the 'validation-resources' - // bundles when running in an OSGi container. This is because the + // Allow Schematron to load SCH files from the 'validation-resources' + // bundles when running in an OSGi container. This is because the // Schematron bundle does not have DynamicImport-Package in its manifest. 
- IReadableResource schResource = new ClassPathResource(pathToBase, this.getClass().getClassLoader()); - retVal = new SchematronResourceSCH(schResource); + IReadableResource schResource = + new ClassPathResource(pathToBase, this.getClass().getClassLoader()); + retVal = new SchematronResourceSCH(schResource); myClassToSchematron.put(theClass, retVal); return retVal; } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/schematron/SchematronProvider.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/schematron/SchematronProvider.java index 508a27e1c07..b783b752ead 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/schematron/SchematronProvider.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/validation/schematron/SchematronProvider.java @@ -29,10 +29,9 @@ import java.lang.reflect.Constructor; public class SchematronProvider { - private static final String I18N_KEY_NO_PH_WARNING = FhirValidator.class.getName() + ".noPhWarningOnStartup"; private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirValidator.class); - + @CoverageIgnore public static boolean isSchematronAvailable(FhirContext theFhirContext) { try { @@ -43,17 +42,18 @@ public class SchematronProvider { return false; } } - + @SuppressWarnings("unchecked") @CoverageIgnore public static Class getSchematronValidatorClass() { try { - return (Class) Class.forName("ca.uhn.fhir.validation.schematron.SchematronBaseValidator"); + return (Class) + Class.forName("ca.uhn.fhir.validation.schematron.SchematronBaseValidator"); } catch (ClassNotFoundException e) { throw new IllegalStateException(Msg.code(1973) + "Cannot resolve schematron validator ", e); } } - + @CoverageIgnore public static IValidatorModule getSchematronValidatorInstance(FhirContext myContext) { try { diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IAnyResource.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IAnyResource.java index e15dd0ae0ca..0a6c81554f8 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IAnyResource.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IAnyResource.java @@ -30,7 +30,7 @@ public interface IAnyResource extends IBaseResource { /** * Search parameter constant for _id */ - @SearchParamDefinition(name="_id", path="", description="The ID of the resource", type="token") + @SearchParamDefinition(name = "_id", path = "", description = "The ID of the resource", type = "token") String SP_RES_ID = "_id"; /** @@ -54,5 +54,4 @@ public interface IAnyResource extends IBaseResource { IAnyResource setId(String theId); void setUserData(String name, Object value); - } diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBackboneElement.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBackboneElement.java index 5342bf23a96..70b2b3950af 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBackboneElement.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBackboneElement.java @@ -19,7 +19,4 @@ */ package org.hl7.fhir.instance.model.api; - -public interface IBackboneElement extends IBase { - -} +public interface IBackboneElement extends IBase {} diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBase.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBase.java index 74713fea6a8..aab99bbffe5 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBase.java +++ 
b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBase.java @@ -25,7 +25,7 @@ import java.util.List; /** * This interface is a simple marker for anything which is an HL7 * structure of some kind. It is provided mostly to simplify convergence - * between the HL7.org structures and the HAPI ones. + * between the HL7.org structures and the HAPI ones. */ public interface IBase extends Serializable { @@ -34,7 +34,7 @@ public interface IBase extends Serializable { /** * Returns true if any comments would be returned by {@link #getFormatCommentsPre()} * or {@link #getFormatCommentsPost()} - * + * * @since 1.5 */ boolean hasFormatComment(); @@ -42,7 +42,7 @@ public interface IBase extends Serializable { /** * Returns a list of comments appearing immediately before this element within the serialized * form of the resource. Creates the list if it does not exist, so this method will not return null - * + * * @since 1.5 */ List getFormatCommentsPre(); @@ -50,7 +50,7 @@ public interface IBase extends Serializable { /** * Returns a list of comments appearing immediately after this element within the serialized * form of the resource. Creates the list if it does not exist, so this method will not return null - * + * * @since 1.5 */ List getFormatCommentsPost(); @@ -58,7 +58,9 @@ public interface IBase extends Serializable { /** * Returns the FHIR type name for the given element, e.g. "Patient" or "unsignedInt" */ - default String fhirType() { return null; } + default String fhirType() { + return null; + } /** * Retrieves any user suplied data in this element @@ -69,5 +71,4 @@ public interface IBase extends Serializable { * Sets a user supplied data value in this element */ void setUserData(String theName, Object theValue); - } diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseBackboneElement.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseBackboneElement.java index e818d71cc9b..fae1041b588 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseBackboneElement.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseBackboneElement.java @@ -21,7 +21,4 @@ package org.hl7.fhir.instance.model.api; import ca.uhn.fhir.model.api.IElement; - -public interface IBaseBackboneElement extends IElement, IBaseHasExtensions, IBaseHasModifierExtensions { - -} +public interface IBaseBackboneElement extends IElement, IBaseHasExtensions, IBaseHasModifierExtensions {} diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseBinary.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseBinary.java index 9415e712b48..f7f72f23848 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseBinary.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseBinary.java @@ -19,7 +19,6 @@ */ package org.hl7.fhir.instance.model.api; - public interface IBaseBinary extends IBaseResource { byte[] getContent(); @@ -36,6 +35,8 @@ public interface IBaseBinary extends IBaseResource { IBaseBinary setContentType(String theContentType); - default boolean hasData() { return getContent() != null && getContent().length > 0; }; - + default boolean hasData() { + return getContent() != null && getContent().length > 0; + } + ; } diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseBooleanDatatype.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseBooleanDatatype.java index 60e777a6b65..e86ead1bd9d 100644 --- 
a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseBooleanDatatype.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseBooleanDatatype.java @@ -19,7 +19,4 @@ */ package org.hl7.fhir.instance.model.api; - -public interface IBaseBooleanDatatype extends IPrimitiveType { - -} +public interface IBaseBooleanDatatype extends IPrimitiveType {} diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseBundle.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseBundle.java index 50cb3ac75c5..24555462ebb 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseBundle.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseBundle.java @@ -19,28 +19,26 @@ */ package org.hl7.fhir.instance.model.api; - public interface IBaseBundle extends IBaseResource { /** * Constant for links provided in the bundle. This constant is used in the * link.type field to indicate that the given link is for - * the next page of results. + * the next page of results. */ String LINK_NEXT = "next"; - + /** * Constant for links provided in the bundle. This constant is used in the * link.type field to indicate that the given link is for - * the previous page of results. + * the previous page of results. */ String LINK_PREV = "previous"; /** * Constant for links provided in the bundle. This constant is used in the * link.type field to indicate that the given link is for - * this bundle. + * this bundle. */ String LINK_SELF = "self"; - } diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseCoding.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseCoding.java index c7b830d7d20..a7ad3a8f103 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseCoding.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseCoding.java @@ -40,6 +40,4 @@ public interface IBaseCoding extends IBase { IBaseCoding setVersion(String theVersion); IBaseCoding setUserSelected(boolean theUserSelected); - - } diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseConformance.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseConformance.java index a43b7f1e079..c655cf109a1 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseConformance.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseConformance.java @@ -19,7 +19,4 @@ */ package org.hl7.fhir.instance.model.api; - -public interface IBaseConformance extends IBaseResource { - -} +public interface IBaseConformance extends IBaseResource {} diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseDatatype.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseDatatype.java index 2258a418dfa..07b794c3bb9 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseDatatype.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseDatatype.java @@ -21,7 +21,4 @@ package org.hl7.fhir.instance.model.api; import ca.uhn.fhir.model.api.IElement; - -public interface IBaseDatatype extends IElement { - -} +public interface IBaseDatatype extends IElement {} diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseDatatypeElement.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseDatatypeElement.java index 06f0748a1a2..a57b2c2855a 100644 --- 
a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseDatatypeElement.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseDatatypeElement.java @@ -19,7 +19,4 @@ */ package org.hl7.fhir.instance.model.api; - -public interface IBaseDatatypeElement extends IBase { - -} +public interface IBaseDatatypeElement extends IBase {} diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseDecimalDatatype.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseDecimalDatatype.java index 6d28244b8ee..72a551f39cc 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseDecimalDatatype.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseDecimalDatatype.java @@ -21,6 +21,4 @@ package org.hl7.fhir.instance.model.api; import java.math.BigDecimal; -public interface IBaseDecimalDatatype extends IPrimitiveType { - -} +public interface IBaseDecimalDatatype extends IPrimitiveType {} diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseElement.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseElement.java index 1ab0354cd6d..9847808e263 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseElement.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseElement.java @@ -28,5 +28,4 @@ public interface IBaseElement { Object getUserData(String theName); void setUserData(String theName, Object theValue); - } diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseEnumFactory.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseEnumFactory.java index 05b3c4c9f50..fbb6adeb01d 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseEnumFactory.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseEnumFactory.java @@ -43,6 +43,7 @@ public interface IBaseEnumFactory> extends Serializable { /** * Get the system for a given enum value */ - default String toSystem(T theValue) { return null; } - + default String toSystem(T theValue) { + return null; + } } diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseEnumeration.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseEnumeration.java index dc5ae61bc6d..bd4c71c1d3e 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseEnumeration.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseEnumeration.java @@ -19,9 +19,9 @@ */ package org.hl7.fhir.instance.model.api; - public interface IBaseEnumeration> extends IPrimitiveType { - default IBaseEnumFactory getEnumFactory() { return null; } - + default IBaseEnumFactory getEnumFactory() { + return null; + } } diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseExtension.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseExtension.java index 50845a4a175..bc2d34528d1 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseExtension.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseExtension.java @@ -25,7 +25,7 @@ import java.util.List; * @param The actual concrete type of the extension * @param Note that this type param is not used anywhere - It is kept only to avoid making a breaking change */ -//public interface IBaseExtension, D> extends ICompositeType { +// public interface IBaseExtension, D> extends ICompositeType { public interface 
IBaseExtension extends ICompositeType { List getExtension(); @@ -37,5 +37,4 @@ public interface IBaseExtension extends ICompositeType { T setUrl(String theUrl); T setValue(IBaseDatatype theValue); - } diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseFhirEnum.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseFhirEnum.java index b939e748893..a210e8061eb 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseFhirEnum.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseFhirEnum.java @@ -19,47 +19,44 @@ */ package org.hl7.fhir.instance.model.api; - - /* Copyright (c) 2011+, HL7, Inc All rights reserved. -Redistribution and use in source and binary forms, with or without modification, +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - * Neither the name of HL7 nor the names of its contributors may be used to - endorse or promote products derived from this software without specific - prior written permission. + * Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation +and/or other materials provided with the distribution. + * Neither the name of HL7 nor the names of its contributors may be used to +endorse or promote products derived from this software without specific +prior written permission. -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. -IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, -INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT -NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, -WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. +IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT +NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ /** * Interface to be implemented by all built-in FHIR enumerations (i.e. 
the - * actual FHIR-defined Java Enum will implement this interface) + * actual FHIR-defined Java Enum will implement this interface) */ public interface IBaseFhirEnum { - /** - * Get the XML/JSON representation for an enumerated value - * @return the XML/JSON representation - */ - public String toCode(); - + /** + * Get the XML/JSON representation for an enumerated value + * @return the XML/JSON representation + */ + public String toCode(); } diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseHasExtensions.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseHasExtensions.java index e3721abd6a9..a99092b2540 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseHasExtensions.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseHasExtensions.java @@ -28,5 +28,4 @@ public interface IBaseHasExtensions extends IBase { List> getExtension(); boolean hasExtension(); - } diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseHasModifierExtensions.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseHasModifierExtensions.java index f0ca9af1fd1..88c9ee26146 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseHasModifierExtensions.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseHasModifierExtensions.java @@ -28,5 +28,4 @@ public interface IBaseHasModifierExtensions { public List> getModifierExtension(); boolean hasModifierExtension(); - } diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseIntegerDatatype.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseIntegerDatatype.java index b40b2640f79..db9d3d55b12 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseIntegerDatatype.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseIntegerDatatype.java @@ -19,7 +19,4 @@ */ package org.hl7.fhir.instance.model.api; - -public interface IBaseIntegerDatatype extends IPrimitiveType { - -} +public interface IBaseIntegerDatatype extends IPrimitiveType {} diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseLongDatatype.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseLongDatatype.java index 48af47414e1..3078fb5cc37 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseLongDatatype.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseLongDatatype.java @@ -19,7 +19,4 @@ */ package org.hl7.fhir.instance.model.api; - -public interface IBaseLongDatatype extends IPrimitiveType { - -} +public interface IBaseLongDatatype extends IPrimitiveType {} diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseMetaType.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseMetaType.java index c8b089ca6bf..385bc4de54e 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseMetaType.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseMetaType.java @@ -41,7 +41,7 @@ public interface IBaseMetaType extends ICompositeType { String getVersionId(); IBaseMetaType setLastUpdated(Date theHeaderDateValue); - + IBaseMetaType setVersionId(String theVersionId); /** @@ -50,11 +50,9 @@ public interface IBaseMetaType extends ICompositeType { */ IBaseCoding getTag(String theSystem, String theCode); - /** - * Returns the first security label (if any) that has the given system 
and code, or returns - * null if none - */ + /** + * Returns the first security label (if any) that has the given system and code, or returns + * null if none + */ IBaseCoding getSecurity(String theSystem, String theCode); - - } diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseOperationOutcome.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseOperationOutcome.java index 2b1b2aecdbc..d6073b5dfd7 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseOperationOutcome.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseOperationOutcome.java @@ -19,7 +19,4 @@ */ package org.hl7.fhir.instance.model.api; - -public interface IBaseOperationOutcome extends IBaseResource { - -} +public interface IBaseOperationOutcome extends IBaseResource {} diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseParameters.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseParameters.java index 9beaad81383..c3c681f40fc 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseParameters.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseParameters.java @@ -19,7 +19,4 @@ */ package org.hl7.fhir.instance.model.api; - -public interface IBaseParameters extends IBaseResource { - -} +public interface IBaseParameters extends IBaseResource {} diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseReference.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseReference.java index c2eaaa15dde..935fc49edab 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseReference.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseReference.java @@ -19,7 +19,6 @@ */ package org.hl7.fhir.instance.model.api; - public interface IBaseReference extends ICompositeType { IBaseResource getResource(); @@ -37,5 +36,4 @@ public interface IBaseReference extends ICompositeType { default boolean hasIdentifier() { return false; } - } diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseResource.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseResource.java index 8ce10522f74..67e747c47cb 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseResource.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseResource.java @@ -30,11 +30,11 @@ import java.util.HashSet; import java.util.Set; /** - * For now, this is a simple marker interface indicating that a class is a resource type. + * For now, this is a simple marker interface indicating that a class is a resource type. * There are two concrete types of implementations of this interrface. The first are - * HL7.org's Resource structures (e.g. + * HL7.org's Resource structures (e.g. * org.hl7.fhir.instance.model.Patient) and - * the second are HAPI's Resource structures, e.g. + * the second are HAPI's Resource structures, e.g. 
* ca.uhn.fhir.model.dstu.resource.Patient) */ public interface IBaseResource extends IBase, IElement { @@ -52,7 +52,7 @@ public interface IBaseResource extends IBase, IElement { Set WILDCARD_ALL_SET = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(INCLUDE_ALL))); IIdType getIdElement(); - + IBaseResource setId(String theId); IBaseResource setId(IIdType theId); diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseXhtml.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseXhtml.java index 2520777d940..83ee0411186 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseXhtml.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IBaseXhtml.java @@ -19,7 +19,4 @@ */ package org.hl7.fhir.instance.model.api; - -public interface IBaseXhtml extends IPrimitiveType { - -} +public interface IBaseXhtml extends IPrimitiveType {} diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/ICompositeType.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/ICompositeType.java index 0e02e68d250..2e4408dd845 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/ICompositeType.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/ICompositeType.java @@ -19,7 +19,4 @@ */ package org.hl7.fhir.instance.model.api; - -public interface ICompositeType extends IBaseDatatype { - -} +public interface ICompositeType extends IBaseDatatype {} diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IDomainResource.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IDomainResource.java index c1dacc0860c..ac500f7baae 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IDomainResource.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IDomainResource.java @@ -26,5 +26,4 @@ public interface IDomainResource extends IAnyResource, IBaseHasExtensions, IBase List getContained(); INarrative getText(); - } diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IIdType.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IIdType.java index 27eb3319706..02f29f299de 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IIdType.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IIdType.java @@ -19,16 +19,14 @@ */ package org.hl7.fhir.instance.model.api; - - /** - * Base interface for ID datatype. - * + * Base interface for ID datatype. + * *

    * Concrete Implementations: This interface is often returned and/or accepted by methods in HAPI's API - * where either {@link ca.uhn.fhir.model.primitive.IdDt} (the HAPI structure ID type) or - * org.hl7.fhir.instance.model.IdType (the RI structure ID type) will be used, depending on - * which version of the strctures your application is using. + * where either {@link ca.uhn.fhir.model.primitive.IdDt} (the HAPI structure ID type) or + * org.hl7.fhir.instance.model.IdType (the RI structure ID type) will be used, depending on + * which version of the strctures your application is using. *

    */ public interface IIdType extends IPrimitiveType { @@ -51,17 +49,16 @@ public interface IIdType extends IPrimitiveType { /** * Returns the ID part of this ID (e.g. in the ID http://example.com/Patient/123/_history/456 this would be the * part "123") parsed as a {@link Long}. - * + * * @throws NumberFormatException If the value can't be parsed as a long */ Long getIdPartAsLong(); String getResourceType(); - /** * Returns the value of this ID. Note that this value may be a fully qualified URL, a relative/partial URL, or a simple ID. Use {@link #getIdPart()} to get just the ID portion. - * + * * @see #getIdPart() */ @Override @@ -72,7 +69,7 @@ public interface IIdType extends IPrimitiveType { /** * Returns the version ID part of this ID (e.g. in the ID http://example.com/Patient/123/_history/456 this would be the * part "456") parsed as a {@link Long}. - * + * * @throws NumberFormatException If the value can't be parsed as a long */ Long getVersionIdPartAsLong(); @@ -98,7 +95,7 @@ public interface IIdType extends IPrimitiveType { boolean isEmpty(); /** - * Returns true if the {@link #getIdPart() ID part of this object} is valid according to the FHIR rules for valid IDs. + * Returns true if the {@link #getIdPart() ID part of this object} is valid according to the FHIR rules for valid IDs. *

    * The FHIR specification states: * Any combination of upper or lower case ASCII letters ('A'..'Z', and 'a'..'z', numerals ('0'..'9'), '-' and '.', with a length limit of 64 characters. (This might be an integer, an un-prefixed OID, UUID or any other identifier pattern that meets these constraints.) regex: [A-Za-z0-9\-\.]{1,64} @@ -108,7 +105,7 @@ public interface IIdType extends IPrimitiveType { /** * Returns true if the {@link #getIdPart() ID part of this object} contains - * only numbers + * only numbers */ boolean isIdPartValidLong(); @@ -119,7 +116,7 @@ public interface IIdType extends IPrimitiveType { /** * Returns true if the {@link #getVersionIdPart() version ID part of this object} contains - * only numbers + * only numbers */ boolean isVersionIdPartValidLong(); @@ -133,7 +130,7 @@ public interface IIdType extends IPrimitiveType { IIdType toVersionless(); /** - * Returns a copy of this object, but with a different {@link #getResourceType() resource type} + * Returns a copy of this object, but with a different {@link #getResourceType() resource type} * (or if this object does not have a resource type currently, returns a copy of this object with * the given resource type). *

    @@ -143,9 +140,9 @@ public interface IIdType extends IPrimitiveType { *

    */ IIdType withResourceType(String theResName); - + /** - * Returns a copy of this object, but with a different {@link #getResourceType() resource type} + * Returns a copy of this object, but with a different {@link #getResourceType() resource type} * and {@link #getBaseUrl() base URL} * (or if this object does not have a resource type currently, returns a copy of this object with * the given server base and resource type). @@ -158,7 +155,7 @@ public interface IIdType extends IPrimitiveType { IIdType withServerBase(String theServerBase, String theResourceName); /** - * Returns a copy of this object, but with a different {@link #getVersionIdPart() version ID} + * Returns a copy of this object, but with a different {@link #getVersionIdPart() version ID} * (or if this object does not have a resource type currently, returns a copy of this object with * the given version). *

    @@ -180,9 +177,8 @@ public interface IIdType extends IPrimitiveType { *

 * <li>If theVersionIdPart is populated, theResourceType and theIdPart must be populated</li>
 * <li>If theBaseUrl is populated and theIdPart is populated, theResourceType must be populated</li>
  • * - * + * * @return Returns a reference to this for easy method chaining */ IIdType setParts(String theBaseUrl, String theResourceType, String theIdPart, String theVersionIdPart); - } diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/INarrative.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/INarrative.java index 0ebaa620b3a..7f5ad13cb98 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/INarrative.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/INarrative.java @@ -19,7 +19,6 @@ */ package org.hl7.fhir.instance.model.api; - public interface INarrative extends ICompositeType { @Override @@ -34,5 +33,4 @@ public interface INarrative extends ICompositeType { public INarrative setStatusAsString(String theString); public String getStatusAsString(); - } diff --git a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IPrimitiveType.java b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IPrimitiveType.java index 33bcc293d53..902db207996 100644 --- a/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IPrimitiveType.java +++ b/hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IPrimitiveType.java @@ -19,8 +19,6 @@ */ package org.hl7.fhir.instance.model.api; - - import javax.annotation.Nullable; public interface IPrimitiveType extends IBaseDatatype { @@ -42,5 +40,4 @@ public interface IPrimitiveType extends IBaseDatatype { static T toValueOrNull(@Nullable IPrimitiveType thePrimitiveType) { return thePrimitiveType != null ? thePrimitiveType.getValue() : null; } - } diff --git a/hapi-fhir-checkstyle/src/main/java/ca/uhn/fhir/checks/HapiErrorCodeCheck.java b/hapi-fhir-checkstyle/src/main/java/ca/uhn/fhir/checks/HapiErrorCodeCheck.java index 6a22f208d72..90ab3b487a8 100644 --- a/hapi-fhir-checkstyle/src/main/java/ca/uhn/fhir/checks/HapiErrorCodeCheck.java +++ b/hapi-fhir-checkstyle/src/main/java/ca/uhn/fhir/checks/HapiErrorCodeCheck.java @@ -24,7 +24,7 @@ public final class HapiErrorCodeCheck extends AbstractCheck { @Override public int[] getRequiredTokens() { - return new int[]{ + return new int[] { TokenTypes.LITERAL_THROW, }; } @@ -59,15 +59,20 @@ public final class HapiErrorCodeCheck extends AbstractCheck { if (msgNode == null) { log(theAst.getLineNo(), "Exception thrown that does not call Msg.code()"); } else { - DetailAST numberNode = msgNode.getParent().getNextSibling().getFirstChild().getFirstChild(); + DetailAST numberNode = + msgNode.getParent().getNextSibling().getFirstChild().getFirstChild(); if (TokenTypes.NUM_INT == numberNode.getType()) { Integer code = Integer.valueOf(numberNode.getText()); if (ourCache.containsKey(code)) { - log(theAst.getLineNo(), "Two different exception messages call Msg.code(" + - code + "). \nEach thrown exception must call Msg.code() with a different code. " + - "\nPreviously found at: " + ourCache.get(code)); + log( + theAst.getLineNo(), + "Two different exception messages call Msg.code(" + code + + "). \nEach thrown exception must call Msg.code() with a different code. 
" + + "\nPreviously found at: " + + ourCache.get(code)); } else { - String location = getFilePath() + ":" + instantiation.getLineNo() + ":" + instantiation.getColumnNo() + "(" + code + ")"; + String location = getFilePath() + ":" + instantiation.getLineNo() + ":" + + instantiation.getColumnNo() + "(" + code + ")"; ourCache.put(code, location); } } else { @@ -78,7 +83,6 @@ public final class HapiErrorCodeCheck extends AbstractCheck { private DetailAST getMsgNodeOrNull(DetailAST theNode) { - if (TokenTypes.IDENT == theNode.getType() && "Msg".equals(theNode.getText())) { return theNode; } @@ -109,8 +113,7 @@ public final class HapiErrorCodeCheck extends AbstractCheck { private static final Map ourCodesUsed = new HashMap<>(); - ErrorCodeCache() { - } + ErrorCodeCache() {} public boolean containsKey(Integer s) { return ourCodesUsed.containsKey(s); @@ -129,4 +132,3 @@ public final class HapiErrorCodeCheck extends AbstractCheck { } } } - diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/AbstractImportExportCsvConceptMapCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/AbstractImportExportCsvConceptMapCommand.java index 8a9d7f2f265..bdc6d618b20 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/AbstractImportExportCsvConceptMapCommand.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/AbstractImportExportCsvConceptMapCommand.java @@ -48,27 +48,28 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; public abstract class AbstractImportExportCsvConceptMapCommand extends BaseRequestGeneratingCommand { // TODO: Don't use qualified names for loggers in HAPI CLI. - private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(AbstractImportExportCsvConceptMapCommand.class); + private static final org.slf4j.Logger ourLog = + org.slf4j.LoggerFactory.getLogger(AbstractImportExportCsvConceptMapCommand.class); protected static final String CONCEPTMAP_URL_PARAM = "u"; protected static final String CONCEPTMAP_URL_PARAM_LONGOPT = "url"; protected static final String CONCEPTMAP_URL_PARAM_NAME = "url"; - protected static final String CONCEPTMAP_URL_PARAM_DESC = "The URL of the ConceptMap resource to be imported/exported (i.e. ConceptMap.url)."; + protected static final String CONCEPTMAP_URL_PARAM_DESC = + "The URL of the ConceptMap resource to be imported/exported (i.e. ConceptMap.url)."; protected static final String FILE_PARAM = "f"; protected static final String FILE_PARAM_LONGOPT = "filename"; protected static final String FILE_PARAM_NAME = "filename"; - protected static final String FILE_PARAM_DESC = "The path and filename of the CSV file to be imported/exported (e.g. ./input.csv, ./output.csv, etc.)."; + protected static final String FILE_PARAM_DESC = + "The path and filename of the CSV file to be imported/exported (e.g. 
./input.csv, ./output.csv, etc.)."; protected IGenericClient client; protected String conceptMapUrl; protected FhirVersionEnum fhirVersion; protected String file; - @Override protected Collection getFilterOutVersions() { - return Sets.newHashSet(FhirVersionEnum.DSTU2_1, - FhirVersionEnum.DSTU2_HL7ORG, FhirVersionEnum.DSTU2); + return Sets.newHashSet(FhirVersionEnum.DSTU2_1, FhirVersionEnum.DSTU2_HL7ORG, FhirVersionEnum.DSTU2); } protected BufferedReader getBufferedReader() throws IOException { @@ -81,13 +82,13 @@ public abstract class AbstractImportExportCsvConceptMapCommand extends BaseReque protected BOMInputStream getBOMInputStream() throws IOException { return new BOMInputStream( - getInputStream(), - false, - ByteOrderMark.UTF_8, - ByteOrderMark.UTF_16BE, - ByteOrderMark.UTF_16LE, - ByteOrderMark.UTF_32BE, - ByteOrderMark.UTF_32LE); + getInputStream(), + false, + ByteOrderMark.UTF_8, + ByteOrderMark.UTF_16BE, + ByteOrderMark.UTF_16LE, + ByteOrderMark.UTF_32BE, + ByteOrderMark.UTF_32LE); } protected InputStream getInputStream() throws IOException { @@ -103,7 +104,8 @@ public abstract class AbstractImportExportCsvConceptMapCommand extends BaseReque if (isBlank(targetServer)) { throw new ParseException(Msg.code(1583) + "No target server (-" + BASE_URL_PARAM + ") specified."); } else if (!targetServer.startsWith("http") && !targetServer.startsWith("file")) { - throw new ParseException(Msg.code(1584) + "Invalid target server specified, must begin with 'http' or 'file'."); + throw new ParseException( + Msg.code(1584) + "Invalid target server specified, must begin with 'http' or 'file'."); } conceptMapUrl = theCommandLine.getOptionValue(CONCEPTMAP_URL_PARAM); @@ -125,9 +127,9 @@ public abstract class AbstractImportExportCsvConceptMapCommand extends BaseReque client = super.newClient(theCommandLine); fhirVersion = ctx.getVersion().getVersion(); - if (fhirVersion != FhirVersionEnum.DSTU3 - && fhirVersion != FhirVersionEnum.R4) { - throw new ParseException(Msg.code(1587) + "This command does not support FHIR version " + fhirVersion + "."); + if (fhirVersion != FhirVersionEnum.DSTU3 && fhirVersion != FhirVersionEnum.R4) { + throw new ParseException( + Msg.code(1587) + "This command does not support FHIR version " + fhirVersion + "."); } if (theCommandLine.hasOption(VERBOSE_LOGGING_PARAM)) { @@ -160,10 +162,10 @@ public abstract class AbstractImportExportCsvConceptMapCommand extends BaseReque private String target; private String targetVersion; - public TemporaryConceptMapGroup() { - } + public TemporaryConceptMapGroup() {} - public TemporaryConceptMapGroup(String theSource, String theSourceVersion, String theTarget, String theTargetVersion) { + public TemporaryConceptMapGroup( + String theSource, String theSourceVersion, String theTarget, String theTargetVersion) { this.source = theSource; this.sourceVersion = theSourceVersion; this.target = theTarget; @@ -235,21 +237,21 @@ public abstract class AbstractImportExportCsvConceptMapCommand extends BaseReque TemporaryConceptMapGroup that = (TemporaryConceptMapGroup) o; return new EqualsBuilder() - .append(getSource(), that.getSource()) - .append(getSourceVersion(), that.getSourceVersion()) - .append(getTarget(), that.getTarget()) - .append(getTargetVersion(), that.getTargetVersion()) - .isEquals(); + .append(getSource(), that.getSource()) + .append(getSourceVersion(), that.getSourceVersion()) + .append(getTarget(), that.getTarget()) + .append(getTargetVersion(), that.getTargetVersion()) + .isEquals(); } @Override public int hashCode() { return new 
HashCodeBuilder(17, 37) - .append(getSource()) - .append(getSourceVersion()) - .append(getTarget()) - .append(getTargetVersion()) - .toHashCode(); + .append(getSource()) + .append(getSourceVersion()) + .append(getTarget()) + .append(getTargetVersion()) + .toHashCode(); } } @@ -257,8 +259,7 @@ public abstract class AbstractImportExportCsvConceptMapCommand extends BaseReque private String code; private String display; - public TemporaryConceptMapGroupElement() { - } + public TemporaryConceptMapGroupElement() {} public TemporaryConceptMapGroupElement(String theCode, String theDisplay) { this.code = theCode; @@ -304,17 +305,17 @@ public abstract class AbstractImportExportCsvConceptMapCommand extends BaseReque TemporaryConceptMapGroupElement that = (TemporaryConceptMapGroupElement) o; return new EqualsBuilder() - .append(getCode(), that.getCode()) - .append(getDisplay(), that.getDisplay()) - .isEquals(); + .append(getCode(), that.getCode()) + .append(getDisplay(), that.getDisplay()) + .isEquals(); } @Override public int hashCode() { return new HashCodeBuilder(17, 37) - .append(getCode()) - .append(getDisplay()) - .toHashCode(); + .append(getCode()) + .append(getDisplay()) + .toHashCode(); } } @@ -324,10 +325,10 @@ public abstract class AbstractImportExportCsvConceptMapCommand extends BaseReque private String equivalence; private String comment; - public TemporaryConceptMapGroupElementTarget() { - } + public TemporaryConceptMapGroupElementTarget() {} - public TemporaryConceptMapGroupElementTarget(String theCode, String theDisplay, String theEquivalence, String theComment) { + public TemporaryConceptMapGroupElementTarget( + String theCode, String theDisplay, String theEquivalence, String theComment) { this.code = theCode; this.display = theDisplay; this.equivalence = theEquivalence; @@ -399,21 +400,21 @@ public abstract class AbstractImportExportCsvConceptMapCommand extends BaseReque TemporaryConceptMapGroupElementTarget that = (TemporaryConceptMapGroupElementTarget) o; return new EqualsBuilder() - .append(getCode(), that.getCode()) - .append(getDisplay(), that.getDisplay()) - .append(getEquivalence(), that.getEquivalence()) - .append(getComment(), that.getComment()) - .isEquals(); + .append(getCode(), that.getCode()) + .append(getDisplay(), that.getDisplay()) + .append(getEquivalence(), that.getEquivalence()) + .append(getComment(), that.getComment()) + .isEquals(); } @Override public int hashCode() { return new HashCodeBuilder(17, 37) - .append(getCode()) - .append(getDisplay()) - .append(getEquivalence()) - .append(getComment()) - .toHashCode(); + .append(getCode()) + .append(getDisplay()) + .append(getEquivalence()) + .append(getComment()) + .toHashCode(); } } } diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseApp.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseApp.java index eef1f358258..9adf11b102a 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseApp.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseApp.java @@ -52,7 +52,8 @@ import static org.fusesource.jansi.Ansi.ansi; public abstract class BaseApp { protected static final org.slf4j.Logger ourLog; static final String LINESEP = System.getProperty("line.separator"); - private static final String STACKFILTER_PATTERN = "%xEx{full, sun.reflect, org.junit, org.eclipse, java.lang.reflect.Method, org.springframework, org.hibernate, com.sun.proxy, org.attoparser, org.thymeleaf}"; + private static final String STACKFILTER_PATTERN = + 
"%xEx{full, sun.reflect, org.junit, org.eclipse, java.lang.reflect.Method, org.springframework, org.hibernate, com.sun.proxy, org.attoparser, org.thymeleaf}"; private static List ourCommands; private static boolean ourDebugMode; @@ -64,7 +65,7 @@ public abstract class BaseApp { ourLog = LoggerFactory.getLogger(App.class); } - private Consumer myStartupHook = noop->{}; + private Consumer myStartupHook = noop -> {}; private MyShutdownHook myShutdownHook; private boolean myShutdownHookHasNotRun; @@ -74,8 +75,10 @@ public abstract class BaseApp { printMessageToStdout(msg); logProductName(); printMessageToStdout("------------------------------------------------------------"); - printMessageToStdout("Process ID : " + ManagementFactory.getRuntimeMXBean().getName()); - printMessageToStdout("Max configured JVM memory (Xmx) : " + FileHelper.getFileSizeDisplay(Runtime.getRuntime().maxMemory(), 1)); + printMessageToStdout("Process ID : " + + ManagementFactory.getRuntimeMXBean().getName()); + printMessageToStdout("Max configured JVM memory (Xmx) : " + + FileHelper.getFileSizeDisplay(Runtime.getRuntime().maxMemory(), 1)); printMessageToStdout("Detected Java version : " + System.getProperty("java.version")); printMessageToStdout("------------------------------------------------------------"); } @@ -90,7 +93,8 @@ public abstract class BaseApp { } protected void logProductName() { - printMessageToStdout("\ud83d\udd25 " + ansi().bold() + " " + provideProductName() + ansi().boldOff() + " " + provideProductVersion() + " - Command Line Tool"); + printMessageToStdout("\ud83d\udd25 " + ansi().bold() + " " + provideProductName() + ansi().boldOff() + " " + + provideProductVersion() + " - Command Line Tool"); } private void logCommandUsage(BaseCommand theCommand) { @@ -155,16 +159,20 @@ public abstract class BaseApp { int longestCommandLength = 0; for (BaseCommand next : ourCommands) { - longestCommandLength = Math.max(longestCommandLength, next.getCommandName().length()); + longestCommandLength = + Math.max(longestCommandLength, next.getCommandName().length()); } for (BaseCommand next : ourCommands) { String left = " " + StringUtils.rightPad(next.getCommandName(), longestCommandLength); - String[] rightParts = WordUtils.wrap(next.getCommandDescription(), 80 - (left.length() + 3)).split("\\n"); + String[] rightParts = WordUtils.wrap(next.getCommandDescription(), 80 - (left.length() + 3)) + .split("\\n"); for (int i = 1; i < rightParts.length; i++) { rightParts[i] = StringUtils.leftPad("", left.length() + 3) + rightParts[i]; } - printMessageToStdout(ansi().bold().fg(Ansi.Color.GREEN) + left + ansi().boldOff().fg(Ansi.Color.WHITE) + " - " + ansi().bold() + StringUtils.join(rightParts, LINESEP)); + printMessageToStdout( + ansi().bold().fg(Ansi.Color.GREEN) + left + ansi().boldOff().fg(Ansi.Color.WHITE) + " - " + + ansi().bold() + StringUtils.join(rightParts, LINESEP)); } printMessageToStdout(""); printMessageToStdout(ansi().boldOff().fg(Ansi.Color.WHITE) + "See what options are available:"); @@ -212,7 +220,6 @@ public abstract class BaseApp { ourCommands.addAll(provideCommands()); Collections.sort(ourCommands); - if (theArgs.length == 0) { logUsage(); return; @@ -224,7 +231,7 @@ public abstract class BaseApp { } Optional commandOpt = parseCommand(theArgs); - if (commandOpt.isEmpty()) return; + if (commandOpt.isEmpty()) return; BaseCommand command = commandOpt.get(); @@ -250,7 +257,8 @@ public abstract class BaseApp { String[] args = Arrays.copyOfRange(theArgs, 1, theArgs.length); parsedOptions = parser.parse(options, 
args, true); if (!parsedOptions.getArgList().isEmpty()) { - throw new ParseException(Msg.code(1555) + "Unrecognized argument: " + parsedOptions.getArgList().get(0)); + throw new ParseException(Msg.code(1555) + "Unrecognized argument: " + + parsedOptions.getArgList().get(0)); } if (parsedOptions.hasOption("debug")) { @@ -289,14 +297,14 @@ public abstract class BaseApp { runCleanupHookAndUnregister(); exitDueToException(new CommandFailureException("Error: " + t, t)); } - } private Optional parseCommand(String[] theArgs) { Optional commandOpt = getNextCommand(theArgs, 0); if (commandOpt.isEmpty()) { - String message = "Unrecognized command: " + ansi().bold().fg(Ansi.Color.RED) + theArgs[0] + ansi().boldOff().fg(Ansi.Color.WHITE); + String message = "Unrecognized command: " + ansi().bold().fg(Ansi.Color.RED) + theArgs[0] + + ansi().boldOff().fg(Ansi.Color.WHITE); printMessageToStdout(message); printMessageToStdout(""); logUsage(); @@ -306,7 +314,9 @@ public abstract class BaseApp { } private Optional getNextCommand(String[] theArgs, int thePosition) { - return ourCommands.stream().filter(cmd -> cmd.getCommandName().equals(theArgs[thePosition])).findFirst(); + return ourCommands.stream() + .filter(cmd -> cmd.getCommandName().equals(theArgs[thePosition])) + .findFirst(); } private void processHelp(String[] theArgs) { @@ -324,7 +334,6 @@ public abstract class BaseApp { logCommandUsage(commandOpt.get()); } - private void exitDueToProblem(String theDescription) { if (HapiSystemProperties.isTestModeEnabled()) { throw new Error(Msg.code(1556) + theDescription); diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseClearMigrationLockCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseClearMigrationLockCommand.java index d2fdba59d80..eecfd9d9dd2 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseClearMigrationLockCommand.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseClearMigrationLockCommand.java @@ -42,7 +42,6 @@ public abstract class BaseClearMigrationLockCommand extends BaseCommand { return "This command clears a database migration lock"; } - @Override public String getCommandName() { return CLEAR_LOCK; @@ -54,7 +53,8 @@ public abstract class BaseClearMigrationLockCommand extends BaseCommand { addRequiredOption(retVal, "u", "url", "URL", "The JDBC database URL"); addRequiredOption(retVal, "n", "username", "Username", "The JDBC database username"); addRequiredOption(retVal, "p", "password", "Password", "The JDBC database password"); - addRequiredOption(retVal, "d", "driver", "Driver", "The database driver to use (Options are " + driverOptions() + ")"); + addRequiredOption( + retVal, "d", "driver", "Driver", "The database driver to use (Options are " + driverOptions() + ")"); addRequiredOption(retVal, "l", "lock-uuid", "Lock UUID", "The UUID value of the lock held in the database."); return retVal; } @@ -75,16 +75,17 @@ public abstract class BaseClearMigrationLockCommand extends BaseCommand { try { driverType = DriverTypeEnum.valueOf(driverTypeString); } catch (Exception e) { - throw new ParseException(Msg.code(2774) + "Invalid driver type \"" + driverTypeString + "\". Valid values are: " + driverOptions()); + throw new ParseException(Msg.code(2774) + "Invalid driver type \"" + driverTypeString + + "\". 
Valid values are: " + driverOptions()); } - - DriverTypeEnum.ConnectionProperties connectionProperties = driverType.newConnectionProperties(url, username, password); - HapiMigrator migrator = new HapiMigrator(myMigrationTableName, connectionProperties.getDataSource(), driverType); + DriverTypeEnum.ConnectionProperties connectionProperties = + driverType.newConnectionProperties(url, username, password); + HapiMigrator migrator = + new HapiMigrator(myMigrationTableName, connectionProperties.getDataSource(), driverType); migrator.clearMigrationLockWithUUID(lockUUID); } - protected void setMigrationTableName(String theMigrationTableName) { myMigrationTableName = theMigrationTableName; } diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseCommand.java index a2f61980875..7e9da8b5f25 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseCommand.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseCommand.java @@ -27,8 +27,8 @@ import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.client.api.IGenericClient; import ca.uhn.fhir.rest.client.impl.RestfulClientFactory; import ca.uhn.fhir.rest.client.interceptor.SimpleRequestHeaderInterceptor; -import ca.uhn.fhir.tls.TlsAuthentication; import ca.uhn.fhir.tls.KeyStoreInfo; +import ca.uhn.fhir.tls.TlsAuthentication; import ca.uhn.fhir.tls.TrustStoreInfo; import com.google.common.base.Charsets; import com.google.common.collect.Sets; @@ -56,7 +56,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.util.Base64Utils; -import javax.annotation.Nullable; import java.io.BufferedReader; import java.io.Console; import java.io.File; @@ -76,6 +75,7 @@ import java.util.Optional; import java.util.concurrent.ExecutionException; import java.util.stream.Collectors; import java.util.zip.GZIPInputStream; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -87,17 +87,21 @@ public abstract class BaseCommand implements Comparable { protected static final String BASE_URL_PARAM = "t"; protected static final String BASE_URL_PARAM_LONGOPT = "target"; protected static final String BASE_URL_PARAM_NAME = "target"; - protected static final String BASE_URL_PARAM_DESC = "Base URL for the target server (e.g. \"http://example.com/fhir\")."; + protected static final String BASE_URL_PARAM_DESC = + "Base URL for the target server (e.g. \"http://example.com/fhir\")."; protected static final String TLS_AUTH_PARAM_LONGOPT = "tls-auth"; protected static final String TLS_AUTH_PARAM_NAME = "tls-auth"; - protected static final String TLS_AUTH_PARAM_DESC = "If specified, this parameter supplies a path and filename for a json authentication file that will be used to authenticate HTTPS requests."; + protected static final String TLS_AUTH_PARAM_DESC = + "If specified, this parameter supplies a path and filename for a json authentication file that will be used to authenticate HTTPS requests."; protected static final String BASIC_AUTH_PARAM = "b"; protected static final String BASIC_AUTH_PARAM_LONGOPT = "basic-auth"; protected static final String BASIC_AUTH_PARAM_NAME = "basic-auth"; - protected static final String BASIC_AUTH_PARAM_DESC = "If specified, this parameter supplies a username and password (in the format \"username:password\") to include in an HTTP Basic Auth header. 
The value \"PROMPT\" may also be used to specify that an interactive prompt should request credentials from the user."; + protected static final String BASIC_AUTH_PARAM_DESC = + "If specified, this parameter supplies a username and password (in the format \"username:password\") to include in an HTTP Basic Auth header. The value \"PROMPT\" may also be used to specify that an interactive prompt should request credentials from the user."; protected static final String BEARER_TOKEN_PARAM_LONGOPT = "bearer-token"; protected static final String BEARER_TOKEN_PARAM_NAME = "bearer-token"; - protected static final String BEARER_TOKEN_PARAM_DESC = "If specified, this parameter supplies a Bearer Token to supply with the request. The value \"PROMPT\" may also be used to specify that an interactive prompt should request a Bearer Token from the user."; + protected static final String BEARER_TOKEN_PARAM_DESC = + "If specified, this parameter supplies a Bearer Token to supply with the request. The value \"PROMPT\" may also be used to specify that an interactive prompt should request a Bearer Token from the user."; protected static final String FHIR_VERSION_PARAM = "v"; protected static final String FHIR_VERSION_PARAM_LONGOPT = "fhir-version"; protected static final String FHIR_VERSION_PARAM_NAME = "version"; @@ -121,19 +125,26 @@ public abstract class BaseCommand implements Comparable { } protected void addBasicAuthOption(Options theOptions) { - addOptionalOption(theOptions, BASIC_AUTH_PARAM, BASIC_AUTH_PARAM_LONGOPT, BASIC_AUTH_PARAM_NAME, BASIC_AUTH_PARAM_DESC); - addOptionalOption(theOptions, null, BEARER_TOKEN_PARAM_LONGOPT, BEARER_TOKEN_PARAM_NAME, BEARER_TOKEN_PARAM_DESC); + addOptionalOption( + theOptions, BASIC_AUTH_PARAM, BASIC_AUTH_PARAM_LONGOPT, BASIC_AUTH_PARAM_NAME, BASIC_AUTH_PARAM_DESC); + addOptionalOption( + theOptions, null, BEARER_TOKEN_PARAM_LONGOPT, BEARER_TOKEN_PARAM_NAME, BEARER_TOKEN_PARAM_DESC); } protected void addThreadCountOption(Options theOptions) { - addOptionalOption(theOptions, null, THREAD_COUNT, "count", "If specified, this argument specifies the number of worker threads used (default is " + DEFAULT_THREAD_COUNT + ")"); + addOptionalOption( + theOptions, + null, + THREAD_COUNT, + "count", + "If specified, this argument specifies the number of worker threads used (default is " + + DEFAULT_THREAD_COUNT + ")"); } - protected void addHttpsAuthOption(Options theOptions){ + protected void addHttpsAuthOption(Options theOptions) { addOptionalOption(theOptions, null, TLS_AUTH_PARAM_LONGOPT, TLS_AUTH_PARAM_NAME, TLS_AUTH_PARAM_DESC); } - protected String promptUser(String thePrompt) throws ParseException { System.out.print(ansi().bold().fgBrightDefault()); System.out.print(thePrompt); @@ -164,15 +175,27 @@ public abstract class BaseCommand implements Comparable { protected void addFhirVersionOption(Options theOptions) { String versions = Arrays.stream(FhirVersionEnum.values()) - .filter(t -> ! 
getFilterOutVersions().contains(t)) - .map(t -> t.name().toLowerCase()) - .sorted() - .collect(Collectors.joining(", ")); - addRequiredOption(theOptions, FHIR_VERSION_PARAM, FHIR_VERSION_PARAM_LONGOPT, FHIR_VERSION_PARAM_NAME, FHIR_VERSION_PARAM_DESC + versions); + .filter(t -> !getFilterOutVersions().contains(t)) + .map(t -> t.name().toLowerCase()) + .sorted() + .collect(Collectors.joining(", ")); + addRequiredOption( + theOptions, + FHIR_VERSION_PARAM, + FHIR_VERSION_PARAM_LONGOPT, + FHIR_VERSION_PARAM_NAME, + FHIR_VERSION_PARAM_DESC + versions); } - - private void addOption(Options theOptions, OptionGroup theOptionGroup, boolean theRequired, String theOpt, String theLongOpt, boolean theHasArgument, String theArgumentName, String theDescription) { + private void addOption( + Options theOptions, + OptionGroup theOptionGroup, + boolean theRequired, + String theOpt, + String theLongOpt, + boolean theHasArgument, + String theArgumentName, + String theDescription) { Option option = createOption(theRequired, theOpt, theLongOpt, theHasArgument, theDescription); if (theHasArgument && isNotBlank(theArgumentName)) { option.setArgName(theArgumentName); @@ -182,7 +205,8 @@ public abstract class BaseCommand implements Comparable { if (theOptions.getOption(theOpt) != null) { throw new IllegalStateException(Msg.code(1567) + "Duplicate option: " + theOpt); } - if (theOptionGroup != null && theOptionGroup.getOptions().stream().anyMatch(t -> theOpt.equals(t.getOpt()))) { + if (theOptionGroup != null + && theOptionGroup.getOptions().stream().anyMatch(t -> theOpt.equals(t.getOpt()))) { throw new IllegalStateException(Msg.code(1568) + "Duplicate option: " + theOpt); } } @@ -190,7 +214,8 @@ public abstract class BaseCommand implements Comparable { if (theOptions.getOption(theLongOpt) != null) { throw new IllegalStateException(Msg.code(1569) + "Duplicate option: " + theLongOpt); } - if (theOptionGroup != null && theOptionGroup.getOptions().stream().anyMatch(t -> theLongOpt.equals(t.getLongOpt()))) { + if (theOptionGroup != null + && theOptionGroup.getOptions().stream().anyMatch(t -> theLongOpt.equals(t.getLongOpt()))) { throw new IllegalStateException(Msg.code(1570) + "Duplicate option: " + theOpt); } } @@ -202,28 +227,49 @@ public abstract class BaseCommand implements Comparable { } } - protected void addOptionalOption(Options theOptions, String theOpt, String theLong, boolean theTakesArgument, String theDescription) { + protected void addOptionalOption( + Options theOptions, String theOpt, String theLong, boolean theTakesArgument, String theDescription) { addOption(theOptions, null, false, theOpt, theLong, theTakesArgument, null, theDescription); } - protected void addOptionalOption(Options theOptions, String theOpt, String theLong, String theArgumentName, String theDescription) { - addOption(theOptions, null, false, theOpt, theLong, isNotBlank(theArgumentName), theArgumentName, theDescription); + protected void addOptionalOption( + Options theOptions, String theOpt, String theLong, String theArgumentName, String theDescription) { + addOption( + theOptions, null, false, theOpt, theLong, isNotBlank(theArgumentName), theArgumentName, theDescription); } - protected void addOptionalOption(Options theOptions, OptionGroup theOptionGroup, String theOpt, String theLong, String theArgumentName, String theDescription) { - addOption(theOptions, theOptionGroup, false, theOpt, theLong, isNotBlank(theArgumentName), theArgumentName, theDescription); + protected void addOptionalOption( + Options theOptions, + OptionGroup 
theOptionGroup, + String theOpt, + String theLong, + String theArgumentName, + String theDescription) { + addOption( + theOptions, + theOptionGroup, + false, + theOpt, + theLong, + isNotBlank(theArgumentName), + theArgumentName, + theDescription); } - protected void addRequiredOption(Options theOptions, String theOpt, String theLong, boolean theTakesArgument, String theDescription) { + protected void addRequiredOption( + Options theOptions, String theOpt, String theLong, boolean theTakesArgument, String theDescription) { addOption(theOptions, null, true, theOpt, theLong, theTakesArgument, null, theDescription); } - protected void addRequiredOption(Options theOptions, String theOpt, String theLong, String theArgumentName, String theDescription) { - addOption(theOptions, null, true, theOpt, theLong, isNotBlank(theArgumentName), theArgumentName, theDescription); + protected void addRequiredOption( + Options theOptions, String theOpt, String theLong, String theArgumentName, String theDescription) { + addOption( + theOptions, null, true, theOpt, theLong, isNotBlank(theArgumentName), theArgumentName, theDescription); } protected void addVerboseLoggingOption(Options theOptions) { - addOptionalOption(theOptions, VERBOSE_LOGGING_PARAM, VERBOSE_LOGGING_PARAM_LONGOPT, false, VERBOSE_LOGGING_PARAM_DESC); + addOptionalOption( + theOptions, VERBOSE_LOGGING_PARAM, VERBOSE_LOGGING_PARAM_LONGOPT, false, VERBOSE_LOGGING_PARAM_DESC); } /** @@ -238,7 +284,8 @@ public abstract class BaseCommand implements Comparable { return getCommandName().compareTo(theO.getCommandName()); } - private Option createOption(boolean theRequired, String theOpt, String theLong, boolean theHasArgument, String theDescription) { + private Option createOption( + boolean theRequired, String theOpt, String theLong, boolean theHasArgument, String theDescription) { Option option = new Option(theOpt, theLong, theHasArgument, theDescription); option.setRequired(theRequired); return option; @@ -305,7 +352,8 @@ public abstract class BaseCommand implements Comparable { /** * @return Returns the complete authorization header value using an arbitrary option */ - protected String getAndParseOptionBasicAuthHeader(CommandLine theCommandLine, String theOptionName) throws ParseException { + protected String getAndParseOptionBasicAuthHeader(CommandLine theCommandLine, String theOptionName) + throws ParseException { String basicAuthHeaderValue = null; if (theCommandLine.hasOption(theOptionName)) { String optionValue = theCommandLine.getOptionValue(theOptionName); @@ -321,13 +369,12 @@ public abstract class BaseCommand implements Comparable { return basicAuthHeaderValue; } - - protected Pair parseNameValueParameter( - String separator, String theParamName, String theParam) throws ParseException { + protected Pair parseNameValueParameter(String separator, String theParamName, String theParam) + throws ParseException { String errorMsg = "Parameter " + theParamName + " must be in the format: \"name:value\""; - if (! 
theParam.contains(separator)) { + if (!theParam.contains(separator)) { throw new ParseException(Msg.code(1571) + errorMsg); } @@ -343,8 +390,9 @@ public abstract class BaseCommand implements Comparable { return Pair.of(nameValue[0], nameValue[1]); } - - public T getAndParseOptionEnum(CommandLine theCommandLine, String theOption, Class theEnumClass, boolean theRequired, T theDefault) throws ParseException { + public T getAndParseOptionEnum( + CommandLine theCommandLine, String theOption, Class theEnumClass, boolean theRequired, T theDefault) + throws ParseException { String val = theCommandLine.getOptionValue(theOption); if (isBlank(val)) { if (theRequired && theDefault == null) { @@ -359,7 +407,8 @@ public abstract class BaseCommand implements Comparable { } } - public Integer getAndParseNonNegativeIntegerParam(CommandLine theCommandLine, String theName) throws ParseException { + public Integer getAndParseNonNegativeIntegerParam(CommandLine theCommandLine, String theName) + throws ParseException { int minimum = 0; return doGetAndParseIntegerParam(theCommandLine, theName, minimum); } @@ -370,7 +419,8 @@ public abstract class BaseCommand implements Comparable { } @Nullable - private Integer doGetAndParseIntegerParam(CommandLine theCommandLine, String theName, int minimum) throws ParseException { + private Integer doGetAndParseIntegerParam(CommandLine theCommandLine, String theName, int minimum) + throws ParseException { String value = theCommandLine.getOptionValue(theName); value = trim(value); if (isBlank(value)) { @@ -380,11 +430,13 @@ public abstract class BaseCommand implements Comparable { try { int valueInt = Integer.parseInt(value); if (valueInt < minimum) { - throw new ParseException(Msg.code(1576) + "Value for argument " + theName + " must be an integer >= " + minimum + ", got: " + value); + throw new ParseException(Msg.code(1576) + "Value for argument " + theName + " must be an integer >= " + + minimum + ", got: " + value); } return valueInt; } catch (NumberFormatException e) { - throw new ParseException(Msg.code(1577) + "Value for argument " + theName + " must be an integer >= " + minimum + ", got: " + value); + throw new ParseException(Msg.code(1577) + "Value for argument " + theName + " must be an integer >= " + + minimum + ", got: " + value); } } @@ -408,7 +460,8 @@ public abstract class BaseCommand implements Comparable { public abstract Options getOptions(); - protected Collection loadFile(String theSpecUrl, String theFilepath, boolean theCacheFile) throws IOException { + protected Collection loadFile(String theSpecUrl, String theFilepath, boolean theCacheFile) + throws IOException { String userHomeDir = System.getProperty("user.home"); File applicationDir = new File(userHomeDir + File.separator + "." 
+ "hapi-fhir-cli"); @@ -425,7 +478,7 @@ public abstract class BaseCommand implements Comparable { File suppliedFile = new File(FilenameUtils.normalize(theFilepath)); if (suppliedFile.isDirectory()) { - inputFiles = FileUtils.listFiles(suppliedFile, new String[]{"zip"}, false); + inputFiles = FileUtils.listFiles(suppliedFile, new String[] {"zip"}, false); } else { inputFiles = Collections.singletonList(suppliedFile); } @@ -435,20 +488,24 @@ public abstract class BaseCommand implements Comparable { File cacheDir = new File(applicationDir, "cache"); FileUtils.forceMkdir(cacheDir); - File inputFile = new File(cacheDir, "examples-json-" + getFhirContext().getVersion().getVersion() + ".zip"); + File inputFile = new File( + cacheDir, "examples-json-" + getFhirContext().getVersion().getVersion() + ".zip"); Date cacheExpiryDate = DateUtils.addHours(new Date(), -12); if (!inputFile.exists() | (theCacheFile && FileUtils.isFileOlder(inputFile, cacheExpiryDate))) { - File exampleFileDownloading = new File(cacheDir, "examples-json-" + getFhirContext().getVersion().getVersion() + ".zip.partial"); + File exampleFileDownloading = new File( + cacheDir, + "examples-json-" + getFhirContext().getVersion().getVersion() + ".zip.partial"); HttpGet get = new HttpGet(theSpecUrl); CloseableHttpClient client = HttpClientBuilder.create().build(); CloseableHttpResponse result = client.execute(get); if (result.getStatusLine().getStatusCode() != 200) { - throw new CommandFailureException(Msg.code(1578) + "Got HTTP " + result.getStatusLine().getStatusCode() + " response code loading " + theSpecUrl); + throw new CommandFailureException(Msg.code(1578) + "Got HTTP " + + result.getStatusLine().getStatusCode() + " response code loading " + theSpecUrl); } ourLog.info("Downloading from remote url: {}", theSpecUrl); @@ -461,39 +518,53 @@ public abstract class BaseCommand implements Comparable { inputFile.deleteOnExit(); } - ourLog.info("Successfully Loaded example pack ({})", FileUtils.byteCountToDisplaySize(FileUtils.sizeOf(inputFile))); + ourLog.info( + "Successfully Loaded example pack ({})", + FileUtils.byteCountToDisplaySize(FileUtils.sizeOf(inputFile))); IOUtils.closeQuietly(result.getEntity().getContent()); } inputFiles = Collections.singletonList(inputFile); - } return inputFiles; } protected IGenericClient newClient(CommandLine theCommandLine) throws ParseException { - return newClient(theCommandLine, BASE_URL_PARAM, BASIC_AUTH_PARAM, BEARER_TOKEN_PARAM_LONGOPT, TLS_AUTH_PARAM_LONGOPT); + return newClient( + theCommandLine, BASE_URL_PARAM, BASIC_AUTH_PARAM, BEARER_TOKEN_PARAM_LONGOPT, TLS_AUTH_PARAM_LONGOPT); } - protected IGenericClient newClient(CommandLine theCommandLine, String theBaseUrlParamName, String theBasicAuthOptionName, - String theBearerTokenOptionName, String theTlsAuthOptionName) throws ParseException { + protected IGenericClient newClient( + CommandLine theCommandLine, + String theBaseUrlParamName, + String theBasicAuthOptionName, + String theBearerTokenOptionName, + String theTlsAuthOptionName) + throws ParseException { String baseUrl = theCommandLine.getOptionValue(theBaseUrlParamName); if (isBlank(baseUrl)) { throw new ParseException(Msg.code(1579) + "No target server (-" + BASE_URL_PARAM + ") specified."); } else if (!baseUrl.startsWith("http") && !baseUrl.startsWith("file")) { - throw new ParseException(Msg.code(1580) + "Invalid target server specified, must begin with 'http' or 'file'."); + throw new ParseException( + Msg.code(1580) + "Invalid target server specified, must begin with 'http' or 
'file'."); } - return newClientWithBaseUrl(theCommandLine, baseUrl, theBasicAuthOptionName, theBearerTokenOptionName, theTlsAuthOptionName); + return newClientWithBaseUrl( + theCommandLine, baseUrl, theBasicAuthOptionName, theBearerTokenOptionName, theTlsAuthOptionName); } - protected IGenericClient newClientWithBaseUrl(CommandLine theCommandLine, String theBaseUrl, String theBasicAuthOptionName, - String theBearerTokenOptionName, String theTlsAuthOptionName) throws ParseException { + protected IGenericClient newClientWithBaseUrl( + CommandLine theCommandLine, + String theBaseUrl, + String theBasicAuthOptionName, + String theBearerTokenOptionName, + String theTlsAuthOptionName) + throws ParseException { Optional tlsConfig = createTlsConfig(theCommandLine, theTlsAuthOptionName); RestfulClientFactory restfulClientFactory = tlsConfig.isPresent() - ? new HapiFhirCliRestfulClientFactory(myFhirCtx, tlsConfig.get()) - : new HapiFhirCliRestfulClientFactory(myFhirCtx); + ? new HapiFhirCliRestfulClientFactory(myFhirCtx, tlsConfig.get()) + : new HapiFhirCliRestfulClientFactory(myFhirCtx); myFhirCtx.setRestfulClientFactory(restfulClientFactory); myFhirCtx.getRestfulClientFactory().setSocketTimeout((int) DateUtils.MILLIS_PER_HOUR); @@ -501,42 +572,45 @@ public abstract class BaseCommand implements Comparable { String basicAuthHeaderValue = getAndParseOptionBasicAuthHeader(theCommandLine, theBasicAuthOptionName); if (isNotBlank(basicAuthHeaderValue)) { - retVal.registerInterceptor(new SimpleRequestHeaderInterceptor(Constants.HEADER_AUTHORIZATION, basicAuthHeaderValue)); + retVal.registerInterceptor( + new SimpleRequestHeaderInterceptor(Constants.HEADER_AUTHORIZATION, basicAuthHeaderValue)); } if (isNotBlank(theBearerTokenOptionName)) { String bearerToken = getAndParseBearerTokenAuthHeader(theCommandLine, theBearerTokenOptionName); if (isNotBlank(bearerToken)) { - retVal.registerInterceptor(new SimpleRequestHeaderInterceptor(Constants.HEADER_AUTHORIZATION, Constants.HEADER_AUTHORIZATION_VALPREFIX_BEARER + bearerToken)); + retVal.registerInterceptor(new SimpleRequestHeaderInterceptor( + Constants.HEADER_AUTHORIZATION, Constants.HEADER_AUTHORIZATION_VALPREFIX_BEARER + bearerToken)); } } return retVal; } - private Optional createTlsConfig(CommandLine theCommandLine, String theTlsAuthOptionName){ + private Optional createTlsConfig(CommandLine theCommandLine, String theTlsAuthOptionName) { String httpAuthFilePath = theCommandLine.getOptionValue(theTlsAuthOptionName); - if(isBlank(httpAuthFilePath)){ + if (isBlank(httpAuthFilePath)) { return Optional.empty(); } - try(FileReader fileReader = new FileReader(httpAuthFilePath)) { + try (FileReader fileReader = new FileReader(httpAuthFilePath)) { JsonObject json = JsonParser.parseReader(fileReader).getAsJsonObject(); - Optional keyStoreInfo = createKeyStoreInfo(json.get("keyStore").getAsJsonObject()); - Optional trustStoreInfo = createTrustStoreInfo(json.get("trustStore").getAsJsonObject()); - if(keyStoreInfo.isEmpty() && trustStoreInfo.isEmpty()){ + Optional keyStoreInfo = + createKeyStoreInfo(json.get("keyStore").getAsJsonObject()); + Optional trustStoreInfo = + createTrustStoreInfo(json.get("trustStore").getAsJsonObject()); + if (keyStoreInfo.isEmpty() && trustStoreInfo.isEmpty()) { return Optional.empty(); } return Optional.of(new TlsAuthentication(keyStoreInfo, trustStoreInfo)); - } - catch(Exception e){ - throw new RuntimeException(Msg.code(2253)+"Could not create TLS configuration options", e); + } catch (Exception e) { + throw new 
RuntimeException(Msg.code(2253) + "Could not create TLS configuration options", e); } } private Optional createKeyStoreInfo(JsonObject theJson) throws ParseException { String filePath = theJson.get("filePath").getAsString(); - if(isBlank(filePath)){ + if (isBlank(filePath)) { return Optional.empty(); } @@ -556,7 +630,7 @@ public abstract class BaseCommand implements Comparable { private Optional createTrustStoreInfo(JsonObject theJson) throws ParseException { String filePath = theJson.get("filePath").getAsString(); - if(isBlank(filePath)){ + if (isBlank(filePath)) { return Optional.empty(); } @@ -570,7 +644,8 @@ public abstract class BaseCommand implements Comparable { return Optional.of(trustStoreInfo); } - private String getAndParseBearerTokenAuthHeader(CommandLine theCommandLine, String theBearerTokenOptionName) throws ParseException { + private String getAndParseBearerTokenAuthHeader(CommandLine theCommandLine, String theBearerTokenOptionName) + throws ParseException { String value = theCommandLine.getOptionValue(theBearerTokenOptionName); if (PROMPT.equals(value)) { return promptUser("Enter Bearer Token: "); @@ -596,7 +671,6 @@ public abstract class BaseCommand implements Comparable { myFhirCtx = versionEnum.newContext(); } - public abstract void run(CommandLine theCommandLine) throws ParseException, ExecutionException; public List provideUsageNotes() { diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseFlywayMigrateDatabaseCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseFlywayMigrateDatabaseCommand.java index 5f554005bcf..56227107752 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseFlywayMigrateDatabaseCommand.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseFlywayMigrateDatabaseCommand.java @@ -71,14 +71,35 @@ public abstract class BaseFlywayMigrateDatabaseCommand extends B public Options getOptions() { Options retVal = new Options(); - addOptionalOption(retVal, "r", "dry-run", false, "Log the SQL statements that would be executed but to not actually make any changes"); + addOptionalOption( + retVal, + "r", + "dry-run", + false, + "Log the SQL statements that would be executed but to not actually make any changes"); addRequiredOption(retVal, "u", "url", "URL", "The JDBC database URL"); addRequiredOption(retVal, "n", "username", "Username", "The JDBC database username"); addRequiredOption(retVal, "p", "password", "Password", "The JDBC database password"); - addRequiredOption(retVal, "d", "driver", "Driver", "The database driver to use (Options are " + driverOptions() + ")"); - addOptionalOption(retVal, "x", "flags", "Flags", "A comma-separated list of any specific migration flags (these flags are version specific, see migrator documentation for details)"); - addOptionalOption(retVal, null, NO_COLUMN_SHRINK, false, "If this flag is set, the system will not attempt to reduce the length of columns. This is useful in environments with a lot of existing data, where shrinking a column can take a very long time."); - addOptionalOption(retVal, null, SKIP_VERSIONS, "Versions", "A comma separated list of schema versions to skip. E.g. 
4_1_0.20191214.2,4_1_0.20191214.4"); + addRequiredOption( + retVal, "d", "driver", "Driver", "The database driver to use (Options are " + driverOptions() + ")"); + addOptionalOption( + retVal, + "x", + "flags", + "Flags", + "A comma-separated list of any specific migration flags (these flags are version specific, see migrator documentation for details)"); + addOptionalOption( + retVal, + null, + NO_COLUMN_SHRINK, + false, + "If this flag is set, the system will not attempt to reduce the length of columns. This is useful in environments with a lot of existing data, where shrinking a column can take a very long time."); + addOptionalOption( + retVal, + null, + SKIP_VERSIONS, + "Versions", + "A comma separated list of schema versions to skip. E.g. 4_1_0.20191214.2,4_1_0.20191214.4"); return retVal; } @@ -98,7 +119,8 @@ public abstract class BaseFlywayMigrateDatabaseCommand extends B try { driverType = DriverTypeEnum.valueOf(driverTypeString); } catch (Exception e) { - throw new ParseException(Msg.code(1535) + "Invalid driver type \"" + driverTypeString + "\". Valid values are: " + driverOptions()); + throw new ParseException(Msg.code(1535) + "Invalid driver type \"" + driverTypeString + + "\". Valid values are: " + driverOptions()); } boolean dryRun = theCommandLine.hasOption("r"); @@ -106,12 +128,14 @@ public abstract class BaseFlywayMigrateDatabaseCommand extends B String flags = theCommandLine.getOptionValue("x"); myFlags = Arrays.stream(defaultString(flags).split(",")) - .map(String::trim) - .filter(StringUtils::isNotBlank) - .collect(Collectors.toSet()); + .map(String::trim) + .filter(StringUtils::isNotBlank) + .collect(Collectors.toSet()); - try (DriverTypeEnum.ConnectionProperties connectionProperties = driverType.newConnectionProperties(url, username, password)) { - HapiMigrator migrator = new HapiMigrator(myMigrationTableName, connectionProperties.getDataSource(), driverType); + try (DriverTypeEnum.ConnectionProperties connectionProperties = + driverType.newConnectionProperties(url, username, password)) { + HapiMigrator migrator = + new HapiMigrator(myMigrationTableName, connectionProperties.getDataSource(), driverType); migrator.createMigrationTableIfRequired(); migrator.setDryRun(dryRun); diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseRequestGeneratingCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseRequestGeneratingCommand.java index ab4dd16af71..3607b0ae853 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseRequestGeneratingCommand.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BaseRequestGeneratingCommand.java @@ -33,7 +33,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; - public abstract class BaseRequestGeneratingCommand extends BaseCommand { public enum BaseRequestGeneratingCommandOptions { @@ -45,12 +44,10 @@ public abstract class BaseRequestGeneratingCommand extends BaseCommand { TLS_AUTH } - protected static final String HEADER_PASSTHROUGH = "hp"; protected static final String HEADER_PASSTHROUGH_NAME = "header"; protected static final String HEADER_PASSTHROUGH_LONGOPT = "header-passthrough"; - @Override public Options getOptions() { return getSomeOptions(Collections.emptySet()); @@ -62,78 +59,83 @@ public abstract class BaseRequestGeneratingCommand extends BaseCommand { protected Options getSomeOptions(Collection theExcludeOptions) { Options options = new Options(); - if (! 
theExcludeOptions.contains(BaseRequestGeneratingCommandOptions.VERSION)) { + if (!theExcludeOptions.contains(BaseRequestGeneratingCommandOptions.VERSION)) { addFhirVersionOption(options); } - if (! theExcludeOptions.contains(BaseRequestGeneratingCommandOptions.BASE_URL)) { + if (!theExcludeOptions.contains(BaseRequestGeneratingCommandOptions.BASE_URL)) { addBaseUrlOption(options); } - if (! theExcludeOptions.contains(BaseRequestGeneratingCommandOptions.BASIC_AUTH)) { + if (!theExcludeOptions.contains(BaseRequestGeneratingCommandOptions.BASIC_AUTH)) { addBasicAuthOption(options); } - if (! theExcludeOptions.contains(BaseRequestGeneratingCommandOptions.VERBOSE_LOGGING)) { + if (!theExcludeOptions.contains(BaseRequestGeneratingCommandOptions.VERBOSE_LOGGING)) { addVerboseLoggingOption(options); } - if (! theExcludeOptions.contains(BaseRequestGeneratingCommandOptions.HEADER_PASSTHROUGH)) { + if (!theExcludeOptions.contains(BaseRequestGeneratingCommandOptions.HEADER_PASSTHROUGH)) { addHeaderPassthroughOption(options); } - if (! theExcludeOptions.contains(BaseRequestGeneratingCommandOptions.TLS_AUTH)) { + if (!theExcludeOptions.contains(BaseRequestGeneratingCommandOptions.TLS_AUTH)) { addHttpsAuthOption(options); } - return options; } - @Override - protected IGenericClient newClientWithBaseUrl(CommandLine theCommandLine, String theBaseUrl, - String theBasicAuthOptionName, String theBearerTokenOptionName, String theTlsAuthOptionName) throws ParseException { + protected IGenericClient newClientWithBaseUrl( + CommandLine theCommandLine, + String theBaseUrl, + String theBasicAuthOptionName, + String theBearerTokenOptionName, + String theTlsAuthOptionName) + throws ParseException { IGenericClient client = super.newClientWithBaseUrl( - theCommandLine, theBaseUrl, theBasicAuthOptionName, theBearerTokenOptionName, theTlsAuthOptionName); + theCommandLine, theBaseUrl, theBasicAuthOptionName, theBearerTokenOptionName, theTlsAuthOptionName); registerHeaderPassthrough(theCommandLine, client); return client; } - private void registerHeaderPassthrough(CommandLine theCommandLine, IGenericClient theClient) throws ParseException { if (theCommandLine.hasOption(HEADER_PASSTHROUGH)) { - theClient.registerInterceptor( - new AdditionalRequestHeadersInterceptor( + theClient.registerInterceptor(new AdditionalRequestHeadersInterceptor( getAndParseOptionHeadersPassthrough(theCommandLine, HEADER_PASSTHROUGH))); } - } private void addHeaderPassthroughOption(Options theOptions) { - addOptionalOption(theOptions, HEADER_PASSTHROUGH, HEADER_PASSTHROUGH_LONGOPT, HEADER_PASSTHROUGH_NAME, - "If specified, this argument specifies headers to include in the generated request"); + addOptionalOption( + theOptions, + HEADER_PASSTHROUGH, + HEADER_PASSTHROUGH_LONGOPT, + HEADER_PASSTHROUGH_NAME, + "If specified, this argument specifies headers to include in the generated request"); } /** * @return Returns the optional pass-through header name and value */ private Map> getAndParseOptionHeadersPassthrough( - CommandLine theCommandLine, String theOptionName) throws ParseException { + CommandLine theCommandLine, String theOptionName) throws ParseException { - if (! 
theCommandLine.hasOption(theOptionName)) { + if (!theCommandLine.hasOption(theOptionName)) { return Collections.emptyMap(); } Map> headersMap = new HashMap<>(); - for (String nextOptionValue: theCommandLine.getOptionValues(theOptionName)) { + for (String nextOptionValue : theCommandLine.getOptionValues(theOptionName)) { Pair nextHeader = parseNameValueParameter(":", theOptionName, nextOptionValue); - headersMap.compute(nextHeader.getKey(), (k, v) -> v == null ? new ArrayList<>() : v).add(nextHeader.getValue()); + headersMap + .compute(nextHeader.getKey(), (k, v) -> v == null ? new ArrayList<>() : v) + .add(nextHeader.getValue()); } return headersMap; } - } diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BulkImportCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BulkImportCommand.java index f1d7c27cef0..8794891e7f3 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BulkImportCommand.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/BulkImportCommand.java @@ -45,14 +45,13 @@ import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.ServerConnector; import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.servlet.ServletHolder; -import org.hl7.fhir.r4.model.Parameters; import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseParameters; import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.r4.model.Parameters; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; import java.io.File; import java.io.FileInputStream; import java.io.IOException; @@ -68,6 +67,7 @@ import java.util.List; import java.util.Locale; import java.util.concurrent.ExecutionException; import java.util.zip.GZIPInputStream; +import javax.annotation.Nonnull; public class BulkImportCommand extends BaseCommand { @@ -84,10 +84,10 @@ public class BulkImportCommand extends BaseCommand { @Override public String getCommandDescription() { - return "Initiates a bulk import against a FHIR server using the $import " + - "operation, and creates a local HTTP server to serve the contents. " + - "This command does not currently support HTTPS so it is only intended " + - "for testing scenarios."; + return "Initiates a bulk import against a FHIR server using the $import " + + "operation, and creates a local HTTP server to serve the contents. " + + "This command does not currently support HTTPS so it is only intended " + + "for testing scenarios."; } @Override @@ -99,9 +99,24 @@ public class BulkImportCommand extends BaseCommand { public Options getOptions() { Options options = new Options(); addFhirVersionOption(options); - addRequiredOption(options, null, PORT, PORT, "The port to listen on. If set to 0, an available free port will be selected."); - addOptionalOption(options, null, SOURCE_BASE, "base url", "The URL to advertise as the base URL for accessing the files (i.e. this is the address that this command will declare that it is listening on). If not present, the server will default to \"http://localhost:[port]\" which will only work if the server is on the same host."); - addRequiredOption(options, null, SOURCE_DIRECTORY, "directory", "The source directory. This directory will be scanned for files with an extensions of .json, .ndjson, .json.gz and .ndjson.gz, and any files in this directory will be assumed to be NDJSON and uploaded. 
This command will read the first resource from each file to verify its resource type, and will assume that all resources in the file are of the same type."); + addRequiredOption( + options, + null, + PORT, + PORT, + "The port to listen on. If set to 0, an available free port will be selected."); + addOptionalOption( + options, + null, + SOURCE_BASE, + "base url", + "The URL to advertise as the base URL for accessing the files (i.e. this is the address that this command will declare that it is listening on). If not present, the server will default to \"http://localhost:[port]\" which will only work if the server is on the same host."); + addRequiredOption( + options, + null, + SOURCE_DIRECTORY, + "directory", + "The source directory. This directory will be scanned for files with an extensions of .json, .ndjson, .json.gz and .ndjson.gz, and any files in this directory will be assumed to be NDJSON and uploaded. This command will read the first resource from each file to verify its resource type, and will assume that all resources in the file are of the same type."); addRequiredOption(options, null, TARGET_BASE, "base url", "The base URL of the target FHIR server."); addBasicAuthOption(options); return options; @@ -130,20 +145,23 @@ public class BulkImportCommand extends BaseCommand { String targetBaseUrl = theCommandLine.getOptionValue(TARGET_BASE); ourLog.info("Initiating bulk import against server: {}", targetBaseUrl); - IGenericClient client = newClient(theCommandLine, TARGET_BASE, BASIC_AUTH_PARAM, BEARER_TOKEN_PARAM_LONGOPT, TLS_AUTH_PARAM_LONGOPT); + IGenericClient client = newClient( + theCommandLine, TARGET_BASE, BASIC_AUTH_PARAM, BEARER_TOKEN_PARAM_LONGOPT, TLS_AUTH_PARAM_LONGOPT); client.registerInterceptor(new LoggingInterceptor(false)); IBaseParameters request = createRequest(sourceBaseUrl, indexes, resourceTypes); - IBaseResource outcome = client - .operation() - .onServer() - .named(JpaConstants.OPERATION_IMPORT) - .withParameters(request) - .returnResourceType(myFhirCtx.getResourceDefinition("OperationOutcome").getImplementingClass()) - .withAdditionalHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC) - .execute(); + IBaseResource outcome = client.operation() + .onServer() + .named(JpaConstants.OPERATION_IMPORT) + .withParameters(request) + .returnResourceType( + myFhirCtx.getResourceDefinition("OperationOutcome").getImplementingClass()) + .withAdditionalHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC) + .execute(); - ourLog.debug("Got response: {}", myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); + ourLog.debug( + "Got response: {}", + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); ourLog.info("Bulk import is now running. 
Do not terminate this command until all files have been uploaded."); checkJobComplete(outcome.getIdElement().toString(), client); @@ -161,14 +179,13 @@ public class BulkImportCommand extends BaseCommand { } try { - response = client - .operation() - .onServer() - .named(JpaConstants.OPERATION_IMPORT_POLL_STATUS) - .withSearchParameter(Parameters.class, "_jobId", new StringParam(jobId)) - .returnMethodOutcome() - .execute(); - } catch (InternalErrorException e){ + response = client.operation() + .onServer() + .named(JpaConstants.OPERATION_IMPORT_POLL_STATUS) + .withSearchParameter(Parameters.class, "_jobId", new StringParam(jobId)) + .returnMethodOutcome() + .execute(); + } catch (InternalErrorException e) { // handle ERRORED status ourLog.error(e.getMessage()); break; @@ -176,12 +193,12 @@ public class BulkImportCommand extends BaseCommand { if (response.getResponseStatusCode() == 200) { break; - } else if (response.getResponseStatusCode() == 202){ + } else if (response.getResponseStatusCode() == 202) { // still in progress continue; - } - else { - throw new InternalErrorException(Msg.code(2138) + "Unexpected response status code: " + response.getResponseStatusCode() + "."); + } else { + throw new InternalErrorException( + Msg.code(2138) + "Unexpected response status code: " + response.getResponseStatusCode() + "."); } } } @@ -192,11 +209,18 @@ public class BulkImportCommand extends BaseCommand { FhirContext ctx = getFhirContext(); IBaseParameters retVal = ParametersUtil.newInstance(ctx); - ParametersUtil.addParameterToParameters(ctx, retVal, BulkDataImportProvider.PARAM_INPUT_FORMAT, "code", Constants.CT_FHIR_NDJSON); - ParametersUtil.addParameterToParameters(ctx, retVal, BulkDataImportProvider.PARAM_INPUT_SOURCE, "code", theBaseUrl); + ParametersUtil.addParameterToParameters( + ctx, retVal, BulkDataImportProvider.PARAM_INPUT_FORMAT, "code", Constants.CT_FHIR_NDJSON); + ParametersUtil.addParameterToParameters( + ctx, retVal, BulkDataImportProvider.PARAM_INPUT_SOURCE, "code", theBaseUrl); - IBase storageDetail = ParametersUtil.addParameterToParameters(ctx, retVal, BulkDataImportProvider.PARAM_STORAGE_DETAIL); - ParametersUtil.addPartString(ctx, storageDetail, BulkDataImportProvider.PARAM_STORAGE_DETAIL_TYPE, BulkDataImportProvider.PARAM_STORAGE_DETAIL_TYPE_VAL_HTTPS); + IBase storageDetail = + ParametersUtil.addParameterToParameters(ctx, retVal, BulkDataImportProvider.PARAM_STORAGE_DETAIL); + ParametersUtil.addPartString( + ctx, + storageDetail, + BulkDataImportProvider.PARAM_STORAGE_DETAIL_TYPE, + BulkDataImportProvider.PARAM_STORAGE_DETAIL_TYPE_VAL_HTTPS); for (int i = 0; i < theIndexes.size(); i++) { IBase input = ParametersUtil.addParameterToParameters(ctx, retVal, BulkDataImportProvider.PARAM_INPUT); @@ -251,16 +275,17 @@ public class BulkImportCommand extends BaseCommand { private void scanDirectoryForJsonFiles(String baseDirectory, List types, List files) { try { File directory = new File(baseDirectory); - final String[] extensions = new String[]{".json", ".ndjson", ".json.gz", ".ndjson.gz"}; - final IOFileFilter filter = FileFileFilter.INSTANCE.and(new SuffixFileFilter(extensions, IOCase.INSENSITIVE)); - PathUtils - .walk(directory.toPath(), filter, 1, false, FileVisitOption.FOLLOW_LINKS) - .map(Path::toFile) - .filter(t -> t.isFile()) - .filter(t -> t.exists()) - .forEach(t -> files.add(t)); + final String[] extensions = new String[] {".json", ".ndjson", ".json.gz", ".ndjson.gz"}; + final IOFileFilter filter = + FileFileFilter.INSTANCE.and(new SuffixFileFilter(extensions, 
IOCase.INSENSITIVE)); + PathUtils.walk(directory.toPath(), filter, 1, false, FileVisitOption.FOLLOW_LINKS) + .map(Path::toFile) + .filter(t -> t.isFile()) + .filter(t -> t.exists()) + .forEach(t -> files.add(t)); if (files.isEmpty()) { - throw new CommandFailureException(Msg.code(2058) + "No files found in directory \"" + directory.getAbsolutePath() + "\". Allowed extensions: " + Arrays.asList(extensions)); + throw new CommandFailureException(Msg.code(2058) + "No files found in directory \"" + + directory.getAbsolutePath() + "\". Allowed extensions: " + Arrays.asList(extensions)); } FhirContext ctx = getFhirContext(); @@ -284,6 +309,4 @@ public class BulkImportCommand extends BaseCommand { throw new CommandFailureException(Msg.code(2059) + e.getMessage(), e); } } - } - diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/CommandFailureException.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/CommandFailureException.java index edc201eef93..bfc8d85e3ff 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/CommandFailureException.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/CommandFailureException.java @@ -34,5 +34,4 @@ public class CommandFailureException extends Error { } private static final long serialVersionUID = 1L; - } diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/CreatePackageCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/CreatePackageCommand.java index 0fbc3bef4fa..8e46b3b19bf 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/CreatePackageCommand.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/CreatePackageCommand.java @@ -81,13 +81,31 @@ public class CreatePackageCommand extends BaseCommand { Options options = new Options(); addFhirVersionOption(options); - addRequiredOption(options, null, NAME_OPT, "Package Name", "The name/id of the package, e.g. \"com.example.fhir.myapp\""); - addRequiredOption(options, null, VERSION_OPT, "Package Version", "The package version. FHIR packages use SemVer, e.g. \"1.0.0\""); + addRequiredOption( + options, null, NAME_OPT, "Package Name", "The name/id of the package, e.g. \"com.example.fhir.myapp\""); + addRequiredOption( + options, + null, + VERSION_OPT, + "Package Version", + "The package version. FHIR packages use SemVer, e.g. 
\"1.0.0\""); addOptionalOption(options, null, DESCRIPTION_OPT, "Description", "A description for this package"); - addOptionalOption(options, null, INCLUDE_PACKAGE_OPT, "File Spec", "A file spec to include in the package as a package resource/artifact"); - addOptionalOption(options, null, INCLUDE_EXAMPLE_OPT, "File Spec", "A file spec to include in the package as an example resource/artifact"); - addOptionalOption(options, null, TARGET_DIRECTORY_OPT, "Directory", "The directory in which to place the final package"); - addOptionalOption(options, null, DEPENDENCY_OPT, "name:version", "Include this dependency, in the form \"name:version\""); + addOptionalOption( + options, + null, + INCLUDE_PACKAGE_OPT, + "File Spec", + "A file spec to include in the package as a package resource/artifact"); + addOptionalOption( + options, + null, + INCLUDE_EXAMPLE_OPT, + "File Spec", + "A file spec to include in the package as an example resource/artifact"); + addOptionalOption( + options, null, TARGET_DIRECTORY_OPT, "Directory", "The directory in which to place the final package"); + addOptionalOption( + options, null, DEPENDENCY_OPT, "name:version", "Include this dependency, in the form \"name:version\""); return options; } @@ -99,7 +117,10 @@ public class CreatePackageCommand extends BaseCommand { FileUtils.deleteDirectory(myWorkDirectory); } } catch (IOException e) { - throw new InternalErrorException(Msg.code(1545) + "Failed to delete temporary directory \"" + myWorkDirectory.getAbsolutePath() + "\"", e); + throw new InternalErrorException( + Msg.code(1545) + "Failed to delete temporary directory \"" + myWorkDirectory.getAbsolutePath() + + "\"", + e); } } @@ -163,7 +184,6 @@ public class CreatePackageCommand extends BaseCommand { folder = "example"; addFiles(packageValues, folder); - String targetDirectory = theCommandLine.getOptionValue(TARGET_DIRECTORY_OPT); if (isBlank(targetDirectory)) { targetDirectory = "."; @@ -206,9 +226,13 @@ public class CreatePackageCommand extends BaseCommand { try { String contents = IOUtils.toString(new FileInputStream(next), StandardCharsets.UTF_8); contentBytes = contents.getBytes(StandardCharsets.UTF_8); - type = EncodingEnum.detectEncoding(contents).newParser(myFhirCtx).parseResource(contents).fhirType(); + type = EncodingEnum.detectEncoding(contents) + .newParser(myFhirCtx) + .parseResource(contents) + .fhirType(); } catch (IOException | DataFormatException e) { - throw new ExecutionException(Msg.code(1553) + "Failed to load/parse file: " + next.getName(), e); + throw new ExecutionException( + Msg.code(1553) + "Failed to load/parse file: " + next.getName(), e); } ourLog.info("Adding {} file of type {}: {}", theFolder, type, next.getName()); @@ -218,4 +242,3 @@ public class CreatePackageCommand extends BaseCommand { } } } - diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ExampleDataUploader.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ExampleDataUploader.java index a37f003ebfa..5ca2ebabc96 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ExampleDataUploader.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ExampleDataUploader.java @@ -19,12 +19,12 @@ */ package ca.uhn.fhir.cli; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.BaseRuntimeChildDefinition; import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; import 
ca.uhn.fhir.context.support.DefaultProfileValidationSupport; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.IResource; import ca.uhn.fhir.model.dstu2.resource.Bundle; import ca.uhn.fhir.model.dstu2.resource.Bundle.Entry; @@ -77,7 +77,8 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { // TODO: Don't use qualified names for loggers in HAPI CLI. private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ExampleDataUploader.class); - private IBaseBundle getBundleFromFile(Integer theLimit, File theSuppliedFile, FhirContext theCtx) throws ParseException, IOException { + private IBaseBundle getBundleFromFile(Integer theLimit, File theSuppliedFile, FhirContext theCtx) + throws ParseException, IOException { switch (theCtx.getVersion().getVersion()) { case DSTU2: return getBundleFromFileDstu2(theLimit, theSuppliedFile, theCtx); @@ -86,7 +87,8 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { case R4: return getBundleFromFileR4(theLimit, theSuppliedFile, theCtx); default: - throw new ParseException(Msg.code(1607) + "Invalid spec version for this command: " + theCtx.getVersion().getVersion()); + throw new ParseException(Msg.code(1607) + "Invalid spec version for this command: " + + theCtx.getVersion().getVersion()); } } @@ -128,20 +130,30 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { ourLog.info("FAILED to parse example {}", nextEntry.getName(), e); continue; } - ourLog.info("Found example {} - {} - {} chars", nextEntry.getName(), parsed.getClass().getSimpleName(), exampleString.length()); + ourLog.info( + "Found example {} - {} - {} chars", + nextEntry.getName(), + parsed.getClass().getSimpleName(), + exampleString.length()); if (ctx.getResourceType(parsed).equals("Bundle")) { - BaseRuntimeChildDefinition entryChildDef = ctx.getResourceDefinition(parsed).getChildByName("entry"); - BaseRuntimeElementCompositeDefinition entryDef = (BaseRuntimeElementCompositeDefinition) entryChildDef.getChildByName("entry"); + BaseRuntimeChildDefinition entryChildDef = + ctx.getResourceDefinition(parsed).getChildByName("entry"); + BaseRuntimeElementCompositeDefinition entryDef = + (BaseRuntimeElementCompositeDefinition) entryChildDef.getChildByName("entry"); for (IBase nextEntry1 : entryChildDef.getAccessor().getValues(parsed)) { - List resources = entryDef.getChildByName("resource").getAccessor().getValues(nextEntry1); + List resources = + entryDef.getChildByName("resource").getAccessor().getValues(nextEntry1); if (resources == null) { continue; } for (IBase nextResource : resources) { - if (!ctx.getResourceType(parsed).equals("Bundle") && ctx.getResourceType(parsed).equals("SearchParameter")) { - bundle.addEntry().setRequest(new EntryRequest().setMethod(HTTPVerbEnum.POST)).setResource((IResource) nextResource); + if (!ctx.getResourceType(parsed).equals("Bundle") + && ctx.getResourceType(parsed).equals("SearchParameter")) { + bundle.addEntry() + .setRequest(new EntryRequest().setMethod(HTTPVerbEnum.POST)) + .setResource((IResource) nextResource); } } } @@ -149,14 +161,17 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { if (ctx.getResourceType(parsed).equals("SearchParameter")) { continue; } - bundle.addEntry().setRequest(new EntryRequest().setMethod(HTTPVerbEnum.POST)).setResource((IResource) parsed); + bundle.addEntry() + .setRequest(new EntryRequest().setMethod(HTTPVerbEnum.POST)) + .setResource((IResource) parsed); } } return bundle; } @SuppressWarnings("unchecked") - private 
org.hl7.fhir.dstu3.model.Bundle getBundleFromFileDstu3(Integer limit, File inputFile, FhirContext ctx) throws IOException { + private org.hl7.fhir.dstu3.model.Bundle getBundleFromFileDstu3(Integer limit, File inputFile, FhirContext ctx) + throws IOException { org.hl7.fhir.dstu3.model.Bundle bundle = new org.hl7.fhir.dstu3.model.Bundle(); bundle.setType(BundleType.TRANSACTION); @@ -198,20 +213,27 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { ourLog.info("FAILED to parse example {}", nextEntry.getName(), e); continue; } - ourLog.info("Found example {} - {} - {} chars", nextEntry.getName(), parsed.getClass().getSimpleName(), exampleString.length()); + ourLog.info( + "Found example {} - {} - {} chars", + nextEntry.getName(), + parsed.getClass().getSimpleName(), + exampleString.length()); ValidationResult result = val.validateWithResult(parsed); - if (! result.isSuccessful()) { + if (!result.isSuccessful()) { ourLog.info("FAILED to validate example {} - {}", nextEntry.getName(), result); continue; } if (ctx.getResourceType(parsed).equals("Bundle")) { - BaseRuntimeChildDefinition entryChildDef = ctx.getResourceDefinition(parsed).getChildByName("entry"); - BaseRuntimeElementCompositeDefinition entryDef = (BaseRuntimeElementCompositeDefinition) entryChildDef.getChildByName("entry"); + BaseRuntimeChildDefinition entryChildDef = + ctx.getResourceDefinition(parsed).getChildByName("entry"); + BaseRuntimeElementCompositeDefinition entryDef = + (BaseRuntimeElementCompositeDefinition) entryChildDef.getChildByName("entry"); for (IBase nextEntry1 : entryChildDef.getAccessor().getValues(parsed)) { - List resources = entryDef.getChildByName("resource").getAccessor().getValues(nextEntry1); + List resources = + entryDef.getChildByName("resource").getAccessor().getValues(nextEntry1); if (resources == null) { continue; } @@ -219,8 +241,12 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { if (nextResource == null) { continue; } - if (!ctx.getResourceDefinition((Class) nextResource.getClass()).getName().equals("Bundle") - && ctx.getResourceDefinition((Class) nextResource.getClass()).getName().equals("SearchParameter")) { + if (!ctx.getResourceDefinition((Class) nextResource.getClass()) + .getName() + .equals("Bundle") + && ctx.getResourceDefinition((Class) nextResource.getClass()) + .getName() + .equals("SearchParameter")) { BundleEntryComponent entry = bundle.addEntry(); entry.getRequest().setMethod(HTTPVerb.POST); entry.setResource((Resource) nextResource); @@ -240,7 +266,8 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { } @SuppressWarnings("unchecked") - private org.hl7.fhir.r4.model.Bundle getBundleFromFileR4(Integer limit, File inputFile, FhirContext ctx) throws IOException { + private org.hl7.fhir.r4.model.Bundle getBundleFromFileR4(Integer limit, File inputFile, FhirContext ctx) + throws IOException { org.hl7.fhir.r4.model.Bundle bundle = new org.hl7.fhir.r4.model.Bundle(); bundle.setType(org.hl7.fhir.r4.model.Bundle.BundleType.TRANSACTION); @@ -282,20 +309,27 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { ourLog.info("FAILED to parse example {}", nextEntry.getName(), e); continue; } - ourLog.info("Found example {} - {} - {} chars", nextEntry.getName(), parsed.getClass().getSimpleName(), exampleString.length()); + ourLog.info( + "Found example {} - {} - {} chars", + nextEntry.getName(), + parsed.getClass().getSimpleName(), + exampleString.length()); ValidationResult result = 
val.validateWithResult(parsed); - if (! result.isSuccessful()) { + if (!result.isSuccessful()) { ourLog.info("FAILED to validate example {} - {}", nextEntry.getName(), result); continue; } if (ctx.getResourceType(parsed).equals("Bundle")) { - BaseRuntimeChildDefinition entryChildDef = ctx.getResourceDefinition(parsed).getChildByName("entry"); - BaseRuntimeElementCompositeDefinition entryDef = (BaseRuntimeElementCompositeDefinition) entryChildDef.getChildByName("entry"); + BaseRuntimeChildDefinition entryChildDef = + ctx.getResourceDefinition(parsed).getChildByName("entry"); + BaseRuntimeElementCompositeDefinition entryDef = + (BaseRuntimeElementCompositeDefinition) entryChildDef.getChildByName("entry"); for (IBase nextEntry1 : entryChildDef.getAccessor().getValues(parsed)) { - List resources = entryDef.getChildByName("resource").getAccessor().getValues(nextEntry1); + List resources = + entryDef.getChildByName("resource").getAccessor().getValues(nextEntry1); if (resources == null) { continue; } @@ -303,8 +337,12 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { if (nextResource == null) { continue; } - if (!ctx.getResourceDefinition((Class) nextResource.getClass()).getName().equals("Bundle") - && ctx.getResourceDefinition((Class) nextResource.getClass()).getName().equals("SearchParameter")) { + if (!ctx.getResourceDefinition((Class) nextResource.getClass()) + .getName() + .equals("Bundle") + && ctx.getResourceDefinition((Class) nextResource.getClass()) + .getName() + .equals("SearchParameter")) { org.hl7.fhir.r4.model.Bundle.BundleEntryComponent entry = bundle.addEntry(); entry.getRequest().setMethod(org.hl7.fhir.r4.model.Bundle.HTTPVerb.POST); entry.setResource((org.hl7.fhir.r4.model.Resource) nextResource); @@ -346,15 +384,17 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { opt.setRequired(false); options.addOption(opt); - opt = new Option("c", "cache", false, - "Cache the downloaded examples-json.zip file in the ~/.hapi-fhir-cli/cache directory. Use this file for 12 hours if it exists, instead of fetching it from the internet."); + opt = new Option( + "c", + "cache", + false, + "Cache the downloaded examples-json.zip file in the ~/.hapi-fhir-cli/cache directory. Use this file for 12 hours if it exists, instead of fetching it from the internet."); opt.setRequired(false); options.addOption(opt); return options; } - @Override protected Collection getFilterOutVersions() { Collection filterOutCollection = super.getFilterOutVersions(); @@ -386,12 +426,15 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { Entry next = iterator.next(); // DataElement have giant IDs that seem invalid, need to investigate this.. 
- if ("Subscription".equals(next.getResource().getResourceName()) || "DataElement".equals(next.getResource().getResourceName()) - || "OperationOutcome".equals(next.getResource().getResourceName()) || "OperationDefinition".equals(next.getResource().getResourceName())) { + if ("Subscription".equals(next.getResource().getResourceName()) + || "DataElement".equals(next.getResource().getResourceName()) + || "OperationOutcome".equals(next.getResource().getResourceName()) + || "OperationDefinition".equals(next.getResource().getResourceName())) { ourLog.info("Skipping " + next.getResource().getResourceName() + " example"); iterator.remove(); } else { - IdDt resourceId = new IdDt(next.getResource().getResourceName() + "/EX" + next.getResource().getId().getIdPart()); + IdDt resourceId = new IdDt(next.getResource().getResourceName() + "/EX" + + next.getResource().getId().getIdPart()); if (!fullIds.add(resourceId.toUnqualifiedVersionless().getValue())) { ourLog.info("Discarding duplicate resource: " + resourceId.getValue()); iterator.remove(); @@ -428,10 +471,16 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { // nextRef.getResourceReference().getReferenceElement().setValue(null); // } nextRef.getResourceReference().setResource(null); - String value = nextRef.getResourceReference().getReferenceElement().toUnqualifiedVersionless().getValue(); + String value = nextRef.getResourceReference() + .getReferenceElement() + .toUnqualifiedVersionless() + .getValue(); if (isNotBlank(value)) { - if (!qualIds.contains(value) && !nextRef.getResourceReference().getReferenceElement().isLocal()) { + if (!qualIds.contains(value) + && !nextRef.getResourceReference() + .getReferenceElement() + .isLocal()) { ourLog.info("Discarding unknown reference: {}", value); nextRef.getResourceReference().getReferenceElement().setValue(null); } else { @@ -455,7 +504,6 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { System.gc(); ourLog.info("Final bundle: {} entries", bundle.getEntry().size()); - } private void processBundleDstu3(FhirContext ctx, org.hl7.fhir.dstu3.model.Bundle bundle) { @@ -466,12 +514,17 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { BundleEntryComponent next = iterator.next(); // DataElement have giant IDs that seem invalid, need to investigate this.. 
- if ("Subscription".equals(next.getResource().getResourceType().name()) || "DataElement".equals(next.getResource().getResourceType().name()) - || "OperationOutcome".equals(next.getResource().getResourceType().name()) || "OperationDefinition".equals(next.getResource().getResourceType().name())) { + if ("Subscription".equals(next.getResource().getResourceType().name()) + || "DataElement".equals(next.getResource().getResourceType().name()) + || "OperationOutcome" + .equals(next.getResource().getResourceType().name()) + || "OperationDefinition" + .equals(next.getResource().getResourceType().name())) { ourLog.info("Skipping " + next.getResource().getResourceType() + " example"); iterator.remove(); } else { - IdDt resourceId = new IdDt(next.getResource().getResourceType() + "/EX" + next.getResource().getIdElement().getIdPart()); + IdDt resourceId = new IdDt(next.getResource().getResourceType() + "/EX" + + next.getResource().getIdElement().getIdPart()); if (!fullIds.add(resourceId.toUnqualifiedVersionless().getValue())) { ourLog.info("Discarding duplicate resource: " + resourceId.getValue()); iterator.remove(); @@ -507,10 +560,16 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { // nextRef.getResourceReference().getReferenceElement().setValue(null); // } nextRef.getResourceReference().setResource(null); - String value = nextRef.getResourceReference().getReferenceElement().toUnqualifiedVersionless().getValue(); + String value = nextRef.getResourceReference() + .getReferenceElement() + .toUnqualifiedVersionless() + .getValue(); if (isNotBlank(value)) { - if (!qualIds.contains(value) && !nextRef.getResourceReference().getReferenceElement().isLocal()) { + if (!qualIds.contains(value) + && !nextRef.getResourceReference() + .getReferenceElement() + .isLocal()) { ourLog.info("Discarding unknown reference: {}", value); nextRef.getResourceReference().getReferenceElement().setValue(null); } else { @@ -534,23 +593,29 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { System.gc(); ourLog.info("Final bundle: {} entries", bundle.getEntry().size()); - } private void processBundleR4(FhirContext ctx, org.hl7.fhir.r4.model.Bundle bundle) { Set fullIds = new HashSet<>(); - for (Iterator iterator = bundle.getEntry().iterator(); iterator.hasNext(); ) { + for (Iterator iterator = + bundle.getEntry().iterator(); + iterator.hasNext(); ) { org.hl7.fhir.r4.model.Bundle.BundleEntryComponent next = iterator.next(); // DataElement have giant IDs that seem invalid, need to investigate this.. 
- if ("Subscription".equals(next.getResource().getResourceType().name()) || "DataElement".equals(next.getResource().getResourceType().name()) - || "OperationOutcome".equals(next.getResource().getResourceType().name()) || "OperationDefinition".equals(next.getResource().getResourceType().name())) { + if ("Subscription".equals(next.getResource().getResourceType().name()) + || "DataElement".equals(next.getResource().getResourceType().name()) + || "OperationOutcome" + .equals(next.getResource().getResourceType().name()) + || "OperationDefinition" + .equals(next.getResource().getResourceType().name())) { ourLog.info("Skipping " + next.getResource().getResourceType() + " example"); iterator.remove(); } else { - IdDt resourceId = new IdDt(next.getResource().getResourceType() + "/EX" + next.getResource().getIdElement().getIdPart()); + IdDt resourceId = new IdDt(next.getResource().getResourceType() + "/EX" + + next.getResource().getIdElement().getIdPart()); if (!fullIds.add(resourceId.toUnqualifiedVersionless().getValue())) { ourLog.info("Discarding duplicate resource: " + resourceId.getValue()); iterator.remove(); @@ -581,10 +646,16 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { List refs = ctx.newTerser().getAllResourceReferences(next.getResource()); for (ResourceReferenceInfo nextRef : refs) { nextRef.getResourceReference().setResource(null); - String value = nextRef.getResourceReference().getReferenceElement().toUnqualifiedVersionless().getValue(); + String value = nextRef.getResourceReference() + .getReferenceElement() + .toUnqualifiedVersionless() + .getValue(); if (isNotBlank(value)) { - if (!qualIds.contains(value) && !nextRef.getResourceReference().getReferenceElement().isLocal()) { + if (!qualIds.contains(value) + && !nextRef.getResourceReference() + .getReferenceElement() + .isLocal()) { ourLog.info("Discarding unknown reference: {}", value); nextRef.getResourceReference().getReferenceElement().setValue(null); } else { @@ -599,7 +670,6 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { System.gc(); ourLog.info("Final bundle: {} entries", bundle.getEntry().size()); - } @Override @@ -610,8 +680,9 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { String targetServer = theCommandLine.getOptionValue("t"); if (isBlank(targetServer)) { throw new ParseException(Msg.code(1609) + "No target server (-t) specified"); - } else if (! targetServer.startsWith("http") && ! 
targetServer.startsWith("file")) { - throw new ParseException(Msg.code(1610) + "Invalid target server specified, must begin with 'http' or 'file'"); + } else if (!targetServer.startsWith("http") && !targetServer.startsWith("file")) { + throw new ParseException( + Msg.code(1610) + "Invalid target server specified, must begin with 'http' or 'file'"); } Integer limit = null; @@ -620,7 +691,8 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { try { limit = Integer.parseInt(limitString); } catch (NumberFormatException e) { - throw new ParseException(Msg.code(1611) + "Invalid number for limit (-l) option, must be a number: " + limitString); + throw new ParseException( + Msg.code(1611) + "Invalid number for limit (-l) option, must be a number: " + limitString); } } @@ -636,7 +708,8 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { specUrl = "http://build.fhir.org/examples-json.zip"; break; default: - throw new ParseException(Msg.code(1612) + "Invalid spec version for this command: " + ctx.getVersion().getVersion()); + throw new ParseException(Msg.code(1612) + "Invalid spec version for this command: " + + ctx.getVersion().getVersion()); } String filepath = theCommandLine.getOptionValue('d'); @@ -654,11 +727,10 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { } catch (Exception e) { throw new CommandFailureException(Msg.code(1613) + e); } - - } - private void sendBundleToTarget(String targetServer, FhirContext ctx, IBaseBundle bundle, CommandLine theCommandLine) throws Exception { + private void sendBundleToTarget( + String targetServer, FhirContext ctx, IBaseBundle bundle, CommandLine theCommandLine) throws Exception { List resources = BundleUtil.toListOfResources(ctx, bundle); for (Iterator iter = resources.iterator(); iter.hasNext(); ) { @@ -685,8 +757,10 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { for (int i = 0; i < subResourceList.size(); i++) { IBaseResource nextCandidateSource = subResourceList.get(i); for (ResourceReferenceInfo nextRef : ctx.newTerser().getAllResourceReferences(nextCandidateSource)) { - String nextRefResourceType = nextRef.getResourceReference().getReferenceElement().getResourceType(); - String nextRefIdPart = nextRef.getResourceReference().getReferenceElement().getIdPart(); + String nextRefResourceType = + nextRef.getResourceReference().getReferenceElement().getResourceType(); + String nextRefIdPart = + nextRef.getResourceReference().getReferenceElement().getIdPart(); if (isBlank(nextRefResourceType) || isBlank(nextRefIdPart)) { nextRef.getResourceReference().setResource(null); nextRef.getResourceReference().setReference(null); @@ -698,14 +772,15 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { String nextTarget = nextRefResourceType + "/EX" + nextRefIdPart; nextRef.getResourceReference().setResource(null); nextRef.getResourceReference().setReference(nextTarget); - if (! 
checkedTargets.add(nextTarget)) { + if (!checkedTargets.add(nextTarget)) { continue; } for (int j = 0; j < resources.size(); j++) { String candidateTarget = resources.get(j).getIdElement().getValue(); if (isNotBlank(nextTarget) && nextTarget.equals(candidateTarget)) { - ourLog.info("Reflexively adding resource {} to bundle as it is a reference target", nextTarget); + ourLog.info( + "Reflexively adding resource {} to bundle as it is a reference target", nextTarget); subResourceList.add(resources.remove(j)); break; } @@ -718,11 +793,15 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { continue; } - ourLog.info("About to upload {} examples in a transaction, {} remaining", subResourceList.size(), resources.size()); + ourLog.info( + "About to upload {} examples in a transaction, {} remaining", + subResourceList.size(), + resources.size()); IVersionSpecificBundleFactory bundleFactory = ctx.newBundleFactory(); bundleFactory.addTotalResultsToBundle(subResourceList.size(), BundleTypeEnum.TRANSACTION); - bundleFactory.addResourcesToBundle(new ArrayList<>(subResourceList), BundleTypeEnum.TRANSACTION, null, null, null); + bundleFactory.addResourcesToBundle( + new ArrayList<>(subResourceList), BundleTypeEnum.TRANSACTION, null, null, null); IBaseResource subBundle = bundleFactory.getResourceBundle(); String encoded = ctx.newXmlParser().setPrettyPrint(true).encodeResourceToString(subBundle); @@ -758,8 +837,5 @@ public class ExampleDataUploader extends BaseRequestGeneratingCommand { subResourceList.clear(); } - } - - } diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ExportConceptMapToCsvCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ExportConceptMapToCsvCommand.java index 7bfd94b0109..7cbefd48503 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ExportConceptMapToCsvCommand.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ExportConceptMapToCsvCommand.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.cli; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import org.apache.commons.cli.Options; import org.apache.commons.csv.CSVFormat; @@ -47,7 +47,8 @@ import static org.apache.commons.lang3.StringUtils.defaultString; public class ExportConceptMapToCsvCommand extends AbstractImportExportCsvConceptMapCommand { public static final String COMMAND = "export-conceptmap-to-csv"; // TODO: Don't use qualified names for loggers in HAPI CLI. 
- private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ExportConceptMapToCsvCommand.class); + private static final org.slf4j.Logger ourLog = + org.slf4j.LoggerFactory.getLogger(ExportConceptMapToCsvCommand.class); @Override public String getCommandDescription() { @@ -63,7 +64,12 @@ public class ExportConceptMapToCsvCommand extends AbstractImportExportCsvConcept public Options getOptions() { Options options = super.getOptions(); - addRequiredOption(options, CONCEPTMAP_URL_PARAM, CONCEPTMAP_URL_PARAM_LONGOPT, CONCEPTMAP_URL_PARAM_NAME, CONCEPTMAP_URL_PARAM_DESC); + addRequiredOption( + options, + CONCEPTMAP_URL_PARAM, + CONCEPTMAP_URL_PARAM_LONGOPT, + CONCEPTMAP_URL_PARAM_NAME, + CONCEPTMAP_URL_PARAM_DESC); addRequiredOption(options, FILE_PARAM, FILE_PARAM_LONGOPT, FILE_PARAM_NAME, FILE_PARAM_DESC); return options; @@ -77,27 +83,26 @@ public class ExportConceptMapToCsvCommand extends AbstractImportExportCsvConcept private void searchForConceptMapByUrl() throws ExecutionException { ourLog.info("Searching for ConceptMap with specified URL (i.e. ConceptMap.url): {}", conceptMapUrl); if (fhirVersion == FhirVersionEnum.DSTU3) { - org.hl7.fhir.dstu3.model.Bundle response = client - .search() - .forResource(org.hl7.fhir.dstu3.model.ConceptMap.class) - .where(org.hl7.fhir.dstu3.model.ConceptMap.URL.matches().value(conceptMapUrl)) - .returnBundle(org.hl7.fhir.dstu3.model.Bundle.class) - .execute(); + org.hl7.fhir.dstu3.model.Bundle response = client.search() + .forResource(org.hl7.fhir.dstu3.model.ConceptMap.class) + .where(org.hl7.fhir.dstu3.model.ConceptMap.URL.matches().value(conceptMapUrl)) + .returnBundle(org.hl7.fhir.dstu3.model.Bundle.class) + .execute(); if (response.hasEntry()) { ourLog.info("Found ConceptMap with specified URL (i.e. ConceptMap.url): {}", conceptMapUrl); - org.hl7.fhir.dstu3.model.ConceptMap conceptMap = (org.hl7.fhir.dstu3.model.ConceptMap) response.getEntryFirstRep().getResource(); + org.hl7.fhir.dstu3.model.ConceptMap conceptMap = (org.hl7.fhir.dstu3.model.ConceptMap) + response.getEntryFirstRep().getResource(); convertConceptMapToCsv(conceptMap); } else { ourLog.info("No ConceptMap exists with specified URL (i.e. ConceptMap.url): {}", conceptMapUrl); } } else if (fhirVersion == FhirVersionEnum.R4) { - Bundle response = client - .search() - .forResource(ConceptMap.class) - .where(ConceptMap.URL.matches().value(conceptMapUrl)) - .returnBundle(Bundle.class) - .execute(); + Bundle response = client.search() + .forResource(ConceptMap.class) + .where(ConceptMap.URL.matches().value(conceptMapUrl)) + .returnBundle(Bundle.class) + .execute(); if (response.hasEntry()) { ourLog.info("Found ConceptMap with specified URL (i.e. 
ConceptMap.url): {}", conceptMapUrl); @@ -123,9 +128,9 @@ public class ExportConceptMapToCsvCommand extends AbstractImportExportCsvConcept try (Writer writer = Files.newBufferedWriter(path)) { CSVFormat format = CSVFormat.DEFAULT - .withRecordSeparator("\n") - .withHeader(Header.class) - .withQuoteMode(QuoteMode.ALL); + .withRecordSeparator("\n") + .withHeader(Header.class) + .withQuoteMode(QuoteMode.ALL); try (CSVPrinter csvPrinter = new CSVPrinter(writer, format)) { for (ConceptMapGroupComponent group : theConceptMap.getGroup()) { for (SourceElementComponent element : group.getElement()) { diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ImportCsvToConceptMapCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ImportCsvToConceptMapCommand.java index 369c8cff340..542b6d948c6 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ImportCsvToConceptMapCommand.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ImportCsvToConceptMapCommand.java @@ -54,20 +54,24 @@ public class ImportCsvToConceptMapCommand extends AbstractImportExportCsvConcept public static final String COMMAND = "import-csv-to-conceptmap"; // TODO: Don't use qualified names for loggers in HAPI CLI. - private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ImportCsvToConceptMapCommand.class); + private static final org.slf4j.Logger ourLog = + org.slf4j.LoggerFactory.getLogger(ImportCsvToConceptMapCommand.class); protected static final String SOURCE_VALUE_SET_PARAM = "i"; protected static final String SOURCE_VALUE_SET_PARAM_LONGOPT = "input"; protected static final String SOURCE_VALUE_SET_PARAM_NAME = "input"; - protected static final String SOURCE_VALUE_SET_PARAM_DESC = "The source value set of the ConceptMap to be imported (i.e. ConceptMap.sourceUri)."; + protected static final String SOURCE_VALUE_SET_PARAM_DESC = + "The source value set of the ConceptMap to be imported (i.e. ConceptMap.sourceUri)."; protected static final String TARGET_VALUE_SET_PARAM = "o"; protected static final String TARGET_VALUE_SET_PARAM_LONGOPT = "output"; protected static final String TARGET_VALUE_SET_PARAM_NAME = "output"; - protected static final String TARGET_VALUE_SET_PARAM_DESC = "The target value set of the ConceptMap to be imported (i.e. ConceptMap.targetUri)."; + protected static final String TARGET_VALUE_SET_PARAM_DESC = + "The target value set of the ConceptMap to be imported (i.e. ConceptMap.targetUri)."; protected static final String CONCEPTMAP_STATUS_PARAM = "s"; protected static final String CONCEPTMAP_STATUS_PARAM_LONGOPT = "status"; protected static final String CONCEPTMAP_STATUS_PARAM_NAME = "status"; - protected static final String CONCEPTMAP_STATUS_PARAM_DESC = "The status of the ConceptMap resource to be imported/exported (i.e. ConceptMap.status)."; + protected static final String CONCEPTMAP_STATUS_PARAM_DESC = + "The status of the ConceptMap resource to be imported/exported (i.e. 
ConceptMap.status)."; protected String sourceValueSet; protected String targetValueSet; @@ -90,11 +94,31 @@ public class ImportCsvToConceptMapCommand extends AbstractImportExportCsvConcept public Options getOptions() { Options options = super.getOptions(); - addRequiredOption(options, CONCEPTMAP_URL_PARAM, CONCEPTMAP_URL_PARAM_LONGOPT, CONCEPTMAP_URL_PARAM_NAME, CONCEPTMAP_URL_PARAM_DESC); - addRequiredOption(options, CONCEPTMAP_STATUS_PARAM, CONCEPTMAP_STATUS_PARAM_LONGOPT, CONCEPTMAP_STATUS_PARAM_NAME, CONCEPTMAP_STATUS_PARAM_DESC); + addRequiredOption( + options, + CONCEPTMAP_URL_PARAM, + CONCEPTMAP_URL_PARAM_LONGOPT, + CONCEPTMAP_URL_PARAM_NAME, + CONCEPTMAP_URL_PARAM_DESC); + addRequiredOption( + options, + CONCEPTMAP_STATUS_PARAM, + CONCEPTMAP_STATUS_PARAM_LONGOPT, + CONCEPTMAP_STATUS_PARAM_NAME, + CONCEPTMAP_STATUS_PARAM_DESC); // - addOptionalOption(options, SOURCE_VALUE_SET_PARAM, SOURCE_VALUE_SET_PARAM_LONGOPT, SOURCE_VALUE_SET_PARAM_NAME, SOURCE_VALUE_SET_PARAM_DESC); - addOptionalOption(options, TARGET_VALUE_SET_PARAM, TARGET_VALUE_SET_PARAM_LONGOPT, TARGET_VALUE_SET_PARAM_NAME, TARGET_VALUE_SET_PARAM_DESC); + addOptionalOption( + options, + SOURCE_VALUE_SET_PARAM, + SOURCE_VALUE_SET_PARAM_LONGOPT, + SOURCE_VALUE_SET_PARAM_NAME, + SOURCE_VALUE_SET_PARAM_DESC); + addOptionalOption( + options, + TARGET_VALUE_SET_PARAM, + TARGET_VALUE_SET_PARAM_LONGOPT, + TARGET_VALUE_SET_PARAM_NAME, + TARGET_VALUE_SET_PARAM_DESC); // addRequiredOption(options, FILE_PARAM, FILE_PARAM_LONGOPT, FILE_PARAM_NAME, FILE_PARAM_DESC); @@ -132,41 +156,44 @@ public class ImportCsvToConceptMapCommand extends AbstractImportExportCsvConcept if (fhirVersion == FhirVersionEnum.DSTU3) { org.hl7.fhir.dstu3.model.ConceptMap conceptMap = convertCsvToConceptMapDstu3(); - ourLog.info("Searching for existing ConceptMap with specified URL (i.e. ConceptMap.url): {}", conceptMapUrl); - MethodOutcome methodOutcome = client - .update() - .resource(conceptMap) - .conditional() - .where(org.hl7.fhir.dstu3.model.ConceptMap.URL.matches().value(conceptMapUrl)) - .execute(); + ourLog.info( + "Searching for existing ConceptMap with specified URL (i.e. ConceptMap.url): {}", conceptMapUrl); + MethodOutcome methodOutcome = client.update() + .resource(conceptMap) + .conditional() + .where(org.hl7.fhir.dstu3.model.ConceptMap.URL.matches().value(conceptMapUrl)) + .execute(); if (Boolean.TRUE.equals(methodOutcome.getCreated())) { ourLog.info("Created new ConceptMap: {}", methodOutcome.getId().getValue()); } else { - ourLog.info("Updated existing ConceptMap: {}", methodOutcome.getId().getValue()); + ourLog.info( + "Updated existing ConceptMap: {}", methodOutcome.getId().getValue()); } } else if (fhirVersion == FhirVersionEnum.R4) { ConceptMap conceptMap = convertCsvToConceptMapR4(); - ourLog.info("Searching for existing ConceptMap with specified URL (i.e. ConceptMap.url): {}", conceptMapUrl); - MethodOutcome methodOutcome = client - .update() - .resource(conceptMap) - .conditional() - .where(ConceptMap.URL.matches().value(conceptMapUrl)) - .execute(); + ourLog.info( + "Searching for existing ConceptMap with specified URL (i.e. 
ConceptMap.url): {}", conceptMapUrl); + MethodOutcome methodOutcome = client.update() + .resource(conceptMap) + .conditional() + .where(ConceptMap.URL.matches().value(conceptMapUrl)) + .execute(); if (Boolean.TRUE.equals(methodOutcome.getCreated())) { ourLog.info("Created new ConceptMap: {}", methodOutcome.getId().getValue()); } else { - ourLog.info("Updated existing ConceptMap: {}", methodOutcome.getId().getValue()); + ourLog.info( + "Updated existing ConceptMap: {}", methodOutcome.getId().getValue()); } } } private org.hl7.fhir.dstu3.model.ConceptMap convertCsvToConceptMapDstu3() throws ExecutionException { try { - return (org.hl7.fhir.dstu3.model.ConceptMap) VersionConvertorFactory_30_40.convertResource(convertCsvToConceptMapR4()); + return (org.hl7.fhir.dstu3.model.ConceptMap) + VersionConvertorFactory_30_40.convertResource(convertCsvToConceptMapR4()); } catch (FHIRException fe) { throw new ExecutionException(Msg.code(1604), fe); } @@ -175,19 +202,16 @@ public class ImportCsvToConceptMapCommand extends AbstractImportExportCsvConcept private ConceptMap convertCsvToConceptMapR4() throws ExecutionException { ourLog.info("Converting CSV to ConceptMap..."); ConceptMap retVal = new ConceptMap(); - try ( - Reader reader = getBufferedReader(); - CSVParser csvParser = new CSVParser( - reader, - CSVFormat - .DEFAULT - .withRecordSeparator("\n") - .withHeader(Header.class) - .withFirstRecordAsHeader() - .withIgnoreHeaderCase() - .withIgnoreEmptyLines() - .withTrim()) - ) { + try (Reader reader = getBufferedReader(); + CSVParser csvParser = new CSVParser( + reader, + CSVFormat.DEFAULT + .withRecordSeparator("\n") + .withHeader(Header.class) + .withFirstRecordAsHeader() + .withIgnoreHeaderCase() + .withIgnoreEmptyLines() + .withTrim())) { retVal.setUrl(conceptMapUrl); retVal.setStatus(Enumerations.PublicationStatus.fromCode(status)); @@ -201,13 +225,19 @@ public class ImportCsvToConceptMapCommand extends AbstractImportExportCsvConcept TemporaryConceptMapGroup temporaryConceptMapGroup; TemporaryConceptMapGroupElement temporaryConceptMapGroupElement; - Map>> groupMap = parseCsvRecords(csvParser); + Map< + TemporaryConceptMapGroup, + Map>> + groupMap = parseCsvRecords(csvParser); Map> elementMap; Set targetSet; ConceptMapGroupComponent conceptMapGroupComponent; SourceElementComponent sourceElementComponent; TargetElementComponent targetElementComponent; - for (Map.Entry>> groupEntry : groupMap.entrySet()) { + for (Map.Entry< + TemporaryConceptMapGroup, + Map>> + groupEntry : groupMap.entrySet()) { hasElements = false; hasTargets = false; @@ -232,7 +262,8 @@ public class ImportCsvToConceptMapCommand extends AbstractImportExportCsvConcept } elementMap = groupEntry.getValue(); - for (Map.Entry> elementEntry : elementMap.entrySet()) { + for (Map.Entry> + elementEntry : elementMap.entrySet()) { temporaryConceptMapGroupElement = elementEntry.getKey(); sourceElementComponent = new SourceElementComponent(); @@ -260,7 +291,8 @@ public class ImportCsvToConceptMapCommand extends AbstractImportExportCsvConcept if (temporaryConceptMapGroupElementTarget.hasEquivalence()) { try { - targetElementComponent.setEquivalence(Enumerations.ConceptMapEquivalence.fromCode(temporaryConceptMapGroupElementTarget.getEquivalence())); + targetElementComponent.setEquivalence(Enumerations.ConceptMapEquivalence.fromCode( + temporaryConceptMapGroupElementTarget.getEquivalence())); } catch (FHIRException fe) { throw new ExecutionException(Msg.code(1605), fe); } @@ -294,8 +326,12 @@ public class ImportCsvToConceptMapCommand extends 
AbstractImportExportCsvConcept return retVal; } - private Map>> parseCsvRecords(CSVParser theCsvParser) { - Map>> retVal = new LinkedHashMap<>(); + private Map< + TemporaryConceptMapGroup, + Map>> + parseCsvRecords(CSVParser theCsvParser) { + Map>> + retVal = new LinkedHashMap<>(); TemporaryConceptMapGroup group; TemporaryConceptMapGroupElement element; @@ -306,20 +342,20 @@ public class ImportCsvToConceptMapCommand extends AbstractImportExportCsvConcept for (CSVRecord csvRecord : theCsvParser) { group = new TemporaryConceptMapGroup( - defaultString(csvRecord.get(Header.SOURCE_CODE_SYSTEM)), - defaultString(csvRecord.get(Header.SOURCE_CODE_SYSTEM_VERSION)), - defaultString(csvRecord.get(Header.TARGET_CODE_SYSTEM)), - defaultString(csvRecord.get(Header.TARGET_CODE_SYSTEM_VERSION))); + defaultString(csvRecord.get(Header.SOURCE_CODE_SYSTEM)), + defaultString(csvRecord.get(Header.SOURCE_CODE_SYSTEM_VERSION)), + defaultString(csvRecord.get(Header.TARGET_CODE_SYSTEM)), + defaultString(csvRecord.get(Header.TARGET_CODE_SYSTEM_VERSION))); element = new TemporaryConceptMapGroupElement( - defaultString(csvRecord.get(Header.SOURCE_CODE)), - defaultString(csvRecord.get(Header.SOURCE_DISPLAY))); + defaultString(csvRecord.get(Header.SOURCE_CODE)), + defaultString(csvRecord.get(Header.SOURCE_DISPLAY))); target = new TemporaryConceptMapGroupElementTarget( - defaultString(csvRecord.get(Header.TARGET_CODE)), - defaultString(csvRecord.get(Header.TARGET_DISPLAY)), - defaultString(csvRecord.get(Header.EQUIVALENCE)), - defaultString(csvRecord.get(Header.COMMENT))); + defaultString(csvRecord.get(Header.TARGET_CODE)), + defaultString(csvRecord.get(Header.TARGET_DISPLAY)), + defaultString(csvRecord.get(Header.EQUIVALENCE)), + defaultString(csvRecord.get(Header.COMMENT))); if (!retVal.containsKey(group)) { targetSet = new LinkedHashSet<>(); diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/LoadingValidationSupportDstu2.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/LoadingValidationSupportDstu2.java index 880f2bf0cda..17c8ff14947 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/LoadingValidationSupportDstu2.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/LoadingValidationSupportDstu2.java @@ -19,10 +19,10 @@ */ package ca.uhn.fhir.cli; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.support.IValidationSupport; import ca.uhn.fhir.context.support.ValidationSupportContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.client.api.IGenericClient; import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum; import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; @@ -33,21 +33,23 @@ public class LoadingValidationSupportDstu2 implements IValidationSupport { private FhirContext myCtx = FhirContext.forDstu2Hl7Org(); // TODO: Don't use qualified names for loggers in HAPI CLI. 
- private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(LoadingValidationSupportDstu2.class); + private static final org.slf4j.Logger ourLog = + org.slf4j.LoggerFactory.getLogger(LoadingValidationSupportDstu2.class); @Override public T fetchResource(Class theClass, String theUri) { String resName = myCtx.getResourceType(theClass); ourLog.info("Attempting to fetch {} at URL: {}", resName, theUri); - + myCtx.getRestfulClientFactory().setServerValidationMode(ServerValidationModeEnum.NEVER); IGenericClient client = myCtx.newRestfulGenericClient("http://example.com"); - + T result; try { result = client.read(theClass, theUri); } catch (BaseServerResponseException e) { - throw new CommandFailureException(Msg.code(1554) + "FAILURE: Received HTTP " + e.getStatusCode() + ": " + e.getMessage()); + throw new CommandFailureException( + Msg.code(1554) + "FAILURE: Received HTTP " + e.getStatusCode() + ": " + e.getMessage()); } ourLog.info("Successfully loaded resource"); return result; @@ -62,5 +64,4 @@ public class LoadingValidationSupportDstu2 implements IValidationSupport { public FhirContext getFhirContext() { return myCtx; } - } diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/LoadingValidationSupportDstu3.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/LoadingValidationSupportDstu3.java index c735f0f62c5..b142733790b 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/LoadingValidationSupportDstu3.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/LoadingValidationSupportDstu3.java @@ -19,9 +19,9 @@ */ package ca.uhn.fhir.cli; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.support.IValidationSupport; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.client.api.IGenericClient; import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum; import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; @@ -32,21 +32,23 @@ public class LoadingValidationSupportDstu3 implements IValidationSupport { private FhirContext myCtx = FhirContext.forDstu3(); // TODO: Don't use qualified names for loggers in HAPI CLI. 
- private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(LoadingValidationSupportDstu3.class); + private static final org.slf4j.Logger ourLog = + org.slf4j.LoggerFactory.getLogger(LoadingValidationSupportDstu3.class); @Override public T fetchResource(Class theClass, String theUri) { String resName = myCtx.getResourceType(theClass); ourLog.info("Attempting to fetch {} at URL: {}", resName, theUri); - + myCtx.getRestfulClientFactory().setServerValidationMode(ServerValidationModeEnum.NEVER); IGenericClient client = myCtx.newRestfulGenericClient("http://example.com"); - + T result; try { result = client.read(theClass, theUri); } catch (BaseServerResponseException e) { - throw new CommandFailureException(Msg.code(1565) + "FAILURE: Received HTTP " + e.getStatusCode() + ": " + e.getMessage()); + throw new CommandFailureException( + Msg.code(1565) + "FAILURE: Received HTTP " + e.getStatusCode() + ": " + e.getMessage()); } ourLog.info("Successfully loaded resource"); return result; @@ -56,5 +58,4 @@ public class LoadingValidationSupportDstu3 implements IValidationSupport { public FhirContext getFhirContext() { return myCtx; } - } diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/LoadingValidationSupportR4.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/LoadingValidationSupportR4.java index c50c8ba1f49..c37261c4653 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/LoadingValidationSupportR4.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/LoadingValidationSupportR4.java @@ -19,9 +19,9 @@ */ package ca.uhn.fhir.cli; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.support.IValidationSupport; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.client.api.IGenericClient; import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum; import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; @@ -44,7 +44,8 @@ public class LoadingValidationSupportR4 implements IValidationSupport { try { result = client.read(theClass, theUri); } catch (BaseServerResponseException e) { - throw new CommandFailureException(Msg.code(1588) + "FAILURE: Received HTTP " + e.getStatusCode() + ": " + e.getMessage()); + throw new CommandFailureException( + Msg.code(1588) + "FAILURE: Received HTTP " + e.getStatusCode() + ": " + e.getMessage()); } ourLog.info("Successfully loaded resource"); return result; @@ -54,6 +55,4 @@ public class LoadingValidationSupportR4 implements IValidationSupport { public FhirContext getFhirContext() { return myCtx; } - - } diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ReindexTerminologyCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ReindexTerminologyCommand.java index c60803a46a6..36524c393da 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ReindexTerminologyCommand.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ReindexTerminologyCommand.java @@ -30,9 +30,9 @@ import org.apache.commons.cli.ParseException; import org.hl7.fhir.instance.model.api.IBaseParameters; import org.hl7.fhir.r4.model.Parameters; -import javax.annotation.Nonnull; import java.util.List; import java.util.Optional; +import javax.annotation.Nonnull; import static ca.uhn.fhir.jpa.provider.BaseJpaSystemProvider.RESP_PARAM_SUCCESS; @@ -41,7 +41,6 @@ public class ReindexTerminologyCommand extends BaseRequestGeneratingCommand { static final String 
REINDEX_TERMINOLOGY = "reindex-terminology"; - @Override public String getCommandDescription() { return "Recreates freetext-indexes for terminology data."; @@ -52,7 +51,6 @@ public class ReindexTerminologyCommand extends BaseRequestGeneratingCommand { return REINDEX_TERMINOLOGY; } - @Override public void run(CommandLine theCommandLine) throws ParseException { parseFhirContext(theCommandLine); @@ -66,7 +64,6 @@ public class ReindexTerminologyCommand extends BaseRequestGeneratingCommand { invokeOperation(client); } - private void invokeOperation(IGenericClient theClient) { ourLog.info("Beginning freetext indexing - This may take a while..."); @@ -75,11 +72,11 @@ public class ReindexTerminologyCommand extends BaseRequestGeneratingCommand { String errorMessage = null; try { response = theClient - .operation() - .onServer() - .named(REINDEX_TERMINOLOGY) - .withNoParameters(Parameters.class) - .execute(); + .operation() + .onServer() + .named(REINDEX_TERMINOLOGY) + .withNoParameters(Parameters.class) + .execute(); } catch (BaseServerResponseException e) { int statusCode = e.getStatusCode(); @@ -88,17 +85,17 @@ public class ReindexTerminologyCommand extends BaseRequestGeneratingCommand { if (e.getOperationOutcome() != null) { errorMessage += " : " + e.getOperationOutcome().getFormatCommentsPre(); } - throw new CommandFailureException(Msg.code(2228) + "FAILURE: Received HTTP " + statusCode + ": " + errorMessage); - + throw new CommandFailureException( + Msg.code(2228) + "FAILURE: Received HTTP " + statusCode + ": " + errorMessage); } - - Optional isSuccessResponse = ParametersUtil.getNamedParameterValueAsString(myFhirCtx, response, RESP_PARAM_SUCCESS); - if ( ! isSuccessResponse.isPresent() ) { + Optional isSuccessResponse = + ParametersUtil.getNamedParameterValueAsString(myFhirCtx, response, RESP_PARAM_SUCCESS); + if (!isSuccessResponse.isPresent()) { errorMessage = "Internal error. Command result unknown. Check system logs for details."; } else { - boolean succeeded = Boolean.parseBoolean( isSuccessResponse.get() ); - if ( ! 
succeeded) { + boolean succeeded = Boolean.parseBoolean(isSuccessResponse.get()); + if (!succeeded) { errorMessage = getResponseMessage(response); } } @@ -109,7 +106,6 @@ public class ReindexTerminologyCommand extends BaseRequestGeneratingCommand { ourLog.info("Recreation of terminology freetext indexes complete!"); ourLog.info("Response:{}{}", NL, getResponseMessage(response)); } - } @Nonnull @@ -118,7 +114,5 @@ public class ReindexTerminologyCommand extends BaseRequestGeneratingCommand { return Strings.join(message, NL); } - public static final String NL = System.getProperty("line.separator"); - } diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/RunServerCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/RunServerCommand.java index e13b386d691..265df1e2847 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/RunServerCommand.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/RunServerCommand.java @@ -26,5 +26,4 @@ package ca.uhn.fhir.cli; public class RunServerCommand { public static final String RUN_SERVER_COMMAND = "run-server"; - } diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ToggleSearchParametersCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ToggleSearchParametersCommand.java index bd247398cdd..f88467521ed 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ToggleSearchParametersCommand.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ToggleSearchParametersCommand.java @@ -43,14 +43,16 @@ public class ToggleSearchParametersCommand extends BaseCommand { Options options = new Options(); addFhirVersionOption(options); addBaseUrlOption(options); - addRequiredOption(options, "u", "url", true, "The code system URL associated with this upload (e.g. " + ITermLoaderSvc.SCT_URI + ")"); + addRequiredOption( + options, + "u", + "url", + true, + "The code system URL associated with this upload (e.g. " + ITermLoaderSvc.SCT_URI + ")"); addBasicAuthOption(options); return options; } @Override - public void run(CommandLine theCommandLine) throws ParseException, ExecutionException { - - } - + public void run(CommandLine theCommandLine) throws ParseException, ExecutionException {} } diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/UploadTerminologyCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/UploadTerminologyCommand.java index f42db49d441..66be000e218 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/UploadTerminologyCommand.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/UploadTerminologyCommand.java @@ -79,10 +79,25 @@ public class UploadTerminologyCommand extends BaseRequestGeneratingCommand { public Options getOptions() { Options options = super.getOptions(); - addRequiredOption(options, "u", "url", true, "The code system URL associated with this upload (e.g. " + ITermLoaderSvc.SCT_URI + ")"); - addOptionalOption(options, "d", "data", true, "Local file to use to upload (can be a raw file or a ZIP containing the raw file)"); + addRequiredOption( + options, + "u", + "url", + true, + "The code system URL associated with this upload (e.g. 
" + ITermLoaderSvc.SCT_URI + ")"); + addOptionalOption( + options, + "d", + "data", + true, + "Local file to use to upload (can be a raw file or a ZIP containing the raw file)"); addOptionalOption(options, "m", "mode", true, "The upload mode: SNAPSHOT (default), ADD, REMOVE"); - addOptionalOption(options, "s", "size", true, "The maximum size of a single upload (default: 10MB). Examples: 150 kb, 3 mb, 1GB"); + addOptionalOption( + options, + "s", + "size", + true, + "The maximum size of a single upload (default: 10MB). Examples: 150 kb, 3 mb, 1GB"); return options; } @@ -133,14 +148,16 @@ public class UploadTerminologyCommand extends BaseRequestGeneratingCommand { invokeOperation(termUrl, datafile, client, requestName); } - private void invokeOperation(String theTermUrl, String[] theDatafile, IGenericClient theClient, String theOperationName) throws ParseException { + private void invokeOperation( + String theTermUrl, String[] theDatafile, IGenericClient theClient, String theOperationName) + throws ParseException { IBaseParameters inputParameters = ParametersUtil.newInstance(myFhirCtx); - boolean isDeltaOperation = - theOperationName.equals(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD) || - theOperationName.equals(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_REMOVE); + boolean isDeltaOperation = theOperationName.equals(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD) + || theOperationName.equals(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_REMOVE); - ParametersUtil.addParameterToParametersUri(myFhirCtx, inputParameters, TerminologyUploaderProvider.PARAM_SYSTEM, theTermUrl); + ParametersUtil.addParameterToParametersUri( + myFhirCtx, inputParameters, TerminologyUploaderProvider.PARAM_SYSTEM, theTermUrl); ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); ZipOutputStream zipOutputStream = new ZipOutputStream(byteArrayOutputStream, Charsets.UTF_8); @@ -163,11 +180,14 @@ public class UploadTerminologyCommand extends BaseRequestGeneratingCommand { String contents = IOUtils.toString(fileInputStream, Charsets.UTF_8); EncodingEnum encoding = EncodingEnum.detectEncodingNoDefault(contents); if (encoding == null) { - throw new ParseException(Msg.code(1541) + "Could not detect FHIR encoding for file: " + nextDataFile); + throw new ParseException( + Msg.code(1541) + "Could not detect FHIR encoding for file: " + nextDataFile); } - IBaseResource resource = encoding.newParser(myFhirCtx).parseResource(contents); - ParametersUtil.addParameterToParameters(myFhirCtx, inputParameters, TerminologyUploaderProvider.PARAM_CODESYSTEM, resource); + IBaseResource resource = + encoding.newParser(myFhirCtx).parseResource(contents); + ParametersUtil.addParameterToParameters( + myFhirCtx, inputParameters, TerminologyUploaderProvider.PARAM_CODESYSTEM, resource); } else { @@ -183,7 +203,6 @@ public class UploadTerminologyCommand extends BaseRequestGeneratingCommand { zipOutputStream.flush(); ourLog.info("Finished compressing {}", nextDataFile); - } } else if (nextDataFile.endsWith(".zip")) { @@ -195,10 +214,8 @@ public class UploadTerminologyCommand extends BaseRequestGeneratingCommand { } else { throw new ParseException(Msg.code(1542) + "Don't know how to handle file: " + nextDataFile); - } } - } zipOutputStream.flush(); zipOutputStream.close(); @@ -208,7 +225,11 @@ public class UploadTerminologyCommand extends BaseRequestGeneratingCommand { if (haveCompressedContents) { byte[] compressedBytes = byteArrayOutputStream.toByteArray(); - ourLog.info("Compressed {} bytes in {} file(s) into {} 
bytes", FileUtil.formatFileSize(compressedSourceBytesCount), compressedFileCount, FileUtil.formatFileSize(compressedBytes.length)); + ourLog.info( + "Compressed {} bytes in {} file(s) into {} bytes", + FileUtil.formatFileSize(compressedSourceBytesCount), + compressedFileCount, + FileUtil.formatFileSize(compressedBytes.length)); addFileToRequestBundle(inputParameters, "file:/files.zip", compressedBytes); } @@ -216,27 +237,31 @@ public class UploadTerminologyCommand extends BaseRequestGeneratingCommand { ourLog.info("Beginning upload - This may take a while..."); if (ourLog.isDebugEnabled() || HapiSystemProperties.isTestModeEnabled()) { - ourLog.debug("Submitting parameters: {}", myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(inputParameters)); + ourLog.debug( + "Submitting parameters: {}", + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(inputParameters)); } IBaseParameters response; try { response = theClient - .operation() - .onType(myFhirCtx.getResourceDefinition("CodeSystem").getImplementingClass()) - .named(theOperationName) - .withParameters(inputParameters) - .execute(); + .operation() + .onType(myFhirCtx.getResourceDefinition("CodeSystem").getImplementingClass()) + .named(theOperationName) + .withParameters(inputParameters) + .execute(); } catch (BaseServerResponseException e) { if (e.getOperationOutcome() != null) { - ourLog.error("Received the following response:\n{}", myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(e.getOperationOutcome())); + ourLog.error( + "Received the following response:\n{}", + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(e.getOperationOutcome())); } throw e; } - ourLog.info("Upload complete!"); - ourLog.debug("Response:\n{}", myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(response)); + ourLog.debug( + "Response:\n{}", myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(response)); } private void addFileToRequestBundle(IBaseParameters theInputParameters, String theFileName, byte[] theBytes) { @@ -246,7 +271,9 @@ public class UploadTerminologyCommand extends BaseRequestGeneratingCommand { String suffix = fileName.substring(fileName.lastIndexOf(".")); if (bytes.length > ourTransferSizeLimit) { - ourLog.info("File size is greater than {} - Going to use a local file reference instead of a direct HTTP transfer. Note that this will only work when executing this command on the same server as the FHIR server itself.", FileUtil.formatFileSize(ourTransferSizeLimit)); + ourLog.info( + "File size is greater than {} - Going to use a local file reference instead of a direct HTTP transfer. 
Note that this will only work when executing this command on the same server as the FHIR server itself.", + FileUtil.formatFileSize(ourTransferSizeLimit)); try { File tempFile = File.createTempFile("hapi-fhir-cli", suffix); @@ -267,7 +294,8 @@ public class UploadTerminologyCommand extends BaseRequestGeneratingCommand { if (bytes != null) { AttachmentUtil.setData(myFhirCtx, attachment, bytes); } - ParametersUtil.addParameterToParameters(myFhirCtx, theInputParameters, TerminologyUploaderProvider.PARAM_FILE, attachment); + ParametersUtil.addParameterToParameters( + myFhirCtx, theInputParameters, TerminologyUploaderProvider.PARAM_FILE, attachment); } /* @@ -276,21 +304,35 @@ public class UploadTerminologyCommand extends BaseRequestGeneratingCommand { */ private String getContentType(String theSuffix) { String retVal = ""; - if(StringUtils.isNotBlank(theSuffix)) { + if (StringUtils.isNotBlank(theSuffix)) { switch (theSuffix.toLowerCase()) { - case "csv" : retVal = "text/csv"; break; - case "xml" : retVal = "application/xml"; break; - case "json" : retVal = "application/json"; break; - case "zip" : retVal = "application/zip"; break; - default: retVal = "text/plain"; + case "csv": + retVal = "text/csv"; + break; + case "xml": + retVal = "application/xml"; + break; + case "json": + retVal = "application/json"; + break; + case "zip": + retVal = "application/zip"; + break; + default: + retVal = "text/plain"; } } - ourLog.debug("File suffix given was {} and contentType is {}, defaulting to content type text/plain", theSuffix, retVal); + ourLog.debug( + "File suffix given was {} and contentType is {}, defaulting to content type text/plain", + theSuffix, + retVal); return retVal; } private enum ModeEnum { - SNAPSHOT, ADD, REMOVE + SNAPSHOT, + ADD, + REMOVE } public void setTransferSizeBytes(long theTransferSizeBytes) { @@ -300,6 +342,7 @@ public class UploadTerminologyCommand extends BaseRequestGeneratingCommand { ourTransferSizeLimit = theTransferSizeBytes; } } + public void setTransferSizeLimitHuman(String sizeString) { if (isBlank(sizeString)) { setTransferSizeBytes(DEFAULT_TRANSFER_SIZE_LIMIT); @@ -319,5 +362,4 @@ public class UploadTerminologyCommand extends BaseRequestGeneratingCommand { } return retVal; } - } diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ValidateCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ValidateCommand.java index 4b2bd91bb86..8aaceacbe8f 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ValidateCommand.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ValidateCommand.java @@ -19,10 +19,8 @@ */ package ca.uhn.fhir.cli; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.context.support.DefaultProfileValidationSupport; -import ca.uhn.fhir.context.support.IValidationSupport; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.validation.FhirValidator; import ca.uhn.fhir.validation.SingleValidationMessage; @@ -37,9 +35,8 @@ import org.apache.commons.cli.ParseException; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.text.WordUtils; import org.fusesource.jansi.Ansi.Color; -import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator; -import org.hl7.fhir.common.hapi.validation.support.InMemoryTerminologyServerValidationSupport; import org.hl7.fhir.common.hapi.validation.support.ValidationSupportChain; +import 
org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator; import java.io.File; import java.io.FileInputStream; @@ -77,15 +74,23 @@ public class ValidateCommand extends BaseCommand { retVal.addOptionGroup(source); retVal.addOption("p", "profile", false, "Validate using Profiles (StructureDefinition / ValueSet)"); - retVal.addOption("r", "fetch-remote", false, - "Allow fetching remote resources (in other words, if a resource being validated refers to an external StructureDefinition, Questionnaire, etc. this flag allows the validator to access the internet to try and fetch this resource)"); + retVal.addOption( + "r", + "fetch-remote", + false, + "Allow fetching remote resources (in other words, if a resource being validated refers to an external StructureDefinition, Questionnaire, etc. this flag allows the validator to access the internet to try and fetch this resource)"); addOptionalOption(retVal, "l", "fetch-local", "filename", "Fetch a profile locally and use it if referenced"); - addOptionalOption(retVal, null, "igpack", true, "If specified, provides the filename of an IGPack file to include in validation"); + addOptionalOption( + retVal, + null, + "igpack", + true, + "If specified, provides the filename of an IGPack file to include in validation"); addOptionalOption(retVal, "x", "xsd", false, "Validate using Schemas"); addOptionalOption(retVal, "s", "sch", false, "Validate using Schematrons"); - addOptionalOption(retVal, "e", "encoding","encoding", "File encoding (default is UTF-8)"); + addOptionalOption(retVal, "e", "encoding", "encoding", "File encoding (default is UTF-8)"); - return retVal; + return retVal; } private String loadFile(String theFileName) throws ParseException { @@ -97,7 +102,8 @@ public class ValidateCommand extends BaseCommand { try { input = IOUtils.toByteArray(new FileInputStream(new File(theFileName))); } catch (IOException e) { - throw new ParseException(Msg.code(1615) + "Failed to load file '" + theFileName + "' - Error: " + e.toString()); + throw new ParseException( + Msg.code(1615) + "Failed to load file '" + theFileName + "' - Error: " + e.toString()); } return input; } @@ -127,7 +133,8 @@ public class ValidateCommand extends BaseCommand { ourLog.info("Fully read - Size is {}", FileHelper.getFileSizeDisplay(contents.length())); } - ca.uhn.fhir.rest.api.EncodingEnum enc = ca.uhn.fhir.rest.api.EncodingEnum.detectEncodingNoDefault(defaultString(contents)); + ca.uhn.fhir.rest.api.EncodingEnum enc = + ca.uhn.fhir.rest.api.EncodingEnum.detectEncodingNoDefault(defaultString(contents)); if (enc == null) { throw new ParseException(Msg.code(1619) + "Could not detect encoding (json/xml) of contents"); } @@ -145,7 +152,8 @@ public class ValidateCommand extends BaseCommand { switch (ctx.getVersion().getVersion()) { case DSTU2: { FhirInstanceValidator instanceValidator; - ValidationSupportChain validationSupportChain = ValidationSupportChainCreator.getValidationSupportChainDstu2(ctx, theCommandLine); + ValidationSupportChain validationSupportChain = + ValidationSupportChainCreator.getValidationSupportChainDstu2(ctx, theCommandLine); instanceValidator = new FhirInstanceValidator(validationSupportChain); val.registerValidatorModule(instanceValidator); break; @@ -154,12 +162,14 @@ public class ValidateCommand extends BaseCommand { case R4: { FhirInstanceValidator instanceValidator = new FhirInstanceValidator(ctx); val.registerValidatorModule(instanceValidator); - ValidationSupportChain validationSupportChain = 
ValidationSupportChainCreator.getValidationSupportChainR4(ctx, theCommandLine); + ValidationSupportChain validationSupportChain = + ValidationSupportChainCreator.getValidationSupportChainR4(ctx, theCommandLine); instanceValidator.setValidationSupport(validationSupportChain); break; } default: - throw new ParseException(Msg.code(1620) + "Profile validation (-p) is not supported for this FHIR version"); + throw new ParseException( + Msg.code(1620) + "Profile validation (-p) is not supported for this FHIR version"); } } @@ -187,13 +197,13 @@ public class ValidateCommand extends BaseCommand { if (isNotBlank(next.getLocationString())) { b.append(ansi().fg(Color.WHITE)).append(next.getLocationString()); } - String[] message = WordUtils.wrap(next.getMessage(), 80 - leftWidth, "\n", true).split("\\n"); + String[] message = WordUtils.wrap(next.getMessage(), 80 - leftWidth, "\n", true) + .split("\\n"); for (String line : message) { b.append(App.LINESEP); b.append(ansi().fg(Color.WHITE)); b.append(leftPad("", leftWidth)).append(line); } - } b.append(App.LINESEP); diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ValidationDataUploader.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ValidationDataUploader.java index a0866e2f0b0..cea85e11174 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ValidationDataUploader.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ValidationDataUploader.java @@ -19,9 +19,9 @@ */ package ca.uhn.fhir.cli; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.dstu2.resource.Bundle; import ca.uhn.fhir.model.dstu2.resource.Bundle.Entry; import ca.uhn.fhir.model.dstu2.resource.StructureDefinition; @@ -61,7 +61,13 @@ public class ValidationDataUploader extends BaseCommand { for (Iterator iter = theBundle.getEntry().iterator(); iter.hasNext(); ) { IBaseResource next = iter.next().getResource(); for (IIdType nextExclude : myExcludes) { - if (nextExclude.hasResourceType() && nextExclude.toUnqualifiedVersionless().getValue().equals(next.getIdElement().toUnqualifiedVersionless().getValue())) { + if (nextExclude.hasResourceType() + && nextExclude + .toUnqualifiedVersionless() + .getValue() + .equals(next.getIdElement() + .toUnqualifiedVersionless() + .getValue())) { iter.remove(); continue; } else if (nextExclude.getIdPart().equals(next.getIdElement().getIdPart())) { @@ -76,7 +82,13 @@ public class ValidationDataUploader extends BaseCommand { for (Iterator iter = theBundle.getEntry().iterator(); iter.hasNext(); ) { IBaseResource next = iter.next().getResource(); for (IIdType nextExclude : myExcludes) { - if (nextExclude.hasResourceType() && nextExclude.toUnqualifiedVersionless().getValue().equals(next.getIdElement().toUnqualifiedVersionless().getValue())) { + if (nextExclude.hasResourceType() + && nextExclude + .toUnqualifiedVersionless() + .getValue() + .equals(next.getIdElement() + .toUnqualifiedVersionless() + .getValue())) { iter.remove(); continue; } else if (nextExclude.getIdPart().equals(next.getIdElement().getIdPart())) { @@ -88,10 +100,18 @@ public class ValidationDataUploader extends BaseCommand { } private void filterBundle(org.hl7.fhir.r4.model.Bundle theBundle) { - for (Iterator iter = theBundle.getEntry().iterator(); iter.hasNext(); ) { + for (Iterator iter = + theBundle.getEntry().iterator(); + iter.hasNext(); ) { IBaseResource next = iter.next().getResource(); 
for (IIdType nextExclude : myExcludes) { - if (nextExclude.hasResourceType() && nextExclude.toUnqualifiedVersionless().getValue().equals(next.getIdElement().toUnqualifiedVersionless().getValue())) { + if (nextExclude.hasResourceType() + && nextExclude + .toUnqualifiedVersionless() + .getValue() + .equals(next.getIdElement() + .toUnqualifiedVersionless() + .getValue())) { iter.remove(); continue; } else if (nextExclude.getIdPart().equals(next.getIdElement().getIdPart())) { @@ -119,7 +139,8 @@ public class ValidationDataUploader extends BaseCommand { addFhirVersionOption(options); - opt = new Option(BASE_URL_PARAM, "target", true, "Base URL for the target server (e.g. \"http://example.com/fhir\")"); + opt = new Option( + BASE_URL_PARAM, "target", true, "Base URL for the target server (e.g. \"http://example.com/fhir\")"); opt.setRequired(true); options.addOption(opt); @@ -163,10 +184,10 @@ public class ValidationDataUploader extends BaseCommand { } else if (ctx.getVersion().getVersion() == FhirVersionEnum.R4) { uploadDefinitionsR4(theCommandLine, ctx); } - } - private void uploadDefinitionsDstu2(CommandLine theCommandLine, FhirContext ctx) throws CommandFailureException, ParseException { + private void uploadDefinitionsDstu2(CommandLine theCommandLine, FhirContext ctx) + throws CommandFailureException, ParseException { IGenericClient client = newClient(theCommandLine); ourLog.info("Uploading definitions to server"); @@ -176,7 +197,10 @@ public class ValidationDataUploader extends BaseCommand { String vsContents; try { ctx.getVersion().getPathToSchemaDefinitions(); - vsContents = IOUtils.toString(ValidationDataUploader.class.getResourceAsStream("/org/hl7/fhir/instance/model/valueset/" + "valuesets.xml"), "UTF-8"); + vsContents = IOUtils.toString( + ValidationDataUploader.class.getResourceAsStream( + "/org/hl7/fhir/instance/model/valueset/" + "valuesets.xml"), + "UTF-8"); } catch (IOException e) { throw new CommandFailureException(Msg.code(1591) + e.toString()); } @@ -188,14 +212,19 @@ public class ValidationDataUploader extends BaseCommand { ValueSet next = (ValueSet) i.getResource(); next.setId(next.getIdElement().toUnqualifiedVersionless()); - ourLog.info("Uploading ValueSet {}/{} : {}", new Object[] {count, total, next.getIdElement().getValue()}); + ourLog.info( + "Uploading ValueSet {}/{} : {}", + new Object[] {count, total, next.getIdElement().getValue()}); client.update().resource(next).execute(); count++; } try { - vsContents = IOUtils.toString(ValidationDataUploader.class.getResourceAsStream("/org/hl7/fhir/instance/model/valueset/" + "v3-codesystems.xml"), "UTF-8"); + vsContents = IOUtils.toString( + ValidationDataUploader.class.getResourceAsStream( + "/org/hl7/fhir/instance/model/valueset/" + "v3-codesystems.xml"), + "UTF-8"); } catch (IOException e) { throw new CommandFailureException(Msg.code(1592) + e.toString()); } @@ -207,14 +236,19 @@ public class ValidationDataUploader extends BaseCommand { ValueSet next = (ValueSet) i.getResource(); next.setId(next.getIdElement().toUnqualifiedVersionless()); - ourLog.info("Uploading v3-codesystems ValueSet {}/{} : {}", new Object[] {count, total, next.getIdElement().getValue()}); + ourLog.info( + "Uploading v3-codesystems ValueSet {}/{} : {}", + new Object[] {count, total, next.getIdElement().getValue()}); client.update().resource(next).execute(); count++; } try { - vsContents = IOUtils.toString(ValidationDataUploader.class.getResourceAsStream("/org/hl7/fhir/instance/model/valueset/" + "v2-tables.xml"), "UTF-8"); + vsContents = 
IOUtils.toString( + ValidationDataUploader.class.getResourceAsStream( + "/org/hl7/fhir/instance/model/valueset/" + "v2-tables.xml"), + "UTF-8"); } catch (IOException e) { throw new CommandFailureException(Msg.code(1593) + e.toString()); } @@ -225,7 +259,9 @@ public class ValidationDataUploader extends BaseCommand { ValueSet next = (ValueSet) i.getResource(); next.setId(next.getIdElement().toUnqualifiedVersionless()); - ourLog.info("Uploading v2-tables ValueSet {}/{} : {}", new Object[] {count, total, next.getIdElement().getValue()}); + ourLog.info( + "Uploading v2-tables ValueSet {}/{} : {}", + new Object[] {count, total, next.getIdElement().getValue()}); client.update().resource(next).execute(); count++; } @@ -235,7 +271,8 @@ public class ValidationDataUploader extends BaseCommand { ResourcePatternResolver patternResolver = new PathMatchingResourcePatternResolver(); Resource[] mappingLocations; try { - mappingLocations = patternResolver.getResources("classpath*:org/hl7/fhir/instance/model/profile/" + "*.profile.xml"); + mappingLocations = + patternResolver.getResources("classpath*:org/hl7/fhir/instance/model/profile/" + "*.profile.xml"); } catch (IOException e) { throw new CommandFailureException(Msg.code(1594) + e.toString()); } @@ -244,13 +281,16 @@ public class ValidationDataUploader extends BaseCommand { for (Resource i : mappingLocations) { StructureDefinition next; try { - next = ctx.newXmlParser().parseResource(StructureDefinition.class, IOUtils.toString(i.getInputStream(), "UTF-8")); + next = ctx.newXmlParser() + .parseResource(StructureDefinition.class, IOUtils.toString(i.getInputStream(), "UTF-8")); } catch (Exception e) { throw new CommandFailureException(Msg.code(1595) + e.toString()); } next.setId(next.getIdElement().toUnqualifiedVersionless()); - ourLog.info("Uploading StructureDefinition {}/{} : {}", new Object[] {count, total, next.getIdElement().getValue()}); + ourLog.info( + "Uploading StructureDefinition {}/{} : {}", + new Object[] {count, total, next.getIdElement().getValue()}); try { client.update().resource(next).execute(); } catch (Exception e) { @@ -266,7 +306,8 @@ public class ValidationDataUploader extends BaseCommand { ourLog.info("Finished uploading definitions to server (took {} ms)", delay); } - private void uploadDefinitionsDstu3(CommandLine theCommandLine, FhirContext theCtx) throws CommandFailureException, ParseException { + private void uploadDefinitionsDstu3(CommandLine theCommandLine, FhirContext theCtx) + throws CommandFailureException, ParseException { IGenericClient client = newClient(theCommandLine); ourLog.info("Uploading definitions to server"); @@ -278,7 +319,10 @@ public class ValidationDataUploader extends BaseCommand { try { theCtx.getVersion().getPathToSchemaDefinitions(); - vsContents = IOUtils.toString(ValidationDataUploader.class.getResourceAsStream("/org/hl7/fhir/dstu3/model/valueset/" + "valuesets.xml"), "UTF-8"); + vsContents = IOUtils.toString( + ValidationDataUploader.class.getResourceAsStream( + "/org/hl7/fhir/dstu3/model/valueset/" + "valuesets.xml"), + "UTF-8"); } catch (IOException e) { throw new CommandFailureException(Msg.code(1596) + e.toString()); } @@ -293,7 +337,9 @@ public class ValidationDataUploader extends BaseCommand { int bytes = theCtx.newXmlParser().encodeResourceToString(next).length(); - ourLog.info("Uploading ValueSet {}/{} : {} ({} bytes}", new Object[] {count, total, next.getIdElement().getValue(), bytes}); + ourLog.info( + "Uploading ValueSet {}/{} : {} ({} bytes}", + new Object[] {count, total, 
next.getIdElement().getValue(), bytes}); try { IIdType id = client.update().resource(next).execute().getId(); ourLog.info(" - Got ID: {}", id.getValue()); @@ -306,7 +352,10 @@ public class ValidationDataUploader extends BaseCommand { } try { - vsContents = IOUtils.toString(ValidationDataUploader.class.getResourceAsStream("/org/hl7/fhir/dstu3/model/valueset/" + "v3-codesystems.xml"), "UTF-8"); + vsContents = IOUtils.toString( + ValidationDataUploader.class.getResourceAsStream( + "/org/hl7/fhir/dstu3/model/valueset/" + "v3-codesystems.xml"), + "UTF-8"); } catch (IOException e) { throw new CommandFailureException(Msg.code(1597) + e.toString()); } @@ -320,7 +369,9 @@ public class ValidationDataUploader extends BaseCommand { org.hl7.fhir.dstu3.model.Resource next = i.getResource(); next.setId(next.getIdElement().toUnqualifiedVersionless()); - ourLog.info("Uploading v3-codesystems ValueSet {}/{} : {}", new Object[] {count, total, next.getIdElement().getValue()}); + ourLog.info( + "Uploading v3-codesystems ValueSet {}/{} : {}", + new Object[] {count, total, next.getIdElement().getValue()}); try { client.update().resource(next).execute(); } catch (Exception e) { @@ -330,7 +381,10 @@ public class ValidationDataUploader extends BaseCommand { } try { - vsContents = IOUtils.toString(ValidationDataUploader.class.getResourceAsStream("/org/hl7/fhir/dstu3/model/valueset/" + "v2-tables.xml"), "UTF-8"); + vsContents = IOUtils.toString( + ValidationDataUploader.class.getResourceAsStream( + "/org/hl7/fhir/dstu3/model/valueset/" + "v2-tables.xml"), + "UTF-8"); } catch (IOException e) { throw new CommandFailureException(Msg.code(1598) + e.toString()); } @@ -345,14 +399,15 @@ public class ValidationDataUploader extends BaseCommand { } next.setId(next.getIdElement().toUnqualifiedVersionless()); - ourLog.info("Uploading v2-tables ValueSet {}/{} : {}", new Object[] {count, total, next.getIdElement().getValue()}); + ourLog.info( + "Uploading v2-tables ValueSet {}/{} : {}", + new Object[] {count, total, next.getIdElement().getValue()}); client.update().resource(next).execute(); count++; } ourLog.info("Finished uploading ValueSets"); - uploadDstu3Profiles(theCtx, client, "profile/profiles-resources"); uploadDstu3Profiles(theCtx, client, "profile/profiles-types"); uploadDstu3Profiles(theCtx, client, "profile/profiles-others"); @@ -365,7 +420,8 @@ public class ValidationDataUploader extends BaseCommand { ourLog.info("Finished uploading definitions to server (took {} ms)", delay); } - private void uploadDefinitionsR4(CommandLine theCommandLine, FhirContext theCtx) throws CommandFailureException, ParseException { + private void uploadDefinitionsR4(CommandLine theCommandLine, FhirContext theCtx) + throws CommandFailureException, ParseException { IGenericClient client = newClient(theCommandLine); ourLog.info("Uploading definitions to server"); @@ -377,7 +433,10 @@ public class ValidationDataUploader extends BaseCommand { try { theCtx.getVersion().getPathToSchemaDefinitions(); - vsContents = IOUtils.toString(ValidationDataUploader.class.getResourceAsStream("/org/hl7/fhir/r4/model/valueset/" + "valuesets.xml"), "UTF-8"); + vsContents = IOUtils.toString( + ValidationDataUploader.class.getResourceAsStream( + "/org/hl7/fhir/r4/model/valueset/" + "valuesets.xml"), + "UTF-8"); } catch (IOException e) { throw new CommandFailureException(Msg.code(1599) + e.toString()); } @@ -392,7 +451,9 @@ public class ValidationDataUploader extends BaseCommand { int bytes = theCtx.newXmlParser().encodeResourceToString(next).length(); - 
ourLog.info("Uploading ValueSet {}/{} : {} ({} bytes}", new Object[] {count, total, next.getIdElement().getValue(), bytes}); + ourLog.info( + "Uploading ValueSet {}/{} : {} ({} bytes}", + new Object[] {count, total, next.getIdElement().getValue(), bytes}); try { IIdType id = client.update().resource(next).execute().getId(); ourLog.info(" - Got ID: {}", id.getValue()); @@ -403,7 +464,10 @@ public class ValidationDataUploader extends BaseCommand { } try { - vsContents = IOUtils.toString(ValidationDataUploader.class.getResourceAsStream("/org/hl7/fhir/r4/model/valueset/" + "v3-codesystems.xml"), "UTF-8"); + vsContents = IOUtils.toString( + ValidationDataUploader.class.getResourceAsStream( + "/org/hl7/fhir/r4/model/valueset/" + "v3-codesystems.xml"), + "UTF-8"); } catch (IOException e) { throw new CommandFailureException(Msg.code(1600) + e.toString()); } @@ -416,14 +480,19 @@ public class ValidationDataUploader extends BaseCommand { org.hl7.fhir.r4.model.Resource next = i.getResource(); next.setId(next.getIdElement().toUnqualifiedVersionless()); - ourLog.info("Uploading v3-codesystems ValueSet {}/{} : {}", new Object[] {count, total, next.getIdElement().getValue()}); + ourLog.info( + "Uploading v3-codesystems ValueSet {}/{} : {}", + new Object[] {count, total, next.getIdElement().getValue()}); client.update().resource(next).execute(); count++; } try { - vsContents = IOUtils.toString(ValidationDataUploader.class.getResourceAsStream("/org/hl7/fhir/r4/model/valueset/" + "v2-tables.xml"), "UTF-8"); + vsContents = IOUtils.toString( + ValidationDataUploader.class.getResourceAsStream( + "/org/hl7/fhir/r4/model/valueset/" + "v2-tables.xml"), + "UTF-8"); } catch (IOException e) { throw new CommandFailureException(Msg.code(1601) + e.toString()); } @@ -434,18 +503,20 @@ public class ValidationDataUploader extends BaseCommand { for (org.hl7.fhir.r4.model.Bundle.BundleEntryComponent i : bundle.getEntry()) { org.hl7.fhir.r4.model.Resource next = i.getResource(); if (next.getIdElement().isIdPartValidLong()) { - next.setIdElement(new org.hl7.fhir.r4.model.IdType("v2-" + next.getIdElement().getIdPart())); + next.setIdElement(new org.hl7.fhir.r4.model.IdType( + "v2-" + next.getIdElement().getIdPart())); } next.setId(next.getIdElement().toUnqualifiedVersionless()); - ourLog.info("Uploading v2-tables ValueSet {}/{} : {}", new Object[] {count, total, next.getIdElement().getValue()}); + ourLog.info( + "Uploading v2-tables ValueSet {}/{} : {}", + new Object[] {count, total, next.getIdElement().getValue()}); client.update().resource(next).execute(); count++; } ourLog.info("Finished uploading ValueSets"); - uploadR4Profiles(theCtx, client, "profile/profiles-resources"); uploadR4Profiles(theCtx, client, "profile/profiles-types"); uploadR4Profiles(theCtx, client, "profile/profiles-others"); @@ -458,14 +529,17 @@ public class ValidationDataUploader extends BaseCommand { ourLog.info("Finished uploading definitions to server (took {} ms)", delay); } - private void uploadDstu3Profiles(FhirContext ctx, IGenericClient client, String theName) throws CommandFailureException { + private void uploadDstu3Profiles(FhirContext ctx, IGenericClient client, String theName) + throws CommandFailureException { int total; int count; org.hl7.fhir.dstu3.model.Bundle bundle; ourLog.info("Uploading " + theName); String vsContents; try { - vsContents = IOUtils.toString(ValidationDataUploader.class.getResourceAsStream("/org/hl7/fhir/dstu3/model/" + theName + ".xml"), "UTF-8"); + vsContents = IOUtils.toString( + 
ValidationDataUploader.class.getResourceAsStream("/org/hl7/fhir/dstu3/model/" + theName + ".xml"), + "UTF-8"); } catch (IOException e) { throw new CommandFailureException(Msg.code(1602) + e.toString()); } @@ -488,7 +562,10 @@ public class ValidationDataUploader extends BaseCommand { return -1; } // StructureDefinition, then OperationDefinition, then CompartmentDefinition - return theO2.getResource().getClass().getName().compareTo(theO1.getResource().getClass().getName()); + return theO2.getResource() + .getClass() + .getName() + .compareTo(theO1.getResource().getClass().getName()); } }); @@ -499,21 +576,26 @@ public class ValidationDataUploader extends BaseCommand { continue; } - ourLog.info("Uploading {} StructureDefinition {}/{} : {}", new Object[] {theName, count, total, next.getIdElement().getValue()}); + ourLog.info( + "Uploading {} StructureDefinition {}/{} : {}", + new Object[] {theName, count, total, next.getIdElement().getValue()}); client.update().resource(next).execute(); count++; } } - private void uploadR4Profiles(FhirContext theContext, IGenericClient theClient, String theName) throws CommandFailureException { + private void uploadR4Profiles(FhirContext theContext, IGenericClient theClient, String theName) + throws CommandFailureException { int total; int count; org.hl7.fhir.r4.model.Bundle bundle; ourLog.info("Uploading " + theName); String vsContents; try { - vsContents = IOUtils.toString(ValidationDataUploader.class.getResourceAsStream("/org/hl7/fhir/r4/model/" + theName + ".xml"), "UTF-8"); + vsContents = IOUtils.toString( + ValidationDataUploader.class.getResourceAsStream("/org/hl7/fhir/r4/model/" + theName + ".xml"), + "UTF-8"); } catch (IOException e) { throw new CommandFailureException(Msg.code(1603) + e.toString()); } @@ -525,7 +607,9 @@ public class ValidationDataUploader extends BaseCommand { Collections.sort(bundle.getEntry(), new Comparator() { @Override - public int compare(org.hl7.fhir.r4.model.Bundle.BundleEntryComponent theO1, org.hl7.fhir.r4.model.Bundle.BundleEntryComponent theO2) { + public int compare( + org.hl7.fhir.r4.model.Bundle.BundleEntryComponent theO1, + org.hl7.fhir.r4.model.Bundle.BundleEntryComponent theO2) { if (theO1.getResource() == null && theO2.getResource() == null) { return 0; } @@ -536,7 +620,10 @@ public class ValidationDataUploader extends BaseCommand { return -1; } // StructureDefinition, then OperationDefinition, then CompartmentDefinition - return theO2.getResource().getClass().getName().compareTo(theO1.getResource().getClass().getName()); + return theO2.getResource() + .getClass() + .getName() + .compareTo(theO1.getResource().getClass().getName()); } }); @@ -547,7 +634,9 @@ public class ValidationDataUploader extends BaseCommand { continue; } - ourLog.info("Uploading {} StructureDefinition {}/{} : {}", new Object[] {theName, count, total, next.getIdElement().getValue()}); + ourLog.info( + "Uploading {} StructureDefinition {}/{} : {}", + new Object[] {theName, count, total, next.getIdElement().getValue()}); try { theClient.update().resource(next).execute(); } catch (BaseServerResponseException e) { @@ -557,5 +646,4 @@ public class ValidationDataUploader extends BaseCommand { count++; } } - } diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ValidationSupportChainCreator.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ValidationSupportChainCreator.java index e07ebc6a8ed..879b681426b 100644 --- 
a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ValidationSupportChainCreator.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/ValidationSupportChainCreator.java @@ -34,8 +34,7 @@ public class ValidationSupportChainCreator { public static ValidationSupportChain getValidationSupportChainR4(FhirContext ctx, CommandLine commandLine) { ValidationSupportChain chain = new ValidationSupportChain( - new DefaultProfileValidationSupport(ctx), - new InMemoryTerminologyServerValidationSupport(ctx)); + new DefaultProfileValidationSupport(ctx), new InMemoryTerminologyServerValidationSupport(ctx)); if (commandLine.hasOption("l")) { try { @@ -59,7 +58,7 @@ public class ValidationSupportChainCreator { public static ValidationSupportChain getValidationSupportChainDstu2(FhirContext ctx, CommandLine commandLine) { ValidationSupportChain chain = new ValidationSupportChain( - new DefaultProfileValidationSupport(ctx), new InMemoryTerminologyServerValidationSupport(ctx)); + new DefaultProfileValidationSupport(ctx), new InMemoryTerminologyServerValidationSupport(ctx)); if (commandLine.hasOption("r")) { chain.addValidationSupport(new LoadingValidationSupportDstu2()); diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/WebsocketSubscribeCommand.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/WebsocketSubscribeCommand.java index 523a9df9221..8ec137c69b7 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/WebsocketSubscribeCommand.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/WebsocketSubscribeCommand.java @@ -69,7 +69,8 @@ public class WebsocketSubscribeCommand extends BaseCommand { public void run(CommandLine theCommandLine) throws ParseException { String target = theCommandLine.getOptionValue("t"); if (isBlank(target) || (!target.startsWith("ws://") && !target.startsWith("wss://"))) { - throw new ParseException(Msg.code(1536) + "Target (-t) needs to be in the form \"ws://foo\" or \"wss://foo\""); + throw new ParseException( + Msg.code(1536) + "Target (-t) needs to be in the form \"ws://foo\" or \"wss://foo\""); } IdDt subsId = new IdDt(theCommandLine.getOptionValue("i")); @@ -110,7 +111,6 @@ public class WebsocketSubscribeCommand extends BaseCommand { @SuppressWarnings("unused") private Session session; - public SimpleEchoSocket(String theSubsId) { mySubsId = theSubsId; } @@ -150,5 +150,4 @@ public class WebsocketSubscribeCommand extends BaseCommand { LOG_RECV.info("{}", theMsg); } } - } diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/client/HapiFhirCliRestfulClientFactory.java b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/client/HapiFhirCliRestfulClientFactory.java index 7bc7cfca520..1939fa234ba 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/client/HapiFhirCliRestfulClientFactory.java +++ b/hapi-fhir-cli/hapi-fhir-cli-api/src/main/java/ca/uhn/fhir/cli/client/HapiFhirCliRestfulClientFactory.java @@ -38,10 +38,10 @@ import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.impl.client.HttpClients; import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; -import javax.net.ssl.SSLContext; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; +import javax.net.ssl.SSLContext; /** * Intended for use with the HapiFhir CLI only. 
@@ -72,16 +72,20 @@ public class HapiFhirCliRestfulClientFactory extends RestfulClientFactory { } @Override - public synchronized IHttpClient getHttpClient(StringBuilder theUrl, Map<String, List<String>> theIfNoneExistParams, - String theIfNoneExistString, RequestTypeEnum theRequestType, List<Header>
    theHeaders) { - return new ApacheHttpClient(getNativeHttpClient(), theUrl, theIfNoneExistParams, theIfNoneExistString, theRequestType, theHeaders); + public synchronized IHttpClient getHttpClient( + StringBuilder theUrl, + Map<String, List<String>> theIfNoneExistParams, + String theIfNoneExistString, + RequestTypeEnum theRequestType, + List<Header>
    theHeaders) { + return new ApacheHttpClient( + getNativeHttpClient(), theUrl, theIfNoneExistParams, theIfNoneExistString, theRequestType, theHeaders); } public HttpClient getNativeHttpClient() { if (myHttpClient == null) { - RequestConfig defaultRequestConfig = - RequestConfig.custom() + RequestConfig defaultRequestConfig = RequestConfig.custom() .setSocketTimeout(getSocketTimeout()) .setConnectTimeout(getConnectTimeout()) .setConnectionRequestTimeout(getConnectionRequestTimeout()) @@ -89,24 +93,21 @@ public class HapiFhirCliRestfulClientFactory extends RestfulClientFactory { .build(); HttpClientBuilder builder = HttpClients.custom() - .useSystemProperties() - .setDefaultRequestConfig(defaultRequestConfig) - .disableCookieManagement(); + .useSystemProperties() + .setDefaultRequestConfig(defaultRequestConfig) + .disableCookieManagement(); PoolingHttpClientConnectionManager connectionManager; - if(myTlsAuthentication != null){ + if (myTlsAuthentication != null) { SSLContext sslContext = TlsAuthenticationSvc.createSslContext(myTlsAuthentication); SSLConnectionSocketFactory sslConnectionSocketFactory = new SSLConnectionSocketFactory(sslContext); builder.setSSLSocketFactory(sslConnectionSocketFactory); - Registry registry = RegistryBuilder - . create() - .register("https", sslConnectionSocketFactory) - .build(); - connectionManager = new PoolingHttpClientConnectionManager( - registry, null, null, null, 5000, TimeUnit.MILLISECONDS - ); - } - else { + Registry registry = RegistryBuilder.create() + .register("https", sslConnectionSocketFactory) + .build(); + connectionManager = + new PoolingHttpClientConnectionManager(registry, null, null, null, 5000, TimeUnit.MILLISECONDS); + } else { connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS); } @@ -125,12 +126,12 @@ public class HapiFhirCliRestfulClientFactory extends RestfulClientFactory { myHttpClient = null; } - public void useHttp(){ + public void useHttp() { myTlsAuthentication = null; resetHttpClient(); } - public void useHttps(TlsAuthentication theTlsAuthentication){ + public void useHttps(TlsAuthentication theTlsAuthentication) { myTlsAuthentication = theTlsAuthentication; resetHttpClient(); } @@ -144,5 +145,4 @@ public class HapiFhirCliRestfulClientFactory extends RestfulClientFactory { public void setProxy(String theHost, Integer thePort) { throw new UnsupportedOperationException(Msg.code(2120)); } - } diff --git a/hapi-fhir-client-okhttp/src/main/java/ca/uhn/fhir/okhttp/client/OkHttpRestfulClient.java b/hapi-fhir-client-okhttp/src/main/java/ca/uhn/fhir/okhttp/client/OkHttpRestfulClient.java index 7c43da19c41..0a347e3adc5 100644 --- a/hapi-fhir-client-okhttp/src/main/java/ca/uhn/fhir/okhttp/client/OkHttpRestfulClient.java +++ b/hapi-fhir-client-okhttp/src/main/java/ca/uhn/fhir/okhttp/client/OkHttpRestfulClient.java @@ -53,127 +53,130 @@ import static ca.uhn.fhir.okhttp.utils.UrlStringUtils.withTrailingQuestionMarkRe */ public class OkHttpRestfulClient implements IHttpClient { - private Call.Factory myClient; - private StringBuilder myUrl; - private Map> myIfNoneExistParams; - private String myIfNoneExistString; - private RequestTypeEnum myRequestType; - private List
    myHeaders; - private OkHttpRestfulRequest myRequest; + private Call.Factory myClient; + private StringBuilder myUrl; + private Map<String, List<String>> myIfNoneExistParams; + private String myIfNoneExistString; + private RequestTypeEnum myRequestType; + private List<Header>
    myHeaders; + private OkHttpRestfulRequest myRequest; - public OkHttpRestfulClient(Call.Factory theClient, - StringBuilder theUrl, - Map<String, List<String>> theIfNoneExistParams, - String theIfNoneExistString, - RequestTypeEnum theRequestType, - List<Header>
    theHeaders) { - myClient = theClient; - myUrl = theUrl; - myIfNoneExistParams = theIfNoneExistParams; - myIfNoneExistString = theIfNoneExistString; - myRequestType = theRequestType; - myHeaders = theHeaders; - } + public OkHttpRestfulClient( + Call.Factory theClient, + StringBuilder theUrl, + Map<String, List<String>> theIfNoneExistParams, + String theIfNoneExistString, + RequestTypeEnum theRequestType, + List<Header>
    theHeaders) { + myClient = theClient; + myUrl = theUrl; + myIfNoneExistParams = theIfNoneExistParams; + myIfNoneExistString = theIfNoneExistString; + myRequestType = theRequestType; + myHeaders = theHeaders; + } - @Override - public IHttpRequest createByteRequest(FhirContext theContext, String theContents, String theContentType, EncodingEnum theEncoding) { - initBaseRequest(theContext, theEncoding, createPostBody(theContents, theContentType)); - return myRequest; - } + @Override + public IHttpRequest createByteRequest( + FhirContext theContext, String theContents, String theContentType, EncodingEnum theEncoding) { + initBaseRequest(theContext, theEncoding, createPostBody(theContents, theContentType)); + return myRequest; + } - private void initBaseRequest(FhirContext theContext, EncodingEnum theEncoding, RequestBody body) { - String sanitisedUrl = withTrailingQuestionMarkRemoved(myUrl.toString()); - myRequest = new OkHttpRestfulRequest(myClient, sanitisedUrl, myRequestType, body); - addHeadersToRequest(myRequest, theEncoding, theContext); - } + private void initBaseRequest(FhirContext theContext, EncodingEnum theEncoding, RequestBody body) { + String sanitisedUrl = withTrailingQuestionMarkRemoved(myUrl.toString()); + myRequest = new OkHttpRestfulRequest(myClient, sanitisedUrl, myRequestType, body); + addHeadersToRequest(myRequest, theEncoding, theContext); + } - private RequestBody createPostBody(String theContents, String theContentType) { - return RequestBody.create(MediaType.parse(theContentType), theContents); - } + private RequestBody createPostBody(String theContents, String theContentType) { + return RequestBody.create(MediaType.parse(theContentType), theContents); + } - @Override - public IHttpRequest createParamRequest(FhirContext theContext, Map> theParams, EncodingEnum theEncoding) { - initBaseRequest(theContext, theEncoding, getFormBodyFromParams(theParams)); - return myRequest; - } + @Override + public IHttpRequest createParamRequest( + FhirContext theContext, Map> theParams, EncodingEnum theEncoding) { + initBaseRequest(theContext, theEncoding, getFormBodyFromParams(theParams)); + return myRequest; + } - private RequestBody getFormBodyFromParams(Map> queryParams) { - FormBody.Builder formBuilder = new FormBody.Builder(); - for (Map.Entry> paramEntry : queryParams.entrySet()) { - for (String value : paramEntry.getValue()) { - formBuilder.add(paramEntry.getKey(), value); - } - } + private RequestBody getFormBodyFromParams(Map> queryParams) { + FormBody.Builder formBuilder = new FormBody.Builder(); + for (Map.Entry> paramEntry : queryParams.entrySet()) { + for (String value : paramEntry.getValue()) { + formBuilder.add(paramEntry.getKey(), value); + } + } - return formBuilder.build(); - } + return formBuilder.build(); + } - @Override - public IHttpRequest createBinaryRequest(FhirContext theContext, IBaseBinary theBinary) { - initBaseRequest(theContext, null, createPostBody(theBinary.getContent(), theBinary.getContentType())); - return myRequest; - } + @Override + public IHttpRequest createBinaryRequest(FhirContext theContext, IBaseBinary theBinary) { + initBaseRequest(theContext, null, createPostBody(theBinary.getContent(), theBinary.getContentType())); + return myRequest; + } - private RequestBody createPostBody(byte[] theContents, String theContentType) { - return RequestBody.create(MediaType.parse(theContentType), theContents); - } + private RequestBody createPostBody(byte[] theContents, String theContentType) { + return RequestBody.create(MediaType.parse(theContentType), 
theContents); + } - @Override - public IHttpRequest createGetRequest(FhirContext theContext, EncodingEnum theEncoding) { - initBaseRequest(theContext, theEncoding, null); - return myRequest; - } + @Override + public IHttpRequest createGetRequest(FhirContext theContext, EncodingEnum theEncoding) { + initBaseRequest(theContext, theEncoding, null); + return myRequest; + } - private void addHeadersToRequest(OkHttpRestfulRequest theHttpRequest, EncodingEnum theEncoding, FhirContext theContext) { - if (myHeaders != null) { - for (Header next : myHeaders) { - theHttpRequest.addHeader(next.getName(), next.getValue()); - } - } + private void addHeadersToRequest( + OkHttpRestfulRequest theHttpRequest, EncodingEnum theEncoding, FhirContext theContext) { + if (myHeaders != null) { + for (Header next : myHeaders) { + theHttpRequest.addHeader(next.getName(), next.getValue()); + } + } - addUserAgentHeader(theHttpRequest, theContext); - addAcceptCharsetHeader(theHttpRequest); - MethodUtil.addAcceptHeaderToRequest(theEncoding, theHttpRequest, theContext); - addIfNoneExistHeader(theHttpRequest); - } + addUserAgentHeader(theHttpRequest, theContext); + addAcceptCharsetHeader(theHttpRequest); + MethodUtil.addAcceptHeaderToRequest(theEncoding, theHttpRequest, theContext); + addIfNoneExistHeader(theHttpRequest); + } - private void addUserAgentHeader(OkHttpRestfulRequest theHttpRequest, FhirContext theContext) { - theHttpRequest.addHeader("User-Agent", HttpClientUtil.createUserAgentString(theContext, "okhttp")); - } + private void addUserAgentHeader(OkHttpRestfulRequest theHttpRequest, FhirContext theContext) { + theHttpRequest.addHeader("User-Agent", HttpClientUtil.createUserAgentString(theContext, "okhttp")); + } - private void addAcceptCharsetHeader(OkHttpRestfulRequest theHttpRequest) { - theHttpRequest.addHeader("Accept-Charset", "utf-8"); - } + private void addAcceptCharsetHeader(OkHttpRestfulRequest theHttpRequest) { + theHttpRequest.addHeader("Accept-Charset", "utf-8"); + } - private void addIfNoneExistHeader(IHttpRequest result) { - if (myIfNoneExistParams != null) { - addIfNoneExistHeaderFromParams(result, myIfNoneExistParams); - } else if (myIfNoneExistString != null) { - addIfNoneExistHeaderFromString(result, myIfNoneExistString); - } - } + private void addIfNoneExistHeader(IHttpRequest result) { + if (myIfNoneExistParams != null) { + addIfNoneExistHeaderFromParams(result, myIfNoneExistParams); + } else if (myIfNoneExistString != null) { + addIfNoneExistHeaderFromString(result, myIfNoneExistString); + } + } - private void addIfNoneExistHeaderFromString(IHttpRequest result, String ifNoneExistString) { - StringBuilder sb = newHeaderBuilder(myUrl); - boolean shouldAddQuestionMark = !hasQuestionMark(sb); - sb.append(shouldAddQuestionMark ? '?' : '&'); - sb.append(everythingAfterFirstQuestionMark(ifNoneExistString)); - result.addHeader(Constants.HEADER_IF_NONE_EXIST, sb.toString()); - } + private void addIfNoneExistHeaderFromString(IHttpRequest result, String ifNoneExistString) { + StringBuilder sb = newHeaderBuilder(myUrl); + boolean shouldAddQuestionMark = !hasQuestionMark(sb); + sb.append(shouldAddQuestionMark ? '?' 
: '&'); + sb.append(everythingAfterFirstQuestionMark(ifNoneExistString)); + result.addHeader(Constants.HEADER_IF_NONE_EXIST, sb.toString()); + } - private void addIfNoneExistHeaderFromParams(IHttpRequest result, Map> ifNoneExistParams) { - StringBuilder sb = newHeaderBuilder(myUrl); - boolean shouldAddInitialQuestionMark = !hasQuestionMark(sb); - BaseHttpClientInvocation.appendExtraParamsWithQuestionMark(ifNoneExistParams, sb, shouldAddInitialQuestionMark); - result.addHeader(Constants.HEADER_IF_NONE_EXIST, sb.toString()); - } - - public static StringBuilder newHeaderBuilder(StringBuilder baseUrl) { - StringBuilder sb = new StringBuilder(baseUrl); - if (endsWith(baseUrl, '/')) { - deleteLastCharacter(sb); - } - return sb; - } + private void addIfNoneExistHeaderFromParams(IHttpRequest result, Map> ifNoneExistParams) { + StringBuilder sb = newHeaderBuilder(myUrl); + boolean shouldAddInitialQuestionMark = !hasQuestionMark(sb); + BaseHttpClientInvocation.appendExtraParamsWithQuestionMark(ifNoneExistParams, sb, shouldAddInitialQuestionMark); + result.addHeader(Constants.HEADER_IF_NONE_EXIST, sb.toString()); + } + public static StringBuilder newHeaderBuilder(StringBuilder baseUrl) { + StringBuilder sb = new StringBuilder(baseUrl); + if (endsWith(baseUrl, '/')) { + deleteLastCharacter(sb); + } + return sb; + } } diff --git a/hapi-fhir-client-okhttp/src/main/java/ca/uhn/fhir/okhttp/client/OkHttpRestfulClientFactory.java b/hapi-fhir-client-okhttp/src/main/java/ca/uhn/fhir/okhttp/client/OkHttpRestfulClientFactory.java index 7ad0f06df7b..01d8687ea36 100644 --- a/hapi-fhir-client-okhttp/src/main/java/ca/uhn/fhir/okhttp/client/OkHttpRestfulClientFactory.java +++ b/hapi-fhir-client-okhttp/src/main/java/ca/uhn/fhir/okhttp/client/OkHttpRestfulClientFactory.java @@ -19,23 +19,20 @@ */ package ca.uhn.fhir.okhttp.client; +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.rest.api.RequestTypeEnum; +import ca.uhn.fhir.rest.client.api.Header; +import ca.uhn.fhir.rest.client.api.IHttpClient; +import ca.uhn.fhir.rest.client.impl.RestfulClientFactory; +import okhttp3.Call; +import okhttp3.OkHttpClient; + import java.net.InetSocketAddress; import java.net.Proxy; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.i18n.Msg; -import ca.uhn.fhir.rest.api.RequestTypeEnum; -import ca.uhn.fhir.rest.client.api.Header; -import ca.uhn.fhir.rest.client.api.IHttpClient; -import ca.uhn.fhir.rest.client.impl.RestfulClientFactory; -import ca.uhn.fhir.tls.TlsAuthentication; -import okhttp3.Call; -import okhttp3.OkHttpClient; -import java.util.Optional; - /** * A Restful client factory based on OkHttp. 
* @@ -43,63 +40,65 @@ import java.util.Optional; */ public class OkHttpRestfulClientFactory extends RestfulClientFactory { - private Call.Factory myNativeClient; + private Call.Factory myNativeClient; - public OkHttpRestfulClientFactory() { - super(); - } + public OkHttpRestfulClientFactory() { + super(); + } - public OkHttpRestfulClientFactory(FhirContext theFhirContext) { - super(theFhirContext); - } + public OkHttpRestfulClientFactory(FhirContext theFhirContext) { + super(theFhirContext); + } - @Override - protected IHttpClient getHttpClient(String theServerBase) { - return getHttpClient(new StringBuilder(theServerBase), null, null, null, null); - } + @Override + protected IHttpClient getHttpClient(String theServerBase) { + return getHttpClient(new StringBuilder(theServerBase), null, null, null, null); + } - @Override - protected void resetHttpClient() { - myNativeClient = null; - } + @Override + protected void resetHttpClient() { + myNativeClient = null; + } public synchronized Call.Factory getNativeClient() { if (myNativeClient == null) { myNativeClient = new OkHttpClient() - .newBuilder() - .connectTimeout(getConnectTimeout(), TimeUnit.MILLISECONDS) - .readTimeout(getSocketTimeout(), TimeUnit.MILLISECONDS) - .writeTimeout(getSocketTimeout(), TimeUnit.MILLISECONDS) - .build(); + .newBuilder() + .connectTimeout(getConnectTimeout(), TimeUnit.MILLISECONDS) + .readTimeout(getSocketTimeout(), TimeUnit.MILLISECONDS) + .writeTimeout(getSocketTimeout(), TimeUnit.MILLISECONDS) + .build(); } return myNativeClient; } @Override - public IHttpClient getHttpClient(StringBuilder theUrl, - Map> theIfNoneExistParams, - String theIfNoneExistString, - RequestTypeEnum theRequestType, - List
    theHeaders) { - return new OkHttpRestfulClient(getNativeClient(), theUrl, theIfNoneExistParams, theIfNoneExistString, theRequestType, theHeaders); + public IHttpClient getHttpClient( + StringBuilder theUrl, + Map<String, List<String>> theIfNoneExistParams, + String theIfNoneExistString, + RequestTypeEnum theRequestType, + List<Header>
    theHeaders) { + return new OkHttpRestfulClient( + getNativeClient(), theUrl, theIfNoneExistParams, theIfNoneExistString, theRequestType, theHeaders); } - /** - * Only accepts clients of type {@link OkHttpClient} - * - * @param okHttpClient - */ - @Override - public void setHttpClient(Object okHttpClient) { - myNativeClient = (Call.Factory) okHttpClient; - } - - @Override - public void setProxy(String theHost, Integer thePort) { - Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(theHost, thePort)); - OkHttpClient.Builder builder = ((OkHttpClient)getNativeClient()).newBuilder().proxy(proxy); - setHttpClient(builder.build()); - } + /** + * Only accepts clients of type {@link OkHttpClient} + * + * @param okHttpClient + */ + @Override + public void setHttpClient(Object okHttpClient) { + myNativeClient = (Call.Factory) okHttpClient; + } + @Override + public void setProxy(String theHost, Integer thePort) { + Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(theHost, thePort)); + OkHttpClient.Builder builder = + ((OkHttpClient) getNativeClient()).newBuilder().proxy(proxy); + setHttpClient(builder.build()); + } } diff --git a/hapi-fhir-client-okhttp/src/main/java/ca/uhn/fhir/okhttp/client/OkHttpRestfulRequest.java b/hapi-fhir-client-okhttp/src/main/java/ca/uhn/fhir/okhttp/client/OkHttpRestfulRequest.java index 31612a7e701..5820e05fdab 100644 --- a/hapi-fhir-client-okhttp/src/main/java/ca/uhn/fhir/okhttp/client/OkHttpRestfulRequest.java +++ b/hapi-fhir-client-okhttp/src/main/java/ca/uhn/fhir/okhttp/client/OkHttpRestfulRequest.java @@ -41,53 +41,54 @@ import java.util.Map; */ public class OkHttpRestfulRequest extends BaseHttpRequest implements IHttpRequest { - private final Request.Builder myRequestBuilder; - private Factory myClient; - private String myUrl; - private RequestTypeEnum myRequestTypeEnum; - private RequestBody myRequestBody; + private final Request.Builder myRequestBuilder; + private Factory myClient; + private String myUrl; + private RequestTypeEnum myRequestTypeEnum; + private RequestBody myRequestBody; - public OkHttpRestfulRequest(Call.Factory theClient, String theUrl, RequestTypeEnum theRequestTypeEnum, RequestBody theRequestBody) { - myClient = theClient; - myUrl = theUrl; - myRequestTypeEnum = theRequestTypeEnum; - myRequestBody = theRequestBody; + public OkHttpRestfulRequest( + Call.Factory theClient, String theUrl, RequestTypeEnum theRequestTypeEnum, RequestBody theRequestBody) { + myClient = theClient; + myUrl = theUrl; + myRequestTypeEnum = theRequestTypeEnum; + myRequestBody = theRequestBody; - myRequestBuilder = new Request.Builder().url(theUrl); - } + myRequestBuilder = new Request.Builder().url(theUrl); + } - public Request.Builder getRequest() { - return myRequestBuilder; - } + public Request.Builder getRequest() { + return myRequestBuilder; + } - @Override - public void addHeader(String theName, String theValue) { - myRequestBuilder.addHeader(theName, theValue); - } + @Override + public void addHeader(String theName, String theValue) { + myRequestBuilder.addHeader(theName, theValue); + } - @Override - public IHttpResponse execute() throws IOException { - StopWatch responseStopWatch = new StopWatch(); - myRequestBuilder.method(getHttpVerbName(), myRequestBody); - Call call = myClient.newCall(myRequestBuilder.build()); - return new OkHttpRestfulResponse(call.execute(), responseStopWatch); - } + @Override + public IHttpResponse execute() throws IOException { + StopWatch responseStopWatch = new StopWatch(); + 
myRequestBuilder.method(getHttpVerbName(), myRequestBody); + Call call = myClient.newCall(myRequestBuilder.build()); + return new OkHttpRestfulResponse(call.execute(), responseStopWatch); + } - @Override - public Map> getAllHeaders() { - return Collections.unmodifiableMap(myRequestBuilder.build().headers().toMultimap()); - } + @Override + public Map> getAllHeaders() { + return Collections.unmodifiableMap(myRequestBuilder.build().headers().toMultimap()); + } - @Override - public String getRequestBodyFromStream() { - // returning null to indicate this is not supported, as documented in IHttpRequest's contract - return null; - } + @Override + public String getRequestBodyFromStream() { + // returning null to indicate this is not supported, as documented in IHttpRequest's contract + return null; + } - @Override - public String getUri() { - return myUrl; - } + @Override + public String getUri() { + return myUrl; + } @Override public void setUri(String theUrl) { @@ -95,13 +96,13 @@ public class OkHttpRestfulRequest extends BaseHttpRequest implements IHttpReques } @Override - public String getHttpVerbName() { - return myRequestTypeEnum.name(); - } + public String getHttpVerbName() { + return myRequestTypeEnum.name(); + } @Override public void removeHeaders(String theHeaderName) { - myRequestBuilder.removeHeader(theHeaderName); + myRequestBuilder.removeHeader(theHeaderName); } @Override diff --git a/hapi-fhir-client-okhttp/src/main/java/ca/uhn/fhir/okhttp/client/OkHttpRestfulResponse.java b/hapi-fhir-client-okhttp/src/main/java/ca/uhn/fhir/okhttp/client/OkHttpRestfulResponse.java index 7b1c3212846..2caec5b467c 100644 --- a/hapi-fhir-client-okhttp/src/main/java/ca/uhn/fhir/okhttp/client/OkHttpRestfulResponse.java +++ b/hapi-fhir-client-okhttp/src/main/java/ca/uhn/fhir/okhttp/client/OkHttpRestfulResponse.java @@ -20,21 +20,19 @@ package ca.uhn.fhir.okhttp.client; import ca.uhn.fhir.i18n.Msg; +import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.rest.client.api.IHttpResponse; +import ca.uhn.fhir.rest.client.impl.BaseHttpResponse; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import ca.uhn.fhir.util.StopWatch; +import okhttp3.MediaType; +import okhttp3.Response; +import org.apache.commons.io.IOUtils; + import java.io.*; import java.util.List; import java.util.Map; -import ca.uhn.fhir.rest.client.impl.BaseHttpResponse; -import ca.uhn.fhir.util.StopWatch; -import org.apache.commons.io.IOUtils; - -import ca.uhn.fhir.rest.api.Constants; - -import ca.uhn.fhir.rest.client.api.IHttpResponse; -import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; -import okhttp3.MediaType; -import okhttp3.Response; - /** * Wraps an OkHttp {@link Response} * @@ -140,5 +138,4 @@ public class OkHttpRestfulResponse extends BaseHttpResponse implements IHttpResp private MediaType typeAndSubtypeOnly(MediaType input) { return MediaType.parse(input.type() + "/" + input.subtype()); } - } diff --git a/hapi-fhir-client-okhttp/src/main/java/ca/uhn/fhir/okhttp/utils/UrlStringUtils.java b/hapi-fhir-client-okhttp/src/main/java/ca/uhn/fhir/okhttp/utils/UrlStringUtils.java index 4bda70d377e..d4aa6906471 100644 --- a/hapi-fhir-client-okhttp/src/main/java/ca/uhn/fhir/okhttp/utils/UrlStringUtils.java +++ b/hapi-fhir-client-okhttp/src/main/java/ca/uhn/fhir/okhttp/utils/UrlStringUtils.java @@ -21,24 +21,23 @@ package ca.uhn.fhir.okhttp.utils; public class UrlStringUtils { - public static String withTrailingQuestionMarkRemoved(String input) { - return input.replaceAll("\\?$", ""); - } + public static String 
withTrailingQuestionMarkRemoved(String input) { + return input.replaceAll("\\?$", ""); + } - public static String everythingAfterFirstQuestionMark(String input) { - return input.substring(input.indexOf('?') + 1); - } + public static String everythingAfterFirstQuestionMark(String input) { + return input.substring(input.indexOf('?') + 1); + } - public static boolean hasQuestionMark(StringBuilder sb) { - return sb.indexOf("?") != -1; - } + public static boolean hasQuestionMark(StringBuilder sb) { + return sb.indexOf("?") != -1; + } - public static void deleteLastCharacter(StringBuilder sb) { - sb.deleteCharAt(sb.length() - 1); - } - - public static boolean endsWith(StringBuilder sb, char c) { - return sb.length() > 0 && sb.charAt(sb.length() - 1) == c; - } + public static void deleteLastCharacter(StringBuilder sb) { + sb.deleteCharAt(sb.length() - 1); + } + public static boolean endsWith(StringBuilder sb, char c) { + return sb.length() > 0 && sb.charAt(sb.length() - 1) == c; + } } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ApacheHttpClient.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ApacheHttpClient.java index 4e090d7daf0..4235ef0de4a 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ApacheHttpClient.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ApacheHttpClient.java @@ -20,10 +20,10 @@ package ca.uhn.fhir.rest.client.apache; import ca.uhn.fhir.i18n.Msg; -import java.io.UnsupportedEncodingException; -import java.util.*; -import java.util.Map.Entry; - +import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.rest.api.RequestTypeEnum; +import ca.uhn.fhir.rest.client.api.*; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import org.apache.http.HttpEntity; import org.apache.http.NameValuePair; import org.apache.http.client.HttpClient; @@ -32,22 +32,27 @@ import org.apache.http.client.methods.*; import org.apache.http.entity.ByteArrayEntity; import org.apache.http.message.BasicNameValuePair; -import ca.uhn.fhir.rest.api.Constants; -import ca.uhn.fhir.rest.api.RequestTypeEnum; -import ca.uhn.fhir.rest.client.api.*; -import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import java.io.UnsupportedEncodingException; +import java.util.Map.Entry; +import java.util.*; /** * A Http Client based on Apache. This is an adapter around the class * {@link org.apache.http.client.HttpClient HttpClient} - * + * * @author Peter Van Houte | peter.vanhoute@agfa.com | Agfa Healthcare */ public class ApacheHttpClient extends BaseHttpClient implements IHttpClient { private final HttpClient myClient; - public ApacheHttpClient(HttpClient theClient, StringBuilder theUrl, Map> theIfNoneExistParams, String theIfNoneExistString, RequestTypeEnum theRequestType, List
    theHeaders) { + public ApacheHttpClient( + HttpClient theClient, + StringBuilder theUrl, + Map<String, List<String>> theIfNoneExistParams, + String theIfNoneExistString, + RequestTypeEnum theRequestType, + List<Header>
    theHeaders) { super(theUrl, theIfNoneExistParams, theIfNoneExistString, theRequestType, theHeaders); this.myClient = theClient; } @@ -55,29 +60,28 @@ public class ApacheHttpClient extends BaseHttpClient implements IHttpClient { private HttpRequestBase constructRequestBase(HttpEntity theEntity) { String url = myUrl.toString(); switch (myRequestType) { - case DELETE: - return new HttpDelete(url); - case PATCH: - HttpPatch httpPatch = new HttpPatch(url); - httpPatch.setEntity(theEntity); - return httpPatch; - case OPTIONS: - return new HttpOptions(url); - case POST: - HttpPost httpPost = new HttpPost(url); - httpPost.setEntity(theEntity); - return httpPost; - case PUT: - HttpPut httpPut = new HttpPut(url); - httpPut.setEntity(theEntity); - return httpPut; - case GET: - default: - return new HttpGet(url); + case DELETE: + return new HttpDelete(url); + case PATCH: + HttpPatch httpPatch = new HttpPatch(url); + httpPatch.setEntity(theEntity); + return httpPatch; + case OPTIONS: + return new HttpOptions(url); + case POST: + HttpPost httpPost = new HttpPost(url); + httpPost.setEntity(theEntity); + return httpPost; + case PUT: + HttpPut httpPut = new HttpPut(url); + httpPut.setEntity(theEntity); + return httpPut; + case GET: + default: + return new HttpGet(url); } } - private UrlEncodedFormEntity createFormEntity(List parameters) { try { return new UrlEncodedFormEntity(parameters, "UTF-8"); @@ -86,10 +90,9 @@ public class ApacheHttpClient extends BaseHttpClient implements IHttpClient { } } - @Override protected IHttpRequest createHttpRequest() { - return createHttpRequest((HttpEntity)null); + return createHttpRequest((HttpEntity) null); } @Override @@ -122,7 +125,6 @@ public class ApacheHttpClient extends BaseHttpClient implements IHttpClient { return createHttpRequest(entity); } - @Override protected IHttpRequest createHttpRequest(String theContents) { /* diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ApacheHttpRequest.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ApacheHttpRequest.java index 371213c7af4..a3b62d055f5 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ApacheHttpRequest.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ApacheHttpRequest.java @@ -108,7 +108,9 @@ public class ApacheHttpRequest extends BaseHttpRequest implements IHttpRequest { HttpEntity entity = ((HttpEntityEnclosingRequest) myRequest).getEntity(); if (entity.isRepeatable()) { final Header contentTypeHeader = myRequest.getFirstHeader("Content-Type"); - Charset charset = contentTypeHeader == null ? null : ContentType.parse(contentTypeHeader.getValue()).getCharset(); + Charset charset = contentTypeHeader == null + ? 
null + : ContentType.parse(contentTypeHeader.getValue()).getCharset(); return IOUtils.toString(entity.getContent(), charset); } } @@ -129,5 +131,4 @@ public class ApacheHttpRequest extends BaseHttpRequest implements IHttpRequest { public String toString() { return myRequest.toString(); } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ApacheHttpResponse.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ApacheHttpResponse.java index 527d02a1f13..33c573d7054 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ApacheHttpResponse.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ApacheHttpResponse.java @@ -20,26 +20,25 @@ package ca.uhn.fhir.rest.client.apache; import ca.uhn.fhir.i18n.Msg; +import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.rest.client.api.IHttpResponse; +import ca.uhn.fhir.rest.client.impl.BaseHttpResponse; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import ca.uhn.fhir.util.StopWatch; +import org.apache.commons.io.IOUtils; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.entity.ContentType; +import org.apache.http.*; + import java.io.*; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.*; -import ca.uhn.fhir.rest.client.impl.BaseHttpResponse; -import ca.uhn.fhir.util.StopWatch; -import org.apache.commons.io.IOUtils; -import org.apache.http.*; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.entity.ContentType; - -import ca.uhn.fhir.rest.api.Constants; -import ca.uhn.fhir.rest.client.api.IHttpResponse; -import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; - /** * A Http Response based on Apache. 
This is an adapter around the class * {@link org.apache.http.HttpResponse HttpResponse} - * + * * @author Peter Van Houte | peter.vanhoute@agfa.com | Agfa Healthcare */ public class ApacheHttpResponse extends BaseHttpResponse implements IHttpResponse { @@ -90,13 +89,15 @@ public class ApacheHttpResponse extends BaseHttpResponse implements IHttpRespons return new StringReader(""); } Charset charset = null; - if (entity.getContentType() != null && entity.getContentType().getElements() != null + if (entity.getContentType() != null + && entity.getContentType().getElements() != null && entity.getContentType().getElements().length > 0) { ContentType ct = ContentType.get(entity); charset = ct.getCharset(); } if (charset == null) { - if (Constants.STATUS_HTTP_204_NO_CONTENT != myResponse.getStatusLine().getStatusCode()) { + if (Constants.STATUS_HTTP_204_NO_CONTENT + != myResponse.getStatusLine().getStatusCode()) { ourLog.debug("Response did not specify a charset, defaulting to utf-8"); } charset = StandardCharsets.UTF_8; @@ -114,7 +115,6 @@ public class ApacheHttpResponse extends BaseHttpResponse implements IHttpRespons List list = headers.computeIfAbsent(name, k -> new ArrayList<>()); list.add(next.getValue()); } - } return headers; } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ApacheRestfulClientFactory.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ApacheRestfulClientFactory.java index e5519f8b5a8..b76e06d2d5e 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ApacheRestfulClientFactory.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ApacheRestfulClientFactory.java @@ -44,7 +44,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; /** * A Restful Factory to create clients, requests and responses based on the Apache httpclient library. - * + * * @author Peter Van Houte | peter.vanhoute@agfa.com | Agfa Healthcare */ public class ApacheRestfulClientFactory extends RestfulClientFactory { @@ -61,7 +61,7 @@ public class ApacheRestfulClientFactory extends RestfulClientFactory { /** * Constructor - * + * * @param theContext * The context */ @@ -75,17 +75,21 @@ public class ApacheRestfulClientFactory extends RestfulClientFactory { } @Override - public synchronized IHttpClient getHttpClient(StringBuilder theUrl, Map> theIfNoneExistParams, - String theIfNoneExistString, RequestTypeEnum theRequestType, List
    theHeaders) { - return new ApacheHttpClient(getNativeHttpClient(), theUrl, theIfNoneExistParams, theIfNoneExistString, theRequestType, theHeaders); + public synchronized IHttpClient getHttpClient( + StringBuilder theUrl, + Map<String, List<String>> theIfNoneExistParams, + String theIfNoneExistString, + RequestTypeEnum theRequestType, + List<Header>
    theHeaders) { + return new ApacheHttpClient( + getNativeHttpClient(), theUrl, theIfNoneExistParams, theIfNoneExistString, theRequestType, theHeaders); } public HttpClient getNativeHttpClient() { if (myHttpClient == null) { - //TODO: Use of a deprecated method should be resolved. - RequestConfig defaultRequestConfig = - RequestConfig.custom() + // TODO: Use of a deprecated method should be resolved. + RequestConfig defaultRequestConfig = RequestConfig.custom() .setSocketTimeout(getSocketTimeout()) .setConnectTimeout(getConnectTimeout()) .setConnectionRequestTimeout(getConnectionRequestTimeout()) @@ -94,25 +98,26 @@ public class ApacheRestfulClientFactory extends RestfulClientFactory { .build(); HttpClientBuilder builder = getHttpClientBuilder() - .useSystemProperties() - .setDefaultRequestConfig(defaultRequestConfig) - .disableCookieManagement(); + .useSystemProperties() + .setDefaultRequestConfig(defaultRequestConfig) + .disableCookieManagement(); - PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS); + PoolingHttpClientConnectionManager connectionManager = + new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS); connectionManager.setMaxTotal(getPoolMaxTotal()); connectionManager.setDefaultMaxPerRoute(getPoolMaxPerRoute()); builder.setConnectionManager(connectionManager); if (myProxy != null && isNotBlank(getProxyUsername()) && isNotBlank(getProxyPassword())) { CredentialsProvider credsProvider = new BasicCredentialsProvider(); - credsProvider.setCredentials(new AuthScope(myProxy.getHostName(), myProxy.getPort()), + credsProvider.setCredentials( + new AuthScope(myProxy.getHostName(), myProxy.getPort()), new UsernamePasswordCredentials(getProxyUsername(), getProxyPassword())); builder.setProxyAuthenticationStrategy(new ProxyAuthenticationStrategy()); builder.setDefaultCredentialsProvider(credsProvider); } myHttpClient = builder.build(); - } return myHttpClient; @@ -144,5 +149,4 @@ public class ApacheRestfulClientFactory extends RestfulClientFactory { myProxy = null; } } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/BaseHttpClient.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/BaseHttpClient.java index 113f72f7502..79fc6d71461 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/BaseHttpClient.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/BaseHttpClient.java @@ -19,11 +19,6 @@ */ package ca.uhn.fhir.rest.client.apache; -import java.util.List; -import java.util.Map; - -import org.hl7.fhir.instance.model.api.IBaseBinary; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.rest.api.*; import ca.uhn.fhir.rest.client.api.Header; @@ -32,6 +27,10 @@ import ca.uhn.fhir.rest.client.api.IHttpClient; import ca.uhn.fhir.rest.client.api.IHttpRequest; import ca.uhn.fhir.rest.client.impl.BaseHttpClientInvocation; import ca.uhn.fhir.rest.client.method.MethodUtil; +import org.hl7.fhir.instance.model.api.IBaseBinary; + +import java.util.List; +import java.util.Map; public abstract class BaseHttpClient implements IHttpClient { @@ -44,7 +43,12 @@ public abstract class BaseHttpClient implements IHttpClient { /** * Constructor */ - public BaseHttpClient(StringBuilder theUrl, Map> theIfNoneExistParams, String theIfNoneExistString, RequestTypeEnum theRequestType, List
    theHeaders) { + public BaseHttpClient( + StringBuilder theUrl, + Map<String, List<String>> theIfNoneExistParams, + String theIfNoneExistString, + RequestTypeEnum theRequestType, + List<Header>
    theHeaders) { this.myUrl = theUrl; this.myIfNoneExistParams = theIfNoneExistParams; this.myIfNoneExistString = theIfNoneExistString; @@ -93,7 +97,8 @@ public abstract class BaseHttpClient implements IHttpClient { } @Override - public IHttpRequest createByteRequest(FhirContext theContext, String theContents, String theContentType, EncodingEnum theEncoding) { + public IHttpRequest createByteRequest( + FhirContext theContext, String theContents, String theContentType, EncodingEnum theEncoding) { IHttpRequest retVal = createHttpRequest(theContents); addHeadersToRequest(retVal, theEncoding, theContext); retVal.addHeader(Constants.HEADER_CONTENT_TYPE, theContentType + Constants.HEADER_SUFFIX_CT_UTF_8); @@ -116,7 +121,8 @@ public abstract class BaseHttpClient implements IHttpClient { protected abstract IHttpRequest createHttpRequest(String theContents); @Override - public IHttpRequest createParamRequest(FhirContext theContext, Map> theParams, EncodingEnum theEncoding) { + public IHttpRequest createParamRequest( + FhirContext theContext, Map> theParams, EncodingEnum theEncoding) { IHttpRequest retVal = createHttpRequest(theParams); addHeadersToRequest(retVal, theEncoding, theContext); return retVal; diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/GZipContentInterceptor.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/GZipContentInterceptor.java index 23c7ccbad55..ce6869ec4fd 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/GZipContentInterceptor.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/GZipContentInterceptor.java @@ -19,34 +19,33 @@ */ package ca.uhn.fhir.rest.client.apache; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.util.zip.GZIPOutputStream; - +import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.rest.client.api.*; import org.apache.http.Header; import org.apache.http.HttpEntityEnclosingRequest; import org.apache.http.client.methods.HttpRequestBase; import org.apache.http.entity.ByteArrayEntity; -import ca.uhn.fhir.rest.api.Constants; -import ca.uhn.fhir.rest.client.api.*; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.zip.GZIPOutputStream; /** * Client interceptor which GZip compresses outgoing (POST/PUT) contents being uploaded - * from the client to the server. This can improve performance by reducing network + * from the client to the server. This can improve performance by reducing network * load time. 
*/ public class GZipContentInterceptor implements IClientInterceptor { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(GZipContentInterceptor.class); - + @Override public void interceptRequest(IHttpRequest theRequestInterface) { HttpRequestBase theRequest = ((ApacheHttpRequest) theRequestInterface).getApacheRequest(); if (theRequest instanceof HttpEntityEnclosingRequest) { Header[] encodingHeaders = theRequest.getHeaders(Constants.HEADER_CONTENT_ENCODING); if (encodingHeaders == null || encodingHeaders.length == 0) { - HttpEntityEnclosingRequest req = (HttpEntityEnclosingRequest)theRequest; - + HttpEntityEnclosingRequest req = (HttpEntityEnclosingRequest) theRequest; + ByteArrayOutputStream bos = new ByteArrayOutputStream(); GZIPOutputStream gos; try { @@ -57,19 +56,17 @@ public class GZipContentInterceptor implements IClientInterceptor { ourLog.warn("Failed to GZip outgoing content", e); return; } - + byte[] byteArray = bos.toByteArray(); ByteArrayEntity newEntity = new ByteArrayEntity(byteArray); req.setEntity(newEntity); req.addHeader(Constants.HEADER_CONTENT_ENCODING, "gzip"); } } - } @Override public void interceptResponse(IHttpResponse theResponse) throws IOException { // nothing } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ResourceEntity.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ResourceEntity.java index 418be8d4b01..800f95fccf3 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ResourceEntity.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/apache/ResourceEntity.java @@ -34,6 +34,8 @@ import java.nio.charset.UnsupportedCharsetException; public class ResourceEntity extends StringEntity { public ResourceEntity(FhirContext theContext, IBaseResource theResource) throws UnsupportedCharsetException { - super(theContext.newJsonParser().encodeResourceToString(theResource), ContentType.parse(Constants.CT_FHIR_JSON_NEW)); + super( + theContext.newJsonParser().encodeResourceToString(theResource), + ContentType.parse(Constants.CT_FHIR_JSON_NEW)); } } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/BaseClient.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/BaseClient.java index c9ee276d150..857fc4181b5 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/BaseClient.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/BaseClient.java @@ -67,7 +67,6 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nonnull; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; @@ -79,6 +78,7 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -157,7 +157,8 @@ public abstract class BaseClient implements IRestfulClient { public T fetchResourceFromUrl(Class theResourceType, String theUrl) { BaseHttpClientInvocation clientInvocation = new HttpGetClientInvocation(getFhirContext(), theUrl); ResourceResponseHandler binding = new ResourceResponseHandler<>(theResourceType); - return invokeClient(getFhirContext(), binding, clientInvocation, null, false, false, null, null, null, null, null); + return invokeClient( + 
getFhirContext(), binding, clientInvocation, null, false, false, null, null, null, null, null); } void forceConformanceCheck() { @@ -229,17 +230,42 @@ public abstract class BaseClient implements IRestfulClient { myRequestFormatParamStyle = theRequestFormatParamStyle; } - protected T invokeClient(FhirContext theContext, IClientResponseHandler binding, BaseHttpClientInvocation clientInvocation) { + protected T invokeClient( + FhirContext theContext, IClientResponseHandler binding, BaseHttpClientInvocation clientInvocation) { return invokeClient(theContext, binding, clientInvocation, false); } - protected T invokeClient(FhirContext theContext, IClientResponseHandler binding, BaseHttpClientInvocation clientInvocation, boolean theLogRequestAndResponse) { - return invokeClient(theContext, binding, clientInvocation, null, null, theLogRequestAndResponse, null, null, null, null, null); + protected T invokeClient( + FhirContext theContext, + IClientResponseHandler binding, + BaseHttpClientInvocation clientInvocation, + boolean theLogRequestAndResponse) { + return invokeClient( + theContext, + binding, + clientInvocation, + null, + null, + theLogRequestAndResponse, + null, + null, + null, + null, + null); } - protected T invokeClient(FhirContext theContext, IClientResponseHandler binding, BaseHttpClientInvocation clientInvocation, EncodingEnum theEncoding, Boolean thePrettyPrint, - boolean theLogRequestAndResponse, SummaryEnum theSummaryMode, Set theSubsetElements, CacheControlDirective theCacheControlDirective, String theCustomAcceptHeader, - Map> theCustomHeaders) { + protected T invokeClient( + FhirContext theContext, + IClientResponseHandler binding, + BaseHttpClientInvocation clientInvocation, + EncodingEnum theEncoding, + Boolean thePrettyPrint, + boolean theLogRequestAndResponse, + SummaryEnum theSummaryMode, + Set theSubsetElements, + CacheControlDirective theCacheControlDirective, + String theCustomAcceptHeader, + Map> theCustomHeaders) { if (!myDontValidateConformance) { myFactory.validateServerBaseIfConfiguredToDoSo(myUrlBase, myClient, this); @@ -273,7 +299,8 @@ public abstract class BaseClient implements IRestfulClient { } if (theSubsetElements != null && theSubsetElements.isEmpty() == false) { - params.put(Constants.PARAM_ELEMENTS, Collections.singletonList(StringUtils.join(theSubsetElements, ','))); + params.put( + Constants.PARAM_ELEMENTS, Collections.singletonList(StringUtils.join(theSubsetElements, ','))); } EncodingEnum encoding = getEncoding(); @@ -293,7 +320,11 @@ public abstract class BaseClient implements IRestfulClient { addToCacheControlHeader(b, Constants.CACHE_CONTROL_NO_CACHE, theCacheControlDirective.isNoCache()); addToCacheControlHeader(b, Constants.CACHE_CONTROL_NO_STORE, theCacheControlDirective.isNoStore()); if (theCacheControlDirective.getMaxResults() != null) { - addToCacheControlHeader(b, Constants.CACHE_CONTROL_MAX_RESULTS + "=" + theCacheControlDirective.getMaxResults().intValue(), true); + addToCacheControlHeader( + b, + Constants.CACHE_CONTROL_MAX_RESULTS + "=" + + theCacheControlDirective.getMaxResults().intValue(), + true); } if (b.length() > 0) { httpRequest.addHeader(Constants.HEADER_CACHE_CONTROL, b.toString()); @@ -309,7 +340,8 @@ public abstract class BaseClient implements IRestfulClient { } if (theCustomHeaders != null) { - AdditionalRequestHeadersInterceptor interceptor = new AdditionalRequestHeadersInterceptor(theCustomHeaders); + AdditionalRequestHeadersInterceptor interceptor = + new AdditionalRequestHeadersInterceptor(theCustomHeaders); 
interceptor.interceptRequest(httpRequest); } @@ -366,7 +398,8 @@ public abstract class BaseClient implements IRestfulClient { keepResponseAndLogIt(theLogRequestAndResponse, response, body); - BaseServerResponseException exception = BaseServerResponseException.newInstance(response.getStatus(), message); + BaseServerResponseException exception = + BaseServerResponseException.newInstance(response.getStatus(), message); exception.setOperationOutcome(oo); if (body != null) { @@ -396,7 +429,7 @@ public abstract class BaseClient implements IRestfulClient { } if (inputStreamToReturn == null) { - inputStreamToReturn = new ByteArrayInputStream(new byte[]{}); + inputStreamToReturn = new ByteArrayInputStream(new byte[] {}); } return binding.invokeClient(mimeType, inputStreamToReturn, response.getStatus(), headers); @@ -405,16 +438,32 @@ public abstract class BaseClient implements IRestfulClient { } catch (DataFormatException e) { String msg; if (httpRequest != null) { - msg = getFhirContext().getLocalizer().getMessage(BaseClient.class, "failedToParseResponse", httpRequest.getHttpVerbName(), httpRequest.getUri(), e.toString()); + msg = getFhirContext() + .getLocalizer() + .getMessage( + BaseClient.class, + "failedToParseResponse", + httpRequest.getHttpVerbName(), + httpRequest.getUri(), + e.toString()); } else { - msg = getFhirContext().getLocalizer().getMessage(BaseClient.class, "failedToParseResponse", "UNKNOWN", "UNKNOWN", e.toString()); + msg = getFhirContext() + .getLocalizer() + .getMessage(BaseClient.class, "failedToParseResponse", "UNKNOWN", "UNKNOWN", e.toString()); } throw new FhirClientConnectionException(Msg.code(1359) + msg, e); } catch (IllegalStateException e) { throw new FhirClientConnectionException(Msg.code(1360) + e); } catch (IOException e) { String msg; - msg = getFhirContext().getLocalizer().getMessage(BaseClient.class, "failedToParseResponse", httpRequest.getHttpVerbName(), httpRequest.getUri(), e.toString()); + msg = getFhirContext() + .getLocalizer() + .getMessage( + BaseClient.class, + "failedToParseResponse", + httpRequest.getHttpVerbName(), + httpRequest.getUri(), + e.toString()); throw new FhirClientConnectionException(Msg.code(1361) + msg, e); } catch (RuntimeException e) { throw e; @@ -510,9 +559,13 @@ public abstract class BaseClient implements IRestfulClient { protected final class ResourceOrBinaryResponseHandler extends ResourceResponseHandler { - @Override - public IBaseResource invokeClient(String theResponseMimeType, InputStream theResponseInputStream, int theResponseStatusCode, Map> theHeaders) throws BaseServerResponseException { + public IBaseResource invokeClient( + String theResponseMimeType, + InputStream theResponseInputStream, + int theResponseStatusCode, + Map> theHeaders) + throws BaseServerResponseException { /* * For operation responses, if the response content type is a FHIR content-type @@ -523,7 +576,8 @@ public abstract class BaseClient implements IRestfulClient { */ EncodingEnum respType = EncodingEnum.forContentType(theResponseMimeType); if (respType != null || theResponseStatusCode < 200 || theResponseStatusCode >= 300) { - return super.invokeClient(theResponseMimeType, theResponseInputStream, theResponseStatusCode, theHeaders); + return super.invokeClient( + theResponseMimeType, theResponseInputStream, theResponseStatusCode, theHeaders); } // Create a Binary resource to return @@ -546,7 +600,6 @@ public abstract class BaseClient implements IRestfulClient { return responseBinary; } - } protected class ResourceResponseHandler implements 
IClientResponseHandler { @@ -564,11 +617,16 @@ public abstract class BaseClient implements IRestfulClient { this(theReturnType, null, null); } - public ResourceResponseHandler(Class theReturnType, Class thePreferResponseType, IIdType theId) { + public ResourceResponseHandler( + Class theReturnType, Class thePreferResponseType, IIdType theId) { this(theReturnType, thePreferResponseType, theId, false); } - public ResourceResponseHandler(Class theReturnType, Class thePreferResponseType, IIdType theId, boolean theAllowHtmlResponse) { + public ResourceResponseHandler( + Class theReturnType, + Class thePreferResponseType, + IIdType theId, + boolean theAllowHtmlResponse) { this(theReturnType, toTypeList(thePreferResponseType), theId, theAllowHtmlResponse); } @@ -576,7 +634,11 @@ public abstract class BaseClient implements IRestfulClient { this(theClass, thePreferResponseTypes, null, false); } - public ResourceResponseHandler(Class theReturnType, List> thePreferResponseTypes, IIdType theId, boolean theAllowHtmlResponse) { + public ResourceResponseHandler( + Class theReturnType, + List> thePreferResponseTypes, + IIdType theId, + boolean theAllowHtmlResponse) { myReturnType = theReturnType; myId = theId; myPreferResponseTypes = thePreferResponseTypes; @@ -584,17 +646,25 @@ public abstract class BaseClient implements IRestfulClient { } @Override - public T invokeClient(String theResponseMimeType, InputStream theResponseInputStream, int theResponseStatusCode, Map> theHeaders) throws BaseServerResponseException { + public T invokeClient( + String theResponseMimeType, + InputStream theResponseInputStream, + int theResponseStatusCode, + Map> theHeaders) + throws BaseServerResponseException { if (theResponseStatusCode == Constants.STATUS_HTTP_204_NO_CONTENT) { return null; } EncodingEnum respType = EncodingEnum.forContentType(theResponseMimeType); if (respType == null) { - if (myAllowHtmlResponse && theResponseMimeType.toLowerCase().contains(Constants.CT_HTML) && myReturnType != null) { + if (myAllowHtmlResponse + && theResponseMimeType.toLowerCase().contains(Constants.CT_HTML) + && myReturnType != null) { return readHtmlResponse(theResponseInputStream); } - throw NonFhirResponseException.newInstance(theResponseStatusCode, theResponseMimeType, theResponseInputStream); + throw NonFhirResponseException.newInstance( + theResponseStatusCode, theResponseMimeType, theResponseInputStream); } IParser parser = respType.newParser(getFhirContext()); parser.setServerBaseUrl(getUrlBase()); @@ -613,7 +683,8 @@ public abstract class BaseClient implements IRestfulClient { RuntimeResourceDefinition resDef = getFhirContext().getResourceDefinition(myReturnType); IBaseResource instance = resDef.newInstance(); BaseRuntimeChildDefinition textChild = resDef.getChildByName("text"); - BaseRuntimeElementCompositeDefinition textElement = (BaseRuntimeElementCompositeDefinition) textChild.getChildByName("text"); + BaseRuntimeElementCompositeDefinition textElement = + (BaseRuntimeElementCompositeDefinition) textChild.getChildByName("text"); IBase textInstance = textElement.newInstance(); textChild.getMutator().addValue(instance, textInstance); @@ -623,13 +694,15 @@ public abstract class BaseClient implements IRestfulClient { try { divInstance.setValueAsString(IOUtils.toString(theResponseInputStream, Charsets.UTF_8)); } catch (Exception e) { - throw new InvalidResponseException(Msg.code(1364) + "Failed to process HTML response from server: " + e.getMessage(), 400, e); + throw new InvalidResponseException( + Msg.code(1364) + "Failed to 
process HTML response from server: " + e.getMessage(), 400, e); } divChild.getMutator().addValue(textInstance, divInstance); return (T) instance; } - public ResourceResponseHandler setPreferResponseTypes(List> thePreferResponseTypes) { + public ResourceResponseHandler setPreferResponseTypes( + List> thePreferResponseTypes) { myPreferResponseTypes = thePreferResponseTypes; return this; } @@ -643,5 +716,4 @@ public abstract class BaseClient implements IRestfulClient { } return preferResponseTypes; } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/BaseHttpClientInvocation.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/BaseHttpClientInvocation.java index 7605978792c..3ac5f8d170f 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/BaseHttpClientInvocation.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/BaseHttpClientInvocation.java @@ -49,7 +49,7 @@ public abstract class BaseHttpClientInvocation { /** * Create an HTTP request out of this client request - * + * * @param theUrlBase * The FHIR server base url (with a trailing "/") * @param theExtraParams @@ -58,21 +58,26 @@ public abstract class BaseHttpClientInvocation { * The encoding to use for any serialized content sent to the * server */ - public abstract IHttpRequest asHttpRequest(String theUrlBase, Map> theExtraParams, EncodingEnum theEncoding, Boolean thePrettyPrint); + public abstract IHttpRequest asHttpRequest( + String theUrlBase, + Map> theExtraParams, + EncodingEnum theEncoding, + Boolean thePrettyPrint); /** * Create an HTTP request for the given url, encoding and request-type - * + * * @param theUrl * The complete FHIR url to which the http request will be sent * @param theEncoding * The encoding to use for any serialized content sent to the * server * @param theRequestType - * the type of HTTP request (GET, DELETE, ..) - */ + * the type of HTTP request (GET, DELETE, ..) 
+ */ protected IHttpRequest createHttpRequest(String theUrl, EncodingEnum theEncoding, RequestTypeEnum theRequestType) { - IHttpClient httpClient = getRestfulClientFactory().getHttpClient(new StringBuilder(theUrl), null, null, theRequestType, myHeaders); + IHttpClient httpClient = getRestfulClientFactory() + .getHttpClient(new StringBuilder(theUrl), null, null, theRequestType, myHeaders); return httpClient.createGetRequest(getContext(), theEncoding); } @@ -98,7 +103,8 @@ public abstract class BaseHttpClientInvocation { return myContext.getRestfulClientFactory(); } - public static void appendExtraParamsWithQuestionMark(Map> theExtraParams, StringBuilder theUrlBuilder, boolean theWithQuestionMark) { + public static void appendExtraParamsWithQuestionMark( + Map> theExtraParams, StringBuilder theUrlBuilder, boolean theWithQuestionMark) { if (theExtraParams == null) { return; } @@ -120,5 +126,4 @@ public abstract class BaseHttpClientInvocation { } } } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/ClientInvocationHandler.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/ClientInvocationHandler.java index dd40a85179e..036568cd306 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/ClientInvocationHandler.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/ClientInvocationHandler.java @@ -19,16 +19,16 @@ */ package ca.uhn.fhir.rest.client.impl; -import ca.uhn.fhir.i18n.Msg; -import java.lang.reflect.InvocationHandler; -import java.lang.reflect.Method; -import java.util.Map; - import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.client.api.IHttpClient; import ca.uhn.fhir.rest.client.impl.ClientInvocationHandlerFactory.ILambda; import ca.uhn.fhir.rest.client.method.BaseMethodBinding; +import java.lang.reflect.InvocationHandler; +import java.lang.reflect.Method; +import java.util.Map; + public class ClientInvocationHandler extends BaseClient implements InvocationHandler { private final Map> myBindings; @@ -36,7 +36,14 @@ public class ClientInvocationHandler extends BaseClient implements InvocationHan private FhirContext myContext; private Map myMethodToLambda; - public ClientInvocationHandler(IHttpClient theClient, FhirContext theContext, String theUrlBase, Map theMethodToReturnValue, Map> theBindings, Map theMethodToLambda, RestfulClientFactory theFactory) { + public ClientInvocationHandler( + IHttpClient theClient, + FhirContext theContext, + String theUrlBase, + Map theMethodToReturnValue, + Map> theBindings, + Map theMethodToLambda, + RestfulClientFactory theFactory) { super(theClient, theUrlBase, theFactory); myContext = theContext; @@ -67,12 +74,13 @@ public class ClientInvocationHandler extends BaseClient implements InvocationHan return lambda.handle(this, theArgs); } - throw new UnsupportedOperationException(Msg.code(1403) + "The method '" + theMethod.getName() + "' in type " + theMethod.getDeclaringClass().getSimpleName() + " has no handler. Did you forget to annotate it with a RESTful method annotation?"); + throw new UnsupportedOperationException(Msg.code(1403) + "The method '" + theMethod.getName() + "' in type " + + theMethod.getDeclaringClass().getSimpleName() + + " has no handler. 
Did you forget to annotate it with a RESTful method annotation?"); } @Override public FhirContext getFhirContext() { return myContext; } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/ClientInvocationHandlerFactory.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/ClientInvocationHandlerFactory.java index 33ef2db8d68..41b60033b6c 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/ClientInvocationHandlerFactory.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/ClientInvocationHandlerFactory.java @@ -19,21 +19,19 @@ */ package ca.uhn.fhir.rest.client.impl; -import ca.uhn.fhir.i18n.Msg; -import java.lang.reflect.Method; -import java.util.HashMap; -import java.util.Map; - -import org.hl7.fhir.instance.model.api.IBaseResource; - import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.api.EncodingEnum; import ca.uhn.fhir.rest.api.SummaryEnum; -import ca.uhn.fhir.rest.client.api.IClientInterceptor; import ca.uhn.fhir.rest.client.api.IHttpClient; import ca.uhn.fhir.rest.client.api.IRestfulClient; import ca.uhn.fhir.rest.client.method.BaseMethodBinding; +import org.hl7.fhir.instance.model.api.IBaseResource; + +import java.lang.reflect.Method; +import java.util.HashMap; +import java.util.Map; public class ClientInvocationHandlerFactory { @@ -44,7 +42,11 @@ public class ClientInvocationHandlerFactory { private final Map myMethodToReturnValue = new HashMap(); private final String myUrlBase; - public ClientInvocationHandlerFactory(IHttpClient theClient, FhirContext theContext, String theUrlBase, Class theClientType) { + public ClientInvocationHandlerFactory( + IHttpClient theClient, + FhirContext theContext, + String theUrlBase, + Class theClientType) { myClient = theClient; myUrlBase = theUrlBase; myContext = theContext; @@ -56,15 +58,21 @@ public class ClientInvocationHandlerFactory { myMethodToLambda.put(theClientType.getMethod("setEncoding", EncodingEnum.class), new SetEncodingLambda()); myMethodToLambda.put(theClientType.getMethod("setPrettyPrint", Boolean.class), new SetPrettyPrintLambda()); - myMethodToLambda.put(theClientType.getMethod("registerInterceptor", Object.class), new RegisterInterceptorLambda()); - myMethodToLambda.put(theClientType.getMethod("unregisterInterceptor", Object.class), new UnregisterInterceptorLambda()); + myMethodToLambda.put( + theClientType.getMethod("registerInterceptor", Object.class), new RegisterInterceptorLambda()); + myMethodToLambda.put( + theClientType.getMethod("unregisterInterceptor", Object.class), new UnregisterInterceptorLambda()); myMethodToLambda.put(theClientType.getMethod("setSummary", SummaryEnum.class), new SetSummaryLambda()); - myMethodToLambda.put(theClientType.getMethod("fetchResourceFromUrl", Class.class, String.class), new FetchResourceFromUrlLambda()); + myMethodToLambda.put( + theClientType.getMethod("fetchResourceFromUrl", Class.class, String.class), + new FetchResourceFromUrlLambda()); } catch (NoSuchMethodException e) { - throw new ConfigurationException(Msg.code(1352) + "Failed to find methods on client. This is a HAPI bug!", e); + throw new ConfigurationException( + Msg.code(1352) + "Failed to find methods on client. This is a HAPI bug!", e); } catch (SecurityException e) { - throw new ConfigurationException(Msg.code(1353) + "Failed to find methods on client. 
This is a HAPI bug!", e); + throw new ConfigurationException( + Msg.code(1353) + "Failed to find methods on client. This is a HAPI bug!", e); } } @@ -73,7 +81,14 @@ public class ClientInvocationHandlerFactory { } ClientInvocationHandler newInvocationHandler(RestfulClientFactory theRestfulClientFactory) { - return new ClientInvocationHandler(myClient, myContext, myUrlBase, myMethodToReturnValue, myBindings, myMethodToLambda, theRestfulClientFactory); + return new ClientInvocationHandler( + myClient, + myContext, + myUrlBase, + myMethodToReturnValue, + myBindings, + myMethodToLambda, + theRestfulClientFactory); } public interface ILambda { @@ -88,18 +103,18 @@ public class ClientInvocationHandlerFactory { return null; } } - + class FetchResourceFromUrlLambda implements ILambda { @Override public Object handle(ClientInvocationHandler theTarget, Object[] theArgs) { @SuppressWarnings("unchecked") Class type = (Class) theArgs[0]; String url = (String) theArgs[1]; - + return theTarget.fetchResourceFromUrl(type, url); } } - + class SetEncodingLambda implements ILambda { @Override public Object handle(ClientInvocationHandler theTarget, Object[] theArgs) { @@ -135,5 +150,4 @@ public class ClientInvocationHandlerFactory { return null; } } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/GenericClient.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/GenericClient.java index f21c1be4c71..59aa5b066ec 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/GenericClient.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/GenericClient.java @@ -173,7 +173,8 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; */ public class GenericClient extends BaseClient implements IGenericClient { - private static final String I18N_CANNOT_DETEMINE_RESOURCE_TYPE = GenericClient.class.getName() + ".cannotDetermineResourceTypeFromUri"; + private static final String I18N_CANNOT_DETEMINE_RESOURCE_TYPE = + GenericClient.class.getName() + ".cannotDetermineResourceTypeFromUri"; private static final String I18N_INCOMPLETE_URI_FOR_READ = GenericClient.class.getName() + ".incompleteUriForRead"; private static final String I18N_NO_VERSION_ID_FOR_VREAD = GenericClient.class.getName() + ".noVersionIdForVread"; private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(GenericClient.class); @@ -184,7 +185,8 @@ public class GenericClient extends BaseClient implements IGenericClient { /** * For now, this is a part of the internal API of HAPI - Use with caution as this method may change! 
*/ - public GenericClient(FhirContext theContext, IHttpClient theHttpClient, String theServerBase, RestfulClientFactory theFactory) { + public GenericClient( + FhirContext theContext, IHttpClient theHttpClient, String theServerBase, RestfulClientFactory theFactory) { super(theHttpClient, theServerBase, theFactory); myContext = theContext; } @@ -204,9 +206,18 @@ public class GenericClient extends BaseClient implements IGenericClient { return new DeleteInternal(); } - private T doReadOrVRead(final Class theType, IIdType theId, boolean theVRead, ICallable theNotModifiedHandler, String theIfVersionMatches, Boolean thePrettyPrint, - SummaryEnum theSummary, EncodingEnum theEncoding, Set theSubsetElements, String theCustomAcceptHeaderValue, - Map> theCustomHeaders) { + private T doReadOrVRead( + final Class theType, + IIdType theId, + boolean theVRead, + ICallable theNotModifiedHandler, + String theIfVersionMatches, + Boolean thePrettyPrint, + SummaryEnum theSummary, + EncodingEnum theEncoding, + Set theSubsetElements, + String theCustomAcceptHeaderValue, + Map> theCustomHeaders) { String resName = toResourceName(theType); IIdType id = theId; if (!id.hasBaseUrl()) { @@ -228,7 +239,8 @@ public class GenericClient extends BaseClient implements IGenericClient { } } if (isKeepResponses()) { - myLastRequest = invocation.asHttpRequest(getServerBase(), createExtraParams(theCustomAcceptHeaderValue), getEncoding(), isPrettyPrint()); + myLastRequest = invocation.asHttpRequest( + getServerBase(), createExtraParams(theCustomAcceptHeaderValue), getEncoding(), isPrettyPrint()); } if (theIfVersionMatches != null) { @@ -236,17 +248,39 @@ public class GenericClient extends BaseClient implements IGenericClient { } boolean allowHtmlResponse = SummaryEnum.TEXT.equals(theSummary); - ResourceResponseHandler binding = new ResourceResponseHandler<>(theType, (Class) null, id, allowHtmlResponse); + ResourceResponseHandler binding = + new ResourceResponseHandler<>(theType, (Class) null, id, allowHtmlResponse); if (theNotModifiedHandler == null) { - return invokeClient(myContext, binding, invocation, theEncoding, thePrettyPrint, myLogRequestAndResponse, theSummary, theSubsetElements, null, theCustomAcceptHeaderValue, theCustomHeaders); + return invokeClient( + myContext, + binding, + invocation, + theEncoding, + thePrettyPrint, + myLogRequestAndResponse, + theSummary, + theSubsetElements, + null, + theCustomAcceptHeaderValue, + theCustomHeaders); } try { - return invokeClient(myContext, binding, invocation, theEncoding, thePrettyPrint, myLogRequestAndResponse, theSummary, theSubsetElements, null, theCustomAcceptHeaderValue, theCustomHeaders); + return invokeClient( + myContext, + binding, + invocation, + theEncoding, + thePrettyPrint, + myLogRequestAndResponse, + theSummary, + theSubsetElements, + null, + theCustomAcceptHeaderValue, + theCustomHeaders); } catch (NotModifiedException e) { return theNotModifiedHandler.call(); } - } @Override @@ -344,11 +378,15 @@ public class GenericClient extends BaseClient implements IGenericClient { IdDt id = new IdDt(theUrl); String resourceType = id.getResourceType(); if (isBlank(resourceType)) { - throw new IllegalArgumentException(Msg.code(1365) + myContext.getLocalizer().getMessage(I18N_INCOMPLETE_URI_FOR_READ, theUrl.getValueAsString())); + throw new IllegalArgumentException(Msg.code(1365) + + myContext.getLocalizer().getMessage(I18N_INCOMPLETE_URI_FOR_READ, theUrl.getValueAsString())); } RuntimeResourceDefinition def = myContext.getResourceDefinition(resourceType); if (def == null) 
{ - throw new IllegalArgumentException(Msg.code(1366) + myContext.getLocalizer().getMessage(I18N_CANNOT_DETEMINE_RESOURCE_TYPE, theUrl.getValueAsString())); + throw new IllegalArgumentException(Msg.code(1366) + + myContext + .getLocalizer() + .getMessage(I18N_CANNOT_DETEMINE_RESOURCE_TYPE, theUrl.getValueAsString())); } return read(def.getImplementingClass(), id); } @@ -377,7 +415,8 @@ public class GenericClient extends BaseClient implements IGenericClient { public MethodOutcome update(IdDt theIdDt, IBaseResource theResource) { BaseHttpClientInvocation invocation = MethodUtil.createUpdateInvocation(theResource, null, theIdDt, myContext); if (isKeepResponses()) { - myLastRequest = invocation.asHttpRequest(getServerBase(), createExtraParams(null), getEncoding(), isPrettyPrint()); + myLastRequest = + invocation.asHttpRequest(getServerBase(), createExtraParams(null), getEncoding(), isPrettyPrint()); } OutcomeResponseHandler binding = new OutcomeResponseHandler(); @@ -401,7 +440,8 @@ public class GenericClient extends BaseClient implements IGenericClient { invocation = ValidateMethodBindingDstu2Plus.createValidateInvocation(myContext, theResource); if (isKeepResponses()) { - myLastRequest = invocation.asHttpRequest(getServerBase(), createExtraParams(null), getEncoding(), isPrettyPrint()); + myLastRequest = + invocation.asHttpRequest(getServerBase(), createExtraParams(null), getEncoding(), isPrettyPrint()); } OutcomeResponseHandler binding = new OutcomeResponseHandler(); @@ -412,7 +452,8 @@ public class GenericClient extends BaseClient implements IGenericClient { @Override public T vread(final Class theType, IdDt theId) { if (!theId.hasVersionIdPart()) { - throw new IllegalArgumentException(Msg.code(1367) + myContext.getLocalizer().getMessage(I18N_NO_VERSION_ID_FOR_VREAD, theId.getValue())); + throw new IllegalArgumentException(Msg.code(1367) + + myContext.getLocalizer().getMessage(I18N_NO_VERSION_ID_FOR_VREAD, theId.getValue())); } return doReadOrVRead(theType, theId, true, null, null, false, null, null, null, null, null); } @@ -429,7 +470,8 @@ public class GenericClient extends BaseClient implements IGenericClient { GET } - private abstract class BaseClientExecutable, Y> implements IClientExecutable { + private abstract class BaseClientExecutable, Y> + implements IClientExecutable { EncodingEnum myParamEncoding; Boolean myPrettyPrint; @@ -531,19 +573,34 @@ public class GenericClient extends BaseClient implements IGenericClient { return mySubsetElements; } - protected Z invoke(Map> theParams, IClientResponseHandler theHandler, BaseHttpClientInvocation theInvocation) { + protected Z invoke( + Map> theParams, + IClientResponseHandler theHandler, + BaseHttpClientInvocation theInvocation) { if (isKeepResponses()) { myLastRequest = theInvocation.asHttpRequest(getServerBase(), theParams, getEncoding(), myPrettyPrint); } - Z resp = invokeClient(myContext, theHandler, theInvocation, myParamEncoding, myPrettyPrint, myQueryLogRequestAndResponse || myLogRequestAndResponse, mySummaryMode, mySubsetElements, myCacheControlDirective, myCustomAcceptHeaderValue, myCustomHeaderValues); + Z resp = invokeClient( + myContext, + theHandler, + theInvocation, + myParamEncoding, + myPrettyPrint, + myQueryLogRequestAndResponse || myLogRequestAndResponse, + mySummaryMode, + mySubsetElements, + myCacheControlDirective, + myCustomAcceptHeaderValue, + myCustomHeaderValues); return resp; } protected IBaseResource parseResourceBody(String theResourceBody) { EncodingEnum encoding = 
EncodingEnum.detectEncodingNoDefault(theResourceBody); if (encoding == null) { - throw new IllegalArgumentException(Msg.code(1368) + myContext.getLocalizer().getMessage(GenericClient.class, "cantDetermineRequestType")); + throw new IllegalArgumentException(Msg.code(1368) + + myContext.getLocalizer().getMessage(GenericClient.class, "cantDetermineRequestType")); } return encoding.newParser(myContext).parseResource(theResourceBody); } @@ -579,10 +636,11 @@ public class GenericClient extends BaseClient implements IGenericClient { mySummaryMode = theSummary; return ((T) this); } - } - private abstract class BaseSearch, QUERY extends IBaseQuery, OUTPUT> extends BaseClientExecutable implements IBaseQuery { + private abstract class BaseSearch< + EXEC extends IClientExecutable, QUERY extends IBaseQuery, OUTPUT> + extends BaseClientExecutable implements IBaseQuery { private Map> myParams = new LinkedHashMap<>(); @@ -640,10 +698,10 @@ public class GenericClient extends BaseClient implements IGenericClient { } return (QUERY) this; } - } - private class CreateInternal extends BaseSearch implements ICreate, ICreateTyped, ICreateWithQuery, ICreateWithQueryTyped { + private class CreateInternal extends BaseSearch + implements ICreate, ICreateTyped, ICreateWithQuery, ICreateWithQueryTyped { private boolean myConditional; private PreferReturnEnum myPrefer; @@ -689,7 +747,6 @@ public class GenericClient extends BaseClient implements IGenericClient { Map> params = new HashMap<>(); return invoke(params, binding, invocation); - } @Override @@ -711,10 +768,10 @@ public class GenericClient extends BaseClient implements IGenericClient { myResourceBody = theResourceBody; return this; } - } - private class DeleteInternal extends BaseSearch implements IDelete, IDeleteTyped, IDeleteWithQuery, IDeleteWithQueryTyped { + private class DeleteInternal extends BaseSearch + implements IDelete, IDeleteTyped, IDeleteWithQuery, IDeleteWithQueryTyped { private boolean myConditional; private IIdType myId; @@ -741,7 +798,8 @@ public class GenericClient extends BaseClient implements IGenericClient { if (myId != null) { invocation = DeleteMethodBinding.createDeleteInvocation(getFhirContext(), myId, getParamMap()); } else if (myConditional) { - invocation = DeleteMethodBinding.createDeleteInvocation(getFhirContext(), myResourceType, getParamMap()); + invocation = + DeleteMethodBinding.createDeleteInvocation(getFhirContext(), myResourceType, getParamMap()); } else { invocation = DeleteMethodBinding.createDeleteInvocation(getFhirContext(), mySearchUrl, getParamMap()); } @@ -757,7 +815,9 @@ public class GenericClient extends BaseClient implements IGenericClient { IIdType id = theResource.getIdElement(); Validate.notNull(id, "theResource.getIdElement() can not be null"); if (!id.hasResourceType() || !id.hasIdPart()) { - throw new IllegalArgumentException(Msg.code(1369) + "theResource.getId() must contain a resource type and logical ID at a minimum (e.g. Patient/1234), found: " + id.getValue()); + throw new IllegalArgumentException(Msg.code(1369) + + "theResource.getId() must contain a resource type and logical ID at a minimum (e.g. 
Patient/1234), found: " + + id.getValue()); } myId = id; return this; @@ -767,7 +827,9 @@ public class GenericClient extends BaseClient implements IGenericClient { public IDeleteTyped resourceById(IIdType theId) { Validate.notNull(theId, "theId can not be null"); if (!theId.hasResourceType() || !theId.hasIdPart()) { - throw new IllegalArgumentException(Msg.code(1370) + "theId must contain a resource type and logical ID at a minimum (e.g. Patient/1234)found: " + theId.getValue()); + throw new IllegalArgumentException(Msg.code(1370) + + "theId must contain a resource type and logical ID at a minimum (e.g. Patient/1234)found: " + + theId.getValue()); } myId = theId; return this; @@ -781,7 +843,8 @@ public class GenericClient extends BaseClient implements IGenericClient { } Validate.notBlank(theLogicalId, "theLogicalId can not be blank/null"); if (theLogicalId.contains("/")) { - throw new IllegalArgumentException(Msg.code(1372) + "LogicalId can not contain '/' (should only be the logical ID portion, not a qualified ID)"); + throw new IllegalArgumentException(Msg.code(1372) + + "LogicalId can not contain '/' (should only be the logical ID portion, not a qualified ID)"); } myId = new IdDt(theResourceType, theLogicalId); return this; @@ -820,7 +883,8 @@ public class GenericClient extends BaseClient implements IGenericClient { } @SuppressWarnings({"rawtypes", "unchecked"}) - private class FetchConformanceInternal extends BaseClientExecutable implements IFetchConformanceUntyped, IFetchConformanceTyped { + private class FetchConformanceInternal extends BaseClientExecutable + implements IFetchConformanceUntyped, IFetchConformanceTyped { private RuntimeResourceDefinition myType; @Override @@ -836,15 +900,16 @@ public class GenericClient extends BaseClient implements IGenericClient { Validate.notNull(theResourceType, "theResourceType must not be null"); myType = myContext.getResourceDefinition(theResourceType); if (myType == null) { - throw new IllegalArgumentException(Msg.code(1374) + myContext.getLocalizer().getMessage(I18N_CANNOT_DETEMINE_RESOURCE_TYPE, theResourceType)); + throw new IllegalArgumentException(Msg.code(1374) + + myContext.getLocalizer().getMessage(I18N_CANNOT_DETEMINE_RESOURCE_TYPE, theResourceType)); } return this; } - } @SuppressWarnings({"unchecked", "rawtypes"}) - private class GetPageInternal extends BaseClientExecutable, Object> implements IGetPageTyped { + private class GetPageInternal extends BaseClientExecutable, Object> + implements IGetPageTyped { private Class myBundleType; private String myUrl; @@ -859,7 +924,8 @@ public class GenericClient extends BaseClient implements IGenericClient { @Override public Object execute() { IClientResponseHandler binding = new ResourceResponseHandler(myBundleType, getPreferResponseTypes()); - HttpSimpleClientInvocation invocationGet = new HttpSimpleClientInvocation(myContext, myUrl, myPagingHttpMethod); + HttpSimpleClientInvocation invocationGet = + new HttpSimpleClientInvocation(myContext, myUrl, myPagingHttpMethod); return invoke(null, binding, invocationGet); } @@ -868,7 +934,6 @@ public class GenericClient extends BaseClient implements IGenericClient { myPagingHttpMethod = thePagingHttpMethod; return this; } - } @SuppressWarnings("rawtypes") @@ -922,7 +987,8 @@ public class GenericClient extends BaseClient implements IGenericClient { id = null; } - HttpGetClientInvocation invocation = HistoryMethodBinding.createHistoryInvocation(myContext, resourceName, id, mySince, myCount, myAt); + HttpGetClientInvocation invocation = + 
HistoryMethodBinding.createHistoryInvocation(myContext, resourceName, id, mySince, myCount, myAt); IClientResponseHandler handler; handler = new ResourceResponseHandler(myReturnType, getPreferResponseTypes(myType)); @@ -933,7 +999,8 @@ public class GenericClient extends BaseClient implements IGenericClient { @Override public IHistoryUntyped onInstance(IIdType theId) { if (!theId.hasResourceType()) { - throw new IllegalArgumentException(Msg.code(1375) + "Resource ID does not have a resource type: " + theId.getValue()); + throw new IllegalArgumentException( + Msg.code(1375) + "Resource ID does not have a resource type: " + theId.getValue()); } myId = theId; return this; @@ -979,7 +1046,6 @@ public class GenericClient extends BaseClient implements IGenericClient { mySince = theCutoff; return this; } - } @SuppressWarnings({"unchecked", "rawtypes"}) @@ -1013,17 +1079,23 @@ public class GenericClient extends BaseClient implements IGenericClient { RuntimeResourceDefinition def = myContext.getResourceDefinition(theBundle); List links = def.getChildByName("link").getAccessor().getValues(theBundle); if (links == null || links.isEmpty()) { - throw new IllegalArgumentException(Msg.code(1377) + myContext.getLocalizer().getMessage(GenericClient.class, "noPagingLinkFoundInBundle", theWantRel)); + throw new IllegalArgumentException(Msg.code(1377) + + myContext + .getLocalizer() + .getMessage(GenericClient.class, "noPagingLinkFoundInBundle", theWantRel)); } for (IBase nextLink : links) { - BaseRuntimeElementCompositeDefinition linkDef = (BaseRuntimeElementCompositeDefinition) myContext.getElementDefinition(nextLink.getClass()); - List rel = linkDef.getChildByName("relation").getAccessor().getValues(nextLink); + BaseRuntimeElementCompositeDefinition linkDef = + (BaseRuntimeElementCompositeDefinition) myContext.getElementDefinition(nextLink.getClass()); + List rel = + linkDef.getChildByName("relation").getAccessor().getValues(nextLink); if (rel == null || rel.isEmpty()) { continue; } String relation = ((IPrimitiveType) rel.get(0)).getValueAsString(); if (theWantRel.equals(relation) || (PREVIOUS.equals(theWantRel) && PREV.equals(relation))) { - List urls = linkDef.getChildByName("url").getAccessor().getValues(nextLink); + List urls = + linkDef.getChildByName("url").getAccessor().getValues(nextLink); if (urls == null || urls.isEmpty()) { continue; } @@ -1034,18 +1106,21 @@ public class GenericClient extends BaseClient implements IGenericClient { return (IGetPageTyped) byUrl(url).andReturnBundle(theBundle.getClass()); } } - throw new IllegalArgumentException(Msg.code(1378) + myContext.getLocalizer().getMessage(GenericClient.class, "noPagingLinkFoundInBundle", theWantRel)); + throw new IllegalArgumentException(Msg.code(1378) + + myContext + .getLocalizer() + .getMessage(GenericClient.class, "noPagingLinkFoundInBundle", theWantRel)); } @Override public IGetPageTyped previous(T theBundle) { return nextOrPrevious(PREVIOUS, theBundle); } - } @SuppressWarnings("rawtypes") - private class MetaInternal extends BaseClientExecutable implements IMeta, IMetaAddOrDeleteUnsourced, IMetaGetUnsourced, IMetaAddOrDeleteSourced { + private class MetaInternal extends BaseClientExecutable + implements IMeta, IMetaAddOrDeleteUnsourced, IMetaGetUnsourced, IMetaAddOrDeleteSourced { private IIdType myId; private IBaseMetaType myMeta; @@ -1075,19 +1150,30 @@ public class GenericClient extends BaseClient implements IGenericClient { switch (myOperation) { case ADD: ParametersUtil.addParameterToParameters(myContext, parameters, "meta", 
myMeta); - invocation = OperationMethodBinding.createOperationInvocation(myContext, myId.getResourceType(), myId.getIdPart(), null, "$meta-add", parameters, false); + invocation = OperationMethodBinding.createOperationInvocation( + myContext, myId.getResourceType(), myId.getIdPart(), null, "$meta-add", parameters, false); break; case DELETE: ParametersUtil.addParameterToParameters(myContext, parameters, "meta", myMeta); - invocation = OperationMethodBinding.createOperationInvocation(myContext, myId.getResourceType(), myId.getIdPart(), null, "$meta-delete", parameters, false); + invocation = OperationMethodBinding.createOperationInvocation( + myContext, + myId.getResourceType(), + myId.getIdPart(), + null, + "$meta-delete", + parameters, + false); break; case GET: if (myId != null) { - invocation = OperationMethodBinding.createOperationInvocation(myContext, myOnType, myId.getIdPart(), null, "$meta", parameters, true); + invocation = OperationMethodBinding.createOperationInvocation( + myContext, myOnType, myId.getIdPart(), null, "$meta", parameters, true); } else if (myOnType != null) { - invocation = OperationMethodBinding.createOperationInvocation(myContext, myOnType, null, null, "$meta", parameters, true); + invocation = OperationMethodBinding.createOperationInvocation( + myContext, myOnType, null, null, "$meta", parameters, true); } else { - invocation = OperationMethodBinding.createOperationInvocation(myContext, null, null, null, "$meta", parameters, true); + invocation = OperationMethodBinding.createOperationInvocation( + myContext, null, null, null, "$meta", parameters, true); } break; } @@ -1149,7 +1235,6 @@ public class GenericClient extends BaseClient implements IGenericClient { myOnType = theId.getResourceType(); myId = theId; } - } private final class MetaParametersResponseHandler implements IClientResponseHandler { @@ -1162,35 +1247,48 @@ public class GenericClient extends BaseClient implements IGenericClient { @SuppressWarnings("unchecked") @Override - public T invokeClient(String theResponseMimeType, InputStream theResponseInputStream, int theResponseStatusCode, Map> theHeaders) throws BaseServerResponseException { + public T invokeClient( + String theResponseMimeType, + InputStream theResponseInputStream, + int theResponseStatusCode, + Map> theHeaders) + throws BaseServerResponseException { EncodingEnum respType = EncodingEnum.forContentType(theResponseMimeType); if (respType == null) { - throw NonFhirResponseException.newInstance(theResponseStatusCode, theResponseMimeType, theResponseInputStream); + throw NonFhirResponseException.newInstance( + theResponseStatusCode, theResponseMimeType, theResponseInputStream); } IParser parser = respType.newParser(myContext); RuntimeResourceDefinition type = myContext.getResourceDefinition("Parameters"); IBaseResource retVal = parser.parseResource(type.getImplementingClass(), theResponseInputStream); BaseRuntimeChildDefinition paramChild = type.getChildByName("parameter"); - BaseRuntimeElementCompositeDefinition paramChildElem = (BaseRuntimeElementCompositeDefinition) paramChild.getChildByName("parameter"); + BaseRuntimeElementCompositeDefinition paramChildElem = + (BaseRuntimeElementCompositeDefinition) paramChild.getChildByName("parameter"); List parameter = paramChild.getAccessor().getValues(retVal); if (parameter == null || parameter.isEmpty()) { return (T) myContext.getElementDefinition(myType).newInstance(); } IBase param = parameter.get(0); - List meta = paramChildElem.getChildByName("value[x]").getAccessor().getValues(param); + 
List meta = + paramChildElem.getChildByName("value[x]").getAccessor().getValues(param); if (meta.isEmpty()) { return (T) myContext.getElementDefinition(myType).newInstance(); } return (T) meta.get(0); - } } @SuppressWarnings("rawtypes") private class OperationInternal extends BaseClientExecutable - implements IOperation, IOperationUnnamed, IOperationUntyped, IOperationUntypedWithInput, IOperationUntypedWithInputAndPartialOutput, IOperationProcessMsg, IOperationProcessMsgMode { + implements IOperation, + IOperationUnnamed, + IOperationUntyped, + IOperationUntypedWithInput, + IOperationUntypedWithInputAndPartialOutput, + IOperationProcessMsg, + IOperationProcessMsgMode { private IIdType myId; private Boolean myIsAsync; @@ -1207,19 +1305,22 @@ public class GenericClient extends BaseClient implements IGenericClient { @SuppressWarnings("unchecked") private void addParam(String theName, IBase theValue) { BaseRuntimeChildDefinition parameterChild = myParametersDef.getChildByName("parameter"); - BaseRuntimeElementCompositeDefinition parameterElem = (BaseRuntimeElementCompositeDefinition) parameterChild.getChildByName("parameter"); + BaseRuntimeElementCompositeDefinition parameterElem = + (BaseRuntimeElementCompositeDefinition) parameterChild.getChildByName("parameter"); IBase parameter = parameterElem.newInstance(); parameterChild.getMutator().addValue(myParameters, parameter); - IPrimitiveType name = (IPrimitiveType) myContext.getElementDefinition("string").newInstance(); + IPrimitiveType name = (IPrimitiveType) + myContext.getElementDefinition("string").newInstance(); name.setValue(theName); parameterElem.getChildByName("name").getMutator().setValue(parameter, name); if (theValue instanceof IBaseDatatype) { BaseRuntimeElementDefinition datatypeDef = myContext.getElementDefinition(theValue.getClass()); if (datatypeDef instanceof IRuntimeDatatypeDefinition) { - Class profileOf = ((IRuntimeDatatypeDefinition) datatypeDef).getProfileOf(); + Class profileOf = + ((IRuntimeDatatypeDefinition) datatypeDef).getProfileOf(); if (profileOf != null) { datatypeDef = myContext.getElementDefinition(profileOf); } @@ -1230,12 +1331,14 @@ public class GenericClient extends BaseClient implements IGenericClient { } else if (theValue instanceof IBaseResource) { parameterElem.getChildByName("resource").getMutator().setValue(parameter, theValue); } else { - throw new IllegalArgumentException(Msg.code(1380) + "Don't know how to handle parameter of type " + theValue.getClass()); + throw new IllegalArgumentException( + Msg.code(1380) + "Don't know how to handle parameter of type " + theValue.getClass()); } } private void addParam(String theName, IQueryParameterType theValue) { - IPrimitiveType stringType = ParametersUtil.createString(myContext, theValue.getValueAsQueryToken(myContext)); + IPrimitiveType stringType = + ParametersUtil.createString(myContext, theValue.getValueAsQueryToken(myContext)); addParam(theName, stringType); } @@ -1248,7 +1351,8 @@ public class GenericClient extends BaseClient implements IGenericClient { } @Override - public IOperationUntypedWithInputAndPartialOutput andSearchParameter(String theName, IQueryParameterType theValue) { + public IOperationUntypedWithInputAndPartialOutput andSearchParameter( + String theName, IQueryParameterType theValue) { addParam(theName, theValue); return this; @@ -1258,7 +1362,9 @@ public class GenericClient extends BaseClient implements IGenericClient { public IOperationProcessMsgMode asynchronous(Class theResponseClass) { myIsAsync = true; 
Validate.notNull(theResponseClass, "theReturnType must not be null"); - Validate.isTrue(IBaseResource.class.isAssignableFrom(theResponseClass), "theReturnType must be a class which extends from IBaseResource"); + Validate.isTrue( + IBaseResource.class.isAssignableFrom(theResponseClass), + "theReturnType must be a class which extends from IBaseResource"); myReturnResourceType = theResponseClass; return this; } @@ -1266,7 +1372,9 @@ public class GenericClient extends BaseClient implements IGenericClient { @SuppressWarnings("unchecked") @Override public Object execute() { - if (myOperationName != null && myOperationName.equals(Constants.EXTOP_PROCESS_MESSAGE) && myMsgBundle != null) { + if (myOperationName != null + && myOperationName.equals(Constants.EXTOP_PROCESS_MESSAGE) + && myMsgBundle != null) { Map> urlParams = new LinkedHashMap>(); // Set Url parameter Async and Response-Url if (myIsAsync != null) { @@ -1277,7 +1385,8 @@ public class GenericClient extends BaseClient implements IGenericClient { urlParams.put(Constants.PARAM_RESPONSE_URL, Arrays.asList(String.valueOf(myResponseUrl))); } // If is $process-message operation - BaseHttpClientInvocation invocation = OperationMethodBinding.createProcessMsgInvocation(myContext, myOperationName, myMsgBundle, urlParams); + BaseHttpClientInvocation invocation = OperationMethodBinding.createProcessMsgInvocation( + myContext, myOperationName, myMsgBundle, urlParams); ResourceResponseHandler handler = new ResourceResponseHandler(); handler.setPreferResponseTypes(getPreferResponseTypes(myType)); @@ -1295,7 +1404,11 @@ public class GenericClient extends BaseClient implements IGenericClient { version = null; } else if (myId != null) { resourceName = myId.getResourceType(); - Validate.notBlank(defaultString(resourceName), "Can not invoke operation \"$%s\" on instance \"%s\" - No resource type specified", myOperationName, myId.getValue()); + Validate.notBlank( + defaultString(resourceName), + "Can not invoke operation \"$%s\" on instance \"%s\" - No resource type specified", + myOperationName, + myId.getValue()); id = myId.getIdPart(); version = myId.getVersionIdPart(); } else { @@ -1304,7 +1417,8 @@ public class GenericClient extends BaseClient implements IGenericClient { version = null; } - BaseHttpClientInvocation invocation = OperationMethodBinding.createOperationInvocation(myContext, resourceName, id, version, myOperationName, myParameters, myUseHttpGet); + BaseHttpClientInvocation invocation = OperationMethodBinding.createOperationInvocation( + myContext, resourceName, id, version, myOperationName, myParameters, myUseHttpGet); if (myReturnResourceType != null) { ResourceResponseHandler handler; @@ -1312,8 +1426,8 @@ public class GenericClient extends BaseClient implements IGenericClient { Object retVal = invoke(null, handler, invocation); return retVal; } - IClientResponseHandler handler = new ResourceOrBinaryResponseHandler() - .setPreferResponseTypes(getPreferResponseTypes(myType)); + IClientResponseHandler handler = + new ResourceOrBinaryResponseHandler().setPreferResponseTypes(getPreferResponseTypes(myType)); if (myReturnMethodOutcome) { handler = new MethodOutcomeResponseHandler(handler); @@ -1325,14 +1439,18 @@ public class GenericClient extends BaseClient implements IGenericClient { return retVal; } - if (myContext.getResourceDefinition((IBaseResource) retVal).getName().equals("Parameters")) { + if (myContext + .getResourceDefinition((IBaseResource) retVal) + .getName() + .equals("Parameters")) { return retVal; } RuntimeResourceDefinition 
def = myContext.getResourceDefinition("Parameters"); IBaseResource parameters = def.newInstance(); BaseRuntimeChildDefinition paramChild = def.getChildByName("parameter"); - BaseRuntimeElementCompositeDefinition paramChildElem = (BaseRuntimeElementCompositeDefinition) paramChild.getChildByName("parameter"); + BaseRuntimeElementCompositeDefinition paramChildElem = + (BaseRuntimeElementCompositeDefinition) paramChild.getChildByName("parameter"); IBase parameter = paramChildElem.newInstance(); paramChild.getMutator().addValue(parameters, parameter); @@ -1395,7 +1513,9 @@ public class GenericClient extends BaseClient implements IGenericClient { @Override public IOperationUntypedWithInput returnResourceType(Class theReturnType) { Validate.notNull(theReturnType, "theReturnType must not be null"); - Validate.isTrue(IBaseResource.class.isAssignableFrom(theReturnType), "theReturnType must be a class which extends from IBaseResource"); + Validate.isTrue( + IBaseResource.class.isAssignableFrom(theReturnType), + "theReturnType must be a class which extends from IBaseResource"); myReturnResourceType = theReturnType; return this; } @@ -1424,7 +1544,10 @@ public class GenericClient extends BaseClient implements IGenericClient { @Override public IOperationProcessMsg setResponseUrlParam(String responseUrl) { Validate.notEmpty(responseUrl, "responseUrl must not be null"); - Validate.matchesPattern(responseUrl, "^(https?)://[-a-zA-Z0-9+&@#/%?=~_|!:,.;]*[-a-zA-Z0-9+&@#/%=~_|]", "responseUrl must be a valid URL"); + Validate.matchesPattern( + responseUrl, + "^(https?)://[-a-zA-Z0-9+&@#/%?=~_|!:,.;]*[-a-zA-Z0-9+&@#/%=~_|]", + "responseUrl must be a valid URL"); myResponseUrl = responseUrl; return this; } @@ -1433,7 +1556,9 @@ public class GenericClient extends BaseClient implements IGenericClient { public IOperationProcessMsgMode synchronous(Class theResponseClass) { myIsAsync = false; Validate.notNull(theResponseClass, "theReturnType must not be null"); - Validate.isTrue(IBaseResource.class.isAssignableFrom(theResponseClass), "theReturnType must be a class which extends from IBaseResource"); + Validate.isTrue( + IBaseResource.class.isAssignableFrom(theResponseClass), + "theReturnType must be a class which extends from IBaseResource"); myReturnResourceType = theResponseClass; return this; } @@ -1446,15 +1571,20 @@ public class GenericClient extends BaseClient implements IGenericClient { @SuppressWarnings("unchecked") @Override - public IOperationUntypedWithInputAndPartialOutput withNoParameters(Class theOutputParameterType) { + public IOperationUntypedWithInputAndPartialOutput withNoParameters( + Class theOutputParameterType) { Validate.notNull(theOutputParameterType, "theOutputParameterType may not be null"); RuntimeResourceDefinition def = myContext.getResourceDefinition(theOutputParameterType); if (def == null) { - throw new IllegalArgumentException(Msg.code(1381) + "theOutputParameterType must refer to a HAPI FHIR Resource type: " + theOutputParameterType.getName()); + throw new IllegalArgumentException( + Msg.code(1381) + "theOutputParameterType must refer to a HAPI FHIR Resource type: " + + theOutputParameterType.getName()); } if (!"Parameters".equals(def.getName())) { - throw new IllegalArgumentException(Msg.code(1382) + "theOutputParameterType must refer to a HAPI FHIR Resource type for a resource named " + "Parameters" + " - " + theOutputParameterType.getName() - + " is a resource named: " + def.getName()); + throw new IllegalArgumentException(Msg.code(1382) + + "theOutputParameterType must refer to a 
HAPI FHIR Resource type for a resource named " + + "Parameters" + " - " + theOutputParameterType.getName() + " is a resource named: " + + def.getName()); } myParameters = (IBaseParameters) def.newInstance(); return this; @@ -1462,7 +1592,8 @@ public class GenericClient extends BaseClient implements IGenericClient { @SuppressWarnings("unchecked") @Override - public IOperationUntypedWithInputAndPartialOutput withParameter(Class theParameterType, String theName, IBase theValue) { + public IOperationUntypedWithInputAndPartialOutput withParameter( + Class theParameterType, String theName, IBase theValue) { Validate.notNull(theParameterType, "theParameterType must not be null"); Validate.notEmpty(theName, "theName must not be null"); Validate.notNull(theValue, "theValue must not be null"); @@ -1486,7 +1617,8 @@ public class GenericClient extends BaseClient implements IGenericClient { @SuppressWarnings("unchecked") @Override - public IOperationUntypedWithInputAndPartialOutput withSearchParameter(Class theParameterType, String theName, IQueryParameterType theValue) { + public IOperationUntypedWithInputAndPartialOutput withSearchParameter( + Class theParameterType, String theName, IQueryParameterType theValue) { Validate.notNull(theParameterType, "theParameterType must not be null"); Validate.notEmpty(theName, "theName must not be null"); Validate.notNull(theValue, "theValue must not be null"); @@ -1498,10 +1630,8 @@ public class GenericClient extends BaseClient implements IGenericClient { return this; } - } - private final class MethodOutcomeResponseHandler implements IClientResponseHandler { private final IClientResponseHandler myWrap; @@ -1510,8 +1640,14 @@ public class GenericClient extends BaseClient implements IGenericClient { } @Override - public MethodOutcome invokeClient(String theResponseMimeType, InputStream theResponseInputStream, int theResponseStatusCode, Map> theHeaders) throws IOException, BaseServerResponseException { - IBaseResource response = myWrap.invokeClient(theResponseMimeType, theResponseInputStream, theResponseStatusCode, theHeaders); + public MethodOutcome invokeClient( + String theResponseMimeType, + InputStream theResponseInputStream, + int theResponseStatusCode, + Map> theHeaders) + throws IOException, BaseServerResponseException { + IBaseResource response = + myWrap.invokeClient(theResponseMimeType, theResponseInputStream, theResponseStatusCode, theHeaders); MethodOutcome retVal = new MethodOutcome(); if (response instanceof IBaseOperationOutcome) { @@ -1539,15 +1675,28 @@ public class GenericClient extends BaseClient implements IGenericClient { } @Override - public MethodOutcome invokeClient(String theResponseMimeType, InputStream theResponseInputStream, int theResponseStatusCode, Map> theHeaders) throws BaseServerResponseException { - MethodOutcome response = MethodUtil.process2xxResponse(myContext, theResponseStatusCode, theResponseMimeType, theResponseInputStream, theHeaders); + public MethodOutcome invokeClient( + String theResponseMimeType, + InputStream theResponseInputStream, + int theResponseStatusCode, + Map> theHeaders) + throws BaseServerResponseException { + MethodOutcome response = MethodUtil.process2xxResponse( + myContext, theResponseStatusCode, theResponseMimeType, theResponseInputStream, theHeaders); response.setCreatedUsingStatusCode(theResponseStatusCode); if (myPrefer == PreferReturnEnum.REPRESENTATION) { if (response.getResource() == null) { - if (response.getId() != null && isNotBlank(response.getId().getValue()) && 
response.getId().hasBaseUrl()) { - ourLog.info("Server did not return resource for Prefer-representation, going to fetch: {}", response.getId().getValue()); - IBaseResource resource = read().resource(response.getId().getResourceType()).withUrl(response.getId()).execute(); + if (response.getId() != null + && isNotBlank(response.getId().getValue()) + && response.getId().hasBaseUrl()) { + ourLog.info( + "Server did not return resource for Prefer-representation, going to fetch: {}", + response.getId().getValue()); + IBaseResource resource = read().resource( + response.getId().getResourceType()) + .withUrl(response.getId()) + .execute(); response.setResource(resource); } } @@ -1559,7 +1708,8 @@ public class GenericClient extends BaseClient implements IGenericClient { } } - private class PatchInternal extends BaseSearch implements IPatch, IPatchWithBody, IPatchExecutable, IPatchWithQuery, IPatchWithQueryTyped { + private class PatchInternal extends BaseSearch + implements IPatch, IPatchWithBody, IPatchExecutable, IPatchWithQuery, IPatchWithQueryTyped { private boolean myConditional; private IIdType myId; @@ -1605,10 +1755,12 @@ public class GenericClient extends BaseClient implements IGenericClient { if (isNotBlank(mySearchUrl)) { invocation = MethodUtil.createPatchInvocation(myContext, mySearchUrl, myPatchType, myPatchBody); } else if (myConditional) { - invocation = MethodUtil.createPatchInvocation(myContext, myPatchType, myPatchBody, myResourceType, getParamMap()); + invocation = MethodUtil.createPatchInvocation( + myContext, myPatchType, myPatchBody, myResourceType, getParamMap()); } else { if (myId == null || myId.hasIdPart() == false) { - throw new InvalidRequestException(Msg.code(1385) + "No ID supplied for resource to patch, can not invoke server"); + throw new InvalidRequestException( + Msg.code(1385) + "No ID supplied for resource to patch, can not invoke server"); } invocation = MethodUtil.createPatchInvocation(myContext, myId, myPatchType, myPatchBody); } @@ -1619,7 +1771,6 @@ public class GenericClient extends BaseClient implements IGenericClient { Map> params = new HashMap<>(); return invoke(params, binding, invocation); - } @Override @@ -1661,8 +1812,14 @@ public class GenericClient extends BaseClient implements IGenericClient { if (theId == null) { throw new NullPointerException(Msg.code(1387) + "theId can not be null"); } - Validate.notBlank(theId.getIdPart(), "theId must not be blank and must contain a resource type and ID (e.g. \"Patient/123\"), found: %s", UrlUtil.sanitizeUrlPart(theId.getValue())); - Validate.notBlank(theId.getResourceType(), "theId must not be blank and must contain a resource type and ID (e.g. \"Patient/123\"), found: %s", UrlUtil.sanitizeUrlPart(theId.getValue())); + Validate.notBlank( + theId.getIdPart(), + "theId must not be blank and must contain a resource type and ID (e.g. \"Patient/123\"), found: %s", + UrlUtil.sanitizeUrlPart(theId.getValue())); + Validate.notBlank( + theId.getResourceType(), + "theId must not be blank and must contain a resource type and ID (e.g. 
\"Patient/123\"), found: %s", + UrlUtil.sanitizeUrlPart(theId.getValue())); myId = theId; return this; } @@ -1674,7 +1831,6 @@ public class GenericClient extends BaseClient implements IGenericClient { } return withId(new IdDt(theId)); } - } @SuppressWarnings({"rawtypes", "unchecked"}) @@ -1685,11 +1841,33 @@ public class GenericClient extends BaseClient implements IGenericClient { private RuntimeResourceDefinition myType; @Override - public Object execute() {// AAA + public Object execute() { // AAA if (myId.hasVersionIdPart()) { - return doReadOrVRead(myType.getImplementingClass(), myId, true, myNotModifiedHandler, myIfVersionMatches, myPrettyPrint, mySummaryMode, myParamEncoding, getSubsetElements(), getCustomAcceptHeaderValue(), myCustomHeaderValues); + return doReadOrVRead( + myType.getImplementingClass(), + myId, + true, + myNotModifiedHandler, + myIfVersionMatches, + myPrettyPrint, + mySummaryMode, + myParamEncoding, + getSubsetElements(), + getCustomAcceptHeaderValue(), + myCustomHeaderValues); } - return doReadOrVRead(myType.getImplementingClass(), myId, false, myNotModifiedHandler, myIfVersionMatches, myPrettyPrint, mySummaryMode, myParamEncoding, getSubsetElements(), getCustomAcceptHeaderValue(), myCustomHeaderValues); + return doReadOrVRead( + myType.getImplementingClass(), + myId, + false, + myNotModifiedHandler, + myIfVersionMatches, + myPrettyPrint, + mySummaryMode, + myParamEncoding, + getSubsetElements(), + getCustomAcceptHeaderValue(), + myCustomHeaderValues); } @Override @@ -1730,11 +1908,13 @@ public class GenericClient extends BaseClient implements IGenericClient { private void processUrl() { String resourceType = myId.getResourceType(); if (isBlank(resourceType)) { - throw new IllegalArgumentException(Msg.code(1389) + myContext.getLocalizer().getMessage(I18N_INCOMPLETE_URI_FOR_READ, myId)); + throw new IllegalArgumentException( + Msg.code(1389) + myContext.getLocalizer().getMessage(I18N_INCOMPLETE_URI_FOR_READ, myId)); } myType = myContext.getResourceDefinition(resourceType); if (myType == null) { - throw new IllegalArgumentException(Msg.code(1390) + myContext.getLocalizer().getMessage(I18N_CANNOT_DETEMINE_RESOURCE_TYPE, myId)); + throw new IllegalArgumentException( + Msg.code(1390) + myContext.getLocalizer().getMessage(I18N_CANNOT_DETEMINE_RESOURCE_TYPE, myId)); } } @@ -1743,7 +1923,8 @@ public class GenericClient extends BaseClient implements IGenericClient { Validate.notNull(theResourceType, "theResourceType must not be null"); myType = myContext.getResourceDefinition(theResourceType); if (myType == null) { - throw new IllegalArgumentException(Msg.code(1391) + myContext.getLocalizer().getMessage(I18N_CANNOT_DETEMINE_RESOURCE_TYPE, theResourceType)); + throw new IllegalArgumentException(Msg.code(1391) + + myContext.getLocalizer().getMessage(I18N_CANNOT_DETEMINE_RESOURCE_TYPE, theResourceType)); } return this; } @@ -1753,7 +1934,8 @@ public class GenericClient extends BaseClient implements IGenericClient { Validate.notBlank(theResourceAsText, "You must supply a value for theResourceAsText"); myType = myContext.getResourceDefinition(theResourceAsText); if (myType == null) { - throw new IllegalArgumentException(Msg.code(1392) + myContext.getLocalizer().getMessage(I18N_CANNOT_DETEMINE_RESOURCE_TYPE, theResourceAsText)); + throw new IllegalArgumentException(Msg.code(1392) + + myContext.getLocalizer().getMessage(I18N_CANNOT_DETEMINE_RESOURCE_TYPE, theResourceAsText)); } return this; } @@ -1805,18 +1987,24 @@ public class GenericClient extends BaseClient implements 
IGenericClient { processUrl(); return this; } - } private final class ResourceListResponseHandler implements IClientResponseHandler> { @SuppressWarnings("unchecked") @Override - public List invokeClient(String theResponseMimeType, InputStream theResponseInputStream, int theResponseStatusCode, Map> theHeaders) - throws BaseServerResponseException { - Class bundleType = myContext.getResourceDefinition("Bundle").getImplementingClass(); - ResourceResponseHandler handler = new ResourceResponseHandler<>((Class) bundleType); - IBaseResource response = handler.invokeClient(theResponseMimeType, theResponseInputStream, theResponseStatusCode, theHeaders); + public List invokeClient( + String theResponseMimeType, + InputStream theResponseInputStream, + int theResponseStatusCode, + Map> theHeaders) + throws BaseServerResponseException { + Class bundleType = + myContext.getResourceDefinition("Bundle").getImplementingClass(); + ResourceResponseHandler handler = + new ResourceResponseHandler<>((Class) bundleType); + IBaseResource response = handler.invokeClient( + theResponseMimeType, theResponseInputStream, theResponseStatusCode, theHeaders); IVersionSpecificBundleFactory bundleFactory = myContext.newBundleFactory(); bundleFactory.initializeWithBundleResource(response); return bundleFactory.toListOfResources(); @@ -1824,7 +2012,8 @@ public class GenericClient extends BaseClient implements IGenericClient { } @SuppressWarnings({"rawtypes", "unchecked"}) - private class SearchInternal extends BaseSearch, IQuery, OUTPUT> implements IQuery, IUntypedQuery> { + private class SearchInternal extends BaseSearch, IQuery, OUTPUT> + implements IQuery, IUntypedQuery> { private String myCompartmentName; private List myInclude = new ArrayList<>(); @@ -1858,7 +2047,8 @@ public class GenericClient extends BaseClient implements IGenericClient { mySearchUrl = theSearchUrl; int qIndex = mySearchUrl.indexOf('?'); if (qIndex != -1) { - mySearchUrl = mySearchUrl.substring(0, qIndex) + validateAndEscapeConditionalUrl(mySearchUrl.substring(qIndex)); + mySearchUrl = mySearchUrl.substring(0, qIndex) + + validateAndEscapeConditionalUrl(mySearchUrl.substring(qIndex)); } } else { String searchUrl = theSearchUrl; @@ -1866,7 +2056,9 @@ public class GenericClient extends BaseClient implements IGenericClient { searchUrl = searchUrl.substring(1); } if (!searchUrl.matches("[a-zA-Z]+($|\\?.*)")) { - throw new IllegalArgumentException(Msg.code(1393) + "Search URL must be either a complete URL starting with http: or https:, or a relative FHIR URL in the form [ResourceType]?[Params]"); + throw new IllegalArgumentException( + Msg.code(1393) + + "Search URL must be either a complete URL starting with http: or https:, or a relative FHIR URL in the form [ResourceType]?[Params]"); } int qIndex = searchUrl.indexOf('?'); if (qIndex == -1) { @@ -1994,13 +2186,14 @@ public class GenericClient extends BaseClient implements IGenericClient { BaseHttpClientInvocation invocation; if (mySearchUrl != null) { - invocation = SearchMethodBinding.createSearchInvocation(myContext, mySearchUrl, UrlSourceEnum.EXPLICIT, params); + invocation = SearchMethodBinding.createSearchInvocation( + myContext, mySearchUrl, UrlSourceEnum.EXPLICIT, params); } else { - invocation = SearchMethodBinding.createSearchInvocation(myContext, myResourceName, params, resourceId, myCompartmentName, mySearchStyle); + invocation = SearchMethodBinding.createSearchInvocation( + myContext, myResourceName, params, resourceId, myCompartmentName, mySearchStyle); } return (OUTPUT) invoke(params, 
binding, invocation); - } @Override @@ -2127,19 +2320,23 @@ public class GenericClient extends BaseClient implements IGenericClient { myTags.add(new TokenParam(theSystem, theCode)); return this; } - } private final class StringResponseHandler implements IClientResponseHandler { @Override - public String invokeClient(String theResponseMimeType, InputStream theResponseInputStream, int theResponseStatusCode, Map> theHeaders) - throws IOException, BaseServerResponseException { + public String invokeClient( + String theResponseMimeType, + InputStream theResponseInputStream, + int theResponseStatusCode, + Map> theHeaders) + throws IOException, BaseServerResponseException { return IOUtils.toString(theResponseInputStream, Charsets.UTF_8); } } - private final class TransactionExecutable extends BaseClientExecutable, T> implements ITransactionTyped { + private final class TransactionExecutable extends BaseClientExecutable, T> + implements ITransactionTyped { private IBaseBundle myBaseBundle; private String myRawBundle; @@ -2158,7 +2355,8 @@ public class GenericClient extends BaseClient implements IGenericClient { myRawBundle = theBundle; myRawBundleEncoding = EncodingEnum.detectEncodingNoDefault(myRawBundle); if (myRawBundleEncoding == null) { - throw new IllegalArgumentException(Msg.code(1395) + myContext.getLocalizer().getMessage(GenericClient.class, "cantDetermineRequestType")); + throw new IllegalArgumentException(Msg.code(1395) + + myContext.getLocalizer().getMessage(GenericClient.class, "cantDetermineRequestType")); } } @@ -2168,11 +2366,14 @@ public class GenericClient extends BaseClient implements IGenericClient { Map> params = new HashMap>(); if (myResources != null) { ResourceListResponseHandler binding = new ResourceListResponseHandler(); - BaseHttpClientInvocation invocation = TransactionMethodBinding.createTransactionInvocation(myResources, myContext); + BaseHttpClientInvocation invocation = + TransactionMethodBinding.createTransactionInvocation(myResources, myContext); return (T) invoke(params, binding, invocation); } else if (myBaseBundle != null) { - ResourceResponseHandler binding = new ResourceResponseHandler(myBaseBundle.getClass(), getPreferResponseTypes()); - BaseHttpClientInvocation invocation = TransactionMethodBinding.createTransactionInvocation(myBaseBundle, myContext); + ResourceResponseHandler binding = + new ResourceResponseHandler(myBaseBundle.getClass(), getPreferResponseTypes()); + BaseHttpClientInvocation invocation = + TransactionMethodBinding.createTransactionInvocation(myBaseBundle, myContext); return (T) invoke(params, binding, invocation); // } else if (myRawBundle != null) { } else { @@ -2183,14 +2384,15 @@ public class GenericClient extends BaseClient implements IGenericClient { if (getParamEncoding() != null) { if (EncodingEnum.detectEncodingNoDefault(myRawBundle) != getParamEncoding()) { IBaseResource parsed = parseResourceBody(myRawBundle); - myRawBundle = getParamEncoding().newParser(getFhirContext()).encodeResourceToString(parsed); + myRawBundle = + getParamEncoding().newParser(getFhirContext()).encodeResourceToString(parsed); } } - BaseHttpClientInvocation invocation = TransactionMethodBinding.createTransactionInvocation(myRawBundle, myContext); + BaseHttpClientInvocation invocation = + TransactionMethodBinding.createTransactionInvocation(myRawBundle, myContext); return (T) invoke(params, binding, invocation); } } - } private final class TransactionInternal implements ITransaction { @@ -2212,7 +2414,8 @@ public class GenericClient extends BaseClient 
implements IGenericClient { Validate.notNull(theResources, "theResources must not be null"); for (IBaseResource next : theResources) { - BundleEntryTransactionMethodEnum entryMethod = ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.get(next); + BundleEntryTransactionMethodEnum entryMethod = + ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.get(next); if (entryMethod == null) { if (isBlank(next.getIdElement().getValue())) { @@ -2222,7 +2425,6 @@ public class GenericClient extends BaseClient implements IGenericClient { } ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(next, entryMethod); } - } return new TransactionExecutable<>(theResources); @@ -2230,7 +2432,7 @@ public class GenericClient extends BaseClient implements IGenericClient { } private class UpdateInternal extends BaseSearch - implements IUpdate, IUpdateTyped, IUpdateExecutable, IUpdateWithQuery, IUpdateWithQueryTyped { + implements IUpdate, IUpdateTyped, IUpdateExecutable, IUpdateWithQuery, IUpdateWithQueryTyped { private boolean myConditional; private IIdType myId; @@ -2280,14 +2482,17 @@ public class GenericClient extends BaseClient implements IGenericClient { } if (myId == null || myId.hasIdPart() == false) { - throw new InvalidRequestException(Msg.code(1396) + "No ID supplied for resource to update, can not invoke server"); + throw new InvalidRequestException( + Msg.code(1396) + "No ID supplied for resource to update, can not invoke server"); } if (myIsHistoryRewrite) { if (!myId.hasVersionIdPart()) { - throw new InvalidRequestException(Msg.code(2090) + "ID must contain a history version, found: " + myId.getVersionIdPart()); + throw new InvalidRequestException(Msg.code(2090) + "ID must contain a history version, found: " + + myId.getVersionIdPart()); } - invocation = MethodUtil.createUpdateHistoryRewriteInvocation(myResource, myResourceBody, myId, myContext); + invocation = MethodUtil.createUpdateHistoryRewriteInvocation( + myResource, myResourceBody, myId, myContext); invocation.addHeader(Constants.HEADER_REWRITE_HISTORY, "true"); } else { invocation = MethodUtil.createUpdateInvocation(myResource, myResourceBody, myId, myContext); @@ -2300,7 +2505,6 @@ public class GenericClient extends BaseClient implements IGenericClient { Map> params = new HashMap<>(); return invoke(params, binding, invocation); - } @Override @@ -2329,7 +2533,8 @@ public class GenericClient extends BaseClient implements IGenericClient { throw new NullPointerException(Msg.code(1397) + "theId can not be null"); } if (theId.hasIdPart() == false) { - throw new NullPointerException(Msg.code(1398) + "theId must not be blank and must contain an ID, found: " + theId.getValue()); + throw new NullPointerException( + Msg.code(1398) + "theId must not be blank and must contain an ID, found: " + theId.getValue()); } myId = theId; return this; @@ -2341,20 +2546,22 @@ public class GenericClient extends BaseClient implements IGenericClient { throw new NullPointerException(Msg.code(1399) + "theId can not be null"); } if (isBlank(theId)) { - throw new NullPointerException(Msg.code(1400) + "theId must not be blank and must contain an ID, found: " + theId); + throw new NullPointerException( + Msg.code(1400) + "theId must not be blank and must contain an ID, found: " + theId); } myId = new IdDt(theId); return this; } - } - private class ValidateInternal extends BaseClientExecutable implements IValidate, IValidateUntyped { + private class ValidateInternal extends BaseClientExecutable + implements IValidate, IValidateUntyped { private IBaseResource myResource; @Override 
public MethodOutcome execute() { - BaseHttpClientInvocation invocation = ValidateMethodBindingDstu2Plus.createValidateInvocation(myContext, myResource); + BaseHttpClientInvocation invocation = + ValidateMethodBindingDstu2Plus.createValidateInvocation(myContext, myResource); ResourceResponseHandler handler = new ResourceResponseHandler<>(null, null); MethodOutcomeResponseHandler methodHandler = new MethodOutcomeResponseHandler(handler); return invoke(null, methodHandler, invocation); @@ -2374,7 +2581,8 @@ public class GenericClient extends BaseClient implements IGenericClient { EncodingEnum enc = EncodingEnum.detectEncodingNoDefault(theResourceRaw); if (enc == null) { - throw new IllegalArgumentException(Msg.code(1401) + myContext.getLocalizer().getMessage(GenericClient.class, "cantDetermineRequestType")); + throw new IllegalArgumentException(Msg.code(1401) + + myContext.getLocalizer().getMessage(GenericClient.class, "cantDetermineRequestType")); } switch (enc) { case XML: @@ -2386,7 +2594,6 @@ public class GenericClient extends BaseClient implements IGenericClient { } return this; } - } @SuppressWarnings("rawtypes") @@ -2472,7 +2679,6 @@ public class GenericClient extends BaseClient implements IGenericClient { public String getParamValue() { return myParamValue; } - } private static void addParam(Map> params, String parameterName, String parameterValue) { @@ -2484,7 +2690,8 @@ public class GenericClient extends BaseClient implements IGenericClient { private static void addPreferHeader(PreferReturnEnum thePrefer, BaseHttpClientInvocation theInvocation) { if (thePrefer != null) { - theInvocation.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RETURN + '=' + thePrefer.getHeaderValue()); + theInvocation.addHeader( + Constants.HEADER_PREFER, Constants.HEADER_PREFER_RETURN + '=' + thePrefer.getHeaderValue()); } } @@ -2498,7 +2705,9 @@ public class GenericClient extends BaseClient implements IGenericClient { if (nextChar == '?') { haveHadQuestionMark = true; } else if (!Character.isLetter(nextChar)) { - throw new IllegalArgumentException(Msg.code(1402) + "Conditional URL must be in the format \"[ResourceType]?[Params]\" and must not have a base URL - Found: " + theSearchUrl); + throw new IllegalArgumentException(Msg.code(1402) + + "Conditional URL must be in the format \"[ResourceType]?[Params]\" and must not have a base URL - Found: " + + theSearchUrl); } b.append(nextChar); } else { @@ -2517,5 +2726,4 @@ public class GenericClient extends BaseClient implements IGenericClient { } return b.toString(); } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/HttpBasicAuthInterceptor.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/HttpBasicAuthInterceptor.java index b9aa2e72901..8b62c883546 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/HttpBasicAuthInterceptor.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/HttpBasicAuthInterceptor.java @@ -19,8 +19,9 @@ */ package ca.uhn.fhir.rest.client.impl; -import java.io.IOException; - +import ca.uhn.fhir.rest.client.api.IBasicClient; +import ca.uhn.fhir.rest.client.api.IClientInterceptor; +import ca.uhn.fhir.rest.client.api.IGenericClient; import org.apache.http.HttpException; import org.apache.http.HttpRequest; import org.apache.http.HttpRequestInterceptor; @@ -31,21 +32,18 @@ import org.apache.http.client.protocol.HttpClientContext; import org.apache.http.impl.auth.BasicScheme; import org.apache.http.protocol.HttpContext; -import 
ca.uhn.fhir.rest.client.api.IBasicClient; -import ca.uhn.fhir.rest.client.api.IClientInterceptor; -import ca.uhn.fhir.rest.client.api.IGenericClient; - +import java.io.IOException; /** - * @deprecated Use {@link ca.uhn.fhir.rest.client.interceptor.BasicAuthInterceptor} instead. Note that BasicAuthInterceptor class is a HAPI client interceptor instead of being a commons-httpclient interceptor, so you register it to your client instance once it's created using {@link IGenericClient#registerInterceptor(IClientInterceptor)} or {@link IBasicClient#registerInterceptor(IClientInterceptor)} instead + * @deprecated Use {@link ca.uhn.fhir.rest.client.interceptor.BasicAuthInterceptor} instead. Note that BasicAuthInterceptor class is a HAPI client interceptor instead of being a commons-httpclient interceptor, so you register it to your client instance once it's created using {@link IGenericClient#registerInterceptor(IClientInterceptor)} or {@link IBasicClient#registerInterceptor(IClientInterceptor)} instead */ @Deprecated -public class HttpBasicAuthInterceptor implements HttpRequestInterceptor { +public class HttpBasicAuthInterceptor implements HttpRequestInterceptor { private String myUsername; private String myPassword; - - public HttpBasicAuthInterceptor(String theUsername, String thePassword) { + + public HttpBasicAuthInterceptor(String theUsername, String thePassword) { super(); myUsername = theUsername; myPassword = thePassword; @@ -53,13 +51,11 @@ public class HttpBasicAuthInterceptor implements HttpRequestInterceptor { @Override public void process(final HttpRequest request, final HttpContext context) throws HttpException, IOException { - AuthState authState = (AuthState) context.getAttribute(HttpClientContext.TARGET_AUTH_STATE); - - if (authState.getAuthScheme() == null) { - Credentials creds = new UsernamePasswordCredentials(myUsername, myPassword); - authState.update(new BasicScheme(), creds); - } - - } + AuthState authState = (AuthState) context.getAttribute(HttpClientContext.TARGET_AUTH_STATE); + if (authState.getAuthScheme() == null) { + Credentials creds = new UsernamePasswordCredentials(myUsername, myPassword); + authState.update(new BasicScheme(), creds); + } + } } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/RestfulClientFactory.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/RestfulClientFactory.java index e8539cc7896..3369dff8911 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/RestfulClientFactory.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/impl/RestfulClientFactory.java @@ -58,7 +58,8 @@ public abstract class RestfulClientFactory implements IRestfulClientFactory { private int myConnectionRequestTimeout = DEFAULT_CONNECTION_REQUEST_TIMEOUT; private int myConnectTimeout = DEFAULT_CONNECT_TIMEOUT; private FhirContext myContext; - private final Map, ClientInvocationHandlerFactory> myInvocationHandlers = new HashMap<>(); + private final Map, ClientInvocationHandlerFactory> myInvocationHandlers = + new HashMap<>(); private ServerValidationModeEnum myServerValidationMode = DEFAULT_SERVER_VALIDATION_MODE; private int mySocketTimeout = DEFAULT_SOCKET_TIMEOUT; private String myProxyUsername; @@ -69,12 +70,11 @@ public abstract class RestfulClientFactory implements IRestfulClientFactory { /** * Constructor */ - public RestfulClientFactory() { - } + public RestfulClientFactory() {} /** * Constructor - * + * * @param theFhirContext * The context */ @@ -133,13 +133,15 @@ public abstract class 
RestfulClientFactory implements IRestfulClientFactory { } @SuppressWarnings("unchecked") - private T instantiateProxy(Class theClientType, InvocationHandler theInvocationHandler) { - return (T) Proxy.newProxyInstance(theClientType.getClassLoader(), new Class[] { theClientType }, theInvocationHandler); + private T instantiateProxy( + Class theClientType, InvocationHandler theInvocationHandler) { + return (T) Proxy.newProxyInstance( + theClientType.getClassLoader(), new Class[] {theClientType}, theInvocationHandler); } /** * Instantiates a new client instance - * + * * @param theClientType * The client type, which is an interface type to be instantiated * @param theServerBase @@ -153,7 +155,8 @@ public abstract class RestfulClientFactory implements IRestfulClientFactory { validateConfigured(); if (!theClientType.isInterface()) { - throw new ConfigurationException(Msg.code(1354) + theClientType.getCanonicalName() + " is not an interface"); + throw new ConfigurationException( + Msg.code(1354) + theClientType.getCanonicalName() + " is not an interface"); } ClientInvocationHandlerFactory invocationHandler = myInvocationHandlers.get(theClientType); @@ -177,7 +180,9 @@ public abstract class RestfulClientFactory implements IRestfulClientFactory { */ protected void validateConfigured() { if (getFhirContext() == null) { - throw new IllegalStateException(Msg.code(1355) + getClass().getSimpleName() + " does not have FhirContext defined. This must be set via " + getClass().getSimpleName() + "#setFhirContext(FhirContext)"); + throw new IllegalStateException(Msg.code(1355) + getClass().getSimpleName() + + " does not have FhirContext defined. This must be set via " + + getClass().getSimpleName() + "#setFhirContext(FhirContext)"); } } @@ -214,14 +219,16 @@ public abstract class RestfulClientFactory implements IRestfulClientFactory { */ public void setFhirContext(FhirContext theContext) { if (myContext != null && myContext != theContext) { - throw new IllegalStateException(Msg.code(1356) + "RestfulClientFactory instance is already associated with one FhirContext. RestfulClientFactory instances can not be shared."); + throw new IllegalStateException( + Msg.code(1356) + + "RestfulClientFactory instance is already associated with one FhirContext. 
RestfulClientFactory instances can not be shared."); } myContext = theContext; } /** * Return the fhir context - * + * * @return the fhir context */ public FhirContext getFhirContext() { @@ -265,7 +272,8 @@ public abstract class RestfulClientFactory implements IRestfulClientFactory { } @Override - public void validateServerBaseIfConfiguredToDoSo(String theServerBase, IHttpClient theHttpClient, IRestfulClient theClient) { + public void validateServerBaseIfConfiguredToDoSo( + String theServerBase, IHttpClient theHttpClient, IRestfulClient theClient) { String serverBase = normalizeBaseUrlForMap(theServerBase); switch (getServerValidationMode()) { @@ -285,7 +293,6 @@ public abstract class RestfulClientFactory implements IRestfulClientFactory { } break; } - } @SuppressWarnings("unchecked") @@ -307,28 +314,42 @@ public abstract class RestfulClientFactory implements IRestfulClientFactory { @SuppressWarnings("rawtypes") Class implementingClass; try { - implementingClass = myContext.getResourceDefinition(capabilityStatementResourceName).getImplementingClass(); + implementingClass = myContext + .getResourceDefinition(capabilityStatementResourceName) + .getImplementingClass(); } catch (DataFormatException e) { if (!myContext.getVersion().getVersion().isOlderThan(FhirVersionEnum.DSTU3)) { capabilityStatementResourceName = "Conformance"; - implementingClass = myContext.getResourceDefinition(capabilityStatementResourceName).getImplementingClass(); + implementingClass = myContext + .getResourceDefinition(capabilityStatementResourceName) + .getImplementingClass(); } else { throw e; } } try { - conformance = (IBaseResource) client.fetchConformance().ofType(implementingClass).execute(); + conformance = (IBaseResource) + client.fetchConformance().ofType(implementingClass).execute(); } catch (FhirClientConnectionException e) { - if (!myContext.getVersion().getVersion().isOlderThan(FhirVersionEnum.DSTU3) && e.getCause() instanceof DataFormatException) { + if (!myContext.getVersion().getVersion().isOlderThan(FhirVersionEnum.DSTU3) + && e.getCause() instanceof DataFormatException) { capabilityStatementResourceName = "CapabilityStatement"; - implementingClass = myContext.getResourceDefinition(capabilityStatementResourceName).getImplementingClass(); - conformance = (IBaseResource) client.fetchConformance().ofType(implementingClass).execute(); + implementingClass = myContext + .getResourceDefinition(capabilityStatementResourceName) + .getImplementingClass(); + conformance = (IBaseResource) + client.fetchConformance().ofType(implementingClass).execute(); } else { throw e; } } } catch (FhirClientConnectionException e) { - String msg = myContext.getLocalizer().getMessage(RestfulClientFactory.class, "failedToRetrieveConformance", theServerBase + Constants.URL_TOKEN_METADATA); + String msg = myContext + .getLocalizer() + .getMessage( + RestfulClientFactory.class, + "failedToRetrieveConformance", + theServerBase + Constants.URL_TOKEN_METADATA); throw new FhirClientConnectionException(Msg.code(1357) + msg, e); } @@ -353,15 +374,24 @@ public abstract class RestfulClientFactory implements IRestfulClientFactory { serverFhirVersionEnum = FhirVersionEnum.R4; } else { // we'll be lenient and accept this - ourLog.debug("Server conformance statement indicates unknown FHIR version: {}", serverFhirVersionString); + ourLog.debug( + "Server conformance statement indicates unknown FHIR version: {}", serverFhirVersionString); } } if (serverFhirVersionEnum != null) { FhirVersionEnum contextFhirVersion = 
myContext.getVersion().getVersion(); if (!contextFhirVersion.isEquivalentTo(serverFhirVersionEnum)) { - throw new FhirClientInappropriateForServerException(Msg.code(1358) + myContext.getLocalizer().getMessage(RestfulClientFactory.class, "wrongVersionInConformance", - theServerBase + Constants.URL_TOKEN_METADATA, serverFhirVersionString, serverFhirVersionEnum, contextFhirVersion)); + throw new FhirClientInappropriateForServerException(Msg.code(1358) + + myContext + .getLocalizer() + .getMessage( + RestfulClientFactory.class, + "wrongVersionInConformance", + theServerBase + Constants.URL_TOKEN_METADATA, + serverFhirVersionString, + serverFhirVersionEnum, + contextFhirVersion)); } } @@ -375,10 +405,9 @@ public abstract class RestfulClientFactory implements IRestfulClientFactory { } } - /** * Get the http client for the given server base - * + * * @param theServerBase * the server base * @return the http client @@ -390,5 +419,4 @@ public abstract class RestfulClientFactory implements IRestfulClientFactory { * new http client needs to be created */ protected abstract void resetHttpClient(); - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/AdditionalRequestHeadersInterceptor.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/AdditionalRequestHeadersInterceptor.java index 2357d2dd1a3..2e57fea9ca2 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/AdditionalRequestHeadersInterceptor.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/AdditionalRequestHeadersInterceptor.java @@ -112,5 +112,4 @@ public class AdditionalRequestHeadersInterceptor { } } } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/BasicAuthInterceptor.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/BasicAuthInterceptor.java index e9cf82a57e3..6cb8d3c1dfa 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/BasicAuthInterceptor.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/BasicAuthInterceptor.java @@ -19,17 +19,14 @@ */ package ca.uhn.fhir.rest.client.interceptor; -import java.io.IOException; -import java.io.UnsupportedEncodingException; - -import org.apache.commons.codec.binary.Base64; -import org.apache.commons.lang3.StringUtils; - import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.client.api.*; -import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import org.apache.commons.codec.binary.Base64; +import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Validate; +import java.io.IOException; + /** * HTTP interceptor to be used for adding HTTP basic auth username/password tokens * to requests @@ -57,7 +54,8 @@ public class BasicAuthInterceptor implements IClientInterceptor { */ public BasicAuthInterceptor(String theCredentialString) { Validate.notBlank(theCredentialString, "theCredentialString must not be null or blank"); - Validate.isTrue(theCredentialString.contains(":"), "theCredentialString must be in the format 'username:password'"); + Validate.isTrue( + theCredentialString.contains(":"), "theCredentialString must be in the format 'username:password'"); String encoded = Base64.encodeBase64String(theCredentialString.getBytes(Constants.CHARSET_US_ASCII)); myHeaderValue = "Basic " + encoded; } @@ -71,5 +69,4 @@ public class BasicAuthInterceptor implements IClientInterceptor { public void interceptResponse(IHttpResponse theResponse) throws IOException { // 
nothing } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/BearerTokenAuthInterceptor.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/BearerTokenAuthInterceptor.java index 3d6928fbb20..b346fc42093 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/BearerTokenAuthInterceptor.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/BearerTokenAuthInterceptor.java @@ -52,7 +52,7 @@ public class BearerTokenAuthInterceptor implements IClientInterceptor { /** * Constructor - * + * * @param theToken * The bearer token to use (must not be null) */ @@ -70,7 +70,8 @@ public class BearerTokenAuthInterceptor implements IClientInterceptor { @Override public void interceptRequest(IHttpRequest theRequest) { - theRequest.addHeader(Constants.HEADER_AUTHORIZATION, (Constants.HEADER_AUTHORIZATION_VALPREFIX_BEARER + myToken)); + theRequest.addHeader( + Constants.HEADER_AUTHORIZATION, (Constants.HEADER_AUTHORIZATION_VALPREFIX_BEARER + myToken)); } @Override @@ -85,5 +86,4 @@ public class BearerTokenAuthInterceptor implements IClientInterceptor { Validate.notNull(theToken, "theToken must not be null"); myToken = theToken; } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/CapturingInterceptor.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/CapturingInterceptor.java index 2126a1e90eb..5225ead1d24 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/CapturingInterceptor.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/CapturingInterceptor.java @@ -66,7 +66,7 @@ public class CapturingInterceptor { @Hook(value = Pointcut.CLIENT_RESPONSE, order = InterceptorOrders.CAPTURING_INTERCEPTOR_RESPONSE) public void interceptResponse(IHttpResponse theResponse) { - //Buffer the reponse to avoid errors when content has already been read and the entity is not repeatable + // Buffer the reponse to avoid errors when content has already been read and the entity is not repeatable bufferResponse(theResponse); myLastResponse = theResponse; @@ -86,5 +86,4 @@ public class CapturingInterceptor { throw new InternalErrorException(Msg.code(1404) + "Unable to buffer the entity for capturing", e); } } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/CookieInterceptor.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/CookieInterceptor.java index 690870c1141..923680eaabc 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/CookieInterceptor.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/CookieInterceptor.java @@ -29,7 +29,6 @@ import ca.uhn.fhir.rest.client.api.*; *    Cookie: [key]=[value]
    *

    */ - public class CookieInterceptor implements IClientInterceptor { private final String sessionCookie; @@ -39,7 +38,7 @@ public class CookieInterceptor implements IClientInterceptor { @Override public void interceptRequest(IHttpRequest theRequest) { - theRequest.addHeader(Constants.HEADER_COOKIE, sessionCookie); //$NON-NLS-1$ + theRequest.addHeader(Constants.HEADER_COOKIE, sessionCookie); // $NON-NLS-1$ } @Override diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/LoggingInterceptor.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/LoggingInterceptor.java index 8f75eb872ad..448a8b88adb 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/LoggingInterceptor.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/LoggingInterceptor.java @@ -94,7 +94,8 @@ public class LoggingInterceptor implements IClientInterceptor { myLog.debug("Client request body:\n{}", content); } } catch (IllegalStateException | IOException e) { - myLog.warn("Failed to replay request contents (during logging attempt, actual FHIR call did not fail)", e); + myLog.warn( + "Failed to replay request contents (during logging attempt, actual FHIR call did not fail)", e); } } } @@ -240,5 +241,4 @@ public class LoggingInterceptor implements IClientInterceptor { myLogResponseSummary = theValue; return this; } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/SimpleRequestHeaderInterceptor.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/SimpleRequestHeaderInterceptor.java index a715cefc150..c4149cffc9c 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/SimpleRequestHeaderInterceptor.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/SimpleRequestHeaderInterceptor.java @@ -19,15 +19,15 @@ */ package ca.uhn.fhir.rest.client.interceptor; -import static org.apache.commons.lang3.StringUtils.isNotBlank; - -import java.io.IOException; - import ca.uhn.fhir.rest.client.api.IClientInterceptor; import ca.uhn.fhir.rest.client.api.IHttpRequest; import ca.uhn.fhir.rest.client.api.IHttpResponse; import org.apache.commons.lang3.Validate; +import java.io.IOException; + +import static org.apache.commons.lang3.StringUtils.isNotBlank; + /** * This interceptor adds an arbitrary header to requests made by this client. Both the * header name and the header value are specified by the calling code. 
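Editor's note (illustrative only, not part of the patch): the Javadoc context above describes SimpleRequestHeaderInterceptor, which attaches a caller-specified header to every request made by a client. A minimal usage sketch follows, assuming a two-argument (header name, header value) constructor, an R4 FhirContext with hapi-fhir-structures-r4 on the classpath, and a hypothetical server base URL; header name and value are arbitrary examples.

    import ca.uhn.fhir.context.FhirContext;
    import ca.uhn.fhir.rest.client.api.IGenericClient;
    import ca.uhn.fhir.rest.client.interceptor.SimpleRequestHeaderInterceptor;
    import org.hl7.fhir.r4.model.Patient;

    public class HeaderInterceptorExample {
        public static void main(String[] args) {
            FhirContext ctx = FhirContext.forR4();
            // Hypothetical base URL, used only for illustration
            IGenericClient client = ctx.newRestfulGenericClient("http://example.com/fhir");
            // Every request made by this client instance will now carry the extra header
            client.registerInterceptor(new SimpleRequestHeaderInterceptor("X-Request-Source", "batch-job"));
            // Ordinary fluent read; the interceptor is applied transparently
            Patient patient = client.read().resource(Patient.class).withId("123").execute();
            System.out.println(patient.getIdElement().getValue());
        }
    }
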
@@ -69,12 +69,13 @@ public class SimpleRequestHeaderInterceptor implements IClientInterceptor { int colonIdx = theCompleteHeader.indexOf(':'); if (colonIdx != -1) { setHeaderName(theCompleteHeader.substring(0, colonIdx).trim()); - setHeaderValue(theCompleteHeader.substring(colonIdx+1, theCompleteHeader.length()).trim()); + setHeaderValue(theCompleteHeader + .substring(colonIdx + 1, theCompleteHeader.length()) + .trim()); } else { setHeaderName(theCompleteHeader.trim()); setHeaderValue(null); } - } public String getHeaderName() { @@ -104,5 +105,4 @@ public class SimpleRequestHeaderInterceptor implements IClientInterceptor { public void setHeaderValue(String theHeaderValue) { myHeaderValue = theHeaderValue; } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/ThreadLocalCapturingInterceptor.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/ThreadLocalCapturingInterceptor.java index cc71bf32a26..96fe8774152 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/ThreadLocalCapturingInterceptor.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/ThreadLocalCapturingInterceptor.java @@ -22,9 +22,6 @@ package ca.uhn.fhir.rest.client.interceptor; import ca.uhn.fhir.rest.client.api.IClientInterceptor; import ca.uhn.fhir.rest.client.api.IHttpRequest; import ca.uhn.fhir.rest.client.api.IHttpResponse; -import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; - -import java.io.IOException; /** * This is a client interceptor that captures the current request and response diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/UrlTenantSelectionInterceptor.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/UrlTenantSelectionInterceptor.java index ee583fbc2c0..c98b553f078 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/UrlTenantSelectionInterceptor.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/UrlTenantSelectionInterceptor.java @@ -79,7 +79,11 @@ public class UrlTenantSelectionInterceptor { serverBase = serverBase.substring(0, serverBase.length() - 1); } - Validate.isTrue(requestUri.startsWith(serverBase), "Request URI %s does not start with server base %s", requestUri, serverBase); + Validate.isTrue( + requestUri.startsWith(serverBase), + "Request URI %s does not start with server base %s", + requestUri, + serverBase); if (theRequest.getUrlSource() == UrlSourceEnum.EXPLICIT) { return; @@ -88,5 +92,4 @@ public class UrlTenantSelectionInterceptor { String newUri = serverBase + "/" + tenantId + requestUri.substring(serverBase.length()); theRequest.setUri(newUri); } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/UserInfoInterceptor.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/UserInfoInterceptor.java index 590a6c67634..5df8a810673 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/UserInfoInterceptor.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/interceptor/UserInfoInterceptor.java @@ -19,44 +19,41 @@ */ package ca.uhn.fhir.rest.client.interceptor; -import java.io.IOException; - import ca.uhn.fhir.rest.client.api.IClientInterceptor; import ca.uhn.fhir.rest.client.api.IHttpRequest; import ca.uhn.fhir.rest.client.api.IHttpResponse; +import java.io.IOException; + /** * HTTP interceptor to be used for adding HTTP headers containing user identifying info for auditing purposes 
to the request */ public class UserInfoInterceptor implements IClientInterceptor { - + public static final String HEADER_USER_ID = "fhir-user-id"; - public static final String HEADER_USER_NAME = "fhir-user-name"; + public static final String HEADER_USER_NAME = "fhir-user-name"; public static final String HEADER_APPLICATION_NAME = "fhir-app-name"; - + private String myUserId; - private String myUserName; + private String myUserName; private String myAppName; - - public UserInfoInterceptor(String theUserId, String theUserName, String theAppName) { + + public UserInfoInterceptor(String theUserId, String theUserName, String theAppName) { super(); myUserId = theUserId; - myUserName = theUserName; + myUserName = theUserName; myAppName = theAppName; } @Override public void interceptRequest(IHttpRequest theRequest) { - if(myUserId != null) theRequest.addHeader(HEADER_USER_ID, myUserId); - if(myUserName != null) theRequest.addHeader(HEADER_USER_NAME, myUserName); - if(myAppName != null) theRequest.addHeader(HEADER_APPLICATION_NAME, myAppName); + if (myUserId != null) theRequest.addHeader(HEADER_USER_ID, myUserId); + if (myUserName != null) theRequest.addHeader(HEADER_USER_NAME, myUserName); + if (myAppName != null) theRequest.addHeader(HEADER_APPLICATION_NAME, myAppName); } @Override public void interceptResponse(IHttpResponse theResponse) throws IOException { // nothing } - - - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/AtParameter.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/AtParameter.java index 4e40fbe9867..f764aa70bce 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/AtParameter.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/AtParameter.java @@ -27,5 +27,4 @@ class AtParameter extends SinceOrAtParameter { public AtParameter() { super(Constants.PARAM_AT, At.class); } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseHttpClientInvocationWithContents.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseHttpClientInvocationWithContents.java index 83ac509a84b..86a1bde415d 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseHttpClientInvocationWithContents.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseHttpClientInvocationWithContents.java @@ -59,7 +59,6 @@ abstract class BaseHttpClientInvocationWithContents extends BaseHttpClientInvoca private final String myUrlPath; private IIdType myForceResourceId; - public BaseHttpClientInvocationWithContents(FhirContext theContext, IBaseResource theResource, String theUrlPath) { super(theContext); myResource = theResource; @@ -69,7 +68,8 @@ abstract class BaseHttpClientInvocationWithContents extends BaseHttpClientInvoca myBundleType = null; } - public BaseHttpClientInvocationWithContents(FhirContext theContext, List theResources, BundleTypeEnum theBundleType) { + public BaseHttpClientInvocationWithContents( + FhirContext theContext, List theResources, BundleTypeEnum theBundleType) { super(theContext); myResource = null; myUrlPath = null; @@ -78,7 +78,8 @@ abstract class BaseHttpClientInvocationWithContents extends BaseHttpClientInvoca myBundleType = theBundleType; } - public BaseHttpClientInvocationWithContents(FhirContext theContext, Map> theParams, String... theUrlPath) { + public BaseHttpClientInvocationWithContents( + FhirContext theContext, Map> theParams, String... 
theUrlPath) { super(theContext); myResource = null; myUrlPath = StringUtils.join(theUrlPath, '/'); @@ -88,7 +89,8 @@ abstract class BaseHttpClientInvocationWithContents extends BaseHttpClientInvoca myBundleType = null; } - public BaseHttpClientInvocationWithContents(FhirContext theContext, String theContents, boolean theIsBundle, String theUrlPath) { + public BaseHttpClientInvocationWithContents( + FhirContext theContext, String theContents, boolean theIsBundle, String theUrlPath) { super(theContext); myResource = null; myUrlPath = theUrlPath; @@ -98,7 +100,12 @@ abstract class BaseHttpClientInvocationWithContents extends BaseHttpClientInvoca } @Override - public IHttpRequest asHttpRequest(String theUrlBase, Map> theExtraParams, EncodingEnum theEncoding, Boolean thePrettyPrint) throws DataFormatException { + public IHttpRequest asHttpRequest( + String theUrlBase, + Map> theExtraParams, + EncodingEnum theEncoding, + Boolean thePrettyPrint) + throws DataFormatException { StringBuilder url = new StringBuilder(); if (myUrlPath == null) { @@ -114,11 +121,13 @@ abstract class BaseHttpClientInvocationWithContents extends BaseHttpClientInvoca } appendExtraParamsWithQuestionMark(theExtraParams, url, url.indexOf("?") == -1); - IHttpClient httpClient = getRestfulClientFactory().getHttpClient(url, myIfNoneExistParams, myIfNoneExistString, getRequestType(), getHeaders()); + IHttpClient httpClient = getRestfulClientFactory() + .getHttpClient(url, myIfNoneExistParams, myIfNoneExistString, getRequestType(), getHeaders()); if (myResource != null && IBaseBinary.class.isAssignableFrom(myResource.getClass())) { IBaseBinary binary = (IBaseBinary) myResource; - if (isNotBlank(binary.getContentType()) && EncodingEnum.forContentTypeStrict(binary.getContentType()) == null) { + if (isNotBlank(binary.getContentType()) + && EncodingEnum.forContentTypeStrict(binary.getContentType()) == null) { if (binary.hasData()) { return httpClient.createBinaryRequest(getContext(), binary); } @@ -130,11 +139,10 @@ abstract class BaseHttpClientInvocationWithContents extends BaseHttpClientInvoca encoding = EncodingEnum.detectEncoding(myContents); } - if (myParams != null) { return httpClient.createParamRequest(getContext(), myParams, encoding); } - encoding = ObjectUtils.defaultIfNull(encoding, EncodingEnum.JSON); + encoding = ObjectUtils.defaultIfNull(encoding, EncodingEnum.JSON); String contents = encodeContents(thePrettyPrint, encoding); String contentType = getContentType(encoding); return httpClient.createByteRequest(getContext(), contents, contentType, encoding); @@ -171,7 +179,7 @@ abstract class BaseHttpClientInvocationWithContents extends BaseHttpClientInvoca if (myForceResourceId != null) { parser.setEncodeForceResourceId(myForceResourceId); } - + parser.setOmitResourceId(myOmitResourceId); if (myResources != null) { IVersionSpecificBundleFactory bundleFactory = getContext().newBundleFactory(); @@ -201,5 +209,4 @@ abstract class BaseHttpClientInvocationWithContents extends BaseHttpClientInvoca public void setOmitResourceId(boolean theOmitResourceId) { myOmitResourceId = theOmitResourceId; } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseMethodBinding.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseMethodBinding.java index 1368843854e..0e8e1c33a6b 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseMethodBinding.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseMethodBinding.java @@ -19,18 +19,8 @@ */ package 
ca.uhn.fhir.rest.client.method; -import ca.uhn.fhir.i18n.Msg; -import java.io.IOException; -import java.io.InputStream; -import java.io.Reader; -import java.lang.reflect.Method; -import java.util.*; - -import org.apache.commons.io.IOUtils; -import org.hl7.fhir.instance.model.api.IAnyResource; -import org.hl7.fhir.instance.model.api.IBaseResource; - import ca.uhn.fhir.context.*; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.*; import ca.uhn.fhir.model.base.resource.BaseOperationOutcome; import ca.uhn.fhir.parser.IParser; @@ -40,8 +30,14 @@ import ca.uhn.fhir.rest.client.exceptions.NonFhirResponseException; import ca.uhn.fhir.rest.client.impl.BaseHttpClientInvocation; import ca.uhn.fhir.rest.server.exceptions.*; import ca.uhn.fhir.util.ReflectionUtil; +import org.apache.commons.io.IOUtils; +import org.hl7.fhir.instance.model.api.IAnyResource; +import org.hl7.fhir.instance.model.api.IBaseResource; -import static org.apache.commons.lang3.StringUtils.isNotBlank; +import java.io.IOException; +import java.io.InputStream; +import java.lang.reflect.Method; +import java.util.*; public abstract class BaseMethodBinding implements IClientResponseHandler { @@ -71,13 +67,17 @@ public abstract class BaseMethodBinding implements IClientResponseHandler break; } } - } - protected IParser createAppropriateParserForParsingResponse(String theResponseMimeType, InputStream theResponseInputStream, int theResponseStatusCode, List> thePreferTypes) { + protected IParser createAppropriateParserForParsingResponse( + String theResponseMimeType, + InputStream theResponseInputStream, + int theResponseStatusCode, + List> thePreferTypes) { EncodingEnum encoding = EncodingEnum.forContentType(theResponseMimeType); if (encoding == null) { - NonFhirResponseException ex = NonFhirResponseException.newInstance(theResponseStatusCode, theResponseMimeType, theResponseInputStream); + NonFhirResponseException ex = NonFhirResponseException.newInstance( + theResponseStatusCode, theResponseMimeType, theResponseInputStream); populateException(ex, theResponseInputStream); throw ex; } @@ -142,33 +142,37 @@ public abstract class BaseMethodBinding implements IClientResponseHandler return mySupportsConditionalMultiple; } - protected BaseServerResponseException processNon2xxResponseAndReturnExceptionToThrow(int theStatusCode, String theResponseMimeType, InputStream theResponseInputStream) { + protected BaseServerResponseException processNon2xxResponseAndReturnExceptionToThrow( + int theStatusCode, String theResponseMimeType, InputStream theResponseInputStream) { BaseServerResponseException ex; switch (theStatusCode) { - case Constants.STATUS_HTTP_400_BAD_REQUEST: - ex = new InvalidRequestException("Server responded with HTTP 400"); - break; - case Constants.STATUS_HTTP_404_NOT_FOUND: - ex = new ResourceNotFoundException("Server responded with HTTP 404"); - break; - case Constants.STATUS_HTTP_405_METHOD_NOT_ALLOWED: - ex = new MethodNotAllowedException("Server responded with HTTP 405"); - break; - case Constants.STATUS_HTTP_409_CONFLICT: - ex = new ResourceVersionConflictException("Server responded with HTTP 409"); - break; - case Constants.STATUS_HTTP_412_PRECONDITION_FAILED: - ex = new PreconditionFailedException("Server responded with HTTP 412"); - break; - case Constants.STATUS_HTTP_422_UNPROCESSABLE_ENTITY: - IParser parser = createAppropriateParserForParsingResponse(theResponseMimeType, theResponseInputStream, theStatusCode, null); - // TODO: handle if something other than OO comes back - BaseOperationOutcome operationOutcome = 
(BaseOperationOutcome) parser.parseResource(theResponseInputStream); - ex = new UnprocessableEntityException(myContext, operationOutcome); - break; - default: - ex = new UnclassifiedServerFailureException(theStatusCode, "Server responded with HTTP " + theStatusCode); - break; + case Constants.STATUS_HTTP_400_BAD_REQUEST: + ex = new InvalidRequestException("Server responded with HTTP 400"); + break; + case Constants.STATUS_HTTP_404_NOT_FOUND: + ex = new ResourceNotFoundException("Server responded with HTTP 404"); + break; + case Constants.STATUS_HTTP_405_METHOD_NOT_ALLOWED: + ex = new MethodNotAllowedException("Server responded with HTTP 405"); + break; + case Constants.STATUS_HTTP_409_CONFLICT: + ex = new ResourceVersionConflictException("Server responded with HTTP 409"); + break; + case Constants.STATUS_HTTP_412_PRECONDITION_FAILED: + ex = new PreconditionFailedException("Server responded with HTTP 412"); + break; + case Constants.STATUS_HTTP_422_UNPROCESSABLE_ENTITY: + IParser parser = createAppropriateParserForParsingResponse( + theResponseMimeType, theResponseInputStream, theStatusCode, null); + // TODO: handle if something other than OO comes back + BaseOperationOutcome operationOutcome = + (BaseOperationOutcome) parser.parseResource(theResponseInputStream); + ex = new UnprocessableEntityException(myContext, operationOutcome); + break; + default: + ex = new UnclassifiedServerFailureException( + theStatusCode, "Server responded with HTTP " + theStatusCode); + break; } populateException(ex, theResponseInputStream); @@ -198,7 +202,21 @@ public abstract class BaseMethodBinding implements IClientResponseHandler Patch patch = theMethod.getAnnotation(Patch.class); // ** if you add another annotation above, also add it to the next line: - if (!verifyMethodHasZeroOrOneOperationAnnotation(theMethod, read, search, conformance, create, update, delete, history, validate, addTags, deleteTags, transaction, operation, getPage, + if (!verifyMethodHasZeroOrOneOperationAnnotation( + theMethod, + read, + search, + conformance, + create, + update, + delete, + history, + validate, + addTags, + deleteTags, + transaction, + operation, + getPage, patch)) { return null; } @@ -220,15 +238,20 @@ public abstract class BaseMethodBinding implements IClientResponseHandler returnTypeFromMethod = ReflectionUtil.getGenericCollectionTypeOfMethodReturnType(theMethod); if (returnTypeFromMethod == null) { ourLog.trace("Method {} returns a non-typed list, can't verify return type", theMethod); - } else if (!verifyIsValidResourceReturnType(returnTypeFromMethod) && !isResourceInterface(returnTypeFromMethod)) { - throw new ConfigurationException(Msg.code(1427) + "Method '" + theMethod.getName() + "' from client type " + theMethod.getDeclaringClass().getCanonicalName() - + " returns a collection with generic type " + toLogString(returnTypeFromMethod) - + " - Must return a resource type or a collection (List, Set) with a resource type parameter (e.g. List or List )"); + } else if (!verifyIsValidResourceReturnType(returnTypeFromMethod) + && !isResourceInterface(returnTypeFromMethod)) { + throw new ConfigurationException( + Msg.code(1427) + "Method '" + theMethod.getName() + "' from client type " + + theMethod.getDeclaringClass().getCanonicalName() + + " returns a collection with generic type " + toLogString(returnTypeFromMethod) + + " - Must return a resource type or a collection (List, Set) with a resource type parameter (e.g. 
List or List )"); } } else { if (!isResourceInterface(returnTypeFromMethod) && !verifyIsValidResourceReturnType(returnTypeFromMethod)) { - throw new ConfigurationException(Msg.code(1428) + "Method '" + theMethod.getName() + "' from client type " + theMethod.getDeclaringClass().getCanonicalName() - + " returns " + toLogString(returnTypeFromMethod) + " - Must return a resource type (eg Patient, Bundle" + throw new ConfigurationException(Msg.code(1428) + "Method '" + theMethod.getName() + + "' from client type " + theMethod.getDeclaringClass().getCanonicalName() + + " returns " + toLogString(returnTypeFromMethod) + + " - Must return a resource type (eg Patient, Bundle" + ", etc., see the documentation for more details)"); } } @@ -258,8 +281,10 @@ public abstract class BaseMethodBinding implements IClientResponseHandler if (!isResourceInterface(returnTypeFromAnnotation)) { if (!verifyIsValidResourceReturnType(returnTypeFromAnnotation)) { - throw new ConfigurationException(Msg.code(1429) + "Method '" + theMethod.getName() + "' from client type " + theMethod.getDeclaringClass().getCanonicalName() - + " returns " + toLogString(returnTypeFromAnnotation) + " according to annotation - Must return a resource type"); + throw new ConfigurationException(Msg.code(1429) + "Method '" + theMethod.getName() + + "' from client type " + theMethod.getDeclaringClass().getCanonicalName() + " returns " + + toLogString(returnTypeFromAnnotation) + + " according to annotation - Must return a resource type"); } returnType = returnTypeFromAnnotation; } else { @@ -290,13 +315,17 @@ public abstract class BaseMethodBinding implements IClientResponseHandler } else if (history != null) { return new HistoryMethodBinding(theMethod, theContext, theProvider); } else if (validate != null) { - return new ValidateMethodBindingDstu2Plus(returnType, returnTypeFromRp, theMethod, theContext, theProvider, validate); + return new ValidateMethodBindingDstu2Plus( + returnType, returnTypeFromRp, theMethod, theContext, theProvider, validate); } else if (transaction != null) { return new TransactionMethodBinding(theMethod, theContext, theProvider); } else if (operation != null) { - return new OperationMethodBinding(returnType, returnTypeFromRp, theMethod, theContext, theProvider, operation); + return new OperationMethodBinding( + returnType, returnTypeFromRp, theMethod, theContext, theProvider, operation); } else { - throw new ConfigurationException(Msg.code(1430) + "Did not detect any FHIR annotations on method '" + theMethod.getName() + "' on type: " + theMethod.getDeclaringClass().getCanonicalName()); + throw new ConfigurationException( + Msg.code(1430) + "Did not detect any FHIR annotations on method '" + theMethod.getName() + + "' on type: " + theMethod.getDeclaringClass().getCanonicalName()); } // // each operation name must have a request type annotation and be @@ -322,7 +351,9 @@ public abstract class BaseMethodBinding implements IClientResponseHandler } public static boolean isResourceInterface(Class theReturnTypeFromMethod) { - return theReturnTypeFromMethod.equals(IBaseResource.class) || theReturnTypeFromMethod.equals(IResource.class) || theReturnTypeFromMethod.equals(IAnyResource.class); + return theReturnTypeFromMethod.equals(IBaseResource.class) + || theReturnTypeFromMethod.equals(IResource.class) + || theReturnTypeFromMethod.equals(IAnyResource.class); } private static void populateException(BaseServerResponseException theEx, InputStream theResponseInputStream) { @@ -360,10 +391,11 @@ public abstract class BaseMethodBinding 
implements IClientResponseHandler if (obj1 == null) { obj1 = object; } else { - throw new ConfigurationException(Msg.code(1431) + "Method " + theNextMethod.getName() + " on type '" + theNextMethod.getDeclaringClass().getSimpleName() + " has annotations @" - + obj1.getClass().getSimpleName() + " and @" + object.getClass().getSimpleName() + ". Can not have both."); + throw new ConfigurationException(Msg.code(1431) + "Method " + theNextMethod.getName() + " on type '" + + theNextMethod.getDeclaringClass().getSimpleName() + " has annotations @" + + obj1.getClass().getSimpleName() + " and @" + + object.getClass().getSimpleName() + ". Can not have both."); } - } } if (obj1 == null) { @@ -375,5 +407,4 @@ public abstract class BaseMethodBinding implements IClientResponseHandler } return true; } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseOutcomeReturningMethodBinding.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseOutcomeReturningMethodBinding.java index ec5254c5d67..91db00c0dbd 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseOutcomeReturningMethodBinding.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseOutcomeReturningMethodBinding.java @@ -19,34 +19,36 @@ */ package ca.uhn.fhir.rest.client.method; -import ca.uhn.fhir.i18n.Msg; -import java.io.InputStream; -import java.io.Reader; -import java.lang.reflect.Method; -import java.util.*; - -import org.apache.commons.lang3.StringUtils; -import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.instance.model.api.IIdType; - import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.api.MethodOutcome; import ca.uhn.fhir.rest.api.RequestTypeEnum; import ca.uhn.fhir.rest.client.impl.BaseHttpClientInvocation; import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; +import org.apache.commons.lang3.StringUtils; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; + +import java.io.InputStream; +import java.lang.reflect.Method; +import java.util.*; abstract class BaseOutcomeReturningMethodBinding extends BaseMethodBinding { static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseOutcomeReturningMethodBinding.class); private boolean myReturnVoid; - public BaseOutcomeReturningMethodBinding(Method theMethod, FhirContext theContext, Class theMethodAnnotation, Object theProvider) { + public BaseOutcomeReturningMethodBinding( + Method theMethod, FhirContext theContext, Class theMethodAnnotation, Object theProvider) { super(theMethod, theContext, theProvider); if (!theMethod.getReturnType().equals(MethodOutcome.class)) { if (!allowVoidReturnType()) { - throw new ConfigurationException(Msg.code(1413) + "Method " + theMethod.getName() + " in type " + theMethod.getDeclaringClass().getCanonicalName() + " is a @" + theMethodAnnotation.getSimpleName() + " method but it does not return " + MethodOutcome.class); + throw new ConfigurationException(Msg.code(1413) + "Method " + theMethod.getName() + " in type " + + theMethod.getDeclaringClass().getCanonicalName() + " is a @" + + theMethodAnnotation.getSimpleName() + " method but it does not return " + + MethodOutcome.class); } else if (theMethod.getReturnType() == void.class) { myReturnVoid = true; } @@ -69,15 +71,22 @@ abstract class BaseOutcomeReturningMethodBinding extends BaseMethodBinding> theHeaders) throws 
BaseServerResponseException { + public MethodOutcome invokeClient( + String theResponseMimeType, + InputStream theResponseInputStream, + int theResponseStatusCode, + Map> theHeaders) + throws BaseServerResponseException { if (theResponseStatusCode >= 200 && theResponseStatusCode < 300) { if (myReturnVoid) { return null; } - MethodOutcome retVal = MethodUtil.process2xxResponse(getContext(), theResponseStatusCode, theResponseMimeType, theResponseInputStream, theHeaders); + MethodOutcome retVal = MethodUtil.process2xxResponse( + getContext(), theResponseStatusCode, theResponseMimeType, theResponseInputStream, theHeaders); return retVal; } - throw processNon2xxResponseAndReturnExceptionToThrow(theResponseStatusCode, theResponseMimeType, theResponseInputStream); + throw processNon2xxResponseAndReturnExceptionToThrow( + theResponseStatusCode, theResponseMimeType, theResponseInputStream); } public boolean isReturnVoid() { @@ -86,9 +95,8 @@ abstract class BaseOutcomeReturningMethodBinding extends BaseMethodBinding provideAllowableRequestTypes(); - - - protected static void parseContentLocation(FhirContext theContext, MethodOutcome theOutcomeToPopulate, String theLocationHeader) { + protected static void parseContentLocation( + FhirContext theContext, MethodOutcome theOutcomeToPopulate, String theLocationHeader) { if (StringUtils.isBlank(theLocationHeader)) { return; } @@ -97,5 +105,4 @@ abstract class BaseOutcomeReturningMethodBinding extends BaseMethodBinding theMethodAnnotationType, Class theResourceTypeFromAnnotation) { + public BaseOutcomeReturningMethodBindingWithResourceIdButNoResourceBody( + Method theMethod, + FhirContext theContext, + Object theProvider, + Class theMethodAnnotationType, + Class theResourceTypeFromAnnotation) { super(theMethod, theContext, theMethodAnnotationType, theProvider); Class resourceType = theResourceTypeFromAnnotation; @@ -42,14 +46,19 @@ public abstract class BaseOutcomeReturningMethodBindingWithResourceIdButNoResour RuntimeResourceDefinition def = theContext.getResourceDefinition(resourceType); myResourceName = def.getName(); } else { - throw new ConfigurationException(Msg.code(1474) + "Can not determine resource type for method '" + theMethod.getName() + "' on type " + theMethod.getDeclaringClass().getCanonicalName() + " - Did you forget to include the resourceType() value on the @" + Delete.class.getSimpleName() + " method annotation?"); + throw new ConfigurationException( + Msg.code(1474) + "Can not determine resource type for method '" + theMethod.getName() + "' on type " + + theMethod.getDeclaringClass().getCanonicalName() + + " - Did you forget to include the resourceType() value on the @" + + Delete.class.getSimpleName() + " method annotation?"); } myIdParameterIndex = ParameterUtil.findIdParameterIndex(theMethod, getContext()); if (myIdParameterIndex == null) { - throw new ConfigurationException(Msg.code(1475) + "Method '" + theMethod.getName() + "' on type '" + theMethod.getDeclaringClass().getCanonicalName() + "' has no parameter annotated with the @" + IdParam.class.getSimpleName() + " annotation"); + throw new ConfigurationException(Msg.code(1475) + "Method '" + theMethod.getName() + "' on type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' has no parameter annotated with the @" + + IdParam.class.getSimpleName() + " annotation"); } - } @Override @@ -60,6 +69,4 @@ public abstract class BaseOutcomeReturningMethodBindingWithResourceIdButNoResour protected Integer getIdParameterIndex() { return myIdParameterIndex; } - - } diff --git 
a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseOutcomeReturningMethodBindingWithResourceParam.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseOutcomeReturningMethodBindingWithResourceParam.java index e2929aaad70..0aee75ba2ff 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseOutcomeReturningMethodBindingWithResourceParam.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseOutcomeReturningMethodBindingWithResourceParam.java @@ -19,23 +19,23 @@ */ package ca.uhn.fhir.rest.client.method; -import ca.uhn.fhir.i18n.Msg; -import java.lang.reflect.Method; - -import org.hl7.fhir.instance.model.api.IBaseResource; - import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.annotation.ResourceParam; import ca.uhn.fhir.rest.client.impl.BaseHttpClientInvocation; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import org.hl7.fhir.instance.model.api.IBaseResource; + +import java.lang.reflect.Method; abstract class BaseOutcomeReturningMethodBindingWithResourceParam extends BaseOutcomeReturningMethodBinding { private String myResourceName; private int myResourceParameterIndex = -1; private Class myResourceType; - public BaseOutcomeReturningMethodBindingWithResourceParam(Method theMethod, FhirContext theContext, Class theMethodAnnotation, Object theProvider) { + public BaseOutcomeReturningMethodBindingWithResourceParam( + Method theMethod, FhirContext theContext, Class theMethodAnnotation, Object theProvider) { super(theMethod, theContext, theMethodAnnotation, theProvider); ResourceParameter resourceParameter = null; @@ -45,7 +45,9 @@ abstract class BaseOutcomeReturningMethodBindingWithResourceParam extends BaseOu if (next instanceof ResourceParameter) { resourceParameter = (ResourceParameter) next; if (myResourceType != null) { - throw new ConfigurationException(Msg.code(1468) + "Method " + theMethod.getName() + " on type " + theMethod.getDeclaringClass() + " has more than one @ResourceParam. Only one is allowed."); + throw new ConfigurationException(Msg.code(1468) + "Method " + theMethod.getName() + " on type " + + theMethod.getDeclaringClass() + + " has more than one @ResourceParam. 
Only one is allowed."); } myResourceType = resourceParameter.getResourceType(); @@ -56,18 +58,19 @@ abstract class BaseOutcomeReturningMethodBindingWithResourceParam extends BaseOu } if (myResourceType == null) { - throw new ConfigurationException(Msg.code(1469) + "Unable to determine resource type for method: " + theMethod); + throw new ConfigurationException( + Msg.code(1469) + "Unable to determine resource type for method: " + theMethod); } myResourceName = theContext.getResourceType(myResourceType); if (resourceParameter == null) { - throw new ConfigurationException(Msg.code(1470) + "Method " + theMethod.getName() + " in type " + theMethod.getDeclaringClass().getCanonicalName() + " does not have a resource parameter annotated with @" + ResourceParam.class.getSimpleName()); + throw new ConfigurationException(Msg.code(1470) + "Method " + theMethod.getName() + " in type " + + theMethod.getDeclaringClass().getCanonicalName() + + " does not have a resource parameter annotated with @" + ResourceParam.class.getSimpleName()); } - } - @Override public String getResourceName() { return myResourceName; @@ -87,8 +90,8 @@ abstract class BaseOutcomeReturningMethodBindingWithResourceParam extends BaseOu /** * Subclasses may override */ - protected void validateResourceIdAndUrlIdForNonConditionalOperation(IBaseResource theResource, String theResourceId, String theUrlId, String theMatchUrl) { + protected void validateResourceIdAndUrlIdForNonConditionalOperation( + IBaseResource theResource, String theResourceId, String theUrlId, String theMatchUrl) { return; } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseQueryParameter.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseQueryParameter.java index 69738ce2863..2b238f8fb68 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseQueryParameter.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseQueryParameter.java @@ -19,12 +19,11 @@ */ package ca.uhn.fhir.rest.client.method; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.api.QualifiedParamList; import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; -import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import org.hl7.fhir.instance.model.api.IBaseResource; import java.lang.reflect.Method; @@ -37,24 +36,35 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; public abstract class BaseQueryParameter implements IParameter { - public abstract List encode(FhirContext theContext, Object theObject) throws InternalErrorException; + public abstract List encode(FhirContext theContext, Object theObject) + throws InternalErrorException; public abstract String getName(); public abstract RestSearchParameterTypeEnum getParamType(); @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { // ignore for now } public abstract boolean isRequired(); @Override - public void translateClientArgumentIntoQueryArgument(FhirContext theContext, Object theSourceClientArgument, Map> theTargetQueryArguments, IBaseResource theTargetResource) throws InternalErrorException { + public void translateClientArgumentIntoQueryArgument( + 
FhirContext theContext, + Object theSourceClientArgument, + Map> theTargetQueryArguments, + IBaseResource theTargetResource) + throws InternalErrorException { if (theSourceClientArgument == null) { if (isRequired()) { - throw new NullPointerException(Msg.code(1451) + "SearchParameter '" + getName() + "' is required and may not be null"); + throw new NullPointerException( + Msg.code(1451) + "SearchParameter '" + getName() + "' is required and may not be null"); } } else { List value = encode(theContext, theSourceClientArgument); @@ -70,12 +80,11 @@ public abstract class BaseQueryParameter implements IParameter { String qualifier = nextParamEntry.getQualifier(); String paramName = isNotBlank(qualifier) ? getName() + qualifier : getName(); - List paramValues = theTargetQueryArguments.computeIfAbsent(paramName, k -> new ArrayList<>(value.size())); + List paramValues = + theTargetQueryArguments.computeIfAbsent(paramName, k -> new ArrayList<>(value.size())); paramValues.add(b.toString()); } - } } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseResourceReturningMethodBinding.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseResourceReturningMethodBinding.java index 5dbc8c30efc..05458381be9 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseResourceReturningMethodBinding.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/BaseResourceReturningMethodBinding.java @@ -19,9 +19,9 @@ */ package ca.uhn.fhir.rest.client.method; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.IResource; import ca.uhn.fhir.model.valueset.BundleTypeEnum; import ca.uhn.fhir.parser.IParser; @@ -49,7 +49,8 @@ import java.util.Set; public abstract class BaseResourceReturningMethodBinding extends BaseMethodBinding { protected static final Set ALLOWED_PARAMS; - private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseResourceReturningMethodBinding.class); + private static final org.slf4j.Logger ourLog = + org.slf4j.LoggerFactory.getLogger(BaseResourceReturningMethodBinding.class); static { HashSet set = new HashSet(); @@ -73,7 +74,8 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi private List> myPreferTypesList; @SuppressWarnings("unchecked") - public BaseResourceReturningMethodBinding(Class theReturnResourceType, Method theMethod, FhirContext theContext, Object theProvider) { + public BaseResourceReturningMethodBinding( + Class theReturnResourceType, Method theMethod, FhirContext theContext, Object theProvider) { super(theMethod, theContext, theProvider); Class methodReturnType = theMethod.getReturnType(); @@ -83,13 +85,18 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi Class collectionType = ReflectionUtil.getGenericCollectionTypeOfMethodReturnType(theMethod); if (collectionType != null) { if (!Object.class.equals(collectionType) && !IBaseResource.class.isAssignableFrom(collectionType)) { - throw new ConfigurationException(Msg.code(1458) + "Method " + theMethod.getDeclaringClass().getSimpleName() + "#" + theMethod.getName() + " returns an invalid collection generic type: " + collectionType); + throw new ConfigurationException(Msg.code(1458) + "Method " + + theMethod.getDeclaringClass().getSimpleName() + "#" + theMethod.getName() + + " returns an invalid collection generic type: " + 
collectionType); } } myResourceListCollectionType = collectionType; } else if (IBaseResource.class.isAssignableFrom(methodReturnType)) { - if (Modifier.isAbstract(methodReturnType.getModifiers()) == false && theContext.getResourceDefinition((Class) methodReturnType).isBundle()) { + if (Modifier.isAbstract(methodReturnType.getModifiers()) == false + && theContext + .getResourceDefinition((Class) methodReturnType) + .isBundle()) { myMethodReturnType = MethodReturnTypeEnum.BUNDLE_RESOURCE; } else { myMethodReturnType = MethodReturnTypeEnum.RESOURCE; @@ -97,12 +104,15 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi } else if (MethodOutcome.class.isAssignableFrom(methodReturnType)) { myMethodReturnType = MethodReturnTypeEnum.METHOD_OUTCOME; } else { - throw new ConfigurationException(Msg.code(1459) + "Invalid return type '" + methodReturnType.getCanonicalName() + "' on method '" + theMethod.getName() + "' on type: " + theMethod.getDeclaringClass().getCanonicalName()); + throw new ConfigurationException(Msg.code(1459) + "Invalid return type '" + + methodReturnType.getCanonicalName() + "' on method '" + theMethod.getName() + "' on type: " + + theMethod.getDeclaringClass().getCanonicalName()); } if (theReturnResourceType != null) { if (IBaseResource.class.isAssignableFrom(theReturnResourceType)) { - if (Modifier.isAbstract(theReturnResourceType.getModifiers()) || Modifier.isInterface(theReturnResourceType.getModifiers())) { + if (Modifier.isAbstract(theReturnResourceType.getModifiers()) + || Modifier.isInterface(theReturnResourceType.getModifiers())) { // If we're returning an abstract type, that's ok } else { myResourceType = (Class) theReturnResourceType; @@ -131,63 +141,76 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi public abstract ReturnTypeEnum getReturnType(); @Override - public Object invokeClient(String theResponseMimeType, InputStream theResponseInputStream, int theResponseStatusCode, Map> theHeaders) throws IOException { - + public Object invokeClient( + String theResponseMimeType, + InputStream theResponseInputStream, + int theResponseStatusCode, + Map> theHeaders) + throws IOException { + if (Constants.STATUS_HTTP_204_NO_CONTENT == theResponseStatusCode) { return toReturnType(null); } - - IParser parser = createAppropriateParserForParsingResponse(theResponseMimeType, theResponseInputStream, theResponseStatusCode, myPreferTypesList); + + IParser parser = createAppropriateParserForParsingResponse( + theResponseMimeType, theResponseInputStream, theResponseStatusCode, myPreferTypesList); switch (getReturnType()) { - case BUNDLE: { + case BUNDLE: { + IBaseBundle bundle; + List listOfResources; + Class type = + getContext().getResourceDefinition("Bundle").getImplementingClass(); + bundle = (IBaseBundle) parser.parseResource(type, theResponseInputStream); + listOfResources = BundleUtil.toListOfResources(getContext(), bundle); - IBaseBundle bundle; - List listOfResources; - Class type = getContext().getResourceDefinition("Bundle").getImplementingClass(); - bundle = (IBaseBundle) parser.parseResource(type, theResponseInputStream); - listOfResources = BundleUtil.toListOfResources(getContext(), bundle); - - switch (getMethodReturnType()) { - case BUNDLE_RESOURCE: - return bundle; - case LIST_OF_RESOURCES: - if (myResourceListCollectionType != null) { - for (Iterator iter = listOfResources.iterator(); iter.hasNext();) { - IBaseResource next = iter.next(); - if 
(!myResourceListCollectionType.isAssignableFrom(next.getClass())) { - ourLog.debug("Not returning resource of type {} because it is not a subclass or instance of {}", next.getClass(), myResourceListCollectionType); - iter.remove(); + switch (getMethodReturnType()) { + case BUNDLE_RESOURCE: + return bundle; + case LIST_OF_RESOURCES: + if (myResourceListCollectionType != null) { + for (Iterator iter = listOfResources.iterator(); + iter.hasNext(); ) { + IBaseResource next = iter.next(); + if (!myResourceListCollectionType.isAssignableFrom(next.getClass())) { + ourLog.debug( + "Not returning resource of type {} because it is not a subclass or instance of {}", + next.getClass(), + myResourceListCollectionType); + iter.remove(); + } + } } - } + return listOfResources; + case RESOURCE: + List list = BundleUtil.toListOfResources(getContext(), bundle); + if (list.size() == 0) { + return null; + } else if (list.size() == 1) { + return list.get(0); + } else { + throw new InvalidResponseException( + Msg.code(1460) + + "FHIR server call returned a bundle with multiple resources, but this method is only able to returns one.", + theResponseStatusCode); + } + default: + break; } - return listOfResources; - case RESOURCE: - List list = BundleUtil.toListOfResources(getContext(), bundle); - if (list.size() == 0) { - return null; - } else if (list.size() == 1) { - return list.get(0); - } else { - throw new InvalidResponseException(Msg.code(1460) + "FHIR server call returned a bundle with multiple resources, but this method is only able to returns one.", theResponseStatusCode); - } - default: break; } - break; - } - case RESOURCE: { - IBaseResource resource; - if (myResourceType != null) { - resource = parser.parseResource(myResourceType, theResponseInputStream); - } else { - resource = parser.parseResource(theResponseInputStream); + case RESOURCE: { + IBaseResource resource; + if (myResourceType != null) { + resource = parser.parseResource(myResourceType, theResponseInputStream); + } else { + resource = parser.parseResource(theResponseInputStream); + } + + MethodUtil.parseClientRequestResourceHeaders(null, theHeaders, resource); + + return toReturnType(resource); } - - MethodUtil.parseClientRequestResourceHeaders(null, theHeaders, resource); - - return toReturnType(resource); - } } throw new IllegalStateException(Msg.code(1461) + "Should not get here!"); @@ -195,25 +218,25 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi private Object toReturnType(IBaseResource resource) { Object retVal = null; - + switch (getMethodReturnType()) { - case LIST_OF_RESOURCES: - retVal = Collections.emptyList(); - if (resource != null) { - retVal = Collections.singletonList(resource); - } - break; - case RESOURCE: - retVal = resource; - break; - case BUNDLE_RESOURCE: - retVal = resource; - break; - case METHOD_OUTCOME: - MethodOutcome outcome = new MethodOutcome(); - outcome.setOperationOutcome((IBaseOperationOutcome) resource); - retVal = outcome; - break; + case LIST_OF_RESOURCES: + retVal = Collections.emptyList(); + if (resource != null) { + retVal = Collections.singletonList(resource); + } + break; + case RESOURCE: + retVal = resource; + break; + case BUNDLE_RESOURCE: + retVal = resource; + break; + case METHOD_OUTCOME: + MethodOutcome outcome = new MethodOutcome(); + outcome.setOperationOutcome((IBaseOperationOutcome) resource); + retVal = outcome; + break; } return retVal; } @@ -224,7 +247,9 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi if 
(myResourceType != null && !BaseMethodBinding.isResourceInterface(myResourceType)) { preferTypes = new ArrayList>(1); preferTypes.add(myResourceType); - } else if (myResourceListCollectionType != null && IBaseResource.class.isAssignableFrom(myResourceListCollectionType) && !BaseMethodBinding.isResourceInterface(myResourceListCollectionType)) { + } else if (myResourceListCollectionType != null + && IBaseResource.class.isAssignableFrom(myResourceListCollectionType) + && !BaseMethodBinding.isResourceInterface(myResourceListCollectionType)) { preferTypes = new ArrayList>(1); preferTypes.add((Class) myResourceListCollectionType); } @@ -260,12 +285,10 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi public IBaseResource getResource() { return myResource; } - } public enum ReturnTypeEnum { BUNDLE, RESOURCE } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/ConditionalParamBinder.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/ConditionalParamBinder.java index e888a393c1f..37ce4bdf3eb 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/ConditionalParamBinder.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/ConditionalParamBinder.java @@ -19,18 +19,17 @@ */ package ca.uhn.fhir.rest.client.method; -import ca.uhn.fhir.i18n.Msg; -import java.lang.reflect.Method; -import java.util.*; - -import org.apache.commons.lang3.Validate; -import org.hl7.fhir.instance.model.api.IBaseResource; - import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.annotation.ConditionalUrlParam; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import org.apache.commons.lang3.Validate; +import org.hl7.fhir.instance.model.api.IBaseResource; + +import java.lang.reflect.Method; +import java.util.*; class ConditionalParamBinder implements IParameter { @@ -42,9 +41,17 @@ class ConditionalParamBinder implements IParameter { } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { - if (theOuterCollectionType != null || theInnerCollectionType != null || theParameterType.equals(String.class) == false) { - throw new ConfigurationException(Msg.code(1439) + "Parameters annotated with @" + ConditionalUrlParam.class.getSimpleName() + " must be of type String, found incorrect parameter in method \"" + theMethod + "\""); + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { + if (theOuterCollectionType != null + || theInnerCollectionType != null + || theParameterType.equals(String.class) == false) { + throw new ConfigurationException( + Msg.code(1439) + "Parameters annotated with @" + ConditionalUrlParam.class.getSimpleName() + + " must be of type String, found incorrect parameter in method \"" + theMethod + "\""); } } @@ -53,8 +60,13 @@ class ConditionalParamBinder implements IParameter { } @Override - public void translateClientArgumentIntoQueryArgument(FhirContext theContext, Object theSourceClientArgument, Map> theTargetQueryArguments, IBaseResource theTargetResource) throws InternalErrorException { - throw new UnsupportedOperationException(Msg.code(1440) + "Can not use @" + getClass().getName() + " annotated parameters in client"); + public void 
translateClientArgumentIntoQueryArgument( + FhirContext theContext, + Object theSourceClientArgument, + Map> theTargetQueryArguments, + IBaseResource theTargetResource) + throws InternalErrorException { + throw new UnsupportedOperationException( + Msg.code(1440) + "Can not use @" + getClass().getName() + " annotated parameters in client"); } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/ConformanceMethodBinding.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/ConformanceMethodBinding.java index 9a0e72d772c..0070396bbf2 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/ConformanceMethodBinding.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/ConformanceMethodBinding.java @@ -19,16 +19,15 @@ */ package ca.uhn.fhir.rest.client.method; -import ca.uhn.fhir.i18n.Msg; -import java.lang.reflect.Method; - -import org.hl7.fhir.instance.model.api.IBaseConformance; - import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.valueset.BundleTypeEnum; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import org.hl7.fhir.instance.model.api.IBaseConformance; + +import java.lang.reflect.Method; public class ConformanceMethodBinding extends BaseResourceReturningMethodBinding { @@ -37,10 +36,12 @@ public class ConformanceMethodBinding extends BaseResourceReturningMethodBinding MethodReturnTypeEnum methodReturnType = getMethodReturnType(); Class genericReturnType = (Class) theMethod.getGenericReturnType(); - if (methodReturnType != MethodReturnTypeEnum.RESOURCE || !IBaseConformance.class.isAssignableFrom(genericReturnType)) { - throw new ConfigurationException(Msg.code(1426) + "Conformance resource provider method '" + theMethod.getName() + "' should return a Conformance resource class, returns: " + theMethod.getReturnType()); + if (methodReturnType != MethodReturnTypeEnum.RESOURCE + || !IBaseConformance.class.isAssignableFrom(genericReturnType)) { + throw new ConfigurationException( + Msg.code(1426) + "Conformance resource provider method '" + theMethod.getName() + + "' should return a Conformance resource class, returns: " + theMethod.getReturnType()); } - } @Override @@ -62,7 +63,6 @@ public class ConformanceMethodBinding extends BaseResourceReturningMethodBinding return retVal; } - @Override public RestOperationTypeEnum getRestOperationType() { return RestOperationTypeEnum.METADATA; @@ -72,5 +72,4 @@ public class ConformanceMethodBinding extends BaseResourceReturningMethodBinding protected BundleTypeEnum getResponseBundleType() { return null; } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/CountParameter.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/CountParameter.java index 60b714b2098..d56f7bd2875 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/CountParameter.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/CountParameter.java @@ -19,28 +19,30 @@ */ package ca.uhn.fhir.rest.client.method; +import ca.uhn.fhir.context.ConfigurationException; +import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.i18n.Msg; +import ca.uhn.fhir.model.primitive.IntegerDt; +import ca.uhn.fhir.rest.annotation.Count; +import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.rest.param.ParameterUtil; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; 
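The IParameter binders being reformatted in these hunks (CountParameter, ConditionalParamBinder, ElementsParameter, BaseQueryParameter) all follow the same pattern: translateClientArgumentIntoQueryArgument copies a client-side method argument into a shared query-argument map keyed by the FHIR parameter name. The following is a minimal, self-contained sketch of that map-population pattern only; the class name, the hard-coded "_count" key, and the main method are illustrative assumptions and are not part of this patch.

// Illustrative sketch -- mirrors how the binders above add values to the
// query-argument map that is later used to build the request URL.
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

class QueryParamSketch {
    static void putQueryParam(Map<String, List<String>> target, String name, String value) {
        // computeIfAbsent preserves any values already collected for a repeated parameter
        target.computeIfAbsent(name, k -> new ArrayList<>(1)).add(value);
    }

    public static void main(String[] args) {
        Map<String, List<String>> queryArgs = new HashMap<>();
        putQueryParam(queryArgs, "_count", "10");
        System.out.println(queryArgs); // prints {_count=[10]}
    }
}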
+import org.hl7.fhir.instance.model.api.IBaseResource; + import java.lang.reflect.Method; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; -import org.hl7.fhir.instance.model.api.IBaseResource; - -import ca.uhn.fhir.context.ConfigurationException; -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.model.primitive.IntegerDt; -import ca.uhn.fhir.rest.annotation.Count; -import ca.uhn.fhir.rest.annotation.Since; -import ca.uhn.fhir.rest.api.Constants; -import ca.uhn.fhir.rest.param.ParameterUtil; -import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; - public class CountParameter implements IParameter { @Override - public void translateClientArgumentIntoQueryArgument(FhirContext theContext, Object theSourceClientArgument, Map> theTargetQueryArguments, IBaseResource theTargetResource) + public void translateClientArgumentIntoQueryArgument( + FhirContext theContext, + Object theSourceClientArgument, + Map> theTargetQueryArguments, + IBaseResource theTargetResource) throws InternalErrorException { if (theSourceClientArgument != null) { IntegerDt since = ParameterUtil.toInteger(theSourceClientArgument); @@ -51,15 +53,20 @@ public class CountParameter implements IParameter { } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { if (theOuterCollectionType != null) { - throw new ConfigurationException(Msg.code(1420) + "Method '" + theMethod.getName() + "' in type '" + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + Count.class.getName() + throw new ConfigurationException(Msg.code(1420) + "Method '" + theMethod.getName() + "' in type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + Count.class.getName() + " but can not be of collection type"); } if (!ParameterUtil.isBindableIntegerType(theParameterType)) { - throw new ConfigurationException(Msg.code(1421) + "Method '" + theMethod.getName() + "' in type '" + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + Count.class.getName() + throw new ConfigurationException(Msg.code(1421) + "Method '" + theMethod.getName() + "' in type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + Count.class.getName() + " but type '" + theParameterType + "' is an invalid type, must be Integer or IntegerType"); } } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/CreateMethodBinding.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/CreateMethodBinding.java index 7440d42f20e..1a37c46fbdc 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/CreateMethodBinding.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/CreateMethodBinding.java @@ -19,24 +19,22 @@ */ package ca.uhn.fhir.rest.client.method; -import ca.uhn.fhir.i18n.Msg; -import static org.apache.commons.lang3.StringUtils.isNotBlank; - -import java.lang.reflect.Method; -import java.util.Collections; -import java.util.Set; - -import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.instance.model.api.IIdType; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; -import ca.uhn.fhir.model.api.IResource; +import ca.uhn.fhir.i18n.Msg; import 
ca.uhn.fhir.rest.annotation.Create; import ca.uhn.fhir.rest.api.RequestTypeEnum; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.client.impl.BaseHttpClientInvocation; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; + +import java.lang.reflect.Method; +import java.util.Collections; +import java.util.Set; + +import static org.apache.commons.lang3.StringUtils.isNotBlank; public class CreateMethodBinding extends BaseOutcomeReturningMethodBindingWithResourceParam { @@ -76,19 +74,26 @@ public class CreateMethodBinding extends BaseOutcomeReturningMethodBindingWithRe } @Override - protected void validateResourceIdAndUrlIdForNonConditionalOperation(IBaseResource theResource, String theResourceId, String theUrlId, String theMatchUrl) { + protected void validateResourceIdAndUrlIdForNonConditionalOperation( + IBaseResource theResource, String theResourceId, String theUrlId, String theMatchUrl) { if (isNotBlank(theUrlId)) { - String msg = getContext().getLocalizer().getMessage(BaseOutcomeReturningMethodBindingWithResourceParam.class, "idInUrlForCreate", theUrlId); + String msg = getContext() + .getLocalizer() + .getMessage(BaseOutcomeReturningMethodBindingWithResourceParam.class, "idInUrlForCreate", theUrlId); throw new InvalidRequestException(Msg.code(1411) + msg); } if (getContext().getVersion().getVersion().isOlderThan(FhirVersionEnum.DSTU3)) { if (isNotBlank(theResourceId)) { - String msg = getContext().getLocalizer().getMessage(BaseOutcomeReturningMethodBindingWithResourceParam.class, "idInBodyForCreate", theResourceId); + String msg = getContext() + .getLocalizer() + .getMessage( + BaseOutcomeReturningMethodBindingWithResourceParam.class, + "idInBodyForCreate", + theResourceId); throw new InvalidRequestException(Msg.code(1412) + msg); } } else { - theResource.setId((IIdType)null); + theResource.setId((IIdType) null); } } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/DeleteMethodBinding.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/DeleteMethodBinding.java index 19eb6b1b58d..a69d57f5c62 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/DeleteMethodBinding.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/DeleteMethodBinding.java @@ -19,25 +19,29 @@ */ package ca.uhn.fhir.rest.client.method; -import ca.uhn.fhir.i18n.Msg; -import java.lang.reflect.Method; -import java.util.*; - -import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.instance.model.api.IIdType; - import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.annotation.Delete; import ca.uhn.fhir.rest.api.RequestTypeEnum; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.client.impl.BaseHttpClientInvocation; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; + +import java.lang.reflect.Method; +import java.util.*; public class DeleteMethodBinding extends BaseOutcomeReturningMethodBindingWithResourceIdButNoResourceBody { public DeleteMethodBinding(Method theMethod, FhirContext theContext, Object theProvider) { - super(theMethod, theContext, theProvider, Delete.class, theMethod.getAnnotation(Delete.class).type()); + super( + 
theMethod, + theContext, + theProvider, + Delete.class, + theMethod.getAnnotation(Delete.class).type()); } @Override @@ -73,7 +77,8 @@ public class DeleteMethodBinding extends BaseOutcomeReturningMethodBindingWithRe if (id.hasResourceType() == false) { id = id.withResourceType(getResourceName()); } else if (getResourceName().equals(id.getResourceType()) == false) { - throw new InvalidRequestException(Msg.code(1473) + "ID parameter has the wrong resource type, expected '" + getResourceName() + "', found: " + id.getResourceType()); + throw new InvalidRequestException(Msg.code(1473) + "ID parameter has the wrong resource type, expected '" + + getResourceName() + "', found: " + id.getResourceType()); } HttpDeleteClientInvocation retVal = createDeleteInvocation(getContext(), id, Collections.emptyMap()); @@ -86,18 +91,18 @@ public class DeleteMethodBinding extends BaseOutcomeReturningMethodBindingWithRe return retVal; } - public static HttpDeleteClientInvocation createDeleteInvocation(FhirContext theContext, IIdType theId, Map> theAdditionalParams) { + public static HttpDeleteClientInvocation createDeleteInvocation( + FhirContext theContext, IIdType theId, Map> theAdditionalParams) { return new HttpDeleteClientInvocation(theContext, theId, theAdditionalParams); } - @Override protected String getMatchingOperation() { return null; } - public static HttpDeleteClientInvocation createDeleteInvocation(FhirContext theContext, String theSearchUrl, Map> theParams) { + public static HttpDeleteClientInvocation createDeleteInvocation( + FhirContext theContext, String theSearchUrl, Map> theParams) { return new HttpDeleteClientInvocation(theContext, theSearchUrl, theParams); } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/ElementsParameter.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/ElementsParameter.java index 8662adaf123..d40a6daf8a3 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/ElementsParameter.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/ElementsParameter.java @@ -19,8 +19,13 @@ */ package ca.uhn.fhir.rest.client.method; +import ca.uhn.fhir.context.ConfigurationException; +import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.i18n.Msg; -import static org.apache.commons.lang3.StringUtils.isNotBlank; +import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.rest.api.SummaryEnum; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import org.hl7.fhir.instance.model.api.IBaseResource; import java.lang.reflect.Method; import java.util.Collection; @@ -28,19 +33,17 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import org.hl7.fhir.instance.model.api.IBaseResource; - -import ca.uhn.fhir.context.ConfigurationException; -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.rest.api.Constants; -import ca.uhn.fhir.rest.api.SummaryEnum; -import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import static org.apache.commons.lang3.StringUtils.isNotBlank; public class ElementsParameter implements IParameter { @SuppressWarnings("unchecked") @Override - public void translateClientArgumentIntoQueryArgument(FhirContext theContext, Object theSourceClientArgument, Map> theTargetQueryArguments, IBaseResource theTargetResource) + public void translateClientArgumentIntoQueryArgument( + FhirContext theContext, + Object theSourceClientArgument, + Map> theTargetQueryArguments, + IBaseResource theTargetResource) throws InternalErrorException { if 
(theSourceClientArgument instanceof Collection) { StringBuilder values = new StringBuilder(); @@ -62,11 +65,15 @@ public class ElementsParameter implements IParameter { } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { if (theOuterCollectionType != null) { - throw new ConfigurationException(Msg.code(1445) + "Method '" + theMethod.getName() + "' in type '" + theMethod.getDeclaringClass().getCanonicalName() + "' is of type " + SummaryEnum.class + throw new ConfigurationException(Msg.code(1445) + "Method '" + theMethod.getName() + "' in type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' is of type " + SummaryEnum.class + " but can not be a collection of collections"); } } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HistoryMethodBinding.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HistoryMethodBinding.java index 71c9157b6b6..86997ad19e1 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HistoryMethodBinding.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HistoryMethodBinding.java @@ -19,27 +19,26 @@ */ package ca.uhn.fhir.rest.client.method; -import ca.uhn.fhir.i18n.Msg; -import static org.apache.commons.lang3.StringUtils.isBlank; -import static org.apache.commons.lang3.StringUtils.isNotBlank; - -import java.lang.reflect.Method; -import java.lang.reflect.Modifier; -import java.util.Date; - -import ca.uhn.fhir.rest.param.DateParam; -import ca.uhn.fhir.rest.param.DateRangeParam; -import org.hl7.fhir.instance.model.api.*; - import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.IResource; import ca.uhn.fhir.model.valueset.BundleTypeEnum; import ca.uhn.fhir.rest.annotation.History; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.client.impl.BaseHttpClientInvocation; +import ca.uhn.fhir.rest.param.DateParam; +import ca.uhn.fhir.rest.param.DateRangeParam; import ca.uhn.fhir.rest.param.ParameterUtil; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import org.hl7.fhir.instance.model.api.*; + +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; +import java.util.Date; + +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.apache.commons.lang3.StringUtils.isNotBlank; public class HistoryMethodBinding extends BaseResourceReturningMethodBinding { @@ -69,7 +68,6 @@ public class HistoryMethodBinding extends BaseResourceReturningMethodBinding { } else { myResourceName = null; } - } @Override @@ -99,19 +97,27 @@ public class HistoryMethodBinding extends BaseResourceReturningMethodBinding { } String historyId = id != null ? 
id.getIdPart() : null; - HttpGetClientInvocation retVal = createHistoryInvocation(getContext(), resourceName, historyId, null, null, null); + HttpGetClientInvocation retVal = + createHistoryInvocation(getContext(), resourceName, historyId, null, null, null); if (theArgs != null) { for (int idx = 0; idx < theArgs.length; idx++) { IParameter nextParam = getParameters().get(idx); - nextParam.translateClientArgumentIntoQueryArgument(getContext(), theArgs[idx], retVal.getParameters(), null); + nextParam.translateClientArgumentIntoQueryArgument( + getContext(), theArgs[idx], retVal.getParameters(), null); } } return retVal; } - public static HttpGetClientInvocation createHistoryInvocation(FhirContext theContext, String theResourceName, String theId, IPrimitiveType theSince, Integer theLimit, DateRangeParam theAt) { + public static HttpGetClientInvocation createHistoryInvocation( + FhirContext theContext, + String theResourceName, + String theId, + IPrimitiveType theSince, + Integer theLimit, + DateRangeParam theAt) { StringBuilder b = new StringBuilder(); if (theResourceName != null) { b.append(theResourceName); @@ -157,5 +163,4 @@ public class HistoryMethodBinding extends BaseResourceReturningMethodBinding { } return null; } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HttpDeleteClientInvocation.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HttpDeleteClientInvocation.java index c67c34d67d0..8018a515dfb 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HttpDeleteClientInvocation.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HttpDeleteClientInvocation.java @@ -19,36 +19,41 @@ */ package ca.uhn.fhir.rest.client.method; -import java.util.List; -import java.util.Map; - -import org.hl7.fhir.instance.model.api.IIdType; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.rest.api.EncodingEnum; import ca.uhn.fhir.rest.api.RequestTypeEnum; import ca.uhn.fhir.rest.client.api.IHttpRequest; import ca.uhn.fhir.rest.client.impl.BaseHttpClientInvocation; +import org.hl7.fhir.instance.model.api.IIdType; + +import java.util.List; +import java.util.Map; public class HttpDeleteClientInvocation extends BaseHttpClientInvocation { private String myUrlPath; private Map> myParams; - public HttpDeleteClientInvocation(FhirContext theContext, IIdType theId, Map> theAdditionalParams) { + public HttpDeleteClientInvocation( + FhirContext theContext, IIdType theId, Map> theAdditionalParams) { super(theContext); myUrlPath = theId.toUnqualifiedVersionless().getValue(); myParams = theAdditionalParams; } - public HttpDeleteClientInvocation(FhirContext theContext, String theSearchUrl, Map> theParams) { + public HttpDeleteClientInvocation( + FhirContext theContext, String theSearchUrl, Map> theParams) { super(theContext); myUrlPath = theSearchUrl; myParams = theParams; } @Override - public IHttpRequest asHttpRequest(String theUrlBase, Map> theExtraParams, EncodingEnum theEncoding, Boolean thePrettyPrint) { + public IHttpRequest asHttpRequest( + String theUrlBase, + Map> theExtraParams, + EncodingEnum theEncoding, + Boolean thePrettyPrint) { StringBuilder b = new StringBuilder(); b.append(theUrlBase); if (!theUrlBase.endsWith("/")) { @@ -61,5 +66,4 @@ public class HttpDeleteClientInvocation extends BaseHttpClientInvocation { return createHttpRequest(b.toString(), theEncoding, RequestTypeEnum.DELETE); } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HttpGetClientInvocation.java 
b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HttpGetClientInvocation.java index 4a6a594deaf..a26c7b45dd6 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HttpGetClientInvocation.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HttpGetClientInvocation.java @@ -43,11 +43,16 @@ public class HttpGetClientInvocation extends BaseHttpClientInvocation { private final String myUrlPath; private final UrlSourceEnum myUrlSource; - public HttpGetClientInvocation(FhirContext theContext, Map> theParameters, String... theUrlFragments) { + public HttpGetClientInvocation( + FhirContext theContext, Map> theParameters, String... theUrlFragments) { this(theContext, theParameters, UrlSourceEnum.GENERATED, theUrlFragments); } - public HttpGetClientInvocation(FhirContext theContext, Map> theParameters, UrlSourceEnum theUrlSource, String... theUrlFragments) { + public HttpGetClientInvocation( + FhirContext theContext, + Map> theParameters, + UrlSourceEnum theUrlSource, + String... theUrlFragments) { super(theContext); myParameters = theParameters; myUrlPath = StringUtils.join(theUrlFragments, '/'); @@ -61,7 +66,6 @@ public class HttpGetClientInvocation extends BaseHttpClientInvocation { myUrlSource = UrlSourceEnum.GENERATED; } - private boolean addQueryParameter(StringBuilder b, boolean first, String nextKey, String nextValue) { boolean retVal = first; if (retVal) { @@ -78,7 +82,11 @@ public class HttpGetClientInvocation extends BaseHttpClientInvocation { } @Override - public IHttpRequest asHttpRequest(String theUrlBase, Map> theExtraParams, EncodingEnum theEncoding, Boolean thePrettyPrint) { + public IHttpRequest asHttpRequest( + String theUrlBase, + Map> theExtraParams, + EncodingEnum theEncoding, + Boolean thePrettyPrint) { StringBuilder b = new StringBuilder(); if (!myUrlPath.contains("://")) { @@ -115,5 +123,4 @@ public class HttpGetClientInvocation extends BaseHttpClientInvocation { public String getUrlPath() { return myUrlPath; } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HttpPatchClientInvocation.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HttpPatchClientInvocation.java index 4936e407124..12c81ef31ed 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HttpPatchClientInvocation.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HttpPatchClientInvocation.java @@ -19,17 +19,16 @@ */ package ca.uhn.fhir.rest.client.method; -import java.util.List; -import java.util.Map; - -import org.hl7.fhir.instance.model.api.IIdType; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.rest.api.EncodingEnum; import ca.uhn.fhir.rest.api.RequestTypeEnum; import ca.uhn.fhir.rest.client.api.IHttpClient; import ca.uhn.fhir.rest.client.api.IHttpRequest; import ca.uhn.fhir.rest.client.impl.BaseHttpClientInvocation; +import org.hl7.fhir.instance.model.api.IIdType; + +import java.util.List; +import java.util.Map; public class HttpPatchClientInvocation extends BaseHttpClientInvocation { @@ -45,7 +44,8 @@ public class HttpPatchClientInvocation extends BaseHttpClientInvocation { myContents = theContents; } - public HttpPatchClientInvocation(FhirContext theContext, String theUrlPath, String theContentType, String theContents) { + public HttpPatchClientInvocation( + FhirContext theContext, String theUrlPath, String theContentType, String theContents) { super(theContext); myUrlPath = theUrlPath; myContentType = theContentType; @@ -53,7 +53,11 @@ public class 
HttpPatchClientInvocation extends BaseHttpClientInvocation { } @Override - public IHttpRequest asHttpRequest(String theUrlBase, Map> theExtraParams, EncodingEnum theEncoding, Boolean thePrettyPrint) { + public IHttpRequest asHttpRequest( + String theUrlBase, + Map> theExtraParams, + EncodingEnum theEncoding, + Boolean thePrettyPrint) { StringBuilder b = new StringBuilder(); b.append(theUrlBase); if (!theUrlBase.endsWith("/")) { @@ -64,16 +68,13 @@ public class HttpPatchClientInvocation extends BaseHttpClientInvocation { appendExtraParamsWithQuestionMark(myParams, b, b.indexOf("?") == -1); appendExtraParamsWithQuestionMark(theExtraParams, b, b.indexOf("?") == -1); - - return createHttpRequest(b.toString(), theEncoding, RequestTypeEnum.PATCH); } @Override protected IHttpRequest createHttpRequest(String theUrl, EncodingEnum theEncoding, RequestTypeEnum theRequestType) { - IHttpClient httpClient = getRestfulClientFactory().getHttpClient(new StringBuilder(theUrl), null, null, theRequestType, getHeaders()); + IHttpClient httpClient = getRestfulClientFactory() + .getHttpClient(new StringBuilder(theUrl), null, null, theRequestType, getHeaders()); return httpClient.createByteRequest(getContext(), myContents, myContentType, null); } - - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HttpPostClientInvocation.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HttpPostClientInvocation.java index dc53d89f3d9..d28a12a9fd6 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HttpPostClientInvocation.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HttpPostClientInvocation.java @@ -19,18 +19,16 @@ */ package ca.uhn.fhir.rest.client.method; -import java.util.List; -import java.util.Map; - -import org.hl7.fhir.instance.model.api.IBaseResource; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.model.valueset.BundleTypeEnum; import ca.uhn.fhir.rest.api.RequestTypeEnum; +import org.hl7.fhir.instance.model.api.IBaseResource; + +import java.util.List; +import java.util.Map; public class HttpPostClientInvocation extends BaseHttpClientInvocationWithContents { - public HttpPostClientInvocation(FhirContext theContext, IBaseResource theResource, String theUrlExtension) { super(theContext, theResource, theUrlExtension); } @@ -39,15 +37,18 @@ public class HttpPostClientInvocation extends BaseHttpClientInvocationWithConten super(theContext, theResource, null); } - public HttpPostClientInvocation(FhirContext theContext, List theResources, BundleTypeEnum theBundleType) { + public HttpPostClientInvocation( + FhirContext theContext, List theResources, BundleTypeEnum theBundleType) { super(theContext, theResources, theBundleType); } - public HttpPostClientInvocation(FhirContext theContext, String theContents, boolean theIsBundle, String theUrlExtension) { + public HttpPostClientInvocation( + FhirContext theContext, String theContents, boolean theIsBundle, String theUrlExtension) { super(theContext, theContents, theIsBundle, theUrlExtension); } - public HttpPostClientInvocation(FhirContext theContext, Map> theParams, String... theUrlExtension) { + public HttpPostClientInvocation( + FhirContext theContext, Map> theParams, String... 
theUrlExtension) { super(theContext, theParams, theUrlExtension); } @@ -55,5 +56,4 @@ public class HttpPostClientInvocation extends BaseHttpClientInvocationWithConten protected RequestTypeEnum getRequestType() { return RequestTypeEnum.POST; } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HttpPutClientInvocation.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HttpPutClientInvocation.java index 0d659660751..acd82a18ef2 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HttpPutClientInvocation.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HttpPutClientInvocation.java @@ -19,10 +19,9 @@ */ package ca.uhn.fhir.rest.client.method; -import org.hl7.fhir.instance.model.api.IBaseResource; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.rest.api.RequestTypeEnum; +import org.hl7.fhir.instance.model.api.IBaseResource; public class HttpPutClientInvocation extends BaseHttpClientInvocationWithContents { @@ -30,7 +29,8 @@ public class HttpPutClientInvocation extends BaseHttpClientInvocationWithContent super(theContext, theResource, theUrlExtension); } - public HttpPutClientInvocation(FhirContext theContext, String theContents, boolean theIsBundle, String theUrlExtension) { + public HttpPutClientInvocation( + FhirContext theContext, String theContents, boolean theIsBundle, String theUrlExtension) { super(theContext, theContents, theIsBundle, theUrlExtension); } @@ -38,5 +38,4 @@ public class HttpPutClientInvocation extends BaseHttpClientInvocationWithContent protected RequestTypeEnum getRequestType() { return RequestTypeEnum.PUT; } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HttpSimpleClientInvocation.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HttpSimpleClientInvocation.java index ef9de50f79f..25f6f9e1b64 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HttpSimpleClientInvocation.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/HttpSimpleClientInvocation.java @@ -36,14 +36,19 @@ public class HttpSimpleClientInvocation extends BaseHttpClientInvocation { private PagingHttpMethodEnum myPagingHttpMethod; - public HttpSimpleClientInvocation(FhirContext theContext, String theUrlPath, PagingHttpMethodEnum thePagingHttpMethod) { + public HttpSimpleClientInvocation( + FhirContext theContext, String theUrlPath, PagingHttpMethodEnum thePagingHttpMethod) { super(theContext); myUrl = theUrlPath; myPagingHttpMethod = thePagingHttpMethod; } @Override - public IHttpRequest asHttpRequest(String theUrlBase, Map> theExtraParams, EncodingEnum theEncoding, Boolean thePrettyPrint) { + public IHttpRequest asHttpRequest( + String theUrlBase, + Map> theExtraParams, + EncodingEnum theEncoding, + Boolean thePrettyPrint) { IHttpRequest retVal = createHttpRequest(myUrl, theEncoding, myPagingHttpMethod.getRequestType()); retVal.setUrlSource(myUrlSource); return retVal; diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/IClientResponseHandler.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/IClientResponseHandler.java index a3e11f61c09..c45881c11c8 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/IClientResponseHandler.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/IClientResponseHandler.java @@ -19,16 +19,19 @@ */ package ca.uhn.fhir.rest.client.method; +import 
ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; + import java.io.IOException; import java.io.InputStream; -import java.io.Reader; import java.util.List; import java.util.Map; -import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; - public interface IClientResponseHandler { - T invokeClient(String theResponseMimeType, InputStream theResponseInputStream, int theResponseStatusCode, Map> theHeaders) throws IOException, BaseServerResponseException; - + T invokeClient( + String theResponseMimeType, + InputStream theResponseInputStream, + int theResponseStatusCode, + Map> theHeaders) + throws IOException, BaseServerResponseException; } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/IClientResponseHandlerHandlesBinary.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/IClientResponseHandlerHandlesBinary.java index 4fd0469bdd4..d39cdd74996 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/IClientResponseHandlerHandlesBinary.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/IClientResponseHandlerHandlesBinary.java @@ -19,21 +19,25 @@ */ package ca.uhn.fhir.rest.client.method; +import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; + import java.io.IOException; import java.io.InputStream; import java.io.Reader; import java.util.List; import java.util.Map; -import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; - public interface IClientResponseHandlerHandlesBinary extends IClientResponseHandler { /** * If this method returns true, {@link #invokeClient(String, InputStream, int, Map)} should be invoked instead of {@link #invokeClient(String, Reader, int, Map)} */ boolean isBinary(); - - T invokeClientForBinary(String theResponseMimeType, InputStream theResponseReader, int theResponseStatusCode, Map> theHeaders) throws IOException, BaseServerResponseException; + T invokeClientForBinary( + String theResponseMimeType, + InputStream theResponseReader, + int theResponseStatusCode, + Map> theHeaders) + throws IOException, BaseServerResponseException; } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/IParameter.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/IParameter.java index 584531d1dc3..c9e884e9f4e 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/IParameter.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/IParameter.java @@ -19,21 +19,27 @@ */ package ca.uhn.fhir.rest.client.method; +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import org.hl7.fhir.instance.model.api.IBaseResource; + import java.lang.reflect.Method; import java.util.Collection; import java.util.List; import java.util.Map; -import org.hl7.fhir.instance.model.api.IBaseResource; - -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; - public interface IParameter { - void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType); + void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType); - void translateClientArgumentIntoQueryArgument(FhirContext theContext, Object theSourceClientArgument, Map> theTargetQueryArguments, IBaseResource theTargetResource) + void translateClientArgumentIntoQueryArgument( + FhirContext theContext, + Object 
theSourceClientArgument, + Map> theTargetQueryArguments, + IBaseResource theTargetResource) throws InternalErrorException; - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/IRestfulHeader.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/IRestfulHeader.java index 61e92e117db..ba12d9e238f 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/IRestfulHeader.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/IRestfulHeader.java @@ -19,6 +19,4 @@ */ package ca.uhn.fhir.rest.client.method; -public interface IRestfulHeader { - -} +public interface IRestfulHeader {} diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/IncludeParameter.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/IncludeParameter.java index f1da6e62379..412a90c71fa 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/IncludeParameter.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/IncludeParameter.java @@ -19,10 +19,10 @@ */ package ca.uhn.fhir.rest.client.method; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.Include; import ca.uhn.fhir.rest.annotation.IncludeParam; import ca.uhn.fhir.rest.api.Constants; @@ -44,8 +44,10 @@ class IncludeParameter extends BaseQueryParameter { private Class mySpecType; private boolean myReverse; - - public IncludeParameter(IncludeParam theAnnotation, Class> theInstantiableCollectionType, Class theSpecType) { + public IncludeParameter( + IncludeParam theAnnotation, + Class> theInstantiableCollectionType, + Class theSpecType) { myInstantiableCollectionType = theInstantiableCollectionType; myReverse = theAnnotation.reverse(); if (theAnnotation.allow().length > 0) { @@ -61,9 +63,9 @@ class IncludeParameter extends BaseQueryParameter { mySpecType = theSpecType; if (mySpecType != Include.class && mySpecType != String.class) { - throw new ConfigurationException(Msg.code(1462) + "Invalid @" + IncludeParam.class.getSimpleName() + " parameter type: " + mySpecType); + throw new ConfigurationException(Msg.code(1462) + "Invalid @" + IncludeParam.class.getSimpleName() + + " parameter type: " + mySpecType); } - } @SuppressWarnings("unchecked") @@ -87,7 +89,8 @@ class IncludeParameter extends BaseQueryParameter { return retVal; } - private void convertAndAddIncludeToList(ArrayList theQualifiedParamLists, Include theInclude, FhirContext theContext) { + private void convertAndAddIncludeToList( + ArrayList theQualifiedParamLists, Include theInclude, FhirContext theContext) { String qualifier = null; if (theInclude.isRecurse()) { if (theContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.R4)) { @@ -117,5 +120,4 @@ class IncludeParameter extends BaseQueryParameter { public boolean isRequired() { return false; } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/MethodUtil.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/MethodUtil.java index 50bdba8afd3..5021374f9b6 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/MethodUtil.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/MethodUtil.java @@ -92,8 +92,8 @@ public class MethodUtil { // nothing } - public static void addAcceptHeaderToRequest(EncodingEnum theEncoding, IHttpRequest theHttpRequest, - 
FhirContext theContext) { + public static void addAcceptHeaderToRequest( + EncodingEnum theEncoding, IHttpRequest theHttpRequest, FhirContext theContext) { if (theEncoding == null) { if (theContext.getVersion().getVersion().isNewerThan(FhirVersionEnum.DSTU2_1) == false) { theHttpRequest.addHeader(Constants.HEADER_ACCEPT, Constants.HEADER_ACCEPT_VALUE_XML_OR_JSON_LEGACY); @@ -113,7 +113,6 @@ public class MethodUtil { theHttpRequest.addHeader(Constants.HEADER_ACCEPT, Constants.HEADER_ACCEPT_VALUE_XML_NON_LEGACY); } } - } public static HttpGetClientInvocation createConformanceInvocation(FhirContext theContext) { @@ -124,8 +123,8 @@ public class MethodUtil { return createCreateInvocation(theResource, null, theContext); } - public static HttpPostClientInvocation createCreateInvocation(IBaseResource theResource, String theResourceBody, - FhirContext theContext) { + public static HttpPostClientInvocation createCreateInvocation( + IBaseResource theResource, String theResourceBody, FhirContext theContext) { RuntimeResourceDefinition def = theContext.getResourceDefinition(theResource); String resourceName = def.getName(); @@ -144,38 +143,48 @@ public class MethodUtil { return retVal; } - public static HttpPostClientInvocation createCreateInvocation(IBaseResource theResource, String theResourceBody, - FhirContext theContext, Map> theIfNoneExistParams) { + public static HttpPostClientInvocation createCreateInvocation( + IBaseResource theResource, + String theResourceBody, + FhirContext theContext, + Map> theIfNoneExistParams) { HttpPostClientInvocation retVal = createCreateInvocation(theResource, theResourceBody, theContext); retVal.setIfNoneExistParams(theIfNoneExistParams); return retVal; } - public static HttpPostClientInvocation createCreateInvocation(IBaseResource theResource, String theResourceBody, - FhirContext theContext, String theIfNoneExistUrl) { + public static HttpPostClientInvocation createCreateInvocation( + IBaseResource theResource, String theResourceBody, FhirContext theContext, String theIfNoneExistUrl) { HttpPostClientInvocation retVal = createCreateInvocation(theResource, theResourceBody, theContext); retVal.setIfNoneExistString(theIfNoneExistUrl); return retVal; } - public static HttpPatchClientInvocation createPatchInvocation(FhirContext theContext, IIdType theId, - PatchTypeEnum thePatchType, String theBody) { + public static HttpPatchClientInvocation createPatchInvocation( + FhirContext theContext, IIdType theId, PatchTypeEnum thePatchType, String theBody) { return PatchMethodBinding.createPatchInvocation(theContext, theId, thePatchType, theBody); } - public static HttpPatchClientInvocation createPatchInvocation(FhirContext theContext, PatchTypeEnum thePatchType, - String theBody, String theResourceType, Map> theMatchParams) { - return PatchMethodBinding.createPatchInvocation(theContext, thePatchType, theBody, theResourceType, - theMatchParams); + public static HttpPatchClientInvocation createPatchInvocation( + FhirContext theContext, + PatchTypeEnum thePatchType, + String theBody, + String theResourceType, + Map> theMatchParams) { + return PatchMethodBinding.createPatchInvocation( + theContext, thePatchType, theBody, theResourceType, theMatchParams); } - public static HttpPatchClientInvocation createPatchInvocation(FhirContext theContext, String theUrl, - PatchTypeEnum thePatchType, String theBody) { + public static HttpPatchClientInvocation createPatchInvocation( + FhirContext theContext, String theUrl, PatchTypeEnum thePatchType, String theBody) { return 
PatchMethodBinding.createPatchInvocation(theContext, theUrl, thePatchType, theBody); } - public static HttpPutClientInvocation createUpdateInvocation(FhirContext theContext, IBaseResource theResource, - String theResourceBody, Map> theMatchParams) { + public static HttpPutClientInvocation createUpdateInvocation( + FhirContext theContext, + IBaseResource theResource, + String theResourceBody, + Map> theMatchParams) { String resourceType = theContext.getResourceType(theResource); StringBuilder b = createUrl(resourceType, theMatchParams); @@ -190,8 +199,8 @@ public class MethodUtil { return retVal; } - public static HttpPutClientInvocation createUpdateInvocation(FhirContext theContext, IBaseResource theResource, - String theResourceBody, String theMatchUrl) { + public static HttpPutClientInvocation createUpdateInvocation( + FhirContext theContext, IBaseResource theResource, String theResourceBody, String theMatchUrl) { HttpPutClientInvocation retVal; if (StringUtils.isBlank(theResourceBody)) { retVal = new HttpPutClientInvocation(theContext, theResource, theMatchUrl); @@ -202,8 +211,8 @@ public class MethodUtil { return retVal; } - public static HttpPutClientInvocation createUpdateInvocation(IBaseResource theResource, String theResourceBody, - IIdType theId, FhirContext theContext) { + public static HttpPutClientInvocation createUpdateInvocation( + IBaseResource theResource, String theResourceBody, IIdType theId, FhirContext theContext) { String resourceName = theContext.getResourceType(theResource); StringBuilder urlBuilder = new StringBuilder(); urlBuilder.append(resourceName); @@ -227,8 +236,8 @@ public class MethodUtil { return retVal; } - public static HttpPutClientInvocation createUpdateHistoryRewriteInvocation(IBaseResource theResource, String theResourceBody, - IIdType theId, FhirContext theContext) { + public static HttpPutClientInvocation createUpdateHistoryRewriteInvocation( + IBaseResource theResource, String theResourceBody, IIdType theId, FhirContext theContext) { String resourceName = theContext.getResourceType(theResource); StringBuilder urlBuilder = new StringBuilder(); urlBuilder.append(resourceName); @@ -272,8 +281,11 @@ public class MethodUtil { } @SuppressWarnings("unchecked") - public static List getResourceParameters(final FhirContext theContext, Method theMethod, - Object theProvider, RestOperationTypeEnum theRestfulOperationTypeEnum) { + public static List getResourceParameters( + final FhirContext theContext, + Method theMethod, + Object theProvider, + RestOperationTypeEnum theRestfulOperationTypeEnum) { List parameters = new ArrayList<>(); Class[] parameterTypes = theMethod.getParameterTypes(); @@ -298,9 +310,11 @@ public class MethodUtil { parameterType = ReflectionUtil.getGenericCollectionTypeOfMethodParameter(theMethod, paramIndex); } if (Collection.class.isAssignableFrom(parameterType)) { - throw new ConfigurationException(Msg.code(1433) + "Argument #" + paramIndex + " of Method '" + theMethod.getName() - + "' in type '" + theMethod.getDeclaringClass().getCanonicalName() - + "' is of an invalid generic type (can not be a collection of a collection of a collection)"); + throw new ConfigurationException( + Msg.code(1433) + "Argument #" + paramIndex + " of Method '" + theMethod.getName() + + "' in type '" + + theMethod.getDeclaringClass().getCanonicalName() + + "' is of an invalid generic type (can not be a collection of a collection of a collection)"); } } @@ -339,19 +353,22 @@ public class MethodUtil { if (parameterType == String.class) { 
instantiableCollectionType = null; specType = String.class; - } else if ((parameterType != Include.class) || innerCollectionType == null + } else if ((parameterType != Include.class) + || innerCollectionType == null || outerCollectionType != null) { - throw new ConfigurationException(Msg.code(1434) + "Method '" + theMethod.getName() + "' is annotated with @" + throw new ConfigurationException(Msg.code(1434) + "Method '" + theMethod.getName() + + "' is annotated with @" + IncludeParam.class.getSimpleName() + " but has a type other than Collection<" + Include.class.getSimpleName() + ">"); } else { - instantiableCollectionType = (Class>) CollectionBinder - .getInstantiableCollectionType(innerCollectionType, - "Method '" + theMethod.getName() + "'"); + instantiableCollectionType = (Class>) + CollectionBinder.getInstantiableCollectionType( + innerCollectionType, "Method '" + theMethod.getName() + "'"); specType = parameterType; } - param = new IncludeParameter((IncludeParam) nextAnnotation, instantiableCollectionType, specType); + param = new IncludeParameter( + (IncludeParam) nextAnnotation, instantiableCollectionType, specType); } else if (nextAnnotation instanceof ResourceParam) { if (IBaseResource.class.isAssignableFrom(parameterType)) { // good @@ -374,12 +391,12 @@ public class MethodUtil { param = new ElementsParameter(); } else if (nextAnnotation instanceof Since) { param = new SinceParameter(); - ((SinceParameter) param).setType(theContext, parameterType, innerCollectionType, - outerCollectionType); + ((SinceParameter) param) + .setType(theContext, parameterType, innerCollectionType, outerCollectionType); } else if (nextAnnotation instanceof At) { param = new AtParameter(); - ((AtParameter) param).setType(theContext, parameterType, innerCollectionType, - outerCollectionType); + ((AtParameter) param) + .setType(theContext, parameterType, innerCollectionType, outerCollectionType); } else if (nextAnnotation instanceof Count) { param = new CountParameter(); } else if (nextAnnotation instanceof Offset) { @@ -389,8 +406,8 @@ public class MethodUtil { } else if (nextAnnotation instanceof TransactionParam) { param = new TransactionParameter(theContext); } else if (nextAnnotation instanceof ConditionalUrlParam) { - param = new ConditionalParamBinder(theRestfulOperationTypeEnum, - ((ConditionalUrlParam) nextAnnotation).supportsMultiple()); + param = new ConditionalParamBinder( + theRestfulOperationTypeEnum, ((ConditionalUrlParam) nextAnnotation).supportsMultiple()); } else if (nextAnnotation instanceof OperationParam) { Operation op = theMethod.getAnnotation(Operation.class); param = new OperationParameter(theContext, op.name(), ((OperationParam) nextAnnotation)); @@ -400,12 +417,13 @@ public class MethodUtil { + Validate.class.getSimpleName() + "." 
+ Validate.Mode.class.getSimpleName() + " must be of type " + ValidationModeEnum.class.getName()); } - param = new OperationParameter(theContext, Constants.EXTOP_VALIDATE, - Constants.EXTOP_VALIDATE_MODE, 0, 1).setConverter(new IOperationParamConverter() { + param = new OperationParameter( + theContext, Constants.EXTOP_VALIDATE, Constants.EXTOP_VALIDATE_MODE, 0, 1) + .setConverter(new IOperationParamConverter() { @Override public Object outgoingClient(Object theObject) { - return ParametersUtil.createString(theContext, - ((ValidationModeEnum) theObject).getCode()); + return ParametersUtil.createString( + theContext, ((ValidationModeEnum) theObject).getCode()); } }); } else if (nextAnnotation instanceof Validate.Profile) { @@ -414,8 +432,9 @@ public class MethodUtil { + Validate.class.getSimpleName() + "." + Validate.Profile.class.getSimpleName() + " must be of type " + String.class.getName()); } - param = new OperationParameter(theContext, Constants.EXTOP_VALIDATE, - Constants.EXTOP_VALIDATE_PROFILE, 0, 1).setConverter(new IOperationParamConverter() { + param = new OperationParameter( + theContext, Constants.EXTOP_VALIDATE, Constants.EXTOP_VALIDATE_PROFILE, 0, 1) + .setConverter(new IOperationParamConverter() { @Override public Object outgoingClient(Object theObject) { @@ -425,16 +444,15 @@ public class MethodUtil { } else { continue; } - } - } if (param == null) { - throw new ConfigurationException(Msg.code(1438) + "Parameter #" + ((paramIndex + 1)) + "/" + (parameterTypes.length) - + " of method '" + theMethod.getName() + "' on type '" - + theMethod.getDeclaringClass().getCanonicalName() - + "' has no recognized FHIR interface parameter annotations. Don't know how to handle this parameter"); + throw new ConfigurationException( + Msg.code(1438) + "Parameter #" + ((paramIndex + 1)) + "/" + (parameterTypes.length) + + " of method '" + theMethod.getName() + "' on type '" + + theMethod.getDeclaringClass().getCanonicalName() + + "' has no recognized FHIR interface parameter annotations. 
Don't know how to handle this parameter"); } param.initializeTypes(theMethod, outerCollectionType, innerCollectionType, parameterType); @@ -445,8 +463,8 @@ public class MethodUtil { return parameters; } - public static void parseClientRequestResourceHeaders(IIdType theRequestedId, Map> theHeaders, - IBaseResource resource) { + public static void parseClientRequestResourceHeaders( + IIdType theRequestedId, Map> theHeaders, IBaseResource resource) { List lmHeaders = theHeaders.get(Constants.HEADER_LAST_MODIFIED_LOWERCASE); if (lmHeaders != null && lmHeaders.size() > 0 && StringUtils.isNotBlank(lmHeaders.get(0))) { String headerValue = lmHeaders.get(0); @@ -507,11 +525,14 @@ public class MethodUtil { theRequestedId.applyTo(resource); } } - } - public static MethodOutcome process2xxResponse(FhirContext theContext, int theResponseStatusCode, - String theResponseMimeType, InputStream theResponseReader, Map> theHeaders) { + public static MethodOutcome process2xxResponse( + FhirContext theContext, + int theResponseStatusCode, + String theResponseMimeType, + InputStream theResponseReader, + Map> theHeaders) { List locationHeaders = new ArrayList<>(); List lh = theHeaders.get(Constants.HEADER_LOCATION_LC); if (lh != null) { @@ -556,11 +577,10 @@ public class MethodUtil { } } else { - BaseOutcomeReturningMethodBinding.ourLog.debug("Ignoring response content of type: {}", - theResponseMimeType); + BaseOutcomeReturningMethodBinding.ourLog.debug( + "Ignoring response content of type: {}", theResponseMimeType); } } return retVal; } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/NullParameter.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/NullParameter.java index 6821158f919..d80cc262a19 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/NullParameter.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/NullParameter.java @@ -19,24 +19,31 @@ */ package ca.uhn.fhir.rest.client.method; -import java.lang.reflect.Method; -import java.util.*; - -import org.hl7.fhir.instance.model.api.IBaseResource; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import org.hl7.fhir.instance.model.api.IBaseResource; + +import java.lang.reflect.Method; +import java.util.*; class NullParameter implements IParameter { @Override - public void translateClientArgumentIntoQueryArgument(FhirContext theContext, Object theSourceClientArgument, Map> theTargetQueryArguments, IBaseResource theTargetResource) throws InternalErrorException { - //nothing - } - - @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void translateClientArgumentIntoQueryArgument( + FhirContext theContext, + Object theSourceClientArgument, + Map> theTargetQueryArguments, + IBaseResource theTargetResource) + throws InternalErrorException { // nothing } + @Override + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { + // nothing + } } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/OffsetParameter.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/OffsetParameter.java index 4acb821e580..edd8a345088 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/OffsetParameter.java +++ 
b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/OffsetParameter.java @@ -19,47 +19,56 @@ */ package ca.uhn.fhir.rest.client.method; +import ca.uhn.fhir.context.ConfigurationException; +import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.i18n.Msg; +import ca.uhn.fhir.model.primitive.IntegerDt; +import ca.uhn.fhir.rest.annotation.Offset; +import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.rest.param.ParameterUtil; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import org.hl7.fhir.instance.model.api.IBaseResource; + import java.lang.reflect.Method; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; -import org.hl7.fhir.instance.model.api.IBaseResource; - -import ca.uhn.fhir.context.ConfigurationException; -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.model.primitive.IntegerDt; -import ca.uhn.fhir.rest.annotation.Offset; -import ca.uhn.fhir.rest.annotation.Since; -import ca.uhn.fhir.rest.api.Constants; -import ca.uhn.fhir.rest.param.ParameterUtil; -import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; - public class OffsetParameter implements IParameter { @Override - public void translateClientArgumentIntoQueryArgument(FhirContext theContext, Object theSourceClientArgument, Map> theTargetQueryArguments, IBaseResource theTargetResource) + public void translateClientArgumentIntoQueryArgument( + FhirContext theContext, + Object theSourceClientArgument, + Map> theTargetQueryArguments, + IBaseResource theTargetResource) throws InternalErrorException { if (theSourceClientArgument != null) { IntegerDt since = ParameterUtil.toInteger(theSourceClientArgument); if (since.isEmpty() == false) { - theTargetQueryArguments.put(Constants.PARAM_OFFSET, Collections.singletonList(since.getValueAsString())); + theTargetQueryArguments.put( + Constants.PARAM_OFFSET, Collections.singletonList(since.getValueAsString())); } } } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { if (theOuterCollectionType != null) { - throw new ConfigurationException(Msg.code(1476) + "Method '" + theMethod.getName() + "' in type '" + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + Offset.class.getName() - + " but can not be of collection type"); + throw new ConfigurationException(Msg.code(1476) + "Method '" + theMethod.getName() + "' in type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + + Offset.class.getName() + " but can not be of collection type"); } if (!ParameterUtil.isBindableIntegerType(theParameterType)) { - throw new ConfigurationException(Msg.code(1477) + "Method '" + theMethod.getName() + "' in type '" + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + Offset.class.getName() - + " but type '" + theParameterType + "' is an invalid type, must be Integer or IntegerType"); + throw new ConfigurationException(Msg.code(1477) + "Method '" + theMethod.getName() + "' in type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + + Offset.class.getName() + " but type '" + theParameterType + + "' is an invalid type, must be Integer or IntegerType"); } } - } diff --git 
a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/OperationMethodBinding.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/OperationMethodBinding.java index 6303b04d322..b852238c9bc 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/OperationMethodBinding.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/OperationMethodBinding.java @@ -19,9 +19,9 @@ */ package ca.uhn.fhir.rest.client.method; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.annotation.Description; import ca.uhn.fhir.model.valueset.BundleTypeEnum; import ca.uhn.fhir.rest.annotation.Operation; @@ -54,9 +54,16 @@ public class OperationMethodBinding extends BaseResourceReturningMethodBinding { private BundleTypeEnum myBundleType; private String myDescription; - protected OperationMethodBinding(Class theReturnResourceType, Class theReturnTypeFromRp, Method theMethod, FhirContext theContext, Object theProvider, - boolean theIdempotent, String theOperationName, Class theOperationType, - BundleTypeEnum theBundleType) { + protected OperationMethodBinding( + Class theReturnResourceType, + Class theReturnTypeFromRp, + Method theMethod, + FhirContext theContext, + Object theProvider, + boolean theIdempotent, + String theOperationName, + Class theOperationType, + BundleTypeEnum theBundleType) { super(theReturnResourceType, theMethod, theContext, theProvider); myBundleType = theBundleType; @@ -72,8 +79,9 @@ public class OperationMethodBinding extends BaseResourceReturningMethodBinding { } if (isBlank(theOperationName)) { - throw new ConfigurationException(Msg.code(1452) + "Method '" + theMethod.getName() + "' on type " + theMethod.getDeclaringClass().getName() + " is annotated with @" + Operation.class.getSimpleName() - + " but this annotation has no name defined"); + throw new ConfigurationException(Msg.code(1452) + "Method '" + theMethod.getName() + "' on type " + + theMethod.getDeclaringClass().getName() + " is annotated with @" + Operation.class.getSimpleName() + + " but this annotation has no name defined"); } if (theOperationName.startsWith("$") == false) { theOperationName = "$" + theOperationName; @@ -99,12 +107,25 @@ public class OperationMethodBinding extends BaseResourceReturningMethodBinding { } else { myOtherOperationType = RestOperationTypeEnum.EXTENDED_OPERATION_INSTANCE; } - } - public OperationMethodBinding(Class theReturnResourceType, Class theReturnTypeFromRp, Method theMethod, FhirContext theContext, Object theProvider, - Operation theAnnotation) { - this(theReturnResourceType, theReturnTypeFromRp, theMethod, theContext, theProvider, theAnnotation.idempotent(), theAnnotation.name(), theAnnotation.type(), theAnnotation.bundleType()); + public OperationMethodBinding( + Class theReturnResourceType, + Class theReturnTypeFromRp, + Method theMethod, + FhirContext theContext, + Object theProvider, + Operation theAnnotation) { + this( + theReturnResourceType, + theReturnTypeFromRp, + theMethod, + theContext, + theProvider, + theAnnotation.idempotent(), + theAnnotation.name(), + theAnnotation.type(), + theAnnotation.bundleType()); } public String getDescription() { @@ -144,7 +165,8 @@ public class OperationMethodBinding extends BaseResourceReturningMethodBinding { IIdType idDt = (IIdType) theArgs[myIdParamIndex]; id = idDt.getValue(); } - IBaseParameters parameters = (IBaseParameters) 
getContext().getResourceDefinition("Parameters").newInstance(); + IBaseParameters parameters = (IBaseParameters) + getContext().getResourceDefinition("Parameters").newInstance(); if (theArgs != null) { for (int idx = 0; idx < theArgs.length; idx++) { @@ -160,8 +182,14 @@ public class OperationMethodBinding extends BaseResourceReturningMethodBinding { return myIdempotent; } - public static BaseHttpClientInvocation createOperationInvocation(FhirContext theContext, String theResourceName, String theId, String theVersion, String theOperationName, IBaseParameters theInput, - boolean theUseHttpGet) { + public static BaseHttpClientInvocation createOperationInvocation( + FhirContext theContext, + String theResourceName, + String theId, + String theVersion, + String theOperationName, + IBaseParameters theInput, + boolean theUseHttpGet) { StringBuilder b = new StringBuilder(); if (theResourceName != null) { b.append(theResourceName); @@ -192,7 +220,8 @@ public class OperationMethodBinding extends BaseResourceReturningMethodBinding { for (Object nextParameter : parameters) { IPrimitiveType nextNameDt = (IPrimitiveType) t.getSingleValueOrNull((IBase) nextParameter, "name"); if (nextNameDt == null || nextNameDt.isEmpty()) { - ourLog.warn("Ignoring input parameter with no value in Parameters.parameter.name in operation client invocation"); + ourLog.warn( + "Ignoring input parameter with no value in Parameters.parameter.name in operation client invocation"); continue; } String nextName = nextNameDt.getValueAsString(); @@ -205,7 +234,9 @@ public class OperationMethodBinding extends BaseResourceReturningMethodBinding { continue; } if (!(value instanceof IPrimitiveType)) { - throw new IllegalArgumentException(Msg.code(1453) + "Can not invoke operation as HTTP GET when it has parameters with a composite (non priitive) datatype as the value. Found value: " + value.getClass().getName()); + throw new IllegalArgumentException(Msg.code(1453) + + "Can not invoke operation as HTTP GET when it has parameters with a composite (non priitive) datatype as the value. 
Found value: " + + value.getClass().getName()); } IPrimitiveType primitive = (IPrimitiveType) value; params.get(nextName).add(primitive.getValueAsString()); @@ -213,7 +244,11 @@ public class OperationMethodBinding extends BaseResourceReturningMethodBinding { return new HttpGetClientInvocation(theContext, params, b.toString()); } - public static BaseHttpClientInvocation createProcessMsgInvocation(FhirContext theContext, String theOperationName, IBaseBundle theInput, Map> urlParams) { + public static BaseHttpClientInvocation createProcessMsgInvocation( + FhirContext theContext, + String theOperationName, + IBaseBundle theInput, + Map> urlParams) { StringBuilder b = new StringBuilder(); if (b.length() > 0) { @@ -227,7 +262,5 @@ public class OperationMethodBinding extends BaseResourceReturningMethodBinding { BaseHttpClientInvocation.appendExtraParamsWithQuestionMark(urlParams, b, b.indexOf("?") == -1); return new HttpPostClientInvocation(theContext, theInput, b.toString()); - } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/OperationParameter.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/OperationParameter.java index d2076119137..80fbbc51199 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/OperationParameter.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/OperationParameter.java @@ -19,33 +19,30 @@ */ package ca.uhn.fhir.rest.client.method; +import ca.uhn.fhir.context.ConfigurationException; +import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.i18n.Msg; -import java.lang.reflect.Method; -import java.lang.reflect.Modifier; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Map; - +import ca.uhn.fhir.model.api.IDatatype; +import ca.uhn.fhir.model.api.IQueryParameterAnd; +import ca.uhn.fhir.model.api.IQueryParameterOr; +import ca.uhn.fhir.model.api.IQueryParameterType; +import ca.uhn.fhir.rest.annotation.OperationParam; +import ca.uhn.fhir.rest.api.ValidationModeEnum; +import ca.uhn.fhir.rest.param.DateRangeParam; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import ca.uhn.fhir.util.ParametersUtil; import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseParameters; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import ca.uhn.fhir.context.ConfigurationException; -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.model.api.IDatatype; -import ca.uhn.fhir.model.api.IQueryParameterAnd; -import ca.uhn.fhir.model.api.IQueryParameterOr; -import ca.uhn.fhir.model.api.IQueryParameterType; -import ca.uhn.fhir.rest.annotation.OperationParam; -import ca.uhn.fhir.rest.api.QualifiedParamList; -import ca.uhn.fhir.rest.api.ValidationModeEnum; -import ca.uhn.fhir.rest.param.DateRangeParam; -import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; -import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; -import ca.uhn.fhir.util.ParametersUtil; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; +import java.util.Collection; +import java.util.List; +import java.util.Map; public class OperationParameter implements IParameter { @@ -56,8 +53,10 @@ public class OperationParameter implements IParameter { private final FhirContext myContext; private IOperationParamConverter myConverter; + 
@SuppressWarnings("rawtypes") private int myMax; + private int myMin; private final String myName; private Class myParameterType; @@ -79,7 +78,6 @@ public class OperationParameter implements IParameter { return myContext; } - public String getName() { return myName; } @@ -97,10 +95,17 @@ public class OperationParameter implements IParameter { @SuppressWarnings("unchecked") @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { if (getContext().getVersion().getVersion().isRi()) { if (IDatatype.class.isAssignableFrom(theParameterType)) { - throw new ConfigurationException(Msg.code(1408) + "Incorrect use of type " + theParameterType.getSimpleName() + " as parameter type for method when context is for version " + getContext().getVersion().getVersion().name() + " in method: " + theMethod.toString()); + throw new ConfigurationException(Msg.code(1408) + "Incorrect use of type " + + theParameterType.getSimpleName() + + " as parameter type for method when context is for version " + + getContext().getVersion().getVersion().name() + " in method: " + theMethod.toString()); } } @@ -121,12 +126,11 @@ public class OperationParameter implements IParameter { boolean typeIsConcrete = !myParameterType.isInterface() && !Modifier.isAbstract(myParameterType.getModifiers()); - //@formatter:off - boolean isSearchParam = - IQueryParameterType.class.isAssignableFrom(myParameterType) || - IQueryParameterOr.class.isAssignableFrom(myParameterType) || - IQueryParameterAnd.class.isAssignableFrom(myParameterType); - //@formatter:off + // @formatter:off + boolean isSearchParam = IQueryParameterType.class.isAssignableFrom(myParameterType) + || IQueryParameterOr.class.isAssignableFrom(myParameterType) + || IQueryParameterAnd.class.isAssignableFrom(myParameterType); + // @formatter:off /* * Note: We say here !IBase.class.isAssignableFrom because a bunch of DSTU1/2 datatypes also @@ -148,19 +152,21 @@ public class OperationParameter implements IParameter { } else if (myParameterType.equals(ValidationModeEnum.class)) { myParamType = "code"; } else if (IBase.class.isAssignableFrom(myParameterType) && typeIsConcrete) { - myParamType = myContext.getElementDefinition((Class) myParameterType).getName(); + myParamType = myContext + .getElementDefinition((Class) myParameterType) + .getName(); } else if (isSearchParam) { myParamType = "string"; mySearchParameterBinding = new SearchParameter(myName, myMin > 0); mySearchParameterBinding.setCompositeTypes(COMPOSITE_TYPES); - mySearchParameterBinding.setType(myContext, theParameterType, theInnerCollectionType, theOuterCollectionType); + mySearchParameterBinding.setType( + myContext, theParameterType, theInnerCollectionType, theOuterCollectionType); myConverter = new OperationParamConverter(); } else { - throw new ConfigurationException(Msg.code(1409) + "Invalid type for @OperationParam: " + myParameterType.getName()); + throw new ConfigurationException( + Msg.code(1409) + "Invalid type for @OperationParam: " + myParameterType.getName()); } - } - } public OperationParameter setConverter(IOperationParamConverter theConverter) { @@ -169,7 +175,12 @@ public class OperationParameter implements IParameter { } @Override - public void translateClientArgumentIntoQueryArgument(FhirContext theContext, Object theSourceClientArgument, Map> theTargetQueryArguments, 
IBaseResource theTargetResource) throws InternalErrorException { + public void translateClientArgumentIntoQueryArgument( + FhirContext theContext, + Object theSourceClientArgument, + Map> theTargetQueryArguments, + IBaseResource theTargetResource) + throws InternalErrorException { assert theTargetResource != null; Object sourceClientArgument = theSourceClientArgument; if (sourceClientArgument == null) { @@ -180,11 +191,10 @@ public class OperationParameter implements IParameter { sourceClientArgument = myConverter.outgoingClient(sourceClientArgument); } - ParametersUtil.addParameterToParameters(theContext, (IBaseParameters) theTargetResource, myName, sourceClientArgument); + ParametersUtil.addParameterToParameters( + theContext, (IBaseParameters) theTargetResource, myName, sourceClientArgument); } - - public static void throwInvalidMode(String paramValues) { throw new InvalidRequestException(Msg.code(1410) + "Invalid mode value: \"" + paramValues + "\""); } @@ -192,7 +202,6 @@ public class OperationParameter implements IParameter { interface IOperationParamConverter { Object outgoingClient(Object theObject); - } class OperationParamConverter implements IOperationParamConverter { @@ -204,12 +213,10 @@ public class OperationParameter implements IParameter { @Override public Object outgoingClient(Object theObject) { IQueryParameterType obj = (IQueryParameterType) theObject; - IPrimitiveType retVal = (IPrimitiveType) myContext.getElementDefinition("string").newInstance(); + IPrimitiveType retVal = + (IPrimitiveType) myContext.getElementDefinition("string").newInstance(); retVal.setValueAsString(obj.getValueAsQueryToken(myContext)); return retVal; } - } - - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/PageMethodBinding.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/PageMethodBinding.java index 61fb5f01338..32d838d84dc 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/PageMethodBinding.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/PageMethodBinding.java @@ -19,17 +19,16 @@ */ package ca.uhn.fhir.rest.client.method; -import ca.uhn.fhir.i18n.Msg; -import java.lang.reflect.Method; - -import org.hl7.fhir.instance.model.api.IBaseResource; - import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.valueset.BundleTypeEnum; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.client.impl.BaseHttpClientInvocation; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.util.CoverageIgnore; +import org.hl7.fhir.instance.model.api.IBaseResource; + +import java.lang.reflect.Method; public class PageMethodBinding extends BaseResourceReturningMethodBinding { @@ -61,5 +60,4 @@ public class PageMethodBinding extends BaseResourceReturningMethodBinding { public IBaseResource provider() { return null; } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/PatchMethodBinding.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/PatchMethodBinding.java index ca7e1e57a03..fd53b07ae4f 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/PatchMethodBinding.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/PatchMethodBinding.java @@ -19,22 +19,21 @@ */ package ca.uhn.fhir.rest.client.method; -import ca.uhn.fhir.i18n.Msg; -import java.lang.annotation.Annotation; -import java.lang.reflect.Method; -import java.util.*; - -import 
org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.instance.model.api.IIdType; - import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.annotation.Patch; import ca.uhn.fhir.rest.annotation.ResourceParam; import ca.uhn.fhir.rest.api.*; import ca.uhn.fhir.rest.client.impl.BaseHttpClientInvocation; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; + +import java.lang.annotation.Annotation; +import java.lang.reflect.Method; +import java.util.*; /** * Base class for an operation that has a resource type but not a resource body in the @@ -47,9 +46,16 @@ public class PatchMethodBinding extends BaseOutcomeReturningMethodBindingWithRes private int myResourceParamIndex; public PatchMethodBinding(Method theMethod, FhirContext theContext, Object theProvider) { - super(theMethod, theContext, theProvider, Patch.class, theMethod.getAnnotation(Patch.class).type()); + super( + theMethod, + theContext, + theProvider, + Patch.class, + theMethod.getAnnotation(Patch.class).type()); - for (ListIterator> iter = Arrays.asList(theMethod.getParameterTypes()).listIterator(); iter.hasNext();) { + for (ListIterator> iter = + Arrays.asList(theMethod.getParameterTypes()).listIterator(); + iter.hasNext(); ) { int nextIndex = iter.nextIndex(); Class next = iter.next(); if (next.equals(PatchTypeEnum.class)) { @@ -63,10 +69,12 @@ public class PatchMethodBinding extends BaseOutcomeReturningMethodBindingWithRes } if (myPatchTypeParameterIndex == -1) { - throw new ConfigurationException(Msg.code(1414) + "Method has no parameter of type " + PatchTypeEnum.class.getName() + " - " + theMethod.toString()); + throw new ConfigurationException(Msg.code(1414) + "Method has no parameter of type " + + PatchTypeEnum.class.getName() + " - " + theMethod.toString()); } if (myResourceParamIndex == -1) { - throw new ConfigurationException(Msg.code(1415) + "Method has no parameter with @" + ResourceParam.class.getSimpleName() + " annotation - " + theMethod.toString()); + throw new ConfigurationException(Msg.code(1415) + "Method has no parameter with @" + + ResourceParam.class.getSimpleName() + " annotation - " + theMethod.toString()); } } @@ -103,7 +111,8 @@ public class PatchMethodBinding extends BaseOutcomeReturningMethodBindingWithRes if (idDt.hasResourceType() == false) { idDt = idDt.withResourceType(getResourceName()); } else if (getResourceName().equals(idDt.getResourceType()) == false) { - throw new InvalidRequestException(Msg.code(1417) + "ID parameter has the wrong resource type, expected '" + getResourceName() + "', found: " + idDt.getResourceType()); + throw new InvalidRequestException(Msg.code(1417) + "ID parameter has the wrong resource type, expected '" + + getResourceName() + "', found: " + idDt.getResourceType()); } PatchTypeEnum patchType = (PatchTypeEnum) theArgs[myPatchTypeParameterIndex]; @@ -119,13 +128,17 @@ public class PatchMethodBinding extends BaseOutcomeReturningMethodBindingWithRes return retVal; } - public static HttpPatchClientInvocation createPatchInvocation(FhirContext theContext, IIdType theId, PatchTypeEnum thePatchType, String theBody) { - HttpPatchClientInvocation retVal = new HttpPatchClientInvocation(theContext, theId, thePatchType.getContentType(), theBody); + public static HttpPatchClientInvocation 
createPatchInvocation( + FhirContext theContext, IIdType theId, PatchTypeEnum thePatchType, String theBody) { + HttpPatchClientInvocation retVal = + new HttpPatchClientInvocation(theContext, theId, thePatchType.getContentType(), theBody); return retVal; } - public static HttpPatchClientInvocation createPatchInvocation(FhirContext theContext, String theUrlPath, PatchTypeEnum thePatchType, String theBody) { - HttpPatchClientInvocation retVal = new HttpPatchClientInvocation(theContext, theUrlPath, thePatchType.getContentType(), theBody); + public static HttpPatchClientInvocation createPatchInvocation( + FhirContext theContext, String theUrlPath, PatchTypeEnum thePatchType, String theBody) { + HttpPatchClientInvocation retVal = + new HttpPatchClientInvocation(theContext, theUrlPath, thePatchType.getContentType(), theBody); return retVal; } @@ -134,11 +147,16 @@ public class PatchMethodBinding extends BaseOutcomeReturningMethodBindingWithRes return null; } - public static HttpPatchClientInvocation createPatchInvocation(FhirContext theContext, PatchTypeEnum thePatchType, String theBody, String theResourceType, Map> theMatchParams) { + public static HttpPatchClientInvocation createPatchInvocation( + FhirContext theContext, + PatchTypeEnum thePatchType, + String theBody, + String theResourceType, + Map> theMatchParams) { StringBuilder urlBuilder = MethodUtil.createUrl(theResourceType, theMatchParams); String url = urlBuilder.toString(); - HttpPatchClientInvocation retVal = new HttpPatchClientInvocation(theContext, url, thePatchType.getContentType(), theBody); + HttpPatchClientInvocation retVal = + new HttpPatchClientInvocation(theContext, url, thePatchType.getContentType(), theBody); return retVal; } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/PatchTypeParameter.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/PatchTypeParameter.java index 36a853f19fd..0fd904cb9c4 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/PatchTypeParameter.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/PatchTypeParameter.java @@ -19,26 +19,32 @@ */ package ca.uhn.fhir.rest.client.method; +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import org.hl7.fhir.instance.model.api.IBaseResource; + import java.lang.reflect.Method; import java.util.Collection; import java.util.List; import java.util.Map; -import org.hl7.fhir.instance.model.api.IBaseResource; - -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; - class PatchTypeParameter implements IParameter { @Override - public void translateClientArgumentIntoQueryArgument(FhirContext theContext, Object theSourceClientArgument, Map> theTargetQueryArguments, IBaseResource theTargetResource) + public void translateClientArgumentIntoQueryArgument( + FhirContext theContext, + Object theSourceClientArgument, + Map> theTargetQueryArguments, + IBaseResource theTargetResource) throws InternalErrorException { // nothing } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { // ignore } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/RawParamsParmeter.java 
b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/RawParamsParmeter.java index 98ceed0bd82..eb147489828 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/RawParamsParmeter.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/RawParamsParmeter.java @@ -19,15 +19,14 @@ */ package ca.uhn.fhir.rest.client.method; -import java.lang.reflect.Method; -import java.util.*; - -import org.apache.commons.lang3.Validate; -import org.hl7.fhir.instance.model.api.IBaseResource; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.rest.annotation.RawParam; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import org.apache.commons.lang3.Validate; +import org.hl7.fhir.instance.model.api.IBaseResource; + +import java.lang.reflect.Method; +import java.util.*; public class RawParamsParmeter implements IParameter { @@ -36,14 +35,23 @@ public class RawParamsParmeter implements IParameter { } @Override - public void translateClientArgumentIntoQueryArgument(FhirContext theContext, Object theSourceClientArgument, Map> theTargetQueryArguments, IBaseResource theTargetResource) + public void translateClientArgumentIntoQueryArgument( + FhirContext theContext, + Object theSourceClientArgument, + Map> theTargetQueryArguments, + IBaseResource theTargetResource) throws InternalErrorException { // not supported on client for now } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { - Validate.isTrue(theParameterType.equals(Map.class), "Parameter with @" + RawParam.class + " must be of type Map>"); + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { + Validate.isTrue( + theParameterType.equals(Map.class), + "Parameter with @" + RawParam.class + " must be of type Map>"); } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/ReadMethodBinding.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/ReadMethodBinding.java index fc1cb67ac7d..d2710f86fe4 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/ReadMethodBinding.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/ReadMethodBinding.java @@ -19,32 +19,36 @@ */ package ca.uhn.fhir.rest.client.method; -import ca.uhn.fhir.i18n.Msg; -import java.io.IOException; -import java.io.InputStream; -import java.lang.reflect.Method; -import java.util.*; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.Validate; -import org.hl7.fhir.instance.model.api.*; - import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.model.valueset.BundleTypeEnum; import ca.uhn.fhir.rest.annotation.*; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.param.ParameterUtil; import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.Validate; +import org.hl7.fhir.instance.model.api.*; -public class ReadMethodBinding extends BaseResourceReturningMethodBinding implements IClientResponseHandlerHandlesBinary { +import java.io.IOException; +import java.io.InputStream; +import java.lang.reflect.Method; +import java.util.*; + +public class ReadMethodBinding extends BaseResourceReturningMethodBinding + implements 
IClientResponseHandlerHandlesBinary { private Integer myIdIndex; private boolean mySupportsVersion; private Class myIdParameterType; @SuppressWarnings("unchecked") - public ReadMethodBinding(Class theAnnotatedResourceType, Method theMethod, FhirContext theContext, Object theProvider) { + public ReadMethodBinding( + Class theAnnotatedResourceType, + Method theMethod, + FhirContext theContext, + Object theProvider) { super(theAnnotatedResourceType, theMethod, theContext, theProvider); Validate.notNull(theMethod, "Method must not be null"); @@ -57,15 +61,17 @@ public class ReadMethodBinding extends BaseResourceReturningMethodBinding implem myIdIndex = idIndex; if (myIdIndex == null) { - throw new ConfigurationException(Msg.code(1423) + "@" + Read.class.getSimpleName() + " method " + theMethod.getName() + " on type \"" + theMethod.getDeclaringClass().getName() - + "\" does not have a parameter annotated with @" + IdParam.class.getSimpleName()); + throw new ConfigurationException( + Msg.code(1423) + "@" + Read.class.getSimpleName() + " method " + theMethod.getName() + " on type \"" + + theMethod.getDeclaringClass().getName() + "\" does not have a parameter annotated with @" + + IdParam.class.getSimpleName()); } myIdParameterType = (Class) parameterTypes[myIdIndex]; if (!IIdType.class.isAssignableFrom(myIdParameterType)) { - throw new ConfigurationException(Msg.code(1424) + "ID parameter must be of type IdDt or IdType - Found: " + myIdParameterType); + throw new ConfigurationException( + Msg.code(1424) + "ID parameter must be of type IdDt or IdType - Found: " + myIdParameterType); } - } @Override @@ -92,7 +98,8 @@ public class ReadMethodBinding extends BaseResourceReturningMethodBinding implem IIdType id = ((IIdType) theArgs[myIdIndex]); String resourceName = getResourceName(); if (id.hasVersionIdPart()) { - retVal = createVReadInvocation(getContext(), new IdDt(resourceName, id.getIdPart(), id.getVersionIdPart()), resourceName); + retVal = createVReadInvocation( + getContext(), new IdDt(resourceName, id.getIdPart(), id.getVersionIdPart()), resourceName); } else { retVal = createReadInvocation(getContext(), id, resourceName); } @@ -106,22 +113,27 @@ public class ReadMethodBinding extends BaseResourceReturningMethodBinding implem } @Override - public Object invokeClientForBinary(String theResponseMimeType, InputStream theResponseReader, int theResponseStatusCode, Map> theHeaders) + public Object invokeClientForBinary( + String theResponseMimeType, + InputStream theResponseReader, + int theResponseStatusCode, + Map> theHeaders) throws IOException, BaseServerResponseException { byte[] contents = IOUtils.toByteArray(theResponseReader); - IBaseBinary resource = (IBaseBinary) getContext().getResourceDefinition("Binary").newInstance(); + IBaseBinary resource = + (IBaseBinary) getContext().getResourceDefinition("Binary").newInstance(); resource.setContentType(theResponseMimeType); resource.setContent(contents); switch (getMethodReturnType()) { - case LIST_OF_RESOURCES: - return Collections.singletonList(resource); - case RESOURCE: - return resource; - case BUNDLE_RESOURCE: - case METHOD_OUTCOME: - break; + case LIST_OF_RESOURCES: + return Collections.singletonList(resource); + case RESOURCE: + return resource; + case BUNDLE_RESOURCE: + case METHOD_OUTCOME: + break; } throw new IllegalStateException(Msg.code(1425) + "" + getMethodReturnType()); // should not happen @@ -144,17 +156,19 @@ public class ReadMethodBinding extends BaseResourceReturningMethodBinding implem return new 
HttpGetClientInvocation(theContext, theId.getValue()); } - public static HttpGetClientInvocation createReadInvocation(FhirContext theContext, IIdType theId, String theResourceName) { + public static HttpGetClientInvocation createReadInvocation( + FhirContext theContext, IIdType theId, String theResourceName) { return new HttpGetClientInvocation(theContext, new IdDt(theResourceName, theId.getIdPart()).getValue()); } - public static HttpGetClientInvocation createVReadInvocation(FhirContext theContext, IIdType theId, String theResourceName) { - return new HttpGetClientInvocation(theContext, new IdDt(theResourceName, theId.getIdPart(), theId.getVersionIdPart()).getValue()); + public static HttpGetClientInvocation createVReadInvocation( + FhirContext theContext, IIdType theId, String theResourceName) { + return new HttpGetClientInvocation( + theContext, new IdDt(theResourceName, theId.getIdPart(), theId.getVersionIdPart()).getValue()); } @Override protected BundleTypeEnum getResponseBundleType() { return null; } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/ResourceParameter.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/ResourceParameter.java index c651602fb0a..401460ca050 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/ResourceParameter.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/ResourceParameter.java @@ -19,17 +19,15 @@ */ package ca.uhn.fhir.rest.client.method; +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import org.hl7.fhir.instance.model.api.IBaseResource; + import java.lang.reflect.Method; import java.util.Collection; import java.util.List; import java.util.Map; -import org.apache.commons.lang3.Validate; -import org.hl7.fhir.instance.model.api.IBaseResource; - -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; - public class ResourceParameter implements IParameter { private Class myResourceType; @@ -45,12 +43,20 @@ public class ResourceParameter implements IParameter { } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { // ignore for now } @Override - public void translateClientArgumentIntoQueryArgument(FhirContext theContext, Object theSourceClientArgument, Map> theTargetQueryArguments, IBaseResource theTargetResource) + public void translateClientArgumentIntoQueryArgument( + FhirContext theContext, + Object theSourceClientArgument, + Map> theTargetQueryArguments, + IBaseResource theTargetResource) throws InternalErrorException { // ignore, as this is handles as a special case } @@ -65,5 +71,4 @@ public class ResourceParameter implements IParameter { public Class getResourceType() { return myResourceType; } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/SearchMethodBinding.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/SearchMethodBinding.java index 3d01eada4de..a80f540e1a1 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/SearchMethodBinding.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/SearchMethodBinding.java @@ -19,10 +19,9 @@ */ package ca.uhn.fhir.rest.client.method; -import ca.uhn.fhir.i18n.Msg; import 
ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.model.api.annotation.Description; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.valueset.BundleTypeEnum; import ca.uhn.fhir.rest.annotation.Search; import ca.uhn.fhir.rest.api.Constants; @@ -46,7 +45,6 @@ import java.util.Map; import java.util.Map.Entry; import static org.apache.commons.lang3.StringUtils.isBlank; -import static org.apache.commons.lang3.StringUtils.isNotBlank; public class SearchMethodBinding extends BaseResourceReturningMethodBinding { private String myCompartmentName; @@ -54,7 +52,11 @@ public class SearchMethodBinding extends BaseResourceReturningMethodBinding { private Integer myIdParamIndex; private String myQueryName; - public SearchMethodBinding(Class theReturnResourceType, Method theMethod, FhirContext theContext, Object theProvider) { + public SearchMethodBinding( + Class theReturnResourceType, + Method theMethod, + FhirContext theContext, + Object theProvider) { super(theReturnResourceType, theMethod, theContext, theProvider); Search search = theMethod.getAnnotation(Search.class); this.myQueryName = StringUtils.defaultIfBlank(search.queryName(), null); @@ -76,8 +78,13 @@ public class SearchMethodBinding extends BaseResourceReturningMethodBinding { SearchParameter sp = (SearchParameter) next; if (sp.getName().startsWith("_")) { if (ALLOWED_PARAMS.contains(sp.getName())) { - String msg = getContext().getLocalizer().getMessage(getClass().getName() + ".invalidSpecialParamName", theMethod.getName(), theMethod.getDeclaringClass().getSimpleName(), - sp.getName()); + String msg = getContext() + .getLocalizer() + .getMessage( + getClass().getName() + ".invalidSpecialParamName", + theMethod.getName(), + theMethod.getDeclaringClass().getSimpleName(), + sp.getName()); throw new ConfigurationException(Msg.code(1442) + msg); } } @@ -93,10 +100,14 @@ public class SearchMethodBinding extends BaseResourceReturningMethodBinding { * Only compartment searching methods may have an ID parameter */ if (isBlank(myCompartmentName) && myIdParamIndex != null) { - String msg = theContext.getLocalizer().getMessage(getClass().getName() + ".idWithoutCompartment", theMethod.getName(), theMethod.getDeclaringClass()); + String msg = theContext + .getLocalizer() + .getMessage( + getClass().getName() + ".idWithoutCompartment", + theMethod.getName(), + theMethod.getDeclaringClass()); throw new ConfigurationException(Msg.code(1443) + msg); } - } public String getDescription() { @@ -115,13 +126,15 @@ public class SearchMethodBinding extends BaseResourceReturningMethodBinding { @Override public ReturnTypeEnum getReturnType() { - return ReturnTypeEnum.BUNDLE; + return ReturnTypeEnum.BUNDLE; } - @Override public BaseHttpClientInvocation invokeClient(Object[] theArgs) throws InternalErrorException { - assert (myQueryName == null || ((theArgs != null ? theArgs.length : 0) == getParameters().size())) : "Wrong number of arguments: " + (theArgs != null ? theArgs.length : "null"); + assert (myQueryName == null + || ((theArgs != null ? theArgs.length : 0) + == getParameters().size())) + : "Wrong number of arguments: " + (theArgs != null ? 
theArgs.length : "null"); Map> queryStringArgs = new LinkedHashMap>(); @@ -139,29 +152,36 @@ public class SearchMethodBinding extends BaseResourceReturningMethodBinding { } } - BaseHttpClientInvocation retVal = createSearchInvocation(getContext(), resourceName, queryStringArgs, id, myCompartmentName, null); + BaseHttpClientInvocation retVal = + createSearchInvocation(getContext(), resourceName, queryStringArgs, id, myCompartmentName, null); return retVal; } - @Override protected boolean isAddContentLocationHeader() { return false; } - @Override public String toString() { return getMethod().toString(); } - public static BaseHttpClientInvocation createSearchInvocation(FhirContext theContext, String theSearchUrl, UrlSourceEnum theUrlSource, Map> theParams) { + public static BaseHttpClientInvocation createSearchInvocation( + FhirContext theContext, + String theSearchUrl, + UrlSourceEnum theUrlSource, + Map> theParams) { return new HttpGetClientInvocation(theContext, theParams, theUrlSource, theSearchUrl); } - - public static BaseHttpClientInvocation createSearchInvocation(FhirContext theContext, String theResourceName, Map> theParameters, IIdType theId, String theCompartmentName, + public static BaseHttpClientInvocation createSearchInvocation( + FhirContext theContext, + String theResourceName, + Map> theParameters, + IIdType theId, + String theCompartmentName, SearchStyleEnum theSearchStyle) { SearchStyleEnum searchStyle = theSearchStyle; if (searchStyle == null) { @@ -185,7 +205,9 @@ public class SearchMethodBinding extends BaseResourceReturningMethodBinding { boolean compartmentSearch = false; if (theCompartmentName != null) { if (theId == null || !theId.hasIdPart()) { - String msg = theContext.getLocalizer().getMessage(SearchMethodBinding.class.getName() + ".idNullForCompartmentSearch"); + String msg = theContext + .getLocalizer() + .getMessage(SearchMethodBinding.class.getName() + ".idNullForCompartmentSearch"); throw new InvalidRequestException(Msg.code(1444) + msg); } compartmentSearch = true; @@ -195,30 +217,44 @@ public class SearchMethodBinding extends BaseResourceReturningMethodBinding { * Are we doing a get (GET [base]/Patient?name=foo) or a get with search (GET [base]/Patient/_search?name=foo) or a post (POST [base]/Patient with parameters in the POST body) */ switch (searchStyle) { - case GET: - default: - if (compartmentSearch) { - invocation = new HttpGetClientInvocation(theContext, theParameters, theResourceName, theId.getIdPart(), theCompartmentName); - } else { - invocation = new HttpGetClientInvocation(theContext, theParameters, theResourceName); - } - break; - case GET_WITH_SEARCH: - if (compartmentSearch) { - invocation = new HttpGetClientInvocation(theContext, theParameters, theResourceName, theId.getIdPart(), theCompartmentName, Constants.PARAM_SEARCH); - } else { - invocation = new HttpGetClientInvocation(theContext, theParameters, theResourceName, Constants.PARAM_SEARCH); - } - break; - case POST: - if (compartmentSearch) { - invocation = new HttpPostClientInvocation(theContext, theParameters, theResourceName, theId.getIdPart(), theCompartmentName, Constants.PARAM_SEARCH); - } else { - invocation = new HttpPostClientInvocation(theContext, theParameters, theResourceName, Constants.PARAM_SEARCH); - } + case GET: + default: + if (compartmentSearch) { + invocation = new HttpGetClientInvocation( + theContext, theParameters, theResourceName, theId.getIdPart(), theCompartmentName); + } else { + invocation = new HttpGetClientInvocation(theContext, theParameters, 
theResourceName); + } + break; + case GET_WITH_SEARCH: + if (compartmentSearch) { + invocation = new HttpGetClientInvocation( + theContext, + theParameters, + theResourceName, + theId.getIdPart(), + theCompartmentName, + Constants.PARAM_SEARCH); + } else { + invocation = new HttpGetClientInvocation( + theContext, theParameters, theResourceName, Constants.PARAM_SEARCH); + } + break; + case POST: + if (compartmentSearch) { + invocation = new HttpPostClientInvocation( + theContext, + theParameters, + theResourceName, + theId.getIdPart(), + theCompartmentName, + Constants.PARAM_SEARCH); + } else { + invocation = new HttpPostClientInvocation( + theContext, theParameters, theResourceName, Constants.PARAM_SEARCH); + } } return invocation; } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/SearchParameter.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/SearchParameter.java index b679772d070..058eda8f287 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/SearchParameter.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/SearchParameter.java @@ -19,14 +19,8 @@ */ package ca.uhn.fhir.rest.client.method; -import ca.uhn.fhir.i18n.Msg; -import java.util.*; - -import org.apache.commons.lang3.builder.ToStringBuilder; -import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.instance.model.api.IPrimitiveType; - import ca.uhn.fhir.context.*; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.*; import ca.uhn.fhir.model.base.composite.BaseIdentifierDt; import ca.uhn.fhir.model.base.composite.BaseQuantityDt; @@ -36,9 +30,13 @@ import ca.uhn.fhir.rest.api.*; import ca.uhn.fhir.rest.param.*; import ca.uhn.fhir.rest.param.binder.*; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; -import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.util.CollectionUtil; import ca.uhn.fhir.util.ReflectionUtil; +import org.apache.commons.lang3.builder.ToStringBuilder; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IPrimitiveType; + +import java.util.*; public class SearchParameter extends BaseQueryParameter { @@ -54,45 +52,66 @@ public class SearchParameter extends BaseQueryParameter { ourParamTypes.put(StringParam.class, RestSearchParameterTypeEnum.STRING); ourParamTypes.put(StringOrListParam.class, RestSearchParameterTypeEnum.STRING); ourParamTypes.put(StringAndListParam.class, RestSearchParameterTypeEnum.STRING); - ourParamQualifiers.put(RestSearchParameterTypeEnum.STRING, CollectionUtil.newSet(Constants.PARAMQUALIFIER_STRING_EXACT, Constants.PARAMQUALIFIER_STRING_CONTAINS, Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); + ourParamQualifiers.put( + RestSearchParameterTypeEnum.STRING, + CollectionUtil.newSet( + Constants.PARAMQUALIFIER_STRING_EXACT, + Constants.PARAMQUALIFIER_STRING_CONTAINS, + Constants.PARAMQUALIFIER_MISSING, + EMPTY_STRING)); ourParamTypes.put(UriParam.class, RestSearchParameterTypeEnum.URI); ourParamTypes.put(UriOrListParam.class, RestSearchParameterTypeEnum.URI); ourParamTypes.put(UriAndListParam.class, RestSearchParameterTypeEnum.URI); // TODO: are these right for URI? 
- ourParamQualifiers.put(RestSearchParameterTypeEnum.URI, CollectionUtil.newSet(Constants.PARAMQUALIFIER_STRING_EXACT, Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); + ourParamQualifiers.put( + RestSearchParameterTypeEnum.URI, + CollectionUtil.newSet( + Constants.PARAMQUALIFIER_STRING_EXACT, Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); ourParamTypes.put(TokenParam.class, RestSearchParameterTypeEnum.TOKEN); ourParamTypes.put(TokenOrListParam.class, RestSearchParameterTypeEnum.TOKEN); ourParamTypes.put(TokenAndListParam.class, RestSearchParameterTypeEnum.TOKEN); - ourParamQualifiers.put(RestSearchParameterTypeEnum.TOKEN, CollectionUtil.newSet(Constants.PARAMQUALIFIER_TOKEN_TEXT, Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); + ourParamQualifiers.put( + RestSearchParameterTypeEnum.TOKEN, + CollectionUtil.newSet( + Constants.PARAMQUALIFIER_TOKEN_TEXT, Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); ourParamTypes.put(DateParam.class, RestSearchParameterTypeEnum.DATE); ourParamTypes.put(DateOrListParam.class, RestSearchParameterTypeEnum.DATE); ourParamTypes.put(DateAndListParam.class, RestSearchParameterTypeEnum.DATE); ourParamTypes.put(DateRangeParam.class, RestSearchParameterTypeEnum.DATE); - ourParamQualifiers.put(RestSearchParameterTypeEnum.DATE, CollectionUtil.newSet(Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); + ourParamQualifiers.put( + RestSearchParameterTypeEnum.DATE, + CollectionUtil.newSet(Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); ourParamTypes.put(QuantityParam.class, RestSearchParameterTypeEnum.QUANTITY); ourParamTypes.put(QuantityOrListParam.class, RestSearchParameterTypeEnum.QUANTITY); ourParamTypes.put(QuantityAndListParam.class, RestSearchParameterTypeEnum.QUANTITY); - ourParamQualifiers.put(RestSearchParameterTypeEnum.QUANTITY, CollectionUtil.newSet(Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); + ourParamQualifiers.put( + RestSearchParameterTypeEnum.QUANTITY, + CollectionUtil.newSet(Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); ourParamTypes.put(NumberParam.class, RestSearchParameterTypeEnum.NUMBER); ourParamTypes.put(NumberOrListParam.class, RestSearchParameterTypeEnum.NUMBER); ourParamTypes.put(NumberAndListParam.class, RestSearchParameterTypeEnum.NUMBER); - ourParamQualifiers.put(RestSearchParameterTypeEnum.NUMBER, CollectionUtil.newSet(Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); + ourParamQualifiers.put( + RestSearchParameterTypeEnum.NUMBER, + CollectionUtil.newSet(Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); ourParamTypes.put(ReferenceParam.class, RestSearchParameterTypeEnum.REFERENCE); ourParamTypes.put(ReferenceOrListParam.class, RestSearchParameterTypeEnum.REFERENCE); ourParamTypes.put(ReferenceAndListParam.class, RestSearchParameterTypeEnum.REFERENCE); // --vvvv-- no empty because that gets added from OptionalParam#chainWhitelist - ourParamQualifiers.put(RestSearchParameterTypeEnum.REFERENCE, CollectionUtil.newSet(Constants.PARAMQUALIFIER_MISSING)); + ourParamQualifiers.put( + RestSearchParameterTypeEnum.REFERENCE, CollectionUtil.newSet(Constants.PARAMQUALIFIER_MISSING)); ourParamTypes.put(CompositeParam.class, RestSearchParameterTypeEnum.COMPOSITE); ourParamTypes.put(CompositeOrListParam.class, RestSearchParameterTypeEnum.COMPOSITE); ourParamTypes.put(CompositeAndListParam.class, RestSearchParameterTypeEnum.COMPOSITE); - ourParamQualifiers.put(RestSearchParameterTypeEnum.COMPOSITE, CollectionUtil.newSet(Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); + ourParamQualifiers.put( + RestSearchParameterTypeEnum.COMPOSITE, + 
CollectionUtil.newSet(Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); ourParamTypes.put(HasParam.class, RestSearchParameterTypeEnum.HAS); ourParamTypes.put(HasOrListParam.class, RestSearchParameterTypeEnum.HAS); @@ -108,8 +127,7 @@ public class SearchParameter extends BaseQueryParameter { private boolean myRequired; private Class myType; - public SearchParameter() { - } + public SearchParameter() {} public SearchParameter(String theName, boolean theRequired) { this.myName = theName; @@ -118,7 +136,7 @@ public class SearchParameter extends BaseQueryParameter { /* * (non-Javadoc) - * + * * @see ca.uhn.fhir.rest.param.IParameter#encode(java.lang.Object) */ @Override @@ -140,7 +158,7 @@ public class SearchParameter extends BaseQueryParameter { /* * (non-Javadoc) - * + * * @see ca.uhn.fhir.rest.param.IParameter#getName() */ @Override @@ -194,7 +212,11 @@ public class SearchParameter extends BaseQueryParameter { } @SuppressWarnings("unchecked") - public void setType(FhirContext theContext, final Class type, Class> theInnerCollectionType, Class> theOuterCollectionType) { + public void setType( + FhirContext theContext, + final Class type, + Class> theInnerCollectionType, + Class> theOuterCollectionType) { this.myType = type; if (IQueryParameterType.class.isAssignableFrom(type)) { @@ -202,7 +224,8 @@ public class SearchParameter extends BaseQueryParameter { } else if (IQueryParameterOr.class.isAssignableFrom(type)) { myParamBinder = new QueryParameterOrBinder((Class>) type, myCompositeTypes); } else if (IQueryParameterAnd.class.isAssignableFrom(type)) { - myParamBinder = new QueryParameterAndBinder((Class>) type, myCompositeTypes); + myParamBinder = + new QueryParameterAndBinder((Class>) type, myCompositeTypes); } else if (String.class.equals(type)) { myParamBinder = new StringBinder(); myParamType = RestSearchParameterTypeEnum.STRING; @@ -213,7 +236,8 @@ public class SearchParameter extends BaseQueryParameter { myParamBinder = new CalendarBinder(); myParamType = RestSearchParameterTypeEnum.DATE; } else if (IPrimitiveType.class.isAssignableFrom(type) && ReflectionUtil.isInstantiable(type)) { - RuntimePrimitiveDatatypeDefinition def = (RuntimePrimitiveDatatypeDefinition) theContext.getElementDefinition((Class>) type); + RuntimePrimitiveDatatypeDefinition def = (RuntimePrimitiveDatatypeDefinition) + theContext.getElementDefinition((Class>) type); if (def.getNativeType() != null) { if (def.getNativeType().equals(Date.class)) { myParamBinder = new FhirPrimitiveBinder((Class>) type); @@ -224,7 +248,8 @@ public class SearchParameter extends BaseQueryParameter { } } } else { - throw new ConfigurationException(Msg.code(1406) + "Unsupported data type for parameter: " + type.getCanonicalName()); + throw new ConfigurationException( + Msg.code(1406) + "Unsupported data type for parameter: " + type.getCanonicalName()); } RestSearchParameterTypeEnum typeEnum = ourParamTypes.get(type); @@ -281,5 +306,4 @@ public class SearchParameter extends BaseQueryParameter { retVal.append("required", myRequired); return retVal.toString(); } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/SinceOrAtParameter.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/SinceOrAtParameter.java index 95449e9f033..99d399b1df6 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/SinceOrAtParameter.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/SinceOrAtParameter.java @@ -21,53 +21,61 @@ package ca.uhn.fhir.rest.client.method; class 
SinceOrAtParameter extends SearchParameter { -// private Class myType; -// private String myParamName; -// private Class myAnnotationType; + // private Class myType; + // private String myParamName; + // private Class myAnnotationType; public SinceOrAtParameter(String theParamName, Class theAnnotationType) { super(theParamName, false); -// myParamName = theParamName; -// myAnnotationType = theAnnotationType; + // myParamName = theParamName; + // myAnnotationType = theAnnotationType; } -// @Override -// public void translateClientArgumentIntoQueryArgument(FhirContext theContext, Object theSourceClientArgument, Map> theTargetQueryArguments, IBaseResource theTargetResource) throws InternalErrorException { -// if (theSourceClientArgument != null) { -// InstantDt since = ParameterUtil.toInstant(theSourceClientArgument); -// if (since.isEmpty() == false) { -// theTargetQueryArguments.put(myParamName, Collections.singletonList(since.getValueAsString())); -// } -// } -// } -// -// @Override -// public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException { -// String[] sinceParams = theRequest.getParameters().remove(myParamName); -// if (sinceParams != null) { -// if (sinceParams.length > 0) { -// if (StringUtils.isNotBlank(sinceParams[0])) { -// try { -// return ParameterUtil.fromInstant(myType, sinceParams); -// } catch (DataFormatException e) { -// throw new InvalidRequestException(Msg.code(1465) + "Invalid " + Constants.PARAM_SINCE + " value: " + sinceParams[0]); -// } -// } -// } -// } -// return ParameterUtil.fromInstant(myType, null); -// } -// -// @Override -// public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { -// if (theOuterCollectionType != null) { -// throw new ConfigurationException(Msg.code(1466) + "Method '" + theMethod.getName() + "' in type '" + "' is annotated with @" + myAnnotationType.getName() + " but can not be of collection type"); -// } -// if (ParameterUtil.getBindableInstantTypes().contains(theParameterType)) { -// myType = theParameterType; -// } else { -// throw new ConfigurationException(Msg.code(1467) + "Method '" + theMethod.getName() + "' in type '" + "' is annotated with @" + myAnnotationType.getName() + " but is an invalid type, must be one of: " + ParameterUtil.getBindableInstantTypes()); -// } -// } + // @Override + // public void translateClientArgumentIntoQueryArgument(FhirContext theContext, Object theSourceClientArgument, + // Map> theTargetQueryArguments, IBaseResource theTargetResource) throws InternalErrorException + // { + // if (theSourceClientArgument != null) { + // InstantDt since = ParameterUtil.toInstant(theSourceClientArgument); + // if (since.isEmpty() == false) { + // theTargetQueryArguments.put(myParamName, Collections.singletonList(since.getValueAsString())); + // } + // } + // } + // + // @Override + // public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding + // theMethodBinding) throws InternalErrorException, InvalidRequestException { + // String[] sinceParams = theRequest.getParameters().remove(myParamName); + // if (sinceParams != null) { + // if (sinceParams.length > 0) { + // if (StringUtils.isNotBlank(sinceParams[0])) { + // try { + // return ParameterUtil.fromInstant(myType, sinceParams); + // } catch (DataFormatException e) { + // throw new InvalidRequestException(Msg.code(1465) + "Invalid " + 
Constants.PARAM_SINCE + " value: " + + // sinceParams[0]); + // } + // } + // } + // } + // return ParameterUtil.fromInstant(myType, null); + // } + // + // @Override + // public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + // if (theOuterCollectionType != null) { + // throw new ConfigurationException(Msg.code(1466) + "Method '" + theMethod.getName() + "' in type '" + "' is + // annotated with @" + myAnnotationType.getName() + " but can not be of collection type"); + // } + // if (ParameterUtil.getBindableInstantTypes().contains(theParameterType)) { + // myType = theParameterType; + // } else { + // throw new ConfigurationException(Msg.code(1467) + "Method '" + theMethod.getName() + "' in type '" + "' is + // annotated with @" + myAnnotationType.getName() + " but is an invalid type, must be one of: " + + // ParameterUtil.getBindableInstantTypes()); + // } + // } } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/SinceParameter.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/SinceParameter.java index 819cefdd8de..ad392379b29 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/SinceParameter.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/SinceParameter.java @@ -27,5 +27,4 @@ class SinceParameter extends SinceOrAtParameter { public SinceParameter() { super(Constants.PARAM_SINCE, Since.class); } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/SortParameter.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/SortParameter.java index 69b09adffdc..bafcab02325 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/SortParameter.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/SortParameter.java @@ -19,8 +19,17 @@ */ package ca.uhn.fhir.rest.client.method; +import ca.uhn.fhir.context.ConfigurationException; +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.i18n.Msg; -import static org.apache.commons.lang3.StringUtils.isNotBlank; +import ca.uhn.fhir.rest.annotation.Sort; +import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.rest.api.SortOrderEnum; +import ca.uhn.fhir.rest.api.SortSpec; +import ca.uhn.fhir.rest.param.ParameterUtil; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import org.hl7.fhir.instance.model.api.IBaseResource; import java.lang.reflect.Method; import java.util.ArrayList; @@ -28,17 +37,7 @@ import java.util.Collection; import java.util.List; import java.util.Map; -import org.hl7.fhir.instance.model.api.IBaseResource; - -import ca.uhn.fhir.context.ConfigurationException; -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.context.FhirVersionEnum; -import ca.uhn.fhir.rest.annotation.Sort; -import ca.uhn.fhir.rest.api.Constants; -import ca.uhn.fhir.rest.api.SortOrderEnum; -import ca.uhn.fhir.rest.api.SortSpec; -import ca.uhn.fhir.rest.param.ParameterUtil; -import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import static org.apache.commons.lang3.StringUtils.isNotBlank; public class SortParameter implements IParameter { @@ -49,18 +48,30 @@ public class SortParameter implements IParameter { } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> 
theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { if (theOuterCollectionType != null || theInnerCollectionType != null) { - throw new ConfigurationException(Msg.code(1463) + "Method '" + theMethod.getName() + "' in type '" + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + Sort.class.getName() + " but can not be of collection type"); + throw new ConfigurationException(Msg.code(1463) + "Method '" + theMethod.getName() + "' in type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + Sort.class.getName() + + " but can not be of collection type"); } if (!theParameterType.equals(SortSpec.class)) { - throw new ConfigurationException(Msg.code(1464) + "Method '" + theMethod.getName() + "' in type '" + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + Sort.class.getName() + " but is an invalid type, must be: " + SortSpec.class.getCanonicalName()); + throw new ConfigurationException(Msg.code(1464) + "Method '" + theMethod.getName() + "' in type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + Sort.class.getName() + + " but is an invalid type, must be: " + SortSpec.class.getCanonicalName()); } - } @Override - public void translateClientArgumentIntoQueryArgument(FhirContext theContext, Object theSourceClientArgument, Map> theTargetQueryArguments, IBaseResource theTargetResource) throws InternalErrorException { + public void translateClientArgumentIntoQueryArgument( + FhirContext theContext, + Object theSourceClientArgument, + Map> theTargetQueryArguments, + IBaseResource theTargetResource) + throws InternalErrorException { SortSpec ss = (SortSpec) theSourceClientArgument; if (myContext.getVersion().getVersion().isNewerThan(FhirVersionEnum.DSTU2)) { @@ -96,7 +107,6 @@ public class SortParameter implements IParameter { } } - public static String createSortStringDstu3(SortSpec ss) { StringBuilder val = new StringBuilder(); while (ss != null) { @@ -117,5 +127,4 @@ public class SortParameter implements IParameter { String string = val.toString(); return string; } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/SummaryEnumParameter.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/SummaryEnumParameter.java index c31bc7fbbaf..f092f332fe2 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/SummaryEnumParameter.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/SummaryEnumParameter.java @@ -19,7 +19,14 @@ */ package ca.uhn.fhir.rest.client.method; +import ca.uhn.fhir.context.ConfigurationException; +import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.i18n.Msg; +import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.rest.api.SummaryEnum; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import org.hl7.fhir.instance.model.api.IBaseResource; + import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Collection; @@ -27,19 +34,15 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import org.hl7.fhir.instance.model.api.IBaseResource; - -import ca.uhn.fhir.context.ConfigurationException; -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.rest.api.Constants; -import ca.uhn.fhir.rest.api.SummaryEnum; -import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; - public class SummaryEnumParameter implements IParameter { @SuppressWarnings("unchecked") @Override - public void 
translateClientArgumentIntoQueryArgument(FhirContext theContext, Object theSourceClientArgument, Map> theTargetQueryArguments, IBaseResource theTargetResource) + public void translateClientArgumentIntoQueryArgument( + FhirContext theContext, + Object theSourceClientArgument, + Map> theTargetQueryArguments, + IBaseResource theTargetResource) throws InternalErrorException { if (theSourceClientArgument instanceof Collection) { List values = new ArrayList(); @@ -58,11 +61,15 @@ public class SummaryEnumParameter implements IParameter { } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { if (theOuterCollectionType != null) { - throw new ConfigurationException(Msg.code(1422) + "Method '" + theMethod.getName() + "' in type '" + theMethod.getDeclaringClass().getCanonicalName() + "' is of type " + SummaryEnum.class + throw new ConfigurationException(Msg.code(1422) + "Method '" + theMethod.getName() + "' in type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' is of type " + SummaryEnum.class + " but can not be a collection of collections"); } } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/TransactionMethodBinding.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/TransactionMethodBinding.java index 812e59f6682..90c2139dc47 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/TransactionMethodBinding.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/TransactionMethodBinding.java @@ -19,21 +19,20 @@ */ package ca.uhn.fhir.rest.client.method; -import ca.uhn.fhir.i18n.Msg; -import java.lang.reflect.Method; -import java.util.List; - -import org.hl7.fhir.instance.model.api.IBaseBundle; -import org.hl7.fhir.instance.model.api.IBaseResource; - import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.valueset.BundleTypeEnum; import ca.uhn.fhir.rest.annotation.Transaction; import ca.uhn.fhir.rest.annotation.TransactionParam; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.client.impl.BaseHttpClientInvocation; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import org.hl7.fhir.instance.model.api.IBaseBundle; +import org.hl7.fhir.instance.model.api.IBaseResource; + +import java.lang.reflect.Method; +import java.util.List; public class TransactionMethodBinding extends BaseResourceReturningMethodBinding { @@ -47,8 +46,10 @@ public class TransactionMethodBinding extends BaseResourceReturningMethodBinding for (IParameter next : getParameters()) { if (next instanceof TransactionParameter) { if (myTransactionParamIndex != -1) { - throw new ConfigurationException(Msg.code(1418) + "Method '" + theMethod.getName() + "' in type " + theMethod.getDeclaringClass().getCanonicalName() + " has multiple parameters annotated with the @" + TransactionParam.class + " annotation, exactly one is required for @" + Transaction.class - + " methods"); + throw new ConfigurationException(Msg.code(1418) + "Method '" + theMethod.getName() + "' in type " + + theMethod.getDeclaringClass().getCanonicalName() + + " has multiple parameters annotated with the @" + TransactionParam.class + + " annotation, exactly one is required for @" + Transaction.class + " methods"); } 
myTransactionParamIndex = index; } @@ -56,7 +57,9 @@ public class TransactionMethodBinding extends BaseResourceReturningMethodBinding } if (myTransactionParamIndex == -1) { - throw new ConfigurationException(Msg.code(1419) + "Method '" + theMethod.getName() + "' in type " + theMethod.getDeclaringClass().getCanonicalName() + " does not have a parameter annotated with the @" + TransactionParam.class + " annotation"); + throw new ConfigurationException(Msg.code(1419) + "Method '" + theMethod.getName() + "' in type " + + theMethod.getDeclaringClass().getCanonicalName() + + " does not have a parameter annotated with the @" + TransactionParam.class + " annotation"); } } @@ -75,32 +78,30 @@ public class TransactionMethodBinding extends BaseResourceReturningMethodBinding return ReturnTypeEnum.BUNDLE; } - @Override public BaseHttpClientInvocation invokeClient(Object[] theArgs) throws InternalErrorException { FhirContext context = getContext(); Object arg = theArgs[myTransactionParamIndex]; - + if (arg instanceof IBaseBundle) { return createTransactionInvocation((IBaseBundle) arg, context); } - + @SuppressWarnings("unchecked") List resources = (List) arg; return createTransactionInvocation(resources, context); } - public static BaseHttpClientInvocation createTransactionInvocation(IBaseBundle theBundle, FhirContext theContext) { return new HttpPostClientInvocation(theContext, theBundle); } - public static BaseHttpClientInvocation createTransactionInvocation(List theResources, FhirContext theContext) { + public static BaseHttpClientInvocation createTransactionInvocation( + List theResources, FhirContext theContext) { return new HttpPostClientInvocation(theContext, theResources, BundleTypeEnum.TRANSACTION); } public static BaseHttpClientInvocation createTransactionInvocation(String theRawBundle, FhirContext theContext) { return new HttpPostClientInvocation(theContext, theRawBundle, true, ""); } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/TransactionParameter.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/TransactionParameter.java index 69dee2dd6ed..546fa2bdff2 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/TransactionParameter.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/TransactionParameter.java @@ -19,17 +19,16 @@ */ package ca.uhn.fhir.rest.client.method; -import ca.uhn.fhir.i18n.Msg; -import java.lang.reflect.Method; -import java.lang.reflect.Modifier; -import java.util.*; - -import org.hl7.fhir.instance.model.api.IBaseResource; - import ca.uhn.fhir.context.*; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.IResource; import ca.uhn.fhir.rest.annotation.TransactionParam; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import org.hl7.fhir.instance.model.api.IBaseResource; + +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; +import java.util.*; public class TransactionParameter implements IParameter { @@ -42,17 +41,25 @@ public class TransactionParameter implements IParameter { } private String createParameterTypeError(Method theMethod) { - return "Method '" + theMethod.getName() + "' in type '" + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + TransactionParam.class.getName() - + " but is not of type List<" + IResource.class.getCanonicalName() + "> or Bundle"; + return "Method '" + theMethod.getName() + "' in type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + + 
TransactionParam.class.getName() + " but is not of type List<" + IResource.class.getCanonicalName() + + "> or Bundle"; } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { if (theOuterCollectionType != null) { - throw new ConfigurationException(Msg.code(1454) + "Method '" + theMethod.getName() + "' in type '" + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + throw new ConfigurationException(Msg.code(1454) + "Method '" + theMethod.getName() + "' in type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + TransactionParam.class.getName() + " but can not be a collection of collections"); } - if (Modifier.isInterface(theParameterType.getModifiers()) == false && IBaseResource.class.isAssignableFrom(theParameterType)) { + if (Modifier.isInterface(theParameterType.getModifiers()) == false + && IBaseResource.class.isAssignableFrom(theParameterType)) { @SuppressWarnings("unchecked") Class parameterType = (Class) theParameterType; RuntimeResourceDefinition def = myContext.getResourceDefinition(parameterType); @@ -65,7 +72,8 @@ public class TransactionParameter implements IParameter { if (theInnerCollectionType.equals(List.class) == false) { throw new ConfigurationException(Msg.code(1456) + createParameterTypeError(theMethod)); } - if (theParameterType.equals(IResource.class) == false && theParameterType.equals(IBaseResource.class) == false) { + if (theParameterType.equals(IResource.class) == false + && theParameterType.equals(IBaseResource.class) == false) { throw new ConfigurationException(Msg.code(1457) + createParameterTypeError(theMethod)); } myParamStyle = ParamStyle.RESOURCE_LIST; @@ -73,7 +81,11 @@ public class TransactionParameter implements IParameter { } @Override - public void translateClientArgumentIntoQueryArgument(FhirContext theContext, Object theSourceClientArgument, Map> theTargetQueryArguments, IBaseResource theTargetResource) + public void translateClientArgumentIntoQueryArgument( + FhirContext theContext, + Object theSourceClientArgument, + Map> theTargetQueryArguments, + IBaseResource theTargetResource) throws InternalErrorException { // nothing @@ -89,5 +101,4 @@ public class TransactionParameter implements IParameter { /** List of resources */ RESOURCE_LIST } - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/UpdateMethodBinding.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/UpdateMethodBinding.java index e78d61ef87c..75e2a7086c0 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/UpdateMethodBinding.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/UpdateMethodBinding.java @@ -19,24 +19,22 @@ */ package ca.uhn.fhir.rest.client.method; -import ca.uhn.fhir.i18n.Msg; -import static org.apache.commons.lang3.StringUtils.isBlank; - -import java.lang.reflect.Method; -import java.util.Collections; -import java.util.Set; - -import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.instance.model.api.IIdType; - import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.model.primitive.IdDt; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.annotation.Update; import ca.uhn.fhir.rest.api.RequestTypeEnum; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import 
ca.uhn.fhir.rest.client.impl.BaseHttpClientInvocation; import ca.uhn.fhir.rest.param.ParameterUtil; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; + +import java.lang.reflect.Method; +import java.util.Collections; +import java.util.Set; + +import static org.apache.commons.lang3.StringUtils.isBlank; public class UpdateMethodBinding extends BaseOutcomeReturningMethodBindingWithResourceParam { @@ -48,7 +46,6 @@ public class UpdateMethodBinding extends BaseOutcomeReturningMethodBindingWithRe myIdParameterIndex = ParameterUtil.findIdParameterIndex(theMethod, getContext()); } - @Override protected BaseHttpClientInvocation createClientInvocation(Object[] theArgs, IBaseResource theResource) { IIdType idDt = (IIdType) theArgs[myIdParameterIndex]; @@ -91,23 +88,33 @@ public class UpdateMethodBinding extends BaseOutcomeReturningMethodBindingWithRe } @Override - protected void validateResourceIdAndUrlIdForNonConditionalOperation(IBaseResource theResource, String theResourceId, String theUrlId, String theMatchUrl) { + protected void validateResourceIdAndUrlIdForNonConditionalOperation( + IBaseResource theResource, String theResourceId, String theUrlId, String theMatchUrl) { if (isBlank(theMatchUrl)) { if (isBlank(theUrlId)) { - String msg = getContext().getLocalizer().getMessage(BaseOutcomeReturningMethodBindingWithResourceParam.class, "noIdInUrlForUpdate"); + String msg = getContext() + .getLocalizer() + .getMessage(BaseOutcomeReturningMethodBindingWithResourceParam.class, "noIdInUrlForUpdate"); throw new InvalidRequestException(Msg.code(1448) + msg); } if (isBlank(theResourceId)) { - String msg = getContext().getLocalizer().getMessage(BaseOutcomeReturningMethodBindingWithResourceParam.class, "noIdInBodyForUpdate"); + String msg = getContext() + .getLocalizer() + .getMessage(BaseOutcomeReturningMethodBindingWithResourceParam.class, "noIdInBodyForUpdate"); throw new InvalidRequestException(Msg.code(1449) + msg); } if (!theResourceId.equals(theUrlId)) { - String msg = getContext().getLocalizer().getMessage(BaseOutcomeReturningMethodBindingWithResourceParam.class, "incorrectIdForUpdate", theResourceId, theUrlId); + String msg = getContext() + .getLocalizer() + .getMessage( + BaseOutcomeReturningMethodBindingWithResourceParam.class, + "incorrectIdForUpdate", + theResourceId, + theUrlId); throw new InvalidRequestException(Msg.code(1450) + msg); } } else { - theResource.setId((IIdType)null); + theResource.setId((IIdType) null); } - } } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/ValidateMethodBindingDstu2Plus.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/ValidateMethodBindingDstu2Plus.java index 973a3f7a9cb..23f84d1c98d 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/ValidateMethodBindingDstu2Plus.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/method/ValidateMethodBindingDstu2Plus.java @@ -19,13 +19,6 @@ */ package ca.uhn.fhir.rest.client.method; -import java.lang.reflect.Method; -import java.util.ArrayList; -import java.util.List; - -import org.hl7.fhir.instance.model.api.IBaseParameters; -import org.hl7.fhir.instance.model.api.IBaseResource; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.model.valueset.BundleTypeEnum; import ca.uhn.fhir.rest.annotation.Validate; @@ -33,12 +26,32 @@ import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.EncodingEnum; import 
ca.uhn.fhir.rest.client.impl.BaseHttpClientInvocation; import ca.uhn.fhir.util.ParametersUtil; +import org.hl7.fhir.instance.model.api.IBaseParameters; +import org.hl7.fhir.instance.model.api.IBaseResource; + +import java.lang.reflect.Method; +import java.util.ArrayList; +import java.util.List; public class ValidateMethodBindingDstu2Plus extends OperationMethodBinding { - public ValidateMethodBindingDstu2Plus(Class theReturnResourceType, Class theReturnTypeFromRp, Method theMethod, FhirContext theContext, Object theProvider, + public ValidateMethodBindingDstu2Plus( + Class theReturnResourceType, + Class theReturnTypeFromRp, + Method theMethod, + FhirContext theContext, + Object theProvider, Validate theAnnotation) { - super(null, theReturnTypeFromRp, theMethod, theContext, theProvider, true, Constants.EXTOP_VALIDATE, theAnnotation.type(), BundleTypeEnum.COLLECTION); + super( + null, + theReturnTypeFromRp, + theMethod, + theContext, + theProvider, + true, + Constants.EXTOP_VALIDATE, + theAnnotation.type(), + BundleTypeEnum.COLLECTION); List newParams = new ArrayList(); int idx = 0; @@ -49,7 +62,8 @@ public class ValidateMethodBindingDstu2Plus extends OperationMethodBinding { if (String.class.equals(parameterType) || EncodingEnum.class.equals(parameterType)) { newParams.add(next); } else { - OperationParameter parameter = new OperationParameter(theContext, Constants.EXTOP_VALIDATE, Constants.EXTOP_VALIDATE_RESOURCE, 0, 1); + OperationParameter parameter = new OperationParameter( + theContext, Constants.EXTOP_VALIDATE, Constants.EXTOP_VALIDATE_RESOURCE, 0, 1); parameter.initializeTypes(theMethod, null, null, parameterType); newParams.add(parameter); } @@ -62,20 +76,18 @@ public class ValidateMethodBindingDstu2Plus extends OperationMethodBinding { idx++; } setParameters(newParams); - } - - + public static BaseHttpClientInvocation createValidateInvocation(FhirContext theContext, IBaseResource theResource) { - IBaseParameters parameters = (IBaseParameters) theContext.getResourceDefinition("Parameters").newInstance(); + IBaseParameters parameters = + (IBaseParameters) theContext.getResourceDefinition("Parameters").newInstance(); ParametersUtil.addParameterToParameters(theContext, parameters, "resource", theResource); - + String resourceName = theContext.getResourceType(theResource); String resourceId = theResource.getIdElement().getIdPart(); - - BaseHttpClientInvocation retVal = createOperationInvocation(theContext, resourceName, resourceId, null,Constants.EXTOP_VALIDATE, parameters, false); + + BaseHttpClientInvocation retVal = createOperationInvocation( + theContext, resourceName, resourceId, null, Constants.EXTOP_VALIDATE, parameters, false); return retVal; } - - } diff --git a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/tls/TlsAuthenticationSvc.java b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/tls/TlsAuthenticationSvc.java index 547ccbceb39..a829b16f4c5 100644 --- a/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/tls/TlsAuthenticationSvc.java +++ b/hapi-fhir-client/src/main/java/ca/uhn/fhir/rest/client/tls/TlsAuthenticationSvc.java @@ -33,88 +33,88 @@ import org.apache.http.ssl.PrivateKeyStrategy; import org.apache.http.ssl.SSLContextBuilder; import org.apache.http.ssl.SSLContexts; +import java.io.FileInputStream; +import java.io.InputStream; +import java.security.KeyStore; +import java.util.Optional; import javax.annotation.Nonnull; import javax.net.ssl.HostnameVerifier; import javax.net.ssl.SSLContext; import javax.net.ssl.TrustManager; import 
javax.net.ssl.TrustManagerFactory; import javax.net.ssl.X509TrustManager; -import java.io.FileInputStream; -import java.io.InputStream; -import java.security.KeyStore; -import java.util.Optional; import static org.apache.commons.lang3.StringUtils.isNotBlank; public class TlsAuthenticationSvc { - private TlsAuthenticationSvc(){} + private TlsAuthenticationSvc() {} - public static SSLContext createSslContext(@Nonnull TlsAuthentication theTlsAuthentication){ + public static SSLContext createSslContext(@Nonnull TlsAuthentication theTlsAuthentication) { Validate.notNull(theTlsAuthentication, "theTlsAuthentication cannot be null"); - try{ + try { SSLContextBuilder contextBuilder = SSLContexts.custom(); - if(theTlsAuthentication.getKeyStoreInfo().isPresent()){ - KeyStoreInfo keyStoreInfo = theTlsAuthentication.getKeyStoreInfo().get(); + if (theTlsAuthentication.getKeyStoreInfo().isPresent()) { + KeyStoreInfo keyStoreInfo = + theTlsAuthentication.getKeyStoreInfo().get(); PrivateKeyStrategy privateKeyStrategy = null; - if(isNotBlank(keyStoreInfo.getAlias())){ + if (isNotBlank(keyStoreInfo.getAlias())) { privateKeyStrategy = (aliases, socket) -> keyStoreInfo.getAlias(); } KeyStore keyStore = createKeyStore(keyStoreInfo); contextBuilder.loadKeyMaterial(keyStore, keyStoreInfo.getKeyPass(), privateKeyStrategy); } - if(theTlsAuthentication.getTrustStoreInfo().isPresent()){ - TrustStoreInfo trustStoreInfo = theTlsAuthentication.getTrustStoreInfo().get(); + if (theTlsAuthentication.getTrustStoreInfo().isPresent()) { + TrustStoreInfo trustStoreInfo = + theTlsAuthentication.getTrustStoreInfo().get(); KeyStore trustStore = createKeyStore(trustStoreInfo); contextBuilder.loadTrustMaterial(trustStore, TrustSelfSignedStrategy.INSTANCE); } return contextBuilder.build(); - } - catch (Exception e){ - throw new TlsAuthenticationException(Msg.code(2102)+"Failed to create SSLContext", e); + } catch (Exception e) { + throw new TlsAuthenticationException(Msg.code(2102) + "Failed to create SSLContext", e); } } - - public static KeyStore createKeyStore(BaseStoreInfo theStoreInfo){ + + public static KeyStore createKeyStore(BaseStoreInfo theStoreInfo) { try { KeyStore keyStore = KeyStore.getInstance(theStoreInfo.getType().toString()); - if(PathType.RESOURCE.equals(theStoreInfo.getPathType())){ - try(InputStream inputStream = TlsAuthenticationSvc.class.getResourceAsStream(theStoreInfo.getFilePath())){ + if (PathType.RESOURCE.equals(theStoreInfo.getPathType())) { + try (InputStream inputStream = + TlsAuthenticationSvc.class.getResourceAsStream(theStoreInfo.getFilePath())) { validateKeyStoreExists(inputStream); keyStore.load(inputStream, theStoreInfo.getStorePass()); } - } - else if(PathType.FILE.equals(theStoreInfo.getPathType())){ - try(InputStream inputStream = new FileInputStream(theStoreInfo.getFilePath())){ + } else if (PathType.FILE.equals(theStoreInfo.getPathType())) { + try (InputStream inputStream = new FileInputStream(theStoreInfo.getFilePath())) { validateKeyStoreExists(inputStream); keyStore.load(inputStream, theStoreInfo.getStorePass()); } } return keyStore; - } - catch (Exception e){ - throw new TlsAuthenticationException(Msg.code(2103)+"Failed to create KeyStore", e); + } catch (Exception e) { + throw new TlsAuthenticationException(Msg.code(2103) + "Failed to create KeyStore", e); } } - public static void validateKeyStoreExists(InputStream theInputStream){ - if(theInputStream == null){ - throw new TlsAuthenticationException(Msg.code(2116)+"Keystore does not exists"); + public static void 
validateKeyStoreExists(InputStream theInputStream) { + if (theInputStream == null) { + throw new TlsAuthenticationException(Msg.code(2116) + "Keystore does not exists"); } } public static X509TrustManager createTrustManager(Optional theTrustStoreInfo) { try { - TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); + TrustManagerFactory trustManagerFactory = + TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); if (!theTrustStoreInfo.isPresent()) { trustManagerFactory.init((KeyStore) null); // Load Trust Manager Factory with default Java truststore - } - else { + } else { TrustStoreInfo trustStoreInfo = theTrustStoreInfo.get(); KeyStore trustStore = createKeyStore(trustStoreInfo); trustManagerFactory.init(trustStore); @@ -124,14 +124,13 @@ public class TlsAuthenticationSvc { return (X509TrustManager) trustManager; } } - throw new TlsAuthenticationException(Msg.code(2104)+"Could not find X509TrustManager"); - } - catch (Exception e) { - throw new TlsAuthenticationException(Msg.code(2105)+"Failed to create X509TrustManager"); + throw new TlsAuthenticationException(Msg.code(2104) + "Could not find X509TrustManager"); + } catch (Exception e) { + throw new TlsAuthenticationException(Msg.code(2105) + "Failed to create X509TrustManager"); } } - public static HostnameVerifier createHostnameVerifier(Optional theTrustStoreInfo){ + public static HostnameVerifier createHostnameVerifier(Optional theTrustStoreInfo) { return theTrustStoreInfo.isPresent() ? new DefaultHostnameVerifier() : new NoopHostnameVerifier(); } diff --git a/hapi-fhir-converter/src/main/java/ca/uhn/hapi/converters/canonical/VersionCanonicalizer.java b/hapi-fhir-converter/src/main/java/ca/uhn/hapi/converters/canonical/VersionCanonicalizer.java index 2aff63449b0..16c0af9310e 100644 --- a/hapi-fhir-converter/src/main/java/ca/uhn/hapi/converters/canonical/VersionCanonicalizer.java +++ b/hapi-fhir-converter/src/main/java/ca/uhn/hapi/converters/canonical/VersionCanonicalizer.java @@ -50,8 +50,8 @@ import org.hl7.fhir.instance.model.api.IBaseConformance; import org.hl7.fhir.instance.model.api.IBaseDatatype; import org.hl7.fhir.instance.model.api.IBaseParameters; import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.r4.model.AuditEvent; import org.hl7.fhir.instance.model.api.IPrimitiveType; +import org.hl7.fhir.r4.model.AuditEvent; import org.hl7.fhir.r4.model.CodeSystem; import org.hl7.fhir.r4.model.CodeableConcept; import org.hl7.fhir.r4.model.Coding; @@ -65,11 +65,11 @@ import org.hl7.fhir.r5.model.PackageInformation; import org.hl7.fhir.r5.model.SearchParameter; import org.hl7.fhir.r5.model.StructureDefinition; -import javax.annotation.Nonnull; import java.util.Date; import java.util.List; import java.util.Objects; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -133,7 +133,6 @@ public class VersionCanonicalizer { @SuppressWarnings({"EnhancedSwitchMigration"}) - /** * Canonical version: R5 */ @@ -224,13 +223,17 @@ public class VersionCanonicalizer { */ T input = myContext.newTerser().clone(theSearchParameter); - List baseExtensionValues = extractNonStandardSearchParameterListAndClearSourceIfAnyArePresent(input, "base"); - List targetExtensionValues = extractNonStandardSearchParameterListAndClearSourceIfAnyArePresent(input, "target"); + List baseExtensionValues = + extractNonStandardSearchParameterListAndClearSourceIfAnyArePresent(input, "base"); + 
List targetExtensionValues = + extractNonStandardSearchParameterListAndClearSourceIfAnyArePresent(input, "target"); SearchParameter retVal = myStrategy.searchParameterToCanonical(input); - baseExtensionValues.forEach(t -> retVal.addExtension(HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE, new CodeType(t))); - targetExtensionValues.forEach(t -> retVal.addExtension(HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_TARGET_RESOURCE, new CodeType(t))); + baseExtensionValues.forEach( + t -> retVal.addExtension(HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE, new CodeType(t))); + targetExtensionValues.forEach( + t -> retVal.addExtension(HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_TARGET_RESOURCE, new CodeType(t))); return retVal; } @@ -277,19 +280,20 @@ public class VersionCanonicalizer { } @Nonnull - private List extractNonStandardSearchParameterListAndClearSourceIfAnyArePresent(IBaseResource theSearchParameter, String theChildName) { + private List extractNonStandardSearchParameterListAndClearSourceIfAnyArePresent( + IBaseResource theSearchParameter, String theChildName) { - BaseRuntimeChildDefinition child = myContext.getResourceDefinition(theSearchParameter).getChildByName(theChildName); + BaseRuntimeChildDefinition child = + myContext.getResourceDefinition(theSearchParameter).getChildByName(theChildName); List baseList = child.getAccessor().getValues(theSearchParameter); - List baseExtensionValues = baseList - .stream() - .filter(Objects::nonNull) - .filter(t -> t instanceof IPrimitiveType) - .map(t -> (IPrimitiveType) t) - .map(IPrimitiveType::getValueAsString) - .filter(StringUtils::isNotBlank) - .collect(Collectors.toList()); + List baseExtensionValues = baseList.stream() + .filter(Objects::nonNull) + .filter(t -> t instanceof IPrimitiveType) + .map(t -> (IPrimitiveType) t) + .map(IPrimitiveType::getValueAsString) + .filter(StringUtils::isNotBlank) + .collect(Collectors.toList()); if (baseExtensionValues.stream().allMatch(Enumerations.VersionIndependentResourceTypesAll::isValidCode)) { baseExtensionValues.clear(); } else { @@ -331,10 +335,8 @@ public class VersionCanonicalizer { org.hl7.fhir.r5.model.CodeSystem codeSystemToValidatorCanonical(IBaseResource theResource); IBaseResource searchParameterFromCanonical(SearchParameter theResource); - - IBaseResource auditEventFromCanonical(AuditEvent theResource); - + IBaseResource auditEventFromCanonical(AuditEvent theResource); IBaseConformance capabilityStatementFromCanonical(CapabilityStatement theResource); } @@ -365,7 +367,7 @@ public class VersionCanonicalizer { retVal.setDisplay(coding.getDisplay()); retVal.setVersion(coding.getVersion()); if (!coding.getUserSelectedElement().isEmpty()) { - retVal.setUserSelected( coding.getUserSelected() ); + retVal.setUserSelected(coding.getUserSelected()); } return retVal; @@ -397,19 +399,23 @@ public class VersionCanonicalizer { ca.uhn.fhir.model.dstu2.resource.ValueSet input = (ca.uhn.fhir.model.dstu2.resource.ValueSet) theCodeSystem; retVal.setUrl(input.getUrl()); - for (ca.uhn.fhir.model.dstu2.resource.ValueSet.CodeSystemConcept next : input.getCodeSystem().getConcept()) { + for (ca.uhn.fhir.model.dstu2.resource.ValueSet.CodeSystemConcept next : + input.getCodeSystem().getConcept()) { translateAndAddConcept(next, retVal.getConcept()); } return retVal; } - private void translateAndAddConcept(ca.uhn.fhir.model.dstu2.resource.ValueSet.CodeSystemConcept theSource, List theTarget) { + private void translateAndAddConcept( + ca.uhn.fhir.model.dstu2.resource.ValueSet.CodeSystemConcept 
theSource, + List theTarget) { CodeSystem.ConceptDefinitionComponent targetConcept = new CodeSystem.ConceptDefinitionComponent(); targetConcept.setCode(theSource.getCode()); targetConcept.setDisplay(theSource.getDisplay()); - for (ca.uhn.fhir.model.dstu2.resource.ValueSet.CodeSystemConceptDesignation next : theSource.getDesignation()) { + for (ca.uhn.fhir.model.dstu2.resource.ValueSet.CodeSystemConceptDesignation next : + theSource.getDesignation()) { CodeSystem.ConceptDefinitionDesignationComponent targetDesignation = targetConcept.addDesignation(); targetDesignation.setLanguage(next.getLanguage()); targetDesignation.setValue(next.getValue()); @@ -439,8 +445,10 @@ public class VersionCanonicalizer { @Override public SearchParameter searchParameterToCanonical(IBaseResource theSearchParameter) { - org.hl7.fhir.dstu2.model.SearchParameter reencoded = (org.hl7.fhir.dstu2.model.SearchParameter) reencodeToHl7Org(theSearchParameter); - SearchParameter retVal = (SearchParameter) VersionConvertorFactory_10_50.convertResource(reencoded, ADVISOR_10_50); + org.hl7.fhir.dstu2.model.SearchParameter reencoded = + (org.hl7.fhir.dstu2.model.SearchParameter) reencodeToHl7Org(theSearchParameter); + SearchParameter retVal = + (SearchParameter) VersionConvertorFactory_10_50.convertResource(reencoded, ADVISOR_10_50); if (isBlank(retVal.getExpression())) { retVal.setExpression(reencoded.getXpath()); } @@ -480,13 +488,15 @@ public class VersionCanonicalizer { @Override public org.hl7.fhir.r5.model.ValueSet valueSetToValidatorCanonical(IBaseResource theResource) { org.hl7.fhir.dstu2.model.Resource reencoded = reencodeToHl7Org(theResource); - return (org.hl7.fhir.r5.model.ValueSet) VersionConvertorFactory_10_50.convertResource(reencoded, ADVISOR_10_50); + return (org.hl7.fhir.r5.model.ValueSet) + VersionConvertorFactory_10_50.convertResource(reencoded, ADVISOR_10_50); } @Override public org.hl7.fhir.r5.model.CodeSystem codeSystemToValidatorCanonical(IBaseResource theResource) { org.hl7.fhir.dstu2.model.Resource reencoded = reencodeToHl7Org(theResource); - return (org.hl7.fhir.r5.model.CodeSystem) VersionConvertorFactory_10_50.convertResource(reencoded, ADVISOR_10_50); + return (org.hl7.fhir.r5.model.CodeSystem) + VersionConvertorFactory_10_50.convertResource(reencoded, ADVISOR_10_50); } @Override @@ -511,43 +521,51 @@ public class VersionCanonicalizer { if (myHl7OrgStructures) { return (Resource) theInput; } - return (Resource) myDstu2Hl7OrgContext.newJsonParser().parseResource(myDstu2Context.newJsonParser().encodeResourceToString(theInput)); + return (Resource) myDstu2Hl7OrgContext + .newJsonParser() + .parseResource(myDstu2Context.newJsonParser().encodeResourceToString(theInput)); } private IBaseResource reencodeFromHl7Org(Resource theInput) { if (myHl7OrgStructures) { return theInput; } - return myDstu2Context.newJsonParser().parseResource(myDstu2Hl7OrgContext.newJsonParser().encodeResourceToString(theInput)); + return myDstu2Context + .newJsonParser() + .parseResource(myDstu2Hl7OrgContext.newJsonParser().encodeResourceToString(theInput)); } - } private static class Dstu21Strategy implements IStrategy { @Override public CapabilityStatement capabilityStatementToCanonical(IBaseResource theCapabilityStatement) { - return (CapabilityStatement) VersionConvertorFactory_14_50.convertResource((org.hl7.fhir.dstu2016may.model.Resource) theCapabilityStatement, ADVISOR_14_50); + return (CapabilityStatement) VersionConvertorFactory_14_50.convertResource( + (org.hl7.fhir.dstu2016may.model.Resource) theCapabilityStatement, 
ADVISOR_14_50); } @Override public Coding codingToCanonical(IBaseCoding theCoding) { - return (org.hl7.fhir.r4.model.Coding) VersionConvertorFactory_14_40.convertType((org.hl7.fhir.dstu2016may.model.Coding) theCoding, ADVISOR_14_40); + return (org.hl7.fhir.r4.model.Coding) VersionConvertorFactory_14_40.convertType( + (org.hl7.fhir.dstu2016may.model.Coding) theCoding, ADVISOR_14_40); } @Override public CodeableConcept codeableConceptToCanonical(IBaseDatatype theCodeableConcept) { - return (org.hl7.fhir.r4.model.CodeableConcept) VersionConvertorFactory_14_40.convertType((org.hl7.fhir.dstu2016may.model.CodeableConcept) theCodeableConcept, ADVISOR_14_40); + return (org.hl7.fhir.r4.model.CodeableConcept) VersionConvertorFactory_14_40.convertType( + (org.hl7.fhir.dstu2016may.model.CodeableConcept) theCodeableConcept, ADVISOR_14_40); } @Override public ValueSet valueSetToCanonical(IBaseResource theValueSet) { - return (ValueSet) VersionConvertorFactory_14_40.convertResource((org.hl7.fhir.dstu2016may.model.Resource) theValueSet, ADVISOR_14_40); + return (ValueSet) VersionConvertorFactory_14_40.convertResource( + (org.hl7.fhir.dstu2016may.model.Resource) theValueSet, ADVISOR_14_40); } @Override public CodeSystem codeSystemToCanonical(IBaseResource theCodeSystem) { - return (CodeSystem) VersionConvertorFactory_14_40.convertResource((org.hl7.fhir.dstu2016may.model.Resource) theCodeSystem, ADVISOR_14_40); + return (CodeSystem) VersionConvertorFactory_14_40.convertResource( + (org.hl7.fhir.dstu2016may.model.Resource) theCodeSystem, ADVISOR_14_40); } @Override @@ -557,12 +575,14 @@ public class VersionCanonicalizer { @Override public ConceptMap conceptMapToCanonical(IBaseResource theConceptMap) { - return (ConceptMap) VersionConvertorFactory_14_40.convertResource((org.hl7.fhir.dstu2016may.model.Resource) theConceptMap, ADVISOR_14_40); + return (ConceptMap) VersionConvertorFactory_14_40.convertResource( + (org.hl7.fhir.dstu2016may.model.Resource) theConceptMap, ADVISOR_14_40); } @Override public SearchParameter searchParameterToCanonical(IBaseResource theSearchParameter) { - return (SearchParameter) VersionConvertorFactory_14_50.convertResource((org.hl7.fhir.dstu2016may.model.Resource) theSearchParameter, ADVISOR_14_50); + return (SearchParameter) VersionConvertorFactory_14_50.convertResource( + (org.hl7.fhir.dstu2016may.model.Resource) theSearchParameter, ADVISOR_14_50); } @Override @@ -572,7 +592,8 @@ public class VersionCanonicalizer { @Override public StructureDefinition structureDefinitionToCanonical(IBaseResource theResource) { - return (StructureDefinition) VersionConvertorFactory_14_50.convertResource((org.hl7.fhir.dstu2016may.model.Resource) theResource, ADVISOR_14_50); + return (StructureDefinition) VersionConvertorFactory_14_50.convertResource( + (org.hl7.fhir.dstu2016may.model.Resource) theResource, ADVISOR_14_50); } @Override @@ -587,17 +608,20 @@ public class VersionCanonicalizer { @Override public org.hl7.fhir.r5.model.Resource resourceToValidatorCanonical(IBaseResource theResource) { - return VersionConvertorFactory_14_50.convertResource((org.hl7.fhir.dstu2016may.model.Resource) theResource, ADVISOR_14_50); + return VersionConvertorFactory_14_50.convertResource( + (org.hl7.fhir.dstu2016may.model.Resource) theResource, ADVISOR_14_50); } @Override public org.hl7.fhir.r5.model.ValueSet valueSetToValidatorCanonical(IBaseResource theResource) { - return (org.hl7.fhir.r5.model.ValueSet) VersionConvertorFactory_14_50.convertResource((org.hl7.fhir.dstu2016may.model.Resource) theResource, 
ADVISOR_14_50); + return (org.hl7.fhir.r5.model.ValueSet) VersionConvertorFactory_14_50.convertResource( + (org.hl7.fhir.dstu2016may.model.Resource) theResource, ADVISOR_14_50); } @Override public org.hl7.fhir.r5.model.CodeSystem codeSystemToValidatorCanonical(IBaseResource theResource) { - return (org.hl7.fhir.r5.model.CodeSystem) VersionConvertorFactory_14_50.convertResource((org.hl7.fhir.dstu2016may.model.Resource) theResource, ADVISOR_14_50); + return (org.hl7.fhir.r5.model.CodeSystem) VersionConvertorFactory_14_50.convertResource( + (org.hl7.fhir.dstu2016may.model.Resource) theResource, ADVISOR_14_50); } @Override @@ -620,27 +644,32 @@ public class VersionCanonicalizer { @Override public CapabilityStatement capabilityStatementToCanonical(IBaseResource theCapabilityStatement) { - return (CapabilityStatement) VersionConvertorFactory_30_50.convertResource((org.hl7.fhir.dstu3.model.Resource) theCapabilityStatement, ADVISOR_30_50); + return (CapabilityStatement) VersionConvertorFactory_30_50.convertResource( + (org.hl7.fhir.dstu3.model.Resource) theCapabilityStatement, ADVISOR_30_50); } @Override public Coding codingToCanonical(IBaseCoding theCoding) { - return (org.hl7.fhir.r4.model.Coding) VersionConvertorFactory_30_40.convertType((org.hl7.fhir.dstu3.model.Coding) theCoding, ADVISOR_30_40); + return (org.hl7.fhir.r4.model.Coding) VersionConvertorFactory_30_40.convertType( + (org.hl7.fhir.dstu3.model.Coding) theCoding, ADVISOR_30_40); } @Override public CodeableConcept codeableConceptToCanonical(IBaseDatatype theCodeableConcept) { - return (org.hl7.fhir.r4.model.CodeableConcept) VersionConvertorFactory_30_40.convertType((org.hl7.fhir.dstu3.model.CodeableConcept) theCodeableConcept, ADVISOR_30_40); + return (org.hl7.fhir.r4.model.CodeableConcept) VersionConvertorFactory_30_40.convertType( + (org.hl7.fhir.dstu3.model.CodeableConcept) theCodeableConcept, ADVISOR_30_40); } @Override public ValueSet valueSetToCanonical(IBaseResource theValueSet) { - return (ValueSet) VersionConvertorFactory_30_40.convertResource((org.hl7.fhir.dstu3.model.Resource) theValueSet, ADVISOR_30_40); + return (ValueSet) VersionConvertorFactory_30_40.convertResource( + (org.hl7.fhir.dstu3.model.Resource) theValueSet, ADVISOR_30_40); } @Override public CodeSystem codeSystemToCanonical(IBaseResource theCodeSystem) { - return (CodeSystem) VersionConvertorFactory_30_40.convertResource((org.hl7.fhir.dstu3.model.Resource) theCodeSystem, ADVISOR_30_40); + return (CodeSystem) VersionConvertorFactory_30_40.convertResource( + (org.hl7.fhir.dstu3.model.Resource) theCodeSystem, ADVISOR_30_40); } @Override @@ -650,12 +679,14 @@ public class VersionCanonicalizer { @Override public ConceptMap conceptMapToCanonical(IBaseResource theConceptMap) { - return (ConceptMap) VersionConvertorFactory_30_40.convertResource((org.hl7.fhir.dstu3.model.Resource) theConceptMap, ADVISOR_30_40); + return (ConceptMap) VersionConvertorFactory_30_40.convertResource( + (org.hl7.fhir.dstu3.model.Resource) theConceptMap, ADVISOR_30_40); } @Override public SearchParameter searchParameterToCanonical(IBaseResource theSearchParameter) { - return (SearchParameter) VersionConvertorFactory_30_50.convertResource((org.hl7.fhir.dstu3.model.Resource) theSearchParameter, ADVISOR_30_50); + return (SearchParameter) VersionConvertorFactory_30_50.convertResource( + (org.hl7.fhir.dstu3.model.Resource) theSearchParameter, ADVISOR_30_50); } @Override @@ -665,7 +696,8 @@ public class VersionCanonicalizer { @Override public StructureDefinition 
structureDefinitionToCanonical(IBaseResource theResource) { - return (StructureDefinition) VersionConvertorFactory_30_50.convertResource((org.hl7.fhir.dstu3.model.Resource) theResource, ADVISOR_30_50); + return (StructureDefinition) VersionConvertorFactory_30_50.convertResource( + (org.hl7.fhir.dstu3.model.Resource) theResource, ADVISOR_30_50); } @Override @@ -680,17 +712,20 @@ public class VersionCanonicalizer { @Override public org.hl7.fhir.r5.model.Resource resourceToValidatorCanonical(IBaseResource theResource) { - return VersionConvertorFactory_30_50.convertResource((org.hl7.fhir.dstu3.model.Resource) theResource, ADVISOR_30_50); + return VersionConvertorFactory_30_50.convertResource( + (org.hl7.fhir.dstu3.model.Resource) theResource, ADVISOR_30_50); } @Override public org.hl7.fhir.r5.model.ValueSet valueSetToValidatorCanonical(IBaseResource theResource) { - return (org.hl7.fhir.r5.model.ValueSet) VersionConvertorFactory_30_50.convertResource((org.hl7.fhir.dstu3.model.Resource) theResource, ADVISOR_30_50); + return (org.hl7.fhir.r5.model.ValueSet) VersionConvertorFactory_30_50.convertResource( + (org.hl7.fhir.dstu3.model.Resource) theResource, ADVISOR_30_50); } @Override public org.hl7.fhir.r5.model.CodeSystem codeSystemToValidatorCanonical(IBaseResource theResource) { - return (org.hl7.fhir.r5.model.CodeSystem) VersionConvertorFactory_30_50.convertResource((org.hl7.fhir.dstu3.model.Resource) theResource, ADVISOR_30_50); + return (org.hl7.fhir.r5.model.CodeSystem) VersionConvertorFactory_30_50.convertResource( + (org.hl7.fhir.dstu3.model.Resource) theResource, ADVISOR_30_50); } @Override @@ -712,7 +747,8 @@ public class VersionCanonicalizer { private static class R4Strategy implements IStrategy { @Override public CapabilityStatement capabilityStatementToCanonical(IBaseResource theCapabilityStatement) { - return (CapabilityStatement) VersionConvertorFactory_40_50.convertResource((org.hl7.fhir.r4.model.Resource) theCapabilityStatement, ADVISOR_40_50); + return (CapabilityStatement) VersionConvertorFactory_40_50.convertResource( + (org.hl7.fhir.r4.model.Resource) theCapabilityStatement, ADVISOR_40_50); } @Override @@ -747,7 +783,8 @@ public class VersionCanonicalizer { @Override public SearchParameter searchParameterToCanonical(IBaseResource theSearchParameter) { - return (SearchParameter) VersionConvertorFactory_40_50.convertResource((org.hl7.fhir.r4.model.Resource) theSearchParameter, ADVISOR_40_50); + return (SearchParameter) VersionConvertorFactory_40_50.convertResource( + (org.hl7.fhir.r4.model.Resource) theSearchParameter, ADVISOR_40_50); } @Override @@ -757,7 +794,8 @@ public class VersionCanonicalizer { @Override public StructureDefinition structureDefinitionToCanonical(IBaseResource theResource) { - return (StructureDefinition) VersionConvertorFactory_40_50.convertResource((org.hl7.fhir.r4.model.Resource) theResource, ADVISOR_40_50); + return (StructureDefinition) VersionConvertorFactory_40_50.convertResource( + (org.hl7.fhir.r4.model.Resource) theResource, ADVISOR_40_50); } @Override @@ -772,17 +810,20 @@ public class VersionCanonicalizer { @Override public org.hl7.fhir.r5.model.Resource resourceToValidatorCanonical(IBaseResource theResource) { - return VersionConvertorFactory_40_50.convertResource((org.hl7.fhir.r4.model.Resource) theResource, ADVISOR_40_50); + return VersionConvertorFactory_40_50.convertResource( + (org.hl7.fhir.r4.model.Resource) theResource, ADVISOR_40_50); } @Override public org.hl7.fhir.r5.model.ValueSet valueSetToValidatorCanonical(IBaseResource 
theResource) { - return (org.hl7.fhir.r5.model.ValueSet) VersionConvertorFactory_40_50.convertResource((org.hl7.fhir.r4.model.Resource) theResource, ADVISOR_40_50); + return (org.hl7.fhir.r5.model.ValueSet) VersionConvertorFactory_40_50.convertResource( + (org.hl7.fhir.r4.model.Resource) theResource, ADVISOR_40_50); } @Override public org.hl7.fhir.r5.model.CodeSystem codeSystemToValidatorCanonical(IBaseResource theResource) { - return (org.hl7.fhir.r5.model.CodeSystem) VersionConvertorFactory_40_50.convertResource((org.hl7.fhir.r4.model.Resource) theResource, ADVISOR_40_50); + return (org.hl7.fhir.r5.model.CodeSystem) VersionConvertorFactory_40_50.convertResource( + (org.hl7.fhir.r4.model.Resource) theResource, ADVISOR_40_50); } @Override @@ -799,66 +840,82 @@ public class VersionCanonicalizer { public IBaseConformance capabilityStatementFromCanonical(CapabilityStatement theResource) { return (IBaseConformance) VersionConvertorFactory_40_50.convertResource(theResource, ADVISOR_40_50); } - } private static class R4BStrategy implements IStrategy { @Override public CapabilityStatement capabilityStatementToCanonical(IBaseResource theCapabilityStatement) { - return (CapabilityStatement) VersionConvertorFactory_43_50.convertResource((org.hl7.fhir.r4b.model.Resource) theCapabilityStatement, ADVISOR_43_50); + return (CapabilityStatement) VersionConvertorFactory_43_50.convertResource( + (org.hl7.fhir.r4b.model.Resource) theCapabilityStatement, ADVISOR_43_50); } @Override public Coding codingToCanonical(IBaseCoding theCoding) { - org.hl7.fhir.r5.model.Coding r5coding = (org.hl7.fhir.r5.model.Coding) VersionConvertorFactory_43_50.convertType((org.hl7.fhir.r4b.model.Coding) theCoding, ADVISOR_43_50); + org.hl7.fhir.r5.model.Coding r5coding = (org.hl7.fhir.r5.model.Coding) + VersionConvertorFactory_43_50.convertType((org.hl7.fhir.r4b.model.Coding) theCoding, ADVISOR_43_50); return (org.hl7.fhir.r4.model.Coding) VersionConvertorFactory_40_50.convertType(r5coding, ADVISOR_40_50); } @Override public CodeableConcept codeableConceptToCanonical(IBaseDatatype theCodeableConcept) { - org.hl7.fhir.r5.model.CodeableConcept r5coding = (org.hl7.fhir.r5.model.CodeableConcept) VersionConvertorFactory_43_50.convertType((org.hl7.fhir.r4b.model.CodeableConcept) theCodeableConcept, ADVISOR_43_50); - return (org.hl7.fhir.r4.model.CodeableConcept) VersionConvertorFactory_40_50.convertType(r5coding, ADVISOR_40_50); + org.hl7.fhir.r5.model.CodeableConcept r5coding = + (org.hl7.fhir.r5.model.CodeableConcept) VersionConvertorFactory_43_50.convertType( + (org.hl7.fhir.r4b.model.CodeableConcept) theCodeableConcept, ADVISOR_43_50); + return (org.hl7.fhir.r4.model.CodeableConcept) + VersionConvertorFactory_40_50.convertType(r5coding, ADVISOR_40_50); } @Override public ValueSet valueSetToCanonical(IBaseResource theValueSet) { - org.hl7.fhir.r5.model.ValueSet valueSetR5 = (org.hl7.fhir.r5.model.ValueSet) VersionConvertorFactory_43_50.convertResource((org.hl7.fhir.r4b.model.Resource) theValueSet, ADVISOR_43_50); - return (org.hl7.fhir.r4.model.ValueSet) VersionConvertorFactory_40_50.convertResource(valueSetR5, ADVISOR_40_50); + org.hl7.fhir.r5.model.ValueSet valueSetR5 = + (org.hl7.fhir.r5.model.ValueSet) VersionConvertorFactory_43_50.convertResource( + (org.hl7.fhir.r4b.model.Resource) theValueSet, ADVISOR_43_50); + return (org.hl7.fhir.r4.model.ValueSet) + VersionConvertorFactory_40_50.convertResource(valueSetR5, ADVISOR_40_50); } @Override public CodeSystem codeSystemToCanonical(IBaseResource theCodeSystem) { - 
org.hl7.fhir.r5.model.CodeSystem codeSystemR5 = (org.hl7.fhir.r5.model.CodeSystem) VersionConvertorFactory_43_50.convertResource((org.hl7.fhir.r4b.model.Resource) theCodeSystem, ADVISOR_43_50); - return (org.hl7.fhir.r4.model.CodeSystem) VersionConvertorFactory_40_50.convertResource(codeSystemR5, ADVISOR_40_50); + org.hl7.fhir.r5.model.CodeSystem codeSystemR5 = + (org.hl7.fhir.r5.model.CodeSystem) VersionConvertorFactory_43_50.convertResource( + (org.hl7.fhir.r4b.model.Resource) theCodeSystem, ADVISOR_43_50); + return (org.hl7.fhir.r4.model.CodeSystem) + VersionConvertorFactory_40_50.convertResource(codeSystemR5, ADVISOR_40_50); } @Override public IBaseResource valueSetFromCanonical(ValueSet theValueSet) { - org.hl7.fhir.r5.model.ValueSet valueSetR5 = (org.hl7.fhir.r5.model.ValueSet) VersionConvertorFactory_40_50.convertResource(theValueSet, ADVISOR_40_50); + org.hl7.fhir.r5.model.ValueSet valueSetR5 = (org.hl7.fhir.r5.model.ValueSet) + VersionConvertorFactory_40_50.convertResource(theValueSet, ADVISOR_40_50); return VersionConvertorFactory_43_50.convertResource(valueSetR5, ADVISOR_43_50); } @Override public ConceptMap conceptMapToCanonical(IBaseResource theConceptMap) { - org.hl7.fhir.r5.model.ConceptMap conceptMapR5 = (org.hl7.fhir.r5.model.ConceptMap) VersionConvertorFactory_43_50.convertResource((org.hl7.fhir.r4b.model.Resource) theConceptMap, ADVISOR_43_50); + org.hl7.fhir.r5.model.ConceptMap conceptMapR5 = + (org.hl7.fhir.r5.model.ConceptMap) VersionConvertorFactory_43_50.convertResource( + (org.hl7.fhir.r4b.model.Resource) theConceptMap, ADVISOR_43_50); return (ConceptMap) VersionConvertorFactory_40_50.convertResource(conceptMapR5, ADVISOR_40_50); } @Override public SearchParameter searchParameterToCanonical(IBaseResource theSearchParameter) { - return (SearchParameter) VersionConvertorFactory_43_50.convertResource((org.hl7.fhir.r4b.model.Resource) theSearchParameter, ADVISOR_43_50); + return (SearchParameter) VersionConvertorFactory_43_50.convertResource( + (org.hl7.fhir.r4b.model.Resource) theSearchParameter, ADVISOR_43_50); } @Override public IBaseParameters parametersFromCanonical(Parameters theParameters) { - org.hl7.fhir.r5.model.Parameters parametersR5 = (org.hl7.fhir.r5.model.Parameters) VersionConvertorFactory_40_50.convertResource(theParameters, ADVISOR_40_50); + org.hl7.fhir.r5.model.Parameters parametersR5 = (org.hl7.fhir.r5.model.Parameters) + VersionConvertorFactory_40_50.convertResource(theParameters, ADVISOR_40_50); return (IBaseParameters) VersionConvertorFactory_43_50.convertResource(parametersR5, ADVISOR_43_50); } @Override public StructureDefinition structureDefinitionToCanonical(IBaseResource theResource) { - return (StructureDefinition) VersionConvertorFactory_43_50.convertResource((org.hl7.fhir.r4b.model.Resource) theResource, ADVISOR_43_50); + return (StructureDefinition) VersionConvertorFactory_43_50.convertResource( + (org.hl7.fhir.r4b.model.Resource) theResource, ADVISOR_43_50); } @Override @@ -873,17 +930,20 @@ public class VersionCanonicalizer { @Override public org.hl7.fhir.r5.model.Resource resourceToValidatorCanonical(IBaseResource theResource) { - return VersionConvertorFactory_43_50.convertResource((org.hl7.fhir.r4b.model.Resource) theResource, ADVISOR_43_50); + return VersionConvertorFactory_43_50.convertResource( + (org.hl7.fhir.r4b.model.Resource) theResource, ADVISOR_43_50); } @Override public org.hl7.fhir.r5.model.ValueSet valueSetToValidatorCanonical(IBaseResource theResource) { - return (org.hl7.fhir.r5.model.ValueSet) 
VersionConvertorFactory_43_50.convertResource((org.hl7.fhir.r4b.model.Resource) theResource, ADVISOR_43_50); + return (org.hl7.fhir.r5.model.ValueSet) VersionConvertorFactory_43_50.convertResource( + (org.hl7.fhir.r4b.model.Resource) theResource, ADVISOR_43_50); } @Override public org.hl7.fhir.r5.model.CodeSystem codeSystemToValidatorCanonical(IBaseResource theResource) { - return (org.hl7.fhir.r5.model.CodeSystem) VersionConvertorFactory_43_50.convertResource((org.hl7.fhir.r4b.model.Resource) theResource, ADVISOR_43_50); + return (org.hl7.fhir.r5.model.CodeSystem) VersionConvertorFactory_43_50.convertResource( + (org.hl7.fhir.r4b.model.Resource) theResource, ADVISOR_43_50); } @Override @@ -893,7 +953,8 @@ public class VersionCanonicalizer { @Override public IBaseResource auditEventFromCanonical(AuditEvent theResource) { - org.hl7.fhir.r5.model.AuditEvent r5 = (org.hl7.fhir.r5.model.AuditEvent) VersionConvertorFactory_40_50.convertResource(theResource, ADVISOR_40_50); + org.hl7.fhir.r5.model.AuditEvent r5 = (org.hl7.fhir.r5.model.AuditEvent) + VersionConvertorFactory_40_50.convertResource(theResource, ADVISOR_40_50); return VersionConvertorFactory_43_50.convertResource(r5, ADVISOR_43_50); } @@ -901,7 +962,6 @@ public class VersionCanonicalizer { public IBaseConformance capabilityStatementFromCanonical(CapabilityStatement theResource) { return (IBaseConformance) VersionConvertorFactory_43_50.convertResource(theResource, ADVISOR_43_50); } - } private static class R5Strategy implements IStrategy { @@ -913,22 +973,26 @@ public class VersionCanonicalizer { @Override public Coding codingToCanonical(IBaseCoding theCoding) { - return (org.hl7.fhir.r4.model.Coding) VersionConvertorFactory_40_50.convertType((org.hl7.fhir.r5.model.Coding) theCoding, ADVISOR_40_50); + return (org.hl7.fhir.r4.model.Coding) + VersionConvertorFactory_40_50.convertType((org.hl7.fhir.r5.model.Coding) theCoding, ADVISOR_40_50); } @Override public CodeableConcept codeableConceptToCanonical(IBaseDatatype theCodeableConcept) { - return (org.hl7.fhir.r4.model.CodeableConcept) VersionConvertorFactory_40_50.convertType((org.hl7.fhir.r5.model.CodeableConcept) theCodeableConcept, ADVISOR_40_50); + return (org.hl7.fhir.r4.model.CodeableConcept) VersionConvertorFactory_40_50.convertType( + (org.hl7.fhir.r5.model.CodeableConcept) theCodeableConcept, ADVISOR_40_50); } @Override public ValueSet valueSetToCanonical(IBaseResource theValueSet) { - return (ValueSet) VersionConvertorFactory_40_50.convertResource((org.hl7.fhir.r5.model.ValueSet) theValueSet, ADVISOR_40_50); + return (ValueSet) VersionConvertorFactory_40_50.convertResource( + (org.hl7.fhir.r5.model.ValueSet) theValueSet, ADVISOR_40_50); } @Override public CodeSystem codeSystemToCanonical(IBaseResource theCodeSystem) { - return (CodeSystem) VersionConvertorFactory_40_50.convertResource((org.hl7.fhir.r5.model.CodeSystem) theCodeSystem, ADVISOR_40_50); + return (CodeSystem) VersionConvertorFactory_40_50.convertResource( + (org.hl7.fhir.r5.model.CodeSystem) theCodeSystem, ADVISOR_40_50); } @Override @@ -938,7 +1002,8 @@ public class VersionCanonicalizer { @Override public ConceptMap conceptMapToCanonical(IBaseResource theConceptMap) { - return (ConceptMap) VersionConvertorFactory_40_50.convertResource((org.hl7.fhir.r5.model.ConceptMap) theConceptMap, ADVISOR_40_50); + return (ConceptMap) VersionConvertorFactory_40_50.convertResource( + (org.hl7.fhir.r5.model.ConceptMap) theConceptMap, ADVISOR_40_50); } @Override @@ -995,14 +1060,5 @@ public class VersionCanonicalizer { public 
IBaseConformance capabilityStatementFromCanonical(CapabilityStatement theResource) { return theResource; } - } - - } - - - - - - diff --git a/hapi-fhir-converter/src/main/java/ca/uhn/hapi/converters/server/VersionedApiConverterInterceptor.java b/hapi-fhir-converter/src/main/java/ca/uhn/hapi/converters/server/VersionedApiConverterInterceptor.java index e4ee5517b16..c009350abc5 100644 --- a/hapi-fhir-converter/src/main/java/ca/uhn/hapi/converters/server/VersionedApiConverterInterceptor.java +++ b/hapi-fhir-converter/src/main/java/ca/uhn/hapi/converters/server/VersionedApiConverterInterceptor.java @@ -40,9 +40,9 @@ import org.hl7.fhir.dstu3.model.Resource; import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.instance.model.api.IBaseResource; +import java.util.StringTokenizer; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import java.util.StringTokenizer; import static org.apache.commons.lang3.StringUtils.defaultString; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -57,7 +57,6 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; * Versioned API features. *

    */ - @Interceptor(order = AuthorizationConstants.ORDER_CONVERTER_INTERCEPTOR) public class VersionedApiConverterInterceptor extends InterceptorAdapter { private final FhirContext myCtxDstu2; @@ -74,7 +73,12 @@ public class VersionedApiConverterInterceptor extends InterceptorAdapter { } @Override - public boolean outgoingResponse(RequestDetails theRequestDetails, ResponseDetails theResponseDetails, HttpServletRequest theServletRequest, HttpServletResponse theServletResponse) throws AuthenticationException { + public boolean outgoingResponse( + RequestDetails theRequestDetails, + ResponseDetails theResponseDetails, + HttpServletRequest theServletRequest, + HttpServletResponse theServletResponse) + throws AuthenticationException { IBaseResource responseResource = theResponseDetails.getResponseResource(); if (responseResource == null) { return true; @@ -133,7 +137,9 @@ public class VersionedApiConverterInterceptor extends InterceptorAdapter { private org.hl7.fhir.dstu2.model.Resource toDstu2(IBaseResource theResponseResource) { if (theResponseResource instanceof IResource) { - return (org.hl7.fhir.dstu2.model.Resource) myCtxDstu2Hl7Org.newJsonParser().parseResource(myCtxDstu2.newJsonParser().encodeResourceToString(theResponseResource)); + return (org.hl7.fhir.dstu2.model.Resource) myCtxDstu2Hl7Org + .newJsonParser() + .parseResource(myCtxDstu2.newJsonParser().encodeResourceToString(theResponseResource)); } return (org.hl7.fhir.dstu2.model.Resource) theResponseResource; } diff --git a/hapi-fhir-converter/src/main/java/org/hl7/fhir/converter/NullVersionConverterAdvisor10_30.java b/hapi-fhir-converter/src/main/java/org/hl7/fhir/converter/NullVersionConverterAdvisor10_30.java index 4696f2b6ba6..e890353c895 100644 --- a/hapi-fhir-converter/src/main/java/org/hl7/fhir/converter/NullVersionConverterAdvisor10_30.java +++ b/hapi-fhir-converter/src/main/java/org/hl7/fhir/converter/NullVersionConverterAdvisor10_30.java @@ -28,7 +28,6 @@ import javax.annotation.Nullable; public class NullVersionConverterAdvisor10_30 extends BaseAdvisor_10_30 { - @Nullable @Override public CodeSystem getCodeSystem(@Nullable ValueSet theValueSet) throws FHIRException { @@ -36,7 +35,8 @@ public class NullVersionConverterAdvisor10_30 extends BaseAdvisor_10_30 { } @Override - public void handleCodeSystem(@Nullable CodeSystem theCodeSystem, @Nullable ValueSet theValueSet) throws FHIRException { + public void handleCodeSystem(@Nullable CodeSystem theCodeSystem, @Nullable ValueSet theValueSet) + throws FHIRException { // nothing } } diff --git a/hapi-fhir-converter/src/main/java/org/hl7/fhir/converter/NullVersionConverterAdvisor10_40.java b/hapi-fhir-converter/src/main/java/org/hl7/fhir/converter/NullVersionConverterAdvisor10_40.java index 3d31e45ea68..edeb30824c3 100644 --- a/hapi-fhir-converter/src/main/java/org/hl7/fhir/converter/NullVersionConverterAdvisor10_40.java +++ b/hapi-fhir-converter/src/main/java/org/hl7/fhir/converter/NullVersionConverterAdvisor10_40.java @@ -28,7 +28,6 @@ import javax.annotation.Nullable; public class NullVersionConverterAdvisor10_40 extends BaseAdvisor_10_40 { - @Nullable @Override public CodeSystem getCodeSystem(@Nullable ValueSet theValueSet) throws FHIRException { @@ -36,7 +35,8 @@ public class NullVersionConverterAdvisor10_40 extends BaseAdvisor_10_40 { } @Override - public void handleCodeSystem(@Nullable CodeSystem theCodeSystem, @Nullable ValueSet theValueSet) throws FHIRException { + public void handleCodeSystem(@Nullable CodeSystem theCodeSystem, @Nullable ValueSet theValueSet) + throws 
FHIRException { // nothing } } diff --git a/hapi-fhir-docs/src/main/java/ChangelogMigrator.java b/hapi-fhir-docs/src/main/java/ChangelogMigrator.java index f76d910b4ae..21e83891489 100644 --- a/hapi-fhir-docs/src/main/java/ChangelogMigrator.java +++ b/hapi-fhir-docs/src/main/java/ChangelogMigrator.java @@ -21,7 +21,6 @@ import ca.uhn.fhir.i18n.Msg; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import com.fasterxml.jackson.dataformat.yaml.YAMLGenerator; -import org.apache.commons.compress.archivers.zip.UnsupportedZipFeatureException; import org.apache.commons.io.FileUtils; import org.jdom2.Content; import org.jdom2.Element; @@ -32,16 +31,15 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.xml.sax.SAXException; -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; -import java.util.Map; +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -58,8 +56,8 @@ public class ChangelogMigrator { org.jdom2.Document document = null; DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); - //If want to make namespace aware. - //factory.setNamespaceAware(true); + // If want to make namespace aware. + // factory.setNamespaceAware(true); DocumentBuilder documentBuilder = factory.newDocumentBuilder(); org.w3c.dom.Document w3cDocument = documentBuilder.parse(new File("src/changes/changes.xml")); document = new DOMBuilder().build(w3cDocument); @@ -67,7 +65,6 @@ public class ChangelogMigrator { int actionCount = 0; int releaseCount = 0; - Element docElement = document.getRootElement(); Element bodyElement = docElement.getChild("body", NS); List releases = bodyElement.getChildren("release", NS); @@ -105,7 +102,9 @@ public class ChangelogMigrator { throw new Error(Msg.code(630) + "Unknown type: " + type); } - String issue = nextAction.getAttribute("issue") != null ? nextAction.getAttribute("issue").getValue() : null; + String issue = nextAction.getAttribute("issue") != null + ? 
nextAction.getAttribute("issue").getValue() + : null; if (isNotBlank(issue)) { itemMap.put("issue", issue); } @@ -126,7 +125,8 @@ public class ChangelogMigrator { actionCount++; } - String releaseDir = "hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/" + version.replace(".", "_"); + String releaseDir = + "hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/" + version.replace(".", "_"); File releaseDirFile = new File(releaseDir); FileUtils.forceMkdir(releaseDirFile); File file = new File(releaseDirFile, "changes.yaml"); @@ -137,7 +137,6 @@ public class ChangelogMigrator { ObjectMapper mapper = new ObjectMapper(yf); mapper.writeValue(writer, items); - } file = new File(releaseDirFile, "version.yaml"); @@ -152,15 +151,9 @@ public class ChangelogMigrator { versionMap.put("codename", description); } mapper.writeValue(writer, versionMap); - } - } ourLog.info("Found {} releases and {} actions", releaseCount, actionCount); - } - } - - diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/AuthorizationInterceptors.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/AuthorizationInterceptors.java index 1dcd46a3d26..94a2becaeac 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/AuthorizationInterceptors.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/AuthorizationInterceptors.java @@ -25,13 +25,13 @@ import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.api.HookParams; import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.interceptor.api.Pointcut; -import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; import ca.uhn.fhir.rest.annotation.ConditionalUrlParam; import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.ResourceParam; import ca.uhn.fhir.rest.annotation.Update; import ca.uhn.fhir.rest.api.MethodOutcome; import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; import ca.uhn.fhir.rest.server.IResourceProvider; import ca.uhn.fhir.rest.server.RestfulServer; import ca.uhn.fhir.rest.server.exceptions.AuthenticationException; @@ -54,195 +54,211 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; */ public class AuthorizationInterceptors { - public class PatientResourceProvider implements IResourceProvider - { + public class PatientResourceProvider implements IResourceProvider { - @Override - public Class getResourceType() { - return Patient.class; - } + @Override + public Class getResourceType() { + return Patient.class; + } - public MethodOutcome create(@ResourceParam Patient thePatient, RequestDetails theRequestDetails) { + public MethodOutcome create(@ResourceParam Patient thePatient, RequestDetails theRequestDetails) { - return new MethodOutcome(); // populate this - } + return new MethodOutcome(); // populate this + } + } - } - - //START SNIPPET: patientAndAdmin - @SuppressWarnings("ConstantConditions") + // START SNIPPET: patientAndAdmin + @SuppressWarnings("ConstantConditions") public class PatientAndAdminAuthorizationInterceptor extends AuthorizationInterceptor { - @Override - public List buildRuleList(RequestDetails theRequestDetails) { + @Override + public List buildRuleList(RequestDetails theRequestDetails) { - // Process authorization header - The following is a fake - // implementation. Obviously we'd want something more real - // for a production scenario. 
- // - // In this basic example we have two hardcoded bearer tokens, - // one which is for a user that has access to one patient, and - // another that has full access. - IdType userIdPatientId = null; - boolean userIsAdmin = false; - String authHeader = theRequestDetails.getHeader("Authorization"); - if ("Bearer dfw98h38r".equals(authHeader)) { - // This user has access only to Patient/1 resources - userIdPatientId = new IdType("Patient", 1L); - } else if ("Bearer 39ff939jgg".equals(authHeader)) { - // This user has access to everything - userIsAdmin = true; - } else { - // Throw an HTTP 401 - throw new AuthenticationException(Msg.code(644) + "Missing or invalid Authorization header value"); - } + // Process authorization header - The following is a fake + // implementation. Obviously we'd want something more real + // for a production scenario. + // + // In this basic example we have two hardcoded bearer tokens, + // one which is for a user that has access to one patient, and + // another that has full access. + IdType userIdPatientId = null; + boolean userIsAdmin = false; + String authHeader = theRequestDetails.getHeader("Authorization"); + if ("Bearer dfw98h38r".equals(authHeader)) { + // This user has access only to Patient/1 resources + userIdPatientId = new IdType("Patient", 1L); + } else if ("Bearer 39ff939jgg".equals(authHeader)) { + // This user has access to everything + userIsAdmin = true; + } else { + // Throw an HTTP 401 + throw new AuthenticationException(Msg.code(644) + "Missing or invalid Authorization header value"); + } - // If the user is a specific patient, we create the following rule chain: - // Allow the user to read anything in their own patient compartment - // Allow the user to write anything in their own patient compartment - // If a client request doesn't pass either of the above, deny it - if (userIdPatientId != null) { - return new RuleBuilder() - .allow().read().allResources().inCompartment("Patient", userIdPatientId).andThen() - .allow().write().allResources().inCompartment("Patient", userIdPatientId).andThen() - .denyAll() - .build(); - } + // If the user is a specific patient, we create the following rule chain: + // Allow the user to read anything in their own patient compartment + // Allow the user to write anything in their own patient compartment + // If a client request doesn't pass either of the above, deny it + if (userIdPatientId != null) { + return new RuleBuilder() + .allow() + .read() + .allResources() + .inCompartment("Patient", userIdPatientId) + .andThen() + .allow() + .write() + .allResources() + .inCompartment("Patient", userIdPatientId) + .andThen() + .denyAll() + .build(); + } - // If the user is an admin, allow everything - if (userIsAdmin) { - return new RuleBuilder() - .allowAll() - .build(); - } + // If the user is an admin, allow everything + if (userIsAdmin) { + return new RuleBuilder().allowAll().build(); + } - // By default, deny everything. This should never get hit, but it's - // good to be defensive - return new RuleBuilder() - .denyAll() - .build(); - } - } - //END SNIPPET: patientAndAdmin + // By default, deny everything. 
This should never get hit, but it's + // good to be defensive + return new RuleBuilder().denyAll().build(); + } + } + // END SNIPPET: patientAndAdmin public void ruleFiltering() { RestfulServer restfulServer = new RestfulServer(); AuthorizationInterceptor theAuthorizationInterceptor = new AuthorizationInterceptor(); - //START SNIPPET: ruleFiltering + // START SNIPPET: ruleFiltering ConsentInterceptor consentInterceptor = new ConsentInterceptor(); consentInterceptor.registerConsentService(new RuleFilteringConsentService(theAuthorizationInterceptor)); restfulServer.registerInterceptor(consentInterceptor); - //END SNIPPET: ruleFiltering + // END SNIPPET: ruleFiltering } - - //START SNIPPET: conditionalUpdate - @Update() - public MethodOutcome update( - @IdParam IdType theId, - @ResourceParam Patient theResource, - @ConditionalUrlParam String theConditionalUrl, - ServletRequestDetails theRequestDetails, + // START SNIPPET: conditionalUpdate + @Update() + public MethodOutcome update( + @IdParam IdType theId, + @ResourceParam Patient theResource, + @ConditionalUrlParam String theConditionalUrl, + ServletRequestDetails theRequestDetails, IInterceptorBroadcaster theInterceptorBroadcaster) { - // If we're processing a conditional URL... - if (isNotBlank(theConditionalUrl)) { + // If we're processing a conditional URL... + if (isNotBlank(theConditionalUrl)) { - // Pretend we've done the conditional processing. Now let's - // notify the interceptors that an update has been performed - // and supply the actual ID that's being updated - IdType actual = new IdType("Patient", "1123"); + // Pretend we've done the conditional processing. Now let's + // notify the interceptors that an update has been performed + // and supply the actual ID that's being updated + IdType actual = new IdType("Patient", "1123"); + } - } + // In a real server, perhaps we would process the conditional + // request differently and follow a separate path. Either way, + // let's pretend there is some storage code here. + theResource.setId(theId.withVersion("2")); - // In a real server, perhaps we would process the conditional - // request differently and follow a separate path. Either way, - // let's pretend there is some storage code here. - theResource.setId(theId.withVersion("2")); - - // One TransactionDetails object should be created for each FHIR operation. Interceptors + // One TransactionDetails object should be created for each FHIR operation. Interceptors // may use it for getting/setting details about the running transaction. TransactionDetails transactionDetails = new TransactionDetails(); - // Notify the interceptor framework when we're about to perform an update. This is + // Notify the interceptor framework when we're about to perform an update. This is // useful as the authorization interceptor will pick this event up and use it // to factor into a decision about whether the operation should be allowed to proceed. 
IBaseResource previousContents = theResource; IBaseResource newContents = theResource; HookParams params = new HookParams() - .add(IBaseResource.class, previousContents) - .add(IBaseResource.class, newContents) - .add(RequestDetails.class, theRequestDetails) - .add(ServletRequestDetails.class, theRequestDetails) - .add(TransactionDetails.class, transactionDetails); + .add(IBaseResource.class, previousContents) + .add(IBaseResource.class, newContents) + .add(RequestDetails.class, theRequestDetails) + .add(ServletRequestDetails.class, theRequestDetails) + .add(TransactionDetails.class, transactionDetails); theInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED, params); - MethodOutcome retVal = new MethodOutcome(); - retVal.setCreated(true); - retVal.setResource(theResource); - return retVal; - } - //END SNIPPET: conditionalUpdate + MethodOutcome retVal = new MethodOutcome(); + retVal.setCreated(true); + retVal.setResource(theResource); + return retVal; + } + // END SNIPPET: conditionalUpdate public void authorizeTenantAction() { - //START SNIPPET: authorizeTenantAction + // START SNIPPET: authorizeTenantAction new AuthorizationInterceptor(PolicyEnum.DENY) { @Override public List buildRuleList(RequestDetails theRequestDetails) { return new RuleBuilder() - .allow().read().resourcesOfType(Patient.class).withAnyId().forTenantIds("TENANTA").andThen() - .build(); + .allow() + .read() + .resourcesOfType(Patient.class) + .withAnyId() + .forTenantIds("TENANTA") + .andThen() + .build(); } }; - //END SNIPPET: authorizeTenantAction + // END SNIPPET: authorizeTenantAction - - //START SNIPPET: patchAll + // START SNIPPET: patchAll new AuthorizationInterceptor(PolicyEnum.DENY) { @Override public List buildRuleList(RequestDetails theRequestDetails) { return new RuleBuilder() - // Authorize patch requests - .allow().patch().allRequests().andThen() - // Authorize actual writes that patch may perform - .allow().write().allResources().inCompartment("Patient", new IdType("Patient/123")).andThen() - .build(); + // Authorize patch requests + .allow() + .patch() + .allRequests() + .andThen() + // Authorize actual writes that patch may perform + .allow() + .write() + .allResources() + .inCompartment("Patient", new IdType("Patient/123")) + .andThen() + .build(); } }; - //END SNIPPET: patchAll + // END SNIPPET: patchAll - - //START SNIPPET: bulkExport + // START SNIPPET: bulkExport new AuthorizationInterceptor(PolicyEnum.DENY) { @Override public List buildRuleList(RequestDetails theRequestDetails) { return new RuleBuilder() - .allow().bulkExport().systemExport().withResourceTypes(Lists.newArrayList("Patient", "Encounter", "Observation")) - .build(); + .allow() + .bulkExport() + .systemExport() + .withResourceTypes(Lists.newArrayList("Patient", "Encounter", "Observation")) + .build(); } }; - //END SNIPPET: bulkExport + // END SNIPPET: bulkExport - //START SNIPPET: advancedCompartment + // START SNIPPET: advancedCompartment new AuthorizationInterceptor(PolicyEnum.DENY) { @Override public List buildRuleList(RequestDetails theRequestDetails) { - AdditionalCompartmentSearchParameters additionalSearchParams = new AdditionalCompartmentSearchParameters(); + AdditionalCompartmentSearchParameters additionalSearchParams = + new AdditionalCompartmentSearchParameters(); additionalSearchParams.addSearchParameters("device:patient", "device:subject"); return new RuleBuilder() - .allow().read().allResources().inCompartmentWithAdditionalSearchParams("Patient", new IdType("Patient/123"), 
additionalSearchParams) - .build(); + .allow() + .read() + .allResources() + .inCompartmentWithAdditionalSearchParams( + "Patient", new IdType("Patient/123"), additionalSearchParams) + .build(); } }; - //END SNIPPET: advancedCompartment - + // END SNIPPET: advancedCompartment } - - //START SNIPPET: narrowing + // START SNIPPET: narrowing public class MyPatientSearchNarrowingInterceptor extends SearchNarrowingInterceptor { /** @@ -262,9 +278,7 @@ public class AuthorizationInterceptors { if ("Bearer dfw98h38r".equals(authHeader)) { // This user will have access to two compartments - return new AuthorizedList() - .addCompartment("Patient/123") - .addCompartment("Patient/456"); + return new AuthorizedList().addCompartment("Patient/123").addCompartment("Patient/456"); } else if ("Bearer 39ff939jgg".equals(authHeader)) { @@ -274,25 +288,23 @@ public class AuthorizationInterceptors { } else { throw new AuthenticationException("Unknown bearer token"); - } - } - } - //END SNIPPET: narrowing + // END SNIPPET: narrowing @SuppressWarnings("SpellCheckingInspection") public void rsNarrowing() { RestfulServer restfulServer = new RestfulServer(); - //START SNIPPET: rsnarrowing + // START SNIPPET: rsnarrowing SearchNarrowingInterceptor narrowingInterceptor = new SearchNarrowingInterceptor() { @Override protected AuthorizedList buildAuthorizedList(RequestDetails theRequestDetails) { // Your rules go here return new AuthorizedList() - .addCodeInValueSet("Observation", "code", "http://hl7.org/fhir/ValueSet/observation-vitalsignresult"); + .addCodeInValueSet( + "Observation", "code", "http://hl7.org/fhir/ValueSet/observation-vitalsignresult"); } }; restfulServer.registerInterceptor(narrowingInterceptor); @@ -300,20 +312,18 @@ public class AuthorizationInterceptors { // Create a consent service for search narrowing IValidationSupport validationSupport = null; // This needs to be populated FhirContext searchParamRegistry = null; // This needs to be populated - SearchNarrowingConsentService consentService = new SearchNarrowingConsentService(validationSupport, searchParamRegistry); + SearchNarrowingConsentService consentService = + new SearchNarrowingConsentService(validationSupport, searchParamRegistry); // Create a ConsentIntereptor to apply the ConsentService and register it with the server ConsentInterceptor consentInterceptor = new ConsentInterceptor(); consentInterceptor.registerConsentService(consentService); restfulServer.registerInterceptor(consentInterceptor); - - //END SNIPPET: rsnarrowing + // END SNIPPET: rsnarrowing } - - - //START SNIPPET: narrowingByCode + // START SNIPPET: narrowingByCode public class MyCodeSearchNarrowingInterceptor extends SearchNarrowingInterceptor { /** @@ -329,22 +339,20 @@ public class AuthorizationInterceptors { if ("Bearer dfw98h38r".equals(authHeader)) { return new AuthorizedList() - // When searching for Observations, narrow the search to only include Observations - // with a code indicating that it is a Vital Signs Observation - .addCodeInValueSet("Observation", "code", "http://hl7.org/fhir/ValueSet/observation-vitalsignresult") - // When searching for Encounters, narrow the search to exclude Encounters where - // the Encounter class is in a ValueSet containing forbidden class codes - .addCodeNotInValueSet("Encounter", "class", "http://my-forbidden-encounter-classes"); + // When searching for Observations, narrow the search to only include Observations + // with a code indicating that it is a Vital Signs Observation + .addCodeInValueSet( + "Observation", "code", 
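[Editor's sketch, not part of the formatting change] The narrowing examples above scope searches either to patient compartments or to value-set membership. A small standalone variant of the compartment case, assuming SearchNarrowingInterceptor and AuthorizedList sit in the auth interceptor package used elsewhere in this patch (the bearer token is a placeholder):

import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.exceptions.AuthenticationException;
import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizedList; // package location assumed
import ca.uhn.fhir.rest.server.interceptor.auth.SearchNarrowingInterceptor; // package location assumed

public class CompartmentNarrowingExample {

    public void configure(RestfulServer theServer) {
        theServer.registerInterceptor(new SearchNarrowingInterceptor() {
            @Override
            protected AuthorizedList buildAuthorizedList(RequestDetails theRequestDetails) {
                String authHeader = theRequestDetails.getHeader("Authorization");
                if ("Bearer dfw98h38r".equals(authHeader)) {
                    // Searches performed by this user are automatically scoped
                    // to these two patient compartments
                    return new AuthorizedList()
                            .addCompartment("Patient/123")
                            .addCompartment("Patient/456");
                }
                throw new AuthenticationException("Unknown bearer token");
            }
        });
    }
}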
"http://hl7.org/fhir/ValueSet/observation-vitalsignresult") + // When searching for Encounters, narrow the search to exclude Encounters where + // the Encounter class is in a ValueSet containing forbidden class codes + .addCodeNotInValueSet("Encounter", "class", "http://my-forbidden-encounter-classes"); } else { throw new AuthenticationException("Unknown bearer token"); - } - } - } - //END SNIPPET: narrowingByCode + // END SNIPPET: narrowingByCode } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/AuthorizingTesterUiClientFactory.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/AuthorizingTesterUiClientFactory.java index 55aaf1d226f..f40e1a0ef6e 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/AuthorizingTesterUiClientFactory.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/AuthorizingTesterUiClientFactory.java @@ -28,15 +28,15 @@ import javax.servlet.http.HttpServletRequest; public class AuthorizingTesterUiClientFactory implements ITestingUiClientFactory { - @Override - public IGenericClient newClient(FhirContext theFhirContext, HttpServletRequest theRequest, String theServerBaseUrl) { - // Create a client - IGenericClient client = theFhirContext.newRestfulGenericClient(theServerBaseUrl); - - // Register an interceptor which adds credentials - client.registerInterceptor(new BasicAuthInterceptor("someusername", "somepassword")); - - return client; - } + @Override + public IGenericClient newClient( + FhirContext theFhirContext, HttpServletRequest theRequest, String theServerBaseUrl) { + // Create a client + IGenericClient client = theFhirContext.newRestfulGenericClient(theServerBaseUrl); + // Register an interceptor which adds credentials + client.registerInterceptor(new BasicAuthInterceptor("someusername", "somepassword")); + + return client; + } } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/BalpExample.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/BalpExample.java index b9d70798872..f04b586d38f 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/BalpExample.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/BalpExample.java @@ -20,21 +20,21 @@ package ca.uhn.hapi.fhir.docs; import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.storage.interceptor.balp.AsyncMemoryQueueBackedFhirClientBalpSink; -import ca.uhn.fhir.storage.interceptor.balp.IBalpAuditContextServices; -import ca.uhn.fhir.storage.interceptor.balp.IBalpAuditEventSink; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.client.interceptor.BasicAuthInterceptor; import ca.uhn.fhir.rest.server.RestfulServer; +import ca.uhn.fhir.storage.interceptor.balp.AsyncMemoryQueueBackedFhirClientBalpSink; +import ca.uhn.fhir.storage.interceptor.balp.IBalpAuditContextServices; +import ca.uhn.fhir.storage.interceptor.balp.IBalpAuditEventSink; import org.hl7.fhir.r4.model.Reference; +import java.util.List; import javax.annotation.Nonnull; import javax.servlet.ServletException; -import java.util.List; public class BalpExample { - //START SNIPPET: contextService + // START SNIPPET: contextService public class ExampleBalpAuditContextServices implements IBalpAuditContextServices { /** @@ -46,9 +46,7 @@ public class BalpExample { public Reference getAgentClientWho(RequestDetails theRequestDetails) { Reference client = new Reference(); client.setDisplay("Growth Chart Application"); - client.getIdentifier() - .setSystem("http://example.org/clients") - .setValue("growth_chart"); + 
client.getIdentifier().setSystem("http://example.org/clients").setValue("growth_chart"); return client; } @@ -60,15 +58,13 @@ public class BalpExample { @Override public Reference getAgentUserWho(RequestDetails theRequestDetails) { Reference user = new Reference(); - user.getIdentifier() - .setSystem("http://example.org/users") - .setValue("my_username"); + user.getIdentifier().setSystem("http://example.org/users").setValue("my_username"); return user; } } - //END SNIPPET: contextService + // END SNIPPET: contextService - //START SNIPPET: server + // START SNIPPET: server public class MyServer extends RestfulServer { /** @@ -93,14 +89,13 @@ public class BalpExample { FhirContext fhirContext = FhirContext.forR4Cached(); String targetUrl = "http://my.fhir.server/baseR4"; List clientInterceptors = List.of( - // We'll register an auth interceptor against the sink FHIR client so that - // credentials get passed to the target server. Of course in a real implementation - // you should never hard code credentials like this. - new BasicAuthInterceptor("username", "password") - ); - IBalpAuditEventSink eventSink = new AsyncMemoryQueueBackedFhirClientBalpSink(fhirContext, targetUrl, clientInterceptors); - + // We'll register an auth interceptor against the sink FHIR client so that + // credentials get passed to the target server. Of course in a real implementation + // you should never hard code credentials like this. + new BasicAuthInterceptor("username", "password")); + IBalpAuditEventSink eventSink = + new AsyncMemoryQueueBackedFhirClientBalpSink(fhirContext, targetUrl, clientInterceptors); } } - //END SNIPPET: server + // END SNIPPET: server } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/BundleBuilderExamples.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/BundleBuilderExamples.java index 1e8df7d0509..f4eaf67d1e6 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/BundleBuilderExamples.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/BundleBuilderExamples.java @@ -43,7 +43,7 @@ public class BundleBuilderExamples { private IGenericClient myFhirClient; public void update() throws FHIRException { - //START SNIPPET: update + // START SNIPPET: update // Create a TransactionBuilder BundleBuilder builder = new BundleBuilder(myFhirContext); @@ -56,12 +56,13 @@ public class BundleBuilderExamples { builder.addTransactionUpdateEntry(patient); // Execute the transaction - IBaseBundle outcome = myFhirClient.transaction().withBundle(builder.getBundle()).execute(); - //END SNIPPET: update + IBaseBundle outcome = + myFhirClient.transaction().withBundle(builder.getBundle()).execute(); + // END SNIPPET: update } public void updateConditional() throws FHIRException { - //START SNIPPET: updateConditional + // START SNIPPET: updateConditional // Create a TransactionBuilder BundleBuilder builder = new BundleBuilder(myFhirContext); @@ -74,12 +75,13 @@ public class BundleBuilderExamples { builder.addTransactionUpdateEntry(patient).conditional("Patient?identifier=http://foo|bar"); // Execute the transaction - IBaseBundle outcome = myFhirClient.transaction().withBundle(builder.getBundle()).execute(); - //END SNIPPET: updateConditional + IBaseBundle outcome = + myFhirClient.transaction().withBundle(builder.getBundle()).execute(); + // END SNIPPET: updateConditional } public void create() throws FHIRException { - //START SNIPPET: create + // START SNIPPET: create // Create a TransactionBuilder BundleBuilder builder = new BundleBuilder(myFhirContext); @@ -91,12 +93,13 @@ 
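[Editor's sketch, not part of the formatting change] The MyServer hunk above constructs a BALP audit-event sink that forwards AuditEvents to a second FHIR endpoint through an internal client. A condensed sketch of that construction; the target URL and credentials are placeholders, the element type of the interceptor list is assumed, and registration of the sink into the BALP interceptor is outside this excerpt so it is not shown:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.interceptor.BasicAuthInterceptor;
import ca.uhn.fhir.storage.interceptor.balp.AsyncMemoryQueueBackedFhirClientBalpSink;
import ca.uhn.fhir.storage.interceptor.balp.IBalpAuditEventSink;

import java.util.List;

public class BalpSinkFactory {

    public IBalpAuditEventSink createSink() {
        FhirContext fhirContext = FhirContext.forR4Cached();
        String targetUrl = "http://my.fhir.server/baseR4";

        // Interceptors registered against the sink's internal FHIR client; never
        // hard-code credentials like this outside of an example
        List<Object> clientInterceptors = List.of(new BasicAuthInterceptor("username", "password"));

        return new AsyncMemoryQueueBackedFhirClientBalpSink(fhirContext, targetUrl, clientInterceptors);
    }
}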
public class BundleBuilderExamples { builder.addTransactionCreateEntry(patient); // Execute the transaction - IBaseBundle outcome = myFhirClient.transaction().withBundle(builder.getBundle()).execute(); - //END SNIPPET: create + IBaseBundle outcome = + myFhirClient.transaction().withBundle(builder.getBundle()).execute(); + // END SNIPPET: create } public void createConditional() throws FHIRException { - //START SNIPPET: createConditional + // START SNIPPET: createConditional // Create a TransactionBuilder BundleBuilder builder = new BundleBuilder(myFhirContext); @@ -109,12 +112,13 @@ public class BundleBuilderExamples { builder.addTransactionCreateEntry(patient).conditional("Patient?identifier=http://foo|bar"); // Execute the transaction - IBaseBundle outcome = myFhirClient.transaction().withBundle(builder.getBundle()).execute(); - //END SNIPPET: createConditional + IBaseBundle outcome = + myFhirClient.transaction().withBundle(builder.getBundle()).execute(); + // END SNIPPET: createConditional } public void patch() throws FHIRException { - //START SNIPPET: patch + // START SNIPPET: patch // Create a FHIR Patch object Parameters patch = new Parameters(); @@ -133,12 +137,13 @@ public class BundleBuilderExamples { builder.addTransactionFhirPatchEntry(targetId, patch); // Execute the transaction - IBaseBundle outcome = myFhirClient.transaction().withBundle(builder.getBundle()).execute(); - //END SNIPPET: patch + IBaseBundle outcome = + myFhirClient.transaction().withBundle(builder.getBundle()).execute(); + // END SNIPPET: patch } public void patchConditional() throws FHIRException { - //START SNIPPET: patchConditional + // START SNIPPET: patchConditional // Create a FHIR Patch object Parameters patch = new Parameters(); @@ -154,21 +159,20 @@ public class BundleBuilderExamples { String conditionalUrl = "Patient?identifier=http://foo|123"; builder.addTransactionFhirPatchEntry(patch).conditional(conditionalUrl); - // Execute the transaction - IBaseBundle outcome = myFhirClient.transaction().withBundle(builder.getBundle()).execute(); - //END SNIPPET: patchConditional + IBaseBundle outcome = + myFhirClient.transaction().withBundle(builder.getBundle()).execute(); + // END SNIPPET: patchConditional } public void customizeBundle() throws FHIRException { - //START SNIPPET: customizeBundle + // START SNIPPET: customizeBundle // Create a TransactionBuilder BundleBuilder builder = new BundleBuilder(myFhirContext); // Set bundle type to be searchset - builder - .setBundleField("type", "searchset") - .setBundleField("id", UUID.randomUUID().toString()) - .setMetaField("lastUpdated", builder.newPrimitive("instant", new Date())); + builder.setBundleField("type", "searchset") + .setBundleField("id", UUID.randomUUID().toString()) + .setMetaField("lastUpdated", builder.newPrimitive("instant", new Date())); // Create bundle entry IBase entry = builder.addEntry(); @@ -183,7 +187,6 @@ public class BundleBuilderExamples { IBase search = builder.addSearch(entry); builder.setSearchField(search, "mode", "match"); builder.setSearchField(search, "score", builder.newPrimitive("decimal", BigDecimal.ONE)); - //END SNIPPET: customizeBundle + // END SNIPPET: customizeBundle } - } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/BundleFetcher.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/BundleFetcher.java index 31589f888e4..c653155a87c 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/BundleFetcher.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/BundleFetcher.java @@ -46,20 
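[Editor's sketch, not part of the formatting change] The BundleBuilderExamples hunks all follow the same shape: build a resource, add it to a BundleBuilder as a transaction entry (optionally conditional), and post the bundle. A self-contained sketch of the conditional-create flavour (identifier system and value are placeholders):

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.util.BundleBuilder;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.r4.model.Patient;

public class TransactionBuilderSketch {

    public IBaseBundle createPatientIfMissing(FhirContext theContext, IGenericClient theClient) {
        // Build a resource to write
        Patient patient = new Patient();
        patient.setActive(true);
        patient.addIdentifier().setSystem("http://foo").setValue("bar");

        // Add it to a transaction bundle as a conditional create: the server
        // only creates it if no Patient with that identifier exists yet
        BundleBuilder builder = new BundleBuilder(theContext);
        builder.addTransactionCreateEntry(patient).conditional("Patient?identifier=http://foo|bar");

        // Post the transaction and return the response bundle
        return theClient.transaction().withBundle(builder.getBundle()).execute();
    }
}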
+46,16 @@ public class BundleFetcher { List patients = new ArrayList<>(); // We'll do a search for all Patients and extract the first page - Bundle bundle = client - .search() - .forResource(Patient.class) - .where(Patient.NAME.matches().value("smith")) - .returnBundle(Bundle.class) - .execute(); + Bundle bundle = client.search() + .forResource(Patient.class) + .where(Patient.NAME.matches().value("smith")) + .returnBundle(Bundle.class) + .execute(); patients.addAll(BundleUtil.toListOfResources(ctx, bundle)); // Load the subsequent pages while (bundle.getLink(IBaseBundle.LINK_NEXT) != null) { - bundle = client - .loadPage() - .next(bundle) - .execute(); + bundle = client.loadPage().next(bundle).execute(); patients.addAll(BundleUtil.toListOfResources(ctx, bundle)); } @@ -67,5 +63,3 @@ public class BundleFetcher { // END SNIPPET: loadAll } } - - diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ClientExamples.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ClientExamples.java index d65ebe6231f..61a271eadcd 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ClientExamples.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ClientExamples.java @@ -39,30 +39,29 @@ import org.hl7.fhir.r4.model.Patient; public class ClientExamples { - public interface IPatientClient extends IBasicClient { - // nothing yet - } + public interface IPatientClient extends IBasicClient { + // nothing yet + } - @SuppressWarnings("unused") - public void createProxy() { - // START SNIPPET: proxy - FhirContext ctx = FhirContext.forR4(); + @SuppressWarnings("unused") + public void createProxy() { + // START SNIPPET: proxy + FhirContext ctx = FhirContext.forR4(); - // Set connections to access the network via the HTTP proxy at - // example.com : 8888 - ctx.getRestfulClientFactory().setProxy("example.com", 8888); + // Set connections to access the network via the HTTP proxy at + // example.com : 8888 + ctx.getRestfulClientFactory().setProxy("example.com", 8888); - // If the proxy requires authentication, use the following as well - ctx.getRestfulClientFactory().setProxyCredentials("theUsername", "thePassword"); - - // Create the client - IGenericClient genericClient = ctx.newRestfulGenericClient("http://localhost:9999/fhir"); - // END SNIPPET: proxy - } + // If the proxy requires authentication, use the following as well + ctx.getRestfulClientFactory().setProxyCredentials("theUsername", "thePassword"); + // Create the client + IGenericClient genericClient = ctx.newRestfulGenericClient("http://localhost:9999/fhir"); + // END SNIPPET: proxy + } - public void tenantId() { - // START SNIPPET: tenantId + public void tenantId() { + // START SNIPPET: tenantId FhirContext ctx = FhirContext.forR4(); // Create the client @@ -74,34 +73,34 @@ public class ClientExamples { // Read from tenant A tenantSelection.setTenantId("TENANT-A"); - Patient patientA = genericClient.read().resource(Patient.class).withId("123").execute(); + Patient patientA = + genericClient.read().resource(Patient.class).withId("123").execute(); // Read from tenant B tenantSelection.setTenantId("TENANT-B"); - Patient patientB = genericClient.read().resource(Patient.class).withId("456").execute(); + Patient patientB = + genericClient.read().resource(Patient.class).withId("456").execute(); // END SNIPPET: tenantId } + @SuppressWarnings("unused") + public void processMessage() { + // START SNIPPET: processMessage + FhirContext ctx = FhirContext.forDstu3(); - @SuppressWarnings("unused") - public void processMessage() { - // 
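[Editor's sketch, not part of the formatting change] BundleFetcher demonstrates the standard paging loop: run a search, collect the resources, then follow "next" links until the server stops returning them. A standalone version of that loop using only calls shown in the hunk:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.util.BundleUtil;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Patient;

import java.util.ArrayList;
import java.util.List;

public class PagingSketch {

    public List<IBaseResource> fetchAllPatientsNamed(FhirContext theContext, IGenericClient theClient, String theName) {
        List<IBaseResource> results = new ArrayList<>();

        // First page
        Bundle page = theClient.search()
                .forResource(Patient.class)
                .where(Patient.NAME.matches().value(theName))
                .returnBundle(Bundle.class)
                .execute();
        results.addAll(BundleUtil.toListOfResources(theContext, page));

        // Follow "next" links until there are no more pages
        while (page.getLink(IBaseBundle.LINK_NEXT) != null) {
            page = theClient.loadPage().next(page).execute();
            results.addAll(BundleUtil.toListOfResources(theContext, page));
        }
        return results;
    }
}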
START SNIPPET: processMessage - FhirContext ctx = FhirContext.forDstu3(); + // Create the client + IGenericClient client = ctx.newRestfulGenericClient("http://localhost:9999/fhir"); - // Create the client - IGenericClient client = ctx.newRestfulGenericClient("http://localhost:9999/fhir"); - - Bundle bundle = new Bundle(); - // ..populate the bundle.. - - Bundle response = client - .operation() - .processMessage() // New operation for sending messages - .setMessageBundle(bundle) - .asynchronous(Bundle.class) - .execute(); - // END SNIPPET: processMessage - } + Bundle bundle = new Bundle(); + // ..populate the bundle.. + + Bundle response = client.operation() + .processMessage() // New operation for sending messages + .setMessageBundle(bundle) + .asynchronous(Bundle.class) + .execute(); + // END SNIPPET: processMessage + } @SuppressWarnings("unused") public void cacheControl() { @@ -114,86 +113,85 @@ public class ClientExamples { // ..populate the bundle.. // START SNIPPET: cacheControl - Bundle response = client - .search() - .forResource(Patient.class) - .returnBundle(Bundle.class) - .cacheControl(new CacheControlDirective().setNoCache(true)) // <-- add a directive - .execute(); + Bundle response = client.search() + .forResource(Patient.class) + .returnBundle(Bundle.class) + .cacheControl(new CacheControlDirective().setNoCache(true)) // <-- add a directive + .execute(); // END SNIPPET: cacheControl } - @SuppressWarnings("unused") - public void createOkHttp() { - // START SNIPPET: okhttp - FhirContext ctx = FhirContext.forDstu3(); + @SuppressWarnings("unused") + public void createOkHttp() { + // START SNIPPET: okhttp + FhirContext ctx = FhirContext.forDstu3(); - // Use OkHttp - ctx.setRestfulClientFactory(new OkHttpRestfulClientFactory(ctx)); - - // Create the client - IGenericClient genericClient = ctx.newRestfulGenericClient("http://localhost:9999/fhir"); - // END SNIPPET: okhttp - } + // Use OkHttp + ctx.setRestfulClientFactory(new OkHttpRestfulClientFactory(ctx)); - @SuppressWarnings("unused") - public void createTimeouts() { - // START SNIPPET: timeouts - FhirContext ctx = FhirContext.forR4(); + // Create the client + IGenericClient genericClient = ctx.newRestfulGenericClient("http://localhost:9999/fhir"); + // END SNIPPET: okhttp + } - // Set how long to try and establish the initial TCP connection (in ms) - ctx.getRestfulClientFactory().setConnectTimeout(20 * 1000); - - // Set how long to block for individual read/write operations (in ms) - ctx.getRestfulClientFactory().setSocketTimeout(20 * 1000); - - // Create the client - IGenericClient genericClient = ctx.newRestfulGenericClient("http://localhost:9999/fhir"); - // END SNIPPET: timeouts - } + @SuppressWarnings("unused") + public void createTimeouts() { + // START SNIPPET: timeouts + FhirContext ctx = FhirContext.forR4(); - @SuppressWarnings("unused") - public void createSecurity() { - // START SNIPPET: security - // Create a context and get the client factory so it can be configured - FhirContext ctx = FhirContext.forR4(); - IRestfulClientFactory clientFactory = ctx.getRestfulClientFactory(); + // Set how long to try and establish the initial TCP connection (in ms) + ctx.getRestfulClientFactory().setConnectTimeout(20 * 1000); - // Create an HTTP basic auth interceptor - String username = "foobar"; - String password = "boobear"; - IClientInterceptor authInterceptor = new BasicAuthInterceptor(username, password); + // Set how long to block for individual read/write operations (in ms) + 
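[Editor's sketch, not part of the formatting change] The proxy and timeout hunks above both act on the restful client factory before any client is created. A combined sketch of that configuration (host, port, credentials and timeout values are placeholders):

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;

public class ClientFactoryConfig {

    public IGenericClient createConfiguredClient() {
        FhirContext ctx = FhirContext.forR4();

        // Route outbound HTTP through a proxy, with optional proxy credentials
        ctx.getRestfulClientFactory().setProxy("example.com", 8888);
        ctx.getRestfulClientFactory().setProxyCredentials("theUsername", "thePassword");

        // Connection-establishment and socket read/write timeouts, in milliseconds
        ctx.getRestfulClientFactory().setConnectTimeout(20 * 1000);
        ctx.getRestfulClientFactory().setSocketTimeout(20 * 1000);

        // Clients created from this context inherit the factory settings
        return ctx.newRestfulGenericClient("http://localhost:9999/fhir");
    }
}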
ctx.getRestfulClientFactory().setSocketTimeout(20 * 1000); + + // Create the client + IGenericClient genericClient = ctx.newRestfulGenericClient("http://localhost:9999/fhir"); + // END SNIPPET: timeouts + } + + @SuppressWarnings("unused") + public void createSecurity() { + // START SNIPPET: security + // Create a context and get the client factory so it can be configured + FhirContext ctx = FhirContext.forR4(); + IRestfulClientFactory clientFactory = ctx.getRestfulClientFactory(); + + // Create an HTTP basic auth interceptor + String username = "foobar"; + String password = "boobear"; + IClientInterceptor authInterceptor = new BasicAuthInterceptor(username, password); // If you're using an annotation client, use this style to // register it - IPatientClient annotationClient = ctx.newRestfulClient(IPatientClient.class, "http://localhost:9999/fhir"); + IPatientClient annotationClient = ctx.newRestfulClient(IPatientClient.class, "http://localhost:9999/fhir"); annotationClient.registerInterceptor(authInterceptor); // If you're using a generic client, use this instead - IGenericClient genericClient = ctx.newRestfulGenericClient("http://localhost:9999/fhir"); - genericClient.registerInterceptor(authInterceptor); - // END SNIPPET: security - } + IGenericClient genericClient = ctx.newRestfulGenericClient("http://localhost:9999/fhir"); + genericClient.registerInterceptor(authInterceptor); + // END SNIPPET: security + } - @SuppressWarnings("unused") - public void createCookie() { - // START SNIPPET: cookie - // Create a context and get the client factory so it can be configured - FhirContext ctx = FhirContext.forR4(); - IRestfulClientFactory clientFactory = ctx.getRestfulClientFactory(); + @SuppressWarnings("unused") + public void createCookie() { + // START SNIPPET: cookie + // Create a context and get the client factory so it can be configured + FhirContext ctx = FhirContext.forR4(); + IRestfulClientFactory clientFactory = ctx.getRestfulClientFactory(); - // Create a cookie interceptor. This cookie will have the name "mycookie" and - // the value "Chips Ahoy" - CookieInterceptor interceptor = new CookieInterceptor("mycookie=Chips Ahoy"); + // Create a cookie interceptor. 
This cookie will have the name "mycookie" and + // the value "Chips Ahoy" + CookieInterceptor interceptor = new CookieInterceptor("mycookie=Chips Ahoy"); - // Register the interceptor with your client (either style) - IPatientClient annotationClient = ctx.newRestfulClient(IPatientClient.class, "http://localhost:9999/fhir"); - annotationClient.registerInterceptor(interceptor); + // Register the interceptor with your client (either style) + IPatientClient annotationClient = ctx.newRestfulClient(IPatientClient.class, "http://localhost:9999/fhir"); + annotationClient.registerInterceptor(interceptor); - IGenericClient genericClient = ctx.newRestfulGenericClient("http://localhost:9999/fhir"); - annotationClient.registerInterceptor(interceptor); - // END SNIPPET: cookie - } + IGenericClient genericClient = ctx.newRestfulGenericClient("http://localhost:9999/fhir"); + annotationClient.registerInterceptor(interceptor); + // END SNIPPET: cookie + } @SuppressWarnings("unused") public void addHeaders() { @@ -203,7 +201,7 @@ public class ClientExamples { IRestfulClientFactory clientFactory = ctx.getRestfulClientFactory(); // Create a client - IGenericClient client = ctx.newRestfulGenericClient( "http://localhost:9999/fhir"); + IGenericClient client = ctx.newRestfulGenericClient("http://localhost:9999/fhir"); // Register an additional headers interceptor and add one header to it AdditionalRequestHeadersInterceptor interceptor = new AdditionalRequestHeadersInterceptor(); @@ -215,88 +213,86 @@ public class ClientExamples { // END SNIPPET: addHeaders // START SNIPPET: addHeadersNoInterceptor - Patient p = client - .read() - .resource(Patient.class) - .withId(123L) - .withAdditionalHeader("X-Message", "Help I'm a Bug") - .execute(); + Patient p = client.read() + .resource(Patient.class) + .withId(123L) + .withAdditionalHeader("X-Message", "Help I'm a Bug") + .execute(); // END SNIPPET: addHeadersNoInterceptor } - @SuppressWarnings("unused") - public void gzip() { - // START SNIPPET: gzip - // Create a context and get the client factory so it can be configured - FhirContext ctx = FhirContext.forR4(); - IRestfulClientFactory clientFactory = ctx.getRestfulClientFactory(); + @SuppressWarnings("unused") + public void gzip() { + // START SNIPPET: gzip + // Create a context and get the client factory so it can be configured + FhirContext ctx = FhirContext.forR4(); + IRestfulClientFactory clientFactory = ctx.getRestfulClientFactory(); - // Register the interceptor with your client (either style) - IPatientClient annotationClient = ctx.newRestfulClient(IPatientClient.class, "http://localhost:9999/fhir"); - annotationClient.registerInterceptor(new GZipContentInterceptor()); - // END SNIPPET: gzip - } - - @SuppressWarnings("unused") - public void createSecurityBearer() { - // START SNIPPET: securityBearer - // Create a context and get the client factory so it can be configured - FhirContext ctx = FhirContext.forR4(); - IRestfulClientFactory clientFactory = ctx.getRestfulClientFactory(); + // Register the interceptor with your client (either style) + IPatientClient annotationClient = ctx.newRestfulClient(IPatientClient.class, "http://localhost:9999/fhir"); + annotationClient.registerInterceptor(new GZipContentInterceptor()); + // END SNIPPET: gzip + } - // In reality the token would have come from an authorization server - String token = "3w03fj.r3r3t"; - - BearerTokenAuthInterceptor authInterceptor = new BearerTokenAuthInterceptor(token); + @SuppressWarnings("unused") + public void createSecurityBearer() { + // START 
SNIPPET: securityBearer + // Create a context and get the client factory so it can be configured + FhirContext ctx = FhirContext.forR4(); + IRestfulClientFactory clientFactory = ctx.getRestfulClientFactory(); - // Register the interceptor with your client (either style) - IPatientClient annotationClient = ctx.newRestfulClient(IPatientClient.class, "http://localhost:9999/fhir"); - annotationClient.registerInterceptor(authInterceptor); + // In reality the token would have come from an authorization server + String token = "3w03fj.r3r3t"; - IGenericClient genericClient = ctx.newRestfulGenericClient("http://localhost:9999/fhir"); - annotationClient.registerInterceptor(authInterceptor); - // END SNIPPET: securityBearer - } + BearerTokenAuthInterceptor authInterceptor = new BearerTokenAuthInterceptor(token); - @SuppressWarnings("unused") - public void createLogging() { - { - // START SNIPPET: logging - // Create a context and get the client factory so it can be configured - FhirContext ctx = FhirContext.forR4(); - IRestfulClientFactory clientFactory = ctx.getRestfulClientFactory(); + // Register the interceptor with your client (either style) + IPatientClient annotationClient = ctx.newRestfulClient(IPatientClient.class, "http://localhost:9999/fhir"); + annotationClient.registerInterceptor(authInterceptor); - // Create a logging interceptor - LoggingInterceptor loggingInterceptor = new LoggingInterceptor(); + IGenericClient genericClient = ctx.newRestfulGenericClient("http://localhost:9999/fhir"); + annotationClient.registerInterceptor(authInterceptor); + // END SNIPPET: securityBearer + } - // Optionally you may configure the interceptor (by default only - // summary info is logged) - loggingInterceptor.setLogRequestSummary(true); - loggingInterceptor.setLogRequestBody(true); + @SuppressWarnings("unused") + public void createLogging() { + { + // START SNIPPET: logging + // Create a context and get the client factory so it can be configured + FhirContext ctx = FhirContext.forR4(); + IRestfulClientFactory clientFactory = ctx.getRestfulClientFactory(); - // Register the interceptor with your client (either style) - IPatientClient annotationClient = ctx.newRestfulClient(IPatientClient.class, "http://localhost:9999/fhir"); - annotationClient.registerInterceptor(loggingInterceptor); + // Create a logging interceptor + LoggingInterceptor loggingInterceptor = new LoggingInterceptor(); - IGenericClient genericClient = ctx.newRestfulGenericClient("http://localhost:9999/fhir"); - genericClient.registerInterceptor(loggingInterceptor); - // END SNIPPET: logging - } + // Optionally you may configure the interceptor (by default only + // summary info is logged) + loggingInterceptor.setLogRequestSummary(true); + loggingInterceptor.setLogRequestBody(true); - /******************************/ - { - // START SNIPPET: clientConfig - // Create a client - FhirContext ctx = FhirContext.forR4(); - IPatientClient client = ctx.newRestfulClient(IPatientClient.class, "http://localhost:9999/"); + // Register the interceptor with your client (either style) + IPatientClient annotationClient = ctx.newRestfulClient(IPatientClient.class, "http://localhost:9999/fhir"); + annotationClient.registerInterceptor(loggingInterceptor); - // Request JSON encoding from the server (_format=json) - client.setEncoding(EncodingEnum.JSON); + IGenericClient genericClient = ctx.newRestfulGenericClient("http://localhost:9999/fhir"); + genericClient.registerInterceptor(loggingInterceptor); + // END SNIPPET: logging + } - // Request pretty printing 
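[Editor's sketch, not part of the formatting change] The securityBearer and logging hunks register client interceptors in the same way. A compact sketch combining both on one generic client (the token and server URL are placeholders):

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.client.interceptor.BearerTokenAuthInterceptor;
import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;

public class AuthenticatedLoggingClient {

    public IGenericClient create(String theToken) {
        FhirContext ctx = FhirContext.forR4();
        IGenericClient client = ctx.newRestfulGenericClient("http://localhost:9999/fhir");

        // Send "Authorization: Bearer <token>" on every request
        client.registerInterceptor(new BearerTokenAuthInterceptor(theToken));

        // Log each exchange; by default only a summary line is produced
        LoggingInterceptor logging = new LoggingInterceptor();
        logging.setLogRequestSummary(true);
        logging.setLogRequestBody(true);
        client.registerInterceptor(logging);

        return client;
    }
}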
from the server (_pretty=true) - client.setPrettyPrint(true); - // END SNIPPET: clientConfig - } - } + /******************************/ + { + // START SNIPPET: clientConfig + // Create a client + FhirContext ctx = FhirContext.forR4(); + IPatientClient client = ctx.newRestfulClient(IPatientClient.class, "http://localhost:9999/"); + // Request JSON encoding from the server (_format=json) + client.setEncoding(EncodingEnum.JSON); + + // Request pretty printing from the server (_pretty=true) + client.setPrettyPrint(true); + // END SNIPPET: clientConfig + } + } } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ClientTransactionExamples.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ClientTransactionExamples.java index 570d8cf8043..cbd954ce231 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ClientTransactionExamples.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ClientTransactionExamples.java @@ -25,85 +25,78 @@ import org.hl7.fhir.r4.model.*; public class ClientTransactionExamples { - public static void main(String[] args) { - conditionalCreate(); - } + public static void main(String[] args) { + conditionalCreate(); + } - private static void conditionalCreate() { - - //START SNIPPET: conditional - // Create a patient object - Patient patient = new Patient(); - patient.addIdentifier() - .setSystem("http://acme.org/mrns") - .setValue("12345"); - patient.addName() - .setFamily("Jameson") - .addGiven("J") - .addGiven("Jonah"); - patient.setGender(Enumerations.AdministrativeGender.MALE); - - // Give the patient a temporary UUID so that other resources in - // the transaction can refer to it - patient.setId(IdType.newRandomUuid()); - - // Create an observation object - Observation observation = new Observation(); - observation.setStatus(Observation.ObservationStatus.FINAL); - observation - .getCode() - .addCoding() - .setSystem("http://loinc.org") - .setCode("789-8") - .setDisplay("Erythrocytes [#/volume] in Blood by Automated count"); - observation.setValue( - new Quantity() - .setValue(4.12) - .setUnit("10 trillion/L") - .setSystem("http://unitsofmeasure.org") - .setCode("10*12/L")); + private static void conditionalCreate() { - // The observation refers to the patient using the ID, which is already - // set to a temporary UUID - observation.setSubject(new Reference(patient.getIdElement().getValue())); + // START SNIPPET: conditional + // Create a patient object + Patient patient = new Patient(); + patient.addIdentifier().setSystem("http://acme.org/mrns").setValue("12345"); + patient.addName().setFamily("Jameson").addGiven("J").addGiven("Jonah"); + patient.setGender(Enumerations.AdministrativeGender.MALE); - // Create a bundle that will be used as a transaction - Bundle bundle = new Bundle(); - bundle.setType(Bundle.BundleType.TRANSACTION); - - // Add the patient as an entry. This entry is a POST with an - // If-None-Exist header (conditional create) meaning that it - // will only be created if there isn't already a Patient with - // the identifier 12345 - bundle.addEntry() - .setFullUrl(patient.getIdElement().getValue()) - .setResource(patient) - .getRequest() - .setUrl("Patient") - .setIfNoneExist("identifier=http://acme.org/mrns|12345") - .setMethod(Bundle.HTTPVerb.POST); - - // Add the observation. This entry is a POST with no header - // (normal create) meaning that it will be created even if - // a similar resource already exists. 
- bundle.addEntry() - .setResource(observation) - .getRequest() - .setUrl("Observation") - .setMethod(Bundle.HTTPVerb.POST); - - // Log the request - FhirContext ctx = FhirContext.forR4(); - System.out.println(ctx.newJsonParser().setPrettyPrint(true).encodeResourceToString(bundle)); - - // Create a client and post the transaction to the server - IGenericClient client = ctx.newRestfulGenericClient("http://hapi.fhir.org/baseR4"); - Bundle resp = client.transaction().withBundle(bundle).execute(); + // Give the patient a temporary UUID so that other resources in + // the transaction can refer to it + patient.setId(IdType.newRandomUuid()); - // Log the response - System.out.println(ctx.newJsonParser().setPrettyPrint(true).encodeResourceToString(resp)); - //END SNIPPET: conditional - - } - + // Create an observation object + Observation observation = new Observation(); + observation.setStatus(Observation.ObservationStatus.FINAL); + observation + .getCode() + .addCoding() + .setSystem("http://loinc.org") + .setCode("789-8") + .setDisplay("Erythrocytes [#/volume] in Blood by Automated count"); + observation.setValue(new Quantity() + .setValue(4.12) + .setUnit("10 trillion/L") + .setSystem("http://unitsofmeasure.org") + .setCode("10*12/L")); + + // The observation refers to the patient using the ID, which is already + // set to a temporary UUID + observation.setSubject(new Reference(patient.getIdElement().getValue())); + + // Create a bundle that will be used as a transaction + Bundle bundle = new Bundle(); + bundle.setType(Bundle.BundleType.TRANSACTION); + + // Add the patient as an entry. This entry is a POST with an + // If-None-Exist header (conditional create) meaning that it + // will only be created if there isn't already a Patient with + // the identifier 12345 + bundle.addEntry() + .setFullUrl(patient.getIdElement().getValue()) + .setResource(patient) + .getRequest() + .setUrl("Patient") + .setIfNoneExist("identifier=http://acme.org/mrns|12345") + .setMethod(Bundle.HTTPVerb.POST); + + // Add the observation. This entry is a POST with no header + // (normal create) meaning that it will be created even if + // a similar resource already exists. + bundle.addEntry() + .setResource(observation) + .getRequest() + .setUrl("Observation") + .setMethod(Bundle.HTTPVerb.POST); + + // Log the request + FhirContext ctx = FhirContext.forR4(); + System.out.println(ctx.newJsonParser().setPrettyPrint(true).encodeResourceToString(bundle)); + + // Create a client and post the transaction to the server + IGenericClient client = ctx.newRestfulGenericClient("http://hapi.fhir.org/baseR4"); + Bundle resp = client.transaction().withBundle(bundle).execute(); + + // Log the response + System.out.println(ctx.newJsonParser().setPrettyPrint(true).encodeResourceToString(resp)); + // END SNIPPET: conditional + + } } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/CompleteExampleClient.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/CompleteExampleClient.java index 5cae0e14d06..d8d8e69d435 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/CompleteExampleClient.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/CompleteExampleClient.java @@ -19,7 +19,7 @@ */ package ca.uhn.hapi.fhir.docs; -//START SNIPPET: client +// START SNIPPET: client import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.rest.annotation.RequiredParam; @@ -34,53 +34,50 @@ import java.util.List; public class CompleteExampleClient { - /** - * This is a simple client interface. 
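[Editor's sketch, not part of the formatting change] The ClientTransactionExamples hunk shows the full conditional-create pattern: a temporary urn:uuid id on the Patient lets the Observation reference it inside the same transaction, and the If-None-Exist header makes the Patient entry conditional. A trimmed-down sketch of the same flow (server URL and identifier values are placeholders):

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Reference;

public class ConditionalCreateTransaction {

    public Bundle run() {
        // Patient with a temporary UUID so other entries can reference it
        Patient patient = new Patient();
        patient.addIdentifier().setSystem("http://acme.org/mrns").setValue("12345");
        patient.setId(IdType.newRandomUuid());

        // Observation referencing the patient by its temporary id
        Observation observation = new Observation();
        observation.setStatus(Observation.ObservationStatus.FINAL);
        observation.setSubject(new Reference(patient.getIdElement().getValue()));

        Bundle bundle = new Bundle();
        bundle.setType(Bundle.BundleType.TRANSACTION);

        // Conditional create: only performed if no Patient with this identifier exists
        bundle.addEntry()
                .setFullUrl(patient.getIdElement().getValue())
                .setResource(patient)
                .getRequest()
                .setUrl("Patient")
                .setIfNoneExist("identifier=http://acme.org/mrns|12345")
                .setMethod(Bundle.HTTPVerb.POST);

        // Plain create for the observation
        bundle.addEntry()
                .setResource(observation)
                .getRequest()
                .setUrl("Observation")
                .setMethod(Bundle.HTTPVerb.POST);

        FhirContext ctx = FhirContext.forR4();
        IGenericClient client = ctx.newRestfulGenericClient("http://hapi.fhir.org/baseR4");
        return client.transaction().withBundle(bundle).execute();
    }
}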
It can have many methods for various - * searches but in this case it has only 1. - */ - public interface ClientInterface extends IRestfulClient { + /** + * This is a simple client interface. It can have many methods for various + * searches but in this case it has only 1. + */ + public interface ClientInterface extends IRestfulClient { - /** - * This is translated into a URL similar to the following: - * http://fhir.healthintersections.com.au/open/Patient?identifier=urn:oid:1.2.36.146.595.217.0.1%7C12345 - */ - @Search - List findPatientsForMrn(@RequiredParam(name = Patient.SP_IDENTIFIER) Identifier theIdentifier); + /** + * This is translated into a URL similar to the following: + * http://fhir.healthintersections.com.au/open/Patient?identifier=urn:oid:1.2.36.146.595.217.0.1%7C12345 + */ + @Search + List findPatientsForMrn(@RequiredParam(name = Patient.SP_IDENTIFIER) Identifier theIdentifier); + } - } + /** + * The main method here will directly call an open FHIR server and retrieve a + * list of resources matching a given criteria, then load a linked resource. + */ + public static void main(String[] args) throws IOException { - /** - * The main method here will directly call an open FHIR server and retrieve a - * list of resources matching a given criteria, then load a linked resource. - */ - public static void main(String[] args) throws IOException { + // Create a client factory + FhirContext ctx = FhirContext.forDstu2(); - // Create a client factory - FhirContext ctx = FhirContext.forDstu2(); + // Create the client + String serverBase = "http://fhir.healthintersections.com.au/open"; + ClientInterface client = ctx.newRestfulClient(ClientInterface.class, serverBase); - // Create the client - String serverBase = "http://fhir.healthintersections.com.au/open"; - ClientInterface client = ctx.newRestfulClient(ClientInterface.class, serverBase); - - // Invoke the client to search for patient - Identifier identifier = new Identifier().setSystem("urn:oid:1.2.36.146.595.217.0.1").setValue("12345"); + // Invoke the client to search for patient + Identifier identifier = + new Identifier().setSystem("urn:oid:1.2.36.146.595.217.0.1").setValue("12345"); List patients = client.findPatientsForMrn(identifier); - System.out.println("Found " + patients.size() + " patients"); + System.out.println("Found " + patients.size() + " patients"); - // Print a value from the loaded resource - Patient patient = patients.get(0); - System.out.println("Patient Last Name: " + patient.getName().get(0).getFamily()); + // Print a value from the loaded resource + Patient patient = patients.get(0); + System.out.println("Patient Last Name: " + patient.getName().get(0).getFamily()); - // Load a referenced resource - Reference managingRef = patient.getManagingOrganization(); - Organization org = client.getOrganizationById(managingRef.getReferenceElement()); - - // Print organization name - System.out.println(org.getName()); - - } + // Load a referenced resource + Reference managingRef = patient.getManagingOrganization(); + Organization org = client.getOrganizationById(managingRef.getReferenceElement()); + // Print organization name + System.out.println(org.getName()); + } } // END SNIPPET: client - diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ConsentInterceptors.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ConsentInterceptors.java index 1bb72b5ade5..6748fd86cfa 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ConsentInterceptors.java +++ 
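[Editor's sketch, not part of the formatting change] CompleteExampleClient defines an annotation-style client interface whose methods are translated into search URLs. A minimal R4 variant; the original targets DSTU2, TokenParam is used here for the identifier parameter as an assumption, and the server URL is a placeholder:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.annotation.RequiredParam;
import ca.uhn.fhir.rest.annotation.Search;
import ca.uhn.fhir.rest.client.api.IRestfulClient;
import ca.uhn.fhir.rest.param.TokenParam;
import org.hl7.fhir.r4.model.Patient;

import java.util.List;

public class AnnotationClientSketch {

    /** Each method on the interface is translated into a FHIR search URL. */
    public interface IPatientSearchClient extends IRestfulClient {
        @Search
        List<Patient> findPatientsByIdentifier(@RequiredParam(name = Patient.SP_IDENTIFIER) TokenParam theIdentifier);
    }

    public static void main(String[] args) {
        FhirContext ctx = FhirContext.forR4();
        IPatientSearchClient client = ctx.newRestfulClient(IPatientSearchClient.class, "http://example.com/fhir");

        // Equivalent to GET [base]/Patient?identifier=urn:oid:1.2.36.146.595.217.0.1|12345
        List<Patient> patients =
                client.findPatientsByIdentifier(new TokenParam("urn:oid:1.2.36.146.595.217.0.1", "12345"));
        System.out.println("Found " + patients.size() + " patients");
    }
}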
b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ConsentInterceptors.java @@ -30,15 +30,15 @@ import org.hl7.fhir.r4.model.Observation; @SuppressWarnings("unused") public class ConsentInterceptors { - - //START SNIPPET: service + // START SNIPPET: service public class MyConsentService implements IConsentService { /** * Invoked once at the start of every request */ @Override - public ConsentOutcome startOperation(RequestDetails theRequestDetails, IConsentContextServices theContextServices) { + public ConsentOutcome startOperation( + RequestDetails theRequestDetails, IConsentContextServices theContextServices) { // This means that all requests should flow through the consent service // This has performance implications - If you know that some requests // don't need consent checking it is a good idea to return @@ -50,13 +50,17 @@ public class ConsentInterceptors { * Can a given resource be returned to the user? */ @Override - public ConsentOutcome canSeeResource(RequestDetails theRequestDetails, IBaseResource theResource, IConsentContextServices theContextServices) { + public ConsentOutcome canSeeResource( + RequestDetails theRequestDetails, + IBaseResource theResource, + IConsentContextServices theContextServices) { // In this basic example, we will filter out lab results so that they // are never disclosed to the user. A real interceptor might do something // more nuanced. if (theResource instanceof Observation) { - Observation obs = (Observation)theResource; - if (obs.getCategoryFirstRep().hasCoding("http://hl7.org/fhir/codesystem-observation-category.html", "laboratory")) { + Observation obs = (Observation) theResource; + if (obs.getCategoryFirstRep() + .hasCoding("http://hl7.org/fhir/codesystem-observation-category.html", "laboratory")) { return ConsentOutcome.REJECT; } } @@ -69,26 +73,32 @@ public class ConsentInterceptors { * Modify resources that are being shown to the user */ @Override - public ConsentOutcome willSeeResource(RequestDetails theRequestDetails, IBaseResource theResource, IConsentContextServices theContextServices) { + public ConsentOutcome willSeeResource( + RequestDetails theRequestDetails, + IBaseResource theResource, + IConsentContextServices theContextServices) { // Don't return the subject for Observation resources if (theResource instanceof Observation) { - Observation obs = (Observation)theResource; + Observation obs = (Observation) theResource; obs.setSubject(null); } return ConsentOutcome.AUTHORIZED; } @Override - public void completeOperationSuccess(RequestDetails theRequestDetails, IConsentContextServices theContextServices) { + public void completeOperationSuccess( + RequestDetails theRequestDetails, IConsentContextServices theContextServices) { // We could write an audit trail entry in here } @Override - public void completeOperationFailure(RequestDetails theRequestDetails, BaseServerResponseException theException, IConsentContextServices theContextServices) { + public void completeOperationFailure( + RequestDetails theRequestDetails, + BaseServerResponseException theException, + IConsentContextServices theContextServices) { // We could write an audit trail entry in here } } - //END SNIPPET: service - + // END SNIPPET: service } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ConverterExamples.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ConverterExamples.java index 8d8b49ea01b..58bdc0fe7b8 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ConverterExamples.java +++ 
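[Editor's sketch, not part of the formatting change] The ConsentInterceptors hunk reshapes the IConsentService callbacks. A compact service implementing the same callbacks, assuming the consent classes live in ca.uhn.fhir.rest.server.interceptor.consent; the category system and code used in the filter are illustrative:

import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
import ca.uhn.fhir.rest.server.interceptor.consent.ConsentOutcome; // package location assumed
import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices; // package location assumed
import ca.uhn.fhir.rest.server.interceptor.consent.IConsentService; // package location assumed
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.Observation;

public class LabHidingConsentService implements IConsentService {

    @Override
    public ConsentOutcome startOperation(RequestDetails theRequestDetails, IConsentContextServices theContextServices) {
        // PROCEED means "keep asking me about individual resources"
        return ConsentOutcome.PROCEED;
    }

    @Override
    public ConsentOutcome canSeeResource(
            RequestDetails theRequestDetails, IBaseResource theResource, IConsentContextServices theContextServices) {
        // Hide laboratory observations entirely
        if (theResource instanceof Observation) {
            Observation obs = (Observation) theResource;
            if (obs.getCategoryFirstRep()
                    .hasCoding("http://terminology.hl7.org/CodeSystem/observation-category", "laboratory")) {
                return ConsentOutcome.REJECT;
            }
        }
        return ConsentOutcome.PROCEED;
    }

    @Override
    public ConsentOutcome willSeeResource(
            RequestDetails theRequestDetails, IBaseResource theResource, IConsentContextServices theContextServices) {
        return ConsentOutcome.AUTHORIZED;
    }

    @Override
    public void completeOperationSuccess(RequestDetails theRequestDetails, IConsentContextServices theContextServices) {
        // Audit hook, intentionally empty in this sketch
    }

    @Override
    public void completeOperationFailure(
            RequestDetails theRequestDetails,
            BaseServerResponseException theException,
            IConsentContextServices theContextServices) {
        // Audit hook, intentionally empty in this sketch
    }
}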
b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ConverterExamples.java @@ -29,28 +29,29 @@ public class ConverterExamples { @SuppressWarnings("unused") public void c1020() throws FHIRException { - //START SNIPPET: 1020 + // START SNIPPET: 1020 // Create an input resource to convert org.hl7.fhir.dstu2.model.Observation input = new org.hl7.fhir.dstu2.model.Observation(); input.setEncounter(new org.hl7.fhir.dstu2.model.Reference("Encounter/123")); // Convert the resource - org.hl7.fhir.dstu3.model.Observation output = (Observation) VersionConvertorFactory_10_30.convertResource(input); + org.hl7.fhir.dstu3.model.Observation output = + (Observation) VersionConvertorFactory_10_30.convertResource(input); String context = output.getContext().getReference(); - //END SNIPPET: 1020 + // END SNIPPET: 1020 } @SuppressWarnings("unused") public void c1420() throws FHIRException { - //START SNIPPET: 1420 + // START SNIPPET: 1420 // Create a resource to convert org.hl7.fhir.dstu2016may.model.Questionnaire input = new org.hl7.fhir.dstu2016may.model.Questionnaire(); input.setTitle("My title"); // Convert the resource - org.hl7.fhir.dstu3.model.Questionnaire output = (Questionnaire) VersionConvertorFactory_14_30.convertResource(input); + org.hl7.fhir.dstu3.model.Questionnaire output = + (Questionnaire) VersionConvertorFactory_14_30.convertResource(input); String context = output.getTitle(); - //END SNIPPET: 1420 + // END SNIPPET: 1420 } - } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/Copier.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/Copier.java index 4c2b35417a3..35594752f37 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/Copier.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/Copier.java @@ -43,10 +43,16 @@ public class Copier { IGenericClient target = ctx.newRestfulGenericClient("https://try.smilecdr.com:8000"); List resType = Arrays.asList( - "Patient", "Organization", "Encounter", "Procedure", - "Observation", "ResearchSubject", "Specimen", - "ResearchStudy", "Location", "Practitioner" - ); + "Patient", + "Organization", + "Encounter", + "Procedure", + "Observation", + "ResearchSubject", + "Specimen", + "ResearchStudy", + "Location", + "Practitioner"); List queued = new ArrayList<>(); Set sent = new HashSet<>(); @@ -61,37 +67,46 @@ public class Copier { String missingRef = null; for (ResourceReferenceInfo nextRefInfo : ctx.newTerser().getAllResourceReferences(nextQueued)) { - String nextRef = nextRefInfo.getResourceReference().getReferenceElement().getValue(); + String nextRef = nextRefInfo + .getResourceReference() + .getReferenceElement() + .getValue(); if (isNotBlank(nextRef) && !sent.contains(nextRef)) { missingRef = nextRef; } } if (missingRef != null) { - ourLog.info("Can't send {} because of missing ref {}", nextQueued.getIdElement().getIdPart(), missingRef); + ourLog.info( + "Can't send {} because of missing ref {}", + nextQueued.getIdElement().getIdPart(), + missingRef); continue; } - IIdType newId = target - .update() - .resource(nextQueued) - .execute() - .getId(); + IIdType newId = target.update().resource(nextQueued).execute().getId(); - ourLog.info("Copied resource {} and got ID {}", nextQueued.getIdElement().getValue(), newId); + ourLog.info( + "Copied resource {} and got ID {}", + nextQueued.getIdElement().getValue(), + newId); sent.add(nextQueued.getIdElement().toUnqualifiedVersionless().getValue()); queued.remove(nextQueued); } } - - } - private static void copy(FhirContext theCtx, IGenericClient theSource, 
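[Editor's sketch, not part of the formatting change] ConverterExamples calls the static structure-converter factories. A one-method sketch for the DSTU2-to-DSTU3 direction, assuming the factory class sits in org.hl7.fhir.convertors.factory as in current converter releases:

import org.hl7.fhir.convertors.factory.VersionConvertorFactory_10_30; // package location assumed
import org.hl7.fhir.exceptions.FHIRException;

public class ConverterSketch {

    /** Convert a DSTU2 Patient into its DSTU3 equivalent. */
    public org.hl7.fhir.dstu3.model.Patient toDstu3(org.hl7.fhir.dstu2.model.Patient theInput) throws FHIRException {
        return (org.hl7.fhir.dstu3.model.Patient) VersionConvertorFactory_10_30.convertResource(theInput);
    }
}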
IGenericClient theTarget, String theResType, List theQueued, Set theSent) { + private static void copy( + FhirContext theCtx, + IGenericClient theSource, + IGenericClient theTarget, + String theResType, + List theQueued, + Set theSent) { Bundle received = theSource - .search() - .forResource(theResType) - .returnBundle(Bundle.class) - .execute(); + .search() + .forResource(theResType) + .returnBundle(Bundle.class) + .execute(); copy(theCtx, theTarget, theResType, theQueued, theSent, received); while (received.getLink("next") != null) { @@ -99,13 +114,19 @@ public class Copier { received = theSource.loadPage().next(received).execute(); copy(theCtx, theTarget, theResType, theQueued, theSent, received); } - } - private static void copy(FhirContext theCtx, IGenericClient theTarget, String theResType, List theQueued, Set theSent, Bundle theReceived) { + private static void copy( + FhirContext theCtx, + IGenericClient theTarget, + String theResType, + List theQueued, + Set theSent, + Bundle theReceived) { for (Bundle.BundleEntryComponent nextEntry : theReceived.getEntry()) { Resource nextResource = nextEntry.getResource(); - nextResource.setId(theResType + "/" + "CR-" + nextResource.getIdElement().getIdPart()); + nextResource.setId( + theResType + "/" + "CR-" + nextResource.getIdElement().getIdPart()); boolean haveUnsentReference = false; for (ResourceReferenceInfo nextRefInfo : theCtx.newTerser().getAllResourceReferences(nextResource)) { @@ -126,15 +147,10 @@ public class Copier { continue; } - IIdType newId = theTarget - .update() - .resource(nextResource) - .execute() - .getId(); + IIdType newId = theTarget.update().resource(nextResource).execute().getId(); ourLog.info("Copied resource {} and got ID {}", nextResource.getId(), newId); theSent.add(nextResource.getIdElement().toUnqualifiedVersionless().getValue()); } } - } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/CreateCompositionAndGenerateDocument.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/CreateCompositionAndGenerateDocument.java index e81c4f039b1..d8ef7fa84a8 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/CreateCompositionAndGenerateDocument.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/CreateCompositionAndGenerateDocument.java @@ -32,41 +32,41 @@ import org.slf4j.LoggerFactory; public class CreateCompositionAndGenerateDocument { - private static final Logger ourLog = LoggerFactory.getLogger(CreateCompositionAndGenerateDocument.class); + private static final Logger ourLog = LoggerFactory.getLogger(CreateCompositionAndGenerateDocument.class); - public static void main(String[] args) { + public static void main(String[] args) { - // START SNIPPET: CreateCompositionAndGenerateDocument - FhirContext ctx = FhirContext.forR4(); - IGenericClient client = ctx.newRestfulGenericClient("http://hapi.fhir.org/baseR4"); + // START SNIPPET: CreateCompositionAndGenerateDocument + FhirContext ctx = FhirContext.forR4(); + IGenericClient client = ctx.newRestfulGenericClient("http://hapi.fhir.org/baseR4"); - Patient patient = new Patient(); - patient.setId("PATIENT-ABC"); - patient.setActive(true); - client.update().resource(patient).execute(); + Patient patient = new Patient(); + patient.setId("PATIENT-ABC"); + patient.setActive(true); + client.update().resource(patient).execute(); - Observation observation = new Observation(); - observation.setId("OBSERVATION-ABC"); - observation.setSubject(new Reference("Patient/PATIENT-ABC")); - observation.setStatus(Observation.ObservationStatus.FINAL); 
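[Editor's sketch, not part of the formatting change] Copier walks every reference in a resource via the FhirTerser before deciding whether the resource can be sent to the target server. A standalone sketch of that reference-scanning step, using the calls shown in the hunk:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.ResourceReferenceInfo; // package location assumed
import org.hl7.fhir.instance.model.api.IBaseResource;

import java.util.ArrayList;
import java.util.List;

public class ReferenceLister {

    /** Collect the textual references (e.g. "Patient/123") contained in a resource. */
    public List<String> listReferences(FhirContext theContext, IBaseResource theResource) {
        List<String> refs = new ArrayList<>();
        for (ResourceReferenceInfo refInfo : theContext.newTerser().getAllResourceReferences(theResource)) {
            String value = refInfo.getResourceReference().getReferenceElement().getValue();
            if (value != null) {
                refs.add(value);
            }
        }
        return refs;
    }
}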
- client.update().resource(observation).execute(); + Observation observation = new Observation(); + observation.setId("OBSERVATION-ABC"); + observation.setSubject(new Reference("Patient/PATIENT-ABC")); + observation.setStatus(Observation.ObservationStatus.FINAL); + client.update().resource(observation).execute(); - Composition composition = new Composition(); - composition.setId("COMPOSITION-ABC"); - composition.setSubject(new Reference("Patient/PATIENT-ABC")); - composition.addSection().setFocus(new Reference("Observation/OBSERVATION-ABC")); - client.update().resource(composition).execute(); + Composition composition = new Composition(); + composition.setId("COMPOSITION-ABC"); + composition.setSubject(new Reference("Patient/PATIENT-ABC")); + composition.addSection().setFocus(new Reference("Observation/OBSERVATION-ABC")); + client.update().resource(composition).execute(); - Bundle document = client - .operation() - .onInstance("Composition/COMPOSITION-ABC") - .named("$document") - .withNoParameters(Parameters.class) - .returnResourceType(Bundle.class) - .execute(); + Bundle document = client.operation() + .onInstance("Composition/COMPOSITION-ABC") + .named("$document") + .withNoParameters(Parameters.class) + .returnResourceType(Bundle.class) + .execute(); - ourLog.debug("Document bundle: {}", ctx.newJsonParser().setPrettyPrint(true).encodeResourceToString(document)); - // END SNIPPET: CreateCompositionAndGenerateDocument + ourLog.debug( + "Document bundle: {}", ctx.newJsonParser().setPrettyPrint(true).encodeResourceToString(document)); + // END SNIPPET: CreateCompositionAndGenerateDocument - } + } } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/CustomObservation.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/CustomObservation.java index 75c18109bdf..a3321ccbed6 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/CustomObservation.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/CustomObservation.java @@ -21,6 +21,4 @@ package ca.uhn.hapi.fhir.docs; import org.hl7.fhir.dstu3.model.Observation; -public class CustomObservation extends Observation { - -} +public class CustomObservation extends Observation {} diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/Dstu2Examples.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/Dstu2Examples.java index 81ef66972b1..3ec9e5275b6 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/Dstu2Examples.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/Dstu2Examples.java @@ -25,61 +25,55 @@ import ca.uhn.fhir.rest.client.api.IGenericClient; import ca.uhn.fhir.rest.server.IResourceProvider; import ca.uhn.fhir.rest.server.RestfulServer; -import javax.servlet.ServletException; import java.util.Collection; +import javax.servlet.ServletException; @SuppressWarnings("serial") public class Dstu2Examples { - private Collection resourceProviderList; + private Collection resourceProviderList; - public static void main(String[] args) { - new Dstu2Examples().getResourceTags(); - } + public static void main(String[] args) { + new Dstu2Examples().getResourceTags(); + } - @SuppressWarnings("unused") - public void getResourceTags() { - // START SNIPPET: context - // Create a DSTU2 context, which will use DSTU2 semantics - FhirContext ctx = FhirContext.forDstu2(); - - // This parser supports DSTU2 - IParser parser = ctx.newJsonParser(); - - // This client supports DSTU2 - IGenericClient client = ctx.newRestfulGenericClient("http://fhirtest.uhn.ca/baseDstu2"); - // END SNIPPET: 
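[Editor's sketch, not part of the formatting change] CreateCompositionAndGenerateDocument ends by invoking the $document operation on the Composition instance. A condensed sketch of just that call (composition id and server URL are placeholders):

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Parameters;

public class DocumentGenerator {

    public Bundle generateDocument(String theCompositionId) {
        FhirContext ctx = FhirContext.forR4();
        IGenericClient client = ctx.newRestfulGenericClient("http://hapi.fhir.org/baseR4");

        // Ask the server to assemble a document Bundle for the given Composition
        return client.operation()
                .onInstance("Composition/" + theCompositionId)
                .named("$document")
                .withNoParameters(Parameters.class)
                .returnResourceType(Bundle.class)
                .execute();
    }
}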
context - } + @SuppressWarnings("unused") + public void getResourceTags() { + // START SNIPPET: context + // Create a DSTU2 context, which will use DSTU2 semantics + FhirContext ctx = FhirContext.forDstu2(); - - // START SNIPPET: server - public class MyServer extends RestfulServer - { + // This parser supports DSTU2 + IParser parser = ctx.newJsonParser(); - @Override - protected void initialize() throws ServletException { + // This client supports DSTU2 + IGenericClient client = ctx.newRestfulGenericClient("http://fhirtest.uhn.ca/baseDstu2"); + // END SNIPPET: context + } - // In your initialize method, assign a DSTU2 FhirContext. This - // is all that is required in order to put the server - // into DSTU2 mode - setFhirContext(FhirContext.forDstu2()); - - // Then set resource providers as normal, and do any other - // configuration you need to do. - setResourceProviders(resourceProviderList); - - } - - } - // END SNIPPET: server + // START SNIPPET: server + public class MyServer extends RestfulServer { - - public void upgrade() { - // START SNIPPET: client - FhirContext ctxDstu2 = FhirContext.forDstu2(); - IGenericClient clientDstu2 = ctxDstu2.newRestfulGenericClient("http://fhirtest.uhn.ca/baseDstu2"); - - // END SNIPPET: client - - } - + @Override + protected void initialize() throws ServletException { + + // In your initialize method, assign a DSTU2 FhirContext. This + // is all that is required in order to put the server + // into DSTU2 mode + setFhirContext(FhirContext.forDstu2()); + + // Then set resource providers as normal, and do any other + // configuration you need to do. + setResourceProviders(resourceProviderList); + } + } + // END SNIPPET: server + + public void upgrade() { + // START SNIPPET: client + FhirContext ctxDstu2 = FhirContext.forDstu2(); + IGenericClient clientDstu2 = ctxDstu2.newRestfulGenericClient("http://fhirtest.uhn.ca/baseDstu2"); + + // END SNIPPET: client + + } } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ExampleProviders.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ExampleProviders.java index af534e6b0f5..f64d0fdf6f5 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ExampleProviders.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ExampleProviders.java @@ -24,75 +24,68 @@ import ca.uhn.fhir.rest.annotation.Search; import ca.uhn.fhir.rest.server.HardcodedServerAddressStrategy; import ca.uhn.fhir.rest.server.IResourceProvider; import org.hl7.fhir.r4.model.Bundle; -import org.hl7.fhir.r4.model.StringType; import org.hl7.fhir.r4.model.Patient; +import org.hl7.fhir.r4.model.StringType; import java.util.ArrayList; import java.util.List; -@SuppressWarnings(value= {"serial"}) +@SuppressWarnings(value = {"serial"}) public class ExampleProviders { - -//START SNIPPET: plainProvider -public class MyPlainProvider { + // START SNIPPET: plainProvider + public class MyPlainProvider { - /** - * This method is a Patient search, but HAPI can not automatically - * determine the resource type so it must be explicitly stated. - */ - @Search(type=Patient.class) - public Bundle searchForPatients(@RequiredParam(name=Patient.SP_NAME) StringType theName) { - Bundle retVal = new Bundle(); - // perform search - return retVal; - } - -} -//END SNIPPET: plainProvider + /** + * This method is a Patient search, but HAPI can not automatically + * determine the resource type so it must be explicitly stated. 
+ */ + @Search(type = Patient.class) + public Bundle searchForPatients(@RequiredParam(name = Patient.SP_NAME) StringType theName) { + Bundle retVal = new Bundle(); + // perform search + return retVal; + } + } + // END SNIPPET: plainProvider + // START SNIPPET: plainProviderServer + public class ExampleServlet extends ca.uhn.fhir.rest.server.RestfulServer { -//START SNIPPET: plainProviderServer -public class ExampleServlet extends ca.uhn.fhir.rest.server.RestfulServer { + /** + * Constructor + */ + public ExampleServlet() { + /* + * Plain providers are passed to the server in the same way + * as resource providers. You may pass both resource providers + * and plain providers to the same server if you like. + */ + registerProvider(new MyPlainProvider()); - /** - * Constructor - */ - public ExampleServlet() { - /* - * Plain providers are passed to the server in the same way - * as resource providers. You may pass both resource providers - * and plain providers to the same server if you like. - */ - registerProvider(new MyPlainProvider()); - - List resourceProviders = new ArrayList(); - // ...add some resource providers... - registerProviders(resourceProviders); - } - -} -//END SNIPPET: plainProviderServer + List resourceProviders = new ArrayList(); + // ...add some resource providers... + registerProviders(resourceProviders); + } + } + // END SNIPPET: plainProviderServer - //START SNIPPET: addressStrategy - public class MyServlet extends ca.uhn.fhir.rest.server.RestfulServer { + // START SNIPPET: addressStrategy + public class MyServlet extends ca.uhn.fhir.rest.server.RestfulServer { - /** - * Constructor - */ - public MyServlet() { - - String serverBaseUrl = "http://foo.com/fhir"; - setServerAddressStrategy(new HardcodedServerAddressStrategy(serverBaseUrl)); - - // ...add some resource providers, etc... - List resourceProviders = new ArrayList(); - setResourceProviders(resourceProviders); - } - - } -//END SNIPPET: addressStrategy + /** + * Constructor + */ + public MyServlet() { + String serverBaseUrl = "http://foo.com/fhir"; + setServerAddressStrategy(new HardcodedServerAddressStrategy(serverBaseUrl)); + // ...add some resource providers, etc... + List resourceProviders = new ArrayList(); + setResourceProviders(resourceProviders); + } + } + // END SNIPPET: addressStrategy } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ExampleRestfulClient.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ExampleRestfulClient.java index c4ad20d195e..d2dcb9a74f8 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ExampleRestfulClient.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ExampleRestfulClient.java @@ -26,19 +26,18 @@ import java.util.List; @SuppressWarnings("unused") public class ExampleRestfulClient { - -//START SNIPPET: client -public static void main(String[] args) { - FhirContext ctx = FhirContext.forDstu2(); - String serverBase = "http://foo.com/fhirServerBase"; - - // Create the client - IRestfulClient client = ctx.newRestfulClient(IRestfulClient.class, serverBase); - - // Try the client out! This method will invoke the server - List patients = client.getPatient(new StringType("SMITH")); - -} -//END SNIPPET: client + + // START SNIPPET: client + public static void main(String[] args) { + FhirContext ctx = FhirContext.forDstu2(); + String serverBase = "http://foo.com/fhirServerBase"; + + // Create the client + IRestfulClient client = ctx.newRestfulClient(IRestfulClient.class, serverBase); + + // Try the client out! 
This method will invoke the server + List patients = client.getPatient(new StringType("SMITH")); + } + // END SNIPPET: client } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ExampleRestfulServlet.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ExampleRestfulServlet.java index a5768a9d6ff..601a94cb353 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ExampleRestfulServlet.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ExampleRestfulServlet.java @@ -22,19 +22,21 @@ package ca.uhn.hapi.fhir.docs; import ca.uhn.fhir.rest.server.IResourceProvider; import ca.uhn.fhir.rest.server.RestfulServer; -import javax.servlet.ServletException; -import javax.servlet.annotation.WebServlet; import java.util.ArrayList; import java.util.List; +import javax.servlet.ServletException; +import javax.servlet.annotation.WebServlet; -//START SNIPPET: servlet +// START SNIPPET: servlet /** * In this example, we are using Servlet 3.0 annotations to define * the URL pattern for this servlet, but we could also * define this in a web.xml file. */ -@WebServlet(urlPatterns= {"/fhir/*"}, displayName="FHIR Server") +@WebServlet( + urlPatterns = {"/fhir/*"}, + displayName = "FHIR Server") public class ExampleRestfulServlet extends RestfulServer { private static final long serialVersionUID = 1L; @@ -44,21 +46,17 @@ public class ExampleRestfulServlet extends RestfulServer { * be used to configure the servlet to define resource providers, or set up * configuration, interceptors, etc. */ - @Override - protected void initialize() throws ServletException { - /* - * The servlet defines any number of resource providers, and - * configures itself to use them by calling - * setResourceProviders() - */ - List resourceProviders = new ArrayList(); - resourceProviders.add(new RestfulPatientResourceProvider()); - resourceProviders.add(new RestfulObservationResourceProvider()); - setResourceProviders(resourceProviders); - } - + @Override + protected void initialize() throws ServletException { + /* + * The servlet defines any number of resource providers, and + * configures itself to use them by calling + * setResourceProviders() + */ + List resourceProviders = new ArrayList(); + resourceProviders.add(new RestfulPatientResourceProvider()); + resourceProviders.add(new RestfulObservationResourceProvider()); + setResourceProviders(resourceProviders); + } } -//END SNIPPET: servlet - - - +// END SNIPPET: servlet diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ExtensionsDstu2.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ExtensionsDstu2.java index bbed4377514..1cb4d4e2753 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ExtensionsDstu2.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ExtensionsDstu2.java @@ -35,90 +35,89 @@ import java.util.List; public class ExtensionsDstu2 { -@SuppressWarnings("unused") -public static void main(String[] args) throws DataFormatException, IOException { + @SuppressWarnings("unused") + public static void main(String[] args) throws DataFormatException, IOException { - { - Questionnaire q= new Questionnaire(); - Questionnaire.GroupQuestion item = q.getGroup().addQuestion(); - item.setText("Hello"); - - ExtensionDt extension = new ExtensionDt(false, "http://hl7.org/fhir/StructureDefinition/translation"); - item.getTextElement().addUndeclaredExtension(extension); - - extension.addUndeclaredExtension(new ExtensionDt(false, "lang", new CodeDt("es"))); - extension.addUndeclaredExtension(new ExtensionDt(false, 
"cont", new StringDt("hola"))); - - System.out.println(FhirContext.forDstu2().newJsonParser().setPrettyPrint(true).encodeResourceToString(q)); - } + { + Questionnaire q = new Questionnaire(); + Questionnaire.GroupQuestion item = q.getGroup().addQuestion(); + item.setText("Hello"); - -// START SNIPPET: resourceExtension -// Create an example patient -Patient patient = new Patient(); -patient.addIdentifier().setUse(IdentifierUseEnum.OFFICIAL).setSystem("urn:example").setValue("7000135"); + ExtensionDt extension = new ExtensionDt(false, "http://hl7.org/fhir/StructureDefinition/translation"); + item.getTextElement().addUndeclaredExtension(extension); -// Create an extension -ExtensionDt ext = new ExtensionDt(); -ext.setModifier(false); -ext.setUrl("http://example.com/extensions#someext"); -ext.setValue(new DateTimeDt("2011-01-02T11:13:15")); + extension.addUndeclaredExtension(new ExtensionDt(false, "lang", new CodeDt("es"))); + extension.addUndeclaredExtension(new ExtensionDt(false, "cont", new StringDt("hola"))); -// Add the extension to the resource -patient.addUndeclaredExtension(ext); -//END SNIPPET: resourceExtension + System.out.println( + FhirContext.forDstu2().newJsonParser().setPrettyPrint(true).encodeResourceToString(q)); + } + // START SNIPPET: resourceExtension + // Create an example patient + Patient patient = new Patient(); + patient.addIdentifier() + .setUse(IdentifierUseEnum.OFFICIAL) + .setSystem("urn:example") + .setValue("7000135"); -//START SNIPPET: resourceStringExtension -// Continuing the example from above, we will add a name to the patient, and then -// add an extension to part of that name -HumanNameDt name = patient.addName(); -name.addFamily().setValue("Shmoe"); + // Create an extension + ExtensionDt ext = new ExtensionDt(); + ext.setModifier(false); + ext.setUrl("http://example.com/extensions#someext"); + ext.setValue(new DateTimeDt("2011-01-02T11:13:15")); -// Add a new "given name", which is of type StringDt -StringDt given = name.addGiven(); -given.setValue("Joe"); + // Add the extension to the resource + patient.addUndeclaredExtension(ext); + // END SNIPPET: resourceExtension -// Create an extension and add it to the StringDt -ExtensionDt givenExt = new ExtensionDt(false, "http://examples.com#moreext", new StringDt("Hello")); -given.addUndeclaredExtension(givenExt); -//END SNIPPET: resourceStringExtension + // START SNIPPET: resourceStringExtension + // Continuing the example from above, we will add a name to the patient, and then + // add an extension to part of that name + HumanNameDt name = patient.addName(); + name.addFamily().setValue("Shmoe"); -FhirContext ctx = FhirContext.forDstu2(); -String output = ctx.newXmlParser().setPrettyPrint(true).encodeResourceToString(patient); -System.out.println(output); + // Add a new "given name", which is of type StringDt + StringDt given = name.addGiven(); + given.setValue("Joe"); + // Create an extension and add it to the StringDt + ExtensionDt givenExt = new ExtensionDt(false, "http://examples.com#moreext", new StringDt("Hello")); + given.addUndeclaredExtension(givenExt); + // END SNIPPET: resourceStringExtension -//START SNIPPET: parseExtension -// Get all extensions (modifier or not) for a given URL -List resourceExts = patient.getUndeclaredExtensionsByUrl("http://fooextensions.com#exts"); + FhirContext ctx = FhirContext.forDstu2(); + String output = ctx.newXmlParser().setPrettyPrint(true).encodeResourceToString(patient); + System.out.println(output); -// Get all non-modifier extensions regardless of URL -List 
nonModExts = patient.getUndeclaredExtensions(); + // START SNIPPET: parseExtension + // Get all extensions (modifier or not) for a given URL + List resourceExts = patient.getUndeclaredExtensionsByUrl("http://fooextensions.com#exts"); -// Get all modifier extensions regardless of URL -List modExts = patient.getUndeclaredModifierExtensions(); -//END SNIPPET: parseExtension - -} - - -public void foo() { -//START SNIPPET: subExtension -Patient patient = new Patient(); - -// Add an extension (initially with no contents) to the resource -ExtensionDt parent = new ExtensionDt(false, "http://example.com#parent"); -patient.addUndeclaredExtension(parent); - -// Add two extensions as children to the parent extension -ExtensionDt child1 = new ExtensionDt(false, "http://example.com#childOne", new StringDt("value1")); -parent.addUndeclaredExtension(child1); - -ExtensionDt child2 = new ExtensionDt(false, "http://example.com#childTwo", new StringDt("value1")); -parent.addUndeclaredExtension(child2); -//END SNIPPET: subExtension - -} + // Get all non-modifier extensions regardless of URL + List nonModExts = patient.getUndeclaredExtensions(); + // Get all modifier extensions regardless of URL + List modExts = patient.getUndeclaredModifierExtensions(); + // END SNIPPET: parseExtension + + } + + public void foo() { + // START SNIPPET: subExtension + Patient patient = new Patient(); + + // Add an extension (initially with no contents) to the resource + ExtensionDt parent = new ExtensionDt(false, "http://example.com#parent"); + patient.addUndeclaredExtension(parent); + + // Add two extensions as children to the parent extension + ExtensionDt child1 = new ExtensionDt(false, "http://example.com#childOne", new StringDt("value1")); + parent.addUndeclaredExtension(child1); + + ExtensionDt child2 = new ExtensionDt(false, "http://example.com#childTwo", new StringDt("value1")); + parent.addUndeclaredExtension(child2); + // END SNIPPET: subExtension + + } } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java index 6180acf013c..a433b2f27d6 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ExtensionsDstu3.java @@ -29,149 +29,143 @@ import java.util.List; public class ExtensionsDstu3 { - public void customType() { + public void customType() { -IGenericClient client = FhirContext.forDstu3().newRestfulGenericClient("http://foo"); + IGenericClient client = FhirContext.forDstu3().newRestfulGenericClient("http://foo"); -//START SNIPPET: customTypeClientSimple -// Create an example patient -MyPatient custPatient = new MyPatient(); -custPatient.addName().setFamily("Smith").addGiven("John"); -custPatient.setPetName(new StringType("Rover")); // populate the extension + // START SNIPPET: customTypeClientSimple + // Create an example patient + MyPatient custPatient = new MyPatient(); + custPatient.addName().setFamily("Smith").addGiven("John"); + custPatient.setPetName(new StringType("Rover")); // populate the extension -// Create the resource like normal -client.create().resource(custPatient).execute(); + // Create the resource like normal + client.create().resource(custPatient).execute(); -// You can also read the resource back like normal -custPatient = client.read().resource(MyPatient.class).withId("123").execute(); -//END SNIPPET: customTypeClientSimple + // You can also read the resource back like normal + custPatient = 
client.read().resource(MyPatient.class).withId("123").execute(); + // END SNIPPET: customTypeClientSimple -//START SNIPPET: customTypeClientSearch -// Perform the search using the custom type -Bundle bundle = client - .search() - .forResource(MyPatient.class) - .returnBundle(Bundle.class) - .execute(); + // START SNIPPET: customTypeClientSearch + // Perform the search using the custom type + Bundle bundle = client.search() + .forResource(MyPatient.class) + .returnBundle(Bundle.class) + .execute(); -// Entries in the return bundle will use the given type -MyPatient pat0 = (MyPatient) bundle.getEntry().get(0).getResource(); -//END SNIPPET: customTypeClientSearch - -//START SNIPPET: customTypeClientSearch2 -//Perform the search using the custom type -bundle = client - .history() - .onInstance(new IdType("Patient/123")) - .andReturnBundle(Bundle.class) - .preferResponseType(MyPatient.class) - .execute(); + // Entries in the return bundle will use the given type + MyPatient pat0 = (MyPatient) bundle.getEntry().get(0).getResource(); + // END SNIPPET: customTypeClientSearch -//Entries in the return bundle will use the given type -MyPatient historyPatient0 = (MyPatient) bundle.getEntry().get(0).getResource(); -//END SNIPPET: customTypeClientSearch2 + // START SNIPPET: customTypeClientSearch2 + // Perform the search using the custom type + bundle = client.history() + .onInstance(new IdType("Patient/123")) + .andReturnBundle(Bundle.class) + .preferResponseType(MyPatient.class) + .execute(); - } + // Entries in the return bundle will use the given type + MyPatient historyPatient0 = (MyPatient) bundle.getEntry().get(0).getResource(); + // END SNIPPET: customTypeClientSearch2 - public void customTypeDeclared() { + } + public void customTypeDeclared() { -//START SNIPPET: customTypeClientDeclared -FhirContext ctx = FhirContext.forDstu3(); + // START SNIPPET: customTypeClientDeclared + FhirContext ctx = FhirContext.forDstu3(); -// Instruct the context that if it receives a resource which -// claims to conform to the given profile (by URL), it should -// use the MyPatient type to parse this resource -ctx.setDefaultTypeForProfile("http://example.com/StructureDefinition/mypatient", MyPatient.class); + // Instruct the context that if it receives a resource which + // claims to conform to the given profile (by URL), it should + // use the MyPatient type to parse this resource + ctx.setDefaultTypeForProfile("http://example.com/StructureDefinition/mypatient", MyPatient.class); -// You can declare as many default types as you like -ctx.setDefaultTypeForProfile("http://foo.com/anotherProfile", CustomObservation.class); + // You can declare as many default types as you like + ctx.setDefaultTypeForProfile("http://foo.com/anotherProfile", CustomObservation.class); -// Create a client -IGenericClient client = ctx.newRestfulGenericClient("http://fhirtest.uhn.ca/baseDstu3"); - -// You can also read the resource back like normal -Patient patient = client.read().resource(Patient.class).withId("123").execute(); -if (patient instanceof MyPatient) { - // If the server supplied a resource which declared to conform - // to the given profile, MyPatient will have been returned so - // process it differently.. 
-} - -//END SNIPPET: customTypeClientDeclared - - - } - -@SuppressWarnings("unused") -public static void main(String[] args) throws DataFormatException, IOException { - - -// START SNIPPET: resourceExtension -// Create an example patient -Patient patient = new Patient(); -patient.addIdentifier().setUse(Identifier.IdentifierUse.OFFICIAL).setSystem("urn:example").setValue("7000135"); - -// Create an extension -Extension ext = new Extension(); -ext.setUrl("http://example.com/extensions#someext"); -ext.setValue(new DateTimeType("2011-01-02T11:13:15")); - -// Add the extension to the resource -patient.addExtension(ext); -//END SNIPPET: resourceExtension - - -//START SNIPPET: resourceStringExtension -// Continuing the example from above, we will add a name to the patient, and then -// add an extension to part of that name -HumanName name = patient.addName(); -name.setFamily("Shmoe"); - -// Add a new "given name", which is of type String -StringType given = name.addGivenElement(); -given.setValue("Joe"); - -// Create an extension and add it to the String -Extension givenExt = new Extension("http://examples.com#moreext", new StringType("Hello")); -given.addExtension(givenExt); -//END SNIPPET: resourceStringExtension - -FhirContext ctx = FhirContext.forDstu3(); -String output = ctx.newXmlParser().setPrettyPrint(true).encodeResourceToString(patient); -System.out.println(output); - - -//START SNIPPET: parseExtension -// Get all extensions (modifier or not) for a given URL -List resourceExts = patient.getExtensionsByUrl("http://fooextensions.com#exts"); - -// Get all non-modifier extensions regardless of URL -List nonModExts = patient.getExtension(); - -// Get all modifier extensions regardless of URL -List modExts = patient.getModifierExtension(); -//END SNIPPET: parseExtension - -} - - -public void foo() { -//START SNIPPET: subExtension -Patient patient = new Patient(); - -// Add an extension (initially with no contents) to the resource -Extension parent = new Extension("http://example.com#parent"); -patient.addExtension(parent); - -// Add two extensions as children to the parent extension -Extension child1 = new Extension("http://example.com#childOne", new StringType("value1")); -parent.addExtension(child1); - -Extension child2 = new Extension("http://example.com#chilwo", new StringType("value1")); -parent.addExtension(child2); -//END SNIPPET: subExtension - -} + // Create a client + IGenericClient client = ctx.newRestfulGenericClient("http://fhirtest.uhn.ca/baseDstu3"); + // You can also read the resource back like normal + Patient patient = client.read().resource(Patient.class).withId("123").execute(); + if (patient instanceof MyPatient) { + // If the server supplied a resource which declared to conform + // to the given profile, MyPatient will have been returned so + // process it differently.. 
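// Illustrative sketch of "processing it differently": cast to the declared custom type and
// read its extension. getPetName() is assumed here as the accessor mirroring the
// setPetName() call shown earlier in this file.
// MyPatient typedPatient = (MyPatient) patient;
// StringType petName = typedPatient.getPetName();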
+ } + + // END SNIPPET: customTypeClientDeclared + + } + + @SuppressWarnings("unused") + public static void main(String[] args) throws DataFormatException, IOException { + + // START SNIPPET: resourceExtension + // Create an example patient + Patient patient = new Patient(); + patient.addIdentifier() + .setUse(Identifier.IdentifierUse.OFFICIAL) + .setSystem("urn:example") + .setValue("7000135"); + + // Create an extension + Extension ext = new Extension(); + ext.setUrl("http://example.com/extensions#someext"); + ext.setValue(new DateTimeType("2011-01-02T11:13:15")); + + // Add the extension to the resource + patient.addExtension(ext); + // END SNIPPET: resourceExtension + + // START SNIPPET: resourceStringExtension + // Continuing the example from above, we will add a name to the patient, and then + // add an extension to part of that name + HumanName name = patient.addName(); + name.setFamily("Shmoe"); + + // Add a new "given name", which is of type String + StringType given = name.addGivenElement(); + given.setValue("Joe"); + + // Create an extension and add it to the String + Extension givenExt = new Extension("http://examples.com#moreext", new StringType("Hello")); + given.addExtension(givenExt); + // END SNIPPET: resourceStringExtension + + FhirContext ctx = FhirContext.forDstu3(); + String output = ctx.newXmlParser().setPrettyPrint(true).encodeResourceToString(patient); + System.out.println(output); + + // START SNIPPET: parseExtension + // Get all extensions (modifier or not) for a given URL + List resourceExts = patient.getExtensionsByUrl("http://fooextensions.com#exts"); + + // Get all non-modifier extensions regardless of URL + List nonModExts = patient.getExtension(); + + // Get all modifier extensions regardless of URL + List modExts = patient.getModifierExtension(); + // END SNIPPET: parseExtension + + } + + public void foo() { + // START SNIPPET: subExtension + Patient patient = new Patient(); + + // Add an extension (initially with no contents) to the resource + Extension parent = new Extension("http://example.com#parent"); + patient.addExtension(parent); + + // Add two extensions as children to the parent extension + Extension child1 = new Extension("http://example.com#childOne", new StringType("value1")); + parent.addExtension(child1); + + Extension child2 = new Extension("http://example.com#chilwo", new StringType("value1")); + parent.addExtension(child2); + // END SNIPPET: subExtension + + } } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/FhirContextIntro.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/FhirContextIntro.java index 4676813fdf3..786e1ffed14 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/FhirContextIntro.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/FhirContextIntro.java @@ -37,8 +37,8 @@ public class FhirContextIntro { // END SNIPPET: creatingContext } - @SuppressWarnings("unused") - public static void creatingContextHl7org() { + @SuppressWarnings("unused") + public static void creatingContextHl7org() { // START SNIPPET: creatingContextHl7org // Create a context for DSTU3 FhirContext ctx = FhirContext.forDstu3(); @@ -52,93 +52,92 @@ public class FhirContextIntro { String encoded = ctx.newJsonParser().encodeResourceToString(patient); // END SNIPPET: creatingContextHl7org - } - + } public void encodeMsg() throws DataFormatException { -FhirContext ctx = new FhirContext(Patient.class, Observation.class); -//START SNIPPET: encodeMsg + FhirContext ctx = new FhirContext(Patient.class, Observation.class); 
+ // START SNIPPET: encodeMsg -/** - * FHIR model types in HAPI are simple POJOs. To create a new - * one, invoke the default constructor and then - * start populating values. - */ -Patient patient = new Patient(); + /** + * FHIR model types in HAPI are simple POJOs. To create a new + * one, invoke the default constructor and then + * start populating values. + */ + Patient patient = new Patient(); -// Add an MRN (a patient identifier) -Identifier id = patient.addIdentifier(); -id.setSystem("http://example.com/fictitious-mrns"); -id.setValue("MRN001"); + // Add an MRN (a patient identifier) + Identifier id = patient.addIdentifier(); + id.setSystem("http://example.com/fictitious-mrns"); + id.setValue("MRN001"); -// Add a name -HumanName name = patient.addName(); -name.setUse(HumanName.NameUse.OFFICIAL); -name.setFamily("Tester"); -name.addGiven("John"); -name.addGiven("Q"); + // Add a name + HumanName name = patient.addName(); + name.setUse(HumanName.NameUse.OFFICIAL); + name.setFamily("Tester"); + name.addGiven("John"); + name.addGiven("Q"); -// We can now use a parser to encode this resource into a string. -String encoded = ctx.newXmlParser().encodeResourceToString(patient); -System.out.println(encoded); -//END SNIPPET: encodeMsg - -//START SNIPPET: encodeMsgJson -IParser jsonParser = ctx.newJsonParser(); -jsonParser.setPrettyPrint(true); -encoded = jsonParser.encodeResourceToString(patient); -System.out.println(encoded); -//END SNIPPET: encodeMsgJson + // We can now use a parser to encode this resource into a string. + String encoded = ctx.newXmlParser().encodeResourceToString(patient); + System.out.println(encoded); + // END SNIPPET: encodeMsg + // START SNIPPET: encodeMsgJson + IParser jsonParser = ctx.newJsonParser(); + jsonParser.setPrettyPrint(true); + encoded = jsonParser.encodeResourceToString(patient); + System.out.println(encoded); + // END SNIPPET: encodeMsgJson } + public void fluent() throws DataFormatException { + FhirContext ctx = new FhirContext(Patient.class, Observation.class); + String encoded; + // START SNIPPET: encodeMsgFluent + Patient patient = new Patient(); + patient.addIdentifier().setSystem("http://example.com/fictitious-mrns").setValue("MRN001"); + patient.addName() + .setUse(HumanName.NameUse.OFFICIAL) + .setFamily("Tester") + .addGiven("John") + .addGiven("Q"); -public void fluent() throws DataFormatException { -FhirContext ctx = new FhirContext(Patient.class, Observation.class); -String encoded; -//START SNIPPET: encodeMsgFluent -Patient patient = new Patient(); -patient.addIdentifier().setSystem("http://example.com/fictitious-mrns").setValue("MRN001"); -patient.addName().setUse(HumanName.NameUse.OFFICIAL).setFamily("Tester").addGiven("John").addGiven("Q"); - -encoded = ctx.newJsonParser().setPrettyPrint(true).encodeResourceToString(patient); -System.out.println(encoded); -//END SNIPPET: encodeMsgFluent - -} + encoded = ctx.newJsonParser().setPrettyPrint(true).encodeResourceToString(patient); + System.out.println(encoded); + // END SNIPPET: encodeMsgFluent + } public static void parseMsg() { -FhirContext ctx = FhirContext.forR4(); + FhirContext ctx = FhirContext.forR4(); -//START SNIPPET: parseMsg -// The following is an example Patient resource -String msgString = "" - + "
<Patient xmlns=\"http://hl7.org/fhir\"><text><div xmlns=\"http://www.w3.org/1999/xhtml\">John Cardinal</div></text>"
- + "<identifier><value value=\"PRP1660\" /></identifier>"
- + "<name><family value=\"Cardinal\" /><given value=\"John\" /></name>"
- + "<gender value=\"male\" /></Patient>";
+ // START SNIPPET: parseMsg
+ // The following is an example Patient resource
+ String msgString = "<Patient xmlns=\"http://hl7.org/fhir\">"
+ + "<text><div xmlns=\"http://www.w3.org/1999/xhtml\">John Cardinal</div></text>"
+ + "<identifier><value value=\"PRP1660\" /></identifier>"
+ + "<name><family value=\"Cardinal\" /><given value=\"John\" /></name>"
+ + "<gender value=\"male\" />"
+ + "</Patient>
    "; -// The hapi context object is used to create a new XML parser -// instance. The parser can then be used to parse (or unmarshall) the -// string message into a Patient object -IParser parser = ctx.newXmlParser(); -Patient patient = parser.parseResource(Patient.class, msgString); + // The hapi context object is used to create a new XML parser + // instance. The parser can then be used to parse (or unmarshall) the + // string message into a Patient object + IParser parser = ctx.newXmlParser(); + Patient patient = parser.parseResource(Patient.class, msgString); -// The patient object has accessor methods to retrieve all of the -// data which has been parsed into the instance. -String patientId = patient.getIdentifier().get(0).getValue(); -String familyName = patient.getName().get(0).getFamily(); -Enumerations.AdministrativeGender gender = patient.getGender(); + // The patient object has accessor methods to retrieve all of the + // data which has been parsed into the instance. + String patientId = patient.getIdentifier().get(0).getValue(); + String familyName = patient.getName().get(0).getFamily(); + Enumerations.AdministrativeGender gender = patient.getGender(); -System.out.println(patientId); // PRP1660 -System.out.println(familyName); // Cardinal -System.out.println(gender.toCode()); // male -//END SNIPPET: parseMsg + System.out.println(patientId); // PRP1660 + System.out.println(familyName); // Cardinal + System.out.println(gender.toCode()); // male + // END SNIPPET: parseMsg } - } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/FhirDataModel.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/FhirDataModel.java index 208681f619e..4a365cd0597 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/FhirDataModel.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/FhirDataModel.java @@ -22,183 +22,171 @@ package ca.uhn.hapi.fhir.docs; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.model.api.TemporalPrecisionEnum; import ca.uhn.fhir.rest.client.api.IGenericClient; +import org.hl7.fhir.r4.model.*; import java.util.Date; import java.util.List; -import org.hl7.fhir.r4.model.*; - public class FhirDataModel { - public static void datatypes() { - // START SNIPPET: datatypes - Observation obs = new Observation(); + public static void datatypes() { + // START SNIPPET: datatypes + Observation obs = new Observation(); - // These are all equivalent - obs.setIssuedElement(new InstantType(new Date())); + // These are all equivalent + obs.setIssuedElement(new InstantType(new Date())); obs.setIssuedElement(new InstantType(new Date(), TemporalPrecisionEnum.MILLI)); obs.setIssued(new Date()); - // The InstantType also lets you work with the instant as a Java Date - // object or as a FHIR String. - Date date = obs.getIssuedElement().getValue(); // A date object - String dateString = obs.getIssuedElement().getValueAsString(); // "2014-03-08T12:59:58.068-05:00" - // END SNIPPET: datatypes + // The InstantType also lets you work with the instant as a Java Date + // object or as a FHIR String. 
+ Date date = obs.getIssuedElement().getValue(); // A date object + String dateString = obs.getIssuedElement().getValueAsString(); // "2014-03-08T12:59:58.068-05:00" + // END SNIPPET: datatypes - System.out.println(date); - System.out.println(dateString); + System.out.println(date); + System.out.println(dateString); + } - } + @SuppressWarnings("unused") + public void nonNull() { + // START SNIPPET: nonNull + Observation observation = new Observation(); - @SuppressWarnings("unused") - public void nonNull() { - // START SNIPPET: nonNull - Observation observation = new Observation(); + // None of these calls will not return null, but instead create their + // respective + // child elements. + List identifierList = observation.getIdentifier(); + CodeableConcept code = observation.getCode(); + StringType textElement = observation.getCode().getTextElement(); - // None of these calls will not return null, but instead create their - // respective - // child elements. - List identifierList = observation.getIdentifier(); - CodeableConcept code = observation.getCode(); - StringType textElement = observation.getCode().getTextElement(); + // DateTimeDt is a FHIR primitive however, so the following will return + // null + // unless a value has been placed there. + Date active = observation.addIdentifier().getPeriod().getStartElement().getValue(); + // END SNIPPET: nonNull - // DateTimeDt is a FHIR primitive however, so the following will return - // null - // unless a value has been placed there. - Date active = observation.addIdentifier().getPeriod().getStartElement().getValue(); - // END SNIPPET: nonNull + } - } + @SuppressWarnings("unused") + public static void codes() { + // START SNIPPET: codes + Patient patient = new Patient(); - @SuppressWarnings("unused") - public static void codes() { - // START SNIPPET: codes - Patient patient = new Patient(); + // You can set this code using a String if you want. Note that + // for "closed" valuesets (such as the one used for Patient.gender) + // you must use one of the strings defined by the FHIR specification. + // You must not define your own. + patient.getGenderElement().setValueAsString("male"); - // You can set this code using a String if you want. Note that - // for "closed" valuesets (such as the one used for Patient.gender) - // you must use one of the strings defined by the FHIR specification. - // You must not define your own. - patient.getGenderElement().setValueAsString("male"); - - // HAPI also provides Java enumerated types which make it easier to - // deal with coded values. This code achieves the exact same result - // as the code above. - patient.setGender(Enumerations.AdministrativeGender.MALE); - - // You can also retrieve coded values the same way - String genderString = patient.getGenderElement().getValueAsString(); - Enumerations.AdministrativeGender genderEnum = patient.getGenderElement().getValue(); - - // END SNIPPET: codes + // HAPI also provides Java enumerated types which make it easier to + // deal with coded values. This code achieves the exact same result + // as the code above. 
+ patient.setGender(Enumerations.AdministrativeGender.MALE); - } + // You can also retrieve coded values the same way + String genderString = patient.getGenderElement().getValueAsString(); + Enumerations.AdministrativeGender genderEnum = + patient.getGenderElement().getValue(); - - @SuppressWarnings("unused") - public static void codeableConcepts() { - // START SNIPPET: codeableConcepts - Patient patient = new Patient(); + // END SNIPPET: codes - // Coded types can naturally be set using plain strings - Coding statusCoding = patient.getMaritalStatus().addCoding(); - statusCoding.setSystem("http://hl7.org/fhir/v3/MaritalStatus"); - statusCoding.setCode("M"); - statusCoding.setDisplay("Married"); + } - // You could add a second coding to the field if needed too. This - // can be useful if you want to convey the concept using different - // codesystems. - Coding secondStatus = patient.getMaritalStatus().addCoding(); - secondStatus.setCode("H"); - secondStatus.setSystem("http://example.com#maritalStatus"); - secondStatus.setDisplay("Happily Married"); - - // CodeableConcept also has a text field meant to convey - // a user readable version of the concepts it conveys. - patient.getMaritalStatus().setText("Happily Married"); - - // There are also accessors for retrieving values - String firstCode = patient.getMaritalStatus().getCoding().get(0).getCode(); - String secondCode = patient.getMaritalStatus().getCoding().get(1).getCode(); - // END SNIPPET: codeableConcepts + @SuppressWarnings("unused") + public static void codeableConcepts() { + // START SNIPPET: codeableConcepts + Patient patient = new Patient(); - } + // Coded types can naturally be set using plain strings + Coding statusCoding = patient.getMaritalStatus().addCoding(); + statusCoding.setSystem("http://hl7.org/fhir/v3/MaritalStatus"); + statusCoding.setCode("M"); + statusCoding.setDisplay("Married"); + // You could add a second coding to the field if needed too. This + // can be useful if you want to convey the concept using different + // codesystems. + Coding secondStatus = patient.getMaritalStatus().addCoding(); + secondStatus.setCode("H"); + secondStatus.setSystem("http://example.com#maritalStatus"); + secondStatus.setDisplay("Happily Married"); - - public static void main(String[] args) { - tmp(); - - - datatypes(); + // CodeableConcept also has a text field meant to convey + // a user readable version of the concepts it conveys. 
+ patient.getMaritalStatus().setText("Happily Married"); - // START SNIPPET: observation - // Create an Observation instance - Observation observation = new Observation(); - - // Give the observation a status - observation.setStatus(Observation.ObservationStatus.FINAL); - - // Give the observation a code (what kind of observation is this) - Coding coding = observation.getCode().addCoding(); - coding.setCode("29463-7").setSystem("http://loinc.org").setDisplay("Body Weight"); - - // Create a quantity datatype - Quantity value = new Quantity(); - value.setValue(83.9).setSystem("http://unitsofmeasure.org").setCode("kg"); - observation.setValue(value); - - // Set the reference range - SimpleQuantity low = new SimpleQuantity(); - low.setValue(45).setSystem("http://unitsofmeasure.org").setCode("kg"); - observation.getReferenceRangeFirstRep().setLow(low); - SimpleQuantity high = new SimpleQuantity(); - low.setValue(90).setSystem("http://unitsofmeasure.org").setCode("kg"); - observation.getReferenceRangeFirstRep().setHigh(high); - - // END SNIPPET: observation - - - } + // There are also accessors for retrieving values + String firstCode = patient.getMaritalStatus().getCoding().get(0).getCode(); + String secondCode = patient.getMaritalStatus().getCoding().get(1).getCode(); + // END SNIPPET: codeableConcepts - private static void tmp() { -// Create a FHIR Context -FhirContext ctx = FhirContext.forR4(); + } -// Create a client -IGenericClient client = ctx.newRestfulGenericClient("http://fhirtest.uhn.ca/baseR4"); + public static void main(String[] args) { + tmp(); -// Read a patient with the given ID -Patient patient = client - .read() - .resource(Patient.class) - .withId("952975") - .execute(); + datatypes(); -// Print the patient's name -String string = ctx.newXmlParser().setPrettyPrint(true).encodeResourceToString(patient); -System.out.println(string); + // START SNIPPET: observation + // Create an Observation instance + Observation observation = new Observation(); - } + // Give the observation a status + observation.setStatus(Observation.ObservationStatus.FINAL); - public void namesHard() { - // START SNIPPET: namesHard - Patient patient = new Patient(); - HumanName name = patient.addName(); - name.setFamily("Smith"); - StringType firstName = name.addGivenElement(); - firstName.setValue("Rob"); - StringType secondName = name.addGivenElement(); - secondName.setValue("Bruce"); - // END SNIPPET: namesHard - } + // Give the observation a code (what kind of observation is this) + Coding coding = observation.getCode().addCoding(); + coding.setCode("29463-7").setSystem("http://loinc.org").setDisplay("Body Weight"); - public void namesEasy() { - // START SNIPPET: namesEasy - Patient patient = new Patient(); - patient.addName().setFamily("Smith").addGiven("Rob").addGiven("Bruce"); - // END SNIPPET: namesEasy - } + // Create a quantity datatype + Quantity value = new Quantity(); + value.setValue(83.9).setSystem("http://unitsofmeasure.org").setCode("kg"); + observation.setValue(value); + // Set the reference range + SimpleQuantity low = new SimpleQuantity(); + low.setValue(45).setSystem("http://unitsofmeasure.org").setCode("kg"); + observation.getReferenceRangeFirstRep().setLow(low); + SimpleQuantity high = new SimpleQuantity(); + low.setValue(90).setSystem("http://unitsofmeasure.org").setCode("kg"); + observation.getReferenceRangeFirstRep().setHigh(high); + + // END SNIPPET: observation + + } + + private static void tmp() { + // Create a FHIR Context + FhirContext ctx = FhirContext.forR4(); + + // Create a 
client + IGenericClient client = ctx.newRestfulGenericClient("http://fhirtest.uhn.ca/baseR4"); + + // Read a patient with the given ID + Patient patient = client.read().resource(Patient.class).withId("952975").execute(); + + // Print the patient's name + String string = ctx.newXmlParser().setPrettyPrint(true).encodeResourceToString(patient); + System.out.println(string); + } + + public void namesHard() { + // START SNIPPET: namesHard + Patient patient = new Patient(); + HumanName name = patient.addName(); + name.setFamily("Smith"); + StringType firstName = name.addGivenElement(); + firstName.setValue("Rob"); + StringType secondName = name.addGivenElement(); + secondName.setValue("Bruce"); + // END SNIPPET: namesHard + } + + public void namesEasy() { + // START SNIPPET: namesEasy + Patient patient = new Patient(); + patient.addName().setFamily("Smith").addGiven("Rob").addGiven("Bruce"); + // END SNIPPET: namesEasy + } } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/FhirTesterConfig.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/FhirTesterConfig.java index f700294b507..029e057c28d 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/FhirTesterConfig.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/FhirTesterConfig.java @@ -43,30 +43,29 @@ public class FhirTesterConfig { /** * This bean tells the testing webpage which servers it should configure itself * to communicate with. In this example we configure it to talk to the local - * server, as well as one public server. If you are creating a project to - * deploy somewhere else, you might choose to only put your own server's + * server, as well as one public server. If you are creating a project to + * deploy somewhere else, you might choose to only put your own server's * address here. - * + * * Note the use of the ${serverBase} variable below. This will be replaced with * the base URL as reported by the server itself. Often for a simple Tomcat * (or other container) installation, this will end up being something * like "http://localhost:8080/hapi-fhir-jpaserver-example". If you are - * deploying your server to a place with a fully qualified domain name, + * deploying your server to a place with a fully qualified domain name, * you might want to use that instead of using the variable. 
*/ @Bean public TesterConfig testerConfig() { TesterConfig retVal = new TesterConfig(); - retVal - .addServer() + retVal.addServer() .withId("home") .withFhirVersion(FhirVersionEnum.R4) .withBaseUrl("${serverBase}/fhir") .withName("Local Tester") // Add a $diff button on search result rows where version > 1 - .withSearchResultRowOperation("$diff", id -> id.isVersionIdPartValidLong() && id.getVersionIdPartAsLong() > 1) - - .addServer() + .withSearchResultRowOperation( + "$diff", id -> id.isVersionIdPartValidLong() && id.getVersionIdPartAsLong() > 1) + .addServer() .withId("hapi") .withFhirVersion(FhirVersionEnum.R4) .withBaseUrl("http://hapi.fhir.org/baseR4") @@ -76,13 +75,12 @@ public class FhirTesterConfig { .withSearchResultRowInteraction(RestOperationTypeEnum.UPDATE, id -> false); /* - * Use the method below to supply a client "factory" which can be used + * Use the method below to supply a client "factory" which can be used * if your server requires authentication */ // retVal.setClientFactory(clientFactory); - + return retVal; } - } // END SNIPPET: file diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/GenericClientExample.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/GenericClientExample.java index db6617bd076..c4dc98a9e16 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/GenericClientExample.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/GenericClientExample.java @@ -91,21 +91,12 @@ public class GenericClientExample { Parameters patch = new Parameters(); Parameters.ParametersParameterComponent operation = patch.addParameter(); operation.setName("operation"); - operation - .addPart() - .setName("type") - .setValue(new CodeType("delete")); - operation - .addPart() - .setName("path") - .setValue(new StringType("Patient.identifier[0]")); + operation.addPart().setName("type").setValue(new CodeType("delete")); + operation.addPart().setName("path").setValue(new StringType("Patient.identifier[0]")); // Invoke the patch - MethodOutcome outcome = client - .patch() - .withFhirPatch(patch) - .withId("Patient/123") - .execute(); + MethodOutcome outcome = + client.patch().withFhirPatch(patch).withId("Patient/123").execute(); // The server may provide the updated contents in the response Patient resultingResource = (Patient) outcome.getResource(); @@ -123,20 +114,16 @@ public class GenericClientExample { IGenericClient client = ctx.newRestfulGenericClient(serverBase); // Create a JSON patch object - String patch = "[ " + - " { " + - " \"op\":\"replace\", " + - " \"path\":\"/active\", " + - " \"value\":false " + - " } " + - "]"; + String patch = "[ " + " { " + + " \"op\":\"replace\", " + + " \"path\":\"/active\", " + + " \"value\":false " + + " } " + + "]"; // Invoke the patch - MethodOutcome outcome = client - .patch() - .withBody(patch) - .withId("Patient/123") - .execute(); + MethodOutcome outcome = + client.patch().withBody(patch).withId("Patient/123").execute(); // The server may provide the updated contents in the response Patient resultingResource = (Patient) outcome.getResource(); @@ -153,12 +140,11 @@ public class GenericClientExample { IGenericClient client = ctx.newRestfulGenericClient(serverBase); // Perform a search - Bundle results = client - .search() - .forResource(Patient.class) - .where(Patient.FAMILY.matches().value("duck")) - .returnBundle(Bundle.class) - .execute(); + Bundle results = client.search() + .forResource(Patient.class) + .where(Patient.FAMILY.matches().value("duck")) + .returnBundle(Bundle.class) + .execute(); 
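// A minimal sketch, using the Bundle accessors shown elsewhere in this file, of how the
// individual matches could be pulled out of the returned Bundle:
// for (Bundle.BundleEntryComponent entry : results.getEntry()) {
//     Patient patient = (Patient) entry.getResource();
// }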
System.out.println("Found " + results.getEntry().size() + " patients named 'duck'"); // END SNIPPET: simple @@ -179,10 +165,10 @@ public class GenericClientExample { // encoding to the server // instead of the default which is non-pretty printed XML) MethodOutcome outcome = client.create() - .resource(patient) - .prettyPrint() - .encodedJson() - .execute(); + .resource(patient) + .prettyPrint() + .encodedJson() + .execute(); // The MethodOutcome object will contain information about the // response from the server, including the ID of the created @@ -198,16 +184,16 @@ public class GenericClientExample { // START SNIPPET: createConditional // One form MethodOutcome outcome = client.create() - .resource(patient) - .conditionalByUrl("Patient?identifier=system%7C00001") - .execute(); + .resource(patient) + .conditionalByUrl("Patient?identifier=system%7C00001") + .execute(); // Another form MethodOutcome outcome2 = client.create() - .resource(patient) - .conditional() - .where(Patient.IDENTIFIER.exactly().systemAndIdentifier("system", "00001")) - .execute(); + .resource(patient) + .conditional() + .where(Patient.IDENTIFIER.exactly().systemAndIdentifier("system", "00001")) + .execute(); // This will return Boolean.TRUE if the server responded with an HTTP 201 created, // otherwise it will return null. @@ -224,9 +210,7 @@ public class GenericClientExample { patient.addName().setFamily("Smith").addGiven("John"); // Validate the resource - MethodOutcome outcome = client.validate() - .resource(patient) - .execute(); + MethodOutcome outcome = client.validate().resource(patient).execute(); // The returned object will contain an operation outcome resource OperationOutcome oo = (OperationOutcome) outcome.getOperationOutcome(); @@ -254,9 +238,7 @@ public class GenericClientExample { patient.setId("Patient/123"); // Invoke the server update method - MethodOutcome outcome = client.update() - .resource(patient) - .execute(); + MethodOutcome outcome = client.update().resource(patient).execute(); // The MethodOutcome object will contain information about the // response from the server, including the ID of the created @@ -271,22 +253,23 @@ public class GenericClientExample { Patient patient = new Patient(); // START SNIPPET: updateConditional client.update() - .resource(patient) - .conditionalByUrl("Patient?identifier=system%7C00001") - .execute(); + .resource(patient) + .conditionalByUrl("Patient?identifier=system%7C00001") + .execute(); client.update() - .resource(patient) - .conditional() - .where(Patient.IDENTIFIER.exactly().systemAndIdentifier("system", "00001")) - .execute(); + .resource(patient) + .conditional() + .where(Patient.IDENTIFIER.exactly().systemAndIdentifier("system", "00001")) + .execute(); // END SNIPPET: updateConditional } { // START SNIPPET: etagupdate // First, let's retrieve the latest version of a resource // from the server - Patient patient = client.read().resource(Patient.class).withId("123").execute(); + Patient patient = + client.read().resource(Patient.class).withId("123").execute(); // If the server is a version aware server, we should now know the latest version // of the resource @@ -299,10 +282,7 @@ public class GenericClientExample { // a version, it will be included in the request sent to // the server try { - MethodOutcome outcome = client - .update() - .resource(patient) - .execute(); + MethodOutcome outcome = client.update().resource(patient).execute(); } catch (PreconditionFailedException e) { // If we get here, the latest version has changed // on the server so our 
update failed. @@ -313,87 +293,87 @@ public class GenericClientExample { // START SNIPPET: conformance // Retrieve the server's conformance statement and print its // description - CapabilityStatement conf = client - .capabilities() - .ofType(CapabilityStatement.class) - .execute(); + CapabilityStatement conf = + client.capabilities().ofType(CapabilityStatement.class).execute(); System.out.println(conf.getDescriptionElement().getValue()); // END SNIPPET: conformance } { // START SNIPPET: delete - MethodOutcome response = client - .delete() - .resourceById(new IdType("Patient", "1234")) - .execute(); + MethodOutcome response = + client.delete().resourceById(new IdType("Patient", "1234")).execute(); // outcome may be null if the server didn't return one OperationOutcome outcome = (OperationOutcome) response.getOperationOutcome(); if (outcome != null) { - System.out.println(outcome.getIssueFirstRep().getDetails().getCodingFirstRep().getCode()); + System.out.println(outcome.getIssueFirstRep() + .getDetails() + .getCodingFirstRep() + .getCode()); } // END SNIPPET: delete } { // START SNIPPET: deleteConditional client.delete() - .resourceConditionalByUrl("Patient?identifier=system%7C00001") - .execute(); + .resourceConditionalByUrl("Patient?identifier=system%7C00001") + .execute(); client.delete() - .resourceConditionalByType("Patient") - .where(Patient.IDENTIFIER.exactly().systemAndIdentifier("system", "00001")) - .execute(); + .resourceConditionalByType("Patient") + .where(Patient.IDENTIFIER.exactly().systemAndIdentifier("system", "00001")) + .execute(); // END SNIPPET: deleteConditional } { // START SNIPPET: deleteCascade client.delete() - .resourceById(new IdType("Patient/123")) - .cascade(DeleteCascadeModeEnum.DELETE) - .execute(); + .resourceById(new IdType("Patient/123")) + .cascade(DeleteCascadeModeEnum.DELETE) + .execute(); client.delete() - .resourceConditionalByType("Patient") - .where(Patient.IDENTIFIER.exactly().systemAndIdentifier("system", "00001")) - .execute(); + .resourceConditionalByType("Patient") + .where(Patient.IDENTIFIER.exactly().systemAndIdentifier("system", "00001")) + .execute(); // END SNIPPET: deleteCascade } { // START SNIPPET: search Bundle response = client.search() - .forResource(Patient.class) - .where(Patient.BIRTHDATE.beforeOrEquals().day("2011-01-01")) - .and(Patient.GENERAL_PRACTITIONER.hasChainedProperty(Organization.NAME.matches().value("Smith"))) - .returnBundle(Bundle.class) - .execute(); + .forResource(Patient.class) + .where(Patient.BIRTHDATE.beforeOrEquals().day("2011-01-01")) + .and(Patient.GENERAL_PRACTITIONER.hasChainedProperty( + Organization.NAME.matches().value("Smith"))) + .returnBundle(Bundle.class) + .execute(); // END SNIPPET: search // START SNIPPET: searchOr response = client.search() - .forResource(Patient.class) - .where(Patient.FAMILY.matches().values("Smith", "Smyth")) - .returnBundle(Bundle.class) - .execute(); + .forResource(Patient.class) + .where(Patient.FAMILY.matches().values("Smith", "Smyth")) + .returnBundle(Bundle.class) + .execute(); // END SNIPPET: searchOr // START SNIPPET: searchAnd response = client.search() - .forResource(Patient.class) - .where(Patient.ADDRESS.matches().values("Toronto")) - .and(Patient.ADDRESS.matches().values("Ontario")) - .and(Patient.ADDRESS.matches().values("Canada")) - .returnBundle(Bundle.class) - .execute(); + .forResource(Patient.class) + .where(Patient.ADDRESS.matches().values("Toronto")) + .and(Patient.ADDRESS.matches().values("Ontario")) + .and(Patient.ADDRESS.matches().values("Canada")) + 
.returnBundle(Bundle.class) + .execute(); // END SNIPPET: searchAnd // START SNIPPET: searchCompartment response = client.search() - .forResource(Patient.class) - .withIdAndCompartment("123", "condition") - .where(Patient.ADDRESS.matches().values("Toronto")) - .returnBundle(Bundle.class) - .execute(); + .forResource(Patient.class) + .withIdAndCompartment("123", "condition") + .where(Patient.ADDRESS.matches().values("Toronto")) + .returnBundle(Bundle.class) + .execute(); // END SNIPPET: searchCompartment // START SNIPPET: searchUrl @@ -403,65 +383,65 @@ public class GenericClientExample { // URL will be added to it searchUrl = "Patient?identifier=foo"; - response = client.search() - .byUrl(searchUrl) - .returnBundle(Bundle.class) - .execute(); + response = + client.search().byUrl(searchUrl).returnBundle(Bundle.class).execute(); // END SNIPPET: searchUrl // START SNIPPET: searchSubsetSummary response = client.search() - .forResource(Patient.class) - .where(Patient.ADDRESS.matches().values("Toronto")) - .returnBundle(Bundle.class) - .summaryMode(SummaryEnum.TRUE) - .execute(); + .forResource(Patient.class) + .where(Patient.ADDRESS.matches().values("Toronto")) + .returnBundle(Bundle.class) + .summaryMode(SummaryEnum.TRUE) + .execute(); // END SNIPPET: searchSubsetSummary // START SNIPPET: searchSubsetElements response = client.search() - .forResource(Patient.class) - .where(Patient.ADDRESS.matches().values("Toronto")) - .returnBundle(Bundle.class) - .elementsSubset("identifier", "name") // only include the identifier and name - .execute(); + .forResource(Patient.class) + .where(Patient.ADDRESS.matches().values("Toronto")) + .returnBundle(Bundle.class) + .elementsSubset("identifier", "name") // only include the identifier and name + .execute(); // END SNIPPET: searchSubsetElements // START SNIPPET: searchAdv response = client.search() - .forResource(Patient.class) - .encodedJson() - .where(Patient.BIRTHDATE.beforeOrEquals().day("2012-01-22")) - .and(Patient.BIRTHDATE.after().day("2011-01-01")) - .withTag("http://acme.org/codes", "needs-review") - .include(Patient.INCLUDE_ORGANIZATION.asRecursive()) - .include(Patient.INCLUDE_GENERAL_PRACTITIONER.asNonRecursive()) - .revInclude(Provenance.INCLUDE_TARGET) - .lastUpdated(new DateRangeParam("2011-01-01", null)) - .sort().ascending(Patient.BIRTHDATE) - .sort().descending(Patient.NAME) - .count(123) - .returnBundle(Bundle.class) - .execute(); + .forResource(Patient.class) + .encodedJson() + .where(Patient.BIRTHDATE.beforeOrEquals().day("2012-01-22")) + .and(Patient.BIRTHDATE.after().day("2011-01-01")) + .withTag("http://acme.org/codes", "needs-review") + .include(Patient.INCLUDE_ORGANIZATION.asRecursive()) + .include(Patient.INCLUDE_GENERAL_PRACTITIONER.asNonRecursive()) + .revInclude(Provenance.INCLUDE_TARGET) + .lastUpdated(new DateRangeParam("2011-01-01", null)) + .sort() + .ascending(Patient.BIRTHDATE) + .sort() + .descending(Patient.NAME) + .count(123) + .returnBundle(Bundle.class) + .execute(); // END SNIPPET: searchAdv // START SNIPPET: searchPost response = client.search() - .forResource("Patient") - .where(Patient.NAME.matches().value("Tester")) - .usingStyle(SearchStyleEnum.POST) - .returnBundle(Bundle.class) - .execute(); + .forResource("Patient") + .where(Patient.NAME.matches().value("Tester")) + .usingStyle(SearchStyleEnum.POST) + .returnBundle(Bundle.class) + .execute(); // END SNIPPET: searchPost // START SNIPPET: searchComposite response = client.search() - .forResource("Observation") - .where(Observation.CODE_VALUE_DATE - 
.withLeft(Observation.CODE.exactly().code("FOO$BAR")) - .withRight(Observation.VALUE_DATE.exactly().day("2001-01-01"))) - .returnBundle(Bundle.class) - .execute(); + .forResource("Observation") + .where(Observation.CODE_VALUE_DATE + .withLeft(Observation.CODE.exactly().code("FOO$BAR")) + .withRight(Observation.VALUE_DATE.exactly().day("2001-01-01"))) + .returnBundle(Bundle.class) + .execute(); // END SNIPPET: searchComposite } { @@ -470,36 +450,32 @@ public class GenericClientExample { // .. populate this list - note that you can also pass in a populated // Bundle if you want to create one manually .. - List response = client.transaction().withResources(resources).execute(); + List response = + client.transaction().withResources(resources).execute(); // END SNIPPET: transaction } { // START SNIPPET: read // search for patient 123 - Patient patient = client.read() - .resource(Patient.class) - .withId("123") - .execute(); + Patient patient = + client.read().resource(Patient.class).withId("123").execute(); // END SNIPPET: read } { // START SNIPPET: vread // search for patient 123 (specific version 888) Patient patient = client.read() - .resource(Patient.class) - .withIdAndVersion("123", "888") - .execute(); + .resource(Patient.class) + .withIdAndVersion("123", "888") + .execute(); // END SNIPPET: vread } { // START SNIPPET: readabsolute // search for patient 123 on example.com String url = "http://example.com/fhir/Patient/123"; - Patient patient = client.read() - .resource(Patient.class) - .withUrl(url) - .execute(); + Patient patient = client.read().resource(Patient.class).withUrl(url).execute(); // END SNIPPET: readabsolute } @@ -507,17 +483,16 @@ public class GenericClientExample { // START SNIPPET: etagread // search for patient 123 Patient patient = client.read() - .resource(Patient.class) - .withId("123") - .ifVersionMatches("001").returnNull() - .execute(); + .resource(Patient.class) + .withId("123") + .ifVersionMatches("001") + .returnNull() + .execute(); if (patient == null) { // resource has not changed } // END SNIPPET: etagread } - - } @SuppressWarnings("unused") @@ -525,22 +500,18 @@ public class GenericClientExample { IGenericClient client = FhirContext.forDstu2().newRestfulGenericClient(""); { // START SNIPPET: historyDstu2 - Bundle response = client - .history() - .onServer() - .returnBundle(Bundle.class) - .execute(); + Bundle response = + client.history().onServer().returnBundle(Bundle.class).execute(); // END SNIPPET: historyDstu2 } { // START SNIPPET: historyFeatures - Bundle response = client - .history() - .onServer() - .returnBundle(Bundle.class) - .since(new InstantType("2012-01-01T12:22:32.038Z")) - .count(100) - .execute(); + Bundle response = client.history() + .onServer() + .returnBundle(Bundle.class) + .since(new InstantType("2012-01-01T12:22:32.038Z")) + .count(100) + .execute(); // END SNIPPET: historyFeatures } } @@ -557,10 +528,10 @@ public class GenericClientExample { // Perform a search Bundle resultBundle = client.search() - .forResource(Patient.class) - .where(Patient.NAME.matches().value("Smith")) - .returnBundle(Bundle.class) - .execute(); + .forResource(Patient.class) + .where(Patient.NAME.matches().value("Smith")) + .returnBundle(Bundle.class) + .execute(); if (resultBundle.getLink(Bundle.LINK_NEXT) != null) { @@ -585,13 +556,12 @@ public class GenericClientExample { inParams.addParameter().setName("end").setValue(new DateType("2015-03-01")); // Invoke $everything on "Patient/1" - Parameters outParams = client - .operation() - .onInstance(new 
IdType("Patient", "1")) - .named("$everything") - .withParameters(inParams) - .useHttpGet() // Use HTTP GET instead of POST - .execute(); + Parameters outParams = client.operation() + .onInstance(new IdType("Patient", "1")) + .named("$everything") + .withParameters(inParams) + .useHttpGet() // Use HTTP GET instead of POST + .execute(); // END SNIPPET: operationHttpGet } @@ -609,12 +579,11 @@ public class GenericClientExample { inParams.addParameter().setName("end").setValue(new DateType("2015-03-01")); // Invoke $everything on "Patient/1" - Parameters outParams = client - .operation() - .onInstance(new IdType("Patient", "1")) - .named("$everything") - .withParameters(inParams) - .execute(); + Parameters outParams = client.operation() + .onInstance(new IdType("Patient", "1")) + .named("$everything") + .withParameters(inParams) + .execute(); /* * Note that the $everything operation returns a Bundle instead @@ -638,13 +607,11 @@ public class GenericClientExample { client.registerInterceptor(new LoggingInterceptor(true)); // Invoke $everything on "Patient/1" - Parameters outParams = client - .operation() - .onInstance(new IdType("Patient", "1")) - .named("$everything") - .withNoParameters(Parameters.class) // No input parameters - .execute(); + Parameters outParams = client.operation() + .onInstance(new IdType("Patient", "1")) + .named("$everything") + .withNoParameters(Parameters.class) // No input parameters + .execute(); // END SNIPPET: operationNoIn } - } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/GenomicsUploader.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/GenomicsUploader.java index 3950e7969ce..f95c7142897 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/GenomicsUploader.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/GenomicsUploader.java @@ -39,7 +39,8 @@ public class GenomicsUploader { dnaSequenceVariantName.setCode("dnaSequenceVariantName"); dnaSequenceVariantName.setType(Enumerations.SearchParamType.TOKEN); dnaSequenceVariantName.setTitle("DNASequenceVariantName"); - dnaSequenceVariantName.setExpression("Observation.extension('http://hl7.org/fhir/StructureDefinition/observation-geneticsDNASequenceVariantName')"); + dnaSequenceVariantName.setExpression( + "Observation.extension('http://hl7.org/fhir/StructureDefinition/observation-geneticsDNASequenceVariantName')"); dnaSequenceVariantName.setXpathUsage(SearchParameter.XPathUsageType.NORMAL); client.update().resource(dnaSequenceVariantName).execute(); @@ -50,7 +51,8 @@ public class GenomicsUploader { dNAVariantId.setCode("dnaVariantId"); dNAVariantId.setType(Enumerations.SearchParamType.TOKEN); dNAVariantId.setTitle("DNAVariantId"); - dNAVariantId.setExpression("Observation.extension('http://hl7.org/fhir/StructureDefinition/observation-geneticsDNAVariantId')"); + dNAVariantId.setExpression( + "Observation.extension('http://hl7.org/fhir/StructureDefinition/observation-geneticsDNAVariantId')"); dNAVariantId.setXpathUsage(SearchParameter.XPathUsageType.NORMAL); client.update().resource(dNAVariantId).execute(); @@ -72,9 +74,9 @@ public class GenomicsUploader { alleleName.setCode("alleleName"); alleleName.setType(Enumerations.SearchParamType.TOKEN); alleleName.setTitle("AlleleName"); - alleleName.setExpression("Observation.extension('http://hl7.org/fhir/StructureDefinition/observation-geneticsAlleleName')"); + alleleName.setExpression( + "Observation.extension('http://hl7.org/fhir/StructureDefinition/observation-geneticsAlleleName')"); 
alleleName.setXpathUsage(SearchParameter.XPathUsageType.NORMAL); client.update().resource(alleleName).execute(); } - } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/HttpProxy.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/HttpProxy.java index 425970a83aa..633c0e34bcd 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/HttpProxy.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/HttpProxy.java @@ -34,36 +34,33 @@ import org.hl7.fhir.r4.model.Patient; public class HttpProxy { - public static void main(String[] args) { - /* - * This is out of date - Just keeping - * it in case it's helpful... - */ - final String authUser = "username"; - final String authPassword = "password"; - CredentialsProvider credsProvider = new BasicCredentialsProvider(); - credsProvider.setCredentials(new AuthScope("10.10.10.10", 8080), - new UsernamePasswordCredentials(authUser, authPassword)); + public static void main(String[] args) { + /* + * This is out of date - Just keeping + * it in case it's helpful... + */ + final String authUser = "username"; + final String authPassword = "password"; + CredentialsProvider credsProvider = new BasicCredentialsProvider(); + credsProvider.setCredentials( + new AuthScope("10.10.10.10", 8080), new UsernamePasswordCredentials(authUser, authPassword)); - HttpHost myProxy = new HttpHost("10.10.10.10", 8080); - - - HttpClientBuilder clientBuilder = HttpClientBuilder.create(); - clientBuilder - .setProxy(myProxy) - .setProxyAuthenticationStrategy(new ProxyAuthenticationStrategy()) - .setDefaultCredentialsProvider(credsProvider) - .disableCookieManagement(); - CloseableHttpClient httpClient = clientBuilder.build(); - - FhirContext ctx = FhirContext.forDstu2(); - String serverBase = "http://spark.furore.com/fhir/"; - ctx.getRestfulClientFactory().setHttpClient(httpClient); - IGenericClient client = ctx.newRestfulGenericClient(serverBase); + HttpHost myProxy = new HttpHost("10.10.10.10", 8080); - IdType id = new IdType("Patient", "123"); - Patient patient = client.read().resource(Patient.class).withId(id).execute(); - - } - + HttpClientBuilder clientBuilder = HttpClientBuilder.create(); + clientBuilder + .setProxy(myProxy) + .setProxyAuthenticationStrategy(new ProxyAuthenticationStrategy()) + .setDefaultCredentialsProvider(credsProvider) + .disableCookieManagement(); + CloseableHttpClient httpClient = clientBuilder.build(); + + FhirContext ctx = FhirContext.forDstu2(); + String serverBase = "http://spark.furore.com/fhir/"; + ctx.getRestfulClientFactory().setHttpClient(httpClient); + IGenericClient client = ctx.newRestfulGenericClient(serverBase); + + IdType id = new IdType("Patient", "123"); + Patient patient = client.read().resource(Patient.class).withId(id).execute(); + } } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/IRestfulClient.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/IRestfulClient.java index ab8bb1363ac..d3c49da6b4c 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/IRestfulClient.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/IRestfulClient.java @@ -31,7 +31,7 @@ import org.hl7.fhir.r4.model.StringType; import java.util.List; -//START SNIPPET: provider +// START SNIPPET: provider /** * All RESTful clients must be an interface which extends IBasicClient */ @@ -40,12 +40,12 @@ public interface IRestfulClient extends IBasicClient { /** * The "@Read" annotation indicates that this method supports the * read operation. 
Read operations should return a single resource - * instance. - * + * instance. + * * @param theId * The read operation takes one parameter, which must be of type * IdType and must be annotated with the "@Read.IdParam" annotation. - * @return + * @return * Returns a resource matching this identifier, or null if none exists. */ @Read() @@ -66,11 +66,11 @@ public interface IRestfulClient extends IBasicClient { Organization getOrganizationById(@IdParam IIdType theId); /** - * The "@Search" annotation indicates that this method supports the + * The "@Search" annotation indicates that this method supports the * search operation. You may have many different methods annotated with * this annotation, to support many different search criteria. This * example searches by family name. - * + * * @param theFamilyName * This operation takes one parameter which is the search criteria. It is * annotated with the "@Required" annotation. This annotation takes one argument, @@ -83,8 +83,5 @@ public interface IRestfulClient extends IBasicClient { */ @Search() List getPatient(@RequiredParam(name = Patient.SP_FAMILY) StringType theFamilyName); - } -//END SNIPPET: provider - - +// END SNIPPET: provider diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/IncludesExamples.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/IncludesExamples.java index f50710dc8d4..d6de19adad0 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/IncludesExamples.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/IncludesExamples.java @@ -32,45 +32,45 @@ import java.util.List; public class IncludesExamples { - public static void main(String[] args) { - testSearchForPatients(); - } + public static void main(String[] args) { + testSearchForPatients(); + } - private static void testSearchForPatients() { - List resources = new IncludesExamples().searchForPatients(); + private static void testSearchForPatients() { + List resources = new IncludesExamples().searchForPatients(); - // Create a bundle with both - FhirContext ctx = FhirContext.forDstu2(); + // Create a bundle with both + FhirContext ctx = FhirContext.forDstu2(); - R4BundleFactory bf = new R4BundleFactory(ctx); + R4BundleFactory bf = new R4BundleFactory(ctx); bf.addTotalResultsToBundle(resources.size(), BundleTypeEnum.SEARCHSET); bf.addResourcesToBundle(new ArrayList<>(resources), BundleTypeEnum.SEARCHSET, null, null, null); IBaseResource b = bf.getResourceBundle(); - // Encode the bundle - String encoded = ctx.newXmlParser().setPrettyPrint(true).encodeResourceToString(b); - System.out.println(encoded); - } + // Encode the bundle + String encoded = ctx.newXmlParser().setPrettyPrint(true).encodeResourceToString(b); + System.out.println(encoded); + } - // START SNIPPET: addIncludes - @Search - private List searchForPatients() { - // Create an organization - Organization org = new Organization(); - org.setId("Organization/65546"); - org.setName("Test Organization"); + // START SNIPPET: addIncludes + @Search + private List searchForPatients() { + // Create an organization + Organization org = new Organization(); + org.setId("Organization/65546"); + org.setName("Test Organization"); - // Create a patient - Patient patient = new Patient(); - patient.setId("Patient/1333"); - patient.addIdentifier().setSystem("urn:mrns").setValue("253345"); - patient.getManagingOrganization().setResource(org); + // Create a patient + Patient patient = new Patient(); + patient.setId("Patient/1333"); + 
patient.addIdentifier().setSystem("urn:mrns").setValue("253345"); + patient.getManagingOrganization().setResource(org); - // Here we return only the patient object, which has links to other resources - List retVal = new ArrayList(); - retVal.add(patient); - return retVal; - } - // END SNIPPET: addIncludes + // Here we return only the patient object, which has links to other resources + List retVal = new ArrayList(); + retVal.add(patient); + return retVal; + } + // END SNIPPET: addIncludes } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/Interceptors.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/Interceptors.java index b8f3467efab..ffa82d80f35 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/Interceptors.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/Interceptors.java @@ -32,7 +32,6 @@ import org.slf4j.LoggerFactory; public class Interceptors { - // START SNIPPET: sampleClass @Interceptor public class SimpleServerLoggingInterceptor { @@ -43,11 +42,9 @@ public class Interceptors { public void logRequests(RequestDetails theRequest) { ourLog.info("Request of type {} with request ID: {}", theRequest.getOperation(), theRequest.getRequestId()); } - } // END SNIPPET: sampleClass - public void registerClient() { // START SNIPPET: registerClient @@ -64,5 +61,4 @@ public class Interceptors { // END SNIPPET: registerClient } - } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/JaxRsClient.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/JaxRsClient.java index 64586007bd7..d1f0846c980 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/JaxRsClient.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/JaxRsClient.java @@ -23,25 +23,23 @@ import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jaxrs.client.JaxRsRestfulClientFactory; import ca.uhn.fhir.rest.client.api.IGenericClient; -@SuppressWarnings(value= {"serial"}) +@SuppressWarnings(value = {"serial"}) public class JaxRsClient { -public static void main(String[] args) { -//START SNIPPET: createClient - - // Create a client - FhirContext ctx = FhirContext.forDstu2(); - - // Create an instance of the JAX RS client factory and - // set it on the context - JaxRsRestfulClientFactory clientFactory = new JaxRsRestfulClientFactory(ctx); - ctx.setRestfulClientFactory(clientFactory); - - // This client uses JAX-RS! - IGenericClient client = ctx.newRestfulGenericClient("http://fhirtest.uhn.ca/baseDstu2"); - -//END SNIPPET: createClient -} + public static void main(String[] args) { + // START SNIPPET: createClient + // Create a client + FhirContext ctx = FhirContext.forDstu2(); + // Create an instance of the JAX RS client factory and + // set it on the context + JaxRsRestfulClientFactory clientFactory = new JaxRsRestfulClientFactory(ctx); + ctx.setRestfulClientFactory(clientFactory); + + // This client uses JAX-RS! 
+ IGenericClient client = ctx.newRestfulGenericClient("http://fhirtest.uhn.ca/baseDstu2"); + + // END SNIPPET: createClient + } } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/JaxRsConformanceProvider.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/JaxRsConformanceProvider.java index 78e0034a65b..8827962da1e 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/JaxRsConformanceProvider.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/JaxRsConformanceProvider.java @@ -23,37 +23,38 @@ import ca.uhn.fhir.jaxrs.server.AbstractJaxRsConformanceProvider; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.server.IResourceProvider; +import java.util.concurrent.ConcurrentHashMap; import javax.ejb.EJB; import javax.ejb.Stateless; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; -import java.util.concurrent.ConcurrentHashMap; /** * Conformance Rest Service - * + * * @author Peter Van Houte */ - // START SNIPPET: jax-rs-conformance +// START SNIPPET: jax-rs-conformance @Path("") @Stateless -@Produces({ MediaType.APPLICATION_JSON, Constants.CT_FHIR_JSON, Constants.CT_FHIR_XML }) +@Produces({MediaType.APPLICATION_JSON, Constants.CT_FHIR_JSON, Constants.CT_FHIR_XML}) public class JaxRsConformanceProvider extends AbstractJaxRsConformanceProvider { - @EJB - private JaxRsPatientRestProvider provider; + @EJB + private JaxRsPatientRestProvider provider; - public JaxRsConformanceProvider() { - super("My Server Description", "My Server Name", "My Server Version"); - } + public JaxRsConformanceProvider() { + super("My Server Description", "My Server Name", "My Server Version"); + } - @Override - protected ConcurrentHashMap, IResourceProvider> getProviders() { - ConcurrentHashMap, IResourceProvider> map = new ConcurrentHashMap, IResourceProvider>(); - map.put(JaxRsConformanceProvider.class, this); - map.put(JaxRsPatientRestProvider.class, provider); - return map; - } + @Override + protected ConcurrentHashMap, IResourceProvider> getProviders() { + ConcurrentHashMap, IResourceProvider> map = + new ConcurrentHashMap, IResourceProvider>(); + map.put(JaxRsConformanceProvider.class, this); + map.put(JaxRsPatientRestProvider.class, provider); + return map; + } } // END SNIPPET: jax-rs-conformance diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/JaxRsPatientRestProvider.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/JaxRsPatientRestProvider.java index b10b0a1fe28..90981c02318 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/JaxRsPatientRestProvider.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/JaxRsPatientRestProvider.java @@ -33,9 +33,9 @@ import org.hl7.fhir.r4.model.StringType; import javax.ejb.Local; import javax.ejb.Stateless; -import javax.ws.rs.*; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; +import javax.ws.rs.*; /** * A demo JaxRs Patient Rest Provider @@ -44,48 +44,56 @@ import javax.ws.rs.core.Response; @Stateless // START SNIPPET: jax-rs-provider-construction @Path("/Patient") -@Produces({ MediaType.APPLICATION_JSON, Constants.CT_FHIR_JSON, Constants.CT_FHIR_XML }) +@Produces({MediaType.APPLICATION_JSON, Constants.CT_FHIR_JSON, Constants.CT_FHIR_XML}) public class JaxRsPatientRestProvider extends AbstractJaxRsResourceProvider { - public JaxRsPatientRestProvider() { - super(JaxRsPatientRestProvider.class); - } -// END SNIPPET: jax-rs-provider-construction + public JaxRsPatientRestProvider() { + 
super(JaxRsPatientRestProvider.class); + } + // END SNIPPET: jax-rs-provider-construction - @Override - public Class getResourceType() { - return Patient.class; - } - - - @Create - public MethodOutcome create(@ResourceParam final Patient patient, @ConditionalUrlParam String theConditional) { - // create the patient ... - return new MethodOutcome(new IdType(1L)).setCreated(true); - } - -// START SNIPPET: jax-rs-provider-operation - @GET - @Path("/{id}/$someCustomOperation") - public Response someCustomOperationUsingGet(@PathParam("id") String id, String resource) throws Exception { - return customOperation(resource, RequestTypeEnum.GET, id, "$someCustomOperation", - RestOperationTypeEnum.EXTENDED_OPERATION_INSTANCE); - } - - @Operation(name = "someCustomOperation", idempotent = true, returnParameters = { - @OperationParam(name = "return", type = StringDt.class) }) - public Parameters someCustomOperation(@IdParam IdType myId, @OperationParam(name = "dummy") StringDt dummyInput) { - Parameters parameters = new Parameters(); - parameters.addParameter().setName("return").setValue(new StringType("My Dummy Result")); - return parameters; - } - // END SNIPPET: jax-rs-provider-operation - - @POST - @Path("/{id}/$someCustomOperation") - public Response someCustomOperationUsingPost(@PathParam("id") String id, String resource) throws Exception { - return customOperation(resource, RequestTypeEnum.POST, id, "$someCustomOperation", - RestOperationTypeEnum.EXTENDED_OPERATION_INSTANCE); - } - + @Override + public Class getResourceType() { + return Patient.class; + } + + @Create + public MethodOutcome create(@ResourceParam final Patient patient, @ConditionalUrlParam String theConditional) { + // create the patient ... + return new MethodOutcome(new IdType(1L)).setCreated(true); + } + + // START SNIPPET: jax-rs-provider-operation + @GET + @Path("/{id}/$someCustomOperation") + public Response someCustomOperationUsingGet(@PathParam("id") String id, String resource) throws Exception { + return customOperation( + resource, + RequestTypeEnum.GET, + id, + "$someCustomOperation", + RestOperationTypeEnum.EXTENDED_OPERATION_INSTANCE); + } + + @Operation( + name = "someCustomOperation", + idempotent = true, + returnParameters = {@OperationParam(name = "return", type = StringDt.class)}) + public Parameters someCustomOperation(@IdParam IdType myId, @OperationParam(name = "dummy") StringDt dummyInput) { + Parameters parameters = new Parameters(); + parameters.addParameter().setName("return").setValue(new StringType("My Dummy Result")); + return parameters; + } + // END SNIPPET: jax-rs-provider-operation + + @POST + @Path("/{id}/$someCustomOperation") + public Response someCustomOperationUsingPost(@PathParam("id") String id, String resource) throws Exception { + return customOperation( + resource, + RequestTypeEnum.POST, + id, + "$someCustomOperation", + RestOperationTypeEnum.EXTENDED_OPERATION_INSTANCE); + } } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/Multitenancy.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/Multitenancy.java index e7b6c37f34d..3b6f9796085 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/Multitenancy.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/Multitenancy.java @@ -31,39 +31,39 @@ import org.hl7.fhir.r4.model.Patient; public class Multitenancy { -//START SNIPPET: enableUrlBaseTenantIdentificationStrategy + // START SNIPPET: enableUrlBaseTenantIdentificationStrategy public class MyServer extends RestfulServer { - @Override - protected 
void initialize() { + @Override + protected void initialize() { - setTenantIdentificationStrategy(new UrlBaseTenantIdentificationStrategy()); + setTenantIdentificationStrategy(new UrlBaseTenantIdentificationStrategy()); - // ... do other initialization ... + // ... do other initialization ... + } } -} -//END SNIPPET: enableUrlBaseTenantIdentificationStrategy + // END SNIPPET: enableUrlBaseTenantIdentificationStrategy -//START SNIPPET: resourceProvider + // START SNIPPET: resourceProvider public class MyPatientResourceProvider implements IResourceProvider { - @Override - public Class getResourceType() { - return Patient.class; + @Override + public Class getResourceType() { + return Patient.class; + } + + @Read + public Patient read(RequestDetails theRequestDetails, @IdParam IdType theId) { + + String tenantId = theRequestDetails.getTenantId(); + String resourceId = theId.getIdPart(); + + // Use these two values to fetch the patient + + return new Patient(); + } } - @Read - public Patient read(RequestDetails theRequestDetails, @IdParam IdType theId) { - - String tenantId = theRequestDetails.getTenantId(); - String resourceId = theId.getIdPart(); - - // Use these two values to fetch the patient - - return new Patient(); - } -} - -//END SNIPPET: resourceProvider + // END SNIPPET: resourceProvider } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/MyPatient.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/MyPatient.java index 6e3fc75423a..386ee53e6b9 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/MyPatient.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/MyPatient.java @@ -19,7 +19,7 @@ */ package ca.uhn.hapi.fhir.docs; -//START SNIPPET: patientDef +// START SNIPPET: patientDef import ca.uhn.fhir.model.api.annotation.Child; import ca.uhn.fhir.model.api.annotation.Description; @@ -36,61 +36,61 @@ import java.util.List; /** * Definition class for adding extensions to the built-in * Patient resource type. - * + * * Note the "profile" attribute below, which indicates the URL/ID of the * profile implemented by this resource. You are not required to supply this, * but if you do it will be automatically populated in the resource meta * tag if the resource is returned by a server. */ -@ResourceDef(name="Patient", profile="http://example.com/StructureDefinition/mypatient") +@ResourceDef(name = "Patient", profile = "http://example.com/StructureDefinition/mypatient") public class MyPatient extends Patient { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 1L; - /** + /** * Each extension is defined in a field. Any valid HAPI Data Type * can be used for the field type. Note that the [name=""] attribute * in the @Child annotation needs to match the name for the bean accessor * and mutator methods. */ - @Child(name="petName") - @Extension(url="http://example.com/dontuse#petname", definedLocally=false, isModifier=false) - @Description(shortDefinition="The name of the patient's favourite pet") + @Child(name = "petName") + @Extension(url = "http://example.com/dontuse#petname", definedLocally = false, isModifier = false) + @Description(shortDefinition = "The name of the patient's favourite pet") private StringType myPetName; /** * The second example extension uses a List type to provide * repeatable values. Note that a [max=] value has been placed in * the @Child annotation. 
- * + * * Note also that this extension is a modifier extension */ - @Child(name="importantDates", max=Child.MAX_UNLIMITED) - @Extension(url="http://example.com/dontuse#importantDates", definedLocally=false, isModifier=true) - @Description(shortDefinition="Some dates of note for this patient") + @Child(name = "importantDates", max = Child.MAX_UNLIMITED) + @Extension(url = "http://example.com/dontuse#importantDates", definedLocally = false, isModifier = true) + @Description(shortDefinition = "Some dates of note for this patient") private List myImportantDates; /** * It is important to override the isEmpty() method, adding a check for any - * newly added fields. + * newly added fields. */ @Override public boolean isEmpty() { return super.isEmpty() && ElementUtil.isEmpty(myPetName, myImportantDates); } - + /******** * Accessors and mutators follow - * + * * IMPORTANT: * Each extension is required to have an getter/accessor and a setter/mutator. * You are highly recommended to create getters which create instances if they - * do not already exist, since this is how the rest of the HAPI FHIR API works. + * do not already exist, since this is how the rest of the HAPI FHIR API works. ********/ - + /** Getter for important dates */ public List getImportantDates() { - if (myImportantDates==null) { + if (myImportantDates == null) { myImportantDates = new ArrayList(); } return myImportantDates; @@ -113,6 +113,5 @@ public class MyPatient extends Patient { public void setPetName(StringType thePetName) { myPetName = thePetName; } - } -//END SNIPPET: patientDef +// END SNIPPET: patientDef diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/MyPatientUse.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/MyPatientUse.java index 79e69a93827..fa0738fbaea 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/MyPatientUse.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/MyPatientUse.java @@ -37,69 +37,67 @@ import java.util.List; public class MyPatientUse { - @ResourceDef() - public static class MyPatient extends Patient { + @ResourceDef() + public static class MyPatient extends Patient { - @Child(name="petName") - @Extension(url="http://example.com/dontuse#petname", definedLocally=false, isModifier=false) - @Description(shortDefinition="The name of the patient's favourite pet") - private StringType myPetName; - - public StringType getPetName() { - if(myPetName==null) { - myPetName = new StringType(); - } - return myPetName; - } + @Child(name = "petName") + @Extension(url = "http://example.com/dontuse#petname", definedLocally = false, isModifier = false) + @Description(shortDefinition = "The name of the patient's favourite pet") + private StringType myPetName; - public void setPetName(StringType thePetName) { - myPetName = thePetName; - } + public StringType getPetName() { + if (myPetName == null) { + myPetName = new StringType(); + } + return myPetName; + } - public List getImportantDates() { - if (myImportantDates==null) { - myImportantDates= new ArrayList<>(); - } - return myImportantDates; - } + public void setPetName(StringType thePetName) { + myPetName = thePetName; + } - public void setImportantDates(List theImportantDates) { - myImportantDates = theImportantDates; - } + public List getImportantDates() { + if (myImportantDates == null) { + myImportantDates = new ArrayList<>(); + } + return myImportantDates; + } - @Child(name="importantDates", max=Child.MAX_UNLIMITED) - @Extension(url="http://example.com/dontuse#importantDates", definedLocally=false, 
isModifier=true) - @Description(shortDefinition="Some dates of note for the patient") - private List myImportantDates; + public void setImportantDates(List theImportantDates) { + myImportantDates = theImportantDates; + } - } - -@SuppressWarnings("unused") -public static void main(String[] args) throws DataFormatException, IOException { -//START SNIPPET: patientUse -MyPatient patient = new MyPatient(); -patient.setPetName(new StringType("Fido")); -patient.getImportantDates().add(new DateTimeType("2010-01-02")); -patient.getImportantDates().add(new DateTimeType("2014-01-26T11:11:11")); + @Child(name = "importantDates", max = Child.MAX_UNLIMITED) + @Extension(url = "http://example.com/dontuse#importantDates", definedLocally = false, isModifier = true) + @Description(shortDefinition = "Some dates of note for the patient") + private List myImportantDates; + } -patient.addName().setFamily("Smith").addGiven("John").addGiven("Quincy").addSuffix("Jr"); + @SuppressWarnings("unused") + public static void main(String[] args) throws DataFormatException, IOException { + // START SNIPPET: patientUse + MyPatient patient = new MyPatient(); + patient.setPetName(new StringType("Fido")); + patient.getImportantDates().add(new DateTimeType("2010-01-02")); + patient.getImportantDates().add(new DateTimeType("2014-01-26T11:11:11")); -IParser p = FhirContext.forDstu2().newXmlParser().setPrettyPrint(true); -String messageString = p.encodeResourceToString(patient); + patient.addName().setFamily("Smith").addGiven("John").addGiven("Quincy").addSuffix("Jr"); -System.out.println(messageString); -//END SNIPPET: patientUse - -//START SNIPPET: patientParse -IParser parser = FhirContext.forDstu2().newXmlParser(); -MyPatient newPatient = parser.parseResource(MyPatient.class, messageString); -//END SNIPPET: patientParse + IParser p = FhirContext.forDstu2().newXmlParser().setPrettyPrint(true); + String messageString = p.encodeResourceToString(patient); -{ - FhirContext ctx2 = FhirContext.forDstu2(); - RuntimeResourceDefinition def = ctx2.getResourceDefinition(patient); - System.out.println(ctx2.newXmlParser().setPrettyPrint(true).encodeResourceToString(def.toProfile())); -} -} - + System.out.println(messageString); + // END SNIPPET: patientUse + + // START SNIPPET: patientParse + IParser parser = FhirContext.forDstu2().newXmlParser(); + MyPatient newPatient = parser.parseResource(MyPatient.class, messageString); + // END SNIPPET: patientParse + + { + FhirContext ctx2 = FhirContext.forDstu2(); + RuntimeResourceDefinition def = ctx2.getResourceDefinition(patient); + System.out.println(ctx2.newXmlParser().setPrettyPrint(true).encodeResourceToString(def.toProfile())); + } + } } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/Narrative.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/Narrative.java index e858fc957ae..3b9e31dcbf6 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/Narrative.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/Narrative.java @@ -27,32 +27,34 @@ import org.hl7.fhir.r4.model.Patient; @SuppressWarnings("unused") public class Narrative { -public static void main(String[] args) throws DataFormatException { + public static void main(String[] args) throws DataFormatException { -//START SNIPPET: example1 -Patient patient = new Patient(); -patient.addIdentifier().setSystem("urn:foo").setValue("7000135"); -patient.addName().setFamily("Smith").addGiven("John").addGiven("Edward"); -patient.addAddress().addLine("742 Evergreen 
Terrace").setCity("Springfield").setState("ZZ"); + // START SNIPPET: example1 + Patient patient = new Patient(); + patient.addIdentifier().setSystem("urn:foo").setValue("7000135"); + patient.addName().setFamily("Smith").addGiven("John").addGiven("Edward"); + patient.addAddress() + .addLine("742 Evergreen Terrace") + .setCity("Springfield") + .setState("ZZ"); -FhirContext ctx = FhirContext.forDstu2(); + FhirContext ctx = FhirContext.forDstu2(); -// Use the narrative generator -ctx.setNarrativeGenerator(new DefaultThymeleafNarrativeGenerator()); + // Use the narrative generator + ctx.setNarrativeGenerator(new DefaultThymeleafNarrativeGenerator()); -// Encode the output, including the narrative -String output = ctx.newXmlParser().setPrettyPrint(true).encodeResourceToString(patient); -System.out.println(output); -//END SNIPPET: example1 + // Encode the output, including the narrative + String output = ctx.newXmlParser().setPrettyPrint(true).encodeResourceToString(patient); + System.out.println(output); + // END SNIPPET: example1 -} - -public void simple() { -//START SNIPPET: simple -Patient pat = new Patient(); -pat.getText().setStatus(org.hl7.fhir.r4.model.Narrative.NarrativeStatus.GENERATED); -pat.getText().setDivAsString("
    <div>This is the narrative text<br/>
    this is line 2</div>
    "); -//END SNIPPET: simple -} - + } + + public void simple() { + // START SNIPPET: simple + Patient pat = new Patient(); + pat.getText().setStatus(org.hl7.fhir.r4.model.Narrative.NarrativeStatus.GENERATED); + pat.getText().setDivAsString("
    <div>This is the narrative text<br/>
    this is line 2</div>
    "); + // END SNIPPET: simple + } } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/NarrativeGenerator.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/NarrativeGenerator.java index dd24ad6ab57..952ab969f10 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/NarrativeGenerator.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/NarrativeGenerator.java @@ -28,18 +28,17 @@ public class NarrativeGenerator { public void testGenerator() { -//START SNIPPET: gen -FhirContext ctx = FhirContext.forDstu2(); -String propFile = "classpath:/com/foo/customnarrative.properties"; -CustomThymeleafNarrativeGenerator gen = new CustomThymeleafNarrativeGenerator(propFile); + // START SNIPPET: gen + FhirContext ctx = FhirContext.forDstu2(); + String propFile = "classpath:/com/foo/customnarrative.properties"; + CustomThymeleafNarrativeGenerator gen = new CustomThymeleafNarrativeGenerator(propFile); -Patient patient = new Patient(); + Patient patient = new Patient(); -ctx.setNarrativeGenerator(gen); -String output = ctx.newJsonParser().encodeResourceToString(patient); -System.out.println(output); -//END SNIPPET: gen + ctx.setNarrativeGenerator(gen); + String output = ctx.newJsonParser().encodeResourceToString(patient); + System.out.println(output); + // END SNIPPET: gen - } } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/PagingPatientProvider.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/PagingPatientProvider.java index fab3cb559e2..2622ad526d1 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/PagingPatientProvider.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/PagingPatientProvider.java @@ -28,77 +28,76 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.r4.model.InstantType; import org.hl7.fhir.r4.model.Patient; -import javax.annotation.Nonnull; import java.util.List; +import javax.annotation.Nonnull; @SuppressWarnings("null") // START SNIPPET: provider public class PagingPatientProvider implements IResourceProvider { - /** - * Search for Patient resources matching a given family name - */ - @Search - public IBundleProvider search(@RequiredParam(name = Patient.SP_FAMILY) StringParam theFamily) { - final InstantType searchTime = InstantType.withCurrentTime(); + /** + * Search for Patient resources matching a given family name + */ + @Search + public IBundleProvider search(@RequiredParam(name = Patient.SP_FAMILY) StringParam theFamily) { + final InstantType searchTime = InstantType.withCurrentTime(); - /** - * First, we'll search the database for a set of database row IDs that - * match the given search criteria. That way we can keep just the row IDs - * around, and load the actual resources on demand later as the client - * pages through them. - */ - final List matchingResourceIds = null; // <-- implement this + /** + * First, we'll search the database for a set of database row IDs that + * match the given search criteria. That way we can keep just the row IDs + * around, and load the actual resources on demand later as the client + * pages through them. + */ + final List matchingResourceIds = null; // <-- implement this - /** - * Return a bundle provider which can page through the IDs and return the - * resources that go with them. - */ - return new IBundleProvider() { + /** + * Return a bundle provider which can page through the IDs and return the + * resources that go with them. 
+ */ + return new IBundleProvider() { - @Override - public Integer size() { - return matchingResourceIds.size(); - } - - @Nonnull @Override - public List getResources(int theFromIndex, int theToIndex) { - int end = Math.max(theToIndex, matchingResourceIds.size() - 1); - List idsToReturn = matchingResourceIds.subList(theFromIndex, end); - return loadResourcesByIds(idsToReturn); - } + public Integer size() { + return matchingResourceIds.size(); + } - @Override - public InstantType getPublished() { - return searchTime; - } + @Nonnull + @Override + public List getResources(int theFromIndex, int theToIndex) { + int end = Math.max(theToIndex, matchingResourceIds.size() - 1); + List idsToReturn = matchingResourceIds.subList(theFromIndex, end); + return loadResourcesByIds(idsToReturn); + } - @Override - public Integer preferredPageSize() { - // Typically this method just returns null - return null; - } + @Override + public InstantType getPublished() { + return searchTime; + } + + @Override + public Integer preferredPageSize() { + // Typically this method just returns null + return null; + } @Override public String getUuid() { return null; } - }; - } + }; + } - /** - * Load a list of patient resources given their IDs - */ - private List loadResourcesByIds(List theIdsToReturn) { - // .. implement this search against the database .. - return null; - } - - @Override - public Class getResourceType() { - return Patient.class; - } + /** + * Load a list of patient resources given their IDs + */ + private List loadResourcesByIds(List theIdsToReturn) { + // .. implement this search against the database .. + return null; + } + @Override + public Class getResourceType() { + return Patient.class; + } } // END SNIPPET: provider diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/PagingServer.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/PagingServer.java index 2e506d59004..ce10e8e2c2a 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/PagingServer.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/PagingServer.java @@ -22,31 +22,29 @@ package ca.uhn.hapi.fhir.docs; import ca.uhn.fhir.rest.server.FifoMemoryPagingProvider; import ca.uhn.fhir.rest.server.RestfulServer; -@SuppressWarnings({ "serial" }) -//START SNIPPET: provider +@SuppressWarnings({"serial"}) +// START SNIPPET: provider public class PagingServer extends RestfulServer { public PagingServer() { - + /* * Set the resource providers as always. Here we are using the paging * provider from the example below, but it is not strictly necessary - * to use a paging resource provider as well. If a normal resource + * to use a paging resource provider as well. If a normal resource * provider is used (one which returns List instead of IBundleProvider) * then the loaded resources will be stored by the IPagingProvider. */ setResourceProviders(new PagingPatientProvider()); - + /* * Set a paging provider. Here a simple in-memory implementation - * is used, but you may create your own. + * is used, but you may create your own. 
*/ FifoMemoryPagingProvider pp = new FifoMemoryPagingProvider(10); pp.setDefaultPageSize(10); pp.setMaximumPageSize(100); setPagingProvider(pp); - } - } -//END SNIPPET: provider +// END SNIPPET: provider diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/Parser.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/Parser.java index bd2b201bf16..2875e95a78e 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/Parser.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/Parser.java @@ -32,17 +32,16 @@ public class Parser { public static void main(String[] args) throws DataFormatException, IOException { { - //START SNIPPET: parsing + // START SNIPPET: parsing // Create a FHIR context FhirContext ctx = FhirContext.forR4(); // The following example is a simple serialized Patient resource to parse - String input = "{" + - "\"resourceType\" : \"Patient\"," + - " \"name\" : [{" + - " \"family\": \"Simpson\"" + - " }]" + - "}"; + String input = "{" + "\"resourceType\" : \"Patient\"," + + " \"name\" : [{" + + " \"family\": \"Simpson\"" + + " }]" + + "}"; // Instantiate a new parser IParser parser = ctx.newJsonParser(); @@ -50,10 +49,10 @@ public class Parser { // Parse it Patient parsed = parser.parseResource(Patient.class, input); System.out.println(parsed.getName().get(0).getFamily()); - //END SNIPPET: parsing + // END SNIPPET: parsing } { - //START SNIPPET: encoding + // START SNIPPET: encoding // Create a FHIR context FhirContext ctx = FhirContext.forR4(); @@ -71,7 +70,7 @@ public class Parser { // Using XML instead serialized = ctx.newXmlParser().encodeResourceToString(patient); System.out.println(serialized); - //END SNIPPET: encoding + // END SNIPPET: encoding } { // Create a FHIR context @@ -79,7 +78,7 @@ public class Parser { Patient patient = new Patient(); patient.addName().setFamily("Simpson").addGiven("James"); - //START SNIPPET: encodingPretty + // START SNIPPET: encodingPretty // Create a parser IParser parser = ctx.newJsonParser(); @@ -92,7 +91,7 @@ public class Parser { // You can also chain these statements together ctx.newJsonParser().setPrettyPrint(true).encodeResourceToString(patient); - //END SNIPPET: encodingPretty + // END SNIPPET: encodingPretty } { // Create a FHIR context @@ -100,7 +99,7 @@ public class Parser { Patient patient = new Patient(); patient.addName().setFamily("Simpson").addGiven("James"); - //START SNIPPET: encodingConfig + // START SNIPPET: encodingConfig // Create a parser IParser parser = ctx.newJsonParser(); @@ -116,34 +115,37 @@ public class Parser { // Serialize it String serialized = parser.encodeResourceToString(patient); System.out.println(serialized); - //END SNIPPET: encodingConfig + // END SNIPPET: encodingConfig } { - //START SNIPPET: disableStripVersions + // START SNIPPET: disableStripVersions FhirContext ctx = FhirContext.forR4(); IParser parser = ctx.newJsonParser(); // Disable the automatic stripping of versions from references on the parser parser.setStripVersionsFromReferences(false); - //END SNIPPET: disableStripVersions + // END SNIPPET: disableStripVersions - //START SNIPPET: disableStripVersionsCtx + // START SNIPPET: disableStripVersionsCtx ctx.getParserOptions().setStripVersionsFromReferences(false); - //END SNIPPET: disableStripVersionsCtx + // END SNIPPET: disableStripVersionsCtx } { - //START SNIPPET: disableStripVersionsField + // START SNIPPET: disableStripVersionsField FhirContext ctx = FhirContext.forR4(); IParser parser = ctx.newJsonParser(); // Preserve versions only on these two fields 
(for the given parser) - parser.setDontStripVersionsFromReferencesAtPaths("AuditEvent.entity.reference", "Patient.managingOrganization"); + parser.setDontStripVersionsFromReferencesAtPaths( + "AuditEvent.entity.reference", "Patient.managingOrganization"); // You can also apply this setting to the context so that it will // flow to all parsers - ctx.getParserOptions().setDontStripVersionsFromReferencesAtPaths("AuditEvent.entity.reference", "Patient.managingOrganization"); - //END SNIPPET: disableStripVersionsField + ctx.getParserOptions() + .setDontStripVersionsFromReferencesAtPaths( + "AuditEvent.entity.reference", "Patient.managingOrganization"); + // END SNIPPET: disableStripVersionsField } } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/PartitionExamples.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/PartitionExamples.java index d41f34f38c5..dbe13655830 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/PartitionExamples.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/PartitionExamples.java @@ -33,16 +33,13 @@ import org.hl7.fhir.r4.model.Observation; import org.hl7.fhir.r4.model.Patient; import org.springframework.beans.factory.annotation.Autowired; -import javax.servlet.http.HttpServletRequest; import java.util.Set; +import javax.servlet.http.HttpServletRequest; @SuppressWarnings("InnerClassMayBeStatic") public class PartitionExamples { - public void multitenantServer() { - - } - + public void multitenantServer() {} // START SNIPPET: partitionInterceptorRequestPartition @Interceptor @@ -63,11 +60,9 @@ public class PartitionExamples { String tenantId = theRequestDetails.getTenantId(); return RequestPartitionId.fromPartitionName(tenantId); } - } // END SNIPPET: partitionInterceptorRequestPartition - // START SNIPPET: partitionInterceptorHeaders @Interceptor public class CustomHeaderBasedPartitionInterceptor { @@ -83,11 +78,9 @@ public class PartitionExamples { String partitionName = theRequestDetails.getHeader("X-Partition-Name"); return RequestPartitionId.fromPartitionName(partitionName); } - } // END SNIPPET: partitionInterceptorHeaders - // START SNIPPET: partitionInterceptorResourceContents @Interceptor public class ResourceTypePartitionInterceptor { @@ -97,16 +90,14 @@ public class PartitionExamples { if (theResource instanceof Patient) { return RequestPartitionId.fromPartitionName("PATIENT"); } else if (theResource instanceof Observation) { - return RequestPartitionId.fromPartitionName("OBSERVATION"); + return RequestPartitionId.fromPartitionName("OBSERVATION"); } else { return RequestPartitionId.fromPartitionName("OTHER"); } } - } // END SNIPPET: partitionInterceptorResourceContents - // START SNIPPET: partitionInterceptorReadAllPartitions @Interceptor public class PartitionInterceptorReadAllPartitions { @@ -115,11 +106,9 @@ public class PartitionExamples { public RequestPartitionId readPartition() { return RequestPartitionId.allPartitions(); } - } // END SNIPPET: partitionInterceptorReadAllPartitions - // START SNIPPET: partitionInterceptorReadBasedOnScopes @Interceptor public class PartitionInterceptorReadPartitionsBasedOnScopes { @@ -128,22 +117,19 @@ public class PartitionExamples { public RequestPartitionId readPartition(ServletRequestDetails theRequest) { HttpServletRequest servletRequest = theRequest.getServletRequest(); - Set approvedScopes = (Set) servletRequest.getAttribute("ca.cdr.servletattribute.session.oidc.approved_scopes"); + Set approvedScopes = + (Set) 
servletRequest.getAttribute("ca.cdr.servletattribute.session.oidc.approved_scopes"); - String partition = approvedScopes - .stream() - .filter(t->t.startsWith("partition-")) - .map(t->t.substring("partition-".length())) - .findFirst() - .orElseThrow(()->new InvalidRequestException("No partition scopes found in request")); + String partition = approvedScopes.stream() + .filter(t -> t.startsWith("partition-")) + .map(t -> t.substring("partition-".length())) + .findFirst() + .orElseThrow(() -> new InvalidRequestException("No partition scopes found in request")); return RequestPartitionId.fromPartitionName(partition); - } - } // END SNIPPET: partitionInterceptorReadBasedOnScopes - // START SNIPPET: multitenantServer public class MultitenantServer extends RestfulServer { @@ -169,5 +155,4 @@ public class PartitionExamples { } // END SNIPPET: multitenantServer - } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/PatchExamples.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/PatchExamples.java index 0f82ce92ad3..e89d68089cf 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/PatchExamples.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/PatchExamples.java @@ -26,12 +26,12 @@ import ca.uhn.fhir.rest.api.PatchTypeEnum; import org.hl7.fhir.dstu3.model.IdType; import org.hl7.fhir.dstu3.model.OperationOutcome; - public class PatchExamples { - //START SNIPPET: patch + // START SNIPPET: patch @Patch - public OperationOutcome patientPatch(@IdParam IdType theId, PatchTypeEnum thePatchType, @ResourceParam String theBody) { + public OperationOutcome patientPatch( + @IdParam IdType theId, PatchTypeEnum thePatchType, @ResourceParam String theBody) { if (thePatchType == PatchTypeEnum.JSON_PATCH) { // do something @@ -39,12 +39,11 @@ public class PatchExamples { if (thePatchType == PatchTypeEnum.XML_PATCH) { // do something } - + OperationOutcome retVal = new OperationOutcome(); retVal.getText().setDivAsString("
    <div>OK</div>
    "); return retVal; } - //END SNIPPET: patch - + // END SNIPPET: patch } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/QuickUsage.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/QuickUsage.java index f26722b840c..74223d1510f 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/QuickUsage.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/QuickUsage.java @@ -36,37 +36,39 @@ import java.util.List; public class QuickUsage { -@SuppressWarnings("unused") -public static void main(String[] args) throws DataFormatException, IOException { + @SuppressWarnings("unused") + public static void main(String[] args) throws DataFormatException, IOException { -Patient patient = new Patient(); -patient.addIdentifier().setUse(IdentifierUseEnum.OFFICIAL).setSystem("urn:fake:mrns").setValue("7000135"); -patient.addIdentifier().setUse(IdentifierUseEnum.SECONDARY).setSystem("urn:fake:otherids").setValue("3287486"); + Patient patient = new Patient(); + patient.addIdentifier() + .setUse(IdentifierUseEnum.OFFICIAL) + .setSystem("urn:fake:mrns") + .setValue("7000135"); + patient.addIdentifier() + .setUse(IdentifierUseEnum.SECONDARY) + .setSystem("urn:fake:otherids") + .setValue("3287486"); -patient.addName().addFamily("Smith").addGiven("John").addGiven("Q").addSuffix("Junior"); + patient.addName().addFamily("Smith").addGiven("John").addGiven("Q").addSuffix("Junior"); -patient.setGender(AdministrativeGenderEnum.MALE); + patient.setGender(AdministrativeGenderEnum.MALE); + FhirContext ctx = FhirContext.forDstu2(); + String xmlEncoded = ctx.newXmlParser().encodeResourceToString(patient); + String jsonEncoded = ctx.newJsonParser().encodeResourceToString(patient); -FhirContext ctx = FhirContext.forDstu2(); -String xmlEncoded = ctx.newXmlParser().encodeResourceToString(patient); -String jsonEncoded = ctx.newJsonParser().encodeResourceToString(patient); - -MyClientInterface client = ctx.newRestfulClient(MyClientInterface.class, "http://foo/fhir"); -IdentifierDt searchParam = new IdentifierDt("urn:someidentifiers", "7000135"); -List clients = client.findPatientsByIdentifier(searchParam); -} - -public interface MyClientInterface extends IRestfulClient -{ - /** A FHIR search */ - @Search - public List findPatientsByIdentifier(@RequiredParam(name = "identifier") IdentifierDt theIdentifier); - - /** A FHIR create */ - @Create - public MethodOutcome createPatient(@ResourceParam Patient thePatient); - -} + MyClientInterface client = ctx.newRestfulClient(MyClientInterface.class, "http://foo/fhir"); + IdentifierDt searchParam = new IdentifierDt("urn:someidentifiers", "7000135"); + List clients = client.findPatientsByIdentifier(searchParam); + } + public interface MyClientInterface extends IRestfulClient { + /** A FHIR search */ + @Search + public List findPatientsByIdentifier(@RequiredParam(name = "identifier") IdentifierDt theIdentifier); + + /** A FHIR create */ + @Create + public MethodOutcome createPatient(@ResourceParam Patient thePatient); + } } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/RepositoryValidatingInterceptorExamples.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/RepositoryValidatingInterceptorExamples.java index c703f7fe2ee..78131d4deba 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/RepositoryValidatingInterceptorExamples.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/RepositoryValidatingInterceptorExamples.java @@ -37,14 +37,14 @@ public class RepositoryValidatingInterceptorExamples { private 
InterceptorService myInterceptorService; public void createSimpleRule() { - //START SNIPPET: createSimpleRule + // START SNIPPET: createSimpleRule // First you must ask the Spring Application Context for a rule builder RepositoryValidatingRuleBuilder ruleBuilder = myAppCtx.getBean(RepositoryValidatingRuleBuilder.class); // Add a simple rule requiring all Patient resources to declare conformance to the US Core // Patient Profile, and to validate successfully. ruleBuilder - .forResourcesOfType("Patient") + .forResourcesOfType("Patient") .requireAtLeastProfile("http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient") .and() .requireValidationToDeclaredProfiles(); @@ -55,106 +55,102 @@ public class RepositoryValidatingInterceptorExamples { // Create and register the interceptor RepositoryValidatingInterceptor interceptor = new RepositoryValidatingInterceptor(myFhirCtx, rules); myInterceptorService.registerInterceptor(interceptor); - //END SNIPPET: createSimpleRule + // END SNIPPET: createSimpleRule } public void requireProfileDeclarations() { RepositoryValidatingRuleBuilder ruleBuilder = myAppCtx.getBean(RepositoryValidatingRuleBuilder.class); - //START SNIPPET: requireProfileDeclarations + // START SNIPPET: requireProfileDeclarations // Require Patient resources to declare conformance to US Core patient profile ruleBuilder - .forResourcesOfType("Patient") - .requireAtLeastProfile("http://www.hl7.org/fhir/us/core/StructureDefinition-us-core-patient.html"); + .forResourcesOfType("Patient") + .requireAtLeastProfile("http://www.hl7.org/fhir/us/core/StructureDefinition-us-core-patient.html"); // Require Patient resources to declare conformance to either the US Core patient profile // or the UK Core patient profile ruleBuilder - .forResourcesOfType("Patient") - .requireAtLeastOneProfileOf( - "http://www.hl7.org/fhir/us/core/StructureDefinition-us-core-patient.html", - "https://fhir.nhs.uk/R4/StructureDefinition/UKCore-Patient"); - //END SNIPPET: requireProfileDeclarations + .forResourcesOfType("Patient") + .requireAtLeastOneProfileOf( + "http://www.hl7.org/fhir/us/core/StructureDefinition-us-core-patient.html", + "https://fhir.nhs.uk/R4/StructureDefinition/UKCore-Patient"); + // END SNIPPET: requireProfileDeclarations } public void requireValidationToDeclaredProfiles() { RepositoryValidatingRuleBuilder ruleBuilder = myAppCtx.getBean(RepositoryValidatingRuleBuilder.class); - //START SNIPPET: requireValidationToDeclaredProfiles + // START SNIPPET: requireValidationToDeclaredProfiles // Require Patient resources to validate to any declared profiles - ruleBuilder - .forResourcesOfType("Patient") - .requireValidationToDeclaredProfiles(); - //END SNIPPET: requireValidationToDeclaredProfiles + ruleBuilder.forResourcesOfType("Patient").requireValidationToDeclaredProfiles(); + // END SNIPPET: requireValidationToDeclaredProfiles } public void requireValidationToDeclaredProfilesAdjustThreshold() { RepositoryValidatingRuleBuilder ruleBuilder = myAppCtx.getBean(RepositoryValidatingRuleBuilder.class); - //START SNIPPET: requireValidationToDeclaredProfilesAdjustThreshold + // START SNIPPET: requireValidationToDeclaredProfilesAdjustThreshold ruleBuilder - .forResourcesOfType("Patient") - .requireValidationToDeclaredProfiles() - .rejectOnSeverity(ResultSeverityEnum.WARNING); - //END SNIPPET: requireValidationToDeclaredProfilesAdjustThreshold + .forResourcesOfType("Patient") + .requireValidationToDeclaredProfiles() + .rejectOnSeverity(ResultSeverityEnum.WARNING); + // END SNIPPET: 
requireValidationToDeclaredProfilesAdjustThreshold } public void requireValidationToDeclaredProfilesTagOnFailure() { RepositoryValidatingRuleBuilder ruleBuilder = myAppCtx.getBean(RepositoryValidatingRuleBuilder.class); - //START SNIPPET: requireValidationToDeclaredProfilesTagOnFailure + // START SNIPPET: requireValidationToDeclaredProfilesTagOnFailure ruleBuilder - .forResourcesOfType("Patient") - .requireValidationToDeclaredProfiles() - .neverReject() - .tagOnSeverity(ResultSeverityEnum.ERROR, "http://example.com", "validation-failure"); - //END SNIPPET: requireValidationToDeclaredProfilesTagOnFailure + .forResourcesOfType("Patient") + .requireValidationToDeclaredProfiles() + .neverReject() + .tagOnSeverity(ResultSeverityEnum.ERROR, "http://example.com", "validation-failure"); + // END SNIPPET: requireValidationToDeclaredProfilesTagOnFailure } public void requireValidationToDeclaredProfilesAdditionalOptions() { RepositoryValidatingRuleBuilder ruleBuilder = myAppCtx.getBean(RepositoryValidatingRuleBuilder.class); - //START SNIPPET: requireValidationToDeclaredProfilesAdditionalOptions + // START SNIPPET: requireValidationToDeclaredProfilesAdditionalOptions ruleBuilder - .forResourcesOfType("Patient") - .requireValidationToDeclaredProfiles() + .forResourcesOfType("Patient") + .requireValidationToDeclaredProfiles() - // Configure the validator to reject unknown extensions - // by default, all extensions are accepted and to undo this rejection - // call allowAnyExtensions() - .rejectUnknownExtensions() + // Configure the validator to reject unknown extensions + // by default, all extensions are accepted and to undo this rejection + // call allowAnyExtensions() + .rejectUnknownExtensions() - // Configure the validator to not perform terminology validation - .disableTerminologyChecks() + // Configure the validator to not perform terminology validation + .disableTerminologyChecks() - // Configure the validator to raise an error if a resource being - // validated declares a profile, and the StructureDefinition for - // this profile can not be found. - .errorOnUnknownProfiles() + // Configure the validator to raise an error if a resource being + // validated declares a profile, and the StructureDefinition for + // this profile can not be found. + .errorOnUnknownProfiles() - // Configure the validator to suppress the information-level - // message that is added to the validation result if a profile - // StructureDefinition does not declare a binding for a coded - // field. - .suppressNoBindingMessage() + // Configure the validator to suppress the information-level + // message that is added to the validation result if a profile + // StructureDefinition does not declare a binding for a coded + // field. + .suppressNoBindingMessage() - // Configure the validator to suppress the warning-level message - // that is added when validating a code that can't be found in a - // ValueSet that has an extensible binding. - .suppressWarningForExtensibleValueSetValidation(); - //END SNIPPET: requireValidationToDeclaredProfilesAdditionalOptions + // Configure the validator to suppress the warning-level message + // that is added when validating a code that can't be found in a + // ValueSet that has an extensible binding. 
+ .suppressWarningForExtensibleValueSetValidation(); + // END SNIPPET: requireValidationToDeclaredProfilesAdditionalOptions } - public void disallowProfiles() { RepositoryValidatingRuleBuilder ruleBuilder = myAppCtx.getBean(RepositoryValidatingRuleBuilder.class); - //START SNIPPET: disallowProfiles + // START SNIPPET: disallowProfiles // No UK Core patients allowed! ruleBuilder - .forResourcesOfType("Patient") - .disallowProfile("https://fhir.nhs.uk/R4/StructureDefinition/UKCore-Patient"); - //END SNIPPET: disallowProfiles + .forResourcesOfType("Patient") + .disallowProfile("https://fhir.nhs.uk/R4/StructureDefinition/UKCore-Patient"); + // END SNIPPET: disallowProfiles } } - diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/RequestCounterInterceptor.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/RequestCounterInterceptor.java index 423c31c996a..be64bc050ba 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/RequestCounterInterceptor.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/RequestCounterInterceptor.java @@ -22,31 +22,28 @@ package ca.uhn.hapi.fhir.docs; import ca.uhn.fhir.interceptor.api.Hook; import ca.uhn.fhir.interceptor.api.Interceptor; import ca.uhn.fhir.interceptor.api.Pointcut; -import ca.uhn.fhir.rest.server.interceptor.InterceptorAdapter; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -//START SNIPPET: interceptor +// START SNIPPET: interceptor @Interceptor -public class RequestCounterInterceptor -{ +public class RequestCounterInterceptor { - private int myRequestCount; + private int myRequestCount; - public int getRequestCount() { - return myRequestCount; - } + public int getRequestCount() { + return myRequestCount; + } - /** - * Override the incomingRequestPreProcessed method, which is called - * for each incoming request before any processing is done - */ + /** + * Override the incomingRequestPreProcessed method, which is called + * for each incoming request before any processing is done + */ @Hook(Pointcut.SERVER_INCOMING_REQUEST_PRE_PROCESSED) - public boolean incomingRequestPreProcessed(HttpServletRequest theRequest, HttpServletResponse theResponse) { - myRequestCount++; - return true; - } - + public boolean incomingRequestPreProcessed(HttpServletRequest theRequest, HttpServletResponse theResponse) { + myRequestCount++; + return true; + } } -//END SNIPPET: interceptor +// END SNIPPET: interceptor diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/RequestExceptionInterceptor.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/RequestExceptionInterceptor.java index 0351c6a8a33..4e89f7eb9b9 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/RequestExceptionInterceptor.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/RequestExceptionInterceptor.java @@ -24,19 +24,20 @@ import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; +import java.io.IOException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import java.io.IOException; -//START SNIPPET: interceptor +// START SNIPPET: interceptor public class RequestExceptionInterceptor { @Hook(Pointcut.SERVER_HANDLE_EXCEPTION) public boolean handleException( - RequestDetails theRequestDetails, - BaseServerResponseException theException, - HttpServletRequest theServletRequest, - HttpServletResponse theServletResponse) throws IOException { 
+ RequestDetails theRequestDetails, + BaseServerResponseException theException, + HttpServletRequest theServletRequest, + HttpServletResponse theServletResponse) + throws IOException { // HAPI's server exceptions know what the appropriate HTTP status code is theServletResponse.setStatus(theException.getStatusCode()); @@ -50,6 +51,5 @@ public class RequestExceptionInterceptor { // to stop processing immediately return false; } - } -//END SNIPPET: interceptor +// END SNIPPET: interceptor diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ResourceRefs.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ResourceRefs.java index 696bb99b6ad..be20bd728cc 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ResourceRefs.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ResourceRefs.java @@ -25,31 +25,30 @@ import org.hl7.fhir.r4.model.Patient; public class ResourceRefs { - private static FhirContext ourCtx = FhirContext.forDstu2(); + private static FhirContext ourCtx = FhirContext.forDstu2(); - public static void main(String[] args) { - manualContained(); - } + public static void main(String[] args) { + manualContained(); + } - public static void manualContained() { - // START SNIPPET: manualContained - // Create an organization, and give it a local ID - Organization org = new Organization(); - org.setId("#localOrganization"); - org.getNameElement().setValue("Contained Test Organization"); + public static void manualContained() { + // START SNIPPET: manualContained + // Create an organization, and give it a local ID + Organization org = new Organization(); + org.setId("#localOrganization"); + org.getNameElement().setValue("Contained Test Organization"); - // Create a patient - Patient patient = new Patient(); - patient.setId("Patient/1333"); - patient.addIdentifier().setSystem("urn:mrns").setValue("253345"); + // Create a patient + Patient patient = new Patient(); + patient.setId("Patient/1333"); + patient.addIdentifier().setSystem("urn:mrns").setValue("253345"); - // Set the reference, and manually add the contained resource - patient.getManagingOrganization().setReference("#localOrganization"); - patient.getContained().add(org); - - String encoded = ourCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(patient); - System.out.println(encoded); - // END SNIPPET: manualContained - } + // Set the reference, and manually add the contained resource + patient.getManagingOrganization().setReference("#localOrganization"); + patient.getContained().add(org); + String encoded = ourCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(patient); + System.out.println(encoded); + // END SNIPPET: manualContained + } } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/RestfulObservationResourceProvider.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/RestfulObservationResourceProvider.java index 35736100b22..1831452ae9e 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/RestfulObservationResourceProvider.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/RestfulObservationResourceProvider.java @@ -33,7 +33,7 @@ import org.hl7.fhir.r4.model.Patient; import java.util.Collections; import java.util.List; -//START SNIPPET: provider +// START SNIPPET: provider /** * All resource providers must implement IResourceProvider @@ -49,15 +49,15 @@ public class RestfulObservationResourceProvider implements IResourceProvider { public Class getResourceType() { return Patient.class; } - + /** * The "@Read" annotation 
indicates that this method supports the - * read operation. It takes one argument, the Resource type being returned. - * + * read operation. It takes one argument, the Resource type being returned. + * * @param theId * The read operation takes one parameter, which must be of type * IdType and must be annotated with the "@Read.IdParam" annotation. - * @return + * @return * Returns a resource matching this identifier, or null if none exists. */ @Read() @@ -73,11 +73,11 @@ public class RestfulObservationResourceProvider implements IResourceProvider { } /** - * The "@Search" annotation indicates that this method supports the + * The "@Search" annotation indicates that this method supports the * search operation. You may have many different methods annotated with * this annotation, to support many different search criteria. This * example searches by family name. - * + * * @param theFamilyName * This operation takes one parameter which is the search criteria. It is * annotated with the "@Required" annotation. This annotation takes one argument, @@ -101,6 +101,5 @@ public class RestfulObservationResourceProvider implements IResourceProvider { patient.setGender(Enumerations.AdministrativeGender.MALE); return Collections.singletonList(patient); } - } -//END SNIPPET: provider +// END SNIPPET: provider diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/RestfulPatientResourceProvider.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/RestfulPatientResourceProvider.java index 86e263e5877..95cf5db322a 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/RestfulPatientResourceProvider.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/RestfulPatientResourceProvider.java @@ -34,7 +34,7 @@ import org.hl7.fhir.r4.model.IdType; import java.util.Collections; import java.util.List; -//START SNIPPET: provider +// START SNIPPET: provider /** * All resource providers must implement IResourceProvider @@ -50,16 +50,16 @@ public class RestfulPatientResourceProvider implements IResourceProvider { public Class getResourceType() { return Patient.class; } - + /** * The "@Read" annotation indicates that this method supports the * read operation. Read operations should return a single resource - * instance. - * + * instance. + * * @param theId * The read operation takes one parameter, which must be of type * IdType and must be annotated with the "@Read.IdParam" annotation. - * @return + * @return * Returns a resource matching this identifier, or null if none exists. */ @Read() @@ -75,11 +75,11 @@ public class RestfulPatientResourceProvider implements IResourceProvider { } /** - * The "@Search" annotation indicates that this method supports the + * The "@Search" annotation indicates that this method supports the * search operation. You may have many different methods annotated with * this annotation, to support many different search criteria. This * example searches by family name. - * + * * @param theFamilyName * This operation takes one parameter which is the search criteria. It is * annotated with the "@Required" annotation. 
This annotation takes one argument, @@ -103,8 +103,5 @@ public class RestfulPatientResourceProvider implements IResourceProvider { patient.setGender(AdministrativeGenderEnum.MALE); return Collections.singletonList(patient); } - } -//END SNIPPET: provider - - +// END SNIPPET: provider diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/RestfulPatientResourceProviderMore.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/RestfulPatientResourceProviderMore.java index 765b6cf3621..a1c27f0d831 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/RestfulPatientResourceProviderMore.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/RestfulPatientResourceProviderMore.java @@ -19,8 +19,8 @@ */ package ca.uhn.hapi.fhir.docs; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.Include; import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum; import ca.uhn.fhir.model.api.TemporalPrecisionEnum; @@ -36,1075 +36,1023 @@ import ca.uhn.fhir.rest.server.IResourceProvider; import ca.uhn.fhir.rest.server.exceptions.*; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; -import org.hl7.fhir.r4.model.*; import org.hl7.fhir.r4.model.Bundle.BundleEntryComponent; import org.hl7.fhir.r4.model.Identifier.IdentifierUse; import org.hl7.fhir.r4.model.OperationOutcome.IssueSeverity; +import org.hl7.fhir.r4.model.*; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.math.BigDecimal; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Set; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; @SuppressWarnings("unused") public abstract class RestfulPatientResourceProviderMore implements IResourceProvider { - public interface ITestClient extends IBasicClient - { + public interface ITestClient extends IBasicClient { - @Search - List getPatientByDob(@RequiredParam(name = Patient.SP_BIRTHDATE) DateParam theParam); - - } - -private boolean detectedVersionConflict; -private boolean conflictHappened; -private boolean couldntFindThisId; -private FhirContext myContext; - -//START SNIPPET: searchAll -@Search -public List getAllOrganizations() { - List retVal=new ArrayList(); // populate this - return retVal; -} -//END SNIPPET: searchAll - -//START SNIPPET: updateEtag -@Update -public MethodOutcome update(@IdParam IdType theId, @ResourceParam Patient thePatient) { - String resourceId = theId.getIdPart(); - String versionId = theId.getVersionIdPart(); // this will contain the ETag - - String currentVersion = "1"; // populate this with the current version - - if (!versionId.equals(currentVersion)) { - throw new ResourceVersionConflictException(Msg.code(632) + "Expected version " + currentVersion); - } - - // ... perform the update ... 
- return new MethodOutcome(); - -} -//END SNIPPET: updateEtag - -//START SNIPPET: summaryAndElements -@Search -public List search( - SummaryEnum theSummary, // will receive the summary (no annotation required) - @Elements Set theElements // (requires the @Elements annotation) - ) { - return null; // todo: populate -} -//END SNIPPET: summaryAndElements - -//START SNIPPET: searchCompartment -public class PatientRp implements IResourceProvider { - - @Override - public Class getResourceType() { - return Patient.class; - } - - @Search(compartmentName="Condition") - public List searchCompartment(@IdParam IdType thePatientId) { - List retVal=new ArrayList(); - - // populate this with resources of any type that are a part of the - // "Condition" compartment for the Patient with ID "thePatientId" - - return retVal; - } - - // .. also include other Patient operations .. -} -//END SNIPPET: searchCompartment - - -//START SNIPPET: sort -@Search -public List findPatients( - @RequiredParam(name=Patient.SP_IDENTIFIER) StringParam theParameter, - @Sort SortSpec theSort) { - List retVal=new ArrayList(); // populate this - - // theSort is null unless a _sort parameter is actually provided - if (theSort != null) { - - // The name of the param to sort by - String param = theSort.getParamName(); - - // The sort order, or null - SortOrderEnum order = theSort.getOrder(); - - // This will be populated if a second _sort was specified - SortSpec subSort = theSort.getChain(); - - // ...apply the sort... - } - - return retVal; -} -//END SNIPPET: sort - -//START SNIPPET: count -@Search -public List findPatients( - @RequiredParam(name=Patient.SP_IDENTIFIER) StringParam theParameter, - @Count Integer theCount) { - List retVal=new ArrayList(); // populate this - - // count is null unless a _count parameter is actually provided - if (theCount != null) { - // ... do search with count ... - } else { - // ... do search without count ... + @Search + List getPatientByDob(@RequiredParam(name = Patient.SP_BIRTHDATE) DateParam theParam); } - return retVal; -} -//END SNIPPET: count + private boolean detectedVersionConflict; + private boolean conflictHappened; + private boolean couldntFindThisId; + private FhirContext myContext; -//START SNIPPET: offset -@Search -public List findPatients( - @RequiredParam(name=Patient.SP_IDENTIFIER) StringParam theParameter, - @Offset Integer theOffset, - @Count Integer theCount) { - List retVal=new ArrayList(); // populate this - - // offset is null unless a _offset parameter is actually provided - if (theOffset != null) { - // ... do search with offset ... - } else { - // ... do search without offset ... 
+ // START SNIPPET: searchAll + @Search + public List getAllOrganizations() { + List retVal = new ArrayList(); // populate this + return retVal; } + // END SNIPPET: searchAll - return retVal; -} -//END SNIPPET: offset + // START SNIPPET: updateEtag + @Update + public MethodOutcome update(@IdParam IdType theId, @ResourceParam Patient thePatient) { + String resourceId = theId.getIdPart(); + String versionId = theId.getVersionIdPart(); // this will contain the ETag -//START SNIPPET: underlyingReq -@Search -public List findPatients( - @RequiredParam(name="foo") StringParam theParameter, - HttpServletRequest theRequest, - HttpServletResponse theResponse) { - List retVal=new ArrayList(); // populate this - return retVal; -} -//END SNIPPET: underlyingReq + String currentVersion = "1"; // populate this with the current version -//START SNIPPET: referenceSimple -@Search -public List findDiagnosticReportsWithSubjet( - @OptionalParam(name=DiagnosticReport.SP_SUBJECT) ReferenceParam theSubject - ) { - List retVal=new ArrayList(); + if (!versionId.equals(currentVersion)) { + throw new ResourceVersionConflictException(Msg.code(632) + "Expected version " + currentVersion); + } - // If the parameter passed in includes a resource type (e.g. ?subject:Patient=123) - // that resource type is available. Here we just check that it is either not provided - // or set to "Patient" - if (theSubject.hasResourceType()) { - String resourceType = theSubject.getResourceType(); - if ("Patient".equals(resourceType) == false) { - throw new InvalidRequestException(Msg.code(633) + "Invalid resource type for parameter 'subject': " + resourceType); - } - } - - if (theSubject != null) { - // ReferenceParam extends IdType so all of the resource ID methods are available - String subjectId = theSubject.getIdPart(); - - // .. populate retVal with DiagnosticReport resources having - // subject with id "subjectId" .. - - } - - return retVal; - -} -//END SNIPPET: referenceSimple - - -//START SNIPPET: referenceWithChain -@Search -public List findReportsWithChain( - @RequiredParam(name=DiagnosticReport.SP_SUBJECT, chainWhitelist= {Patient.SP_FAMILY, Patient.SP_GENDER}) ReferenceParam theSubject - ) { - List retVal=new ArrayList(); - - String chain = theSubject.getChain(); - if (Patient.SP_FAMILY.equals(chain)) { - String familyName = theSubject.getValue(); - // .. populate with reports matching subject family name .. - } - if (Patient.SP_GENDER.equals(chain)) { - String gender = theSubject.getValue(); - // .. populate with reports matching subject gender .. - } - - return retVal; -} -//END SNIPPET: referenceWithChain - - -//START SNIPPET: referenceWithChainCombo -@Search -public List findReportsWithChainCombo ( - @RequiredParam(name=DiagnosticReport.SP_SUBJECT, chainWhitelist= {"", Patient.SP_FAMILY}) ReferenceParam theSubject - ) { - List retVal=new ArrayList(); - - String chain = theSubject.getChain(); - if (Patient.SP_FAMILY.equals(chain)) { - String familyName = theSubject.getValue(); - // .. populate with reports matching subject family name .. - } - if ("".equals(chain)) { - String resourceId = theSubject.getValue(); - // .. populate with reports matching subject with resource ID .. 
- } - - return retVal; -} -//END SNIPPET: referenceWithChainCombo - - -//START SNIPPET: referenceWithStaticChain -@Search -public List findObservations( - @RequiredParam(name= Observation.SP_SUBJECT+'.'+Patient.SP_IDENTIFIER) TokenParam theProvider - ) { - - String system = theProvider.getSystem(); - String identifier = theProvider.getValue(); - - // ...Do a search for all observations for the given subject... - - List retVal=new ArrayList(); // populate this - return retVal; - -} -//END SNIPPET: referenceWithStaticChain - - -//START SNIPPET: referenceWithDynamicChain -@Search() -public List findBySubject( - @RequiredParam(name=Observation.SP_SUBJECT, chainWhitelist = {"", Patient.SP_IDENTIFIER, Patient.SP_BIRTHDATE}) ReferenceParam subject - ) { - List observations = new ArrayList(); - - String chain = subject.getChain(); - if (Patient.SP_IDENTIFIER.equals(chain)) { - - // Because the chained parameter "subject.identifier" is actually of type - // "token", we convert the value to a token before processing it. - TokenParam tokenSubject = subject.toTokenParam(myContext); - String system = tokenSubject.getSystem(); - String identifier = tokenSubject.getValue(); - - // TODO: populate all the observations for the identifier - - } else if (Patient.SP_BIRTHDATE.equals(chain)) { - - // Because the chained parameter "subject.birthdate" is actually of type - // "date", we convert the value to a date before processing it. - DateParam dateSubject = subject.toDateParam(myContext); - DateTimeType birthDate = new DateTimeType(dateSubject.getValueAsString()); - - // TODO: populate all the observations for the birthdate - - } else if ("".equals(chain)) { - - String resourceId = subject.getValue(); - // TODO: populate all the observations for the resource id - - } - - return observations; -} -//END SNIPPET: referenceWithDynamicChain - - -//START SNIPPET: read -@Read() -public Patient getResourceById(@IdParam IdType theId) { - Patient retVal = new Patient(); - - // ...populate... - retVal.addIdentifier().setSystem("urn:mrns").setValue("12345"); - retVal.addName().setFamily("Smith").addGiven("Tester").addGiven("Q"); - // ...etc... - - // if you know the version ID of the resource, you should set it and HAPI will - // include it in a Content-Location header - retVal.setId(new IdType("Patient", "123", "2")); - - return retVal; -} -//END SNIPPET: read - -//START SNIPPET: delete -@Delete() -public void deletePatient(@IdParam IdType theId) { - // .. Delete the patient .. - if (couldntFindThisId) { - throw new ResourceNotFoundException(Msg.code(634) + "Unknown version"); + // ... perform the update ... 
+ return new MethodOutcome(); } - if (conflictHappened) { - throw new ResourceVersionConflictException(Msg.code(635) + "Couldn't delete because [foo]"); + // END SNIPPET: updateEtag + + // START SNIPPET: summaryAndElements + @Search + public List search( + SummaryEnum theSummary, // will receive the summary (no annotation required) + @Elements Set theElements // (requires the @Elements annotation) + ) { + return null; // todo: populate } - // otherwise, delete was successful - return; // can also return MethodOutcome -} -//END SNIPPET: delete + // END SNIPPET: summaryAndElements + // START SNIPPET: searchCompartment + public class PatientRp implements IResourceProvider { -//START SNIPPET: deleteConditional -@Delete() -public void deletePatientConditional(@IdParam IdType theId, @ConditionalUrlParam String theConditionalUrl) { - // Only one of theId or theConditionalUrl will have a value depending - // on whether the URL received was a logical ID, or a conditional - // search string - if (theId != null) { - // do a normal delete - } else { - // do a conditional delete - } - - // otherwise, delete was successful - return; // can also return MethodOutcome -} -//END SNIPPET: deleteConditional + @Override + public Class getResourceType() { + return Patient.class; + } -//START SNIPPET: history -@History() -public List getPatientHistory( - @IdParam IdType theId, - @Since InstantType theSince, - @At DateRangeParam theAt - ) { - List retVal = new ArrayList(); - - Patient patient = new Patient(); + @Search(compartmentName = "Condition") + public List searchCompartment(@IdParam IdType thePatientId) { + List retVal = new ArrayList(); - // Set the ID and version - patient.setId(theId.withVersion("1")); + // populate this with resources of any type that are a part of the + // "Condition" compartment for the Patient with ID "thePatientId" - if (isDeleted(patient)) { - - // If the resource is deleted, it just needs to have an ID and some metadata - ResourceMetadataKeyEnum.DELETED_AT.put(patient, InstantType.withCurrentTime()); - ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(patient, BundleEntryTransactionMethodEnum.DELETE); - - } else { - - // If the resource is not deleted, it should have normal resource content - patient.addName().setFamily("Smith"); // ..populate the rest + return retVal; + } + // .. also include other Patient operations .. } + // END SNIPPET: searchCompartment - return retVal; -} -//END SNIPPET: history + // START SNIPPET: sort + @Search + public List findPatients( + @RequiredParam(name = Patient.SP_IDENTIFIER) StringParam theParameter, @Sort SortSpec theSort) { + List retVal = new ArrayList(); // populate this + // theSort is null unless a _sort parameter is actually provided + if (theSort != null) { + + // The name of the param to sort by + String param = theSort.getParamName(); + + // The sort order, or null + SortOrderEnum order = theSort.getOrder(); + + // This will be populated if a second _sort was specified + SortSpec subSort = theSort.getChain(); + + // ...apply the sort... + } + + return retVal; + } + // END SNIPPET: sort + + // START SNIPPET: count + @Search + public List findPatients( + @RequiredParam(name = Patient.SP_IDENTIFIER) StringParam theParameter, @Count Integer theCount) { + List retVal = new ArrayList(); // populate this + + // count is null unless a _count parameter is actually provided + if (theCount != null) { + // ... do search with count ... + } else { + // ... do search without count ... 
+ } + + return retVal; + } + // END SNIPPET: count + + // START SNIPPET: offset + @Search + public List findPatients( + @RequiredParam(name = Patient.SP_IDENTIFIER) StringParam theParameter, + @Offset Integer theOffset, + @Count Integer theCount) { + List retVal = new ArrayList(); // populate this + + // offset is null unless a _offset parameter is actually provided + if (theOffset != null) { + // ... do search with offset ... + } else { + // ... do search without offset ... + } + + return retVal; + } + // END SNIPPET: offset + + // START SNIPPET: underlyingReq + @Search + public List findPatients( + @RequiredParam(name = "foo") StringParam theParameter, + HttpServletRequest theRequest, + HttpServletResponse theResponse) { + List retVal = new ArrayList(); // populate this + return retVal; + } + // END SNIPPET: underlyingReq + + // START SNIPPET: referenceSimple + @Search + public List findDiagnosticReportsWithSubjet( + @OptionalParam(name = DiagnosticReport.SP_SUBJECT) ReferenceParam theSubject) { + List retVal = new ArrayList(); + + // If the parameter passed in includes a resource type (e.g. ?subject:Patient=123) + // that resource type is available. Here we just check that it is either not provided + // or set to "Patient" + if (theSubject.hasResourceType()) { + String resourceType = theSubject.getResourceType(); + if ("Patient".equals(resourceType) == false) { + throw new InvalidRequestException( + Msg.code(633) + "Invalid resource type for parameter 'subject': " + resourceType); + } + } + + if (theSubject != null) { + // ReferenceParam extends IdType so all of the resource ID methods are available + String subjectId = theSubject.getIdPart(); + + // .. populate retVal with DiagnosticReport resources having + // subject with id "subjectId" .. + + } + + return retVal; + } + // END SNIPPET: referenceSimple + + // START SNIPPET: referenceWithChain + @Search + public List findReportsWithChain( + @RequiredParam( + name = DiagnosticReport.SP_SUBJECT, + chainWhitelist = {Patient.SP_FAMILY, Patient.SP_GENDER}) + ReferenceParam theSubject) { + List retVal = new ArrayList(); + + String chain = theSubject.getChain(); + if (Patient.SP_FAMILY.equals(chain)) { + String familyName = theSubject.getValue(); + // .. populate with reports matching subject family name .. + } + if (Patient.SP_GENDER.equals(chain)) { + String gender = theSubject.getValue(); + // .. populate with reports matching subject gender .. + } + + return retVal; + } + // END SNIPPET: referenceWithChain + + // START SNIPPET: referenceWithChainCombo + @Search + public List findReportsWithChainCombo( + @RequiredParam( + name = DiagnosticReport.SP_SUBJECT, + chainWhitelist = {"", Patient.SP_FAMILY}) + ReferenceParam theSubject) { + List retVal = new ArrayList(); + + String chain = theSubject.getChain(); + if (Patient.SP_FAMILY.equals(chain)) { + String familyName = theSubject.getValue(); + // .. populate with reports matching subject family name .. + } + if ("".equals(chain)) { + String resourceId = theSubject.getValue(); + // .. populate with reports matching subject with resource ID .. + } + + return retVal; + } + // END SNIPPET: referenceWithChainCombo + + // START SNIPPET: referenceWithStaticChain + @Search + public List findObservations( + @RequiredParam(name = Observation.SP_SUBJECT + '.' + Patient.SP_IDENTIFIER) TokenParam theProvider) { + + String system = theProvider.getSystem(); + String identifier = theProvider.getValue(); + + // ...Do a search for all observations for the given subject... 
+ + List retVal = new ArrayList(); // populate this + return retVal; + } + // END SNIPPET: referenceWithStaticChain + + // START SNIPPET: referenceWithDynamicChain + @Search() + public List findBySubject( + @RequiredParam( + name = Observation.SP_SUBJECT, + chainWhitelist = {"", Patient.SP_IDENTIFIER, Patient.SP_BIRTHDATE}) + ReferenceParam subject) { + List observations = new ArrayList(); + + String chain = subject.getChain(); + if (Patient.SP_IDENTIFIER.equals(chain)) { + + // Because the chained parameter "subject.identifier" is actually of type + // "token", we convert the value to a token before processing it. + TokenParam tokenSubject = subject.toTokenParam(myContext); + String system = tokenSubject.getSystem(); + String identifier = tokenSubject.getValue(); + + // TODO: populate all the observations for the identifier + + } else if (Patient.SP_BIRTHDATE.equals(chain)) { + + // Because the chained parameter "subject.birthdate" is actually of type + // "date", we convert the value to a date before processing it. + DateParam dateSubject = subject.toDateParam(myContext); + DateTimeType birthDate = new DateTimeType(dateSubject.getValueAsString()); + + // TODO: populate all the observations for the birthdate + + } else if ("".equals(chain)) { + + String resourceId = subject.getValue(); + // TODO: populate all the observations for the resource id + + } + + return observations; + } + // END SNIPPET: referenceWithDynamicChain + + // START SNIPPET: read + @Read() + public Patient getResourceById(@IdParam IdType theId) { + Patient retVal = new Patient(); + + // ...populate... + retVal.addIdentifier().setSystem("urn:mrns").setValue("12345"); + retVal.addName().setFamily("Smith").addGiven("Tester").addGiven("Q"); + // ...etc... + + // if you know the version ID of the resource, you should set it and HAPI will + // include it in a Content-Location header + retVal.setId(new IdType("Patient", "123", "2")); + + return retVal; + } + // END SNIPPET: read + + // START SNIPPET: delete + @Delete() + public void deletePatient(@IdParam IdType theId) { + // .. Delete the patient .. 
+ if (couldntFindThisId) { + throw new ResourceNotFoundException(Msg.code(634) + "Unknown version"); + } + if (conflictHappened) { + throw new ResourceVersionConflictException(Msg.code(635) + "Couldn't delete because [foo]"); + } + // otherwise, delete was successful + return; // can also return MethodOutcome + } + // END SNIPPET: delete + + // START SNIPPET: deleteConditional + @Delete() + public void deletePatientConditional(@IdParam IdType theId, @ConditionalUrlParam String theConditionalUrl) { + // Only one of theId or theConditionalUrl will have a value depending + // on whether the URL received was a logical ID, or a conditional + // search string + if (theId != null) { + // do a normal delete + } else { + // do a conditional delete + } + + // otherwise, delete was successful + return; // can also return MethodOutcome + } + // END SNIPPET: deleteConditional + + // START SNIPPET: history + @History() + public List getPatientHistory( + @IdParam IdType theId, @Since InstantType theSince, @At DateRangeParam theAt) { + List retVal = new ArrayList(); + + Patient patient = new Patient(); + + // Set the ID and version + patient.setId(theId.withVersion("1")); + + if (isDeleted(patient)) { + + // If the resource is deleted, it just needs to have an ID and some metadata + ResourceMetadataKeyEnum.DELETED_AT.put(patient, InstantType.withCurrentTime()); + ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(patient, BundleEntryTransactionMethodEnum.DELETE); + + } else { + + // If the resource is not deleted, it should have normal resource content + patient.addName().setFamily("Smith"); // ..populate the rest + } + + return retVal; + } + // END SNIPPET: history private boolean isDeleted(Patient thePatient) { return false; } - -//START SNIPPET: vread -@Read(version=true) -public Patient readOrVread(@IdParam IdType theId) { - Patient retVal = new Patient(); - - if (theId.hasVersionIdPart()) { - // this is a vread - } else { - // this is a read - } - - // ...populate... - - return retVal; -} -//END SNIPPET: vread - -//START SNIPPET: searchStringParam -@Search() -public List searchByLastName(@RequiredParam(name=Patient.SP_FAMILY) StringParam theFamily) { - String valueToMatch = theFamily.getValue(); - - if (theFamily.isExact()) { - // Do an exact match search - } else { - // Do a fuzzy search if possible - } - - // ...populate... - Patient patient = new Patient(); - patient.addIdentifier().setSystem("urn:mrns").setValue("12345"); - patient.addName().setFamily("Smith").addGiven("Tester").addGiven("Q"); - // ...etc... - - // Every returned resource must have its logical ID set. If the server - // supports versioning, that should be set too - String logicalId = "4325"; - String versionId = "2"; // optional - patient.setId(new IdType("Patient", logicalId, versionId)); - - /* - * This is obviously a fairly contrived example since we are always - * just returning the same hardcoded patient, but in a real scenario - * you could return as many resources as you wanted, and they - * should actually match the given search criteria. - */ - List retVal = new ArrayList(); - retVal.add(patient); - - return retVal; -} -//END SNIPPET: searchStringParam - -//START SNIPPET: searchNamedQuery -@Search(queryName="namedQuery1") -public List searchByNamedQuery(@RequiredParam(name="someparam") StringParam theSomeParam) { - List retVal = new ArrayList(); - // ...populate... 
- return retVal; -} -//END SNIPPET: searchNamedQuery - -//START SNIPPET: searchComposite -@Search() -public List searchByComposite( - @RequiredParam(name=Observation.SP_CODE_VALUE_DATE, compositeTypes= {TokenParam.class, DateParam.class}) - CompositeParam theParam) { - // Each of the two values in the composite param are accessible separately. - // In the case of Observation's name-value-date, the left is a string and - // the right is a date. - TokenParam observationName = theParam.getLeftValue(); - DateParam observationValue = theParam.getRightValue(); - - List retVal = new ArrayList(); - // ...populate... - return retVal; -} -//END SNIPPET: searchComposite - - -//START SNIPPET: searchIdentifierParam -@Search() -public List searchByIdentifier(@RequiredParam(name=Patient.SP_IDENTIFIER) TokenParam theId) { - String identifierSystem = theId.getSystem(); - String identifier = theId.getValue(); - - List retVal = new ArrayList(); - // ...populate... - return retVal; -} -//END SNIPPET: searchIdentifierParam - -//START SNIPPET: searchOptionalParam -@Search() -public List searchByNames( @RequiredParam(name=Patient.SP_FAMILY) StringParam theFamilyName, - @OptionalParam(name=Patient.SP_GIVEN) StringParam theGivenName ) { - String familyName = theFamilyName.getValue(); - String givenName = theGivenName != null ? theGivenName.getValue() : null; - - List retVal = new ArrayList(); - // ...populate... - return retVal; -} -//END SNIPPET: searchOptionalParam - -//START SNIPPET: searchWithDocs -@Description(shortDefinition="This search finds all patient resources matching a given name combination") -@Search() -public List searchWithDocs( - @Description(shortDefinition="This is the patient's last name - Supports partial matches") - @RequiredParam(name=Patient.SP_FAMILY) StringParam theFamilyName, - - @Description(shortDefinition="This is the patient's given names") - @OptionalParam(name=Patient.SP_GIVEN) StringParam theGivenName ) { - - List retVal = new ArrayList(); - // ...populate... - return retVal; -} -//END SNIPPET: searchWithDocs - - -//START SNIPPET: searchMultiple -@Search() -public List searchByObservationNames( - @RequiredParam(name=Observation.SP_CODE) TokenOrListParam theCodings ) { - - // The list here will contain 0..* codings, and any observations which match any of the - // given codings should be returned - List wantedCodings = theCodings.getValuesAsQueryTokens(); - - List retVal = new ArrayList(); - // ...populate... - return retVal; -} -//END SNIPPET: searchMultiple - - -//START SNIPPET: searchMultipleAnd -@Search() -public List searchByPatientAddress( - @RequiredParam(name=Patient.SP_ADDRESS) StringAndListParam theAddressParts ) { - - // StringAndListParam is a container for 0..* StringOrListParam, which is in turn a - // container for 0..* strings. It is a little bit weird to understand at first, but think of the - // StringAndListParam to be an AND list with multiple OR lists inside it. So you will need - // to return results which match at least one string within every OR list. - List wantedCodings = theAddressParts.getValuesAsQueryTokens(); - for (StringOrListParam nextOrList : wantedCodings) { - List queryTokens = nextOrList.getValuesAsQueryTokens(); - // Only return results that match at least one of the tokens in the list below - for (StringParam nextString : queryTokens) { - // ....check for match... - } - } - - List retVal = new ArrayList(); - // ...populate... 
- return retVal; -} -//END SNIPPET: searchMultipleAnd - - -//START SNIPPET: dates -@Search() -public List searchByObservationNames( @RequiredParam(name=Patient.SP_BIRTHDATE) DateParam theDate ) { - ParamPrefixEnum prefix = theDate.getPrefix(); // e.g. gt, le, etc.. - Date date = theDate.getValue(); // e.g. 2011-01-02 - TemporalPrecisionEnum precision = theDate.getPrecision(); // e.g. DAY - - List retVal = new ArrayList(); - // ...populate... - return retVal; -} -//END SNIPPET: dates - -public void dateClientExample() { -ITestClient client = provideTc(); -//START SNIPPET: dateClient -DateParam param = new DateParam(ParamPrefixEnum.GREATERTHAN_OR_EQUALS, "2011-01-02"); -List response = client.getPatientByDob(param); -//END SNIPPET: dateClient -} - -//START SNIPPET: dateRange -@Search() -public List searchByDateRange( - @RequiredParam(name=Observation.SP_DATE) DateRangeParam theRange ) { - - Date from = theRange.getLowerBoundAsInstant(); - Date to = theRange.getUpperBoundAsInstant(); - - List retVal = new ArrayList(); - // ...populate... - return retVal; -} -//END SNIPPET: dateRange - - -private ITestClient provideTc() { - return null; -} -@Override -public Class getResourceType() { - return null; -} - - - -//START SNIPPET: pathSpec -@Search() -public List getDiagnosticReport( - @RequiredParam(name=DiagnosticReport.SP_IDENTIFIER) - TokenParam theIdentifier, - - @IncludeParam(allow= {"DiagnosticReport:subject"}) - Set theIncludes ) { - - List retVal = new ArrayList(); - - // Assume this method exists and loads the report from the DB - DiagnosticReport report = loadSomeDiagnosticReportFromDatabase(theIdentifier); - - // If the client has asked for the subject to be included: - if (theIncludes.contains(new Include("DiagnosticReport:subject"))) { - - // The resource reference should contain the ID of the patient - IIdType subjectId = report.getSubject().getReferenceElement(); - - // So load the patient ID and return it - Patient subject = loadSomePatientFromDatabase(subjectId); - report.getSubject().setResource(subject); - - } - - retVal.add(report); - return retVal; -} -//END SNIPPET: pathSpec - -//START SNIPPET: revInclude -@Search() -public List getDiagnosticReport( - @RequiredParam(name=DiagnosticReport.SP_IDENTIFIER) - TokenParam theIdentifier, - - @IncludeParam() - Set theIncludes, - - @IncludeParam(reverse=true) - Set theReverseIncludes - ) { - -return new ArrayList(); // populate this -} -//END SNIPPET: revInclude - -//START SNIPPET: pathSpecSimple -@Search() -public List getDiagnosticReport( - @RequiredParam(name=DiagnosticReport.SP_IDENTIFIER) - TokenParam theIdentifier, - - @IncludeParam(allow= {"DiagnosticReport:subject"}) - String theInclude ) { - - List retVal = new ArrayList(); - - // Assume this method exists and loads the report from the DB - DiagnosticReport report = loadSomeDiagnosticReportFromDatabase(theIdentifier); - - // If the client has asked for the subject to be included: - if ("DiagnosticReport:subject".equals(theInclude)) { - - // The resource reference should contain the ID of the patient - IIdType subjectId = report.getSubject().getReferenceElement(); - - // So load the patient ID and return it - Patient subject = loadSomePatientFromDatabase(subjectId); - report.getSubject().setResource(subject); - - } - - retVal.add(report); - return retVal; -} -//END SNIPPET: pathSpecSimple - -//START SNIPPET: quantity -@Search() -public List getObservationsByQuantity( - @RequiredParam(name=Observation.SP_VALUE_QUANTITY) QuantityParam theQuantity) { - - List retVal = new 
ArrayList(); - - ParamPrefixEnum prefix = theQuantity.getPrefix(); - BigDecimal value = theQuantity.getValue(); - String units = theQuantity.getUnits(); - // .. Apply these parameters .. - - // ... populate ... - return retVal; -} -//END SNIPPET: quantity - -private DiagnosticReport loadSomeDiagnosticReportFromDatabase(TokenParam theIdentifier) { - return null; -} - -private Patient loadSomePatientFromDatabase(IIdType theId) { - return null; -} - - -//START SNIPPET: create -@Create -public MethodOutcome createPatient(@ResourceParam Patient thePatient) { - - /* - * First we might want to do business validation. The UnprocessableEntityException - * results in an HTTP 422, which is appropriate for business rule failure - */ - if (thePatient.getIdentifierFirstRep().isEmpty()) { - /* It is also possible to pass an OperationOutcome resource - * to the UnprocessableEntityException if you want to return - * a custom populated OperationOutcome. Otherwise, a simple one - * is created using the string supplied below. - */ - throw new UnprocessableEntityException(Msg.code(636) + "No identifier supplied"); - } - - // Save this patient to the database... - savePatientToDatabase(thePatient); - - // This method returns a MethodOutcome object which contains - // the ID (composed of the type Patient, the logical ID 3746, and the - // version ID 1) - MethodOutcome retVal = new MethodOutcome(); - retVal.setId(new IdType("Patient", "3746", "1")); - - // You can also add an OperationOutcome resource to return - // This part is optional though: - OperationOutcome outcome = new OperationOutcome(); - outcome.addIssue().setDiagnostics("One minor issue detected"); - retVal.setOperationOutcome(outcome); - - return retVal; -} -//END SNIPPET: create - - -//START SNIPPET: createConditional -@Create -public MethodOutcome createPatientConditional( - @ResourceParam Patient thePatient, - @ConditionalUrlParam String theConditionalUrl) { - - if (theConditionalUrl != null) { - // We are doing a conditional create - - // populate this with the ID of the existing resource which - // matches the conditional URL - return new MethodOutcome(); - } else { - // We are doing a normal create - - // populate this with the ID of the newly created resource - return new MethodOutcome(); - } - -} -//END SNIPPET: createConditional - - -//START SNIPPET: createClient -@Create -public abstract MethodOutcome createNewPatient(@ResourceParam Patient thePatient); -//END SNIPPET: createClient - -//START SNIPPET: updateConditional -@Update -public MethodOutcome updatePatientConditional( - @ResourceParam Patient thePatient, - @IdParam IdType theId, - @ConditionalUrlParam String theConditional) { - - // Only one of theId or theConditional will have a value and the other will be null, - // depending on the URL passed into the server. - if (theConditional != null) { - // Do a conditional update. theConditional will have a value like "Patient?identifier=system%7C00001" - } else { - // Do a normal update. 
theId will have the identity of the resource to update - } - - return new MethodOutcome(); // populate this -} -//END SNIPPET: updateConditional - -//START SNIPPET: updatePrefer -@Update -public MethodOutcome updatePatientPrefer( - @ResourceParam Patient thePatient, - @IdParam IdType theId) { - - // Save the patient to the database - - // Update the version and last updated time on the resource - IdType updatedId = theId.withVersion("123"); - thePatient.setId(updatedId); - InstantType lastUpdated = InstantType.withCurrentTime(); - thePatient.getMeta().setLastUpdatedElement(lastUpdated); - - // Add the resource to the outcome, so that it can be returned by the server - // if the client requests it - MethodOutcome outcome = new MethodOutcome(); - outcome.setId(updatedId); - outcome.setResource(thePatient); - return outcome; -} -//END SNIPPET: updatePrefer - -//START SNIPPET: updateRaw -@Update -public MethodOutcome updatePatientWithRawValue ( - @ResourceParam Patient thePatient, - @IdParam IdType theId, - @ResourceParam String theRawBody, - @ResourceParam EncodingEnum theEncodingEnum) { - - // Here, thePatient will have the parsed patient body, but - // theRawBody will also have the raw text of the resource - // being created, and theEncodingEnum will tell you which - // encoding was used - - return new MethodOutcome(); // populate this -} -//END SNIPPET: updateRaw - -//START SNIPPET: update -@Update -public MethodOutcome updatePatient(@IdParam IdType theId, @ResourceParam Patient thePatient) { - - /* - * First we might want to do business validation. The UnprocessableEntityException - * results in an HTTP 422, which is appropriate for business rule failure - */ - if (thePatient.getIdentifierFirstRep().isEmpty()) { - /* It is also possible to pass an OperationOutcome resource - * to the UnprocessableEntityException if you want to return - * a custom populated OperationOutcome. Otherwise, a simple one - * is created using the string supplied below. - */ - throw new UnprocessableEntityException(Msg.code(637) + "No identifier supplied"); - } - - String versionId = theId.getVersionIdPart(); - if (versionId != null) { - // If the client passed in a version number in an If-Match header, they are - // doing a version-aware update. You may wish to throw an exception if the supplied - // version is not the latest version. Note that as of DSTU2 the FHIR specification uses - // ETags and If-Match to handle version aware updates, so PreconditionFailedException (HTTP 412) - // is used instead of ResourceVersionConflictException (HTTP 409) - if (detectedVersionConflict) { - throw new PreconditionFailedException(Msg.code(638) + "Unexpected version"); - } - } - - // Save this patient to the database... 
- savePatientToDatabase(theId, thePatient); - - // This method returns a MethodOutcome object which contains - // the ID and Version ID for the newly saved resource - MethodOutcome retVal = new MethodOutcome(); - String newVersion = "2"; // may be null if the server is not version aware - retVal.setId(theId.withVersion(newVersion)); - - // You can also add an OperationOutcome resource to return - // This part is optional though: - OperationOutcome outcome = new OperationOutcome(); - outcome.addIssue().setDiagnostics("One minor issue detected"); - retVal.setOperationOutcome(outcome); - - // If your server supports creating resources during an update if they don't already exist - // (this is not mandatory and may not be desirable anyhow) you can flag in the response - // that this was a creation as follows: - // retVal.setCreated(true); - - return retVal; -} -//END SNIPPET: update - -//START SNIPPET: updateClient -@Update -public abstract MethodOutcome updateSomePatient(@IdParam IdType theId, @ResourceParam Patient thePatient); -//END SNIPPET: updateClient - -//START SNIPPET: validate -@Validate -public MethodOutcome validatePatient(@ResourceParam Patient thePatient, - @Validate.Mode ValidationModeEnum theMode, - @Validate.Profile String theProfile) { - - // Actually do our validation: The UnprocessableEntityException - // results in an HTTP 422, which is appropriate for business rule failure - if (thePatient.getIdentifierFirstRep().isEmpty()) { - /* It is also possible to pass an OperationOutcome resource - * to the UnprocessableEntityException if you want to return - * a custom populated OperationOutcome. Otherwise, a simple one - * is created using the string supplied below. - */ - throw new UnprocessableEntityException(Msg.code(639) + "No identifier supplied"); - } - - // This method returns a MethodOutcome object - MethodOutcome retVal = new MethodOutcome(); - - // You may also add an OperationOutcome resource to return - // This part is optional though: - OperationOutcome outcome = new OperationOutcome(); - outcome.addIssue().setSeverity(IssueSeverity.WARNING).setDiagnostics("One minor issue detected"); - retVal.setOperationOutcome(outcome); - - return retVal; -} -//END SNIPPET: validate - - - - -public static void main(String[] args) throws DataFormatException, IOException { -//nothing -} - - -private void savePatientToDatabase(Patient thePatient) { - // nothing -} -private void savePatientToDatabase(IdType theId, Patient thePatient) { - // nothing -} - -//START SNIPPET: metadataProvider -public class CapabilityStatementProvider { - - @Metadata - public CapabilityStatement getServerMetadata() { - CapabilityStatement retVal = new CapabilityStatement(); - // ..populate.. - return retVal; - } + // START SNIPPET: vread + @Read(version = true) + public Patient readOrVread(@IdParam IdType theId) { + Patient retVal = new Patient(); + + if (theId.hasVersionIdPart()) { + // this is a vread + } else { + // this is a read + } + + // ...populate... + + return retVal; + } + // END SNIPPET: vread + + // START SNIPPET: searchStringParam + @Search() + public List searchByLastName(@RequiredParam(name = Patient.SP_FAMILY) StringParam theFamily) { + String valueToMatch = theFamily.getValue(); + + if (theFamily.isExact()) { + // Do an exact match search + } else { + // Do a fuzzy search if possible + } + + // ...populate... 
+ Patient patient = new Patient(); + patient.addIdentifier().setSystem("urn:mrns").setValue("12345"); + patient.addName().setFamily("Smith").addGiven("Tester").addGiven("Q"); + // ...etc... + + // Every returned resource must have its logical ID set. If the server + // supports versioning, that should be set too + String logicalId = "4325"; + String versionId = "2"; // optional + patient.setId(new IdType("Patient", logicalId, versionId)); + + /* + * This is obviously a fairly contrived example since we are always + * just returning the same hardcoded patient, but in a real scenario + * you could return as many resources as you wanted, and they + * should actually match the given search criteria. + */ + List retVal = new ArrayList(); + retVal.add(patient); + + return retVal; + } + // END SNIPPET: searchStringParam + + // START SNIPPET: searchNamedQuery + @Search(queryName = "namedQuery1") + public List searchByNamedQuery(@RequiredParam(name = "someparam") StringParam theSomeParam) { + List retVal = new ArrayList(); + // ...populate... + return retVal; + } + // END SNIPPET: searchNamedQuery + + // START SNIPPET: searchComposite + @Search() + public List searchByComposite( + @RequiredParam( + name = Observation.SP_CODE_VALUE_DATE, + compositeTypes = {TokenParam.class, DateParam.class}) + CompositeParam theParam) { + // Each of the two values in the composite param are accessible separately. + // In the case of Observation's name-value-date, the left is a string and + // the right is a date. + TokenParam observationName = theParam.getLeftValue(); + DateParam observationValue = theParam.getRightValue(); + + List retVal = new ArrayList(); + // ...populate... + return retVal; + } + // END SNIPPET: searchComposite + + // START SNIPPET: searchIdentifierParam + @Search() + public List searchByIdentifier(@RequiredParam(name = Patient.SP_IDENTIFIER) TokenParam theId) { + String identifierSystem = theId.getSystem(); + String identifier = theId.getValue(); + + List retVal = new ArrayList(); + // ...populate... + return retVal; + } + // END SNIPPET: searchIdentifierParam + + // START SNIPPET: searchOptionalParam + @Search() + public List searchByNames( + @RequiredParam(name = Patient.SP_FAMILY) StringParam theFamilyName, + @OptionalParam(name = Patient.SP_GIVEN) StringParam theGivenName) { + String familyName = theFamilyName.getValue(); + String givenName = theGivenName != null ? theGivenName.getValue() : null; + + List retVal = new ArrayList(); + // ...populate... + return retVal; + } + // END SNIPPET: searchOptionalParam + + // START SNIPPET: searchWithDocs + @Description(shortDefinition = "This search finds all patient resources matching a given name combination") + @Search() + public List searchWithDocs( + @Description(shortDefinition = "This is the patient's last name - Supports partial matches") + @RequiredParam(name = Patient.SP_FAMILY) + StringParam theFamilyName, + @Description(shortDefinition = "This is the patient's given names") @OptionalParam(name = Patient.SP_GIVEN) + StringParam theGivenName) { + + List retVal = new ArrayList(); + // ...populate... 
+ return retVal; + } + // END SNIPPET: searchWithDocs + + // START SNIPPET: searchMultiple + @Search() + public List searchByObservationNames( + @RequiredParam(name = Observation.SP_CODE) TokenOrListParam theCodings) { + + // The list here will contain 0..* codings, and any observations which match any of the + // given codings should be returned + List wantedCodings = theCodings.getValuesAsQueryTokens(); + + List retVal = new ArrayList(); + // ...populate... + return retVal; + } + // END SNIPPET: searchMultiple + + // START SNIPPET: searchMultipleAnd + @Search() + public List searchByPatientAddress( + @RequiredParam(name = Patient.SP_ADDRESS) StringAndListParam theAddressParts) { + + // StringAndListParam is a container for 0..* StringOrListParam, which is in turn a + // container for 0..* strings. It is a little bit weird to understand at first, but think of the + // StringAndListParam to be an AND list with multiple OR lists inside it. So you will need + // to return results which match at least one string within every OR list. + List wantedCodings = theAddressParts.getValuesAsQueryTokens(); + for (StringOrListParam nextOrList : wantedCodings) { + List queryTokens = nextOrList.getValuesAsQueryTokens(); + // Only return results that match at least one of the tokens in the list below + for (StringParam nextString : queryTokens) { + // ....check for match... + } + } + + List retVal = new ArrayList(); + // ...populate... + return retVal; + } + // END SNIPPET: searchMultipleAnd + + // START SNIPPET: dates + @Search() + public List searchByObservationNames(@RequiredParam(name = Patient.SP_BIRTHDATE) DateParam theDate) { + ParamPrefixEnum prefix = theDate.getPrefix(); // e.g. gt, le, etc.. + Date date = theDate.getValue(); // e.g. 2011-01-02 + TemporalPrecisionEnum precision = theDate.getPrecision(); // e.g. DAY + + List retVal = new ArrayList(); + // ...populate... + return retVal; + } + // END SNIPPET: dates + + public void dateClientExample() { + ITestClient client = provideTc(); + // START SNIPPET: dateClient + DateParam param = new DateParam(ParamPrefixEnum.GREATERTHAN_OR_EQUALS, "2011-01-02"); + List response = client.getPatientByDob(param); + // END SNIPPET: dateClient + } + + // START SNIPPET: dateRange + @Search() + public List searchByDateRange(@RequiredParam(name = Observation.SP_DATE) DateRangeParam theRange) { + + Date from = theRange.getLowerBoundAsInstant(); + Date to = theRange.getUpperBoundAsInstant(); + + List retVal = new ArrayList(); + // ...populate... 
+ return retVal; + } + // END SNIPPET: dateRange + + private ITestClient provideTc() { + return null; + } + + @Override + public Class getResourceType() { + return null; + } + + // START SNIPPET: pathSpec + @Search() + public List getDiagnosticReport( + @RequiredParam(name = DiagnosticReport.SP_IDENTIFIER) TokenParam theIdentifier, + @IncludeParam(allow = {"DiagnosticReport:subject"}) Set theIncludes) { + + List retVal = new ArrayList(); + + // Assume this method exists and loads the report from the DB + DiagnosticReport report = loadSomeDiagnosticReportFromDatabase(theIdentifier); + + // If the client has asked for the subject to be included: + if (theIncludes.contains(new Include("DiagnosticReport:subject"))) { + + // The resource reference should contain the ID of the patient + IIdType subjectId = report.getSubject().getReferenceElement(); + + // So load the patient ID and return it + Patient subject = loadSomePatientFromDatabase(subjectId); + report.getSubject().setResource(subject); + } + + retVal.add(report); + return retVal; + } + // END SNIPPET: pathSpec + + // START SNIPPET: revInclude + @Search() + public List getDiagnosticReport( + @RequiredParam(name = DiagnosticReport.SP_IDENTIFIER) TokenParam theIdentifier, + @IncludeParam() Set theIncludes, + @IncludeParam(reverse = true) Set theReverseIncludes) { + + return new ArrayList(); // populate this + } + // END SNIPPET: revInclude + + // START SNIPPET: pathSpecSimple + @Search() + public List getDiagnosticReport( + @RequiredParam(name = DiagnosticReport.SP_IDENTIFIER) TokenParam theIdentifier, + @IncludeParam(allow = {"DiagnosticReport:subject"}) String theInclude) { + + List retVal = new ArrayList(); + + // Assume this method exists and loads the report from the DB + DiagnosticReport report = loadSomeDiagnosticReportFromDatabase(theIdentifier); + + // If the client has asked for the subject to be included: + if ("DiagnosticReport:subject".equals(theInclude)) { + + // The resource reference should contain the ID of the patient + IIdType subjectId = report.getSubject().getReferenceElement(); + + // So load the patient ID and return it + Patient subject = loadSomePatientFromDatabase(subjectId); + report.getSubject().setResource(subject); + } + + retVal.add(report); + return retVal; + } + // END SNIPPET: pathSpecSimple + + // START SNIPPET: quantity + @Search() + public List getObservationsByQuantity( + @RequiredParam(name = Observation.SP_VALUE_QUANTITY) QuantityParam theQuantity) { + + List retVal = new ArrayList(); + + ParamPrefixEnum prefix = theQuantity.getPrefix(); + BigDecimal value = theQuantity.getValue(); + String units = theQuantity.getUnits(); + // .. Apply these parameters .. + + // ... populate ... + return retVal; + } + // END SNIPPET: quantity + + private DiagnosticReport loadSomeDiagnosticReportFromDatabase(TokenParam theIdentifier) { + return null; + } + + private Patient loadSomePatientFromDatabase(IIdType theId) { + return null; + } + + // START SNIPPET: create + @Create + public MethodOutcome createPatient(@ResourceParam Patient thePatient) { + + /* + * First we might want to do business validation. The UnprocessableEntityException + * results in an HTTP 422, which is appropriate for business rule failure + */ + if (thePatient.getIdentifierFirstRep().isEmpty()) { + /* It is also possible to pass an OperationOutcome resource + * to the UnprocessableEntityException if you want to return + * a custom populated OperationOutcome. Otherwise, a simple one + * is created using the string supplied below. 
+ */ + throw new UnprocessableEntityException(Msg.code(636) + "No identifier supplied"); + } + + // Save this patient to the database... + savePatientToDatabase(thePatient); + + // This method returns a MethodOutcome object which contains + // the ID (composed of the type Patient, the logical ID 3746, and the + // version ID 1) + MethodOutcome retVal = new MethodOutcome(); + retVal.setId(new IdType("Patient", "3746", "1")); + + // You can also add an OperationOutcome resource to return + // This part is optional though: + OperationOutcome outcome = new OperationOutcome(); + outcome.addIssue().setDiagnostics("One minor issue detected"); + retVal.setOperationOutcome(outcome); + + return retVal; + } + // END SNIPPET: create + + // START SNIPPET: createConditional + @Create + public MethodOutcome createPatientConditional( + @ResourceParam Patient thePatient, @ConditionalUrlParam String theConditionalUrl) { + + if (theConditionalUrl != null) { + // We are doing a conditional create + + // populate this with the ID of the existing resource which + // matches the conditional URL + return new MethodOutcome(); + } else { + // We are doing a normal create + + // populate this with the ID of the newly created resource + return new MethodOutcome(); + } + } + // END SNIPPET: createConditional + + // START SNIPPET: createClient + @Create + public abstract MethodOutcome createNewPatient(@ResourceParam Patient thePatient); + // END SNIPPET: createClient + + // START SNIPPET: updateConditional + @Update + public MethodOutcome updatePatientConditional( + @ResourceParam Patient thePatient, @IdParam IdType theId, @ConditionalUrlParam String theConditional) { + + // Only one of theId or theConditional will have a value and the other will be null, + // depending on the URL passed into the server. + if (theConditional != null) { + // Do a conditional update. theConditional will have a value like "Patient?identifier=system%7C00001" + } else { + // Do a normal update. 
theId will have the identity of the resource to update + } + + return new MethodOutcome(); // populate this + } + // END SNIPPET: updateConditional + + // START SNIPPET: updatePrefer + @Update + public MethodOutcome updatePatientPrefer(@ResourceParam Patient thePatient, @IdParam IdType theId) { + + // Save the patient to the database + + // Update the version and last updated time on the resource + IdType updatedId = theId.withVersion("123"); + thePatient.setId(updatedId); + InstantType lastUpdated = InstantType.withCurrentTime(); + thePatient.getMeta().setLastUpdatedElement(lastUpdated); + + // Add the resource to the outcome, so that it can be returned by the server + // if the client requests it + MethodOutcome outcome = new MethodOutcome(); + outcome.setId(updatedId); + outcome.setResource(thePatient); + return outcome; + } + // END SNIPPET: updatePrefer + + // START SNIPPET: updateRaw + @Update + public MethodOutcome updatePatientWithRawValue( + @ResourceParam Patient thePatient, + @IdParam IdType theId, + @ResourceParam String theRawBody, + @ResourceParam EncodingEnum theEncodingEnum) { + + // Here, thePatient will have the parsed patient body, but + // theRawBody will also have the raw text of the resource + // being created, and theEncodingEnum will tell you which + // encoding was used + + return new MethodOutcome(); // populate this + } + // END SNIPPET: updateRaw + + // START SNIPPET: update + @Update + public MethodOutcome updatePatient(@IdParam IdType theId, @ResourceParam Patient thePatient) { + + /* + * First we might want to do business validation. The UnprocessableEntityException + * results in an HTTP 422, which is appropriate for business rule failure + */ + if (thePatient.getIdentifierFirstRep().isEmpty()) { + /* It is also possible to pass an OperationOutcome resource + * to the UnprocessableEntityException if you want to return + * a custom populated OperationOutcome. Otherwise, a simple one + * is created using the string supplied below. + */ + throw new UnprocessableEntityException(Msg.code(637) + "No identifier supplied"); + } + + String versionId = theId.getVersionIdPart(); + if (versionId != null) { + // If the client passed in a version number in an If-Match header, they are + // doing a version-aware update. You may wish to throw an exception if the supplied + // version is not the latest version. Note that as of DSTU2 the FHIR specification uses + // ETags and If-Match to handle version aware updates, so PreconditionFailedException (HTTP 412) + // is used instead of ResourceVersionConflictException (HTTP 409) + if (detectedVersionConflict) { + throw new PreconditionFailedException(Msg.code(638) + "Unexpected version"); + } + } + + // Save this patient to the database... 
+ savePatientToDatabase(theId, thePatient); + + // This method returns a MethodOutcome object which contains + // the ID and Version ID for the newly saved resource + MethodOutcome retVal = new MethodOutcome(); + String newVersion = "2"; // may be null if the server is not version aware + retVal.setId(theId.withVersion(newVersion)); + + // You can also add an OperationOutcome resource to return + // This part is optional though: + OperationOutcome outcome = new OperationOutcome(); + outcome.addIssue().setDiagnostics("One minor issue detected"); + retVal.setOperationOutcome(outcome); + + // If your server supports creating resources during an update if they don't already exist + // (this is not mandatory and may not be desirable anyhow) you can flag in the response + // that this was a creation as follows: + // retVal.setCreated(true); + + return retVal; + } + // END SNIPPET: update + + // START SNIPPET: updateClient + @Update + public abstract MethodOutcome updateSomePatient(@IdParam IdType theId, @ResourceParam Patient thePatient); + // END SNIPPET: updateClient + + // START SNIPPET: validate + @Validate + public MethodOutcome validatePatient( + @ResourceParam Patient thePatient, + @Validate.Mode ValidationModeEnum theMode, + @Validate.Profile String theProfile) { + + // Actually do our validation: The UnprocessableEntityException + // results in an HTTP 422, which is appropriate for business rule failure + if (thePatient.getIdentifierFirstRep().isEmpty()) { + /* It is also possible to pass an OperationOutcome resource + * to the UnprocessableEntityException if you want to return + * a custom populated OperationOutcome. Otherwise, a simple one + * is created using the string supplied below. + */ + throw new UnprocessableEntityException(Msg.code(639) + "No identifier supplied"); + } + + // This method returns a MethodOutcome object + MethodOutcome retVal = new MethodOutcome(); + + // You may also add an OperationOutcome resource to return + // This part is optional though: + OperationOutcome outcome = new OperationOutcome(); + outcome.addIssue().setSeverity(IssueSeverity.WARNING).setDiagnostics("One minor issue detected"); + retVal.setOperationOutcome(outcome); + + return retVal; + } + // END SNIPPET: validate + + public static void main(String[] args) throws DataFormatException, IOException { + // nothing + } + + private void savePatientToDatabase(Patient thePatient) { + // nothing + } + + private void savePatientToDatabase(IdType theId, Patient thePatient) { + // nothing + } + + // START SNIPPET: metadataProvider + public class CapabilityStatementProvider { + + @Metadata + public CapabilityStatement getServerMetadata() { + CapabilityStatement retVal = new CapabilityStatement(); + // ..populate.. + return retVal; + } + } + // END SNIPPET: metadataProvider + + // START SNIPPET: metadataClient + public interface MetadataClient extends IRestfulClient { + + @Metadata + CapabilityStatement getServerMetadata(); + + // ....Other methods can also be added as usual.... 
+ + } + // END SNIPPET: metadataClient + + // START SNIPPET: historyClient + public interface HistoryClient extends IBasicClient { + /** Server level (history of ALL resources) */ + @History + Bundle getHistoryServer(); + + /** Type level (history of all resources of a given type) */ + @History(type = Patient.class) + Bundle getHistoryPatientType(); + + /** Instance level (history of a specific resource instance by type and ID) */ + @History(type = Patient.class) + Bundle getHistoryPatientInstance(@IdParam IdType theId); + + /** + * Either (or both) of the "since" and "count" parameters can + * also be included in any of the methods above. + */ + @History + Bundle getHistoryServerWithCriteria(@Since Date theDate, @Count Integer theCount); + } + // END SNIPPET: historyClient + + public void bbbbb() throws DataFormatException, IOException { + // START SNIPPET: metadataClientUsage + FhirContext ctx = FhirContext.forR4(); + MetadataClient client = ctx.newRestfulClient(MetadataClient.class, "http://spark.furore.com/fhir"); + CapabilityStatement metadata = client.getServerMetadata(); + System.out.println(ctx.newXmlParser().encodeResourceToString(metadata)); + // END SNIPPET: metadataClientUsage + } + + // START SNIPPET: readTags + @Read() + public Patient readPatient(@IdParam IdType theId) { + Patient retVal = new Patient(); + + // ..populate demographics, contact, or anything else you usually would.. + + // Populate some tags + retVal.getMeta().addTag("http://animals", "Dog", "Canine Patient"); // TODO: more realistic example + retVal.getMeta().addTag("http://personality", "Friendly", "Friendly"); // TODO: more realistic example + + return retVal; + } + // END SNIPPET: readTags + + // START SNIPPET: clientReadInterface + private interface IPatientClient extends IBasicClient { + /** Read a patient from a server by ID */ + @Read + Patient readPatient(@IdParam IdType theId); + + // Only one method is shown here, but many methods may be + // added to the same client interface! + } + // END SNIPPET: clientReadInterface + + public void clientRead() { + // START SNIPPET: clientReadTags + IPatientClient client = FhirContext.forR4().newRestfulClient(IPatientClient.class, "http://foo/fhir"); + Patient patient = client.readPatient(new IdType("1234")); + + // Access the tag list + List tagList = patient.getMeta().getTag(); + for (Coding next : tagList) { + // ..process the tags somehow.. + } + // END SNIPPET: clientReadTags + + // START SNIPPET: clientCreateTags + Patient newPatient = new Patient(); + + // Populate the resource object + newPatient.addIdentifier().setUse(IdentifierUse.OFFICIAL).setValue("123"); + newPatient.addName().setFamily("Jones").addGiven("Frank"); + + // Populate some tags + newPatient.getMeta().addTag("http://animals", "Dog", "Canine Patient"); // TODO: more realistic example + newPatient.getMeta().addTag("http://personality", "Friendly", "Friendly"); // TODO: more realistic example + + // ...invoke the create method on the client... + // END SNIPPET: clientCreateTags + } + + // START SNIPPET: createTags + @Create + public MethodOutcome createPatientResource(@ResourceParam Patient thePatient) { + + // ..save the resource.. 
+ IdType id = new IdType("123"); // the new database primary key for this resource + + // Get the tag list + List tags = thePatient.getMeta().getTag(); + for (Coding tag : tags) { + // process/save each tag somehow + } + + return new MethodOutcome(id); + } + // END SNIPPET: createTags + + // START SNIPPET: transaction + @Transaction + public Bundle transaction(@TransactionParam Bundle theInput) { + for (BundleEntryComponent nextEntry : theInput.getEntry()) { + // Process entry + } + + Bundle retVal = new Bundle(); + // Populate return bundle + return retVal; + } + // END SNIPPET: transaction } -//END SNIPPET: metadataProvider - - - -//START SNIPPET: metadataClient -public interface MetadataClient extends IRestfulClient { - - @Metadata - CapabilityStatement getServerMetadata(); - - // ....Other methods can also be added as usual.... - -} -//END SNIPPET: metadataClient - -//START SNIPPET: historyClient -public interface HistoryClient extends IBasicClient { - /** Server level (history of ALL resources) */ - @History - Bundle getHistoryServer(); - - /** Type level (history of all resources of a given type) */ - @History(type=Patient.class) - Bundle getHistoryPatientType(); - - /** Instance level (history of a specific resource instance by type and ID) */ - @History(type=Patient.class) - Bundle getHistoryPatientInstance(@IdParam IdType theId); - - /** - * Either (or both) of the "since" and "count" parameters can - * also be included in any of the methods above. - */ - @History - Bundle getHistoryServerWithCriteria(@Since Date theDate, @Count Integer theCount); - -} -//END SNIPPET: historyClient - - -public void bbbbb() throws DataFormatException, IOException { -//START SNIPPET: metadataClientUsage -FhirContext ctx = FhirContext.forR4(); -MetadataClient client = ctx.newRestfulClient(MetadataClient.class, "http://spark.furore.com/fhir"); -CapabilityStatement metadata = client.getServerMetadata(); -System.out.println(ctx.newXmlParser().encodeResourceToString(metadata)); -//END SNIPPET: metadataClientUsage -} - -//START SNIPPET: readTags -@Read() -public Patient readPatient(@IdParam IdType theId) { - Patient retVal = new Patient(); - - // ..populate demographics, contact, or anything else you usually would.. - - // Populate some tags - retVal.getMeta().addTag("http://animals", "Dog", "Canine Patient"); // TODO: more realistic example - retVal.getMeta().addTag("http://personality", "Friendly", "Friendly"); // TODO: more realistic example - - return retVal; -} -//END SNIPPET: readTags - -//START SNIPPET: clientReadInterface -private interface IPatientClient extends IBasicClient -{ - /** Read a patient from a server by ID */ - @Read - Patient readPatient(@IdParam IdType theId); - - // Only one method is shown here, but many methods may be - // added to the same client interface! -} -//END SNIPPET: clientReadInterface - -public void clientRead() { -//START SNIPPET: clientReadTags -IPatientClient client = FhirContext.forR4().newRestfulClient(IPatientClient.class, "http://foo/fhir"); -Patient patient = client.readPatient(new IdType("1234")); - -// Access the tag list -List tagList = patient.getMeta().getTag(); -for (Coding next : tagList) { - // ..process the tags somehow.. 
-} -//END SNIPPET: clientReadTags - -//START SNIPPET: clientCreateTags -Patient newPatient = new Patient(); - -// Populate the resource object -newPatient.addIdentifier().setUse(IdentifierUse.OFFICIAL).setValue("123"); -newPatient.addName().setFamily("Jones").addGiven("Frank"); - -// Populate some tags -newPatient.getMeta().addTag("http://animals", "Dog", "Canine Patient"); // TODO: more realistic example -newPatient.getMeta().addTag("http://personality", "Friendly", "Friendly"); // TODO: more realistic example - -// ...invoke the create method on the client... -//END SNIPPET: clientCreateTags -} - -//START SNIPPET: createTags -@Create -public MethodOutcome createPatientResource(@ResourceParam Patient thePatient) { - - // ..save the resource.. - IdType id = new IdType("123"); // the new database primary key for this resource - - // Get the tag list - List tags = thePatient.getMeta().getTag(); - for (Coding tag : tags) { - // process/save each tag somehow - } - - return new MethodOutcome(id); -} -//END SNIPPET: createTags - -//START SNIPPET: transaction -@Transaction -public Bundle transaction(@TransactionParam Bundle theInput) { - for (BundleEntryComponent nextEntry : theInput.getEntry()) { - // Process entry - } - - Bundle retVal = new Bundle(); - // Populate return bundle - return retVal; -} -//END SNIPPET: transaction - -} - - diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/SecurityInterceptors.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/SecurityInterceptors.java index 29b6bf69ecb..f44f6004ecf 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/SecurityInterceptors.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/SecurityInterceptors.java @@ -33,11 +33,11 @@ import javax.servlet.http.HttpServletResponse; public class SecurityInterceptors { public void basicAuthInterceptorRealm() { - //START SNIPPET: basicAuthInterceptorRealm + // START SNIPPET: basicAuthInterceptorRealm AuthenticationException ex = new AuthenticationException(); ex.addAuthenticateHeaderForRealm("myRealm"); throw ex; - //END SNIPPET: basicAuthInterceptorRealm + // END SNIPPET: basicAuthInterceptorRealm } // START SNIPPET: basicAuthInterceptorExample @@ -49,7 +49,9 @@ public class SecurityInterceptors { * a username and password are provided in a header called Authorization. 
*/ @Hook(Pointcut.SERVER_INCOMING_REQUEST_POST_PROCESSED) - public boolean incomingRequestPostProcessed(RequestDetails theRequestDetails, HttpServletRequest theRequest, HttpServletResponse theResponse) throws AuthenticationException { + public boolean incomingRequestPostProcessed( + RequestDetails theRequestDetails, HttpServletRequest theRequest, HttpServletResponse theResponse) + throws AuthenticationException { String authHeader = theRequest.getHeader("Authorization"); // The format of the header must be: @@ -77,9 +79,7 @@ public class SecurityInterceptors { // Return true to allow the request to proceed return true; } - - } - //END SNIPPET: basicAuthInterceptorExample + // END SNIPPET: basicAuthInterceptorExample } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ServerETagExamples.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ServerETagExamples.java index 2c98c6fb453..6c78c25e8f9 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ServerETagExamples.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ServerETagExamples.java @@ -28,21 +28,20 @@ import javax.servlet.annotation.WebServlet; @SuppressWarnings("serial") public class ServerETagExamples { - // START SNIPPET: disablingETags - @WebServlet(urlPatterns = { "/fhir/*" }, displayName = "FHIR Server") - public class RestfulServerWithLogging extends RestfulServer { + // START SNIPPET: disablingETags + @WebServlet( + urlPatterns = {"/fhir/*"}, + displayName = "FHIR Server") + public class RestfulServerWithLogging extends RestfulServer { + + @Override + protected void initialize() throws ServletException { + // ... define your resource providers here ... + + // ETag support is enabled by default + setETagSupport(ETagSupportEnum.ENABLED); + } + } + // END SNIPPET: disablingETags - @Override - protected void initialize() throws ServletException { - // ... define your resource providers here ... 
- - // ETag support is enabled by default - setETagSupport(ETagSupportEnum.ENABLED); - } - - } - // END SNIPPET: disablingETags - - - } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ServerExceptionsExample.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ServerExceptionsExample.java index 87a6d52b887..91135338497 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ServerExceptionsExample.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ServerExceptionsExample.java @@ -31,23 +31,20 @@ import org.hl7.fhir.r4.model.Patient; public abstract class ServerExceptionsExample implements IResourceProvider { -private boolean databaseIsDown; + private boolean databaseIsDown; -//START SNIPPET: returnOO -@Read -public Patient read(@IdParam IdType theId) { - if (databaseIsDown) { - OperationOutcome oo = new OperationOutcome(); - oo.addIssue().setSeverity(IssueSeverityEnum.FATAL).setDetails("Database is down"); - throw new InternalErrorException(Msg.code(641) + "Database is down", oo); - } - - Patient patient = new Patient(); // populate this - return patient; -} -//END SNIPPET: returnOO + // START SNIPPET: returnOO + @Read + public Patient read(@IdParam IdType theId) { + if (databaseIsDown) { + OperationOutcome oo = new OperationOutcome(); + oo.addIssue().setSeverity(IssueSeverityEnum.FATAL).setDetails("Database is down"); + throw new InternalErrorException(Msg.code(641) + "Database is down", oo); + } + Patient patient = new Patient(); // populate this + return patient; + } + // END SNIPPET: returnOO } - - diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ServerInterceptors.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ServerInterceptors.java index 735b400756a..4aa2a4c2900 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ServerInterceptors.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ServerInterceptors.java @@ -33,67 +33,66 @@ import java.util.List; public class ServerInterceptors { -@SuppressWarnings("unused") -public static void main(String[] args) throws DataFormatException, IOException { - - -// START SNIPPET: resourceExtension -// Create an example patient -Patient patient = new Patient(); -patient.addIdentifier().setUse(IdentifierUseEnum.OFFICIAL).setSystem("urn:example").setValue("7000135"); + @SuppressWarnings("unused") + public static void main(String[] args) throws DataFormatException, IOException { -// Create an extension -ExtensionDt ext = new ExtensionDt(); -ext.setModifier(false); -ext.setUrl("http://example.com/extensions#someext"); -ext.setValue(new DateTimeDt("2011-01-02T11:13:15")); + // START SNIPPET: resourceExtension + // Create an example patient + Patient patient = new Patient(); + patient.addIdentifier() + .setUse(IdentifierUseEnum.OFFICIAL) + .setSystem("urn:example") + .setValue("7000135"); -// Add the extension to the resource -patient.addUndeclaredExtension(ext); -//END SNIPPET: resourceExtension + // Create an extension + ExtensionDt ext = new ExtensionDt(); + ext.setModifier(false); + ext.setUrl("http://example.com/extensions#someext"); + ext.setValue(new DateTimeDt("2011-01-02T11:13:15")); + // Add the extension to the resource + patient.addUndeclaredExtension(ext); + // END SNIPPET: resourceExtension -//START SNIPPET: resourceStringExtension -HumanNameDt name = patient.addName(); -name.addFamily().setValue("Shmoe"); -StringDt given = name.addGiven(); -given.setValue("Joe"); -ExtensionDt ext2 = new ExtensionDt(false, "http://examples.com#moreext", new 
StringDt("Hello")); -given.addUndeclaredExtension(ext2); -//END SNIPPET: resourceStringExtension + // START SNIPPET: resourceStringExtension + HumanNameDt name = patient.addName(); + name.addFamily().setValue("Shmoe"); + StringDt given = name.addGiven(); + given.setValue("Joe"); + ExtensionDt ext2 = new ExtensionDt(false, "http://examples.com#moreext", new StringDt("Hello")); + given.addUndeclaredExtension(ext2); + // END SNIPPET: resourceStringExtension -String output = FhirContext.forDstu2().newXmlParser().setPrettyPrint(true).encodeResourceToString(patient); -System.out.println(output); + String output = + FhirContext.forDstu2().newXmlParser().setPrettyPrint(true).encodeResourceToString(patient); + System.out.println(output); + // START SNIPPET: parseExtension + // Get all extensions (modifier or not) for a given URL + List resourceExts = patient.getUndeclaredExtensionsByUrl("http://fooextensions.com#exts"); -//START SNIPPET: parseExtension -// Get all extensions (modifier or not) for a given URL -List resourceExts = patient.getUndeclaredExtensionsByUrl("http://fooextensions.com#exts"); + // Get all non-modifier extensions regardless of URL + List nonModExts = patient.getUndeclaredExtensions(); -// Get all non-modifier extensions regardless of URL -List nonModExts = patient.getUndeclaredExtensions(); + // Get all modifier extensions regardless of URL + List modExts = patient.getUndeclaredModifierExtensions(); + // END SNIPPET: parseExtension -// Get all modifier extensions regardless of URL -List modExts = patient.getUndeclaredModifierExtensions(); -//END SNIPPET: parseExtension - -} - - -public void foo() { -//START SNIPPET: subExtension -Patient patient = new Patient(); - -ExtensionDt parent = new ExtensionDt(false, "http://example.com#parent"); -patient.addUndeclaredExtension(parent); - -ExtensionDt child1 = new ExtensionDt(false, "http://example.com#childOne", new StringDt("value1")); -parent.addUndeclaredExtension(child1); - -ExtensionDt child2 = new ExtensionDt(false, "http://example.com#childTwo", new StringDt("value1")); -parent.addUndeclaredExtension(child2); -//END SNIPPET: subExtension - -} + } + public void foo() { + // START SNIPPET: subExtension + Patient patient = new Patient(); + + ExtensionDt parent = new ExtensionDt(false, "http://example.com#parent"); + patient.addUndeclaredExtension(parent); + + ExtensionDt child1 = new ExtensionDt(false, "http://example.com#childOne", new StringDt("value1")); + parent.addUndeclaredExtension(child1); + + ExtensionDt child2 = new ExtensionDt(false, "http://example.com#childTwo", new StringDt("value1")); + parent.addUndeclaredExtension(child2); + // END SNIPPET: subExtension + + } } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ServerMetadataExamples.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ServerMetadataExamples.java index 8370b28df29..cd0d14d7c61 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ServerMetadataExamples.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ServerMetadataExamples.java @@ -19,9 +19,6 @@ */ package ca.uhn.hapi.fhir.docs; -import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum; -import ca.uhn.fhir.model.api.Tag; -import ca.uhn.fhir.model.api.TagList; import ca.uhn.fhir.rest.annotation.Search; import org.hl7.fhir.r4.model.InstantType; import org.hl7.fhir.r4.model.Patient; @@ -31,32 +28,34 @@ import java.util.List; public class ServerMetadataExamples { - // START SNIPPET: serverMethod - @Search - public List getAllPatients() { - ArrayList retVal = new 
ArrayList(); - - // Create a patient to return - Patient patient = new Patient(); - retVal.add(patient); - patient.setId("Patient/123"); - patient.addName().setFamily("Smith").addGiven("John"); - - // Add tags to the resource - patient.getMeta().addTag() - .setSystem("http://example.com/tags") - .setCode("tag1") - .setDisplay("Some tag"); - patient.getMeta().addTag() - .setSystem("http://example.com/tags") - .setCode("tag2") - .setDisplay("Another tag"); + // START SNIPPET: serverMethod + @Search + public List getAllPatients() { + ArrayList retVal = new ArrayList(); - // Set the lastUpdate date + // Create a patient to return + Patient patient = new Patient(); + retVal.add(patient); + patient.setId("Patient/123"); + patient.addName().setFamily("Smith").addGiven("John"); + + // Add tags to the resource + patient.getMeta() + .addTag() + .setSystem("http://example.com/tags") + .setCode("tag1") + .setDisplay("Some tag"); + patient.getMeta() + .addTag() + .setSystem("http://example.com/tags") + .setCode("tag2") + .setDisplay("Another tag"); + + // Set the lastUpdate date patient.getMeta().setLastUpdatedElement(new InstantType("2014-07-12T11:22:27Z")); - return retVal; - } - // END SNIPPET: serverMethod - + return retVal; + } + // END SNIPPET: serverMethod + } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ServerOperations.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ServerOperations.java index a1f66c5cde1..1365d2d61ec 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ServerOperations.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ServerOperations.java @@ -34,19 +34,18 @@ import org.hl7.fhir.r4.model.IdType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.util.List; - +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; public class ServerOperations { private static final Logger ourLog = LoggerFactory.getLogger(ServerOperations.class); - - //START SNIPPET: manualInputAndOutput - @Operation(name="$manualInputAndOutput", manualResponse=true, manualRequest=true) - public void manualInputAndOutput(HttpServletRequest theServletRequest, HttpServletResponse theServletResponse) throws IOException { + // START SNIPPET: manualInputAndOutput + @Operation(name = "$manualInputAndOutput", manualResponse = true, manualRequest = true) + public void manualInputAndOutput(HttpServletRequest theServletRequest, HttpServletResponse theServletResponse) + throws IOException { String contentType = theServletRequest.getContentType(); byte[] bytes = IOUtils.toByteArray(theServletRequest.getInputStream()); @@ -56,70 +55,67 @@ public class ServerOperations { theServletResponse.getWriter().write("hello"); theServletResponse.getWriter().close(); } - //END SNIPPET: manualInputAndOutput + // END SNIPPET: manualInputAndOutput + // START SNIPPET: searchParamBasic + @Operation(name = "$find-matches", idempotent = true) + public Parameters findMatchesBasic( + @OperationParam(name = "date") DateParam theDate, @OperationParam(name = "code") TokenParam theCode) { - //START SNIPPET: searchParamBasic - @Operation(name="$find-matches", idempotent=true) - public Parameters findMatchesBasic( - @OperationParam(name="date") DateParam theDate, - @OperationParam(name="code") TokenParam theCode) { - - Parameters retVal = new Parameters(); - // Populate bundle with matching resources - return retVal; - } 
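To show how a client might invoke the $find-matches operation defined in the searchParamBasic snippet above, here is a minimal sketch using the generic client; the base URL is hypothetical and the operation is assumed to be bound at the server level, with parameter names taken from the snippet:

    FhirContext ctx = FhirContext.forR4();
    IGenericClient client = ctx.newRestfulGenericClient("http://example.com/fhir");

    // Build the input Parameters resource for the operation
    Parameters inParams = new Parameters();
    inParams.addParameter().setName("date").setValue(new DateType("2011-01-02"));
    inParams.addParameter().setName("code").setValue(new Coding("http://loinc.org", "1234-5", null));

    // Invoke $find-matches and read the response
    Parameters outParams = client.operation()
        .onServer()
        .named("$find-matches")
        .withParameters(inParams)
        .execute();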
- //END SNIPPET: searchParamBasic + Parameters retVal = new Parameters(); + // Populate bundle with matching resources + return retVal; + } + // END SNIPPET: searchParamBasic - //START SNIPPET: searchParamAdvanced - @Operation(name="$find-matches", idempotent=true) - public Parameters findMatchesAdvanced( - @OperationParam(name="dateRange") DateRangeParam theDate, - @OperationParam(name="name") List theName, - @OperationParam(name="code") TokenAndListParam theEnd) { - - Parameters retVal = new Parameters(); - // Populate bundle with matching resources - return retVal; - } - //END SNIPPET: searchParamAdvanced + // START SNIPPET: searchParamAdvanced + @Operation(name = "$find-matches", idempotent = true) + public Parameters findMatchesAdvanced( + @OperationParam(name = "dateRange") DateRangeParam theDate, + @OperationParam(name = "name") List theName, + @OperationParam(name = "code") TokenAndListParam theEnd) { - //START SNIPPET: patientTypeOperation - @Operation(name="$everything", idempotent=true) - public Bundle patientTypeOperation( - @OperationParam(name="start") DateDt theStart, - @OperationParam(name="end") DateDt theEnd) { - - Bundle retVal = new Bundle(); - // Populate bundle with matching resources - return retVal; - } - //END SNIPPET: patientTypeOperation + Parameters retVal = new Parameters(); + // Populate bundle with matching resources + return retVal; + } + // END SNIPPET: searchParamAdvanced - //START SNIPPET: patientInstanceOperation - @Operation(name="$everything", idempotent=true) - public Bundle patientInstanceOperation( - @IdParam IdType thePatientId, - @OperationParam(name="start") DateDt theStart, - @OperationParam(name="end") DateDt theEnd) { - - Bundle retVal = new Bundle(); - // Populate bundle with matching resources - return retVal; - } - //END SNIPPET: patientInstanceOperation + // START SNIPPET: patientTypeOperation + @Operation(name = "$everything", idempotent = true) + public Bundle patientTypeOperation( + @OperationParam(name = "start") DateDt theStart, @OperationParam(name = "end") DateDt theEnd) { - //START SNIPPET: serverOperation - @Operation(name="$closure") - public ConceptMap closureOperation( - @OperationParam(name="name") StringDt theStart, - @OperationParam(name="concept") List theEnd, - @OperationParam(name="version") IdType theVersion) { - - ConceptMap retVal = new ConceptMap(); - // Populate bundle with matching resources - return retVal; - } - //END SNIPPET: serverOperation + Bundle retVal = new Bundle(); + // Populate bundle with matching resources + return retVal; + } + // END SNIPPET: patientTypeOperation + + // START SNIPPET: patientInstanceOperation + @Operation(name = "$everything", idempotent = true) + public Bundle patientInstanceOperation( + @IdParam IdType thePatientId, + @OperationParam(name = "start") DateDt theStart, + @OperationParam(name = "end") DateDt theEnd) { + + Bundle retVal = new Bundle(); + // Populate bundle with matching resources + return retVal; + } + // END SNIPPET: patientInstanceOperation + + // START SNIPPET: serverOperation + @Operation(name = "$closure") + public ConceptMap closureOperation( + @OperationParam(name = "name") StringDt theStart, + @OperationParam(name = "concept") List theEnd, + @OperationParam(name = "version") IdType theVersion) { + + ConceptMap retVal = new ConceptMap(); + // Populate bundle with matching resources + return retVal; + } + // END SNIPPET: serverOperation } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ServletExamples.java 
b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ServletExamples.java index 5860e7bc986..a6b336f3cfa 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ServletExamples.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ServletExamples.java @@ -42,119 +42,123 @@ import org.hl7.fhir.r4.model.CapabilityStatement; import org.hl7.fhir.r4.model.Enumerations; import org.springframework.web.cors.CorsConfiguration; +import java.util.Arrays; import javax.servlet.ServletException; import javax.servlet.annotation.WebServlet; -import java.util.Arrays; @SuppressWarnings({"serial", "RedundantThrows", "InnerClassMayBeStatic"}) public class ServletExamples { - // START SNIPPET: loggingInterceptor - @WebServlet(urlPatterns = { "/fhir/*" }, displayName = "FHIR Server") - public class RestfulServerWithLogging extends RestfulServer { + // START SNIPPET: loggingInterceptor + @WebServlet( + urlPatterns = {"/fhir/*"}, + displayName = "FHIR Server") + public class RestfulServerWithLogging extends RestfulServer { - @Override - protected void initialize() throws ServletException { - - // ... define your resource providers here ... - - // Now register the logging interceptor - LoggingInterceptor loggingInterceptor = new LoggingInterceptor(); - registerInterceptor(loggingInterceptor); + @Override + protected void initialize() throws ServletException { - // The SLF4j logger "test.accesslog" will receive the logging events - loggingInterceptor.setLoggerName("test.accesslog"); - - // This is the format for each line. A number of substitution variables may - // be used here. See the JavaDoc for LoggingInterceptor for information on - // what is available. - loggingInterceptor.setMessageFormat("Source[${remoteAddr}] Operation[${operationType} ${idOrResourceName}] UA[${requestHeader.user-agent}] Params[${requestParameters}]"); - - } - - } - // END SNIPPET: loggingInterceptor + // ... define your resource providers here ... - // START SNIPPET: OpenApiInterceptor - @WebServlet(urlPatterns = { "/fhir/*" }, displayName = "FHIR Server") - public class RestfulServerWithOpenApi extends RestfulServer { + // Now register the logging interceptor + LoggingInterceptor loggingInterceptor = new LoggingInterceptor(); + registerInterceptor(loggingInterceptor); - @Override - protected void initialize() throws ServletException { + // The SLF4j logger "test.accesslog" will receive the logging events + loggingInterceptor.setLoggerName("test.accesslog"); - // ... define your resource providers here ... + // This is the format for each line. A number of substitution variables may + // be used here. See the JavaDoc for LoggingInterceptor for information on + // what is available. + loggingInterceptor.setMessageFormat( + "Source[${remoteAddr}] Operation[${operationType} ${idOrResourceName}] UA[${requestHeader.user-agent}] Params[${requestParameters}]"); + } + } + // END SNIPPET: loggingInterceptor - // Now register the interceptor + // START SNIPPET: OpenApiInterceptor + @WebServlet( + urlPatterns = {"/fhir/*"}, + displayName = "FHIR Server") + public class RestfulServerWithOpenApi extends RestfulServer { + + @Override + protected void initialize() throws ServletException { + + // ... define your resource providers here ... 
+ + // Now register the interceptor OpenApiInterceptor openApiInterceptor = new OpenApiInterceptor(); - registerInterceptor(openApiInterceptor); + registerInterceptor(openApiInterceptor); + } + } + // END SNIPPET: OpenApiInterceptor - } + // START SNIPPET: validatingInterceptor + @WebServlet( + urlPatterns = {"/fhir/*"}, + displayName = "FHIR Server") + public class ValidatingServerWithLogging extends RestfulServer { - } - // END SNIPPET: OpenApiInterceptor - - // START SNIPPET: validatingInterceptor - @WebServlet(urlPatterns = { "/fhir/*" }, displayName = "FHIR Server") - public class ValidatingServerWithLogging extends RestfulServer { - - @Override - protected void initialize() { + @Override + protected void initialize() { FhirContext ctx = FhirContext.forDstu3(); setFhirContext(ctx); - // ... define your resource providers here ... + // ... define your resource providers here ... - // Create an interceptor to validate incoming requests - RequestValidatingInterceptor requestInterceptor = new RequestValidatingInterceptor(); - - // Register a validator module (you could also use SchemaBaseValidator and/or SchematronBaseValidator) + // Create an interceptor to validate incoming requests + RequestValidatingInterceptor requestInterceptor = new RequestValidatingInterceptor(); + + // Register a validator module (you could also use SchemaBaseValidator and/or SchematronBaseValidator) requestInterceptor.addValidatorModule(new FhirInstanceValidator(ctx)); - - requestInterceptor.setFailOnSeverity(ResultSeverityEnum.ERROR); - requestInterceptor.setAddResponseHeaderOnSeverity(ResultSeverityEnum.INFORMATION); - requestInterceptor.setResponseHeaderValue("Validation on ${line}: ${message} ${severity}"); - requestInterceptor.setResponseHeaderValueNoIssues("No issues detected"); - - // Now register the validating interceptor - registerInterceptor(requestInterceptor); - // Create an interceptor to validate responses - // This is configured in the same way as above - ResponseValidatingInterceptor responseInterceptor = new ResponseValidatingInterceptor(); - responseInterceptor.addValidatorModule(new FhirInstanceValidator(ctx)); - responseInterceptor.setFailOnSeverity(ResultSeverityEnum.ERROR); - responseInterceptor.setAddResponseHeaderOnSeverity(ResultSeverityEnum.INFORMATION); - responseInterceptor.setResponseHeaderValue("Validation on ${line}: ${message} ${severity}"); - responseInterceptor.setResponseHeaderValueNoIssues("No issues detected"); - registerInterceptor(responseInterceptor); - } - - } - // END SNIPPET: validatingInterceptor + requestInterceptor.setFailOnSeverity(ResultSeverityEnum.ERROR); + requestInterceptor.setAddResponseHeaderOnSeverity(ResultSeverityEnum.INFORMATION); + requestInterceptor.setResponseHeaderValue("Validation on ${line}: ${message} ${severity}"); + requestInterceptor.setResponseHeaderValueNoIssues("No issues detected"); - // START SNIPPET: exceptionInterceptor - @WebServlet(urlPatterns = { "/fhir/*" }, displayName = "FHIR Server") - public class RestfulServerWithExceptionHandling extends RestfulServer { + // Now register the validating interceptor + registerInterceptor(requestInterceptor); - @Override - protected void initialize() throws ServletException { - - // ... define your resource providers here ... 
- - // Now register the interceptor - ExceptionHandlingInterceptor interceptor = new ExceptionHandlingInterceptor(); - registerInterceptor(interceptor); + // Create an interceptor to validate responses + // This is configured in the same way as above + ResponseValidatingInterceptor responseInterceptor = new ResponseValidatingInterceptor(); + responseInterceptor.addValidatorModule(new FhirInstanceValidator(ctx)); + responseInterceptor.setFailOnSeverity(ResultSeverityEnum.ERROR); + responseInterceptor.setAddResponseHeaderOnSeverity(ResultSeverityEnum.INFORMATION); + responseInterceptor.setResponseHeaderValue("Validation on ${line}: ${message} ${severity}"); + responseInterceptor.setResponseHeaderValueNoIssues("No issues detected"); + registerInterceptor(responseInterceptor); + } + } + // END SNIPPET: validatingInterceptor - // Return the stack trace to the client for the following exception types - interceptor.setReturnStackTracesForExceptionTypes(InternalErrorException.class, NullPointerException.class); - - } - - } - // END SNIPPET: exceptionInterceptor + // START SNIPPET: exceptionInterceptor + @WebServlet( + urlPatterns = {"/fhir/*"}, + displayName = "FHIR Server") + public class RestfulServerWithExceptionHandling extends RestfulServer { + + @Override + protected void initialize() throws ServletException { + + // ... define your resource providers here ... + + // Now register the interceptor + ExceptionHandlingInterceptor interceptor = new ExceptionHandlingInterceptor(); + registerInterceptor(interceptor); + + // Return the stack trace to the client for the following exception types + interceptor.setReturnStackTracesForExceptionTypes(InternalErrorException.class, NullPointerException.class); + } + } + // END SNIPPET: exceptionInterceptor // START SNIPPET: fhirPathInterceptor - @WebServlet(urlPatterns = { "/fhir/*" }, displayName = "FHIR Server") + @WebServlet( + urlPatterns = {"/fhir/*"}, + displayName = "FHIR Server") public class RestfulServerWithFhirPath extends RestfulServer { @Override @@ -165,14 +169,14 @@ public class ServletExamples { // Now register the interceptor FhirPathFilterInterceptor interceptor = new FhirPathFilterInterceptor(); registerInterceptor(interceptor); - } - } // END SNIPPET: fhirPathInterceptor // START SNIPPET: staticCapabilityStatementInterceptor - @WebServlet(urlPatterns = { "/fhir/*" }, displayName = "FHIR Server") + @WebServlet( + urlPatterns = {"/fhir/*"}, + displayName = "FHIR Server") public class RestfulServerWithStaticCapabilityStatement extends RestfulServer { @Override @@ -195,66 +199,65 @@ public class ServletExamples { // Now register the interceptor registerInterceptor(interceptor); - } - } // END SNIPPET: staticCapabilityStatementInterceptor - // START SNIPPET: responseHighlighterInterceptor - @WebServlet(urlPatterns = { "/fhir/*" }, displayName = "FHIR Server") - public class RestfulServerWithResponseHighlighter extends RestfulServer { + // START SNIPPET: responseHighlighterInterceptor + @WebServlet( + urlPatterns = {"/fhir/*"}, + displayName = "FHIR Server") + public class RestfulServerWithResponseHighlighter extends RestfulServer { - @Override - protected void initialize() throws ServletException { - - // ... define your resource providers here ... - - // Now register the interceptor - ResponseHighlighterInterceptor interceptor = new ResponseHighlighterInterceptor(); - registerInterceptor(interceptor); + @Override + protected void initialize() throws ServletException { - } - - } - // END SNIPPET: responseHighlighterInterceptor + // ... 
define your resource providers here ... - // START SNIPPET: corsInterceptor - @WebServlet(urlPatterns = { "/fhir/*" }, displayName = "FHIR Server") - public class RestfulServerWithCors extends RestfulServer { + // Now register the interceptor + ResponseHighlighterInterceptor interceptor = new ResponseHighlighterInterceptor(); + registerInterceptor(interceptor); + } + } + // END SNIPPET: responseHighlighterInterceptor - @Override - protected void initialize() throws ServletException { - - // ... define your resource providers here ... + // START SNIPPET: corsInterceptor + @WebServlet( + urlPatterns = {"/fhir/*"}, + displayName = "FHIR Server") + public class RestfulServerWithCors extends RestfulServer { - // Define your CORS configuration. This is an example - // showing a typical setup. You should customize this - // to your specific needs - CorsConfiguration config = new CorsConfiguration(); - config.addAllowedHeader("x-fhir-starter"); - config.addAllowedHeader("Origin"); - config.addAllowedHeader("Accept"); - config.addAllowedHeader("X-Requested-With"); - config.addAllowedHeader("Content-Type"); + @Override + protected void initialize() throws ServletException { - config.addAllowedOrigin("*"); + // ... define your resource providers here ... - config.addExposedHeader("Location"); - config.addExposedHeader("Content-Location"); - config.setAllowedMethods(Arrays.asList("GET", "POST", "PUT", "DELETE", "OPTIONS", "PATCH")); + // Define your CORS configuration. This is an example + // showing a typical setup. You should customize this + // to your specific needs + CorsConfiguration config = new CorsConfiguration(); + config.addAllowedHeader("x-fhir-starter"); + config.addAllowedHeader("Origin"); + config.addAllowedHeader("Accept"); + config.addAllowedHeader("X-Requested-With"); + config.addAllowedHeader("Content-Type"); - // Create the interceptor and register it - CorsInterceptor interceptor = new CorsInterceptor(config); - registerInterceptor(interceptor); + config.addAllowedOrigin("*"); - } - - } - // END SNIPPET: corsInterceptor + config.addExposedHeader("Location"); + config.addExposedHeader("Content-Location"); + config.setAllowedMethods(Arrays.asList("GET", "POST", "PUT", "DELETE", "OPTIONS", "PATCH")); + // Create the interceptor and register it + CorsInterceptor interceptor = new CorsInterceptor(config); + registerInterceptor(interceptor); + } + } + // END SNIPPET: corsInterceptor - @WebServlet(urlPatterns = { "/fhir/*" }, displayName = "FHIR Server") + @WebServlet( + urlPatterns = {"/fhir/*"}, + displayName = "FHIR Server") public class RestfulServerWithResponseTerminologyTranslationInterceptor extends RestfulServer { private IValidationSupport myValidationSupport; @@ -265,7 +268,8 @@ public class ServletExamples { // START SNIPPET: ResponseTerminologyTranslationInterceptor // Create an interceptor that will map from a proprietary CodeSystem to LOINC - ResponseTerminologyTranslationInterceptor interceptor = new ResponseTerminologyTranslationInterceptor(myValidationSupport, myResponseTerminologyTranslationSvc); + ResponseTerminologyTranslationInterceptor interceptor = new ResponseTerminologyTranslationInterceptor( + myValidationSupport, myResponseTerminologyTranslationSvc); interceptor.addMappingSpecification("http://examplelabs.org", "http://loinc.org"); // Register the interceptor @@ -275,9 +279,10 @@ public class ServletExamples { } } - // START SNIPPET: preferHandling - @WebServlet(urlPatterns = { "/fhir/*" }, displayName = "FHIR Server") + @WebServlet( + urlPatterns = 
{"/fhir/*"}, + displayName = "FHIR Server") public class RestfulServerWithPreferHandling extends RestfulServer { @Override @@ -293,10 +298,7 @@ public class ServletExamples { // Register the interceptor registerInterceptor(interceptor); - } // END SNIPPET: preferHandling } - - } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/TagsExamples.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/TagsExamples.java index c8dc33a28bf..cd45b87fa3f 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/TagsExamples.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/TagsExamples.java @@ -30,52 +30,52 @@ import java.util.List; public class TagsExamples { - public static void main(String[] args) { - new TagsExamples().getResourceTags(); - } + public static void main(String[] args) { + new TagsExamples().getResourceTags(); + } - @SuppressWarnings("unused") - public void getResourceTags() { - // START SNIPPET: getResourceTags - IGenericClient client = FhirContext.forDstu2().newRestfulGenericClient("http://fhir.healthintersections.com.au/open"); - Patient p = client.read(Patient.class, "1"); + @SuppressWarnings("unused") + public void getResourceTags() { + // START SNIPPET: getResourceTags + IGenericClient client = + FhirContext.forDstu2().newRestfulGenericClient("http://fhir.healthintersections.com.au/open"); + Patient p = client.read(Patient.class, "1"); - // Retrieve the list of tags from the resource metadata + // Retrieve the list of tags from the resource metadata List tags = p.getMeta().getTag(); - // tags may be empty if no tags were read in - if (tags.isEmpty()) { - System.out.println("No tags!"); - } else { + // tags may be empty if no tags were read in + if (tags.isEmpty()) { + System.out.println("No tags!"); + } else { - // You may iterate over all the tags - for (Coding next : tags) { - System.out.println(next.getSystem() + " - " + next.getCode()); - } + // You may iterate over all the tags + for (Coding next : tags) { + System.out.println(next.getSystem() + " - " + next.getCode()); + } - // You may also get a specific tag (by system and code) + // You may also get a specific tag (by system and code) Coding tag = p.getMeta().getTag("http://hl7.org/fhir/tag", "http://foo"); - - } - // END SNIPPET: getResourceTags - } + } + // END SNIPPET: getResourceTags + } - // START SNIPPET: serverMethod - @Search - public List getAllPatients() { - ArrayList retVal = new ArrayList(); - - // Create a patient to return - Patient patient = new Patient(); - patient.setId("Patient/123"); - patient.addName().setFamily("Smith").addGiven("John"); - - // Add some tags to the patient - patient.getMeta().addTag("http://example.com/tags", "tag2", "Some tag"); + // START SNIPPET: serverMethod + @Search + public List getAllPatients() { + ArrayList retVal = new ArrayList(); + + // Create a patient to return + Patient patient = new Patient(); + patient.setId("Patient/123"); + patient.addName().setFamily("Smith").addGiven("John"); + + // Add some tags to the patient + patient.getMeta().addTag("http://example.com/tags", "tag2", "Some tag"); patient.getMeta().addTag("http://example.com/tags", "tag1", "Another tag"); - - return retVal; - } - // END SNIPPET: serverMethod - + + return retVal; + } + // END SNIPPET: serverMethod + } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ValidateDirectory.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ValidateDirectory.java index 8e5e60cdac2..7b9eadb0318 100644 --- 
a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ValidateDirectory.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ValidateDirectory.java @@ -20,11 +20,11 @@ package ca.uhn.hapi.fhir.docs; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.support.DefaultProfileValidationSupport; import ca.uhn.fhir.parser.IParser; import ca.uhn.fhir.validation.FhirValidator; import ca.uhn.fhir.validation.ValidationResult; import org.apache.commons.io.IOUtils; -import ca.uhn.fhir.context.support.DefaultProfileValidationSupport; import org.hl7.fhir.common.hapi.validation.support.PrePopulatedValidationSupport; import org.hl7.fhir.common.hapi.validation.support.ValidationSupportChain; import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator; @@ -42,82 +42,82 @@ import java.util.Map; import static org.apache.commons.lang3.StringUtils.isNotBlank; public class ValidateDirectory { - private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ValidateDirectory.class); + private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ValidateDirectory.class); - public static void main(String[] args) throws Exception { - // Load all profiles in this directory - File profileDirectory = new File("/tmp/directory/with/profiles"); + public static void main(String[] args) throws Exception { + // Load all profiles in this directory + File profileDirectory = new File("/tmp/directory/with/profiles"); - // Validate resources in this directory - File resourceDirectory = new File("/tmp/directory/with/resources/to/validate"); + // Validate resources in this directory + File resourceDirectory = new File("/tmp/directory/with/resources/to/validate"); - FhirContext ctx = FhirContext.forDstu3(); - IParser xmlParser = ctx.newXmlParser(); - IParser jsonParser = ctx.newJsonParser(); + FhirContext ctx = FhirContext.forDstu3(); + IParser xmlParser = ctx.newXmlParser(); + IParser jsonParser = ctx.newJsonParser(); - Map structureDefinitions = new HashMap<>(); - Map codeSystems = new HashMap<>(); - Map valueSets = new HashMap<>(); + Map structureDefinitions = new HashMap<>(); + Map codeSystems = new HashMap<>(); + Map valueSets = new HashMap<>(); - // Load all profile files - for (File nextFile : profileDirectory.listFiles()) { + // Load all profile files + for (File nextFile : profileDirectory.listFiles()) { - IBaseResource parsedRes = null; - if (nextFile.getAbsolutePath().toLowerCase().endsWith(".xml")) { - parsedRes = xmlParser.parseResource(new FileReader(nextFile)); - } else if (nextFile.getAbsolutePath().toLowerCase().endsWith(".json")) { - parsedRes = jsonParser.parseResource(new FileReader(nextFile)); - } else { - ourLog.info("Ignoring file: {}", nextFile.getName()); - } + IBaseResource parsedRes = null; + if (nextFile.getAbsolutePath().toLowerCase().endsWith(".xml")) { + parsedRes = xmlParser.parseResource(new FileReader(nextFile)); + } else if (nextFile.getAbsolutePath().toLowerCase().endsWith(".json")) { + parsedRes = jsonParser.parseResource(new FileReader(nextFile)); + } else { + ourLog.info("Ignoring file: {}", nextFile.getName()); + } - if (parsedRes instanceof StructureDefinition) { - StructureDefinition res = (StructureDefinition) parsedRes; - if (isNotBlank(res.getUrl())) { - structureDefinitions.put(res.getUrl(), res); - } - } else if (parsedRes instanceof ValueSet) { - ValueSet res = (ValueSet) parsedRes; - if (isNotBlank(res.getUrl())) { - valueSets.put(res.getUrl(), res); - } - } else if (parsedRes instanceof CodeSystem) { - 
CodeSystem res = (CodeSystem) parsedRes; - if (isNotBlank(res.getUrl())) { - codeSystems.put(res.getUrl(), res); - } - } - } + if (parsedRes instanceof StructureDefinition) { + StructureDefinition res = (StructureDefinition) parsedRes; + if (isNotBlank(res.getUrl())) { + structureDefinitions.put(res.getUrl(), res); + } + } else if (parsedRes instanceof ValueSet) { + ValueSet res = (ValueSet) parsedRes; + if (isNotBlank(res.getUrl())) { + valueSets.put(res.getUrl(), res); + } + } else if (parsedRes instanceof CodeSystem) { + CodeSystem res = (CodeSystem) parsedRes; + if (isNotBlank(res.getUrl())) { + codeSystems.put(res.getUrl(), res); + } + } + } - FhirInstanceValidator instanceValidator = new FhirInstanceValidator(ctx); + FhirInstanceValidator instanceValidator = new FhirInstanceValidator(ctx); - ValidationSupportChain validationSupportChain = new ValidationSupportChain(); - validationSupportChain.addValidationSupport((ca.uhn.fhir.context.support.IValidationSupport) new DefaultProfileValidationSupport(ctx)); - validationSupportChain.addValidationSupport((ca.uhn.fhir.context.support.IValidationSupport) new PrePopulatedValidationSupport(ctx, structureDefinitions, valueSets, codeSystems)); + ValidationSupportChain validationSupportChain = new ValidationSupportChain(); + validationSupportChain.addValidationSupport( + (ca.uhn.fhir.context.support.IValidationSupport) new DefaultProfileValidationSupport(ctx)); + validationSupportChain.addValidationSupport((ca.uhn.fhir.context.support.IValidationSupport) + new PrePopulatedValidationSupport(ctx, structureDefinitions, valueSets, codeSystems)); - instanceValidator.setValidationSupport(validationSupportChain); + instanceValidator.setValidationSupport(validationSupportChain); - FhirValidator val = ctx.newValidator(); - val.registerValidatorModule(instanceValidator); - - // Loop through the files in the validation directory and validate each one - for (File nextFile : resourceDirectory.listFiles()) { - - if (nextFile.getAbsolutePath().toLowerCase().endsWith(".xml")) { - ourLog.info("Going to validate: {}", nextFile.getName()); - } else if (nextFile.getAbsolutePath().toLowerCase().endsWith(".json")) { - ourLog.info("Going to validate: {}", nextFile.getName()); - } else { - ourLog.info("Ignoring file: {}", nextFile.getName()); - continue; - } - - String input = IOUtils.toString(new FileReader(nextFile)); - ValidationResult result = val.validateWithResult(input); - IBaseOperationOutcome oo = result.toOperationOutcome(); - ourLog.info("Result:\n{}", xmlParser.setPrettyPrint(true).encodeResourceToString(oo)); - } - - } + FhirValidator val = ctx.newValidator(); + val.registerValidatorModule(instanceValidator); + // Loop through the files in the validation directory and validate each one + for (File nextFile : resourceDirectory.listFiles()) { + + if (nextFile.getAbsolutePath().toLowerCase().endsWith(".xml")) { + ourLog.info("Going to validate: {}", nextFile.getName()); + } else if (nextFile.getAbsolutePath().toLowerCase().endsWith(".json")) { + ourLog.info("Going to validate: {}", nextFile.getName()); + } else { + ourLog.info("Ignoring file: {}", nextFile.getName()); + continue; + } + + String input = IOUtils.toString(new FileReader(nextFile)); + ValidationResult result = val.validateWithResult(input); + IBaseOperationOutcome oo = result.toOperationOutcome(); + ourLog.info("Result:\n{}", xmlParser.setPrettyPrint(true).encodeResourceToString(oo)); + } + } } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ValidatorExamples.java 
b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ValidatorExamples.java index 3ad82a35f33..ed304aa3b48 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ValidatorExamples.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ValidatorExamples.java @@ -19,12 +19,12 @@ */ package ca.uhn.hapi.fhir.docs; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.support.ConceptValidationOptions; import ca.uhn.fhir.context.support.DefaultProfileValidationSupport; import ca.uhn.fhir.context.support.IValidationSupport; import ca.uhn.fhir.context.support.ValidationSupportContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.parser.IParser; import ca.uhn.fhir.parser.StrictErrorHandler; import ca.uhn.fhir.rest.client.api.IGenericClient; @@ -45,8 +45,8 @@ import org.hl7.fhir.common.hapi.validation.support.PrePopulatedValidationSupport import org.hl7.fhir.common.hapi.validation.support.RemoteTerminologyServiceValidationSupport; import org.hl7.fhir.common.hapi.validation.support.SnapshotGeneratingValidationSupport; import org.hl7.fhir.common.hapi.validation.support.ValidationSupportChain; -import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator; +import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.r4.model.ContactPoint; import org.hl7.fhir.r4.model.Observation; import org.hl7.fhir.r4.model.OperationOutcome; @@ -55,17 +55,17 @@ import org.hl7.fhir.r4.model.StringType; import org.hl7.fhir.r4.model.StructureDefinition; import org.hl7.fhir.r4.model.ValueSet; -import javax.annotation.Nonnull; -import javax.servlet.ServletException; import java.io.File; import java.io.FileReader; import java.util.List; +import javax.annotation.Nonnull; +import javax.servlet.ServletException; @SuppressWarnings({"serial", "unused"}) public class ValidatorExamples { - public void validationIntro() { - // START SNIPPET: validationIntro + public void validationIntro() { + // START SNIPPET: validationIntro FhirContext ctx = FhirContext.forR4(); // Ask the context for a validator @@ -91,175 +91,174 @@ public class ValidatorExamples { for (SingleValidationMessage next : result.getMessages()) { System.out.println(next.getLocationString() + " " + next.getMessage()); } - // END SNIPPET: validationIntro - } - - // START SNIPPET: serverValidation - public class MyRestfulServer extends RestfulServer { + // END SNIPPET: validationIntro + } - @Override - protected void initialize() throws ServletException { - // ...Configure resource providers, etc... 
- - // Create a context, set the error handler and instruct - // the server to use it - FhirContext ctx = FhirContext.forR4(); - ctx.setParserErrorHandler(new StrictErrorHandler()); - setFhirContext(ctx); - } - - } - // END SNIPPET: serverValidation + // START SNIPPET: serverValidation + public class MyRestfulServer extends RestfulServer { - @SuppressWarnings("unused") - public void enableValidation() { - // START SNIPPET: clientValidation - FhirContext ctx = FhirContext.forR4(); - - ctx.setParserErrorHandler(new StrictErrorHandler()); - - // This client will have strict parser validation enabled - IGenericClient client = ctx.newRestfulGenericClient("http://hapi.fhir.org/baseR4"); - // END SNIPPET: clientValidation - - } - - public void parserValidation() { - // START SNIPPET: parserValidation - FhirContext ctx = FhirContext.forR4(); - - // Create a parser and configure it to use the strict error handler - IParser parser = ctx.newXmlParser(); - parser.setParserErrorHandler(new StrictErrorHandler()); + @Override + protected void initialize() throws ServletException { + // ...Configure resource providers, etc... - // This example resource is invalid, as Patient.active can not repeat - String input = ""; + // Create a context, set the error handler and instruct + // the server to use it + FhirContext ctx = FhirContext.forR4(); + ctx.setParserErrorHandler(new StrictErrorHandler()); + setFhirContext(ctx); + } + } + // END SNIPPET: serverValidation - // The following will throw a DataFormatException because of the StrictErrorHandler - parser.parseResource(Patient.class, input); - // END SNIPPET: parserValidation - } + @SuppressWarnings("unused") + public void enableValidation() { + // START SNIPPET: clientValidation + FhirContext ctx = FhirContext.forR4(); - public void validateResource() { - // START SNIPPET: basicValidation - // As always, you need a context - FhirContext ctx = FhirContext.forR4(); + ctx.setParserErrorHandler(new StrictErrorHandler()); - // Create and populate a new patient object - Patient p = new Patient(); - p.addName().setFamily("Smith").addGiven("John").addGiven("Q"); - p.addIdentifier().setSystem("urn:foo:identifiers").setValue("12345"); - p.addTelecom().setSystem(ContactPoint.ContactPointSystem.PHONE).setValue("416 123-4567"); + // This client will have strict parser validation enabled + IGenericClient client = ctx.newRestfulGenericClient("http://hapi.fhir.org/baseR4"); + // END SNIPPET: clientValidation - // Request a validator and apply it - FhirValidator val = ctx.newValidator(); + } - // Create the Schema/Schematron modules and register them. Note that - // you might want to consider keeping these modules around as long-term - // objects: they parse and then store schemas, which can be an expensive - // operation. - IValidatorModule module1 = new SchemaBaseValidator(ctx); - IValidatorModule module2 = new SchematronBaseValidator(ctx); - val.registerValidatorModule(module1); - val.registerValidatorModule(module2); + public void parserValidation() { + // START SNIPPET: parserValidation + FhirContext ctx = FhirContext.forR4(); - ValidationResult result = val.validateWithResult(p); - if (result.isSuccessful()) { - - System.out.println("Validation passed"); - - } else { - // We failed validation! 
- System.out.println("Validation failed"); - } - - // The result contains a list of "messages" - List messages = result.getMessages(); - for (SingleValidationMessage next : messages) { - System.out.println("Message:"); - System.out.println(" * Location: " + next.getLocationString()); - System.out.println(" * Severity: " + next.getSeverity()); - System.out.println(" * Message : " + next.getMessage()); - } - - // You can also convert the results into an OperationOutcome resource - OperationOutcome oo = (OperationOutcome) result.toOperationOutcome(); - String results = ctx.newXmlParser().setPrettyPrint(true).encodeResourceToString(oo); - System.out.println(results); - // END SNIPPET: basicValidation + // Create a parser and configure it to use the strict error handler + IParser parser = ctx.newXmlParser(); + parser.setParserErrorHandler(new StrictErrorHandler()); - } + // This example resource is invalid, as Patient.active can not repeat + String input = ""; - public static void main(String[] args) throws Exception { - instanceValidator(); + // The following will throw a DataFormatException because of the StrictErrorHandler + parser.parseResource(Patient.class, input); + // END SNIPPET: parserValidation + } - } + public void validateResource() { + // START SNIPPET: basicValidation + // As always, you need a context + FhirContext ctx = FhirContext.forR4(); - private static void instanceValidator() throws Exception { - // START SNIPPET: instanceValidator - FhirContext ctx = FhirContext.forR4(); + // Create and populate a new patient object + Patient p = new Patient(); + p.addName().setFamily("Smith").addGiven("John").addGiven("Q"); + p.addIdentifier().setSystem("urn:foo:identifiers").setValue("12345"); + p.addTelecom().setSystem(ContactPoint.ContactPointSystem.PHONE).setValue("416 123-4567"); - // Create a validation support chain + // Request a validator and apply it + FhirValidator val = ctx.newValidator(); + + // Create the Schema/Schematron modules and register them. Note that + // you might want to consider keeping these modules around as long-term + // objects: they parse and then store schemas, which can be an expensive + // operation. + IValidatorModule module1 = new SchemaBaseValidator(ctx); + IValidatorModule module2 = new SchematronBaseValidator(ctx); + val.registerValidatorModule(module1); + val.registerValidatorModule(module2); + + ValidationResult result = val.validateWithResult(p); + if (result.isSuccessful()) { + + System.out.println("Validation passed"); + + } else { + // We failed validation! 
+ System.out.println("Validation failed"); + } + + // The result contains a list of "messages" + List messages = result.getMessages(); + for (SingleValidationMessage next : messages) { + System.out.println("Message:"); + System.out.println(" * Location: " + next.getLocationString()); + System.out.println(" * Severity: " + next.getSeverity()); + System.out.println(" * Message : " + next.getMessage()); + } + + // You can also convert the results into an OperationOutcome resource + OperationOutcome oo = (OperationOutcome) result.toOperationOutcome(); + String results = ctx.newXmlParser().setPrettyPrint(true).encodeResourceToString(oo); + System.out.println(results); + // END SNIPPET: basicValidation + + } + + public static void main(String[] args) throws Exception { + instanceValidator(); + } + + private static void instanceValidator() throws Exception { + // START SNIPPET: instanceValidator + FhirContext ctx = FhirContext.forR4(); + + // Create a validation support chain ValidationSupportChain validationSupportChain = new ValidationSupportChain( - new DefaultProfileValidationSupport(ctx), - new InMemoryTerminologyServerValidationSupport(ctx), - new CommonCodeSystemsTerminologyService(ctx) - ); + new DefaultProfileValidationSupport(ctx), + new InMemoryTerminologyServerValidationSupport(ctx), + new CommonCodeSystemsTerminologyService(ctx)); - // Create a FhirInstanceValidator and register it to a validator - FhirValidator validator = ctx.newValidator(); - FhirInstanceValidator instanceValidator = new FhirInstanceValidator(validationSupportChain); - validator.registerValidatorModule(instanceValidator); - - /* - * If you want, you can configure settings on the validator to adjust - * its behaviour during validation - */ - instanceValidator.setAnyExtensionsAllowed(true); - - - /* - * Let's create a resource to validate. This Observation has some fields - * populated, but it is missing Observation.status, which is mandatory. - */ - Observation obs = new Observation(); - obs.getCode().addCoding().setSystem("http://loinc.org").setCode("12345-6"); - obs.setValue(new StringType("This is a value")); - - // Validate - ValidationResult result = validator.validateWithResult(obs); + // Create a FhirInstanceValidator and register it to a validator + FhirValidator validator = ctx.newValidator(); + FhirInstanceValidator instanceValidator = new FhirInstanceValidator(validationSupportChain); + validator.registerValidatorModule(instanceValidator); - /* - * Note: You can also explicitly declare a profile to validate against - * using the block below. - */ - // ValidationResult result = validator.validateWithResult(obs, new ValidationOptions().addProfile("http://myprofile.com")); + /* + * If you want, you can configure settings on the validator to adjust + * its behaviour during validation + */ + instanceValidator.setAnyExtensionsAllowed(true); - // Do we have any errors or fatal errors? 
- System.out.println(result.isSuccessful()); // false - - // Show the issues - for (SingleValidationMessage next : result.getMessages()) { - System.out.println(" Next issue " + next.getSeverity() + " - " + next.getLocationString() + " - " + next.getMessage()); - } - // Prints: - // Next issue ERROR - /f:Observation - Element '/f:Observation.status': minimum required = 1, but only found 0 - // Next issue WARNING - /f:Observation/f:code - Unable to validate code "12345-6" in code system "http://loinc.org" - - // You can also convert the result into an operation outcome if you - // need to return one from a server - OperationOutcome oo = (OperationOutcome) result.toOperationOutcome(); - // END SNIPPET: instanceValidator - } - - private static void instanceValidatorCustom() throws Exception { - // START SNIPPET: instanceValidatorCustom - FhirContext ctx = FhirContext.forR4(); + /* + * Let's create a resource to validate. This Observation has some fields + * populated, but it is missing Observation.status, which is mandatory. + */ + Observation obs = new Observation(); + obs.getCode().addCoding().setSystem("http://loinc.org").setCode("12345-6"); + obs.setValue(new StringType("This is a value")); - // Create a FhirInstanceValidator and register it to a validator - FhirValidator validator = ctx.newValidator(); - FhirInstanceValidator instanceValidator = new FhirInstanceValidator(ctx); - validator.registerValidatorModule(instanceValidator); + // Validate + ValidationResult result = validator.validateWithResult(obs); + + /* + * Note: You can also explicitly declare a profile to validate against + * using the block below. + */ + // ValidationResult result = validator.validateWithResult(obs, new + // ValidationOptions().addProfile("http://myprofile.com")); + + // Do we have any errors or fatal errors? 
+ System.out.println(result.isSuccessful()); // false + + // Show the issues + for (SingleValidationMessage next : result.getMessages()) { + System.out.println( + " Next issue " + next.getSeverity() + " - " + next.getLocationString() + " - " + next.getMessage()); + } + // Prints: + // Next issue ERROR - /f:Observation - Element '/f:Observation.status': minimum required = 1, but only found 0 + // Next issue WARNING - /f:Observation/f:code - Unable to validate code "12345-6" in code system + // "http://loinc.org" + + // You can also convert the result into an operation outcome if you + // need to return one from a server + OperationOutcome oo = (OperationOutcome) result.toOperationOutcome(); + // END SNIPPET: instanceValidator + } + + private static void instanceValidatorCustom() throws Exception { + // START SNIPPET: instanceValidatorCustom + FhirContext ctx = FhirContext.forR4(); + + // Create a FhirInstanceValidator and register it to a validator + FhirValidator validator = ctx.newValidator(); + FhirInstanceValidator instanceValidator = new FhirInstanceValidator(ctx); + validator.registerValidatorModule(instanceValidator); IValidationSupport valSupport = new IValidationSupport() { @@ -288,19 +287,30 @@ public class ValidatorExamples { } @Override - public boolean isCodeSystemSupported(ValidationSupportContext theValidationSupportContext, String theSystem) { + public boolean isCodeSystemSupported( + ValidationSupportContext theValidationSupportContext, String theSystem) { // TODO: implement (or return null if your implementation does not support this function) return false; } @Override - public CodeValidationResult validateCode(@Nonnull ValidationSupportContext theValidationSupportContext, @Nonnull ConceptValidationOptions theOptions, String theCodeSystem, String theCode, String theDisplay, String theValueSetUrl) { + public CodeValidationResult validateCode( + @Nonnull ValidationSupportContext theValidationSupportContext, + @Nonnull ConceptValidationOptions theOptions, + String theCodeSystem, + String theCode, + String theDisplay, + String theValueSetUrl) { // TODO: implement (or return null if your implementation does not support this function) return null; } @Override - public LookupCodeResult lookupCode(ValidationSupportContext theValidationSupportContext, String theSystem, String theCode, String theDisplayLanguage) { + public LookupCodeResult lookupCode( + ValidationSupportContext theValidationSupportContext, + String theSystem, + String theCode, + String theDisplayLanguage) { // TODO: implement (or return null if your implementation does not support this function) return null; } @@ -309,30 +319,29 @@ public class ValidatorExamples { public FhirContext getFhirContext() { return ctx; } - }; - - /* - * ValidationSupportChain strings multiple instances of IValidationSupport together. The - * code below is useful because it means that when the validator wants to load a - * StructureDefinition or a ValueSet, it will first use DefaultProfileValidationSupport, - * which loads the default HL7 versions. Any StructureDefinitions which are not found in - * the built-in set are delegated to your custom implementation. - */ - ValidationSupportChain support = new ValidationSupportChain(new DefaultProfileValidationSupport(ctx), valSupport); - instanceValidator.setValidationSupport(support); - - // END SNIPPET: instanceValidatorCustom - } + /* + * ValidationSupportChain strings multiple instances of IValidationSupport together. 
The + * code below is useful because it means that when the validator wants to load a + * StructureDefinition or a ValueSet, it will first use DefaultProfileValidationSupport, + * which loads the default HL7 versions. Any StructureDefinitions which are not found in + * the built-in set are delegated to your custom implementation. + */ + ValidationSupportChain support = + new ValidationSupportChain(new DefaultProfileValidationSupport(ctx), valSupport); + instanceValidator.setValidationSupport(support); - public void validateSupplyProfiles() { + // END SNIPPET: instanceValidatorCustom + } - StructureDefinition someStructureDefnition = null; - ValueSet someValueSet = null; - String input = null; + public void validateSupplyProfiles() { - // START SNIPPET: validateSupplyProfiles + StructureDefinition someStructureDefnition = null; + ValueSet someValueSet = null; + String input = null; + + // START SNIPPET: validateSupplyProfiles FhirContext ctx = FhirContext.forR4(); // Create a chain that will hold our modules @@ -372,7 +381,6 @@ public class ValidatorExamples { } - public void validateUsingRemoteTermServer() { StructureDefinition someStructureDefnition = null; @@ -408,43 +416,40 @@ public class ValidatorExamples { } + @SuppressWarnings("unused") + private static void validateFiles() throws Exception { + // START SNIPPET: validateFiles + FhirContext ctx = FhirContext.forR4(); - @SuppressWarnings("unused") - private static void validateFiles() throws Exception { - // START SNIPPET: validateFiles - FhirContext ctx = FhirContext.forR4(); + // Create a validator and configure it + FhirValidator validator = ctx.newValidator(); + validator.setValidateAgainstStandardSchema(true); + validator.setValidateAgainstStandardSchematron(true); - // Create a validator and configure it - FhirValidator validator = ctx.newValidator(); - validator.setValidateAgainstStandardSchema(true); - validator.setValidateAgainstStandardSchematron(true); + // Get a list of files in a given directory + String[] fileList = new File("/home/some/dir").list(new WildcardFileFilter("*.txt")); + for (String nextFile : fileList) { - // Get a list of files in a given directory - String[] fileList = new File("/home/some/dir").list(new WildcardFileFilter("*.txt")); - for (String nextFile : fileList) { + // For each file, load the contents into a string + String nextFileContents = IOUtils.toString(new FileReader(nextFile)); - // For each file, load the contents into a string - String nextFileContents = IOUtils.toString(new FileReader(nextFile)); + // Parse that string (this example assumes JSON encoding) + IBaseResource resource = ctx.newJsonParser().parseResource(nextFileContents); - // Parse that string (this example assumes JSON encoding) - IBaseResource resource = ctx.newJsonParser().parseResource(nextFileContents); + // Apply the validation. This will throw an exception on the first + // validation failure + ValidationResult result = validator.validateWithResult(resource); + if (result.isSuccessful() == false) { + throw new Exception(Msg.code(640) + "We failed!"); + } + } - // Apply the validation. 
This will throw an exception on the first - // validation failure - ValidationResult result = validator.validateWithResult(resource); - if (result.isSuccessful() == false) { - throw new Exception(Msg.code(640) + "We failed!"); - } - - } + // END SNIPPET: validateFiles + } - // END SNIPPET: validateFiles - } - - - @SuppressWarnings("unused") - private static void npm() throws Exception { - // START SNIPPET: npm + @SuppressWarnings("unused") + private static void npm() throws Exception { + // START SNIPPET: npm // Create an NPM Package Support module and load one package in from // the classpath FhirContext ctx = FhirContext.forR4(); @@ -453,12 +458,11 @@ public class ValidatorExamples { // Create a support chain including the NPM Package Support ValidationSupportChain validationSupportChain = new ValidationSupportChain( - npmPackageSupport, - new DefaultProfileValidationSupport(ctx), - new CommonCodeSystemsTerminologyService(ctx), - new InMemoryTerminologyServerValidationSupport(ctx), - new SnapshotGeneratingValidationSupport(ctx) - ); + npmPackageSupport, + new DefaultProfileValidationSupport(ctx), + new CommonCodeSystemsTerminologyService(ctx), + new InMemoryTerminologyServerValidationSupport(ctx), + new SnapshotGeneratingValidationSupport(ctx)); CachingValidationSupport validationSupport = new CachingValidationSupport(validationSupportChain); // Create a validator. Note that for good performance you can create as many validator objects @@ -475,10 +479,6 @@ public class ValidatorExamples { // Perform the validation ValidationResult outcome = validator.validateWithResult(patient); - // END SNIPPET: npm - } - - - - + // END SNIPPET: npm + } } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ValidatorExamplesDstu3.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ValidatorExamplesDstu3.java index cc0c1469384..38fd9932691 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ValidatorExamplesDstu3.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/ValidatorExamplesDstu3.java @@ -20,48 +20,48 @@ package ca.uhn.hapi.fhir.docs; import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.validation.FhirValidator; import ca.uhn.fhir.context.support.DefaultProfileValidationSupport; +import ca.uhn.fhir.validation.FhirValidator; import org.hl7.fhir.common.hapi.validation.support.ValidationSupportChain; import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator; public class ValidatorExamplesDstu3 { - public void validateProfileDstu3() { - // START SNIPPET: validateFiles - - FhirContext ctx = FhirContext.forDstu3(); - FhirValidator validator = ctx.newValidator(); - - // Typically if you are doing profile validation, you want to disable - // the schema/schematron validation since the profile will specify - // all the same rules (and more) - validator.setValidateAgainstStandardSchema(false); - validator.setValidateAgainstStandardSchematron(false); - - // FhirInstanceValidator is the validation module that handles - // profile validation. So, create an InstanceValidator module - // and register it to the validator. - FhirInstanceValidator instanceVal = new FhirInstanceValidator(ctx); - validator.registerValidatorModule(instanceVal); + public void validateProfileDstu3() { + // START SNIPPET: validateFiles - // FhirInstanceValidator requires an instance of "IValidationSupport" in - // order to function. This module is used by the validator to actually obtain - // all of the resources it needs in order to perform validation. 
Specifically, - // the validator uses it to fetch StructureDefinitions, ValueSets, CodeSystems, - // etc, as well as to perform terminology validation. - // - // The implementation used here (ValidationSupportChain) is allows for - // multiple implementations to be used in a chain, where if a specific resource - // is needed the whole chain is tried and the first module which is actually - // able to answer is used. The first entry in the chain that we register is - // the DefaultProfileValidationSupport, which supplies the "built-in" FHIR - // StructureDefinitions and ValueSets - ValidationSupportChain validationSupportChain = new ValidationSupportChain(); - validationSupportChain.addValidationSupport((ca.uhn.fhir.context.support.IValidationSupport) new DefaultProfileValidationSupport(ctx)); - instanceVal.setValidationSupport(validationSupportChain); - - // END SNIPPET: validateFiles - } + FhirContext ctx = FhirContext.forDstu3(); + FhirValidator validator = ctx.newValidator(); + // Typically if you are doing profile validation, you want to disable + // the schema/schematron validation since the profile will specify + // all the same rules (and more) + validator.setValidateAgainstStandardSchema(false); + validator.setValidateAgainstStandardSchematron(false); + + // FhirInstanceValidator is the validation module that handles + // profile validation. So, create an InstanceValidator module + // and register it to the validator. + FhirInstanceValidator instanceVal = new FhirInstanceValidator(ctx); + validator.registerValidatorModule(instanceVal); + + // FhirInstanceValidator requires an instance of "IValidationSupport" in + // order to function. This module is used by the validator to actually obtain + // all of the resources it needs in order to perform validation. Specifically, + // the validator uses it to fetch StructureDefinitions, ValueSets, CodeSystems, + // etc, as well as to perform terminology validation. + // + // The implementation used here (ValidationSupportChain) is allows for + // multiple implementations to be used in a chain, where if a specific resource + // is needed the whole chain is tried and the first module which is actually + // able to answer is used. 
The first entry in the chain that we register is + // the DefaultProfileValidationSupport, which supplies the "built-in" FHIR + // StructureDefinitions and ValueSets + ValidationSupportChain validationSupportChain = new ValidationSupportChain(); + validationSupportChain.addValidationSupport( + (ca.uhn.fhir.context.support.IValidationSupport) new DefaultProfileValidationSupport(ctx)); + instanceVal.setValidationSupport(validationSupportChain); + + // END SNIPPET: validateFiles + } } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/customtype/CustomCompositeExtension.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/customtype/CustomCompositeExtension.java index e6843ec2833..d0012473864 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/customtype/CustomCompositeExtension.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/customtype/CustomCompositeExtension.java @@ -28,7 +28,7 @@ import org.hl7.fhir.dstu3.model.BackboneElement; import org.hl7.fhir.dstu3.model.Patient; import org.hl7.fhir.dstu3.model.StringType; -//START SNIPPET: resource +// START SNIPPET: resource @ResourceDef(name = "Patient") public class CustomCompositeExtension extends Patient { @@ -38,7 +38,7 @@ public class CustomCompositeExtension extends Patient { * A custom extension */ @Child(name = "foo") - @Extension(url="http://acme.org/fooParent", definedLocally = false, isModifier = false) + @Extension(url = "http://acme.org/fooParent", definedLocally = false, isModifier = false) protected FooParentExtension fooParentExtension; public FooParentExtension getFooParentExtension() { @@ -95,8 +95,6 @@ public class CustomCompositeExtension extends Patient { public void setChildB(StringType theChildB) { myChildB = theChildB; } - } - } -//END SNIPPET: resource +// END SNIPPET: resource diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/customtype/CustomDatatype.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/customtype/CustomDatatype.java index f227f7d3b89..f2550591bce 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/customtype/CustomDatatype.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/customtype/CustomDatatype.java @@ -19,7 +19,7 @@ */ package ca.uhn.hapi.fhir.docs.customtype; -//START SNIPPET: datatype +// START SNIPPET: datatype import ca.uhn.fhir.model.api.annotation.Child; import ca.uhn.fhir.model.api.annotation.DatatypeDef; @@ -30,33 +30,32 @@ import org.hl7.fhir.dstu3.model.Type; import org.hl7.fhir.instance.model.api.ICompositeType; /** - * This is an example of a custom datatype. - * + * This is an example of a custom datatype. + * * This is an STU3 example so it extends Type and implements ICompositeType. For * DSTU2 it would extend BaseIdentifiableElement and implement ICompositeDatatype. 
*/ -@DatatypeDef(name="CustomDatatype") +@DatatypeDef(name = "CustomDatatype") public class CustomDatatype extends Type implements ICompositeType { private static final long serialVersionUID = 1L; - + @Child(name = "date", order = 0, min = 1, max = 1) private DateTimeType myDate; - @Child(name = "kittens", order = 1, min = 1, max = 1) - private StringType myKittens; + @Child(name = "kittens", order = 1, min = 1, max = 1) + private StringType myKittens; public DateTimeType getDate() { - if (myDate == null) - myDate = new DateTimeType(); + if (myDate == null) myDate = new DateTimeType(); return myDate; } - public StringType getKittens() { - return myKittens; - } + public StringType getKittens() { + return myKittens; + } - @Override + @Override public boolean isEmpty() { return ElementUtil.isEmpty(myDate, myKittens); } @@ -67,9 +66,9 @@ public class CustomDatatype extends Type implements ICompositeType { } public CustomDatatype setKittens(StringType theKittens) { - myKittens = theKittens; - return this; - } + myKittens = theKittens; + return this; + } @Override protected CustomDatatype typedCopy() { @@ -79,4 +78,4 @@ public class CustomDatatype extends Type implements ICompositeType { return retVal; } } -//END SNIPPET: datatype +// END SNIPPET: datatype diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/customtype/CustomResource.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/customtype/CustomResource.java index fffc14f427d..d4d0e012107 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/customtype/CustomResource.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/customtype/CustomResource.java @@ -36,9 +36,9 @@ import java.util.List; /** * This is an example of a custom resource that also uses a custom * datatype. - * + * * Note that we are extending DomainResource for an STU3 - * resource. For DSTU2 it would be BaseResource. + * resource. For DSTU2 it would be BaseResource. */ @ResourceDef(name = "CustomResource", profile = "http://hl7.org/fhir/profiles/custom-resource") public class CustomResource extends DomainResource { @@ -49,34 +49,34 @@ public class CustomResource extends DomainResource { * We give the resource a field with name "television". This field has no * specific type, so it's a choice[x] field for any type. 
*/ - @Child(name="television", min=1, max=Child.MAX_UNLIMITED, order=0) + @Child(name = "television", min = 1, max = Child.MAX_UNLIMITED, order = 0) private List myTelevision; /** * We'll give it one more field called "dogs" */ - @Child(name = "dogs", min=0, max=1, order=1) + @Child(name = "dogs", min = 0, max = 1, order = 1) private StringType myDogs; - + @Override public CustomResource copy() { CustomResource retVal = new CustomResource(); super.copyValues(retVal); retVal.myTelevision = myTelevision; - retVal.myDogs = myDogs; + retVal.myDogs = myDogs; return retVal; } - public List getTelevision() { - if (myTelevision == null) { - myTelevision = new ArrayList(); - } + public List getTelevision() { + if (myTelevision == null) { + myTelevision = new ArrayList(); + } return myTelevision; } - public StringType getDogs() { - return myDogs; - } + public StringType getDogs() { + return myDogs; + } @Override public ResourceType getResourceType() { @@ -98,8 +98,7 @@ public class CustomResource extends DomainResource { } public void setDogs(StringType theDogs) { - myDogs = theDogs; - } - + myDogs = theDogs; + } } // END SNIPPET: resource diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/customtype/CustomUsage.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/customtype/CustomUsage.java index 4848542dbc5..a21fe46b635 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/customtype/CustomUsage.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/customtype/CustomUsage.java @@ -28,35 +28,34 @@ import java.util.Date; public class CustomUsage { - public static void main(String[] args) { - - // START SNIPPET: usage - // Create a context. Note that we declare the custom types we'll be using - // on the context before actually using them - FhirContext ctx = FhirContext.forDstu3(); - ctx.registerCustomType(CustomResource.class); - ctx.registerCustomType(CustomDatatype.class); - - // Now let's create an instance of our custom resource type - // and populate it with some data - CustomResource res = new CustomResource(); - - // Add some values, including our custom datatype - DateType value0 = new DateType("2015-01-01"); - res.getTelevision().add(value0); - - CustomDatatype value1 = new CustomDatatype(); - value1.setDate(new DateTimeType(new Date())); - value1.setKittens(new StringType("FOO")); - res.getTelevision().add(value1); - - res.setDogs(new StringType("Some Dogs")); - - // Now let's serialize our instance - String output = ctx.newXmlParser().setPrettyPrint(true).encodeResourceToString(res); - System.out.println(output); - // END SNIPPET: usage - - } - + public static void main(String[] args) { + + // START SNIPPET: usage + // Create a context. 
Note that we declare the custom types we'll be using + // on the context before actually using them + FhirContext ctx = FhirContext.forDstu3(); + ctx.registerCustomType(CustomResource.class); + ctx.registerCustomType(CustomDatatype.class); + + // Now let's create an instance of our custom resource type + // and populate it with some data + CustomResource res = new CustomResource(); + + // Add some values, including our custom datatype + DateType value0 = new DateType("2015-01-01"); + res.getTelevision().add(value0); + + CustomDatatype value1 = new CustomDatatype(); + value1.setDate(new DateTimeType(new Date())); + value1.setKittens(new StringType("FOO")); + res.getTelevision().add(value1); + + res.setDogs(new StringType("Some Dogs")); + + // Now let's serialize our instance + String output = ctx.newXmlParser().setPrettyPrint(true).encodeResourceToString(res); + System.out.println(output); + // END SNIPPET: usage + + } } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/interceptor/HeaderBasedBinarySecurityContextInterceptor.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/interceptor/HeaderBasedBinarySecurityContextInterceptor.java index 8ad829c3370..2c6c4d5e8f5 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/interceptor/HeaderBasedBinarySecurityContextInterceptor.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/interceptor/HeaderBasedBinarySecurityContextInterceptor.java @@ -22,7 +22,6 @@ package ca.uhn.hapi.fhir.docs.interceptor; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.interceptor.binary.BinarySecurityContextInterceptor; -import org.hl7.fhir.instance.model.api.IBaseBinary; /** * This class is mostly intended as an example implementation of the @@ -54,7 +53,8 @@ public class HeaderBasedBinarySecurityContextInterceptor extends BinarySecurityC * @param theRequestDetails The request details associated with this request */ @Override - protected boolean securityContextIdentifierAllowed(String theSecurityContextSystem, String theSecurityContextValue, RequestDetails theRequestDetails) { + protected boolean securityContextIdentifierAllowed( + String theSecurityContextSystem, String theSecurityContextValue, RequestDetails theRequestDetails) { // In our simple example, we will use an incoming header called X-SecurityContext-Allowed-Identifier // to determine whether the security context is allowed. 
This is typically not what you diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/interceptor/MyTestInterceptor.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/interceptor/MyTestInterceptor.java index a9689b7e660..4a3ef534b7d 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/interceptor/MyTestInterceptor.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/interceptor/MyTestInterceptor.java @@ -32,7 +32,8 @@ import ca.uhn.fhir.jpa.subscription.model.ResourceDeliveryMessage; public class MyTestInterceptor { @Hook(Pointcut.SUBSCRIPTION_BEFORE_REST_HOOK_DELIVERY) - public boolean beforeRestHookDelivery(ResourceDeliveryMessage theDeliveryMessage, CanonicalSubscription theSubscription) { + public boolean beforeRestHookDelivery( + ResourceDeliveryMessage theDeliveryMessage, CanonicalSubscription theSubscription) { String header = "Authorization: Bearer 1234567"; @@ -40,5 +41,4 @@ public class MyTestInterceptor { return true; } - } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/interceptor/PatientNameModifierMdmPreProcessingInterceptor.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/interceptor/PatientNameModifierMdmPreProcessingInterceptor.java index 53ab62d13f2..a5594bea484 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/interceptor/PatientNameModifierMdmPreProcessingInterceptor.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/interceptor/PatientNameModifierMdmPreProcessingInterceptor.java @@ -24,6 +24,7 @@ import ca.uhn.fhir.interceptor.api.Pointcut; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.r4.model.HumanName; import org.hl7.fhir.r4.model.Patient; + import java.util.List; import java.util.stream.Collectors; @@ -44,8 +45,8 @@ public class PatientNameModifierMdmPreProcessingInterceptor { List nameList = patient.getName(); List validHumanNameList = nameList.stream() - .filter(theHumanName -> !myNamesToIgnore.contains(theHumanName.getNameAsSingleString())) - .collect(Collectors.toList()); + .filter(theHumanName -> !myNamesToIgnore.contains(theHumanName.getNameAsSingleString())) + .collect(Collectors.toList()); patient.setName(validHumanNameList); } diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/interceptor/TagTrimmingInterceptor.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/interceptor/TagTrimmingInterceptor.java index 0d8c169f0b7..33880d96446 100644 --- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/interceptor/TagTrimmingInterceptor.java +++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/interceptor/TagTrimmingInterceptor.java @@ -47,6 +47,5 @@ public class TagTrimmingInterceptor { theResource.getMeta().getProfile().clear(); theResource.getMeta().getSecurity().clear(); } - } // END SNIPPET: TagTrimmingInterceptor diff --git a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/client/JaxRsHttpClient.java b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/client/JaxRsHttpClient.java index 3e7e964f984..8ea09b92709 100644 --- a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/client/JaxRsHttpClient.java +++ b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/client/JaxRsHttpClient.java @@ -29,16 +29,16 @@ import ca.uhn.fhir.rest.client.api.IHttpClient; import ca.uhn.fhir.rest.client.api.IHttpRequest; import ca.uhn.fhir.rest.client.impl.BaseHttpClientInvocation; import ca.uhn.fhir.rest.client.method.MethodUtil; +import org.hl7.fhir.instance.model.api.IBaseBinary; + +import java.util.List; 
+import java.util.Map; import javax.ws.rs.client.Client; import javax.ws.rs.client.Entity; import javax.ws.rs.client.Invocation.Builder; import javax.ws.rs.core.Form; import javax.ws.rs.core.MultivaluedHashMap; import javax.ws.rs.core.MultivaluedMap; -import org.hl7.fhir.instance.model.api.IBaseBinary; - -import java.util.List; -import java.util.Map; /** * A Http Request based on JaxRs. This is an adapter around the class @@ -55,8 +55,13 @@ public class JaxRsHttpClient implements IHttpClient { private String myIfNoneExistString; private RequestTypeEnum myRequestType; - public JaxRsHttpClient(Client theClient, StringBuilder theUrl, Map<String, List<String>> theIfNoneExistParams, String theIfNoneExistString, - RequestTypeEnum theRequestType, List<Header>
    theHeaders) { + public JaxRsHttpClient( + Client theClient, + StringBuilder theUrl, + Map<String, List<String>> theIfNoneExistParams, + String theIfNoneExistString, + RequestTypeEnum theRequestType, + List<Header>
    theHeaders) { this.myClient = theClient; this.myUrl = theUrl; this.myIfNoneExistParams = theIfNoneExistParams; @@ -66,7 +71,8 @@ public class JaxRsHttpClient implements IHttpClient { } @Override - public IHttpRequest createByteRequest(FhirContext theContext, String theContents, String theContentType, EncodingEnum theEncoding) { + public IHttpRequest createByteRequest( + FhirContext theContext, String theContents, String theContentType, EncodingEnum theEncoding) { Entity entity = Entity.entity(theContents, theContentType + Constants.HEADER_SUFFIX_CT_UTF_8); JaxRsHttpRequest retVal = createHttpRequest(entity); addHeadersToRequest(retVal, theEncoding, theContext); @@ -75,7 +81,8 @@ public class JaxRsHttpClient implements IHttpClient { } @Override - public IHttpRequest createParamRequest(FhirContext theContext, Map> theParams, EncodingEnum theEncoding) { + public IHttpRequest createParamRequest( + FhirContext theContext, Map> theParams, EncodingEnum theEncoding) { MultivaluedMap map = new MultivaluedHashMap(); for (Map.Entry> nextParam : theParams.entrySet()) { List value = nextParam.getValue(); @@ -150,5 +157,4 @@ public class JaxRsHttpClient implements IHttpClient { } return b; } - } diff --git a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/client/JaxRsHttpRequest.java b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/client/JaxRsHttpRequest.java index 40c06ec04d5..94f03508eed 100644 --- a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/client/JaxRsHttpRequest.java +++ b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/client/JaxRsHttpRequest.java @@ -26,14 +26,14 @@ import ca.uhn.fhir.rest.client.api.IHttpRequest; import ca.uhn.fhir.rest.client.api.IHttpResponse; import ca.uhn.fhir.util.StopWatch; -import javax.ws.rs.client.Entity; -import javax.ws.rs.client.Invocation; -import javax.ws.rs.core.Response; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; +import javax.ws.rs.client.Entity; +import javax.ws.rs.client.Invocation; +import javax.ws.rs.core.Response; /** * A Http Request based on JaxRs. This is an adapter around the class @@ -128,5 +128,4 @@ public class JaxRsHttpRequest extends BaseHttpRequest implements IHttpRequest { public void setUri(String theUrl) { throw new UnsupportedOperationException(Msg.code(606)); } - } diff --git a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/client/JaxRsHttpResponse.java b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/client/JaxRsHttpResponse.java index 063550a4a79..1522d5007d3 100644 --- a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/client/JaxRsHttpResponse.java +++ b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/client/JaxRsHttpResponse.java @@ -19,32 +19,27 @@ */ package ca.uhn.fhir.jaxrs.client; -import java.io.*; +import ca.uhn.fhir.rest.client.api.IHttpResponse; +import ca.uhn.fhir.rest.client.impl.BaseHttpResponse; +import ca.uhn.fhir.util.StopWatch; +import java.io.*; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.concurrent.ConcurrentHashMap; - import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; -import ca.uhn.fhir.rest.client.impl.BaseHttpResponse; -import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; -import ca.uhn.fhir.util.StopWatch; - -import ca.uhn.fhir.rest.client.api.IHttpResponse; -import org.apache.commons.io.IOUtils; - /** * A Http Response based on JaxRs. 
This is an adapter around the class {@link javax.ws.rs.core.Response Response} * @author Peter Van Houte | peter.vanhoute@agfa.com | Agfa Healthcare */ public class JaxRsHttpResponse extends BaseHttpResponse implements IHttpResponse { - + private boolean myBufferedEntity = false; private final Response myResponse; - + public JaxRsHttpResponse(Response theResponse, StopWatch theResponseStopWatch) { super(theResponseStopWatch); this.myResponse = theResponse; @@ -52,7 +47,7 @@ public class JaxRsHttpResponse extends BaseHttpResponse implements IHttpResponse @Override public void bufferEntity() throws IOException { - if(!myBufferedEntity && myResponse.hasEntity()) { + if (!myBufferedEntity && myResponse.hasEntity()) { myBufferedEntity = true; myResponse.bufferEntity(); } else { @@ -77,7 +72,8 @@ public class JaxRsHttpResponse extends BaseHttpResponse implements IHttpResponse @Override public Map> getAllHeaders() { Map> theHeaders = new ConcurrentHashMap>(); - for (Entry> iterable_element : myResponse.getStringHeaders().entrySet()) { + for (Entry> iterable_element : + myResponse.getStringHeaders().entrySet()) { theHeaders.put(iterable_element.getKey().toLowerCase(), iterable_element.getValue()); } return theHeaders; @@ -89,20 +85,19 @@ public class JaxRsHttpResponse extends BaseHttpResponse implements IHttpResponse if (mediaType == null) { return null; } - //Keep only type and subtype and do not include the parameters such as charset + // Keep only type and subtype and do not include the parameters such as charset return new MediaType(mediaType.getType(), mediaType.getSubtype()).toString(); } - + @Override public Response getResponse() { return myResponse; } - + @Override public int getStatus() { return myResponse.getStatus(); } - @Override public String getStatusInfo() { @@ -122,7 +117,5 @@ public class JaxRsHttpResponse extends BaseHttpResponse implements IHttpResponse public List getHeaders(String theName) { List retVal = myResponse.getStringHeaders().get(theName); return retVal; - } - - + } } diff --git a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/client/JaxRsRestfulClientFactory.java b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/client/JaxRsRestfulClientFactory.java index 97c34b3c9e7..68084615474 100644 --- a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/client/JaxRsRestfulClientFactory.java +++ b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/client/JaxRsRestfulClientFactory.java @@ -25,27 +25,23 @@ import ca.uhn.fhir.rest.api.RequestTypeEnum; import ca.uhn.fhir.rest.client.api.Header; import ca.uhn.fhir.rest.client.api.IHttpClient; import ca.uhn.fhir.rest.client.impl.RestfulClientFactory; -import ca.uhn.fhir.rest.client.tls.TlsAuthenticationSvc; -import ca.uhn.fhir.tls.TlsAuthentication; -import javax.net.ssl.SSLContext; -import javax.ws.rs.client.Client; -import javax.ws.rs.client.ClientBuilder; import java.util.List; import java.util.Map; -import java.util.Optional; +import javax.ws.rs.client.Client; +import javax.ws.rs.client.ClientBuilder; /** * A Restful Client Factory, based on Jax Rs * Default Jax-Rs client is NOT thread safe in static context, you should create a new factory every time or - * use a specific Jax-Rs client implementation which managed connection pool. + * use a specific Jax-Rs client implementation which managed connection pool. 
* @author Peter Van Houte | peter.vanhoute@agfa.com | Agfa Healthcare */ public class JaxRsRestfulClientFactory extends RestfulClientFactory { private Client myNativeClient; - private List> registeredComponents; - + private List> registeredComponents; + /** * Constructor. Note that you must set the {@link FhirContext} manually using {@link #setFhirContext(FhirContext)} if this constructor is used! */ @@ -55,7 +51,7 @@ public class JaxRsRestfulClientFactory extends RestfulClientFactory { /** * Constructor - * + * * @param theFhirContext * The context */ @@ -71,7 +67,7 @@ if (registeredComponents != null && !registeredComponents.isEmpty()) { for (Class c : registeredComponents) { - myNativeClient = myNativeClient.register(c); + myNativeClient = myNativeClient.register(c); } } @@ -79,40 +75,44 @@ } @Override - public synchronized IHttpClient getHttpClient(StringBuilder url, Map<String, List<String>> theIfNoneExistParams, String theIfNoneExistString, RequestTypeEnum theRequestType, List<Header>
    theHeaders) { + public synchronized IHttpClient getHttpClient( + StringBuilder url, + Map<String, List<String>> theIfNoneExistParams, + String theIfNoneExistString, + RequestTypeEnum theRequestType, + List<Header>
    theHeaders) { Client client = getNativeClientClient(); return new JaxRsHttpClient(client, url, theIfNoneExistParams, theIfNoneExistString, theRequestType, theHeaders); } - /*** - * Not supported with default Jax-Rs client implementation - * @param theHost - * The host (or null to disable proxying, as is the default) - * @param thePort - */ + /*** + * Not supported with default Jax-Rs client implementation + * @param theHost + * The host (or null to disable proxying, as is the default) + * @param thePort + */ @Override public void setProxy(String theHost, Integer thePort) { throw new UnsupportedOperationException(Msg.code(605) + "Proxies are not supported yet in JAX-RS client"); } - - /** - * Only accept clients of type javax.ws.rs.client.Client - * Can be used to set a specific Client implementation - * @param theHttpClient - */ + + /** + * Only accept clients of type javax.ws.rs.client.Client + * Can be used to set a specific Client implementation + * @param theHttpClient + */ @Override public synchronized void setHttpClient(Object theHttpClient) { this.myNativeClient = (Client) theHttpClient; } - /** - * Register a list of Jax-Rs component (provider, filter...) - * @param components list of Jax-Rs components to register - */ - public void register(List> components) { - registeredComponents = components; - } - + /** + * Register a list of Jax-Rs component (provider, filter...) + * @param components list of Jax-Rs components to register + */ + public void register(List> components) { + registeredComponents = components; + } @Override protected synchronized JaxRsHttpClient getHttpClient(String theServerBase) { @@ -120,10 +120,8 @@ public class JaxRsRestfulClientFactory extends RestfulClientFactory { } @Override - protected void resetHttpClient() { - if (myNativeClient != null) - myNativeClient.close(); // close client to avoid memory leak - myNativeClient = null; - } - + protected void resetHttpClient() { + if (myNativeClient != null) myNativeClient.close(); // close client to avoid memory leak + myNativeClient = null; + } } diff --git a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsBundleProvider.java b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsBundleProvider.java index 74c56d643f1..fc00b175fd6 100644 --- a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsBundleProvider.java +++ b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsBundleProvider.java @@ -19,30 +19,28 @@ */ package ca.uhn.fhir.jaxrs.server; -import java.io.IOException; -import java.util.Collections; -import java.util.List; - -import javax.interceptor.Interceptors; -import javax.ws.rs.*; - -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.api.BundleInclusionRule; import ca.uhn.fhir.jaxrs.server.interceptor.JaxRsExceptionInterceptor; import ca.uhn.fhir.jaxrs.server.util.JaxRsMethodBindings; import ca.uhn.fhir.jaxrs.server.util.JaxRsRequest; import ca.uhn.fhir.jaxrs.server.util.JaxRsRequest.Builder; -import ca.uhn.fhir.rest.api.*; import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.api.server.IRestfulServer; +import ca.uhn.fhir.rest.api.*; import ca.uhn.fhir.rest.server.IPagingProvider; import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor; import ca.uhn.fhir.rest.server.method.BaseMethodBinding; +import java.io.IOException; +import 
java.util.Collections; +import java.util.List; +import javax.interceptor.Interceptors; +import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import javax.ws.rs.*; + /** * This server is the abstract superclass for all bundle providers. It exposes * a large amount of the fhir api functionality using JAXRS @@ -50,143 +48,144 @@ import ca.uhn.fhir.rest.server.method.BaseMethodBinding; * @author Peter Van Houte | peter.vanhoute@agfa.com | Agfa Healthcare */ @SuppressWarnings("javadoc") -@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, MediaType.TEXT_PLAIN }) -@Consumes({ MediaType.APPLICATION_FORM_URLENCODED, MediaType.APPLICATION_JSON, Constants.CT_FHIR_JSON, Constants.CT_FHIR_XML }) +@Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, MediaType.TEXT_PLAIN}) +@Consumes({ + MediaType.APPLICATION_FORM_URLENCODED, + MediaType.APPLICATION_JSON, + Constants.CT_FHIR_JSON, + Constants.CT_FHIR_XML +}) @Interceptors(JaxRsExceptionInterceptor.class) -public abstract class AbstractJaxRsBundleProvider extends AbstractJaxRsProvider implements IRestfulServer, IBundleProvider { +public abstract class AbstractJaxRsBundleProvider extends AbstractJaxRsProvider + implements IRestfulServer, IBundleProvider { - /** the method bindings for this class */ - private final JaxRsMethodBindings theBindings; + /** the method bindings for this class */ + private final JaxRsMethodBindings theBindings; - /** - * The default constructor. The method bindings are retrieved from the class - * being constructed. - */ - protected AbstractJaxRsBundleProvider() { - super(); - theBindings = JaxRsMethodBindings.getMethodBindings(this, getClass()); - } + /** + * The default constructor. The method bindings are retrieved from the class + * being constructed. + */ + protected AbstractJaxRsBundleProvider() { + super(); + theBindings = JaxRsMethodBindings.getMethodBindings(this, getClass()); + } - /** - * Provides the ability to specify the {@link FhirContext}. - * @param ctx the {@link FhirContext} instance. - */ - protected AbstractJaxRsBundleProvider(final FhirContext ctx) { - super(ctx); - theBindings = JaxRsMethodBindings.getMethodBindings(this, getClass()); - } + /** + * Provides the ability to specify the {@link FhirContext}. + * @param ctx the {@link FhirContext} instance. + */ + protected AbstractJaxRsBundleProvider(final FhirContext ctx) { + super(ctx); + theBindings = JaxRsMethodBindings.getMethodBindings(this, getClass()); + } - /** - * This constructor takes in an explicit interface class. This subclass - * should be identical to the class being constructed but is given - * explicitly in order to avoid issues with proxy classes in a jee - * environment. - * - * @param theProviderClass the interface of the class - */ - protected AbstractJaxRsBundleProvider(final Class theProviderClass) { - theBindings = JaxRsMethodBindings.getMethodBindings(this, theProviderClass); - } + /** + * This constructor takes in an explicit interface class. This subclass + * should be identical to the class being constructed but is given + * explicitly in order to avoid issues with proxy classes in a jee + * environment. 
+ * + * @param theProviderClass the interface of the class + */ + protected AbstractJaxRsBundleProvider(final Class theProviderClass) { + theBindings = JaxRsMethodBindings.getMethodBindings(this, theProviderClass); + } - /** - * Create all resources in one transaction - * - * @param resource the body of the post method containing the bundle of the resources being created in a xml/json form - * @return the response - * @see https://www.hl7. org/fhir/http.html#create - */ - @POST - public Response create(final String resource) - throws IOException { - return execute(getRequest(RequestTypeEnum.POST, RestOperationTypeEnum.TRANSACTION).resource(resource)); - } + /** + * Create all resources in one transaction + * + * @param resource the body of the post method containing the bundle of the resources being created in a xml/json form + * @return the response + * @see https://www.hl7. org/fhir/http.html#create + */ + @POST + public Response create(final String resource) throws IOException { + return execute(getRequest(RequestTypeEnum.POST, RestOperationTypeEnum.TRANSACTION) + .resource(resource)); + } - /** - * Search the resource type based on some filter criteria - * - * @return the response - * @see https://www.hl7.org/fhir/http.html#search - */ - @GET - public Response search() - throws IOException { - return execute(getRequest(RequestTypeEnum.GET, RestOperationTypeEnum.SEARCH_TYPE)); - } + /** + * Search the resource type based on some filter criteria + * + * @return the response + * @see https://www.hl7.org/fhir/http.html#search + */ + @GET + public Response search() throws IOException { + return execute(getRequest(RequestTypeEnum.GET, RestOperationTypeEnum.SEARCH_TYPE)); + } - /** - * Execute the method described by the requestBuilder and methodKey - * - * @param theRequestBuilder the requestBuilder that contains the information about the request - * @param methodKey the key determining the method to be executed - * @return the response - */ - private Response execute(final Builder theRequestBuilder, final String methodKey) - throws IOException { - final JaxRsRequest theRequest = theRequestBuilder.build(); - final BaseMethodBinding method = getBinding(theRequest.getRestOperationType(), methodKey); - try { - return (Response) method.invokeServer(this, theRequest); - } - catch (final Throwable theException) { - return handleException(theRequest, theException); - } - } + /** + * Execute the method described by the requestBuilder and methodKey + * + * @param theRequestBuilder the requestBuilder that contains the information about the request + * @param methodKey the key determining the method to be executed + * @return the response + */ + private Response execute(final Builder theRequestBuilder, final String methodKey) throws IOException { + final JaxRsRequest theRequest = theRequestBuilder.build(); + final BaseMethodBinding method = getBinding(theRequest.getRestOperationType(), methodKey); + try { + return (Response) method.invokeServer(this, theRequest); + } catch (final Throwable theException) { + return handleException(theRequest, theException); + } + } - /** - * Execute the method described by the requestBuilder - * - * @param theRequestBuilder the requestBuilder that contains the information about the request - * @return the response - */ - private Response execute(final Builder theRequestBuilder) - throws IOException { - return execute(theRequestBuilder, JaxRsMethodBindings.DEFAULT_METHOD_KEY); - } + /** + * Execute the method described by the requestBuilder + * + * @param 
theRequestBuilder the requestBuilder that contains the information about the request + * @return the response + */ + private Response execute(final Builder theRequestBuilder) throws IOException { + return execute(theRequestBuilder, JaxRsMethodBindings.DEFAULT_METHOD_KEY); + } - /** - * Return the method binding for the given rest operation - * - * @param restOperation the rest operation to retrieve - * @param theBindingKey the key determining the method to be executed (needed for e.g. custom operation) - * @return - */ - protected BaseMethodBinding getBinding(final RestOperationTypeEnum restOperation, final String theBindingKey) { - return getBindings().getBinding(restOperation, theBindingKey); - } + /** + * Return the method binding for the given rest operation + * + * @param restOperation the rest operation to retrieve + * @param theBindingKey the key determining the method to be executed (needed for e.g. custom operation) + * @return + */ + protected BaseMethodBinding getBinding(final RestOperationTypeEnum restOperation, final String theBindingKey) { + return getBindings().getBinding(restOperation, theBindingKey); + } - /** - * Default: an empty list of interceptors - * - * @see ca.uhn.fhir.rest.server.IRestfulServerDefaults#getInterceptors_() - */ - @Override - public List getInterceptors_() { - return Collections.emptyList(); - } + /** + * Default: an empty list of interceptors + * + * @see ca.uhn.fhir.rest.server.IRestfulServerDefaults#getInterceptors_() + */ + @Override + public List getInterceptors_() { + return Collections.emptyList(); + } - /** - * Default: no paging provider - */ - @Override - public IPagingProvider getPagingProvider() { - return null; - } + /** + * Default: no paging provider + */ + @Override + public IPagingProvider getPagingProvider() { + return null; + } - /** - * Default: BundleInclusionRule.BASED_ON_INCLUDES - */ - @Override - public BundleInclusionRule getBundleInclusionRule() { - return BundleInclusionRule.BASED_ON_INCLUDES; - } - - /** - * Return the bindings defined in this resource provider - * - * @return the jax-rs method bindings - */ - public JaxRsMethodBindings getBindings() { - return theBindings; - } + /** + * Default: BundleInclusionRule.BASED_ON_INCLUDES + */ + @Override + public BundleInclusionRule getBundleInclusionRule() { + return BundleInclusionRule.BASED_ON_INCLUDES; + } + /** + * Return the bindings defined in this resource provider + * + * @return the jax-rs method bindings + */ + public JaxRsMethodBindings getBindings() { + return theBindings; + } } diff --git a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsConformanceProvider.java b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsConformanceProvider.java index 720c40a12ef..5208fe2e03f 100644 --- a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsConformanceProvider.java +++ b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsConformanceProvider.java @@ -48,12 +48,6 @@ import org.slf4j.LoggerFactory; import org.springframework.context.event.ContextRefreshedEvent; import org.springframework.context.event.EventListener; -import javax.ws.rs.GET; -import javax.ws.rs.OPTIONS; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; import java.io.IOException; import java.lang.annotation.Annotation; import java.lang.reflect.Method; @@ -65,6 +59,12 @@ import java.util.List; import java.util.Map.Entry; 
import java.util.Set; import java.util.concurrent.ConcurrentHashMap; +import javax.ws.rs.GET; +import javax.ws.rs.OPTIONS; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; /** * This is the conformance provider for the jax rs servers. It requires all providers to be registered during startup because the conformance profile is generated during the postconstruct phase. @@ -85,7 +85,8 @@ public abstract class AbstractJaxRsConformanceProvider extends AbstractJaxRsProv /** * the resource bindings */ - private ConcurrentHashMap myResourceNameToBinding = new ConcurrentHashMap(); + private ConcurrentHashMap myResourceNameToBinding = + new ConcurrentHashMap(); /** * the server configuration */ @@ -95,6 +96,7 @@ public abstract class AbstractJaxRsConformanceProvider extends AbstractJaxRsProv * the conformance. It is created once during startup */ private org.hl7.fhir.r4.model.CapabilityStatement myR4CapabilityStatement; + private org.hl7.fhir.dstu3.model.CapabilityStatement myDstu3CapabilityStatement; private org.hl7.fhir.dstu2016may.model.Conformance myDstu2_1Conformance; private org.hl7.fhir.dstu2.model.Conformance myDstu2Hl7OrgConformance; @@ -108,7 +110,8 @@ public abstract class AbstractJaxRsConformanceProvider extends AbstractJaxRsProv * @param serverName the server name. If null, "" is used * @param serverVersion the server version. If null, "" is used */ - protected AbstractJaxRsConformanceProvider(String implementationDescription, String serverName, String serverVersion) { + protected AbstractJaxRsConformanceProvider( + String implementationDescription, String serverName, String serverVersion) { myServerConfiguration.setFhirContext(getFhirContext()); myServerConfiguration.setImplementationDescription(StringUtils.defaultIfEmpty(implementationDescription, "")); myServerConfiguration.setServerName(StringUtils.defaultIfEmpty(serverName, "")); @@ -123,7 +126,8 @@ public abstract class AbstractJaxRsConformanceProvider extends AbstractJaxRsProv * @param serverName the server name. If null, "" is used * @param serverVersion the server version. 
If null, "" is used */ - protected AbstractJaxRsConformanceProvider(FhirContext ctx, String implementationDescription, String serverName, String serverVersion) { + protected AbstractJaxRsConformanceProvider( + FhirContext ctx, String implementationDescription, String serverName, String serverVersion) { super(ctx); myServerConfiguration.setFhirContext(ctx); myServerConfiguration.setImplementationDescription(StringUtils.defaultIfEmpty(implementationDescription, "")); @@ -158,23 +162,31 @@ public abstract class AbstractJaxRsConformanceProvider extends AbstractJaxRsProv FhirVersionEnum fhirContextVersion = super.getFhirContext().getVersion().getVersion(); switch (fhirContextVersion) { case R4: - ServerCapabilityStatementProvider r4ServerCapabilityStatementProvider = new ServerCapabilityStatementProvider(getFhirContext(), myServerConfiguration); - myR4CapabilityStatement = (CapabilityStatement) r4ServerCapabilityStatementProvider.getServerConformance(null, null); + ServerCapabilityStatementProvider r4ServerCapabilityStatementProvider = + new ServerCapabilityStatementProvider(getFhirContext(), myServerConfiguration); + myR4CapabilityStatement = + (CapabilityStatement) r4ServerCapabilityStatementProvider.getServerConformance(null, null); break; case DSTU3: - org.hl7.fhir.dstu3.hapi.rest.server.ServerCapabilityStatementProvider dstu3ServerCapabilityStatementProvider = new org.hl7.fhir.dstu3.hapi.rest.server.ServerCapabilityStatementProvider(myServerConfiguration); + org.hl7.fhir.dstu3.hapi.rest.server.ServerCapabilityStatementProvider + dstu3ServerCapabilityStatementProvider = + new org.hl7.fhir.dstu3.hapi.rest.server.ServerCapabilityStatementProvider( + myServerConfiguration); myDstu3CapabilityStatement = dstu3ServerCapabilityStatementProvider.getServerConformance(null, null); break; case DSTU2_1: - org.hl7.fhir.dstu2016may.hapi.rest.server.ServerConformanceProvider dstu2_1ServerConformanceProvider = new org.hl7.fhir.dstu2016may.hapi.rest.server.ServerConformanceProvider(myServerConfiguration); + org.hl7.fhir.dstu2016may.hapi.rest.server.ServerConformanceProvider dstu2_1ServerConformanceProvider = + new org.hl7.fhir.dstu2016may.hapi.rest.server.ServerConformanceProvider(myServerConfiguration); myDstu2_1Conformance = dstu2_1ServerConformanceProvider.getServerConformance(null, null); break; case DSTU2_HL7ORG: - ServerConformanceProvider dstu2Hl7OrgServerConformanceProvider = new ServerConformanceProvider(myServerConfiguration); + ServerConformanceProvider dstu2Hl7OrgServerConformanceProvider = + new ServerConformanceProvider(myServerConfiguration); myDstu2Hl7OrgConformance = dstu2Hl7OrgServerConformanceProvider.getServerConformance(null, null); break; case DSTU2: - ca.uhn.fhir.rest.server.provider.dstu2.ServerConformanceProvider dstu2ServerConformanceProvider = new ca.uhn.fhir.rest.server.provider.dstu2.ServerConformanceProvider(myServerConfiguration); + ca.uhn.fhir.rest.server.provider.dstu2.ServerConformanceProvider dstu2ServerConformanceProvider = + new ca.uhn.fhir.rest.server.provider.dstu2.ServerConformanceProvider(myServerConfiguration); myDstu2Conformance = dstu2ServerConformanceProvider.getServerConformance(null, null); break; default: @@ -242,7 +254,8 @@ public abstract class AbstractJaxRsConformanceProvider extends AbstractJaxRsProv Set summaryMode = Collections.emptySet(); - return (Response) RestfulServerUtils.streamResponseAsResource(this, conformance, summaryMode, Constants.STATUS_HTTP_200_OK, false, true, requestDetails, null, null); + return (Response) 
RestfulServerUtils.streamResponseAsResource( + this, conformance, summaryMode, Constants.STATUS_HTTP_200_OK, false, true, requestDetails, null, null); } /** @@ -253,7 +266,8 @@ public abstract class AbstractJaxRsConformanceProvider extends AbstractJaxRsProv * @return the numbers of basemethodbindings added * @see ca.uhn.fhir.rest.server.RestfulServer#findResourceMethods(Object) */ - public int addProvider(IResourceProvider theProvider, Class theProviderInterface) throws ConfigurationException { + public int addProvider(IResourceProvider theProvider, Class theProviderInterface) + throws ConfigurationException { int count = 0; for (Method m : ReflectionUtil.getDeclaredMethods(theProviderInterface)) { @@ -270,10 +284,12 @@ public abstract class AbstractJaxRsConformanceProvider extends AbstractJaxRsProv // } if (!Modifier.isPublic(m.getModifiers())) { - throw new ConfigurationException(Msg.code(593) + "Method '" + m.getName() + "' is not public, FHIR RESTful methods must be public"); + throw new ConfigurationException(Msg.code(593) + "Method '" + m.getName() + + "' is not public, FHIR RESTful methods must be public"); } else { if (Modifier.isStatic(m.getModifiers())) { - throw new ConfigurationException(Msg.code(594) + "Method '" + m.getName() + "' is static, FHIR RESTful methods must not be static"); + throw new ConfigurationException(Msg.code(594) + "Method '" + m.getName() + + "' is static, FHIR RESTful methods must not be static"); } else { ourLog.debug("Scanning public method: {}#{}", theProvider.getClass(), m.getName()); @@ -299,7 +315,8 @@ public abstract class AbstractJaxRsConformanceProvider extends AbstractJaxRsProv Package pack = annotation.annotationType().getPackage(); if (pack.equals(IdParam.class.getPackage())) { if (!allowableParams.contains(annotation.annotationType())) { - throw new ConfigurationException(Msg.code(595) + "Method[" + m.toString() + "] is not allowed to have a parameter annotated with " + annotation); + throw new ConfigurationException(Msg.code(595) + "Method[" + m.toString() + + "] is not allowed to have a parameter annotated with " + annotation); } } } @@ -334,5 +351,4 @@ public abstract class AbstractJaxRsConformanceProvider extends AbstractJaxRsProv throw new ConfigurationException(Msg.code(596) + "Unsupported Fhir version: " + fhirContextVersion); } } - } diff --git a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsPageProvider.java b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsPageProvider.java index 710e51d4d50..86626914d35 100644 --- a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsPageProvider.java +++ b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsPageProvider.java @@ -19,33 +19,31 @@ */ package ca.uhn.fhir.jaxrs.server; -import java.io.IOException; - -import javax.interceptor.Interceptors; - -import javax.ws.rs.GET; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.api.BundleInclusionRule; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jaxrs.server.interceptor.JaxRsExceptionInterceptor; import ca.uhn.fhir.jaxrs.server.interceptor.JaxRsResponseException; import ca.uhn.fhir.jaxrs.server.util.JaxRsRequest; -import ca.uhn.fhir.rest.api.*; import ca.uhn.fhir.rest.api.server.IRestfulServer; +import ca.uhn.fhir.rest.api.*; import ca.uhn.fhir.rest.server.IPagingProvider; import 
ca.uhn.fhir.rest.server.PageProvider; import ca.uhn.fhir.rest.server.method.PageMethodBinding; +import java.io.IOException; +import javax.interceptor.Interceptors; +import javax.ws.rs.GET; +import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; + /** * Base class for a provider to provide the [baseUrl]?_getpages=foo request, which is a request to the * server to retrieve the next page of a set of paged results. */ -@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, MediaType.TEXT_PLAIN }) +@Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, MediaType.TEXT_PLAIN}) @Interceptors(JaxRsExceptionInterceptor.class) public abstract class AbstractJaxRsPageProvider extends AbstractJaxRsProvider implements IRestfulServer { @@ -67,14 +65,14 @@ public abstract class AbstractJaxRsPageProvider extends AbstractJaxRsProvider im * @param ctx the {@link FhirContext} instance. */ protected AbstractJaxRsPageProvider(FhirContext ctx) { - super(ctx); - try { - myBinding = new PageMethodBinding(getFhirContext(), PageProvider.class.getMethod("getPage")); - } catch (Exception e) { - throw new ca.uhn.fhir.context.ConfigurationException(Msg.code(1984), e); - } + super(ctx); + try { + myBinding = new PageMethodBinding(getFhirContext(), PageProvider.class.getMethod("getPage")); + } catch (Exception e) { + throw new ca.uhn.fhir.context.ConfigurationException(Msg.code(1984), e); + } } - + @Override public String getBaseForRequest() { try { @@ -90,7 +88,8 @@ public abstract class AbstractJaxRsPageProvider extends AbstractJaxRsProvider im */ @GET public Response getPages(@QueryParam(Constants.PARAM_PAGINGACTION) String thePageId) throws IOException { - JaxRsRequest theRequest = getRequest(RequestTypeEnum.GET, RestOperationTypeEnum.GET_PAGE).build(); + JaxRsRequest theRequest = + getRequest(RequestTypeEnum.GET, RestOperationTypeEnum.GET_PAGE).build(); try { return (Response) myBinding.invokeServer(this, theRequest); } catch (JaxRsResponseException theException) { @@ -118,5 +117,4 @@ public abstract class AbstractJaxRsPageProvider extends AbstractJaxRsProvider im public PreferReturnEnum getDefaultPreferReturn() { return PreferReturnEnum.REPRESENTATION; } - } diff --git a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsProvider.java b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsProvider.java index e02f5b3102a..6d3e3b17cd2 100644 --- a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsProvider.java +++ b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsProvider.java @@ -19,28 +19,26 @@ */ package ca.uhn.fhir.jaxrs.server; -import java.io.IOException; -import java.util.*; -import java.util.Map.Entry; - -import javax.ws.rs.core.Context; -import javax.ws.rs.core.HttpHeaders; - -import ca.uhn.fhir.interceptor.api.IInterceptorService; -import javax.ws.rs.core.MultivaluedMap; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.UriInfo; -import org.apache.commons.lang3.StringUtils; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.api.AddProfileTagEnum; +import ca.uhn.fhir.interceptor.api.IInterceptorService; import ca.uhn.fhir.jaxrs.server.interceptor.JaxRsExceptionInterceptor; import ca.uhn.fhir.jaxrs.server.interceptor.JaxRsResponseException; import ca.uhn.fhir.jaxrs.server.util.JaxRsRequest; import ca.uhn.fhir.jaxrs.server.util.JaxRsRequest.Builder; import 
ca.uhn.fhir.rest.api.*; -import ca.uhn.fhir.rest.server.*; import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor; +import ca.uhn.fhir.rest.server.*; +import org.apache.commons.lang3.StringUtils; + +import java.io.IOException; +import java.util.Map.Entry; +import java.util.*; +import javax.ws.rs.core.Context; +import javax.ws.rs.core.HttpHeaders; +import javax.ws.rs.core.MultivaluedMap; +import javax.ws.rs.core.Response; +import javax.ws.rs.core.UriInfo; /** * This is the abstract superclass for all jaxrs providers. It contains some defaults implementing @@ -95,7 +93,7 @@ public abstract class AbstractJaxRsProvider implements IRestfulServerDefaults { /** * This method returns the server base, including the resource path. * {@link UriInfo#getBaseUri() UriInfo#getBaseUri()} - * + * * @return the ascii string for the base resource provider path */ public String getBaseForRequest() { @@ -104,7 +102,7 @@ public abstract class AbstractJaxRsProvider implements IRestfulServerDefaults { /** * This method returns the server base, independent of the request or resource. - * + * * @see javax.ws.rs.core.UriInfo#getBaseUri() * @return the ascii string for the server base */ @@ -144,7 +142,7 @@ public abstract class AbstractJaxRsProvider implements IRestfulServerDefaults { /** * Get the headers - * + * * @return the headers */ public HttpHeaders getHeaders() { @@ -154,7 +152,7 @@ public abstract class AbstractJaxRsProvider implements IRestfulServerDefaults { /** * Default: an empty list of interceptors (Interceptors are not yet supported * in the JAX-RS server). Please get in touch if you'd like to help! - * + * * @see ca.uhn.fhir.rest.server.IRestfulServerDefaults#getInterceptors_() */ @Override @@ -172,21 +170,25 @@ public abstract class AbstractJaxRsProvider implements IRestfulServerDefaults { /** * This method returns the query parameters - * + * * @return the query parameters */ public Map getParameters() { final MultivaluedMap queryParameters = getUriInfo().getQueryParameters(); final HashMap params = new HashMap(); for (final Entry> paramEntry : queryParameters.entrySet()) { - params.put(paramEntry.getKey(), paramEntry.getValue().toArray(new String[paramEntry.getValue().size()])); + params.put( + paramEntry.getKey(), + paramEntry + .getValue() + .toArray(new String[paramEntry.getValue().size()])); } return params; } /** * Return the requestbuilder for the server - * + * * @param requestType * the type of the request * @param restOperation @@ -199,7 +201,7 @@ public abstract class AbstractJaxRsProvider implements IRestfulServerDefaults { /** * Return the requestbuilder for the server - * + * * @param requestType * the type of the request * @param restOperation @@ -208,14 +210,18 @@ public abstract class AbstractJaxRsProvider implements IRestfulServerDefaults { * the resource name * @return the requestbuilder */ - public Builder getRequest(final RequestTypeEnum requestType, final RestOperationTypeEnum restOperation, final String theResourceName) { - return new JaxRsRequest.Builder(this, requestType, restOperation, myUriInfo.getRequestUri().toString(), theResourceName); + public Builder getRequest( + final RequestTypeEnum requestType, + final RestOperationTypeEnum restOperation, + final String theResourceName) { + return new JaxRsRequest.Builder( + this, requestType, restOperation, myUriInfo.getRequestUri().toString(), theResourceName); } /** * This method returns the default server address strategy. 
The default strategy return the * base uri for the request {@link AbstractJaxRsProvider#getBaseForRequest() getBaseForRequest()} - * + * * @return */ public IServerAddressStrategy getServerAddressStrategy() { @@ -226,7 +232,7 @@ public abstract class AbstractJaxRsProvider implements IRestfulServerDefaults { /** * Get the uriInfo - * + * * @return the uri info */ public UriInfo getUriInfo() { @@ -235,7 +241,7 @@ public abstract class AbstractJaxRsProvider implements IRestfulServerDefaults { /** * Convert an exception to a response - * + * * @param theRequest * the incoming request * @param theException @@ -243,13 +249,14 @@ public abstract class AbstractJaxRsProvider implements IRestfulServerDefaults { * @return response * @throws IOException */ - public Response handleException(final JaxRsRequest theRequest, final Throwable theException) - throws IOException { + public Response handleException(final JaxRsRequest theRequest, final Throwable theException) throws IOException { if (theException instanceof JaxRsResponseException) { - return new JaxRsExceptionInterceptor().convertExceptionIntoResponse(theRequest, (JaxRsResponseException) theException); + return new JaxRsExceptionInterceptor() + .convertExceptionIntoResponse(theRequest, (JaxRsResponseException) theException); } else { - return new JaxRsExceptionInterceptor().convertExceptionIntoResponse(theRequest, - new JaxRsExceptionInterceptor().convertException(this, theException)); + return new JaxRsExceptionInterceptor() + .convertExceptionIntoResponse( + theRequest, new JaxRsExceptionInterceptor().convertException(this, theException)); } } @@ -263,7 +270,7 @@ public abstract class AbstractJaxRsProvider implements IRestfulServerDefaults { /** * Set the headers - * + * * @param headers * the headers to set */ @@ -273,7 +280,7 @@ public abstract class AbstractJaxRsProvider implements IRestfulServerDefaults { /** * Set the Uri Info - * + * * @param uriInfo * the uri info */ diff --git a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsResourceProvider.java b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsResourceProvider.java index 5543c15d7d5..ebb634294af 100644 --- a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsResourceProvider.java +++ b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/AbstractJaxRsResourceProvider.java @@ -19,244 +19,261 @@ */ package ca.uhn.fhir.jaxrs.server; -import java.io.IOException; -import java.net.URL; - -import javax.interceptor.Interceptors; -import javax.ws.rs.*; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; - -import org.hl7.fhir.instance.model.api.IBaseResource; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.api.BundleInclusionRule; import ca.uhn.fhir.jaxrs.server.interceptor.JaxRsExceptionInterceptor; import ca.uhn.fhir.jaxrs.server.util.JaxRsMethodBindings; import ca.uhn.fhir.jaxrs.server.util.JaxRsRequest; import ca.uhn.fhir.jaxrs.server.util.JaxRsRequest.Builder; -import ca.uhn.fhir.rest.api.*; import ca.uhn.fhir.rest.api.server.IRestfulServer; +import ca.uhn.fhir.rest.api.*; import ca.uhn.fhir.rest.server.IPagingProvider; import ca.uhn.fhir.rest.server.IResourceProvider; import ca.uhn.fhir.rest.server.method.BaseMethodBinding; +import org.hl7.fhir.instance.model.api.IBaseResource; + +import java.io.IOException; +import java.net.URL; +import javax.interceptor.Interceptors; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; 
+import javax.ws.rs.*; /** * This server is the abstract superclass for all resource providers. It exposes * a large amount of the fhir api functionality using JAXRS * @author Peter Van Houte | peter.vanhoute@agfa.com | Agfa Healthcare */ -@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, MediaType.TEXT_PLAIN, Constants.CT_FHIR_JSON, Constants.CT_FHIR_XML }) -@Consumes({ MediaType.APPLICATION_FORM_URLENCODED, MediaType.APPLICATION_JSON, Constants.CT_FHIR_JSON, Constants.CT_FHIR_XML, Constants.CT_FHIR_JSON_NEW, Constants.CT_FHIR_XML_NEW, "application/octet-stream" }) +@Produces({ + MediaType.APPLICATION_JSON, + MediaType.APPLICATION_XML, + MediaType.TEXT_PLAIN, + Constants.CT_FHIR_JSON, + Constants.CT_FHIR_XML +}) +@Consumes({ + MediaType.APPLICATION_FORM_URLENCODED, + MediaType.APPLICATION_JSON, + Constants.CT_FHIR_JSON, + Constants.CT_FHIR_XML, + Constants.CT_FHIR_JSON_NEW, + Constants.CT_FHIR_XML_NEW, + "application/octet-stream" +}) @Interceptors(JaxRsExceptionInterceptor.class) public abstract class AbstractJaxRsResourceProvider extends AbstractJaxRsProvider + implements IRestfulServer, IResourceProvider { -implements IRestfulServer, IResourceProvider { - - /** the method bindings for this class */ - private final JaxRsMethodBindings theBindings; - - /** - * The default constructor. The method bindings are retrieved from the class - * being constructed. - */ - protected AbstractJaxRsResourceProvider() { - super(); - theBindings = JaxRsMethodBindings.getMethodBindings(this, getClass()); - } - - /** - * Provides the ability to specify the {@link FhirContext}. - * @param ctx the {@link FhirContext} instance. - */ - protected AbstractJaxRsResourceProvider(final FhirContext ctx) { - super(ctx); - theBindings = JaxRsMethodBindings.getMethodBindings(this, getClass()); - } - - /** - * This constructor takes in an explicit interface class. This subclass - * should be identical to the class being constructed but is given - * explicitly in order to avoid issues with proxy classes in a jee - * environment. - * - * @param theProviderClass the interface of the class - */ - protected AbstractJaxRsResourceProvider(final Class theProviderClass) { - super(); - theBindings = JaxRsMethodBindings.getMethodBindings(this, theProviderClass); - } - - /** - * This constructor takes in an explicit interface class. This subclass - * should be identical to the class being constructed but is given - * explicitly in order to avoid issues with proxy classes in a jee - * environment. - * - * @param ctx the {@link FhirContext} instance. - * @param theProviderClass the interface of the class - */ - protected AbstractJaxRsResourceProvider(final FhirContext ctx, final Class theProviderClass) { - super(ctx); - theBindings = JaxRsMethodBindings.getMethodBindings(this, theProviderClass); - } + /** the method bindings for this class */ + private final JaxRsMethodBindings theBindings; /** - * The base for request for a resource provider has the following form:
    - * {@link AbstractJaxRsResourceProvider#getBaseForServer() - * getBaseForServer()} + "/" + - * {@link AbstractJaxRsResourceProvider#getResourceType() getResourceType()} - * .{@link java.lang.Class#getSimpleName() getSimpleName()} - */ - @Override - public String getBaseForRequest() { - try { - return new URL(getUriInfo().getBaseUri().toURL(), getResourceType().getSimpleName()).toExternalForm(); - } - catch (final Exception e) { - // cannot happen - return null; - } - } + * The default constructor. The method bindings are retrieved from the class + * being constructed. + */ + protected AbstractJaxRsResourceProvider() { + super(); + theBindings = JaxRsMethodBindings.getMethodBindings(this, getClass()); + } - /** - * Create a new resource with a server assigned id - * - * @param resource the body of the post method containing resource being created in a xml/json form - * @return the response - * @see https://www.hl7. org/fhir/http.html#create - */ - @POST - public Response create(final String resource) - throws IOException { - return execute(getResourceRequest(RequestTypeEnum.POST, RestOperationTypeEnum.CREATE).resource(resource)); - } + /** + * Provides the ability to specify the {@link FhirContext}. + * @param ctx the {@link FhirContext} instance. + */ + protected AbstractJaxRsResourceProvider(final FhirContext ctx) { + super(ctx); + theBindings = JaxRsMethodBindings.getMethodBindings(this, getClass()); + } - /** - * Search the resource type based on some filter criteria - * - * @return the response - * @see https://www.hl7.org/fhir/http.html#search - */ - @POST - @Path("/_search") - public Response searchWithPost() - throws IOException { - return execute(getResourceRequest(RequestTypeEnum.POST, RestOperationTypeEnum.SEARCH_TYPE)); - } + /** + * This constructor takes in an explicit interface class. This subclass + * should be identical to the class being constructed but is given + * explicitly in order to avoid issues with proxy classes in a jee + * environment. + * + * @param theProviderClass the interface of the class + */ + protected AbstractJaxRsResourceProvider(final Class theProviderClass) { + super(); + theBindings = JaxRsMethodBindings.getMethodBindings(this, theProviderClass); + } - /** - * Search the resource type based on some filter criteria - * - * @return the response - * @see https://www.hl7.org/fhir/http.html#search - */ - @GET - public Response search() - throws IOException { - return execute(getResourceRequest(RequestTypeEnum.GET, RestOperationTypeEnum.SEARCH_TYPE)); - } + /** + * This constructor takes in an explicit interface class. This subclass + * should be identical to the class being constructed but is given + * explicitly in order to avoid issues with proxy classes in a jee + * environment. + * + * @param ctx the {@link FhirContext} instance. 
+ * @param theProviderClass the interface of the class + */ + protected AbstractJaxRsResourceProvider( + final FhirContext ctx, final Class theProviderClass) { + super(ctx); + theBindings = JaxRsMethodBindings.getMethodBindings(this, theProviderClass); + } - /** - * Update an existing resource based on the given condition - * @param resource the body contents for the put method - * @return the response - * @see https://www.hl7.org/fhir/http.html#update - */ - @PUT - public Response conditionalUpdate(final String resource) - throws IOException { - return execute(getResourceRequest(RequestTypeEnum.PUT, RestOperationTypeEnum.UPDATE).resource(resource)); - } + /** + * The base for request for a resource provider has the following form:
    + * {@link AbstractJaxRsResourceProvider#getBaseForServer() + * getBaseForServer()} + "/" + + * {@link AbstractJaxRsResourceProvider#getResourceType() getResourceType()} + * .{@link java.lang.Class#getSimpleName() getSimpleName()} + */ + @Override + public String getBaseForRequest() { + try { + return new URL(getUriInfo().getBaseUri().toURL(), getResourceType().getSimpleName()).toExternalForm(); + } catch (final Exception e) { + // cannot happen + return null; + } + } - /** - * Update an existing resource by its id (or create it if it is new) - * - * @param id the id of the resource - * @param resource the body contents for the put method - * @return the response - * @see https://www.hl7.org/fhir/http.html#update - */ - @PUT - @Path("/{id}") - public Response update(@PathParam("id") final String id, final String resource) - throws IOException { - return execute(getResourceRequest(RequestTypeEnum.PUT, RestOperationTypeEnum.UPDATE).id(id).resource(resource)); - } + /** + * Create a new resource with a server assigned id + * + * @param resource the body of the post method containing resource being created in a xml/json form + * @return the response + * @see https://www.hl7. org/fhir/http.html#create + */ + @POST + public Response create(final String resource) throws IOException { + return execute(getResourceRequest(RequestTypeEnum.POST, RestOperationTypeEnum.CREATE) + .resource(resource)); + } - /** - * Delete a resource based on the given condition - * - * @return the response - * @see https://www.hl7.org/fhir/http.html#delete - */ - @DELETE - public Response delete() - throws IOException { - return execute(getResourceRequest(RequestTypeEnum.DELETE, RestOperationTypeEnum.DELETE)); - } + /** + * Search the resource type based on some filter criteria + * + * @return the response + * @see https://www.hl7.org/fhir/http.html#search + */ + @POST + @Path("/_search") + public Response searchWithPost() throws IOException { + return execute(getResourceRequest(RequestTypeEnum.POST, RestOperationTypeEnum.SEARCH_TYPE)); + } - /** - * Delete a resource - * - * @param id the id of the resource to delete - * @return the response - * @see https://www.hl7.org/fhir/http.html#delete - */ - @DELETE - @Path("/{id}") - public Response delete(@PathParam("id") final String id) - throws IOException { - return execute(getResourceRequest(RequestTypeEnum.DELETE, RestOperationTypeEnum.DELETE).id(id)); - } + /** + * Search the resource type based on some filter criteria + * + * @return the response + * @see https://www.hl7.org/fhir/http.html#search + */ + @GET + public Response search() throws IOException { + return execute(getResourceRequest(RequestTypeEnum.GET, RestOperationTypeEnum.SEARCH_TYPE)); + } - /** - * Read the current state of the resource - * - * @param id the id of the resource to read - * @return the response - * @see https://www.hl7.org/fhir/http.html#read - */ - @GET - @Path("/{id : ((?!_history).)*}") - public Response find(@PathParam("id") final String id) - throws IOException { - return execute(getResourceRequest(RequestTypeEnum.GET, RestOperationTypeEnum.READ).id(id)); - } + /** + * Update an existing resource based on the given condition + * @param resource the body contents for the put method + * @return the response + * @see https://www.hl7.org/fhir/http.html#update + */ + @PUT + public Response conditionalUpdate(final String resource) throws IOException { + return execute(getResourceRequest(RequestTypeEnum.PUT, RestOperationTypeEnum.UPDATE) + .resource(resource)); + } - /** - * Execute a 
custom operation - * - * @param resource the resource to create - * @param requestType the type of request - * @param id the id of the resource on which to perform the operation - * @param operationName the name of the operation to execute - * @param operationType the rest operation type - * @return the response - * @see https://www.hl7.org/fhir/operations.html - */ - protected Response customOperation(final String resource, final RequestTypeEnum requestType, final String id, - final String operationName, final RestOperationTypeEnum operationType) - throws IOException { - final Builder request = getResourceRequest(requestType, operationType).resource(resource).id(id); - return execute(request, operationName); - } + /** + * Update an existing resource by its id (or create it if it is new) + * + * @param id the id of the resource + * @param resource the body contents for the put method + * @return the response + * @see https://www.hl7.org/fhir/http.html#update + */ + @PUT + @Path("/{id}") + public Response update(@PathParam("id") final String id, final String resource) throws IOException { + return execute(getResourceRequest(RequestTypeEnum.PUT, RestOperationTypeEnum.UPDATE) + .id(id) + .resource(resource)); + } - /** - * Retrieve a version of a resource - * - * @param id the id of the resource - * @param version the version of the resource - * @return the response - * @see https://www.hl7.org/fhir/http.html#history - */ - @GET - @Path("/{id}/_history/{version}") - public Response findVersion(@PathParam("id") final String id, @PathParam("version") final String version) - throws IOException { - final Builder theRequest = getResourceRequest(RequestTypeEnum.GET, RestOperationTypeEnum.VREAD).id(id).version(version); - return execute(theRequest); - } + /** + * Delete a resource based on the given condition + * + * @return the response + * @see https://www.hl7.org/fhir/http.html#delete + */ + @DELETE + public Response delete() throws IOException { + return execute(getResourceRequest(RequestTypeEnum.DELETE, RestOperationTypeEnum.DELETE)); + } + + /** + * Delete a resource + * + * @param id the id of the resource to delete + * @return the response + * @see https://www.hl7.org/fhir/http.html#delete + */ + @DELETE + @Path("/{id}") + public Response delete(@PathParam("id") final String id) throws IOException { + return execute(getResourceRequest(RequestTypeEnum.DELETE, RestOperationTypeEnum.DELETE) + .id(id)); + } + + /** + * Read the current state of the resource + * + * @param id the id of the resource to read + * @return the response + * @see https://www.hl7.org/fhir/http.html#read + */ + @GET + @Path("/{id : ((?!_history).)*}") + public Response find(@PathParam("id") final String id) throws IOException { + return execute(getResourceRequest(RequestTypeEnum.GET, RestOperationTypeEnum.READ) + .id(id)); + } + + /** + * Execute a custom operation + * + * @param resource the resource to create + * @param requestType the type of request + * @param id the id of the resource on which to perform the operation + * @param operationName the name of the operation to execute + * @param operationType the rest operation type + * @return the response + * @see https://www.hl7.org/fhir/operations.html + */ + protected Response customOperation( + final String resource, + final RequestTypeEnum requestType, + final String id, + final String operationName, + final RestOperationTypeEnum operationType) + throws IOException { + final Builder request = getResourceRequest(requestType, operationType) + .resource(resource) + 
.id(id); + return execute(request, operationName); + } + + /** + * Retrieve a version of a resource + * + * @param id the id of the resource + * @param version the version of the resource + * @return the response + * @see https://www.hl7.org/fhir/http.html#history + */ + @GET + @Path("/{id}/_history/{version}") + public Response findVersion(@PathParam("id") final String id, @PathParam("version") final String version) + throws IOException { + final Builder theRequest = getResourceRequest(RequestTypeEnum.GET, RestOperationTypeEnum.VREAD) + .id(id) + .version(version); + return execute(theRequest); + } /** * Retrieve the update history for a particular resource @@ -267,9 +284,9 @@ implements IRestfulServer, IResourceProvider { */ @GET @Path("/{id}/_history") - public Response historyForInstance(@PathParam("id") final String id) - throws IOException { - final Builder theRequest = getResourceRequest(RequestTypeEnum.GET, RestOperationTypeEnum.HISTORY_INSTANCE).id(id); + public Response historyForInstance(@PathParam("id") final String id) throws IOException { + final Builder theRequest = getResourceRequest(RequestTypeEnum.GET, RestOperationTypeEnum.HISTORY_INSTANCE) + .id(id); return execute(theRequest); } @@ -281,92 +298,90 @@ implements IRestfulServer, IResourceProvider { */ @GET @Path("/_history") - public Response historyForType() - throws IOException { + public Response historyForType() throws IOException { final Builder theRequest = getResourceRequest(RequestTypeEnum.GET, RestOperationTypeEnum.HISTORY_TYPE); return execute(theRequest); } /** - * Compartment Based Access - * - * @param id the resource to which the compartment belongs - * @param compartment the compartment - * @return the repsonse - * @see https://www.hl7.org/fhir/http.html#search - * @see https://www.hl7.org/fhir/compartments.html#compartment - */ - @GET - @Path("/{id}/{compartment : ((?!_history).)*}") - public Response findCompartment(@PathParam("id") final String id, @PathParam("compartment") final String compartment) - throws IOException { - final Builder theRequest = getResourceRequest(RequestTypeEnum.GET, RestOperationTypeEnum.SEARCH_TYPE).id(id).compartment( - compartment); - return execute(theRequest, compartment); - } + * Compartment Based Access + * + * @param id the resource to which the compartment belongs + * @param compartment the compartment + * @return the repsonse + * @see https://www.hl7.org/fhir/http.html#search + * @see https://www.hl7.org/fhir/compartments.html#compartment + */ + @GET + @Path("/{id}/{compartment : ((?!_history).)*}") + public Response findCompartment( + @PathParam("id") final String id, @PathParam("compartment") final String compartment) throws IOException { + final Builder theRequest = getResourceRequest(RequestTypeEnum.GET, RestOperationTypeEnum.SEARCH_TYPE) + .id(id) + .compartment(compartment); + return execute(theRequest, compartment); + } - @POST - @Path("/$validate") - public Response validate(final String resource) throws IOException { - return customOperation(resource, RequestTypeEnum.POST, null, "$validate", RestOperationTypeEnum.EXTENDED_OPERATION_TYPE); - } - - /** - * Execute the method described by the requestBuilder and methodKey - * - * @param theRequestBuilder the requestBuilder that contains the information about the request - * @param methodKey the key determining the method to be executed - * @return the response - */ - private Response execute(final Builder theRequestBuilder, final String methodKey) - throws IOException { - final JaxRsRequest theRequest = 
theRequestBuilder.build(); - final BaseMethodBinding method = getBinding(theRequest.getRestOperationType(), methodKey); - try { - return (Response) method.invokeServer(this, theRequest); - } - catch (final Throwable theException) { - return handleException(theRequest, theException); - } - } + @POST + @Path("/$validate") + public Response validate(final String resource) throws IOException { + return customOperation( + resource, RequestTypeEnum.POST, null, "$validate", RestOperationTypeEnum.EXTENDED_OPERATION_TYPE); + } - /** - * Execute the method described by the requestBuilder - * - * @param theRequestBuilder the requestBuilder that contains the information about the request - * @return the response - */ - private Response execute(final Builder theRequestBuilder) - throws IOException { - return execute(theRequestBuilder, JaxRsMethodBindings.DEFAULT_METHOD_KEY); - } + /** + * Execute the method described by the requestBuilder and methodKey + * + * @param theRequestBuilder the requestBuilder that contains the information about the request + * @param methodKey the key determining the method to be executed + * @return the response + */ + private Response execute(final Builder theRequestBuilder, final String methodKey) throws IOException { + final JaxRsRequest theRequest = theRequestBuilder.build(); + final BaseMethodBinding method = getBinding(theRequest.getRestOperationType(), methodKey); + try { + return (Response) method.invokeServer(this, theRequest); + } catch (final Throwable theException) { + return handleException(theRequest, theException); + } + } - /** - * Return the method binding for the given rest operation - * - * @param restOperation the rest operation to retrieve - * @param theBindingKey the key determining the method to be executed (needed for e.g. custom operation) - * @return - */ - protected BaseMethodBinding getBinding(final RestOperationTypeEnum restOperation, final String theBindingKey) { - return getBindings().getBinding(restOperation, theBindingKey); - } + /** + * Execute the method described by the requestBuilder + * + * @param theRequestBuilder the requestBuilder that contains the information about the request + * @return the response + */ + private Response execute(final Builder theRequestBuilder) throws IOException { + return execute(theRequestBuilder, JaxRsMethodBindings.DEFAULT_METHOD_KEY); + } - /** - * Default: no paging provider - */ - @Override - public IPagingProvider getPagingProvider() { - return null; - } + /** + * Return the method binding for the given rest operation + * + * @param restOperation the rest operation to retrieve + * @param theBindingKey the key determining the method to be executed (needed for e.g. 
custom operation) + * @return + */ + protected BaseMethodBinding getBinding(final RestOperationTypeEnum restOperation, final String theBindingKey) { + return getBindings().getBinding(restOperation, theBindingKey); + } - /** - * Default: BundleInclusionRule.BASED_ON_INCLUDES - */ - @Override - public BundleInclusionRule getBundleInclusionRule() { - return BundleInclusionRule.BASED_ON_INCLUDES; - } + /** + * Default: no paging provider + */ + @Override + public IPagingProvider getPagingProvider() { + return null; + } + + /** + * Default: BundleInclusionRule.BASED_ON_INCLUDES + */ + @Override + public BundleInclusionRule getBundleInclusionRule() { + return BundleInclusionRule.BASED_ON_INCLUDES; + } @Override public PreferReturnEnum getDefaultPreferReturn() { @@ -374,28 +389,28 @@ implements IRestfulServer, IResourceProvider { } /** - * The resource type should return conform to the generic resource included - * in the topic - */ - @Override - public abstract Class getResourceType(); + * The resource type should return conform to the generic resource included + * in the topic + */ + @Override + public abstract Class getResourceType(); - /** - * Return the bindings defined in this resource provider - * - * @return the jax-rs method bindings - */ - public JaxRsMethodBindings getBindings() { - return theBindings; - } + /** + * Return the bindings defined in this resource provider + * + * @return the jax-rs method bindings + */ + public JaxRsMethodBindings getBindings() { + return theBindings; + } - /** - * Return the request builder based on the resource name for the server - * @param requestType the type of the request - * @param restOperation the rest operation type - * @return the requestbuilder - */ - private Builder getResourceRequest(final RequestTypeEnum requestType, final RestOperationTypeEnum restOperation) { - return getRequest(requestType, restOperation, getResourceType().getSimpleName()); - } + /** + * Return the request builder based on the resource name for the server + * @param requestType the type of the request + * @param restOperation the rest operation type + * @return the requestbuilder + */ + private Builder getResourceRequest(final RequestTypeEnum requestType, final RestOperationTypeEnum restOperation) { + return getRequest(requestType, restOperation, getResourceType().getSimpleName()); + } } diff --git a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/interceptor/JaxRsExceptionInterceptor.java b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/interceptor/JaxRsExceptionInterceptor.java index 2116c9ed52f..19b046cf2d9 100644 --- a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/interceptor/JaxRsExceptionInterceptor.java +++ b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/interceptor/JaxRsExceptionInterceptor.java @@ -19,19 +19,18 @@ */ package ca.uhn.fhir.jaxrs.server.interceptor; -import java.io.IOException; - -import javax.interceptor.AroundInvoke; -import javax.interceptor.InvocationContext; -import javax.servlet.ServletException; -import javax.ws.rs.core.Response; - import ca.uhn.fhir.jaxrs.server.AbstractJaxRsProvider; import ca.uhn.fhir.jaxrs.server.util.JaxRsRequest; import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.interceptor.ExceptionHandlingInterceptor; +import java.io.IOException; +import javax.interceptor.AroundInvoke; +import javax.interceptor.InvocationContext; +import 
javax.servlet.ServletException; +import javax.ws.rs.core.Response; + /** * An interceptor that catches the jax-rs exceptions * @@ -39,90 +38,90 @@ import ca.uhn.fhir.rest.server.interceptor.ExceptionHandlingInterceptor; */ public class JaxRsExceptionInterceptor { - /** the existing exception handler which is able to convert exception into responses*/ - private final ExceptionHandlingInterceptor exceptionHandler; + /** the existing exception handler which is able to convert exception into responses*/ + private final ExceptionHandlingInterceptor exceptionHandler; - /** - * The default constructor - */ - public JaxRsExceptionInterceptor() { - this.exceptionHandler = new ExceptionHandlingInterceptor(); - } + /** + * The default constructor + */ + public JaxRsExceptionInterceptor() { + this.exceptionHandler = new ExceptionHandlingInterceptor(); + } - /** - * A utility constructor for unit testing - * @param exceptionHandler the handler for the exception conversion - */ - JaxRsExceptionInterceptor(final ExceptionHandlingInterceptor exceptionHandler) { - this.exceptionHandler = exceptionHandler; - } + /** + * A utility constructor for unit testing + * @param exceptionHandler the handler for the exception conversion + */ + JaxRsExceptionInterceptor(final ExceptionHandlingInterceptor exceptionHandler) { + this.exceptionHandler = exceptionHandler; + } - /** - * This interceptor will catch all exception and convert them using the exceptionhandler - * @param ctx the invocation context - * @return the result - * @throws JaxRsResponseException an exception that can be handled by a jee container - */ - @AroundInvoke - public Object intercept(final InvocationContext ctx) - throws JaxRsResponseException { - try { - return ctx.proceed(); - } - catch (final Exception theException) { - final AbstractJaxRsProvider theServer = (AbstractJaxRsProvider) ctx.getTarget(); - throw convertException(theServer, theException); - } - } + /** + * This interceptor will catch all exception and convert them using the exceptionhandler + * @param ctx the invocation context + * @return the result + * @throws JaxRsResponseException an exception that can be handled by a jee container + */ + @AroundInvoke + public Object intercept(final InvocationContext ctx) throws JaxRsResponseException { + try { + return ctx.proceed(); + } catch (final Exception theException) { + final AbstractJaxRsProvider theServer = (AbstractJaxRsProvider) ctx.getTarget(); + throw convertException(theServer, theException); + } + } - /** - * This method convert an exception to a JaxRsResponseException - * @param theServer the provider - * @param theException the exception to convert - * @return JaxRsResponseException - */ - public JaxRsResponseException convertException(final AbstractJaxRsProvider theServer, final Throwable theException) { - if (theServer.withStackTrace()) { - exceptionHandler.setReturnStackTracesForExceptionTypes(Throwable.class); - } - final JaxRsRequest requestDetails = theServer.getRequest(null, null).build(); - final BaseServerResponseException convertedException = preprocessException(theException, requestDetails); - return new JaxRsResponseException(convertedException); - } + /** + * This method convert an exception to a JaxRsResponseException + * @param theServer the provider + * @param theException the exception to convert + * @return JaxRsResponseException + */ + public JaxRsResponseException convertException( + final AbstractJaxRsProvider theServer, final Throwable theException) { + if (theServer.withStackTrace()) { + 
exceptionHandler.setReturnStackTracesForExceptionTypes(Throwable.class); + } + final JaxRsRequest requestDetails = theServer.getRequest(null, null).build(); + final BaseServerResponseException convertedException = preprocessException(theException, requestDetails); + return new JaxRsResponseException(convertedException); + } - /** - * This method converts an exception into a response - * @param theRequest the request - * @param theException the thrown exception - * @return the response describing the error - * @throws IOException - */ - public Response convertExceptionIntoResponse(final JaxRsRequest theRequest, final JaxRsResponseException theException) - throws IOException { - return handleExceptionWithoutServletError(theRequest, theException); - } + /** + * This method converts an exception into a response + * @param theRequest the request + * @param theException the thrown exception + * @return the response describing the error + * @throws IOException + */ + public Response convertExceptionIntoResponse( + final JaxRsRequest theRequest, final JaxRsResponseException theException) throws IOException { + return handleExceptionWithoutServletError(theRequest, theException); + } - private BaseServerResponseException preprocessException(final Throwable theException, final JaxRsRequest requestDetails) { - try { - Throwable theExceptionToConvert = theException; - if (!(theException instanceof BaseServerResponseException) && (theException.getCause() instanceof BaseServerResponseException)) { - theExceptionToConvert = theException.getCause(); - } - return exceptionHandler.preProcessOutgoingException(requestDetails, theExceptionToConvert, null); - } - catch (final ServletException e) { - return new InternalErrorException(e); - } - } + private BaseServerResponseException preprocessException( + final Throwable theException, final JaxRsRequest requestDetails) { + try { + Throwable theExceptionToConvert = theException; + if (!(theException instanceof BaseServerResponseException) + && (theException.getCause() instanceof BaseServerResponseException)) { + theExceptionToConvert = theException.getCause(); + } + return exceptionHandler.preProcessOutgoingException(requestDetails, theExceptionToConvert, null); + } catch (final ServletException e) { + return new InternalErrorException(e); + } + } - private Response handleExceptionWithoutServletError(final JaxRsRequest theRequest, final BaseServerResponseException theException) - throws IOException { - try { - return (Response) exceptionHandler.handleException(theRequest, theException); - } - catch (final ServletException e) { - final BaseServerResponseException newException = preprocessException(new InternalErrorException(e), theRequest); - return handleExceptionWithoutServletError(theRequest, newException); - } - } + private Response handleExceptionWithoutServletError( + final JaxRsRequest theRequest, final BaseServerResponseException theException) throws IOException { + try { + return (Response) exceptionHandler.handleException(theRequest, theException); + } catch (final ServletException e) { + final BaseServerResponseException newException = + preprocessException(new InternalErrorException(e), theRequest); + return handleExceptionWithoutServletError(theRequest, newException); + } + } } diff --git a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/interceptor/JaxRsResponseException.java b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/interceptor/JaxRsResponseException.java index 76ff6e09f49..8b14284e530 100644 --- 
a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/interceptor/JaxRsResponseException.java +++ b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/interceptor/JaxRsResponseException.java @@ -19,27 +19,26 @@ */ package ca.uhn.fhir.jaxrs.server.interceptor; -import javax.ejb.ApplicationException; - import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; +import javax.ejb.ApplicationException; + /** * A JEE wrapper exception that will not force a rollback. - * + * * @author Peter Van Houte | peter.vanhoute@agfa.com | Agfa Healthcare */ -@ApplicationException(rollback=false) +@ApplicationException(rollback = false) public class JaxRsResponseException extends BaseServerResponseException { private static final long serialVersionUID = 1L; /** * Utility constructor - * + * * @param base the base exception */ public JaxRsResponseException(BaseServerResponseException base) { super(base.getStatusCode(), base.getMessage(), base.getCause(), base.getOperationOutcome()); } - } diff --git a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/util/JaxRsMethodBindings.java b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/util/JaxRsMethodBindings.java index 87ccddc5cad..300df8ad04d 100644 --- a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/util/JaxRsMethodBindings.java +++ b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/util/JaxRsMethodBindings.java @@ -36,75 +36,81 @@ import java.util.concurrent.ConcurrentHashMap; /** * Class that contains the method bindings defined by a ResourceProvider - * + * * @author Peter Van Houte | peter.vanhoute@agfa.com | Agfa Healthcare */ public class JaxRsMethodBindings { - + /** DEFAULT_METHOD_KEY="" */ public static final String DEFAULT_METHOD_KEY = ""; /** Static collection of bindings mapped to a class*/ - private static final ConcurrentHashMap, JaxRsMethodBindings> classBindings = new ConcurrentHashMap, JaxRsMethodBindings>(); + private static final ConcurrentHashMap, JaxRsMethodBindings> classBindings = + new ConcurrentHashMap, JaxRsMethodBindings>(); /** Static collection of operationBindings mapped to a class */ - private ConcurrentHashMap> operationBindings = new ConcurrentHashMap>(); - - /** - * The constructor - * @param theProvider the provider which is an implementation of the theProviderClass - * @param theProviderClass the class definition contaning the operations - */ - public JaxRsMethodBindings(AbstractJaxRsProvider theProvider, Class theProviderClass) { - List declaredMethodsForCurrentProvider = ReflectionUtil.getDeclaredMethods(theProviderClass); - declaredMethodsForCurrentProvider.addAll(ReflectionUtil.getDeclaredMethods(theProviderClass.getSuperclass())); - for (final Method m : declaredMethodsForCurrentProvider) { - final BaseMethodBinding foundMethodBinding = BaseMethodBinding.bindMethod(m, theProvider.getFhirContext(), theProvider); - if (foundMethodBinding == null) { - continue; - } - String bindingKey = getBindingKey(foundMethodBinding); - addMethodBinding(bindingKey, foundMethodBinding); - } - } + private ConcurrentHashMap> operationBindings = + new ConcurrentHashMap>(); + + /** + * The constructor + * @param theProvider the provider which is an implementation of the theProviderClass + * @param theProviderClass the class definition contaning the operations + */ + public JaxRsMethodBindings( + AbstractJaxRsProvider theProvider, Class theProviderClass) { + List declaredMethodsForCurrentProvider = 
ReflectionUtil.getDeclaredMethods(theProviderClass); + declaredMethodsForCurrentProvider.addAll(ReflectionUtil.getDeclaredMethods(theProviderClass.getSuperclass())); + for (final Method m : declaredMethodsForCurrentProvider) { + final BaseMethodBinding foundMethodBinding = + BaseMethodBinding.bindMethod(m, theProvider.getFhirContext(), theProvider); + if (foundMethodBinding == null) { + continue; + } + String bindingKey = getBindingKey(foundMethodBinding); + addMethodBinding(bindingKey, foundMethodBinding); + } + } /** * Get the key for the baseMethodBinding. This is:
     * <ul>
     * <li>the compartName for SearchMethodBindings
     * <li>the methodName for OperationMethodBindings
-    * <li>{@link #DEFAULT_METHOD_KEY} for all other MethodBindings
+    * <li>{@link #DEFAULT_METHOD_KEY} for all other MethodBindings
     * </ul>
    * @param theBinding the methodbinding * @return the key for the methodbinding. */ private String getBindingKey(final BaseMethodBinding theBinding) { if (theBinding instanceof OperationMethodBinding) { - return ((OperationMethodBinding) theBinding).getName(); + return ((OperationMethodBinding) theBinding).getName(); } else if (theBinding instanceof SearchMethodBinding) { - Search search = theBinding.getMethod().getAnnotation(Search.class); - return search.compartmentName(); + Search search = theBinding.getMethod().getAnnotation(Search.class); + return search.compartmentName(); } else { return DEFAULT_METHOD_KEY; } } - private void addMethodBinding(String key, BaseMethodBinding binding) { - ConcurrentHashMap mapByOperation = getMapForOperation(binding.getRestOperationType()); - if (mapByOperation.containsKey(key)) { - throw new IllegalArgumentException(Msg.code(597) + "Multiple Search Method Bindings Found : " + mapByOperation.get(key) + " -- " + binding.getMethod()); - } - mapByOperation.put(key, binding); - } + private void addMethodBinding(String key, BaseMethodBinding binding) { + ConcurrentHashMap mapByOperation = + getMapForOperation(binding.getRestOperationType()); + if (mapByOperation.containsKey(key)) { + throw new IllegalArgumentException(Msg.code(597) + "Multiple Search Method Bindings Found : " + + mapByOperation.get(key) + " -- " + binding.getMethod()); + } + mapByOperation.put(key, binding); + } /** * Get the map for the given operation type. If no map exists for this operation type, create a new hashmap for this * operation type and add it to the operation bindings. - * + * * @param operationType the operation type. * @return the map defined in the operation bindings */ private ConcurrentHashMap getMapForOperation(RestOperationTypeEnum operationType) { ConcurrentHashMap result = operationBindings.get(operationType); - if(result == null) { + if (result == null) { operationBindings.putIfAbsent(operationType, new ConcurrentHashMap()); return getMapForOperation(operationType); } else { @@ -112,33 +118,34 @@ public class JaxRsMethodBindings { } } - /** - * Get the binding - * - * @param operationType the type of operation - * @param theBindingKey the binding key - * @return the binding defined - * @throws NotImplementedOperationException cannot be found - */ - public BaseMethodBinding getBinding(RestOperationTypeEnum operationType, String theBindingKey) { - String bindingKey = StringUtils.defaultIfBlank(theBindingKey, DEFAULT_METHOD_KEY); + /** + * Get the binding + * + * @param operationType the type of operation + * @param theBindingKey the binding key + * @return the binding defined + * @throws NotImplementedOperationException cannot be found + */ + public BaseMethodBinding getBinding(RestOperationTypeEnum operationType, String theBindingKey) { + String bindingKey = StringUtils.defaultIfBlank(theBindingKey, DEFAULT_METHOD_KEY); ConcurrentHashMap map = getMapForOperation(operationType); - if(map == null || !map.containsKey(bindingKey)) { - throw new NotImplementedOperationException(Msg.code(598) + "Operation not implemented"); - } else { - return map.get(bindingKey); - } - } - + if (map == null || !map.containsKey(bindingKey)) { + throw new NotImplementedOperationException(Msg.code(598) + "Operation not implemented"); + } else { + return map.get(bindingKey); + } + } + /** * Get the method bindings for the given class. 
If this class is not yet contained in the classBindings, they will be added for this class - * + * * @param theProvider the implementation class * @param theProviderClass the provider class * @return the methodBindings for this class */ - public static JaxRsMethodBindings getMethodBindings(AbstractJaxRsProvider theProvider, Class theProviderClass) { - if(!getClassBindings().containsKey(theProviderClass)) { + public static JaxRsMethodBindings getMethodBindings( + AbstractJaxRsProvider theProvider, Class theProviderClass) { + if (!getClassBindings().containsKey(theProviderClass)) { JaxRsMethodBindings foundBindings = new JaxRsMethodBindings(theProvider, theProviderClass); getClassBindings().putIfAbsent(theProviderClass, foundBindings); } @@ -151,6 +158,4 @@ public class JaxRsMethodBindings { static ConcurrentHashMap, JaxRsMethodBindings> getClassBindings() { return classBindings; } - - } diff --git a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/util/JaxRsRequest.java b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/util/JaxRsRequest.java index 28d39761613..3fd75c52e6f 100644 --- a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/util/JaxRsRequest.java +++ b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/util/JaxRsRequest.java @@ -19,10 +19,10 @@ */ package ca.uhn.fhir.jaxrs.server.util; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jaxrs.server.AbstractJaxRsProvider; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.RequestTypeEnum; @@ -34,8 +34,6 @@ import ca.uhn.fhir.rest.server.method.ResourceParameter; import ca.uhn.fhir.util.UrlUtil; import org.apache.commons.lang3.StringUtils; -import javax.ws.rs.core.HttpHeaders; -import javax.ws.rs.core.MediaType; import java.io.IOException; import java.io.InputStream; import java.io.Reader; @@ -44,6 +42,8 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import javax.ws.rs.core.HttpHeaders; +import javax.ws.rs.core.MediaType; /** * The JaxRsRequest is a jax-rs specific implementation of the RequestDetails. 
@@ -65,8 +65,11 @@ public class JaxRsRequest extends RequestDetails { * @param requestType the request type * @param restOperation the operation type */ - public JaxRsRequest(AbstractJaxRsProvider server, String resourceString, RequestTypeEnum requestType, - RestOperationTypeEnum restOperation) { + public JaxRsRequest( + AbstractJaxRsProvider server, + String resourceString, + RequestTypeEnum requestType, + RestOperationTypeEnum restOperation) { super(server.getInterceptorService()); this.myHeaders = server.getHeaders(); this.myResourceString = resourceString; @@ -80,7 +83,7 @@ public class JaxRsRequest extends RequestDetails { @Override protected byte[] getByteStreamRequestContents() { return StringUtils.defaultString(myResourceString, "") - .getBytes(ResourceParameter.determineRequestCharset(this)); + .getBytes(ResourceParameter.determineRequestCharset(this)); } @Override @@ -185,8 +188,12 @@ public class JaxRsRequest extends RequestDetails { * @param theRestOperation the rest operation * @param theRequestUrl */ - public Builder(AbstractJaxRsProvider theServer, RequestTypeEnum theRequestType, - RestOperationTypeEnum theRestOperation, String theRequestUrl, String theResourceName) { + public Builder( + AbstractJaxRsProvider theServer, + RequestTypeEnum theRequestType, + RestOperationTypeEnum theRestOperation, + String theRequestUrl, + String theResourceName) { this.myServer = theServer; this.myRequestType = theRequestType; this.myRestOperation = theRestOperation; @@ -202,52 +209,65 @@ public class JaxRsRequest extends RequestDetails { public JaxRsRequest build() { JaxRsRequest result = new JaxRsRequest(myServer, myResource, myRequestType, myRestOperation); if ((StringUtils.isNotBlank(myVersion) || StringUtils.isNotBlank(myCompartment)) - && StringUtils.isBlank(myId)) { + && StringUtils.isBlank(myId)) { throw new InvalidRequestException(Msg.code(601) + "Don't know how to handle request path: " - + myServer.getUriInfo().getRequestUri().toASCIIString()); + + myServer.getUriInfo().getRequestUri().toASCIIString()); } - FhirVersionEnum fhirContextVersion = myServer.getFhirContext().getVersion().getVersion(); + FhirVersionEnum fhirContextVersion = + myServer.getFhirContext().getVersion().getVersion(); if (StringUtils.isNotBlank(myVersion)) { switch (fhirContextVersion) { case R4: - result.setId(new org.hl7.fhir.r4.model.IdType(myServer.getBaseForRequest(), UrlUtil.unescape(myId), UrlUtil.unescape(myVersion))); + result.setId(new org.hl7.fhir.r4.model.IdType( + myServer.getBaseForRequest(), UrlUtil.unescape(myId), UrlUtil.unescape(myVersion))); break; case DSTU3: - result.setId(new org.hl7.fhir.dstu3.model.IdType(myServer.getBaseForRequest(), UrlUtil.unescape(myId), UrlUtil.unescape(myVersion))); + result.setId(new org.hl7.fhir.dstu3.model.IdType( + myServer.getBaseForRequest(), UrlUtil.unescape(myId), UrlUtil.unescape(myVersion))); break; case DSTU2_1: - result.setId(new org.hl7.fhir.dstu2016may.model.IdType(myServer.getBaseForRequest(), UrlUtil.unescape(myId), UrlUtil.unescape(myVersion))); + result.setId(new org.hl7.fhir.dstu2016may.model.IdType( + myServer.getBaseForRequest(), UrlUtil.unescape(myId), UrlUtil.unescape(myVersion))); break; case DSTU2_HL7ORG: - result.setId(new org.hl7.fhir.dstu2.model.IdType(myServer.getBaseForRequest(), UrlUtil.unescape(myId), UrlUtil.unescape(myVersion))); + result.setId(new org.hl7.fhir.dstu2.model.IdType( + myServer.getBaseForRequest(), UrlUtil.unescape(myId), UrlUtil.unescape(myVersion))); break; case DSTU2: - result.setId(new 
ca.uhn.fhir.model.primitive.IdDt(myServer.getBaseForRequest(), UrlUtil.unescape(myId), UrlUtil.unescape(myVersion))); + result.setId(new ca.uhn.fhir.model.primitive.IdDt( + myServer.getBaseForRequest(), UrlUtil.unescape(myId), UrlUtil.unescape(myVersion))); break; default: - throw new ConfigurationException(Msg.code(602) + "Unsupported Fhir version: " + fhirContextVersion); + throw new ConfigurationException( + Msg.code(602) + "Unsupported Fhir version: " + fhirContextVersion); } } else if (StringUtils.isNotBlank(myId)) { switch (fhirContextVersion) { case R4: - result.setId(new org.hl7.fhir.r4.model.IdType(myServer.getBaseForRequest(), UrlUtil.unescape(myId))); + result.setId( + new org.hl7.fhir.r4.model.IdType(myServer.getBaseForRequest(), UrlUtil.unescape(myId))); break; case DSTU3: - result.setId(new org.hl7.fhir.dstu3.model.IdType(myServer.getBaseForRequest(), UrlUtil.unescape(myId))); + result.setId(new org.hl7.fhir.dstu3.model.IdType( + myServer.getBaseForRequest(), UrlUtil.unescape(myId))); break; case DSTU2_1: - result.setId(new org.hl7.fhir.dstu2016may.model.IdType(myServer.getBaseForRequest(), UrlUtil.unescape(myId))); + result.setId(new org.hl7.fhir.dstu2016may.model.IdType( + myServer.getBaseForRequest(), UrlUtil.unescape(myId))); break; case DSTU2_HL7ORG: - result.setId(new org.hl7.fhir.dstu2.model.IdType(myServer.getBaseForRequest(), UrlUtil.unescape(myId))); + result.setId(new org.hl7.fhir.dstu2.model.IdType( + myServer.getBaseForRequest(), UrlUtil.unescape(myId))); break; case DSTU2: - result.setId(new ca.uhn.fhir.model.primitive.IdDt(myServer.getBaseForRequest(), UrlUtil.unescape(myId))); + result.setId(new ca.uhn.fhir.model.primitive.IdDt( + myServer.getBaseForRequest(), UrlUtil.unescape(myId))); break; default: - throw new ConfigurationException(Msg.code(603) + "Unsupported Fhir version: " + fhirContextVersion); + throw new ConfigurationException( + Msg.code(603) + "Unsupported Fhir version: " + fhirContextVersion); } } @@ -271,7 +291,8 @@ public class JaxRsRequest extends RequestDetails { result.setId(new ca.uhn.fhir.model.primitive.IdDt(contentLocation)); break; default: - throw new ConfigurationException(Msg.code(604) + "Unsupported Fhir version: " + fhirContextVersion); + throw new ConfigurationException( + Msg.code(604) + "Unsupported Fhir version: " + fhirContextVersion); } } } diff --git a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/util/JaxRsResponse.java b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/util/JaxRsResponse.java index dc802ba3412..5d08b17f87e 100644 --- a/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/util/JaxRsResponse.java +++ b/hapi-fhir-jaxrsserver-base/src/main/java/ca/uhn/fhir/jaxrs/server/util/JaxRsResponse.java @@ -25,9 +25,6 @@ import ca.uhn.fhir.util.IoUtil; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Validate; -import javax.annotation.Nonnull; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.Response.ResponseBuilder; import java.io.ByteArrayOutputStream; import java.io.Closeable; import java.io.OutputStream; @@ -35,12 +32,15 @@ import java.io.StringWriter; import java.io.Writer; import java.util.List; import java.util.Map.Entry; +import javax.annotation.Nonnull; +import javax.ws.rs.core.Response; +import javax.ws.rs.core.Response.ResponseBuilder; import static org.apache.commons.lang3.StringUtils.isNotBlank; /** * The JaxRsResponse is a jax-rs specific implementation of the RestfulResponse. 
- * + * * @author Peter Van Houte | peter.vanhoute@agfa.com | Agfa Healthcare */ public class JaxRsResponse extends BaseRestfulResponse { @@ -53,7 +53,7 @@ public class JaxRsResponse extends BaseRestfulResponse { /** * The constructor - * + * * @param request the JaxRs Request */ public JaxRsResponse(JaxRsRequest request) { @@ -66,7 +66,8 @@ public class JaxRsResponse extends BaseRestfulResponse { */ @Nonnull @Override - public Writer getResponseWriter(int theStatusCode, String theContentType, String theCharset, boolean theRespondGzip) { + public Writer getResponseWriter( + int theStatusCode, String theContentType, String theCharset, boolean theRespondGzip) { Validate.isTrue(myWriter == null, "getResponseWriter() called multiple times"); Validate.isTrue(myOutputStream == null, "getResponseWriter() called after getResponseOutputStream()"); myWriter = new StringWriter(); @@ -96,7 +97,8 @@ public class JaxRsResponse extends BaseRestfulResponse { ResponseBuilder builder = buildResponse(myStatusCode); if (isNotBlank(myContentType)) { if (myWriter != null) { - String charContentType = myContentType + "; charset=" + StringUtils.defaultIfBlank(myCharset, Constants.CHARSET_NAME_UTF8); + String charContentType = myContentType + "; charset=" + + StringUtils.defaultIfBlank(myCharset, Constants.CHARSET_NAME_UTF8); builder.header(Constants.HEADER_CONTENT_TYPE, charContentType); builder.entity(myWriter.toString()); } else { @@ -122,5 +124,4 @@ public class JaxRsResponse extends BaseRestfulResponse { } return response; } - } diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/config/HapiFhirLocalContainerEntityManagerFactoryBean.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/config/HapiFhirLocalContainerEntityManagerFactoryBean.java index b04da6fcf0e..e00f858b49f 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/config/HapiFhirLocalContainerEntityManagerFactoryBean.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/config/HapiFhirLocalContainerEntityManagerFactoryBean.java @@ -19,8 +19,6 @@ */ package ca.uhn.fhir.jpa.config; -import ca.uhn.fhir.rest.api.Constants; -import ca.uhn.fhir.system.HapiSystemProperties; import com.google.common.base.Strings; import org.hibernate.cfg.AvailableSettings; import org.hibernate.query.criteria.LiteralHandlingMode; @@ -40,10 +38,11 @@ import java.util.Map; */ public class HapiFhirLocalContainerEntityManagerFactoryBean extends LocalContainerEntityManagerFactoryBean { - //https://stackoverflow.com/questions/57902388/how-to-inject-spring-beans-into-the-hibernate-envers-revisionlistener + // https://stackoverflow.com/questions/57902388/how-to-inject-spring-beans-into-the-hibernate-envers-revisionlistener ConfigurableListableBeanFactory myConfigurableListableBeanFactory; - public HapiFhirLocalContainerEntityManagerFactoryBean(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) { + public HapiFhirLocalContainerEntityManagerFactoryBean( + ConfigurableListableBeanFactory theConfigurableListableBeanFactory) { myConfigurableListableBeanFactory = theConfigurableListableBeanFactory; } @@ -57,7 +56,8 @@ public class HapiFhirLocalContainerEntityManagerFactoryBean extends LocalContain } if (!retVal.containsKey(AvailableSettings.CONNECTION_HANDLING)) { - retVal.put(AvailableSettings.CONNECTION_HANDLING, PhysicalConnectionHandlingMode.DELAYED_ACQUISITION_AND_HOLD); + retVal.put( + AvailableSettings.CONNECTION_HANDLING, PhysicalConnectionHandlingMode.DELAYED_ACQUISITION_AND_HOLD); } /* @@ -79,7 +79,8 @@ public class 
HapiFhirLocalContainerEntityManagerFactoryBean extends LocalContain if (!retVal.containsKey(AvailableSettings.BATCH_VERSIONED_DATA)) { retVal.put(AvailableSettings.BATCH_VERSIONED_DATA, "true"); } - // Why is this here, you ask? LocalContainerEntityManagerFactoryBean actually clobbers the setting hibernate needs + // Why is this here, you ask? LocalContainerEntityManagerFactoryBean actually clobbers the setting hibernate + // needs // in order to be able to resolve beans, so we add it back in manually here if (!retVal.containsKey(AvailableSettings.BEAN_CONTAINER)) { retVal.put(AvailableSettings.BEAN_CONTAINER, new SpringBeanContainer(myConfigurableListableBeanFactory)); @@ -110,6 +111,4 @@ public class HapiFhirLocalContainerEntityManagerFactoryBean extends LocalContain retVal.put(thePropertyName, String.join(",", listeners)); } } - - } diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/sched/HapiJob.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/sched/HapiJob.java index e8561f3c582..95635c26d42 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/sched/HapiJob.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/sched/HapiJob.java @@ -23,5 +23,4 @@ import org.quartz.DisallowConcurrentExecution; import org.quartz.Job; @DisallowConcurrentExecution -public interface HapiJob extends Job { -} +public interface HapiJob extends Job {} diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/sched/ISchedulerService.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/sched/ISchedulerService.java index 81d3368bb99..972673802b9 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/sched/ISchedulerService.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/sched/ISchedulerService.java @@ -69,6 +69,7 @@ public interface ISchedulerService { /** * @return true if this server supports clustered scheduling */ - - default boolean isClusteredSchedulingEnabled() { return false; } + default boolean isClusteredSchedulingEnabled() { + return false; + } } diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/sched/ScheduledJobDefinition.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/sched/ScheduledJobDefinition.java index 2925843c801..a741ada0321 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/sched/ScheduledJobDefinition.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/model/sched/ScheduledJobDefinition.java @@ -82,10 +82,10 @@ public class ScheduledJobDefinition { @Override public String toString() { return new ToStringBuilder(this) - .append("myJobClass", myJobClass) - .append("myId", myId) - .append("myGroup", myGroup) - .toString(); + .append("myJobClass", myJobClass) + .append("myId", myId) + .append("myGroup", myGroup) + .toString(); } public JobKey toJobKey() { diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/nickname/NicknameMap.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/nickname/NicknameMap.java index 1c19714cdd5..5399355c6f6 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/nickname/NicknameMap.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/nickname/NicknameMap.java @@ -19,7 +19,6 @@ */ package ca.uhn.fhir.jpa.nickname; -import javax.annotation.Nonnull; import java.io.BufferedReader; import java.io.IOException; import java.io.Reader; @@ -28,6 +27,7 @@ import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; +import javax.annotation.Nonnull; class NicknameMap { private final Map> myFormalToNick = new HashMap<>(); diff --git 
a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/nickname/NicknameSvc.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/nickname/NicknameSvc.java index 023154292b2..cc4c2d37df9 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/nickname/NicknameSvc.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/nickname/NicknameSvc.java @@ -26,7 +26,6 @@ import org.slf4j.LoggerFactory; import org.springframework.core.io.ClassPathResource; import org.springframework.core.io.Resource; -import javax.annotation.Nonnull; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; @@ -35,6 +34,7 @@ import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Set; +import javax.annotation.Nonnull; /** * Nickname service is used to load nicknames @@ -51,8 +51,7 @@ public class NicknameSvc implements INicknameSvc { private Resource myNicknameResource; - public NicknameSvc() { - } + public NicknameSvc() {} public void setNicknameResource(Resource theNicknameResource) { myNicknameResource = theNicknameResource; diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/sched/AutowiringSpringBeanJobFactory.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/sched/AutowiringSpringBeanJobFactory.java index ba95ffc50d6..207bb5c039d 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/sched/AutowiringSpringBeanJobFactory.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/sched/AutowiringSpringBeanJobFactory.java @@ -51,7 +51,13 @@ public class AutowiringSpringBeanJobFactory extends SpringBeanJobFactory impleme String next = toString(bundle.getNextFireTime()); String fireInstanceId = bundle.getTrigger().getFireInstanceId(); JobKey key = bundle.getJobDetail().getKey(); - ourLog.trace("Firing job[{}] ID[{}] - Previous[{}] Scheduled[{}] Next[{}]", key, fireInstanceId, prev, scheduled, next); + ourLog.trace( + "Firing job[{}] ID[{}] - Previous[{}] Scheduled[{}] Next[{}]", + key, + fireInstanceId, + prev, + scheduled, + next); Object job = super.createJobInstance(bundle); myBeanFactory.autowireBean(job); diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/sched/BaseHapiScheduler.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/sched/BaseHapiScheduler.java index 2d82959f9e5..6a514116e9c 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/sched/BaseHapiScheduler.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/sched/BaseHapiScheduler.java @@ -43,11 +43,11 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.scheduling.quartz.SchedulerFactoryBean; -import javax.annotation.Nonnull; import java.util.Properties; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public abstract class BaseHapiScheduler implements IHapiScheduler { private static final Logger ourLog = LoggerFactory.getLogger(BaseHapiScheduler.class); @@ -67,12 +67,10 @@ public abstract class BaseHapiScheduler implements IHapiScheduler { mySpringBeanJobFactory = theSpringBeanJobFactory; } - void setInstanceName(String theInstanceName) { myInstanceName = theInstanceName; } - int nextSchedulerId() { return ourNextSchedulerId.getAndIncrement(); } @@ -102,7 +100,8 @@ public abstract class BaseHapiScheduler implements IHapiScheduler { protected void setProperties() { addProperty("org.quartz.threadPool.threadCount", "4"); - myProperties.setProperty(StdSchedulerFactory.PROP_SCHED_INSTANCE_NAME, myInstanceName + "-" + nextSchedulerId()); + 
myProperties.setProperty( + StdSchedulerFactory.PROP_SCHED_INSTANCE_NAME, myInstanceName + "-" + nextSchedulerId()); addProperty("org.quartz.threadPool.threadNamePrefix", getThreadPrefix()); } @@ -180,18 +179,18 @@ public abstract class BaseHapiScheduler implements IHapiScheduler { TriggerKey triggerKey = theJobDefinition.toTriggerKey(); JobDetailImpl jobDetail = buildJobDetail(theJobDefinition); - ScheduleBuilder schedule = SimpleScheduleBuilder - .simpleSchedule() - .withIntervalInMilliseconds(theIntervalMillis) - .withMisfireHandlingInstructionIgnoreMisfires()//We ignore misfires in cases of multiple JVMs each trying to fire. - .repeatForever(); + ScheduleBuilder schedule = SimpleScheduleBuilder.simpleSchedule() + .withIntervalInMilliseconds(theIntervalMillis) + .withMisfireHandlingInstructionIgnoreMisfires() // We ignore misfires in cases of multiple JVMs each + // trying to fire. + .repeatForever(); Trigger trigger = TriggerBuilder.newTrigger() - .forJob(jobDetail) - .withIdentity(triggerKey) - .startNow() - .withSchedule(schedule) - .build(); + .forJob(jobDetail) + .withIdentity(triggerKey) + .startNow() + .withSchedule(schedule) + .build(); Set triggers = Sets.newHashSet(trigger); try { @@ -200,7 +199,6 @@ public abstract class BaseHapiScheduler implements IHapiScheduler { ourLog.error("Failed to schedule job", e); throw new InternalErrorException(Msg.code(1638) + e); } - } @Nonnull diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/sched/BaseSchedulerServiceImpl.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/sched/BaseSchedulerServiceImpl.java index 419680136fe..58672a138a2 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/sched/BaseSchedulerServiceImpl.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/sched/BaseSchedulerServiceImpl.java @@ -38,10 +38,10 @@ import org.springframework.context.event.ContextRefreshedEvent; import org.springframework.context.event.EventListener; import org.springframework.core.env.Environment; -import javax.annotation.PostConstruct; import java.util.Collection; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; +import javax.annotation.PostConstruct; /** * This class provides task scheduling for the entire module using the Quartz library. 
@@ -74,8 +74,10 @@ public abstract class BaseSchedulerServiceImpl implements ISchedulerService { @Autowired private Environment myEnvironment; + @Autowired private ApplicationContext myApplicationContext; + @Autowired protected AutowiringSpringBeanJobFactory mySchedulerJobFactory; @@ -169,7 +171,8 @@ public abstract class BaseSchedulerServiceImpl implements ISchedulerService { } private void scheduleJobs() { - Collection values = myApplicationContext.getBeansOfType(IHasScheduledJobs.class).values(); + Collection values = + myApplicationContext.getBeansOfType(IHasScheduledJobs.class).values(); ourLog.info("Scheduling {} jobs in {}", values.size(), myApplicationContext.getId()); values.forEach(t -> t.scheduleJobs(this)); } @@ -205,7 +208,11 @@ public abstract class BaseSchedulerServiceImpl implements ISchedulerService { scheduleJob("clustered", myClusteredScheduler, theIntervalMillis, theJobDefinition); } - private void scheduleJob(String theInstanceName, IHapiScheduler theScheduler, long theIntervalMillis, ScheduledJobDefinition theJobDefinition) { + private void scheduleJob( + String theInstanceName, + IHapiScheduler theScheduler, + long theIntervalMillis, + ScheduledJobDefinition theJobDefinition) { if (isSchedulingDisabled()) { return; } @@ -213,7 +220,11 @@ public abstract class BaseSchedulerServiceImpl implements ISchedulerService { assert theJobDefinition.getId() != null; assert theJobDefinition.getJobClass() != null; - ourLog.info("Scheduling {} job {} with interval {}", theInstanceName, theJobDefinition.getId(), StopWatch.formatMillis(theIntervalMillis)); + ourLog.info( + "Scheduling {} job {} with interval {}", + theInstanceName, + theJobDefinition.getId(), + StopWatch.formatMillis(theIntervalMillis)); defaultGroup(theJobDefinition); theScheduler.scheduleJob(theIntervalMillis, theJobDefinition); } diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/sched/HapiNullScheduler.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/sched/HapiNullScheduler.java index b87454ce32e..4d16e66046a 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/sched/HapiNullScheduler.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/sched/HapiNullScheduler.java @@ -37,14 +37,10 @@ public class HapiNullScheduler implements IHapiScheduler { } @Override - public void start() { - - } + public void start() {} @Override - public void shutdown() { - - } + public void shutdown() {} @Override public boolean isStarted() { @@ -52,14 +48,10 @@ public class HapiNullScheduler implements IHapiScheduler { } @Override - public void clear() throws SchedulerException { - - } + public void clear() throws SchedulerException {} @Override - public void logStatusForUnitTest() { - - } + public void logStatusForUnitTest() {} @Override public void scheduleJob(long theIntervalMillis, ScheduledJobDefinition theJobDefinition) { diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/DerbyTenSevenHapiFhirDialect.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/DerbyTenSevenHapiFhirDialect.java index 2ed4c8d3c54..b83b99a13d9 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/DerbyTenSevenHapiFhirDialect.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/DerbyTenSevenHapiFhirDialect.java @@ -38,12 +38,14 @@ public class DerbyTenSevenHapiFhirDialect extends DerbyTenSevenDialect { protected String doExtractConstraintName(SQLException theSqlException) throws NumberFormatException { switch (theSqlException.getSQLState()) { case "23505": - return this.extractUsingTemplate("unique or primary key 
constraint or unique index identified by '", "'", theSqlException.getMessage()); + return this.extractUsingTemplate( + "unique or primary key constraint or unique index identified by '", + "'", + theSqlException.getMessage()); default: return null; } } }; } - } diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/TestUtil.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/TestUtil.java index c1a04fccac7..6d6c134a4bf 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/TestUtil.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/util/TestUtil.java @@ -33,7 +33,6 @@ public class TestUtil { super(); } - public static InstantType getTimestamp(IBaseResource resource) { return new InstantType(new Date(resource.getMeta().getLastUpdated().getTime())); } @@ -41,6 +40,4 @@ public class TestUtil { public static void sleepOneClick() { ca.uhn.fhir.util.TestUtil.sleepAtLeast(1, false); } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch2/JobInstanceUtil.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch2/JobInstanceUtil.java index 97f5ce63912..156eb22028d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch2/JobInstanceUtil.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch2/JobInstanceUtil.java @@ -69,7 +69,8 @@ class JobInstanceUtil { * @param theJobInstance the job * @param theJobInstanceEntity the target entity */ - public static void fromInstanceToEntity(@Nonnull JobInstance theJobInstance, @Nonnull Batch2JobInstanceEntity theJobInstanceEntity) { + public static void fromInstanceToEntity( + @Nonnull JobInstance theJobInstance, @Nonnull Batch2JobInstanceEntity theJobInstanceEntity) { theJobInstanceEntity.setId(theJobInstance.getInstanceId()); theJobInstanceEntity.setDefinitionId(theJobInstance.getJobDefinitionId()); theJobInstanceEntity.setDefinitionVersion(theJobInstance.getJobDefinitionVersion()); @@ -81,7 +82,8 @@ class JobInstanceUtil { theJobInstanceEntity.setEndTime(theJobInstance.getEndTime()); theJobInstanceEntity.setUpdateTime(theJobInstance.getUpdateTime()); theJobInstanceEntity.setCombinedRecordsProcessed(theJobInstance.getCombinedRecordsProcessed()); - theJobInstanceEntity.setCombinedRecordsProcessedPerSecond(theJobInstance.getCombinedRecordsProcessedPerSecond()); + theJobInstanceEntity.setCombinedRecordsProcessedPerSecond( + theJobInstance.getCombinedRecordsProcessedPerSecond()); theJobInstanceEntity.setTotalElapsedMillis(theJobInstance.getTotalElapsedMillis()); theJobInstanceEntity.setWorkChunksPurged(theJobInstance.isWorkChunksPurged()); theJobInstanceEntity.setProgress(theJobInstance.getProgress()); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch2/JpaBatch2Config.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch2/JpaBatch2Config.java index 8951920a26e..c4803afcf86 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch2/JpaBatch2Config.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch2/JpaBatch2Config.java @@ -25,41 +25,44 @@ import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig; import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository; import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository; import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService; -import ca.uhn.fhir.system.HapiSystemProperties; -import ca.uhn.fhir.util.ProxyUtil; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import 
org.springframework.context.annotation.Import; import org.springframework.context.annotation.Primary; -import org.springframework.transaction.PlatformTransactionManager; import javax.persistence.EntityManager; @Configuration -@Import({ - BulkExportJobConfig.class -}) +@Import({BulkExportJobConfig.class}) public class JpaBatch2Config extends BaseBatch2Config { @Bean - public IJobPersistence batch2JobInstancePersister(IBatch2JobInstanceRepository theJobInstanceRepository, IBatch2WorkChunkRepository theWorkChunkRepository, IHapiTransactionService theTransactionService, EntityManager theEntityManager) { - return new JpaJobPersistenceImpl(theJobInstanceRepository, theWorkChunkRepository, theTransactionService, theEntityManager); + public IJobPersistence batch2JobInstancePersister( + IBatch2JobInstanceRepository theJobInstanceRepository, + IBatch2WorkChunkRepository theWorkChunkRepository, + IHapiTransactionService theTransactionService, + EntityManager theEntityManager) { + return new JpaJobPersistenceImpl( + theJobInstanceRepository, theWorkChunkRepository, theTransactionService, theEntityManager); } @Primary @Bean - public IJobPersistence batch2JobInstancePersisterWrapper(IBatch2JobInstanceRepository theJobInstanceRepository, IBatch2WorkChunkRepository theWorkChunkRepository, IHapiTransactionService theTransactionService, EntityManager theEntityManager) { - IJobPersistence retVal = batch2JobInstancePersister(theJobInstanceRepository, theWorkChunkRepository, theTransactionService, theEntityManager); + public IJobPersistence batch2JobInstancePersisterWrapper( + IBatch2JobInstanceRepository theJobInstanceRepository, + IBatch2WorkChunkRepository theWorkChunkRepository, + IHapiTransactionService theTransactionService, + EntityManager theEntityManager) { + IJobPersistence retVal = batch2JobInstancePersister( + theJobInstanceRepository, theWorkChunkRepository, theTransactionService, theEntityManager); // Avoid H2 synchronization issues caused by // https://github.com/h2database/h2database/issues/1808 // TODO: Update 2023-03-14 - The bug above appears to be fixed. I'm going to try // disabing this and see if we can get away without it. 
If so, we can delete // this entirely -// if (HapiSystemProperties.isUnitTestModeEnabled()) { -// retVal = ProxyUtil.synchronizedProxy(IJobPersistence.class, retVal); -// } + // if (HapiSystemProperties.isUnitTestModeEnabled()) { + // retVal = ProxyUtil.synchronizedProxy(IJobPersistence.class, retVal); + // } return retVal; } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch2/JpaJobPersistenceImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch2/JpaJobPersistenceImpl.java index 647036f7319..19a060a915a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch2/JpaJobPersistenceImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch2/JpaJobPersistenceImpl.java @@ -49,11 +49,6 @@ import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.support.TransactionSynchronizationManager; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import javax.persistence.EntityManager; -import javax.persistence.LockModeType; -import javax.persistence.Query; import java.util.Date; import java.util.Iterator; import java.util.List; @@ -64,6 +59,11 @@ import java.util.UUID; import java.util.function.Consumer; import java.util.stream.Collectors; import java.util.stream.Stream; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import javax.persistence.EntityManager; +import javax.persistence.LockModeType; +import javax.persistence.Query; import static ca.uhn.fhir.batch2.coordinator.WorkChunkProcessor.MAX_CHUNK_ERROR_COUNT; import static ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity.ERROR_MSG_MAX_LENGTH; @@ -81,7 +81,11 @@ public class JpaJobPersistenceImpl implements IJobPersistence { /** * Constructor */ - public JpaJobPersistenceImpl(IBatch2JobInstanceRepository theJobInstanceRepository, IBatch2WorkChunkRepository theWorkChunkRepository, IHapiTransactionService theTransactionService, EntityManager theEntityManager) { + public JpaJobPersistenceImpl( + IBatch2JobInstanceRepository theJobInstanceRepository, + IBatch2WorkChunkRepository theWorkChunkRepository, + IHapiTransactionService theTransactionService, + EntityManager theEntityManager) { Validate.notNull(theJobInstanceRepository); Validate.notNull(theWorkChunkRepository); myJobInstanceRepository = theJobInstanceRepository; @@ -104,7 +108,8 @@ public class JpaJobPersistenceImpl implements IJobPersistence { entity.setStartTime(new Date()); entity.setStatus(WorkChunkStatusEnum.QUEUED); ourLog.debug("Create work chunk {}/{}/{}", entity.getInstanceId(), entity.getId(), entity.getTargetStepId()); - ourLog.trace("Create work chunk data {}/{}: {}", entity.getInstanceId(), entity.getId(), entity.getSerializedData()); + ourLog.trace( + "Create work chunk data {}/{}: {}", entity.getInstanceId(), entity.getId(), entity.getSerializedData()); myWorkChunkRepository.save(entity); return entity.getId(); } @@ -112,9 +117,12 @@ public class JpaJobPersistenceImpl implements IJobPersistence { @Override @Transactional(propagation = Propagation.REQUIRED) public Optional onWorkChunkDequeue(String theChunkId) { - // NOTE: Ideally, IN_PROGRESS wouldn't be allowed here. On chunk failure, we probably shouldn't be allowed. But how does re-run happen if k8s kills a processor mid run? 
- List priorStates = List.of(WorkChunkStatusEnum.QUEUED, WorkChunkStatusEnum.ERRORED, WorkChunkStatusEnum.IN_PROGRESS); - int rowsModified = myWorkChunkRepository.updateChunkStatusForStart(theChunkId, new Date(), WorkChunkStatusEnum.IN_PROGRESS, priorStates); + // NOTE: Ideally, IN_PROGRESS wouldn't be allowed here. On chunk failure, we probably shouldn't be allowed. + // But how does re-run happen if k8s kills a processor mid run? + List priorStates = + List.of(WorkChunkStatusEnum.QUEUED, WorkChunkStatusEnum.ERRORED, WorkChunkStatusEnum.IN_PROGRESS); + int rowsModified = myWorkChunkRepository.updateChunkStatusForStart( + theChunkId, new Date(), WorkChunkStatusEnum.IN_PROGRESS, priorStates); if (rowsModified == 0) { ourLog.info("Attempting to start chunk {} but it was already started.", theChunkId); return Optional.empty(); @@ -147,20 +155,25 @@ public class JpaJobPersistenceImpl implements IJobPersistence { @Override @Transactional(propagation = Propagation.REQUIRES_NEW) - public List fetchInstances(String theJobDefinitionId, Set theStatuses, Date theCutoff, Pageable thePageable) { - return toInstanceList(myJobInstanceRepository.findInstancesByJobIdAndStatusAndExpiry(theJobDefinitionId, theStatuses, theCutoff, thePageable)); + public List fetchInstances( + String theJobDefinitionId, Set theStatuses, Date theCutoff, Pageable thePageable) { + return toInstanceList(myJobInstanceRepository.findInstancesByJobIdAndStatusAndExpiry( + theJobDefinitionId, theStatuses, theCutoff, thePageable)); } @Override @Transactional(propagation = Propagation.REQUIRES_NEW) - public List fetchInstancesByJobDefinitionIdAndStatus(String theJobDefinitionId, Set theRequestedStatuses, int thePageSize, int thePageIndex) { + public List fetchInstancesByJobDefinitionIdAndStatus( + String theJobDefinitionId, Set theRequestedStatuses, int thePageSize, int thePageIndex) { PageRequest pageRequest = PageRequest.of(thePageIndex, thePageSize, Sort.Direction.ASC, CREATE_TIME); - return toInstanceList(myJobInstanceRepository.fetchInstancesByJobDefinitionIdAndStatus(theJobDefinitionId, theRequestedStatuses, pageRequest)); + return toInstanceList(myJobInstanceRepository.fetchInstancesByJobDefinitionIdAndStatus( + theJobDefinitionId, theRequestedStatuses, pageRequest)); } @Override @Transactional(propagation = Propagation.REQUIRES_NEW) - public List fetchInstancesByJobDefinitionId(String theJobDefinitionId, int thePageSize, int thePageIndex) { + public List fetchInstancesByJobDefinitionId( + String theJobDefinitionId, int thePageSize, int thePageIndex) { PageRequest pageRequest = PageRequest.of(thePageIndex, thePageSize, Sort.Direction.ASC, CREATE_TIME); return toInstanceList(myJobInstanceRepository.findInstancesByJobDefinitionId(theJobDefinitionId, pageRequest)); } @@ -168,11 +181,8 @@ public class JpaJobPersistenceImpl implements IJobPersistence { @Override @Transactional(propagation = Propagation.REQUIRES_NEW) public Page fetchJobInstances(JobInstanceFetchRequest theRequest) { - PageRequest pageRequest = PageRequest.of( - theRequest.getPageStart(), - theRequest.getBatchSize(), - theRequest.getSort() - ); + PageRequest pageRequest = + PageRequest.of(theRequest.getPageStart(), theRequest.getBatchSize(), theRequest.getSort()); String jobStatus = theRequest.getJobStatus(); if (Objects.equals(jobStatus, "")) { @@ -194,8 +204,8 @@ public class JpaJobPersistenceImpl implements IJobPersistence { @Nonnull public Optional fetchInstance(String theInstanceId) { return myTransactionService - .withSystemRequest() - .execute(() -> 
myJobInstanceRepository.findById(theInstanceId).map(this::toInstance)); + .withSystemRequest() + .execute(() -> myJobInstanceRepository.findById(theInstanceId).map(this::toInstance)); } @Override @@ -211,17 +221,9 @@ public class JpaJobPersistenceImpl implements IJobPersistence { if (statuses != null && !statuses.isEmpty()) { instanceEntities = myJobInstanceRepository.findInstancesByJobIdParamsAndStatus( - definitionId, - params, - statuses, - pageable - ); + definitionId, params, statuses, pageable); } else { - instanceEntities = myJobInstanceRepository.findInstancesByJobIdAndParams( - definitionId, - params, - pageable - ); + instanceEntities = myJobInstanceRepository.findInstancesByJobIdAndParams(definitionId, params, pageable); } return toInstanceList(instanceEntities); } @@ -231,14 +233,18 @@ public class JpaJobPersistenceImpl implements IJobPersistence { public List fetchInstances(int thePageSize, int thePageIndex) { // default sort is myCreateTime Asc PageRequest pageRequest = PageRequest.of(thePageIndex, thePageSize, Sort.Direction.ASC, CREATE_TIME); - return myJobInstanceRepository.findAll(pageRequest).stream().map(this::toInstance).collect(Collectors.toList()); + return myJobInstanceRepository.findAll(pageRequest).stream() + .map(this::toInstance) + .collect(Collectors.toList()); } @Override @Transactional(propagation = Propagation.REQUIRES_NEW) public List fetchRecentInstances(int thePageSize, int thePageIndex) { PageRequest pageRequest = PageRequest.of(thePageIndex, thePageSize, Sort.Direction.DESC, CREATE_TIME); - return myJobInstanceRepository.findAll(pageRequest).stream().map(this::toInstance).collect(Collectors.toList()); + return myJobInstanceRepository.findAll(pageRequest).stream() + .map(this::toInstance) + .collect(Collectors.toList()); } private WorkChunk toChunk(Batch2WorkChunkEntity theEntity) { @@ -254,14 +260,13 @@ public class JpaJobPersistenceImpl implements IJobPersistence { public WorkChunkStatusEnum onWorkChunkError(WorkChunkErrorEvent theParameters) { String chunkId = theParameters.getChunkId(); String errorMessage = truncateErrorMessage(theParameters.getErrorMsg()); - int changeCount = myWorkChunkRepository.updateChunkStatusAndIncrementErrorCountForEndError(chunkId, new Date(), errorMessage, WorkChunkStatusEnum.ERRORED); - Validate.isTrue(changeCount>0, "changed chunk matching %s", chunkId); + int changeCount = myWorkChunkRepository.updateChunkStatusAndIncrementErrorCountForEndError( + chunkId, new Date(), errorMessage, WorkChunkStatusEnum.ERRORED); + Validate.isTrue(changeCount > 0, "changed chunk matching %s", chunkId); - Query query = myEntityManager.createQuery( - "update Batch2WorkChunkEntity " + - "set myStatus = :failed " + - ",myErrorMessage = CONCAT('Too many errors: ', myErrorCount, '. Last error msg was ', myErrorMessage) " + - "where myId = :chunkId and myErrorCount > :maxCount"); + Query query = myEntityManager.createQuery("update Batch2WorkChunkEntity " + "set myStatus = :failed " + + ",myErrorMessage = CONCAT('Too many errors: ', myErrorCount, '. 
Last error msg was ', myErrorMessage) " + + "where myId = :chunkId and myErrorCount > :maxCount"); query.setParameter("chunkId", chunkId); query.setParameter("failed", WorkChunkStatusEnum.FAILED); query.setParameter("maxCount", MAX_CHUNK_ERROR_COUNT); @@ -279,13 +284,20 @@ public class JpaJobPersistenceImpl implements IJobPersistence { public void onWorkChunkFailed(String theChunkId, String theErrorMessage) { ourLog.info("Marking chunk {} as failed with message: {}", theChunkId, theErrorMessage); String errorMessage = truncateErrorMessage(theErrorMessage); - myWorkChunkRepository.updateChunkStatusAndIncrementErrorCountForEndError(theChunkId, new Date(), errorMessage, WorkChunkStatusEnum.FAILED); + myWorkChunkRepository.updateChunkStatusAndIncrementErrorCountForEndError( + theChunkId, new Date(), errorMessage, WorkChunkStatusEnum.FAILED); } @Override @Transactional public void onWorkChunkCompletion(WorkChunkCompletionEvent theEvent) { - myWorkChunkRepository.updateChunkStatusAndClearDataForEndSuccess(theEvent.getChunkId(), new Date(), theEvent.getRecordsProcessed(), theEvent.getRecoveredErrorCount(), WorkChunkStatusEnum.COMPLETED, theEvent.getRecoveredWarningMessage()); + myWorkChunkRepository.updateChunkStatusAndClearDataForEndSuccess( + theEvent.getChunkId(), + new Date(), + theEvent.getRecordsProcessed(), + theEvent.getRecoveredErrorCount(), + WorkChunkStatusEnum.COMPLETED, + theEvent.getRecoveredWarningMessage()); } @Nullable @@ -301,14 +313,16 @@ public class JpaJobPersistenceImpl implements IJobPersistence { } @Override - public void markWorkChunksWithStatusAndWipeData(String theInstanceId, List theChunkIds, WorkChunkStatusEnum theStatus, String theErrorMessage) { + public void markWorkChunksWithStatusAndWipeData( + String theInstanceId, List theChunkIds, WorkChunkStatusEnum theStatus, String theErrorMessage) { assert TransactionSynchronizationManager.isActualTransactionActive(); ourLog.debug("Marking all chunks for instance {} to status {}", theInstanceId, theStatus); String errorMessage = truncateErrorMessage(theErrorMessage); List> listOfListOfIds = ListUtils.partition(theChunkIds, 100); for (List idList : listOfListOfIds) { - myWorkChunkRepository.updateAllChunksForInstanceStatusClearDataAndSetError(idList, new Date(), theStatus, errorMessage); + myWorkChunkRepository.updateAllChunksForInstanceStatusClearDataAndSetError( + idList, new Date(), theStatus, errorMessage); } } @@ -322,35 +336,49 @@ public class JpaJobPersistenceImpl implements IJobPersistence { if (instance.get().getStatus().isEnded()) { return false; } - Set statusesForStep = myWorkChunkRepository.getDistinctStatusesForStep(theInstanceId, theCurrentStepId); + Set statusesForStep = + myWorkChunkRepository.getDistinctStatusesForStep(theInstanceId, theCurrentStepId); - ourLog.debug("Checking whether gated job can advanced to next step. [instanceId={}, currentStepId={}, statusesForStep={}]", theInstanceId, theCurrentStepId, statusesForStep); + ourLog.debug( + "Checking whether gated job can advanced to next step. 
[instanceId={}, currentStepId={}, statusesForStep={}]", + theInstanceId, + theCurrentStepId, + statusesForStep); return statusesForStep.isEmpty() || statusesForStep.equals(Set.of(WorkChunkStatusEnum.COMPLETED)); } - private void fetchChunks(String theInstanceId, boolean theIncludeData, int thePageSize, int thePageIndex, Consumer theConsumer) { + private void fetchChunks( + String theInstanceId, + boolean theIncludeData, + int thePageSize, + int thePageIndex, + Consumer theConsumer) { myTransactionService - .withSystemRequest() - .withPropagation(Propagation.REQUIRES_NEW) - .execute(() -> { - List chunks; - if (theIncludeData) { - chunks = myWorkChunkRepository.fetchChunks(PageRequest.of(thePageIndex, thePageSize), theInstanceId); - } else { - chunks = myWorkChunkRepository.fetchChunksNoData(PageRequest.of(thePageIndex, thePageSize), theInstanceId); - } - for (Batch2WorkChunkEntity chunk : chunks) { - theConsumer.accept(toChunk(chunk)); - } - }); + .withSystemRequest() + .withPropagation(Propagation.REQUIRES_NEW) + .execute(() -> { + List chunks; + if (theIncludeData) { + chunks = myWorkChunkRepository.fetchChunks( + PageRequest.of(thePageIndex, thePageSize), theInstanceId); + } else { + chunks = myWorkChunkRepository.fetchChunksNoData( + PageRequest.of(thePageIndex, thePageSize), theInstanceId); + } + for (Batch2WorkChunkEntity chunk : chunks) { + theConsumer.accept(toChunk(chunk)); + } + }); } @Override - public List fetchAllChunkIdsForStepWithStatus(String theInstanceId, String theStepId, WorkChunkStatusEnum theStatusEnum) { + public List fetchAllChunkIdsForStepWithStatus( + String theInstanceId, String theStepId, WorkChunkStatusEnum theStatusEnum) { return myTransactionService - .withSystemRequest() - .withPropagation(Propagation.REQUIRES_NEW) - .execute(() -> myWorkChunkRepository.fetchAllChunkIdsForStepWithStatus(theInstanceId, theStepId, theStatusEnum)); + .withSystemRequest() + .withPropagation(Propagation.REQUIRES_NEW) + .execute(() -> myWorkChunkRepository.fetchAllChunkIdsForStepWithStatus( + theInstanceId, theStepId, theStatusEnum)); } @Override @@ -358,23 +386,26 @@ public class JpaJobPersistenceImpl implements IJobPersistence { myJobInstanceRepository.updateInstanceUpdateTime(theInstanceId, new Date()); } - /** * Note: Not @Transactional because the transaction happens in a lambda that's called outside of this method's scope */ @Override public Iterator fetchAllWorkChunksIterator(String theInstanceId, boolean theWithData) { - return new PagingIterator<>((thePageIndex, theBatchSize, theConsumer) -> fetchChunks(theInstanceId, theWithData, theBatchSize, thePageIndex, theConsumer)); + return new PagingIterator<>((thePageIndex, theBatchSize, theConsumer) -> + fetchChunks(theInstanceId, theWithData, theBatchSize, thePageIndex, theConsumer)); } @Override public Stream fetchAllWorkChunksForStepStream(String theInstanceId, String theStepId) { - return myWorkChunkRepository.fetchChunksForStep(theInstanceId, theStepId).map(this::toChunk); + return myWorkChunkRepository + .fetchChunksForStep(theInstanceId, theStepId) + .map(this::toChunk); } @Override public boolean updateInstance(String theInstanceId, JobInstanceUpdateCallback theModifier) { - Batch2JobInstanceEntity instanceEntity = myEntityManager.find(Batch2JobInstanceEntity.class, theInstanceId, LockModeType.PESSIMISTIC_WRITE); + Batch2JobInstanceEntity instanceEntity = + myEntityManager.find(Batch2JobInstanceEntity.class, theInstanceId, LockModeType.PESSIMISTIC_WRITE); if (null == instanceEntity) { ourLog.error("No instance found 
with Id {}", theInstanceId); return false; @@ -411,9 +442,16 @@ public class JpaJobPersistenceImpl implements IJobPersistence { } @Override - public boolean markInstanceAsStatusWhenStatusIn(String theInstanceId, StatusEnum theStatusEnum, Set thePriorStates) { - int recordsChanged = myJobInstanceRepository.updateInstanceStatusIfIn(theInstanceId, theStatusEnum, thePriorStates); - ourLog.debug("Update job {} to status {} if in status {}: {}", theInstanceId, theStatusEnum, thePriorStates, recordsChanged>0); + public boolean markInstanceAsStatusWhenStatusIn( + String theInstanceId, StatusEnum theStatusEnum, Set thePriorStates) { + int recordsChanged = + myJobInstanceRepository.updateInstanceStatusIfIn(theInstanceId, theStatusEnum, thePriorStates); + ourLog.debug( + "Update job {} to status {} if in status {}: {}", + theInstanceId, + theStatusEnum, + thePriorStates, + recordsChanged > 0); return recordsChanged > 0; } @@ -431,12 +469,11 @@ public class JpaJobPersistenceImpl implements IJobPersistence { } else { Optional instance = fetchInstance(theInstanceId); if (instance.isPresent()) { - return JobOperationResultJson.newFailure(operationString, messagePrefix + " was already cancelled. Nothing to do."); + return JobOperationResultJson.newFailure( + operationString, messagePrefix + " was already cancelled. Nothing to do."); } else { return JobOperationResultJson.newFailure(operationString, messagePrefix + " not found."); } } } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/binstore/DatabaseBlobBinaryStorageSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/binstore/DatabaseBlobBinaryStorageSvcImpl.java index 606940e69d2..58936143305 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/binstore/DatabaseBlobBinaryStorageSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/binstore/DatabaseBlobBinaryStorageSvcImpl.java @@ -37,10 +37,6 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; -import javax.annotation.Nonnull; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.PersistenceContextType; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; @@ -48,20 +44,30 @@ import java.sql.Blob; import java.sql.SQLException; import java.util.Date; import java.util.Optional; +import javax.annotation.Nonnull; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; +import javax.persistence.PersistenceContextType; @Transactional public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl { @PersistenceContext(type = PersistenceContextType.TRANSACTION) private EntityManager myEntityManager; + @Autowired private IBinaryStorageEntityDao myBinaryStorageEntityDao; @Nonnull @Override @Transactional(propagation = Propagation.REQUIRED) - public StoredDetails storeBlob(IIdType theResourceId, String theBlobIdOrNull, String theContentType, - InputStream theInputStream, RequestDetails theRequestDetails) throws IOException { + public StoredDetails storeBlob( + IIdType theResourceId, + String theBlobIdOrNull, + String theContentType, + InputStream theInputStream, + RequestDetails theRequestDetails) + throws IOException { /* * Note on transactionality: This method used to have a propagation value of SUPPORTS and then do the actual @@ -97,33 +103,35 @@ public class 
DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl { myEntityManager.persist(entity); return new StoredDetails() - .setBlobId(id) - .setBytes(bytes) - .setPublished(publishedDate) - .setHash(hash) - .setContentType(theContentType); + .setBlobId(id) + .setBytes(bytes) + .setPublished(publishedDate) + .setHash(hash) + .setContentType(theContentType); } @Override public StoredDetails fetchBlobDetails(IIdType theResourceId, String theBlobId) { - Optional entityOpt = myBinaryStorageEntityDao.findByIdAndResourceId(theBlobId, theResourceId.toUnqualifiedVersionless().getValue()); + Optional entityOpt = myBinaryStorageEntityDao.findByIdAndResourceId( + theBlobId, theResourceId.toUnqualifiedVersionless().getValue()); if (entityOpt.isEmpty()) { return null; } BinaryStorageEntity entity = entityOpt.get(); return new StoredDetails() - .setBlobId(theBlobId) - .setContentType(entity.getBlobContentType()) - .setHash(entity.getHash()) - .setPublished(entity.getPublished()) - .setBytes(entity.getSize()); + .setBlobId(theBlobId) + .setContentType(entity.getBlobContentType()) + .setHash(entity.getHash()) + .setPublished(entity.getPublished()) + .setBytes(entity.getSize()); } @Override public boolean writeBlob(IIdType theResourceId, String theBlobId, OutputStream theOutputStream) throws IOException { - Optional entityOpt = myBinaryStorageEntityDao.findByIdAndResourceId(theBlobId, theResourceId.toUnqualifiedVersionless().getValue()); + Optional entityOpt = myBinaryStorageEntityDao.findByIdAndResourceId( + theBlobId, theResourceId.toUnqualifiedVersionless().getValue()); if (entityOpt.isEmpty()) { return false; } @@ -135,15 +143,19 @@ public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl { @Override public void expungeBlob(IIdType theResourceId, String theBlobId) { - Optional entityOpt = myBinaryStorageEntityDao.findByIdAndResourceId(theBlobId, theResourceId.toUnqualifiedVersionless().getValue()); - entityOpt.ifPresent(theBinaryStorageEntity -> myBinaryStorageEntityDao.deleteByPid(theBinaryStorageEntity.getBlobId())); + Optional entityOpt = myBinaryStorageEntityDao.findByIdAndResourceId( + theBlobId, theResourceId.toUnqualifiedVersionless().getValue()); + entityOpt.ifPresent( + theBinaryStorageEntity -> myBinaryStorageEntityDao.deleteByPid(theBinaryStorageEntity.getBlobId())); } @Override public byte[] fetchBlob(IIdType theResourceId, String theBlobId) throws IOException { BinaryStorageEntity entityOpt = myBinaryStorageEntityDao - .findByIdAndResourceId(theBlobId, theResourceId.toUnqualifiedVersionless().getValue()) - .orElseThrow(() -> new ResourceNotFoundException("Unknown blob ID: " + theBlobId + " for resource ID " + theResourceId)); + .findByIdAndResourceId( + theBlobId, theResourceId.toUnqualifiedVersionless().getValue()) + .orElseThrow(() -> new ResourceNotFoundException( + "Unknown blob ID: " + theBlobId + " for resource ID " + theResourceId)); return copyBlobToByteArray(entityOpt); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobConfig.java index 38f73e9c927..b5cfd37f796 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobConfig.java @@ -34,5 +34,4 @@ public class BulkExportJobConfig { public MdmExpansionCacheSvc mdmExpansionCacheSvc() { return new MdmExpansionCacheSvc(); } 
- } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkDataExportJobSchedulingHelperImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkDataExportJobSchedulingHelperImpl.java index de0b9bbe034..5d352926733 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkDataExportJobSchedulingHelperImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkDataExportJobSchedulingHelperImpl.java @@ -34,7 +34,6 @@ import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition; import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import ca.uhn.fhir.util.Batch2JobDefinitionConstants; import ca.uhn.fhir.util.JsonUtil; -import com.google.common.annotations.VisibleForTesting; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.time.DateUtils; import org.hl7.fhir.instance.model.api.IBaseBinary; @@ -49,15 +48,14 @@ import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.support.TransactionTemplate; -import javax.annotation.Nonnull; -import javax.annotation.PostConstruct; import java.time.LocalDateTime; import java.time.ZoneId; import java.util.Date; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; +import javax.annotation.Nonnull; +import javax.annotation.PostConstruct; import static org.slf4j.LoggerFactory.getLogger; @@ -72,7 +70,13 @@ public class BulkDataExportJobSchedulingHelperImpl implements IBulkDataExportJob private final IJobPersistence myJpaJobPersistence; private TransactionTemplate myTxTemplate; - public BulkDataExportJobSchedulingHelperImpl(DaoRegistry theDaoRegistry, PlatformTransactionManager theTxManager, JpaStorageSettings theDaoConfig, BulkExportHelperService theBulkExportHelperSvc, IJobPersistence theJpaJobPersistence, TransactionTemplate theTxTemplate) { + public BulkDataExportJobSchedulingHelperImpl( + DaoRegistry theDaoRegistry, + PlatformTransactionManager theTxManager, + JpaStorageSettings theDaoConfig, + BulkExportHelperService theBulkExportHelperSvc, + IJobPersistence theJpaJobPersistence, + TransactionTemplate theTxTemplate) { myDaoRegistry = theDaoRegistry; myTxManager = theTxManager; myDaoConfig = theDaoConfig; @@ -116,12 +120,11 @@ public class BulkDataExportJobSchedulingHelperImpl implements IBulkDataExportJob return; } - final List jobInstancesToDelete = myTxTemplate.execute(t -> - myJpaJobPersistence.fetchInstances(Batch2JobDefinitionConstants.BULK_EXPORT, + final List jobInstancesToDelete = myTxTemplate.execute(t -> myJpaJobPersistence.fetchInstances( + Batch2JobDefinitionConstants.BULK_EXPORT, StatusEnum.getEndedStatuses(), computeCutoffFromConfig(), - PageRequest.of(0, 50)) - ); + PageRequest.of(0, 50))); if (jobInstancesToDelete == null || jobInstancesToDelete.isEmpty()) { ourLog.debug("No batch 2 bulk export jobs found! 
Nothing to do!"); @@ -133,19 +136,24 @@ public class BulkDataExportJobSchedulingHelperImpl implements IBulkDataExportJob ourLog.info("Deleting batch 2 bulk export job: {}", jobInstance); myTxTemplate.execute(t -> { - final Optional optJobInstanceForInstanceId = myJpaJobPersistence.fetchInstance(jobInstance.getInstanceId()); + final Optional optJobInstanceForInstanceId = + myJpaJobPersistence.fetchInstance(jobInstance.getInstanceId()); if (optJobInstanceForInstanceId.isEmpty()) { - ourLog.error("Can't find job instance for ID: {} despite having retrieved it in the first step", jobInstance.getInstanceId()); + ourLog.error( + "Can't find job instance for ID: {} despite having retrieved it in the first step", + jobInstance.getInstanceId()); return null; } final JobInstance jobInstanceForInstanceId = optJobInstanceForInstanceId.get(); ourLog.info("Deleting bulk export job: {}", jobInstanceForInstanceId); - // We need to keep these for investigation but we also need a process to manually delete these jobs once we're done investigating + // We need to keep these for investigation but we also need a process to manually delete these jobs once + // we're done investigating if (StatusEnum.FAILED == jobInstanceForInstanceId.getStatus()) { - ourLog.info("skipping because the status is FAILED for ID: {}" + jobInstanceForInstanceId.getInstanceId()); + ourLog.info("skipping because the status is FAILED for ID: {}" + + jobInstanceForInstanceId.getInstanceId()); return null; } @@ -166,11 +174,15 @@ public class BulkDataExportJobSchedulingHelperImpl implements IBulkDataExportJob } private void purgeBinariesIfNeeded(JobInstance theJobInstanceForInstanceId, String theJobInstanceReportString) { - final Optional optBulkExportJobResults = getBulkExportJobResults(theJobInstanceReportString); + final Optional optBulkExportJobResults = + getBulkExportJobResults(theJobInstanceReportString); if (optBulkExportJobResults.isPresent()) { final BulkExportJobResults bulkExportJobResults = optBulkExportJobResults.get(); - ourLog.debug("job: {} resource type to binary ID: {}", theJobInstanceForInstanceId.getInstanceId(), bulkExportJobResults.getResourceTypeToBinaryIds()); + ourLog.debug( + "job: {} resource type to binary ID: {}", + theJobInstanceForInstanceId.getInstanceId(), + bulkExportJobResults.getResourceTypeToBinaryIds()); final Map> resourceTypeToBinaryIds = bulkExportJobResults.getResourceTypeToBinaryIds(); for (String resourceType : resourceTypeToBinaryIds.keySet()) { @@ -192,7 +204,8 @@ public class BulkDataExportJobSchedulingHelperImpl implements IBulkDataExportJob @Nonnull private Optional getBulkExportJobResults(String theJobInstanceReportString) { if (StringUtils.isBlank(theJobInstanceReportString)) { - ourLog.error(String.format("Cannot parse job report string because it's null or blank: %s", theJobInstanceReportString)); + ourLog.error(String.format( + "Cannot parse job report string because it's null or blank: %s", theJobInstanceReportString)); return Optional.empty(); } @@ -208,15 +221,11 @@ public class BulkDataExportJobSchedulingHelperImpl implements IBulkDataExportJob private Date computeCutoffFromConfig() { final int bulkExportFileRetentionPeriodHours = myDaoConfig.getBulkExportFileRetentionPeriodHours(); - final LocalDateTime cutoffLocalDateTime = LocalDateTime.now() - .minusHours(bulkExportFileRetentionPeriodHours); + final LocalDateTime cutoffLocalDateTime = LocalDateTime.now().minusHours(bulkExportFileRetentionPeriodHours); - return Date.from(cutoffLocalDateTime - .atZone(ZoneId.systemDefault()) - 
.toInstant()); + return Date.from(cutoffLocalDateTime.atZone(ZoneId.systemDefault()).toInstant()); } - public static class PurgeExpiredFilesJob implements HapiJob { @Autowired private IBulkDataExportJobSchedulingHelper myTarget; @@ -227,4 +236,3 @@ public class BulkDataExportJobSchedulingHelperImpl implements IBulkDataExportJob } } } - diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/JpaBulkExportProcessor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/JpaBulkExportProcessor.java index 4532062cdee..ae0fb1115f1 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/JpaBulkExportProcessor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/JpaBulkExportProcessor.java @@ -66,8 +66,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nonnull; -import javax.persistence.EntityManager; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; @@ -79,6 +77,8 @@ import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.persistence.EntityManager; import static ca.uhn.fhir.rest.api.Constants.PARAM_HAS; import static ca.uhn.fhir.rest.api.Constants.PARAM_ID; @@ -87,7 +87,8 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { private static final Logger ourLog = LoggerFactory.getLogger(JpaBulkExportProcessor.class); public static final int QUERY_CHUNK_SIZE = 100; - public static final List PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES = List.of("Practitioner", "Organization"); + public static final List PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES = + List.of("Practitioner", "Organization"); @Autowired private FhirContext myContext; @@ -125,45 +126,54 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { @Override public Iterator getResourcePidIterator(ExportPIDIteratorParameters theParams) { return myHapiTransactionService - .withSystemRequest() - .withRequestPartitionId(theParams.getPartitionIdOrAllPartitions()) - .readOnly() - .execute(() -> { - String resourceType = theParams.getResourceType(); - String jobId = theParams.getInstanceId(); - String chunkId = theParams.getChunkId(); - RuntimeResourceDefinition def = myContext.getResourceDefinition(resourceType); + .withSystemRequest() + .withRequestPartitionId(theParams.getPartitionIdOrAllPartitions()) + .readOnly() + .execute(() -> { + String resourceType = theParams.getResourceType(); + String jobId = theParams.getInstanceId(); + String chunkId = theParams.getChunkId(); + RuntimeResourceDefinition def = myContext.getResourceDefinition(resourceType); - LinkedHashSet pids; - if (theParams.getExportStyle() == BulkExportJobParameters.ExportStyle.PATIENT) { - pids = getPidsForPatientStyleExport(theParams, resourceType, jobId, chunkId, def); - } else if (theParams.getExportStyle() == BulkExportJobParameters.ExportStyle.GROUP) { - pids = getPidsForGroupStyleExport(theParams, resourceType, def); - } else { - pids = getPidsForSystemStyleExport(theParams, jobId, chunkId, def); - } + LinkedHashSet pids; + if (theParams.getExportStyle() == BulkExportJobParameters.ExportStyle.PATIENT) { + pids = getPidsForPatientStyleExport(theParams, resourceType, jobId, chunkId, def); + } else if (theParams.getExportStyle() == BulkExportJobParameters.ExportStyle.GROUP) { + pids = 
getPidsForGroupStyleExport(theParams, resourceType, def); + } else { + pids = getPidsForSystemStyleExport(theParams, jobId, chunkId, def); + } - ourLog.debug("Finished expanding resource pids to export, size is {}", pids.size()); - return pids.iterator(); - }); + ourLog.debug("Finished expanding resource pids to export, size is {}", pids.size()); + return pids.iterator(); + }); } @SuppressWarnings("unchecked") - private LinkedHashSet getPidsForPatientStyleExport(ExportPIDIteratorParameters theParams, String resourceType, String theJobId, String theChunkId, RuntimeResourceDefinition def) throws IOException { + private LinkedHashSet getPidsForPatientStyleExport( + ExportPIDIteratorParameters theParams, + String resourceType, + String theJobId, + String theChunkId, + RuntimeResourceDefinition def) + throws IOException { LinkedHashSet pids = new LinkedHashSet<>(); // Patient if (myStorageSettings.getIndexMissingFields() == JpaStorageSettings.IndexEnabledEnum.DISABLED) { - String errorMessage = "You attempted to start a Patient Bulk Export, but the system has `Index Missing Fields` disabled. It must be enabled for Patient Bulk Export"; + String errorMessage = + "You attempted to start a Patient Bulk Export, but the system has `Index Missing Fields` disabled. It must be enabled for Patient Bulk Export"; ourLog.error(errorMessage); throw new IllegalStateException(Msg.code(797) + errorMessage); } - Set patientSearchParams = SearchParameterUtil.getPatientSearchParamsForResourceType(myContext, theParams.getResourceType()); + Set patientSearchParams = + SearchParameterUtil.getPatientSearchParamsForResourceType(myContext, theParams.getResourceType()); for (String patientSearchParam : patientSearchParams) { - List maps = myBulkExportHelperSvc.createSearchParameterMapsForResourceType(def, theParams, false); + List maps = + myBulkExportHelperSvc.createSearchParameterMapsForResourceType(def, theParams, false); for (SearchParameterMap map : maps) { - //Ensure users did not monkey with the patient compartment search parameter. + // Ensure users did not monkey with the patient compartment search parameter. 
validateSearchParametersForPatient(map, theParams); ISearchBuilder searchBuilder = getSearchBuilderForResourceType(theParams.getResourceType()); @@ -172,13 +182,24 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { SearchRuntimeDetails searchRuntime = new SearchRuntimeDetails(null, theJobId); - Logs.getBatchTroubleshootingLog().debug("Executing query for bulk export job[{}] chunk[{}]: {}", theJobId, theChunkId, map.toNormalizedQueryString(myContext)); + Logs.getBatchTroubleshootingLog() + .debug( + "Executing query for bulk export job[{}] chunk[{}]: {}", + theJobId, + theChunkId, + map.toNormalizedQueryString(myContext)); - try (IResultIterator resultIterator = searchBuilder.createQuery(map, searchRuntime, new SystemRequestDetails(), theParams.getPartitionIdOrAllPartitions())) { + try (IResultIterator resultIterator = searchBuilder.createQuery( + map, searchRuntime, new SystemRequestDetails(), theParams.getPartitionIdOrAllPartitions())) { int pidCount = 0; while (resultIterator.hasNext()) { if (pidCount % 10000 == 0) { - Logs.getBatchTroubleshootingLog().debug("Bulk export job[{}] chunk[{}] has loaded {} pids", theJobId, theChunkId, pidCount); + Logs.getBatchTroubleshootingLog() + .debug( + "Bulk export job[{}] chunk[{}] has loaded {} pids", + theJobId, + theChunkId, + pidCount); } pidCount++; pids.add(resultIterator.next()); @@ -189,7 +210,11 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { return pids; } - private static void filterBySpecificPatient(ExportPIDIteratorParameters theParams, String resourceType, String patientSearchParam, SearchParameterMap map) { + private static void filterBySpecificPatient( + ExportPIDIteratorParameters theParams, + String resourceType, + String patientSearchParam, + SearchParameterMap map) { if (resourceType.equalsIgnoreCase("Patient")) { if (theParams.getPatientIds() != null) { ReferenceOrListParam referenceOrListParam = getReferenceOrListParam(theParams); @@ -215,24 +240,35 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { } @SuppressWarnings("unchecked") - private LinkedHashSet getPidsForSystemStyleExport(ExportPIDIteratorParameters theParams, String theJobId, String theChunkId, RuntimeResourceDefinition theDef) throws IOException { + private LinkedHashSet getPidsForSystemStyleExport( + ExportPIDIteratorParameters theParams, String theJobId, String theChunkId, RuntimeResourceDefinition theDef) + throws IOException { LinkedHashSet pids = new LinkedHashSet<>(); // System - List maps = myBulkExportHelperSvc.createSearchParameterMapsForResourceType(theDef, theParams, true); + List maps = + myBulkExportHelperSvc.createSearchParameterMapsForResourceType(theDef, theParams, true); ISearchBuilder searchBuilder = getSearchBuilderForResourceType(theParams.getResourceType()); for (SearchParameterMap map : maps) { - Logs.getBatchTroubleshootingLog().debug("Executing query for bulk export job[{}] chunk[{}]: {}", theJobId, theChunkId, map.toNormalizedQueryString(myContext)); + Logs.getBatchTroubleshootingLog() + .debug( + "Executing query for bulk export job[{}] chunk[{}]: {}", + theJobId, + theChunkId, + map.toNormalizedQueryString(myContext)); // requires a transaction - try (IResultIterator resultIterator = searchBuilder.createQuery(map, - new SearchRuntimeDetails(null, theJobId), - null, - theParams.getPartitionIdOrAllPartitions())) { + try (IResultIterator resultIterator = searchBuilder.createQuery( + map, new SearchRuntimeDetails(null, theJobId), null, 
theParams.getPartitionIdOrAllPartitions())) { int pidCount = 0; while (resultIterator.hasNext()) { if (pidCount % 10000 == 0) { - Logs.getBatchTroubleshootingLog().debug("Bulk export job[{}] chunk[{}] has loaded {} pids", theJobId, theChunkId, pidCount); + Logs.getBatchTroubleshootingLog() + .debug( + "Bulk export job[{}] chunk[{}] has loaded {} pids", + theJobId, + theChunkId, + pidCount); } pidCount++; pids.add(resultIterator.next()); @@ -242,7 +278,9 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { return pids; } - private LinkedHashSet getPidsForGroupStyleExport(ExportPIDIteratorParameters theParams, String theResourceType, RuntimeResourceDefinition theDef) throws IOException { + private LinkedHashSet getPidsForGroupStyleExport( + ExportPIDIteratorParameters theParams, String theResourceType, RuntimeResourceDefinition theDef) + throws IOException { LinkedHashSet pids; if (theResourceType.equalsIgnoreCase("Patient")) { @@ -257,12 +295,14 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { return pids; } - private LinkedHashSet getRelatedResourceTypePids(ExportPIDIteratorParameters theParams, RuntimeResourceDefinition theDef) throws IOException { + private LinkedHashSet getRelatedResourceTypePids( + ExportPIDIteratorParameters theParams, RuntimeResourceDefinition theDef) throws IOException { LinkedHashSet pids = new LinkedHashSet<>(); // expand the group pid -> list of patients in that group (list of patient pids) Set expandedMemberResourceIds = expandAllPatientPidsFromGroup(theParams); assert !expandedMemberResourceIds.isEmpty(); - Logs.getBatchTroubleshootingLog().debug("{} has been expanded to members:[{}]", theParams.getGroupId(), expandedMemberResourceIds); + Logs.getBatchTroubleshootingLog() + .debug("{} has been expanded to members:[{}]", theParams.getGroupId(), expandedMemberResourceIds); // for each patient pid -> // search for the target resources, with their correct patient references, chunked. 
@@ -285,8 +325,9 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { private LinkedHashSet getSingletonGroupList(ExportPIDIteratorParameters theParams) { RequestPartitionId partitionId = theParams.getPartitionIdOrAllPartitions(); - IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(theParams.getGroupId()), - new SystemRequestDetails().setRequestPartitionId(partitionId)); + IBaseResource group = myDaoRegistry + .getResourceDao("Group") + .read(new IdDt(theParams.getGroupId()), new SystemRequestDetails().setRequestPartitionId(partitionId)); JpaPid pidOrNull = myIdHelperService.getPidOrNull(partitionId, group); LinkedHashSet pids = new LinkedHashSet<>(); pids.add(pidOrNull); @@ -305,7 +346,8 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { protected RuntimeSearchParam getPatientSearchParamForCurrentResourceType(String theResourceType) { RuntimeSearchParam searchParam = null; - Optional onlyPatientSearchParamForResourceType = SearchParameterUtil.getOnlyPatientSearchParamForResourceType(myContext, theResourceType); + Optional onlyPatientSearchParamForResourceType = + SearchParameterUtil.getOnlyPatientSearchParamForResourceType(myContext, theResourceType); if (onlyPatientSearchParamForResourceType.isPresent()) { searchParam = onlyPatientSearchParamForResourceType.get(); } @@ -324,11 +366,15 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { /** * For Patient **/ - - private RuntimeSearchParam validateSearchParametersForPatient(SearchParameterMap expandedSpMap, ExportPIDIteratorParameters theParams) { - RuntimeSearchParam runtimeSearchParam = getPatientSearchParamForCurrentResourceType(theParams.getResourceType()); + private RuntimeSearchParam validateSearchParametersForPatient( + SearchParameterMap expandedSpMap, ExportPIDIteratorParameters theParams) { + RuntimeSearchParam runtimeSearchParam = + getPatientSearchParamForCurrentResourceType(theParams.getResourceType()); if (expandedSpMap.get(runtimeSearchParam.getName()) != null) { - throw new IllegalArgumentException(Msg.code(796) + String.format("Patient Bulk Export manually modifies the Search Parameter called [%s], so you may not include this search parameter in your _typeFilter!", runtimeSearchParam.getName())); + throw new IllegalArgumentException(Msg.code(796) + + String.format( + "Patient Bulk Export manually modifies the Search Parameter called [%s], so you may not include this search parameter in your _typeFilter!", + runtimeSearchParam.getName())); } return runtimeSearchParam; } @@ -336,13 +382,15 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { /** * for group exports **/ - private void validateSearchParametersForGroup(SearchParameterMap expandedSpMap, String theResourceType) { // we only validate for certain types if (!PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES.contains(theResourceType)) { RuntimeSearchParam runtimeSearchParam = getPatientSearchParamForCurrentResourceType(theResourceType); if (expandedSpMap.get(runtimeSearchParam.getName()) != null) { - throw new IllegalArgumentException(Msg.code(792) + String.format("Group Bulk Export manually modifies the Search Parameter called [%s], so you may not include this search parameter in your _typeFilter!", runtimeSearchParam.getName())); + throw new IllegalArgumentException(Msg.code(792) + + String.format( + "Group Bulk Export manually modifies the Search Parameter called [%s], so you may not include this search parameter in your _typeFilter!", + 
runtimeSearchParam.getName())); } } } @@ -354,7 +402,8 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { @SuppressWarnings("unchecked") private LinkedHashSet getExpandedPatientList(ExportPIDIteratorParameters theParameters) throws IOException { List members = getMembersFromGroupWithFilter(theParameters, true); - List ids = members.stream().map(member -> new IdDt("Patient/" + member)).collect(Collectors.toList()); + List ids = + members.stream().map(member -> new IdDt("Patient/" + member)).collect(Collectors.toList()); ourLog.info("While extracting patients from a group, we found {} patients.", ids.size()); ourLog.info("Found patients: {}", ids.stream().map(id -> id.getValue()).collect(Collectors.joining(", "))); @@ -366,7 +415,8 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { SystemRequestDetails srd = new SystemRequestDetails().setRequestPartitionId(partitionId); IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(theParameters.getGroupId()), srd); JpaPid pidOrNull = myIdHelperService.getPidOrNull(partitionId, group); - List> goldenPidSourcePidTuple = myMdmLinkDao.expandPidsFromGroupPidGivenMatchResult(pidOrNull, MdmMatchResultEnum.MATCH); + List> goldenPidSourcePidTuple = + myMdmLinkDao.expandPidsFromGroupPidGivenMatchResult(pidOrNull, MdmMatchResultEnum.MATCH); goldenPidSourcePidTuple.forEach(tuple -> { patientPidsToExport.add(tuple.getGoldenPid()); patientPidsToExport.add(tuple.getSourcePid()); @@ -382,21 +432,28 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { * @return A list of strings representing the Patient IDs of the members (e.g. ["P1", "P2", "P3"] */ @SuppressWarnings("unchecked") - private List getMembersFromGroupWithFilter(ExportPIDIteratorParameters theParameters, boolean theConsiderSince) throws IOException { + private List getMembersFromGroupWithFilter( + ExportPIDIteratorParameters theParameters, boolean theConsiderSince) throws IOException { RuntimeResourceDefinition def = myContext.getResourceDefinition("Patient"); List resPids = new ArrayList<>(); - List maps = myBulkExportHelperSvc.createSearchParameterMapsForResourceType(def, theParameters, theConsiderSince); + List maps = + myBulkExportHelperSvc.createSearchParameterMapsForResourceType(def, theParameters, theConsiderSince); maps.forEach(map -> addMembershipToGroupClause(map, theParameters.getGroupId())); for (SearchParameterMap map : maps) { ISearchBuilder searchBuilder = getSearchBuilderForResourceType("Patient"); - ourLog.debug("Searching for members of group {} with job instance {} with map {}", theParameters.getGroupId(), theParameters.getInstanceId(), map); - try (IResultIterator resultIterator = searchBuilder.createQuery(map, - new SearchRuntimeDetails(null, theParameters.getInstanceId()), - null, - theParameters.getPartitionIdOrAllPartitions())) { + ourLog.debug( + "Searching for members of group {} with job instance {} with map {}", + theParameters.getGroupId(), + theParameters.getInstanceId(), + map); + try (IResultIterator resultIterator = searchBuilder.createQuery( + map, + new SearchRuntimeDetails(null, theParameters.getInstanceId()), + null, + theParameters.getPartitionIdOrAllPartitions())) { while (resultIterator.hasNext()) { resPids.add(resultIterator.next()); @@ -422,20 +479,20 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { /** * @param thePidTuples */ - @SuppressWarnings({ "unchecked", "rawtypes" }) + @SuppressWarnings({"unchecked", "rawtypes"}) private void 
populateMdmResourceCache(List> thePidTuples) { if (myMdmExpansionCacheSvc.hasBeenPopulated()) { return; } - //First, convert this zipped set of tuples to a map of - //{ + // First, convert this zipped set of tuples to a map of + // { // patient/gold-1 -> [patient/1, patient/2] // patient/gold-2 -> [patient/3, patient/4] - //} + // } Map> goldenResourceToSourcePidMap = new HashMap<>(); extract(thePidTuples, goldenResourceToSourcePidMap); - //Next, lets convert it to an inverted index for fast lookup + // Next, lets convert it to an inverted index for fast lookup // { // patient/1 -> patient/gold-1 // patient/2 -> patient/gold-1 @@ -444,44 +501,53 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { // } Map sourceResourceIdToGoldenResourceIdMap = new HashMap<>(); goldenResourceToSourcePidMap.forEach((key, value) -> { - String goldenResourceId = myIdHelperService.translatePidIdToForcedIdWithCache(key).orElse(key.toString()); + String goldenResourceId = + myIdHelperService.translatePidIdToForcedIdWithCache(key).orElse(key.toString()); PersistentIdToForcedIdMap pidsToForcedIds = myIdHelperService.translatePidsToForcedIds(value); Set sourceResourceIds = pidsToForcedIds.getResolvedResourceIds(); - sourceResourceIds - .forEach(sourceResourceId -> sourceResourceIdToGoldenResourceIdMap.put(sourceResourceId, goldenResourceId)); + sourceResourceIds.forEach( + sourceResourceId -> sourceResourceIdToGoldenResourceIdMap.put(sourceResourceId, goldenResourceId)); }); - //Now that we have built our cached expansion, store it. + // Now that we have built our cached expansion, store it. myMdmExpansionCacheSvc.setCacheContents(sourceResourceIdToGoldenResourceIdMap); } - private void extract(List> theGoldenPidTargetPidTuples, Map> theGoldenResourceToSourcePidMap) { + private void extract( + List> theGoldenPidTargetPidTuples, + Map> theGoldenResourceToSourcePidMap) { for (MdmPidTuple goldenPidTargetPidTuple : theGoldenPidTargetPidTuples) { JpaPid goldenPid = goldenPidTargetPidTuple.getGoldenPid(); JpaPid sourcePid = goldenPidTargetPidTuple.getSourcePid(); - theGoldenResourceToSourcePidMap.computeIfAbsent(goldenPid, key -> new HashSet<>()).add(sourcePid); + theGoldenResourceToSourcePidMap + .computeIfAbsent(goldenPid, key -> new HashSet<>()) + .add(sourcePid); } } // gets all the resources related to each patient provided in the list of thePatientPids @SuppressWarnings("unchecked") - private void queryResourceTypeWithReferencesToPatients(Set theReadPids, - List thePatientPids, - ExportPIDIteratorParameters theParams, - RuntimeResourceDefinition theDef) throws IOException { + private void queryResourceTypeWithReferencesToPatients( + Set theReadPids, + List thePatientPids, + ExportPIDIteratorParameters theParams, + RuntimeResourceDefinition theDef) + throws IOException { - //Convert Resource Persistent IDs to actual client IDs. + // Convert Resource Persistent IDs to actual client IDs. 
Set pidSet = new HashSet<>(thePatientPids); Set patientIds = myIdHelperService.translatePidsToFhirResourceIds(pidSet); - //Build SP map - //First, inject the _typeFilters and _since from the export job - List expandedSpMaps = myBulkExportHelperSvc.createSearchParameterMapsForResourceType(theDef, theParams, true); + // Build SP map + // First, inject the _typeFilters and _since from the export job + List expandedSpMaps = + myBulkExportHelperSvc.createSearchParameterMapsForResourceType(theDef, theParams, true); for (SearchParameterMap expandedSpMap : expandedSpMaps) { - //Since we are in a bulk job, we have to ensure the user didn't jam in a patient search param, since we need to manually set that. + // Since we are in a bulk job, we have to ensure the user didn't jam in a patient search param, since we + // need to manually set that. validateSearchParametersForGroup(expandedSpMap, theParams.getResourceType()); // Fetch and cache a search builder for this resource type @@ -496,12 +562,10 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { filterSearchByResourceIds(patientIds, expandedSpMap, theParams); } - //Execute query and all found pids to our local iterator. + // Execute query and all found pids to our local iterator. RequestPartitionId partitionId = theParams.getPartitionIdOrAllPartitions(); - try (IResultIterator resultIterator = searchBuilder.createQuery(expandedSpMap, - new SearchRuntimeDetails(null, theParams.getInstanceId()), - null, - partitionId)) { + try (IResultIterator resultIterator = searchBuilder.createQuery( + expandedSpMap, new SearchRuntimeDetails(null, theParams.getInstanceId()), null, partitionId)) { while (resultIterator.hasNext()) { theReadPids.add(resultIterator.next()); } @@ -516,7 +580,8 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { } SystemRequestDetails requestDetails = new SystemRequestDetails().setRequestPartitionId(partitionId); - SearchBuilderLoadIncludesParameters loadIncludesParameters = new SearchBuilderLoadIncludesParameters<>(); + SearchBuilderLoadIncludesParameters loadIncludesParameters = + new SearchBuilderLoadIncludesParameters<>(); loadIncludesParameters.setFhirContext(myContext); loadIncludesParameters.setMatches(theReadPids); loadIncludesParameters.setEntityManager(myEntityManager); @@ -529,7 +594,9 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { Set includeIds = searchBuilder.loadIncludes(loadIncludesParameters); // gets rid of the Patient duplicates - theReadPids.addAll(includeIds.stream().filter((id) -> !id.getResourceType().equals("Patient")).collect(Collectors.toSet())); + theReadPids.addAll(includeIds.stream() + .filter((id) -> !id.getResourceType().equals("Patient")) + .collect(Collectors.toSet())); } } @@ -540,10 +607,12 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { * @param expandedSpMap * @param theParams */ - private void filterSearchByResourceIds(Set idChunk, SearchParameterMap expandedSpMap, ExportPIDIteratorParameters theParams) { + private void filterSearchByResourceIds( + Set idChunk, SearchParameterMap expandedSpMap, ExportPIDIteratorParameters theParams) { ReferenceOrListParam orList = new ReferenceOrListParam(); idChunk.forEach(id -> orList.add(new ReferenceParam(id))); - RuntimeSearchParam patientSearchParamForCurrentResourceType = getPatientSearchParamForCurrentResourceType(theParams.getResourceType()); + RuntimeSearchParam patientSearchParamForCurrentResourceType = + 
getPatientSearchParamForCurrentResourceType(theParams.getResourceType()); expandedSpMap.add(patientSearchParamForCurrentResourceType.getName(), orList); } @@ -551,7 +620,8 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { * @param idChunk * @param expandedSpMap */ - private void filterSearchByHasParam(Set idChunk, SearchParameterMap expandedSpMap, ExportPIDIteratorParameters theParams) { + private void filterSearchByHasParam( + Set idChunk, SearchParameterMap expandedSpMap, ExportPIDIteratorParameters theParams) { HasOrListParam hasOrListParam = new HasOrListParam(); idChunk.stream().forEach(id -> hasOrListParam.addOr(buildHasParam(id, theParams.getResourceType()))); expandedSpMap.add("_has", hasOrListParam); @@ -563,7 +633,8 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { } else if ("Organization".equalsIgnoreCase(theResourceType)) { return new HasParam("Patient", "organization", "_id", theResourceId); } else { - throw new IllegalArgumentException(Msg.code(2077) + " We can't handle forward references onto type " + theResourceType); + throw new IllegalArgumentException( + Msg.code(2077) + " We can't handle forward references onto type " + theResourceType); } } @@ -578,16 +649,18 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { Set expandedIds = new HashSet<>(); RequestPartitionId partitionId = theParams.getPartitionIdOrAllPartitions(); SystemRequestDetails requestDetails = new SystemRequestDetails().setRequestPartitionId(partitionId); - IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(theParams.getGroupId()), requestDetails); + IBaseResource group = + myDaoRegistry.getResourceDao("Group").read(new IdDt(theParams.getGroupId()), requestDetails); JpaPid pidOrNull = myIdHelperService.getPidOrNull(partitionId, group); - //Attempt to perform MDM Expansion of membership + // Attempt to perform MDM Expansion of membership if (theParams.isExpandMdm()) { expandedIds.addAll(performMembershipExpansionViaMdmTable(pidOrNull)); } - //Now manually add the members of the group (its possible even with mdm expansion that some members dont have MDM matches, - //so would be otherwise skipped + // Now manually add the members of the group (its possible even with mdm expansion that some members dont have + // MDM matches, + // so would be otherwise skipped List membersFromGroupWithFilter = getMembersFromGroupWithFilter(theParams, false); ourLog.debug("Group with ID [{}] has been expanded to: {}", theParams.getGroupId(), membersFromGroupWithFilter); expandedIds.addAll(membersFromGroupWithFilter); @@ -597,8 +670,9 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { @SuppressWarnings({"rawtypes", "unchecked"}) private Set performMembershipExpansionViaMdmTable(JpaPid pidOrNull) { - List> goldenPidTargetPidTuples = myMdmLinkDao.expandPidsFromGroupPidGivenMatchResult(pidOrNull, MdmMatchResultEnum.MATCH); - //Now lets translate these pids into resource IDs + List> goldenPidTargetPidTuples = + myMdmLinkDao.expandPidsFromGroupPidGivenMatchResult(pidOrNull, MdmMatchResultEnum.MATCH); + // Now lets translate these pids into resource IDs Set uniquePids = new HashSet<>(); goldenPidTargetPidTuples.forEach(tuple -> { uniquePids.add(tuple.getGoldenPid()); @@ -616,9 +690,12 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { /* Mdm Expansion */ private RuntimeSearchParam getRuntimeSearchParam(IBaseResource theResource) { - Optional oPatientSearchParam = 
SearchParameterUtil.getOnlyPatientSearchParamForResourceType(myContext, theResource.fhirType()); + Optional oPatientSearchParam = + SearchParameterUtil.getOnlyPatientSearchParamForResourceType(myContext, theResource.fhirType()); if (!oPatientSearchParam.isPresent()) { - String errorMessage = String.format("[%s] has no search parameters that are for patients, so it is invalid for Group Bulk Export!", theResource.fhirType()); + String errorMessage = String.format( + "[%s] has no search parameters that are for patients, so it is invalid for Group Bulk Export!", + theResource.fhirType()); throw new IllegalArgumentException(Msg.code(2242) + errorMessage); } else { return oPatientSearchParam.get(); @@ -630,8 +707,10 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { if (patientReference.isPresent()) { addGoldenResourceExtension(iBaseResource, patientReference.get()); } else { - ourLog.error("Failed to find the patient reference information for resource {}. This is a bug, " + - "as all resources which can be exported via Group Bulk Export must reference a patient.", iBaseResource); + ourLog.error( + "Failed to find the patient reference information for resource {}. This is a bug, " + + "as all resources which can be exported via Group Bulk Export must reference a patient.", + iBaseResource); } } @@ -644,9 +723,11 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { if (iBaseResource.fhirType().equalsIgnoreCase("Patient")) { return Optional.of(iBaseResource.getIdElement().getIdPart()); } else { - Optional optionalReference = getFhirParser().evaluateFirst(iBaseResource, fhirPath, IBaseReference.class); + Optional optionalReference = + getFhirParser().evaluateFirst(iBaseResource, fhirPath, IBaseReference.class); if (optionalReference.isPresent()) { - return optionalReference.map(theIBaseReference -> theIBaseReference.getReferenceElement().getIdPart()); + return optionalReference.map(theIBaseReference -> + theIBaseReference.getReferenceElement().getIdPart()); } else { return Optional.empty(); } @@ -655,7 +736,8 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { private void addGoldenResourceExtension(IBaseResource iBaseResource, String sourceResourceId) { String goldenResourceId = myMdmExpansionCacheSvc.getGoldenResourceId(sourceResourceId); - IBaseExtension extension = ExtensionUtil.getOrCreateExtension(iBaseResource, HapiExtensions.ASSOCIATED_GOLDEN_RESOURCE_EXTENSION_URL); + IBaseExtension extension = ExtensionUtil.getOrCreateExtension( + iBaseResource, HapiExtensions.ASSOCIATED_GOLDEN_RESOURCE_EXTENSION_URL); if (!StringUtils.isBlank(goldenResourceId)) { ExtensionUtil.setExtension(myContext, extension, "reference", prefixPatient(goldenResourceId)); } @@ -675,7 +757,8 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { private String getPatientFhirPath(RuntimeSearchParam theRuntimeParam) { String path = theRuntimeParam.getPath(); // GGG: Yes this is a stupid hack, but by default this runtime search param will return stuff like - // Observation.subject.where(resolve() is Patient) which unfortunately our FHIRpath evaluator doesn't play nicely with + // Observation.subject.where(resolve() is Patient) which unfortunately our FHIRpath evaluator doesn't play + // nicely with // our FHIRPath evaluator. 
if (path.contains(".where")) { path = path.substring(0, path.indexOf(".where")); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImpl.java index cc417ab38f1..301d7739466 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImpl.java @@ -23,7 +23,6 @@ import ca.uhn.fhir.batch2.api.IJobCoordinator; import ca.uhn.fhir.batch2.importpull.models.Batch2BulkImportPullJobParameters; import ca.uhn.fhir.batch2.model.JobInstanceStartRequest; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; -import ca.uhn.fhir.util.Logs; import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; import ca.uhn.fhir.jpa.bulk.imprt.model.ActivateJobResult; import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson; @@ -38,6 +37,7 @@ import ca.uhn.fhir.jpa.model.sched.IHasScheduledJobs; import ca.uhn.fhir.jpa.model.sched.ISchedulerService; import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import ca.uhn.fhir.util.Logs; import ca.uhn.fhir.util.ValidateUtil; import com.apicatalog.jsonld.StringUtils; import org.apache.commons.lang3.time.DateUtils; @@ -53,25 +53,29 @@ import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.support.TransactionTemplate; -import javax.annotation.Nonnull; -import javax.annotation.PostConstruct; import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.UUID; import java.util.concurrent.Semaphore; +import javax.annotation.Nonnull; +import javax.annotation.PostConstruct; import static ca.uhn.fhir.batch2.jobs.importpull.BulkImportPullConfig.BULK_IMPORT_JOB_NAME; public class BulkDataImportSvcImpl implements IBulkDataImportSvc, IHasScheduledJobs { private static final Logger ourLog = LoggerFactory.getLogger(BulkDataImportSvcImpl.class); private final Semaphore myRunningJobSemaphore = new Semaphore(1); + @Autowired private IBulkImportJobDao myJobDao; + @Autowired private IBulkImportJobFileDao myJobFileDao; + @Autowired private PlatformTransactionManager myTxManager; + private TransactionTemplate myTxTemplate; @Autowired @@ -85,7 +89,6 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc, IHasScheduledJ myTxTemplate = new TransactionTemplate(myTxManager); } - @Override public void scheduleJobs(ISchedulerService theSchedulerService) { // This job should be local so that each node in the cluster can pick up jobs @@ -95,17 +98,20 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc, IHasScheduledJ theSchedulerService.scheduleLocalJob(10 * DateUtils.MILLIS_PER_SECOND, jobDetail); } - @Override @Transactional - public String createNewJob(BulkImportJobJson theJobDescription, @Nonnull List theInitialFiles) { + public String createNewJob( + BulkImportJobJson theJobDescription, @Nonnull List theInitialFiles) { ValidateUtil.isNotNullOrThrowUnprocessableEntity(theJobDescription, "Job must not be null"); - ValidateUtil.isNotNullOrThrowUnprocessableEntity(theJobDescription.getProcessingMode(), "Job File Processing mode must not be null"); - ValidateUtil.isTrueOrThrowInvalidRequest(theJobDescription.getBatchSize() > 0, "Job File Batch Size must be > 0"); + 
ValidateUtil.isNotNullOrThrowUnprocessableEntity( + theJobDescription.getProcessingMode(), "Job File Processing mode must not be null"); + ValidateUtil.isTrueOrThrowInvalidRequest( + theJobDescription.getBatchSize() > 0, "Job File Batch Size must be > 0"); String biJobId = UUID.randomUUID().toString(); - ourLog.info("Creating new Bulk Import job with {} files, assigning bijob ID: {}", theInitialFiles.size(), biJobId); + ourLog.info( + "Creating new Bulk Import job with {} files, assigning bijob ID: {}", theInitialFiles.size(), biJobId); BulkImportJobEntity job = new BulkImportJobEntity(); job.setJobId(biJobId); @@ -129,7 +135,11 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc, IHasScheduledJ BulkImportJobEntity job = findJobByBiJobId(theBiJobId); - ValidateUtil.isTrueOrThrowInvalidRequest(job.getStatus() == BulkImportJobStatusEnum.STAGING, "bijob id %s has status %s and can not be added to", theBiJobId, job.getStatus()); + ValidateUtil.isTrueOrThrowInvalidRequest( + job.getStatus() == BulkImportJobStatusEnum.STAGING, + "bijob id %s has status %s and can not be added to", + theBiJobId, + job.getStatus()); addFilesToJob(theFiles, job, job.getFileCount()); @@ -138,9 +148,8 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc, IHasScheduledJ } private BulkImportJobEntity findJobByBiJobId(String theBiJobId) { - BulkImportJobEntity job = myJobDao - .findByJobId(theBiJobId) - .orElseThrow(() -> new InvalidRequestException("Unknown bijob id: " + theBiJobId)); + BulkImportJobEntity job = myJobDao.findByJobId(theBiJobId) + .orElseThrow(() -> new InvalidRequestException("Unknown bijob id: " + theBiJobId)); return job; } @@ -150,7 +159,11 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc, IHasScheduledJ ourLog.info("Activating bulk import bijob {}", theBiJobId); BulkImportJobEntity job = findJobByBiJobId(theBiJobId); - ValidateUtil.isTrueOrThrowInvalidRequest(job.getStatus() == BulkImportJobStatusEnum.STAGING, "Bulk import bijob %s can not be activated in status: %s", theBiJobId, job.getStatus()); + ValidateUtil.isTrueOrThrowInvalidRequest( + job.getStatus() == BulkImportJobStatusEnum.STAGING, + "Bulk import bijob %s can not be activated in status: %s", + theBiJobId, + job.getStatus()); job.setStatus(BulkImportJobStatusEnum.READY); myJobDao.save(job); @@ -163,7 +176,8 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc, IHasScheduledJ @Override public ActivateJobResult activateNextReadyJob() { if (!myStorageSettings.isEnableTaskBulkImportJobExecution()) { - Logs.getBatchTroubleshootingLog().trace("Bulk import job execution is not enabled on this server. No action taken."); + Logs.getBatchTroubleshootingLog() + .trace("Bulk import job execution is not enabled on this server. 
No action taken."); return new ActivateJobResult(false, null); } @@ -241,14 +255,14 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc, IHasScheduledJ return job.toJson(); } - @Override - public JobInfo getJobStatus(String theBiJobId) { - BulkImportJobEntity theJob = findJobByBiJobId(theBiJobId); - return new JobInfo() - .setStatus(theJob.getStatus()) - .setStatusMessage(theJob.getStatusMessage()) - .setStatusTime(theJob.getStatusTime()); - } + @Override + public JobInfo getJobStatus(String theBiJobId) { + BulkImportJobEntity theJob = findJobByBiJobId(theBiJobId); + return new JobInfo() + .setStatus(theJob.getStatus()) + .setStatusMessage(theJob.getStatusMessage()) + .setStatusTime(theJob.getStatusTime()); + } @Transactional @Override @@ -256,9 +270,10 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc, IHasScheduledJ BulkImportJobEntity job = findJobByBiJobId(theBiJobId); return myJobFileDao - .findForJob(job, theFileIndex) - .map(t -> t.toJson()) - .orElseThrow(() -> new IllegalArgumentException("Invalid index " + theFileIndex + " for bijob " + theBiJobId)); + .findForJob(job, theFileIndex) + .map(t -> t.toJson()) + .orElseThrow(() -> + new IllegalArgumentException("Invalid index " + theFileIndex + " for bijob " + theBiJobId)); } @Transactional @@ -297,9 +312,11 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc, IHasScheduledJ return myJobCoordinator.startInstance(request).getInstanceId(); } - private void addFilesToJob(@Nonnull List theInitialFiles, BulkImportJobEntity job, int nextSequence) { + private void addFilesToJob( + @Nonnull List theInitialFiles, BulkImportJobEntity job, int nextSequence) { for (BulkImportJobFileJson nextFile : theInitialFiles) { - ValidateUtil.isNotBlankOrThrowUnprocessableEntity(nextFile.getContents(), "Job File Contents mode must not be null"); + ValidateUtil.isNotBlankOrThrowUnprocessableEntity( + nextFile.getContents(), "Job File Contents mode must not be null"); BulkImportJobFileEntity jobFile = new BulkImportJobFileEntity(); jobFile.setJob(job); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/cache/ResourceVersionSvcDaoImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/cache/ResourceVersionSvcDaoImpl.java index e50c3d28ade..cdd9e944eef 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/cache/ResourceVersionSvcDaoImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/cache/ResourceVersionSvcDaoImpl.java @@ -26,22 +26,22 @@ import ca.uhn.fhir.jpa.api.svc.IIdHelperService; import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.entity.ResourceTable; -import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.util.QueryChunker; +import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import org.hl7.fhir.instance.model.api.IIdType; import org.slf4j.Logger; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import static org.slf4j.LoggerFactory.getLogger; @@ -55,25 +55,27 @@ public class ResourceVersionSvcDaoImpl implements IResourceVersionSvc { 
@Autowired DaoRegistry myDaoRegistry; + @Autowired IResourceTableDao myResourceTableDao; + @Autowired IIdHelperService myIdHelperService; @Override @Nonnull - @Transactional - public ResourceVersionMap getVersionMap(RequestPartitionId theRequestPartitionId, String theResourceName, SearchParameterMap theSearchParamMap) { + @Transactional + public ResourceVersionMap getVersionMap( + RequestPartitionId theRequestPartitionId, String theResourceName, SearchParameterMap theSearchParamMap) { IFhirResourceDao dao = myDaoRegistry.getResourceDao(theResourceName); if (ourLog.isDebugEnabled()) { ourLog.debug("About to retrieve version map for resource type: {}", theResourceName); } - List jpaPids = dao.searchForIds(theSearchParamMap, new SystemRequestDetails().setRequestPartitionId(theRequestPartitionId)); - List matchingIds = jpaPids.stream() - .map(JpaPid::getId) - .collect(Collectors.toList()); + List jpaPids = dao.searchForIds( + theSearchParamMap, new SystemRequestDetails().setRequestPartitionId(theRequestPartitionId)); + List matchingIds = jpaPids.stream().map(JpaPid::getId).collect(Collectors.toList()); List allById = new ArrayList<>(); new QueryChunker().chunk(matchingIds, t -> { @@ -98,7 +100,8 @@ public class ResourceVersionSvcDaoImpl implements IResourceVersionSvc { * @param theIds - list of IIdTypes for resources of interest. * @return */ - public ResourcePersistentIdMap getLatestVersionIdsForResourceIds(RequestPartitionId theRequestPartitionId, List theIds) { + public ResourcePersistentIdMap getLatestVersionIdsForResourceIds( + RequestPartitionId theRequestPartitionId, List theIds) { ResourcePersistentIdMap idToPID = new ResourcePersistentIdMap(); HashMap> resourceTypeToIds = new HashMap<>(); @@ -111,8 +114,8 @@ public class ResourceVersionSvcDaoImpl implements IResourceVersionSvc { } for (String resourceType : resourceTypeToIds.keySet()) { - ResourcePersistentIdMap idAndPID = getIdsOfExistingResources(theRequestPartitionId, - resourceTypeToIds.get(resourceType)); + ResourcePersistentIdMap idAndPID = + getIdsOfExistingResources(theRequestPartitionId, resourceTypeToIds.get(resourceType)); idToPID.putAll(idAndPID); } @@ -127,8 +130,8 @@ public class ResourceVersionSvcDaoImpl implements IResourceVersionSvc { * @param theIds - list of IIdType ids (for the same resource) * @return */ - private ResourcePersistentIdMap getIdsOfExistingResources(RequestPartitionId thePartitionId, - Collection theIds) { + private ResourcePersistentIdMap getIdsOfExistingResources( + RequestPartitionId thePartitionId, Collection theIds) { // these are the found Ids that were in the db ResourcePersistentIdMap retval = new ResourcePersistentIdMap(); @@ -136,7 +139,8 @@ public class ResourceVersionSvcDaoImpl implements IResourceVersionSvc { return retval; } - List jpaPids = myIdHelperService.resolveResourcePersistentIdsWithCache(thePartitionId, new ArrayList<>(theIds)); + List jpaPids = + myIdHelperService.resolveResourcePersistentIdsWithCache(thePartitionId, new ArrayList<>(theIds)); // we'll use this map to fetch pids that require versions HashMap pidsToVersionToResourcePid = new HashMap<>(); @@ -147,8 +151,9 @@ public class ResourceVersionSvcDaoImpl implements IResourceVersionSvc { pidsToVersionToResourcePid.put(pid.getId(), pid); } Optional idOp = theIds.stream() - .filter(i -> i.getIdPart().equals(pid.getAssociatedResourceId().getIdPart())) - .findFirst(); + .filter(i -> + i.getIdPart().equals(pid.getAssociatedResourceId().getIdPart())) + .findFirst(); // this should always be present // since it was passed in. 
// but land of optionals... @@ -159,8 +164,8 @@ public class ResourceVersionSvcDaoImpl implements IResourceVersionSvc { // set any versions we don't already have if (!pidsToVersionToResourcePid.isEmpty()) { - Collection resourceEntries = myResourceTableDao - .getResourceVersionsForPid(new ArrayList<>(pidsToVersionToResourcePid.keySet())); + Collection resourceEntries = + myResourceTableDao.getResourceVersionsForPid(new ArrayList<>(pidsToVersionToResourcePid.keySet())); for (Object[] record : resourceEntries) { // order matters! diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/Batch2SupportConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/Batch2SupportConfig.java index 0b5cc7287fb..7fbf1209703 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/Batch2SupportConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/Batch2SupportConfig.java @@ -42,12 +42,20 @@ public class Batch2SupportConfig { } @Bean - public IDeleteExpungeSvc deleteExpungeSvc(EntityManager theEntityManager, DeleteExpungeSqlBuilder theDeleteExpungeSqlBuilder, @Autowired(required = false) IFulltextSearchSvc theFullTextSearchSvc) { + public IDeleteExpungeSvc deleteExpungeSvc( + EntityManager theEntityManager, + DeleteExpungeSqlBuilder theDeleteExpungeSqlBuilder, + @Autowired(required = false) IFulltextSearchSvc theFullTextSearchSvc) { return new DeleteExpungeSvcImpl(theEntityManager, theDeleteExpungeSqlBuilder, theFullTextSearchSvc); } @Bean - DeleteExpungeSqlBuilder deleteExpungeSqlBuilder(ResourceTableFKProvider theResourceTableFKProvider, JpaStorageSettings theStorageSettings, IIdHelperService theIdHelper, IResourceLinkDao theResourceLinkDao) { - return new DeleteExpungeSqlBuilder(theResourceTableFKProvider, theStorageSettings, theIdHelper, theResourceLinkDao); + DeleteExpungeSqlBuilder deleteExpungeSqlBuilder( + ResourceTableFKProvider theResourceTableFKProvider, + JpaStorageSettings theStorageSettings, + IIdHelperService theIdHelper, + IResourceLinkDao theResourceLinkDao) { + return new DeleteExpungeSqlBuilder( + theResourceTableFKProvider, theStorageSettings, theIdHelper, theResourceLinkDao); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BeanPostProcessorConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BeanPostProcessorConfig.java index 433aa069b9c..a4b91058ed8 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BeanPostProcessorConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BeanPostProcessorConfig.java @@ -35,5 +35,4 @@ public class BeanPostProcessorConfig { public PersistenceExceptionTranslationPostProcessor persistenceExceptionTranslationPostProcessor() { return new PersistenceExceptionTranslationPostProcessor(); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/EnversAuditConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/EnversAuditConfig.java index 5a7694d7854..18fd08f7a28 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/EnversAuditConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/EnversAuditConfig.java @@ -39,5 +39,4 @@ public class EnversAuditConfig { AuditReader auditReader() { return AuditReaderFactory.get(myEntityManagerFactory.createEntityManager()); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/HapiFhirHibernateJpaDialect.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/HapiFhirHibernateJpaDialect.java index e6f700c62f8..4e2b5d1f5c3 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/HapiFhirHibernateJpaDialect.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/HapiFhirHibernateJpaDialect.java @@ -51,10 +51,10 @@ public class HapiFhirHibernateJpaDialect extends HibernateJpaDialect { myLocalizer = theLocalizer; } - public RuntimeException translate(PersistenceException theException, String theMessageToPrepend) { if (theException.getCause() instanceof HibernateException) { - return new PersistenceException(convertHibernateAccessException((HibernateException) theException.getCause(), theMessageToPrepend)); + return new PersistenceException( + convertHibernateAccessException((HibernateException) theException.getCause(), theMessageToPrepend)); } return theException; } @@ -64,7 +64,8 @@ public class HapiFhirHibernateJpaDialect extends HibernateJpaDialect { return convertHibernateAccessException(theException, null); } - private DataAccessException convertHibernateAccessException(HibernateException theException, String theMessageToPrepend) { + private DataAccessException convertHibernateAccessException( + HibernateException theException, String theMessageToPrepend) { String messageToPrepend = ""; if (isNotBlank(theMessageToPrepend)) { messageToPrepend = theMessageToPrepend + " - "; @@ -80,13 +81,22 @@ public class HapiFhirHibernateJpaDialect extends HibernateJpaDialect { if (isNotBlank(constraintName)) { constraintName = constraintName.toUpperCase(); if (constraintName.contains(ResourceHistoryTable.IDX_RESVER_ID_VER)) { - throw new ResourceVersionConflictException(Msg.code(823) + messageToPrepend + myLocalizer.getMessage(HapiFhirHibernateJpaDialect.class, "resourceVersionConstraintFailure")); + throw new ResourceVersionConflictException(Msg.code(823) + + messageToPrepend + + myLocalizer.getMessage( + HapiFhirHibernateJpaDialect.class, "resourceVersionConstraintFailure")); } if (constraintName.contains(ResourceIndexedComboStringUnique.IDX_IDXCMPSTRUNIQ_STRING)) { - throw new ResourceVersionConflictException(Msg.code(824) + messageToPrepend + myLocalizer.getMessage(HapiFhirHibernateJpaDialect.class, "resourceIndexedCompositeStringUniqueConstraintFailure")); + throw new ResourceVersionConflictException(Msg.code(824) + + messageToPrepend + + myLocalizer.getMessage( + HapiFhirHibernateJpaDialect.class, + "resourceIndexedCompositeStringUniqueConstraintFailure")); } if (constraintName.contains(ForcedId.IDX_FORCEDID_TYPE_FID)) { - throw new ResourceVersionConflictException(Msg.code(825) + messageToPrepend + myLocalizer.getMessage(HapiFhirHibernateJpaDialect.class, "forcedIdConstraintFailure")); + throw new ResourceVersionConflictException(Msg.code(825) + + messageToPrepend + + myLocalizer.getMessage(HapiFhirHibernateJpaDialect.class, "forcedIdConstraintFailure")); } if (constraintName.contains(ResourceSearchUrlEntity.RES_SEARCH_URL_COLUMN_NAME)) { throw super.convertHibernateAccessException(theException); @@ -109,14 +119,16 @@ public class HapiFhirHibernateJpaDialect extends HibernateJpaDialect { * StressTestR4Test method testMultiThreadedUpdateSameResourceInTransaction() */ if (theException instanceof org.hibernate.StaleStateException) { - String msg = messageToPrepend + myLocalizer.getMessage(HapiFhirHibernateJpaDialect.class, "resourceVersionConstraintFailure"); + String msg = messageToPrepend + + myLocalizer.getMessage(HapiFhirHibernateJpaDialect.class, 
"resourceVersionConstraintFailure"); throw new ResourceVersionConflictException(Msg.code(826) + msg); } if (theException instanceof org.hibernate.PessimisticLockException) { PessimisticLockException ex = (PessimisticLockException) theException; String sql = defaultString(ex.getSQL()).toUpperCase(); if (sql.contains(ResourceHistoryTable.HFJ_RES_VER)) { - String msg = messageToPrepend + myLocalizer.getMessage(HapiFhirHibernateJpaDialect.class, "resourceVersionConstraintFailure"); + String msg = messageToPrepend + + myLocalizer.getMessage(HapiFhirHibernateJpaDialect.class, "resourceVersionConstraintFailure"); throw new ResourceVersionConflictException(Msg.code(827) + msg); } } @@ -124,5 +136,4 @@ public class HapiFhirHibernateJpaDialect extends HibernateJpaDialect { DataAccessException retVal = super.convertHibernateAccessException(theException); return retVal; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/HibernatePropertiesProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/HibernatePropertiesProvider.java index 6fb23eb4a37..ad998da7814 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/HibernatePropertiesProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/HibernatePropertiesProvider.java @@ -35,6 +35,7 @@ public class HibernatePropertiesProvider { @Autowired private LocalContainerEntityManagerFactoryBean myEntityManagerFactory; + private Dialect myDialect; private String myHibernateSearchBackend; @@ -49,7 +50,8 @@ public class HibernatePropertiesProvider { public Dialect getDialect() { Dialect dialect = myDialect; if (dialect == null) { - String dialectClass = (String) myEntityManagerFactory.getJpaPropertyMap().get("hibernate.dialect"); + String dialectClass = + (String) myEntityManagerFactory.getJpaPropertyMap().get("hibernate.dialect"); dialect = ReflectionUtil.newInstanceOrReturnNull(dialectClass, Dialect.class); Validate.notNull(dialect, "Unable to create instance of class: %s", dialectClass); myDialect = dialect; @@ -58,17 +60,18 @@ public class HibernatePropertiesProvider { return dialect; } - public String getHibernateSearchBackend(){ + public String getHibernateSearchBackend() { String hibernateSearchBackend = myHibernateSearchBackend; if (StringUtils.isBlank(hibernateSearchBackend)) { - hibernateSearchBackend = (String) myEntityManagerFactory.getJpaPropertyMap().get(BackendSettings.backendKey(BackendSettings.TYPE)); - Validate.notNull(hibernateSearchBackend, BackendSettings.backendKey(BackendSettings.TYPE) + " property is unset!"); + hibernateSearchBackend = (String) + myEntityManagerFactory.getJpaPropertyMap().get(BackendSettings.backendKey(BackendSettings.TYPE)); + Validate.notNull( + hibernateSearchBackend, BackendSettings.backendKey(BackendSettings.TYPE) + " property is unset!"); myHibernateSearchBackend = hibernateSearchBackend; } return myHibernateSearchBackend; } - public DataSource getDataSource() { return myEntityManagerFactory.getDataSource(); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java index 7681d957657..941f2273b7c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java @@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.config; import ca.uhn.fhir.batch2.api.IJobPersistence; +import 
ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider; import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeJobSubmitterImpl; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; @@ -33,16 +34,15 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.api.svc.IDeleteExpungeSvc; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import ca.uhn.fhir.jpa.api.svc.IMdmClearHelperSvc; import ca.uhn.fhir.jpa.api.svc.ISearchUrlJobMaintenanceSvc; import ca.uhn.fhir.jpa.binary.interceptor.BinaryStorageInterceptor; import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider; import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper; -import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider; import ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportJobSchedulingHelperImpl; import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportHelperService; import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; import ca.uhn.fhir.jpa.bulk.imprt.svc.BulkDataImportSvcImpl; -import ca.uhn.fhir.jpa.api.svc.IMdmClearHelperSvc; import ca.uhn.fhir.jpa.bulk.mdm.MdmClearHelperSvcImpl; import ca.uhn.fhir.jpa.cache.IResourceVersionSvc; import ca.uhn.fhir.jpa.cache.ResourceVersionSvcDaoImpl; @@ -199,12 +199,14 @@ import org.springframework.scheduling.concurrent.ConcurrentTaskScheduler; import org.springframework.scheduling.concurrent.ScheduledExecutorFactoryBean; import org.springframework.transaction.PlatformTransactionManager; -import javax.annotation.Nullable; import java.util.Date; +import javax.annotation.Nullable; @Configuration // repositoryFactoryBeanClass: EnversRevisionRepositoryFactoryBean is needed primarily for unit testing -@EnableJpaRepositories(basePackages = "ca.uhn.fhir.jpa.dao.data", repositoryFactoryBeanClass = EnversRevisionRepositoryFactoryBean.class) +@EnableJpaRepositories( + basePackages = "ca.uhn.fhir.jpa.dao.data", + repositoryFactoryBeanClass = EnversRevisionRepositoryFactoryBean.class) @Import({ BeanPostProcessorConfig.class, TermCodeSystemConfig.class, @@ -223,7 +225,8 @@ public class JpaConfig { public static final String GRAPHQL_PROVIDER_NAME = "myGraphQLProvider"; public static final String PERSISTED_JPA_BUNDLE_PROVIDER = "PersistedJpaBundleProvider"; public static final String PERSISTED_JPA_BUNDLE_PROVIDER_BY_SEARCH = "PersistedJpaBundleProvider_BySearch"; - public static final String PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER = "PersistedJpaSearchFirstPageBundleProvider"; + public static final String PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER = + "PersistedJpaSearchFirstPageBundleProvider"; public static final String SEARCH_BUILDER = "SearchBuilder"; public static final String HISTORY_BUILDER = "HistoryBuilder"; private static final String HAPI_DEFAULT_SCHEDULER_GROUP = "HAPI"; @@ -238,8 +241,13 @@ public class JpaConfig { @Lazy @Bean - public CascadingDeleteInterceptor cascadingDeleteInterceptor(FhirContext theFhirContext, DaoRegistry theDaoRegistry, IInterceptorBroadcaster theInterceptorBroadcaster, ThreadSafeResourceDeleterSvc threadSafeResourceDeleterSvc) { - return new CascadingDeleteInterceptor(theFhirContext, theDaoRegistry, theInterceptorBroadcaster, threadSafeResourceDeleterSvc); + public CascadingDeleteInterceptor cascadingDeleteInterceptor( + FhirContext theFhirContext, + DaoRegistry theDaoRegistry, + IInterceptorBroadcaster theInterceptorBroadcaster, + ThreadSafeResourceDeleterSvc threadSafeResourceDeleterSvc) { + return new CascadingDeleteInterceptor( + theFhirContext, 
theDaoRegistry, theInterceptorBroadcaster, threadSafeResourceDeleterSvc); } @Bean @@ -249,18 +257,25 @@ public class JpaConfig { @Lazy @Bean - public ThreadSafeResourceDeleterSvc safeDeleter(DaoRegistry theDaoRegistry, IInterceptorBroadcaster theInterceptorBroadcaster, HapiTransactionService hapiTransactionService) { + public ThreadSafeResourceDeleterSvc safeDeleter( + DaoRegistry theDaoRegistry, + IInterceptorBroadcaster theInterceptorBroadcaster, + HapiTransactionService hapiTransactionService) { return new ThreadSafeResourceDeleterSvc(theDaoRegistry, theInterceptorBroadcaster, hapiTransactionService); } @Lazy @Bean - public ResponseTerminologyTranslationInterceptor responseTerminologyTranslationInterceptor(IValidationSupport theValidationSupport, ResponseTerminologyTranslationSvc theResponseTerminologyTranslationSvc) { - return new ResponseTerminologyTranslationInterceptor(theValidationSupport, theResponseTerminologyTranslationSvc); + public ResponseTerminologyTranslationInterceptor responseTerminologyTranslationInterceptor( + IValidationSupport theValidationSupport, + ResponseTerminologyTranslationSvc theResponseTerminologyTranslationSvc) { + return new ResponseTerminologyTranslationInterceptor( + theValidationSupport, theResponseTerminologyTranslationSvc); } @Bean - public ResponseTerminologyTranslationSvc responseTerminologyTranslationSvc(IValidationSupport theValidationSupport) { + public ResponseTerminologyTranslationSvc responseTerminologyTranslationSvc( + IValidationSupport theValidationSupport) { return new ResponseTerminologyTranslationSvc(theValidationSupport); } @@ -311,8 +326,10 @@ public class JpaConfig { @Bean(name = "myBinaryStorageInterceptor") @Lazy - public BinaryStorageInterceptor> binaryStorageInterceptor(JpaStorageSettings theStorageSettings, FhirContext theCtx) { - BinaryStorageInterceptor> interceptor = new BinaryStorageInterceptor<>(theCtx); + public BinaryStorageInterceptor> binaryStorageInterceptor( + JpaStorageSettings theStorageSettings, FhirContext theCtx) { + BinaryStorageInterceptor> interceptor = + new BinaryStorageInterceptor<>(theCtx); interceptor.setAllowAutoInflateBinaries(theStorageSettings.isAllowAutoInflateBinaries()); interceptor.setAutoInflateBinariesMaximumSize(theStorageSettings.getAutoInflateBinariesMaximumBytes()); return interceptor; @@ -404,7 +421,8 @@ public class JpaConfig { @Bean @Lazy - public OverridePathBasedReferentialIntegrityForDeletesInterceptor overridePathBasedReferentialIntegrityForDeletesInterceptor() { + public OverridePathBasedReferentialIntegrityForDeletesInterceptor + overridePathBasedReferentialIntegrityForDeletesInterceptor() { return new OverridePathBasedReferentialIntegrityForDeletesInterceptor(); } @@ -457,7 +475,6 @@ public class JpaConfig { return new RequestTenantPartitionInterceptor(); } - @Bean public MdmLinkExpandSvc mdmLinkExpandSvc() { return new MdmLinkExpandSvc(); @@ -486,8 +503,14 @@ public class JpaConfig { } @Bean - public IBulkDataExportJobSchedulingHelper bulkDataExportJobSchedulingHelper(DaoRegistry theDaoRegistry, PlatformTransactionManager theTxManager, JpaStorageSettings theStorageSettings, BulkExportHelperService theBulkExportHelperSvc, IJobPersistence theJpaJobPersistence) { - return new BulkDataExportJobSchedulingHelperImpl(theDaoRegistry, theTxManager, theStorageSettings, theBulkExportHelperSvc, theJpaJobPersistence, null); + public IBulkDataExportJobSchedulingHelper bulkDataExportJobSchedulingHelper( + DaoRegistry theDaoRegistry, + PlatformTransactionManager theTxManager, + JpaStorageSettings 
theStorageSettings, + BulkExportHelperService theBulkExportHelperSvc, + IJobPersistence theJpaJobPersistence) { + return new BulkDataExportJobSchedulingHelperImpl( + theDaoRegistry, theTxManager, theStorageSettings, theBulkExportHelperSvc, theJpaJobPersistence, null); } @Bean @@ -556,8 +579,14 @@ public class JpaConfig { @Bean(name = PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER) @Scope("prototype") - public PersistedJpaSearchFirstPageBundleProvider newPersistedJpaSearchFirstPageBundleProvider(RequestDetails theRequest, Search theSearch, SearchTask theSearchTask, ISearchBuilder theSearchBuilder, RequestPartitionId theRequestPartitionId) { - return new PersistedJpaSearchFirstPageBundleProvider(theSearch, theSearchTask, theSearchBuilder, theRequest, theRequestPartitionId); + public PersistedJpaSearchFirstPageBundleProvider newPersistedJpaSearchFirstPageBundleProvider( + RequestDetails theRequest, + Search theSearch, + SearchTask theSearchTask, + ISearchBuilder theSearchBuilder, + RequestPartitionId theRequestPartitionId) { + return new PersistedJpaSearchFirstPageBundleProvider( + theSearch, theSearchTask, theSearchBuilder, theRequest, theRequestPartitionId); } @Bean(name = RepositoryValidatingRuleBuilder.REPOSITORY_VALIDATING_RULE_BUILDER) @@ -568,13 +597,15 @@ public class JpaConfig { @Bean @Scope("prototype") - public ComboUniqueSearchParameterPredicateBuilder newComboUniqueSearchParameterPredicateBuilder(SearchQueryBuilder theSearchSqlBuilder) { + public ComboUniqueSearchParameterPredicateBuilder newComboUniqueSearchParameterPredicateBuilder( + SearchQueryBuilder theSearchSqlBuilder) { return new ComboUniqueSearchParameterPredicateBuilder(theSearchSqlBuilder); } @Bean @Scope("prototype") - public ComboNonUniqueSearchParameterPredicateBuilder newComboNonUniqueSearchParameterPredicateBuilder(SearchQueryBuilder theSearchSqlBuilder) { + public ComboNonUniqueSearchParameterPredicateBuilder newComboNonUniqueSearchParameterPredicateBuilder( + SearchQueryBuilder theSearchSqlBuilder) { return new ComboNonUniqueSearchParameterPredicateBuilder(theSearchSqlBuilder); } @@ -610,13 +641,15 @@ public class JpaConfig { @Bean @Scope("prototype") - public QuantityNormalizedPredicateBuilder newQuantityNormalizedPredicateBuilder(SearchQueryBuilder theSearchBuilder) { + public QuantityNormalizedPredicateBuilder newQuantityNormalizedPredicateBuilder( + SearchQueryBuilder theSearchBuilder) { return new QuantityNormalizedPredicateBuilder(theSearchBuilder); } @Bean @Scope("prototype") - public ResourceLinkPredicateBuilder newResourceLinkPredicateBuilder(QueryStack theQueryStack, SearchQueryBuilder theSearchBuilder, boolean theReversed) { + public ResourceLinkPredicateBuilder newResourceLinkPredicateBuilder( + QueryStack theQueryStack, SearchQueryBuilder theSearchBuilder, boolean theReversed) { return new ResourceLinkPredicateBuilder(theQueryStack, theSearchBuilder, theReversed); } @@ -640,7 +673,8 @@ public class JpaConfig { @Bean @Scope("prototype") - public SearchParamPresentPredicateBuilder newSearchParamPresentPredicateBuilder(SearchQueryBuilder theSearchBuilder) { + public SearchParamPresentPredicateBuilder newSearchParamPresentPredicateBuilder( + SearchQueryBuilder theSearchBuilder) { return new SearchParamPresentPredicateBuilder(theSearchBuilder); } @@ -668,7 +702,6 @@ public class JpaConfig { return new UriPredicateBuilder(theSearchBuilder); } - @Bean @Scope("prototype") public SearchQueryExecutor newSearchQueryExecutor(GeneratedSql theGeneratedSql, Integer theMaxResultsToFetch) { @@ -677,7 +710,11 @@ 
public class JpaConfig { @Bean(name = HISTORY_BUILDER) @Scope("prototype") - public HistoryBuilder newPersistedJpaSearchFirstPageBundleProvider(@Nullable String theResourceType, @Nullable Long theResourceId, @Nullable Date theRangeStartInclusive, @Nullable Date theRangeEndInclusive) { + public HistoryBuilder newPersistedJpaSearchFirstPageBundleProvider( + @Nullable String theResourceType, + @Nullable Long theResourceId, + @Nullable Date theRangeStartInclusive, + @Nullable Date theRangeEndInclusive) { return new HistoryBuilder(theResourceType, theResourceId, theRangeStartInclusive, theRangeEndInclusive); } @@ -714,7 +751,11 @@ public class JpaConfig { @Bean @Scope("prototype") - public ExpungeOperation expungeOperation(String theResourceName, IResourcePersistentId theResourceId, ExpungeOptions theExpungeOptions, RequestDetails theRequestDetails) { + public ExpungeOperation expungeOperation( + String theResourceName, + IResourcePersistentId theResourceId, + ExpungeOptions theExpungeOptions, + RequestDetails theRequestDetails) { return new ExpungeOperation(theResourceName, theResourceId, theExpungeOptions, theRequestDetails); } @@ -769,7 +810,8 @@ public class JpaConfig { } @Bean - public UnknownCodeSystemWarningValidationSupport unknownCodeSystemWarningValidationSupport(FhirContext theFhirContext) { + public UnknownCodeSystemWarningValidationSupport unknownCodeSystemWarningValidationSupport( + FhirContext theFhirContext) { return new UnknownCodeSystemWarningValidationSupport(theFhirContext); } @@ -778,14 +820,16 @@ public class JpaConfig { return new SynchronousSearchSvcImpl(); } - @Bean public VersionCanonicalizer versionCanonicalizer(FhirContext theFhirContext) { return new VersionCanonicalizer(theFhirContext); } @Bean - public SearchParameterDaoValidator searchParameterDaoValidator(FhirContext theFhirContext, JpaStorageSettings theStorageSettings, ISearchParamRegistry theSearchParamRegistry) { + public SearchParameterDaoValidator searchParameterDaoValidator( + FhirContext theFhirContext, + JpaStorageSettings theStorageSettings, + ISearchParamRegistry theSearchParamRegistry) { return new SearchParameterDaoValidator(theFhirContext, theStorageSettings, theSearchParamRegistry); } @@ -810,7 +854,7 @@ public class JpaConfig { } @Bean - public IMdmLinkDao mdmLinkDao(){ + public IMdmLinkDao mdmLinkDao() { return new MdmLinkDaoJpaImpl(); } @@ -821,17 +865,25 @@ public class JpaConfig { @Bean @Scope("prototype") - public PersistenceContextProvider persistenceContextProvider(){ + public PersistenceContextProvider persistenceContextProvider() { return new PersistenceContextProvider(); } @Bean - public ResourceSearchUrlSvc resourceSearchUrlSvc(PersistenceContextProvider thePersistenceContextProvider, IResourceSearchUrlDao theResourceSearchUrlDao, MatchUrlService theMatchUrlService, FhirContext theFhirContext){ - return new ResourceSearchUrlSvc(thePersistenceContextProvider.getEntityManager(), theResourceSearchUrlDao, theMatchUrlService, theFhirContext); + public ResourceSearchUrlSvc resourceSearchUrlSvc( + PersistenceContextProvider thePersistenceContextProvider, + IResourceSearchUrlDao theResourceSearchUrlDao, + MatchUrlService theMatchUrlService, + FhirContext theFhirContext) { + return new ResourceSearchUrlSvc( + thePersistenceContextProvider.getEntityManager(), + theResourceSearchUrlDao, + theMatchUrlService, + theFhirContext); } @Bean - public ISearchUrlJobMaintenanceSvc searchUrlJobMaintenanceSvc(ResourceSearchUrlSvc theResourceSearchUrlSvc){ + public ISearchUrlJobMaintenanceSvc 
searchUrlJobMaintenanceSvc(ResourceSearchUrlSvc theResourceSearchUrlSvc) { return new SearchUrlJobMaintenanceSvcImpl(theResourceSearchUrlSvc); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaDstu2Config.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaDstu2Config.java index 93d56aa52be..caba0c3b13c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaDstu2Config.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaDstu2Config.java @@ -35,11 +35,7 @@ import org.springframework.transaction.annotation.EnableTransactionManagement; @Configuration @EnableTransactionManagement -@Import({ - FhirContextDstu2Config.class, - GeneratedDaoAndResourceProviderConfigDstu2.class, - JpaConfig.class -}) +@Import({FhirContextDstu2Config.class, GeneratedDaoAndResourceProviderConfigDstu2.class, JpaConfig.class}) public class JpaDstu2Config { @Bean public ITransactionProcessorVersionAdapter transactionProcessorVersionFacade() { @@ -64,5 +60,4 @@ public class JpaDstu2Config { retVal.setContext(theFhirContext); return retVal; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/PackageLoaderConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/PackageLoaderConfig.java index 9618c8cebcd..1de612bbbb3 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/PackageLoaderConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/PackageLoaderConfig.java @@ -22,7 +22,6 @@ package ca.uhn.fhir.jpa.config; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.packages.loader.PackageLoaderSvc; import ca.uhn.fhir.jpa.packages.loader.PackageResourceParsingSvc; -import org.hl7.fhir.utilities.npm.PackageClient; import org.hl7.fhir.utilities.npm.PackageServer; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/SearchConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/SearchConfig.java index a4f7d96c37b..9fb6daf6365 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/SearchConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/SearchConfig.java @@ -62,69 +62,90 @@ public class SearchConfig { @Autowired private JpaStorageSettings myStorageSettings; + @Autowired private HapiFhirLocalContainerEntityManagerFactoryBean myEntityManagerFactory; + @Autowired private SqlObjectFactory mySqlBuilderFactory; + @Autowired private HibernatePropertiesProvider myDialectProvider; + @Autowired private ISearchParamRegistry mySearchParamRegistry; + @Autowired private PartitionSettings myPartitionSettings; + @Autowired protected IInterceptorBroadcaster myInterceptorBroadcaster; + @Autowired protected IResourceTagDao myResourceTagDao; + @Autowired private DaoRegistry myDaoRegistry; + @Autowired private IResourceSearchViewDao myResourceSearchViewDao; + @Autowired private FhirContext myContext; + @Autowired private IIdHelperService myIdHelperService; + @Autowired private PlatformTransactionManager myManagedTxManager; + @Autowired private SearchStrategyFactory mySearchStrategyFactory; + @Autowired private SearchBuilderFactory mySearchBuilderFactory; + @Autowired private ISearchResultCacheSvc mySearchResultCacheSvc; + @Autowired private ISearchCacheSvc mySearchCacheSvc; + @Autowired private IPagingProvider myPagingProvider; + @Autowired private 
BeanFactory myBeanFactory; + @Autowired private ISynchronousSearchSvc mySynchronousSearchSvc; + @Autowired private PersistedJpaBundleProviderFactory myPersistedJpaBundleProviderFactory; + @Autowired private IRequestPartitionHelperSvc myRequestPartitionHelperService; + @Autowired private HapiTransactionService myHapiTransactionService; @Bean public ISearchCoordinatorSvc searchCoordinatorSvc() { return new SearchCoordinatorSvcImpl( - myContext, - myStorageSettings, - myInterceptorBroadcaster, - myHapiTransactionService, - mySearchCacheSvc, - mySearchResultCacheSvc, - myDaoRegistry, - mySearchBuilderFactory, - mySynchronousSearchSvc, - myPersistedJpaBundleProviderFactory, - mySearchParamRegistry, - mySearchStrategyFactory, - exceptionService(), - myBeanFactory - ); + myContext, + myStorageSettings, + myInterceptorBroadcaster, + myHapiTransactionService, + mySearchCacheSvc, + mySearchResultCacheSvc, + myDaoRegistry, + mySearchBuilderFactory, + mySynchronousSearchSvc, + myPersistedJpaBundleProviderFactory, + mySearchParamRegistry, + mySearchStrategyFactory, + exceptionService(), + myBeanFactory); } @Bean @@ -134,54 +155,55 @@ public class SearchConfig { @Bean(name = ISearchBuilder.SEARCH_BUILDER_BEAN_NAME) @Scope("prototype") - public ISearchBuilder newSearchBuilder(IDao theDao, String theResourceName, Class theResourceType) { - return new SearchBuilder(theDao, - theResourceName, - myStorageSettings, - myEntityManagerFactory, - mySqlBuilderFactory, - myDialectProvider, - mySearchParamRegistry, - myPartitionSettings, - myInterceptorBroadcaster, - myResourceTagDao, - myDaoRegistry, - myResourceSearchViewDao, - myContext, - myIdHelperService, - theResourceType - ); + public ISearchBuilder newSearchBuilder( + IDao theDao, String theResourceName, Class theResourceType) { + return new SearchBuilder( + theDao, + theResourceName, + myStorageSettings, + myEntityManagerFactory, + mySqlBuilderFactory, + myDialectProvider, + mySearchParamRegistry, + myPartitionSettings, + myInterceptorBroadcaster, + myResourceTagDao, + myDaoRegistry, + myResourceSearchViewDao, + myContext, + myIdHelperService, + theResourceType); } @Bean(name = SEARCH_TASK) @Scope("prototype") public SearchTask createSearchTask(SearchTaskParameters theParams) { - return new SearchTask(theParams, - myHapiTransactionService, - myContext, - myInterceptorBroadcaster, - mySearchBuilderFactory, - mySearchResultCacheSvc, - myStorageSettings, - mySearchCacheSvc, - myPagingProvider - ); + return new SearchTask( + theParams, + myHapiTransactionService, + myContext, + myInterceptorBroadcaster, + mySearchBuilderFactory, + mySearchResultCacheSvc, + myStorageSettings, + mySearchCacheSvc, + myPagingProvider); } - @Bean(name = CONTINUE_TASK) @Scope("prototype") public SearchContinuationTask createSearchContinuationTask(SearchTaskParameters theParams) { - return new SearchContinuationTask(theParams, - myHapiTransactionService, - myContext, - myInterceptorBroadcaster, - mySearchBuilderFactory, - mySearchResultCacheSvc, - myStorageSettings, - mySearchCacheSvc, - myPagingProvider, - exceptionService() // singleton - ); + return new SearchContinuationTask( + theParams, + myHapiTransactionService, + myContext, + myInterceptorBroadcaster, + mySearchBuilderFactory, + mySearchResultCacheSvc, + myStorageSettings, + mySearchCacheSvc, + myPagingProvider, + exceptionService() // singleton + ); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/ValidationSupportConfig.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/ValidationSupportConfig.java index a7e7f91f102..3cbd8bc5ddf 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/ValidationSupportConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/ValidationSupportConfig.java @@ -56,16 +56,23 @@ public class ValidationSupportConfig { } @Bean(name = "myInstanceValidator") - public IInstanceValidatorModule instanceValidator(FhirContext theFhirContext, CachingValidationSupport theCachingValidationSupport, ValidationSupportChain theValidationSupportChain, IValidationSupport theValidationSupport, DaoRegistry theDaoRegistry) { + public IInstanceValidatorModule instanceValidator( + FhirContext theFhirContext, + CachingValidationSupport theCachingValidationSupport, + ValidationSupportChain theValidationSupportChain, + IValidationSupport theValidationSupport, + DaoRegistry theDaoRegistry) { if (theFhirContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3)) { FhirInstanceValidator val = new FhirInstanceValidator(theCachingValidationSupport); - val.setValidatorResourceFetcher(jpaValidatorResourceFetcher(theFhirContext, theValidationSupport, theDaoRegistry)); + val.setValidatorResourceFetcher( + jpaValidatorResourceFetcher(theFhirContext, theValidationSupport, theDaoRegistry)); val.setValidatorPolicyAdvisor(jpaValidatorPolicyAdvisor()); val.setBestPracticeWarningLevel(BestPracticeWarningLevel.Warning); val.setValidationSupport(theCachingValidationSupport); return val; } else { - CachingValidationSupport cachingValidationSupport = new CachingValidationSupport(new HapiToHl7OrgDstu2ValidatingSupportWrapper(theValidationSupportChain)); + CachingValidationSupport cachingValidationSupport = new CachingValidationSupport( + new HapiToHl7OrgDstu2ValidatingSupportWrapper(theValidationSupportChain)); FhirInstanceValidator retVal = new FhirInstanceValidator(cachingValidationSupport); retVal.setBestPracticeWarningLevel(BestPracticeWarningLevel.Warning); return retVal; @@ -74,7 +81,8 @@ public class ValidationSupportConfig { @Bean @Lazy - public ValidatorResourceFetcher jpaValidatorResourceFetcher(FhirContext theFhirContext, IValidationSupport theValidationSupport, DaoRegistry theDaoRegistry) { + public ValidatorResourceFetcher jpaValidatorResourceFetcher( + FhirContext theFhirContext, IValidationSupport theValidationSupport, DaoRegistry theDaoRegistry) { return new ValidatorResourceFetcher(theFhirContext, theValidationSupport, theDaoRegistry); } @@ -83,5 +91,4 @@ public class ValidationSupportConfig { public ValidatorPolicyAdvisor jpaValidatorPolicyAdvisor() { return new ValidatorPolicyAdvisor(); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/dstu3/FhirContextDstu3Config.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/dstu3/FhirContextDstu3Config.java index 34fcab7bdac..9d0e132c844 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/dstu3/FhirContextDstu3Config.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/dstu3/FhirContextDstu3Config.java @@ -35,5 +35,4 @@ public class FhirContextDstu3Config { return retVal; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/dstu3/JpaDstu3Config.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/dstu3/JpaDstu3Config.java index b5ce122c8de..9a9907d57de 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/dstu3/JpaDstu3Config.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/dstu3/JpaDstu3Config.java @@ -48,11 +48,7 @@ import org.springframework.transaction.annotation.EnableTransactionManagement; @Configuration @EnableTransactionManagement -@Import({ - FhirContextDstu3Config.class, - GeneratedDaoAndResourceProviderConfigDstu3.class, - JpaConfig.class -}) +@Import({FhirContextDstu3Config.class, GeneratedDaoAndResourceProviderConfigDstu3.class, JpaConfig.class}) public class JpaDstu3Config { @Bean public ITermVersionAdapterSvc terminologyVersionAdapterSvc() { @@ -61,8 +57,18 @@ public class JpaDstu3Config { @Bean(name = JpaConfig.GRAPHQL_PROVIDER_NAME) @Lazy - public GraphQLProvider graphQLProvider(FhirContext theFhirContext, IGraphQLStorageServices theGraphqlStorageServices, IValidationSupport theValidationSupport, ISearchParamRegistry theSearchParamRegistry, IDaoRegistry theDaoRegistry) { - return new GraphQLProviderWithIntrospection(theFhirContext, theValidationSupport, theGraphqlStorageServices, theSearchParamRegistry, theDaoRegistry); + public GraphQLProvider graphQLProvider( + FhirContext theFhirContext, + IGraphQLStorageServices theGraphqlStorageServices, + IValidationSupport theValidationSupport, + ISearchParamRegistry theSearchParamRegistry, + IDaoRegistry theDaoRegistry) { + return new GraphQLProviderWithIntrospection( + theFhirContext, + theValidationSupport, + theGraphqlStorageServices, + theSearchParamRegistry, + theDaoRegistry); } @Bean @@ -84,8 +90,8 @@ public class JpaDstu3Config { } @Bean - public ITermLoaderSvc termLoaderService(ITermDeferredStorageSvc theDeferredStorageSvc, ITermCodeSystemStorageSvc theCodeSystemStorageSvc) { + public ITermLoaderSvc termLoaderService( + ITermDeferredStorageSvc theDeferredStorageSvc, ITermCodeSystemStorageSvc theCodeSystemStorageSvc) { return new TermLoaderSvcImpl(theDeferredStorageSvc, theCodeSystemStorageSvc); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4/JpaR4Config.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4/JpaR4Config.java index 38dd34df2e0..281abe9a277 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4/JpaR4Config.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4/JpaR4Config.java @@ -56,11 +56,7 @@ import org.springframework.transaction.annotation.EnableTransactionManagement; @Configuration @EnableTransactionManagement -@Import({ - FhirContextR4Config.class, - GeneratedDaoAndResourceProviderConfigR4.class, - JpaConfig.class -}) +@Import({FhirContextR4Config.class, GeneratedDaoAndResourceProviderConfigR4.class, JpaConfig.class}) public class JpaR4Config { @Bean @@ -75,8 +71,18 @@ public class JpaR4Config { @Bean(name = JpaConfig.GRAPHQL_PROVIDER_NAME) @Lazy - public GraphQLProvider graphQLProvider(FhirContext theFhirContext, IGraphQLStorageServices theGraphqlStorageServices, IValidationSupport theValidationSupport, ISearchParamRegistry theSearchParamRegistry, IDaoRegistry theDaoRegistry) { - return new GraphQLProviderWithIntrospection(theFhirContext, theValidationSupport, theGraphqlStorageServices, theSearchParamRegistry, theDaoRegistry); + public GraphQLProvider graphQLProvider( + FhirContext theFhirContext, + IGraphQLStorageServices theGraphqlStorageServices, + IValidationSupport theValidationSupport, + ISearchParamRegistry theSearchParamRegistry, + IDaoRegistry theDaoRegistry) { + return new GraphQLProviderWithIntrospection( + theFhirContext, + theValidationSupport, + theGraphqlStorageServices, + theSearchParamRegistry, + 
theDaoRegistry); } @Bean(name = "mySystemDaoR4") @@ -94,29 +100,25 @@ public class JpaR4Config { } @Bean - public ITermLoaderSvc termLoaderService(ITermDeferredStorageSvc theDeferredStorageSvc, ITermCodeSystemStorageSvc theCodeSystemStorageSvc) { + public ITermLoaderSvc termLoaderService( + ITermDeferredStorageSvc theDeferredStorageSvc, ITermCodeSystemStorageSvc theCodeSystemStorageSvc) { return new TermLoaderSvcImpl(theDeferredStorageSvc, theCodeSystemStorageSvc); } @Bean public MemberMatcherR4Helper memberMatcherR4Helper( - @Autowired FhirContext theContext, - @Autowired IFhirResourceDao theCoverageDao, - @Autowired IFhirResourceDao thePatientDao, - @Autowired IFhirResourceDao theConsentDao, - @Autowired(required = false) IMemberMatchConsentHook theExtensionProvider - ) { + @Autowired FhirContext theContext, + @Autowired IFhirResourceDao theCoverageDao, + @Autowired IFhirResourceDao thePatientDao, + @Autowired IFhirResourceDao theConsentDao, + @Autowired(required = false) IMemberMatchConsentHook theExtensionProvider) { return new MemberMatcherR4Helper( - theContext, - theCoverageDao, - thePatientDao, - theConsentDao, - theExtensionProvider - ); + theContext, theCoverageDao, thePatientDao, theConsentDao, theExtensionProvider); } @Bean - public MemberMatchR4ResourceProvider memberMatchR4ResourceProvider(FhirContext theFhirContext, MemberMatcherR4Helper theMemberMatchR4Helper) { + public MemberMatchR4ResourceProvider memberMatchR4ResourceProvider( + FhirContext theFhirContext, MemberMatcherR4Helper theMemberMatchR4Helper) { return new MemberMatchR4ResourceProvider(theFhirContext, theMemberMatchR4Helper); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4b/FhirContextR4BConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4b/FhirContextR4BConfig.java index 68af537318d..8fcd4b5e945 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4b/FhirContextR4BConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4b/FhirContextR4BConfig.java @@ -36,5 +36,4 @@ public class FhirContextR4BConfig { return retVal; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4b/JpaR4BConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4b/JpaR4BConfig.java index 335447ee595..e671d95feba 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4b/JpaR4BConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r4b/JpaR4BConfig.java @@ -48,11 +48,7 @@ import org.springframework.transaction.annotation.EnableTransactionManagement; @Configuration @EnableTransactionManagement -@Import({ - FhirContextR4BConfig.class, - GeneratedDaoAndResourceProviderConfigR4B.class, - JpaConfig.class -}) +@Import({FhirContextR4BConfig.class, GeneratedDaoAndResourceProviderConfigR4B.class, JpaConfig.class}) public class JpaR4BConfig { @Bean @@ -67,8 +63,18 @@ public class JpaR4BConfig { @Bean(name = JpaConfig.GRAPHQL_PROVIDER_NAME) @Lazy - public GraphQLProvider graphQLProvider(FhirContext theFhirContext, IGraphQLStorageServices theGraphqlStorageServices, IValidationSupport theValidationSupport, ISearchParamRegistry theSearchParamRegistry, IDaoRegistry theDaoRegistry) { - return new GraphQLProviderWithIntrospection(theFhirContext, theValidationSupport, theGraphqlStorageServices, theSearchParamRegistry, theDaoRegistry); + public GraphQLProvider graphQLProvider( + FhirContext theFhirContext, + IGraphQLStorageServices theGraphqlStorageServices, + 
IValidationSupport theValidationSupport, + ISearchParamRegistry theSearchParamRegistry, + IDaoRegistry theDaoRegistry) { + return new GraphQLProviderWithIntrospection( + theFhirContext, + theValidationSupport, + theGraphqlStorageServices, + theSearchParamRegistry, + theDaoRegistry); } @Bean(name = "mySystemDaoR4B") @@ -86,8 +92,8 @@ public class JpaR4BConfig { } @Bean - public ITermLoaderSvc termLoaderService(ITermDeferredStorageSvc theDeferredStorageSvc, ITermCodeSystemStorageSvc theCodeSystemStorageSvc) { + public ITermLoaderSvc termLoaderService( + ITermDeferredStorageSvc theDeferredStorageSvc, ITermCodeSystemStorageSvc theCodeSystemStorageSvc) { return new TermLoaderSvcImpl(theDeferredStorageSvc, theCodeSystemStorageSvc); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r5/FhirContextR5Config.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r5/FhirContextR5Config.java index e7e36c5e2d9..d24177895a7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r5/FhirContextR5Config.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r5/FhirContextR5Config.java @@ -35,5 +35,4 @@ public class FhirContextR5Config { return retVal; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r5/JpaR5Config.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r5/JpaR5Config.java index badb1f01217..ef945a55b67 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r5/JpaR5Config.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/r5/JpaR5Config.java @@ -48,11 +48,7 @@ import org.springframework.transaction.annotation.EnableTransactionManagement; @Configuration @EnableTransactionManagement -@Import({ - FhirContextR5Config.class, - GeneratedDaoAndResourceProviderConfigR5.class, - JpaConfig.class -}) +@Import({FhirContextR5Config.class, GeneratedDaoAndResourceProviderConfigR5.class, JpaConfig.class}) public class JpaR5Config { @Bean @@ -60,7 +56,6 @@ public class JpaR5Config { return new TermVersionAdapterSvcR5(); } - @Bean public ITransactionProcessorVersionAdapter transactionProcessorVersionFacade() { return new TransactionProcessorVersionAdapterR5(); @@ -68,8 +63,18 @@ public class JpaR5Config { @Bean(name = JpaConfig.GRAPHQL_PROVIDER_NAME) @Lazy - public GraphQLProvider graphQLProvider(FhirContext theFhirContext, IGraphQLStorageServices theGraphqlStorageServices, IValidationSupport theValidationSupport, ISearchParamRegistry theSearchParamRegistry, IDaoRegistry theDaoRegistry) { - return new GraphQLProviderWithIntrospection(theFhirContext, theValidationSupport, theGraphqlStorageServices, theSearchParamRegistry, theDaoRegistry); + public GraphQLProvider graphQLProvider( + FhirContext theFhirContext, + IGraphQLStorageServices theGraphqlStorageServices, + IValidationSupport theValidationSupport, + ISearchParamRegistry theSearchParamRegistry, + IDaoRegistry theDaoRegistry) { + return new GraphQLProviderWithIntrospection( + theFhirContext, + theValidationSupport, + theGraphqlStorageServices, + theSearchParamRegistry, + theDaoRegistry); } @Bean(name = "mySystemDaoR5") @@ -87,8 +92,8 @@ public class JpaR5Config { } @Bean - public ITermLoaderSvc terminologyLoaderService(ITermDeferredStorageSvc theDeferredStorageSvc, ITermCodeSystemStorageSvc theCodeSystemStorageSvc) { + public ITermLoaderSvc terminologyLoaderService( + ITermDeferredStorageSvc theDeferredStorageSvc, ITermCodeSystemStorageSvc theCodeSystemStorageSvc) { return new 
TermLoaderSvcImpl(theDeferredStorageSvc, theCodeSystemStorageSvc); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/BasicDataSourceConnectionPoolInfoProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/BasicDataSourceConnectionPoolInfoProvider.java index 65bc571abd0..df5bcaee03e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/BasicDataSourceConnectionPoolInfoProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/BasicDataSourceConnectionPoolInfoProvider.java @@ -36,17 +36,16 @@ public class BasicDataSourceConnectionPoolInfoProvider implements IConnectionPoo @Override public Optional getTotalConnectionSize() { - return Optional.of( myDataSource.getMaxTotal() ); + return Optional.of(myDataSource.getMaxTotal()); } @Override public Optional getActiveConnections() { - return Optional.of( myDataSource.getNumActive() ); + return Optional.of(myDataSource.getNumActive()); } @Override public Optional getMaxWaitMillis() { - return Optional.of( myDataSource.getMaxWaitMillis() ); + return Optional.of(myDataSource.getMaxWaitMillis()); } } - diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/ConnectionPoolInfoProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/ConnectionPoolInfoProvider.java index 1efd62b5541..6d9c57af632 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/ConnectionPoolInfoProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/ConnectionPoolInfoProvider.java @@ -24,9 +24,9 @@ import org.apache.commons.dbcp2.BasicDataSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.sql.DataSource; import java.sql.SQLException; import java.util.Optional; +import javax.sql.DataSource; /** * Utility to hide complexity involved in obtaining connection pool information @@ -36,14 +36,13 @@ public class ConnectionPoolInfoProvider implements IConnectionPoolInfoProvider { private IConnectionPoolInfoProvider myProvider; - public ConnectionPoolInfoProvider(DataSource theDataSource) { if (theDataSource.getClass().isAssignableFrom(BasicDataSource.class)) { - myProvider = new BasicDataSourceConnectionPoolInfoProvider((BasicDataSource) theDataSource); + myProvider = new BasicDataSourceConnectionPoolInfoProvider((BasicDataSource) theDataSource); return; } - if ( theDataSource.getClass().isAssignableFrom(ProxyDataSource.class)) { + if (theDataSource.getClass().isAssignableFrom(ProxyDataSource.class)) { boolean basiDataSourceWrapped; try { basiDataSourceWrapped = theDataSource.isWrapperFor(BasicDataSource.class); @@ -51,11 +50,11 @@ public class ConnectionPoolInfoProvider implements IConnectionPoolInfoProvider { BasicDataSource basicDataSource = theDataSource.unwrap(BasicDataSource.class); myProvider = new BasicDataSourceConnectionPoolInfoProvider(basicDataSource); } - } catch (SQLException ignored) { } + } catch (SQLException ignored) { + } } } - @Override public Optional getTotalConnectionSize() { return myProvider == null ? Optional.empty() : myProvider.getTotalConnectionSize(); @@ -71,6 +70,3 @@ public class ConnectionPoolInfoProvider implements IConnectionPoolInfoProvider { return myProvider == null ? 
Optional.empty() : myProvider.getMaxWaitMillis(); } } - - - diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/HapiEntityManagerFactoryUtil.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/HapiEntityManagerFactoryUtil.java index 654a5f58ec4..f3c0a72e75f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/HapiEntityManagerFactoryUtil.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/HapiEntityManagerFactoryUtil.java @@ -32,16 +32,18 @@ public final class HapiEntityManagerFactoryUtil { * This method provides a partially completed entity manager * factory with HAPI FHIR customizations */ - public static LocalContainerEntityManagerFactoryBean newEntityManagerFactory(ConfigurableListableBeanFactory myConfigurableListableBeanFactory, FhirContext theFhirContext) { - LocalContainerEntityManagerFactoryBean retVal = new HapiFhirLocalContainerEntityManagerFactoryBean(myConfigurableListableBeanFactory); + public static LocalContainerEntityManagerFactoryBean newEntityManagerFactory( + ConfigurableListableBeanFactory myConfigurableListableBeanFactory, FhirContext theFhirContext) { + LocalContainerEntityManagerFactoryBean retVal = + new HapiFhirLocalContainerEntityManagerFactoryBean(myConfigurableListableBeanFactory); configureEntityManagerFactory(retVal, theFhirContext); return retVal; } - public static void configureEntityManagerFactory(LocalContainerEntityManagerFactoryBean theFactory, FhirContext theFhirContext) { + public static void configureEntityManagerFactory( + LocalContainerEntityManagerFactoryBean theFactory, FhirContext theFhirContext) { theFactory.setJpaDialect(new HapiFhirHibernateJpaDialect(theFhirContext.getLocalizer())); theFactory.setPackagesToScan("ca.uhn.fhir.jpa.model.entity", "ca.uhn.fhir.jpa.entity"); theFactory.setPersistenceProvider(new HibernatePersistenceProvider()); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/IConnectionPoolInfoProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/IConnectionPoolInfoProvider.java index 3c741b06b13..8f87b3ddf06 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/IConnectionPoolInfoProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/IConnectionPoolInfoProvider.java @@ -19,7 +19,6 @@ */ package ca.uhn.fhir.jpa.config.util; - import java.util.Optional; public interface IConnectionPoolInfoProvider { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/ResourceCountCacheUtil.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/ResourceCountCacheUtil.java index 4f940450a47..a3dd1b3ce9d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/ResourceCountCacheUtil.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/ResourceCountCacheUtil.java @@ -25,6 +25,7 @@ import org.apache.commons.lang3.time.DateUtils; public final class ResourceCountCacheUtil { private ResourceCountCacheUtil() {} + public static ResourceCountCache newResourceCountCache(IFhirSystemDao theSystemDao) { ResourceCountCache retVal = new ResourceCountCache(() -> theSystemDao.getResourceCounts()); retVal.setCacheMillis(4 * DateUtils.MILLIS_PER_HOUR); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/ValidationSupportConfigUtil.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/ValidationSupportConfigUtil.java index 2bc0abb9873..2c0edc73519 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/ValidationSupportConfigUtil.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/util/ValidationSupportConfigUtil.java @@ -25,15 +25,19 @@ import org.hl7.fhir.common.hapi.validation.support.CachingValidationSupport; public final class ValidationSupportConfigUtil { private ValidationSupportConfigUtil() {} - public static CachingValidationSupport newCachingValidationSupport(JpaValidationSupportChain theJpaValidationSupportChain) { + public static CachingValidationSupport newCachingValidationSupport( + JpaValidationSupportChain theJpaValidationSupportChain) { return newCachingValidationSupport(theJpaValidationSupportChain, false); } - public static CachingValidationSupport newCachingValidationSupport(JpaValidationSupportChain theJpaValidationSupportChain, boolean theIsEnabledValidationForCodingsLogicalAnd) { + public static CachingValidationSupport newCachingValidationSupport( + JpaValidationSupportChain theJpaValidationSupportChain, + boolean theIsEnabledValidationForCodingsLogicalAnd) { // Short timeout for code translation because TermConceptMappingSvcImpl has its own caching - CachingValidationSupport.CacheTimeouts cacheTimeouts = CachingValidationSupport.CacheTimeouts.defaultValues() - .setTranslateCodeMillis(1000); + CachingValidationSupport.CacheTimeouts cacheTimeouts = + CachingValidationSupport.CacheTimeouts.defaultValues().setTranslateCodeMillis(1000); - return new CachingValidationSupport(theJpaValidationSupportChain, cacheTimeouts, theIsEnabledValidationForCodingsLogicalAnd); + return new CachingValidationSupport( + theJpaValidationSupportChain, cacheTimeouts, theIsEnabledValidationForCodingsLogicalAnd); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java index c947b7a051c..4e0691dd8a4 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java @@ -137,6 +137,18 @@ import org.springframework.transaction.support.TransactionSynchronization; import org.springframework.transaction.support.TransactionSynchronizationManager; import org.springframework.transaction.support.TransactionTemplate; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.IdentityHashMap; +import java.util.List; +import java.util.Set; +import java.util.StringTokenizer; +import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.annotation.PostConstruct; @@ -151,18 +163,6 @@ import javax.persistence.criteria.Predicate; import javax.persistence.criteria.Root; import javax.xml.stream.events.Characters; import javax.xml.stream.events.XMLEvent; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.IdentityHashMap; -import java.util.List; -import java.util.Set; -import java.util.StringTokenizer; -import java.util.stream.Collectors; import static java.util.Objects.isNull; import static 
java.util.Objects.nonNull; @@ -171,9 +171,6 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; import static org.apache.commons.lang3.StringUtils.left; import static org.apache.commons.lang3.StringUtils.trim; - - - /** * TODO: JA - This class has only one subclass now. Historically it was a common * ancestor for BaseHapiFhirSystemDao and BaseHapiFhirResourceDao but I've untangled @@ -184,7 +181,8 @@ import static org.apache.commons.lang3.StringUtils.trim; */ @SuppressWarnings("WeakerAccess") @Repository -public abstract class BaseHapiFhirDao extends BaseStorageResourceDao implements IDao, IJpaDao, ApplicationContextAware { +public abstract class BaseHapiFhirDao extends BaseStorageResourceDao + implements IDao, IJpaDao, ApplicationContextAware { public static final long INDEX_STATUS_INDEXED = 1L; public static final long INDEX_STATUS_INDEXING_FAILED = 2L; @@ -197,59 +195,84 @@ public abstract class BaseHapiFhirDao extends BaseStora @PersistenceContext(type = PersistenceContextType.TRANSACTION) protected EntityManager myEntityManager; + @Autowired protected IIdHelperService myIdHelperService; + @Autowired protected IForcedIdDao myForcedIdDao; + @Autowired protected ISearchCoordinatorSvc mySearchCoordinatorSvc; + @Autowired protected ITermReadSvc myTerminologySvc; + @Autowired protected IResourceHistoryTableDao myResourceHistoryTableDao; + @Autowired protected IResourceTableDao myResourceTableDao; + @Autowired protected IResourceLinkDao myResourceLinkDao; + @Autowired protected IResourceTagDao myResourceTagDao; + @Autowired protected DeleteConflictService myDeleteConflictService; + @Autowired protected IInterceptorBroadcaster myInterceptorBroadcaster; + @Autowired protected DaoRegistry myDaoRegistry; + @Autowired protected InMemoryResourceMatcher myInMemoryResourceMatcher; + @Autowired protected IJpaStorageResourceParser myJpaStorageResourceParser; + @Autowired protected PartitionSettings myPartitionSettings; + @Autowired ExpungeService myExpungeService; + @Autowired private ExternallyStoredResourceServiceRegistry myExternallyStoredResourceServiceRegistry; + @Autowired private ISearchParamPresenceSvc mySearchParamPresenceSvc; + @Autowired private SearchParamWithInlineReferencesExtractor mySearchParamWithInlineReferencesExtractor; + @Autowired private DaoSearchParamSynchronizer myDaoSearchParamSynchronizer; + private FhirContext myContext; private ApplicationContext myApplicationContext; + @Autowired private IPartitionLookupSvc myPartitionLookupSvc; + @Autowired private MemoryCacheService myMemoryCacheService; + @Autowired(required = false) private IFulltextSearchSvc myFulltextSearchSvc; + @Autowired private PlatformTransactionManager myTransactionManager; protected final CodingSpy myCodingSpy = new CodingSpy(); @VisibleForTesting - public void setExternallyStoredResourceServiceRegistryForUnitTest(ExternallyStoredResourceServiceRegistry theExternallyStoredResourceServiceRegistry) { + public void setExternallyStoredResourceServiceRegistryForUnitTest( + ExternallyStoredResourceServiceRegistry theExternallyStoredResourceServiceRegistry) { myExternallyStoredResourceServiceRegistry = theExternallyStoredResourceServiceRegistry; } @@ -279,12 +302,22 @@ public abstract class BaseHapiFhirDao extends BaseStora } } - private void extractTagsHapi(TransactionDetails theTransactionDetails, IResource theResource, ResourceTable theEntity, Set allDefs) { + private void extractTagsHapi( + TransactionDetails theTransactionDetails, + IResource theResource, + ResourceTable theEntity, + Set allDefs) 
{ TagList tagList = ResourceMetadataKeyEnum.TAG_LIST.get(theResource); if (tagList != null) { for (Tag next : tagList) { - TagDefinition def = getTagOrNull(theTransactionDetails, TagTypeEnum.TAG, next.getScheme(), next.getTerm(), - next.getLabel(), next.getVersion(), myCodingSpy.getBooleanObject(next)); + TagDefinition def = getTagOrNull( + theTransactionDetails, + TagTypeEnum.TAG, + next.getScheme(), + next.getTerm(), + next.getLabel(), + next.getVersion(), + myCodingSpy.getBooleanObject(next)); if (def != null) { ResourceTag tag = theEntity.addTag(def); allDefs.add(tag); @@ -296,8 +329,14 @@ public abstract class BaseHapiFhirDao extends BaseStora List securityLabels = ResourceMetadataKeyEnum.SECURITY_LABELS.get(theResource); if (securityLabels != null) { for (BaseCodingDt next : securityLabels) { - TagDefinition def = getTagOrNull(theTransactionDetails, TagTypeEnum.SECURITY_LABEL, next.getSystemElement().getValue(), - next.getCodeElement().getValue(), next.getDisplayElement().getValue(), null, null); + TagDefinition def = getTagOrNull( + theTransactionDetails, + TagTypeEnum.SECURITY_LABEL, + next.getSystemElement().getValue(), + next.getCodeElement().getValue(), + next.getDisplayElement().getValue(), + null, + null); if (def != null) { ResourceTag tag = theEntity.addTag(def); allDefs.add(tag); @@ -309,7 +348,8 @@ public abstract class BaseHapiFhirDao extends BaseStora List profiles = ResourceMetadataKeyEnum.PROFILES.get(theResource); if (profiles != null) { for (IIdType next : profiles) { - TagDefinition def = getTagOrNull(theTransactionDetails, TagTypeEnum.PROFILE, NS_JPA_PROFILE, next.getValue(), null, null, null); + TagDefinition def = getTagOrNull( + theTransactionDetails, TagTypeEnum.PROFILE, NS_JPA_PROFILE, next.getValue(), null, null, null); if (def != null) { ResourceTag tag = theEntity.addTag(def); allDefs.add(tag); @@ -319,12 +359,22 @@ public abstract class BaseHapiFhirDao extends BaseStora } } - private void extractTagsRi(TransactionDetails theTransactionDetails, IAnyResource theResource, ResourceTable theEntity, Set theAllTags) { + private void extractTagsRi( + TransactionDetails theTransactionDetails, + IAnyResource theResource, + ResourceTable theEntity, + Set theAllTags) { List tagList = theResource.getMeta().getTag(); if (tagList != null) { for (IBaseCoding next : tagList) { - TagDefinition def = getTagOrNull(theTransactionDetails, TagTypeEnum.TAG, next.getSystem(), next.getCode(), - next.getDisplay(), next.getVersion(), myCodingSpy.getBooleanObject(next)); + TagDefinition def = getTagOrNull( + theTransactionDetails, + TagTypeEnum.TAG, + next.getSystem(), + next.getCode(), + next.getDisplay(), + next.getVersion(), + myCodingSpy.getBooleanObject(next)); if (def != null) { ResourceTag tag = theEntity.addTag(def); theAllTags.add(tag); @@ -336,7 +386,14 @@ public abstract class BaseHapiFhirDao extends BaseStora List securityLabels = theResource.getMeta().getSecurity(); if (securityLabels != null) { for (IBaseCoding next : securityLabels) { - TagDefinition def = getTagOrNull(theTransactionDetails, TagTypeEnum.SECURITY_LABEL, next.getSystem(), next.getCode(), next.getDisplay(), next.getVersion(), myCodingSpy.getBooleanObject(next)); + TagDefinition def = getTagOrNull( + theTransactionDetails, + TagTypeEnum.SECURITY_LABEL, + next.getSystem(), + next.getCode(), + next.getDisplay(), + next.getVersion(), + myCodingSpy.getBooleanObject(next)); if (def != null) { ResourceTag tag = theEntity.addTag(def); theAllTags.add(tag); @@ -348,7 +405,8 @@ public abstract class BaseHapiFhirDao 
extends BaseStora List> profiles = theResource.getMeta().getProfile(); if (profiles != null) { for (IPrimitiveType next : profiles) { - TagDefinition def = getTagOrNull(theTransactionDetails, TagTypeEnum.PROFILE, NS_JPA_PROFILE, next.getValue(), null, null, null); + TagDefinition def = getTagOrNull( + theTransactionDetails, TagTypeEnum.PROFILE, NS_JPA_PROFILE, next.getValue(), null, null, null); if (def != null) { ResourceTag tag = theEntity.addTag(def); theAllTags.add(tag); @@ -356,7 +414,6 @@ public abstract class BaseHapiFhirDao extends BaseStora } } } - } private Set getAllTagDefinitions(ResourceTable theEntity) { @@ -386,18 +443,26 @@ public abstract class BaseHapiFhirDao extends BaseStora /** * null will only be returned if the scheme and tag are both blank */ - protected TagDefinition getTagOrNull(TransactionDetails theTransactionDetails, TagTypeEnum theTagType, String theScheme, - String theTerm, String theLabel, String theVersion, Boolean theUserSelected) { + protected TagDefinition getTagOrNull( + TransactionDetails theTransactionDetails, + TagTypeEnum theTagType, + String theScheme, + String theTerm, + String theLabel, + String theVersion, + Boolean theUserSelected) { if (isBlank(theScheme) && isBlank(theTerm) && isBlank(theLabel)) { return null; } - MemoryCacheService.TagDefinitionCacheKey key = toTagDefinitionMemoryCacheKey(theTagType, theScheme, theTerm, theVersion, theUserSelected); + MemoryCacheService.TagDefinitionCacheKey key = + toTagDefinitionMemoryCacheKey(theTagType, theScheme, theTerm, theVersion, theUserSelected); TagDefinition retVal = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.TAG_DEFINITION, key); if (retVal == null) { - HashMap resolvedTagDefinitions = theTransactionDetails - .getOrCreateUserData(HapiTransactionService.XACT_USERDATA_KEY_RESOLVED_TAG_DEFINITIONS, HashMap::new); + HashMap resolvedTagDefinitions = + theTransactionDetails.getOrCreateUserData( + HapiTransactionService.XACT_USERDATA_KEY_RESOLVED_TAG_DEFINITIONS, HashMap::new); retVal = resolvedTagDefinitions.get(key); @@ -421,8 +486,13 @@ public abstract class BaseHapiFhirDao extends BaseStora *

    * Can also throw an InternalErrorException if something bad happens. */ - private TagDefinition getOrCreateTag(TagTypeEnum theTagType, String theScheme, String theTerm, String theLabel, - String theVersion, Boolean theUserSelected) { + private TagDefinition getOrCreateTag( + TagTypeEnum theTagType, + String theScheme, + String theTerm, + String theLabel, + String theVersion, + Boolean theUserSelected) { TypedQuery q = buildTagQuery(theTagType, theScheme, theTerm, theVersion, theUserSelected); q.setMaxResults(1); @@ -464,11 +534,11 @@ public abstract class BaseHapiFhirDao extends BaseStora // log any exceptions - just in case // they may be signs of things to come... ourLog.warn( - "Tag read/write failed: " - + ex.getMessage() + ". " - + "This is not a failure on its own, " - + "but could be useful information in the result of an actual failure.", ex - ); + "Tag read/write failed: " + + ex.getMessage() + ". " + + "This is not a failure on its own, " + + "but could be useful information in the result of an actual failure.", + ex); throwables.add(ex); } @@ -479,9 +549,8 @@ public abstract class BaseHapiFhirDao extends BaseStora // transaction template can fail if connections to db are exhausted // and/or timeout ourLog.warn("Transaction failed with: " - + ex.getMessage() + ". " - + "Transaction will rollback and be reattempted." - ); + + ex.getMessage() + ". " + + "Transaction will rollback and be reattempted."); retVal = null; } count++; @@ -491,50 +560,46 @@ public abstract class BaseHapiFhirDao extends BaseStora // if tag is still null, // something bad must be happening // - throw - String msg = throwables.stream() - .map(Throwable::getMessage) - .collect(Collectors.joining(", ")); - throw new InternalErrorException( - Msg.code(2023) + String msg = throwables.stream().map(Throwable::getMessage).collect(Collectors.joining(", ")); + throw new InternalErrorException(Msg.code(2023) + "Tag get/create failed after " + TOTAL_TAG_READ_ATTEMPTS + " attempts with error(s): " - + msg - ); + + msg); } return retVal; } - private TypedQuery buildTagQuery(TagTypeEnum theTagType, String theScheme, String theTerm, - String theVersion, Boolean theUserSelected) { + private TypedQuery buildTagQuery( + TagTypeEnum theTagType, String theScheme, String theTerm, String theVersion, Boolean theUserSelected) { CriteriaBuilder builder = myEntityManager.getCriteriaBuilder(); CriteriaQuery cq = builder.createQuery(TagDefinition.class); Root from = cq.from(TagDefinition.class); List predicates = new ArrayList<>(); + predicates.add(builder.and( + builder.equal(from.get("myTagType"), theTagType), builder.equal(from.get("myCode"), theTerm))); + predicates.add( - builder.and( - builder.equal(from.get("myTagType"), theTagType), - builder.equal(from.get("myCode"), theTerm))); + isBlank(theScheme) + ? builder.isNull(from.get("mySystem")) + : builder.equal(from.get("mySystem"), theScheme)); - predicates.add( isBlank(theScheme) - ? builder.isNull(from.get("mySystem")) - : builder.equal(from.get("mySystem"), theScheme)); + predicates.add( + isBlank(theVersion) + ? builder.isNull(from.get("myVersion")) + : builder.equal(from.get("myVersion"), theVersion)); - predicates.add( isBlank(theVersion) - ? builder.isNull(from.get("myVersion")) - : builder.equal(from.get("myVersion"), theVersion)); - - predicates.add( isNull(theUserSelected) - ? builder.isNull(from.get("myUserSelected")) - : builder.equal(from.get("myUserSelected"), theUserSelected)); + predicates.add( + isNull(theUserSelected) + ? 
builder.isNull(from.get("myUserSelected")) + : builder.equal(from.get("myUserSelected"), theUserSelected)); cq.where(predicates.toArray(new Predicate[0])); return myEntityManager.createQuery(cq); } - void incrementId(T theResource, ResourceTable theSavedEntity, IIdType theResourceId) { if (theResourceId == null || theResourceId.getVersionIdPart() == null) { theSavedEntity.initializeVersion(); @@ -555,7 +620,12 @@ public abstract class BaseHapiFhirDao extends BaseStora /** * Returns {@literal true} if the resource has changed (either the contents or the tags) */ - protected EncodedResource populateResourceIntoEntity(TransactionDetails theTransactionDetails, RequestDetails theRequest, IBaseResource theResource, ResourceTable theEntity, boolean thePerformIndexing) { + protected EncodedResource populateResourceIntoEntity( + TransactionDetails theTransactionDetails, + RequestDetails theRequest, + IBaseResource theResource, + ResourceTable theEntity, + boolean thePerformIndexing) { if (theEntity.getResourceType() == null) { theEntity.setResourceType(toResourceName(theResource)); } @@ -597,7 +667,8 @@ public abstract class BaseHapiFhirDao extends BaseStora HashFunction sha256 = Hashing.sha256(); HashCode hashCode; String encodedResource = encodeResource(theResource, encoding, excludeElements, myContext); - if (myStorageSettings.getInlineResourceTextBelowSize() > 0 && encodedResource.length() < myStorageSettings.getInlineResourceTextBelowSize()) { + if (myStorageSettings.getInlineResourceTextBelowSize() > 0 + && encodedResource.length() < myStorageSettings.getInlineResourceTextBelowSize()) { resourceText = encodedResource; resourceBinary = null; encoding = ResourceEncodingEnum.JSON; @@ -614,13 +685,11 @@ public abstract class BaseHapiFhirDao extends BaseStora } theEntity.setHashSha256(hashSha256); - if (sourceExtension != null) { IBaseExtension newSourceExtension = ((IBaseHasExtensions) meta).addExtension(); newSourceExtension.setUrl(sourceExtension.getUrl()); newSourceExtension.setValue(sourceExtension.getValue()); } - } } else { @@ -628,7 +697,6 @@ public abstract class BaseHapiFhirDao extends BaseStora encoding = null; resourceBinary = null; resourceText = null; - } boolean skipUpdatingTags = myStorageSettings.isMassIngestionMode() && theEntity.isHasTags(); @@ -640,7 +708,7 @@ public abstract class BaseHapiFhirDao extends BaseStora } else { - if(nonNull(theEntity.getHashSha256())){ + if (nonNull(theEntity.getHashSha256())) { theEntity.setHashSha256(null); changed = true; } @@ -648,7 +716,6 @@ public abstract class BaseHapiFhirDao extends BaseStora resourceBinary = null; resourceText = null; encoding = ResourceEncodingEnum.DEL; - } if (thePerformIndexing && !changed) { @@ -665,7 +732,8 @@ public abstract class BaseHapiFhirDao extends BaseStora } else { ResourceHistoryTable currentHistoryVersion = theEntity.getCurrentVersionEntity(); if (currentHistoryVersion == null) { - currentHistoryVersion = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theEntity.getId(), theEntity.getVersion()); + currentHistoryVersion = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance( + theEntity.getId(), theEntity.getVersion()); } if (currentHistoryVersion == null || !currentHistoryVersion.hasResource()) { changed = true; @@ -718,7 +786,8 @@ public abstract class BaseHapiFhirDao extends BaseStora * @param theMeta the meta element of the resource * @return source extension if present in the meta element */ - private IBaseExtension getExcludedElements(String theResourceType, List 
theExcludeElements, IBaseMetaType theMeta) { + private IBaseExtension getExcludedElements( + String theResourceType, List theExcludeElements, IBaseMetaType theMeta) { boolean hasExtensions = false; IBaseExtension sourceExtension = null; if (theMeta instanceof IBaseHasExtensions) { @@ -749,7 +818,8 @@ public abstract class BaseHapiFhirDao extends BaseStora } theExcludeElements.add("id"); - boolean inlineTagMode = getStorageSettings().getTagStorageMode() == JpaStorageSettings.TagStorageModeEnum.INLINE; + boolean inlineTagMode = + getStorageSettings().getTagStorageMode() == JpaStorageSettings.TagStorageModeEnum.INLINE; if (hasExtensions || inlineTagMode) { if (!inlineTagMode) { theExcludeElements.add(theResourceType + ".meta.profile"); @@ -770,7 +840,11 @@ public abstract class BaseHapiFhirDao extends BaseStora return sourceExtension; } - private boolean updateTags(TransactionDetails theTransactionDetails, RequestDetails theRequest, IBaseResource theResource, ResourceTable theEntity) { + private boolean updateTags( + TransactionDetails theTransactionDetails, + RequestDetails theRequest, + IBaseResource theResource, + ResourceTable theEntity) { Set allDefs = new HashSet<>(); Set allTagsOld = getAllTagDefinitions(theEntity); @@ -781,10 +855,11 @@ public abstract class BaseHapiFhirDao extends BaseStora } RuntimeResourceDefinition def = myContext.getResourceDefinition(theResource); - if ( ! def.isStandardType()) { + if (!def.isStandardType()) { String profile = def.getResourceProfile(""); if (isNotBlank(profile)) { - TagDefinition profileDef = getTagOrNull(theTransactionDetails, TagTypeEnum.PROFILE, NS_JPA_PROFILE, profile, null, null, null); + TagDefinition profileDef = getTagOrNull( + theTransactionDetails, TagTypeEnum.PROFILE, NS_JPA_PROFILE, profile, null, null, null); ResourceTag tag = theEntity.addTag(profileDef); allDefs.add(tag); @@ -805,23 +880,24 @@ public abstract class BaseHapiFhirDao extends BaseStora if (!allDefs.contains(tag)) { if (shouldDroppedTagBeRemovedOnUpdate(theRequest, tag)) { theEntity.getTags().remove(tag); - } else if (HapiExtensions.EXT_SUBSCRIPTION_MATCHING_STRATEGY.equals(tag.getTag().getSystem())) { + } else if (HapiExtensions.EXT_SUBSCRIPTION_MATCHING_STRATEGY.equals( + tag.getTag().getSystem())) { theEntity.getTags().remove(tag); } } - }); // Update the resource to contain the old tags allTagsOld.forEach(tag -> { - IBaseCoding iBaseCoding = theResource.getMeta() - .addTag() - .setCode(tag.getTag().getCode()) - .setSystem(tag.getTag().getSystem()) - .setVersion(tag.getTag().getVersion()); - if (tag.getTag().getUserSelected() != null) { - iBaseCoding.setUserSelected(tag.getTag().getUserSelected()); - } + IBaseCoding iBaseCoding = theResource + .getMeta() + .addTag() + .setCode(tag.getTag().getCode()) + .setSystem(tag.getTag().getSystem()) + .setVersion(tag.getTag().getVersion()); + if (tag.getTag().getUserSelected() != null) { + iBaseCoding.setUserSelected(tag.getTag().getUserSelected()); + } }); theEntity.setHasTags(!allTagsNew.isEmpty()); @@ -911,7 +987,6 @@ public abstract class BaseHapiFhirDao extends BaseStora return metaSnapshotModeTokens.contains(theTag.getTag().getTagType()); } - String toResourceName(IBaseResource theResource) { return myContext.getResourceType(theResource); } @@ -922,7 +997,8 @@ public abstract class BaseHapiFhirDao extends BaseStora } @VisibleForTesting - public void setSearchParamWithInlineReferencesExtractor(SearchParamWithInlineReferencesExtractor theSearchParamWithInlineReferencesExtractor) { + public void 
setSearchParamWithInlineReferencesExtractor( + SearchParamWithInlineReferencesExtractor theSearchParamWithInlineReferencesExtractor) { mySearchParamWithInlineReferencesExtractor = theSearchParamWithInlineReferencesExtractor; } @@ -936,22 +1012,40 @@ public abstract class BaseHapiFhirDao extends BaseStora myDaoSearchParamSynchronizer = theDaoSearchParamSynchronizer; } - private void verifyMatchUrlForConditionalCreate(IBaseResource theResource, String theIfNoneExist, ResourceIndexedSearchParams theParams, RequestDetails theRequestDetails) { + private void verifyMatchUrlForConditionalCreate( + IBaseResource theResource, + String theIfNoneExist, + ResourceIndexedSearchParams theParams, + RequestDetails theRequestDetails) { // Make sure that the match URL was actually appropriate for the supplied resource - InMemoryMatchResult outcome = myInMemoryResourceMatcher.match(theIfNoneExist, theResource, theParams, theRequestDetails); + InMemoryMatchResult outcome = + myInMemoryResourceMatcher.match(theIfNoneExist, theResource, theParams, theRequestDetails); if (outcome.supported() && !outcome.matched()) { - throw new InvalidRequestException(Msg.code(929) + "Failed to process conditional create. The supplied resource did not satisfy the conditional URL."); + throw new InvalidRequestException( + Msg.code(929) + + "Failed to process conditional create. The supplied resource did not satisfy the conditional URL."); } } - @SuppressWarnings("unchecked") @Override - public ResourceTable updateEntity(RequestDetails theRequest, final IBaseResource theResource, IBasePersistedResource - theEntity, Date theDeletedTimestampOrNull, boolean thePerformIndexing, - boolean theUpdateVersion, TransactionDetails theTransactionDetails, boolean theForceUpdate, boolean theCreateNewHistoryEntry) { + public ResourceTable updateEntity( + RequestDetails theRequest, + final IBaseResource theResource, + IBasePersistedResource theEntity, + Date theDeletedTimestampOrNull, + boolean thePerformIndexing, + boolean theUpdateVersion, + TransactionDetails theTransactionDetails, + boolean theForceUpdate, + boolean theCreateNewHistoryEntry) { Validate.notNull(theEntity); - Validate.isTrue(theDeletedTimestampOrNull != null || theResource != null, "Must have either a resource[%s] or a deleted timestamp[%s] for resource PID[%s]", theDeletedTimestampOrNull != null, theResource != null, theEntity.getPersistentId()); + Validate.isTrue( + theDeletedTimestampOrNull != null || theResource != null, + "Must have either a resource[%s] or a deleted timestamp[%s] for resource PID[%s]", + theDeletedTimestampOrNull != null, + theResource != null, + theEntity.getPersistentId()); ourLog.debug("Starting entity update"); @@ -995,7 +1089,10 @@ public abstract class BaseHapiFhirDao extends BaseStora // CREATE or UPDATE - IdentityHashMap existingSearchParams = theTransactionDetails.getOrCreateUserData(HapiTransactionService.XACT_USERDATA_KEY_EXISTING_SEARCH_PARAMS, () -> new IdentityHashMap<>()); + IdentityHashMap existingSearchParams = + theTransactionDetails.getOrCreateUserData( + HapiTransactionService.XACT_USERDATA_KEY_EXISTING_SEARCH_PARAMS, + () -> new IdentityHashMap<>()); existingParams = existingSearchParams.get(entity); if (existingParams == null) { existingParams = new ResourceIndexedSearchParams(entity); @@ -1005,11 +1102,9 @@ public abstract class BaseHapiFhirDao extends BaseStora * old set later on */ if (existingParams.getResourceLinks().size() >= 10) { - List pids = existingParams - .getResourceLinks() - .stream() - .map(t -> t.getId()) - 
.collect(Collectors.toList()); + List pids = existingParams.getResourceLinks().stream() + .map(t -> t.getId()) + .collect(Collectors.toList()); new QueryChunker().chunk(pids, t -> { List targets = myResourceLinkDao.findByPidAndFetchTargetDetails(t); ourLog.trace("Prefetched targets: {}", targets); @@ -1036,7 +1131,15 @@ public abstract class BaseHapiFhirDao extends BaseStora failIfPartitionMismatch(theRequest, entity); // Extract search params for resource - mySearchParamWithInlineReferencesExtractor.populateFromResource(requestPartitionId, newParams, theTransactionDetails, entity, theResource, existingParams, theRequest, thePerformIndexing); + mySearchParamWithInlineReferencesExtractor.populateFromResource( + requestPartitionId, + newParams, + theTransactionDetails, + entity, + theResource, + existingParams, + theRequest, + thePerformIndexing); // Actually persist the ResourceTable and ResourceHistoryTable entities changed = populateResourceIntoEntity(theTransactionDetails, theRequest, theResource, entity, true); @@ -1054,7 +1157,8 @@ public abstract class BaseHapiFhirDao extends BaseStora // matches. We could certainly make this configurable though in the // future. if (entity.getVersion() <= 1L && entity.getCreatedByMatchUrl() != null && thePerformIndexing) { - verifyMatchUrlForConditionalCreate(theResource, entity.getCreatedByMatchUrl(), newParams, theRequest); + verifyMatchUrlForConditionalCreate( + theResource, entity.getCreatedByMatchUrl(), newParams, theRequest); } if (CURRENTLY_REINDEXING.get(theResource) != Boolean.TRUE) { @@ -1074,13 +1178,18 @@ public abstract class BaseHapiFhirDao extends BaseStora entity.setIndexStatus(null); changed = populateResourceIntoEntity(theTransactionDetails, theRequest, theResource, entity, false); - } - } - if (thePerformIndexing && changed != null && !changed.isChanged() && !theForceUpdate && myStorageSettings.isSuppressUpdatesWithNoChange() && (entity.getVersion() > 1 || theUpdateVersion)) { - ourLog.debug("Resource {} has not changed", entity.getIdDt().toUnqualified().getValue()); + if (thePerformIndexing + && changed != null + && !changed.isChanged() + && !theForceUpdate + && myStorageSettings.isSuppressUpdatesWithNoChange() + && (entity.getVersion() > 1 || theUpdateVersion)) { + ourLog.debug( + "Resource {} has not changed", + entity.getIdDt().toUnqualified().getValue()); if (theResource != null) { myJpaStorageResourceParser.updateResourceMetadata(entity, theResource); } @@ -1130,21 +1239,26 @@ public abstract class BaseHapiFhirDao extends BaseStora * those by path and not by parameter name. 
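The reformatted condition above is the suppressUpdatesWithNoChange short-circuit: when re-encoding the resource produces content identical to what is already stored (the changed flag computed in populateResourceIntoEntity), the update is skipped and the version is not bumped. A rough stand-alone illustration of that idea using a SHA-256 over the serialized form; ResourceRow is a hypothetical stand-in for ResourceTable, and the real check also honours theForceUpdate and theUpdateVersion:

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.HexFormat;

public class NoOpUpdateSketch {

    // Hypothetical stand-in for ResourceTable: current version plus a content hash.
    static final class ResourceRow {
        long version = 0;
        String hashSha256;
    }

    static String sha256(String encodedResource) throws NoSuchAlgorithmException {
        MessageDigest digest = MessageDigest.getInstance("SHA-256");
        return HexFormat.of().formatHex(digest.digest(encodedResource.getBytes(StandardCharsets.UTF_8)));
    }

    /** Returns true if a new version was written, false if the update was suppressed as a no-op. */
    static boolean update(ResourceRow row, String newEncodedResource, boolean suppressUpdatesWithNoChange)
            throws NoSuchAlgorithmException {
        String newHash = sha256(newEncodedResource);
        boolean changed = !newHash.equals(row.hashSha256);
        if (!changed && suppressUpdatesWithNoChange && row.version >= 1) {
            return false; // same content: keep the current version, skip indexing and history
        }
        row.hashSha256 = newHash;
        row.version++;
        return true;
    }

    public static void main(String[] args) throws NoSuchAlgorithmException {
        ResourceRow row = new ResourceRow();
        System.out.println(update(row, "{\"resourceType\":\"Patient\"}", true)); // true: first write
        System.out.println(update(row, "{\"resourceType\":\"Patient\"}", true)); // false: suppressed
    }
}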
*/ if (thePerformIndexing && newParams != null) { - AddRemoveCount presenceCount = mySearchParamPresenceSvc.updatePresence(entity, newParams.mySearchParamPresentEntities); + AddRemoveCount presenceCount = + mySearchParamPresenceSvc.updatePresence(entity, newParams.mySearchParamPresentEntities); // Interceptor broadcast: JPA_PERFTRACE_INFO if (!presenceCount.isEmpty()) { - if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO, myInterceptorBroadcaster, theRequest)) { + if (CompositeInterceptorBroadcaster.hasHooks( + Pointcut.JPA_PERFTRACE_INFO, myInterceptorBroadcaster, theRequest)) { StorageProcessingMessage message = new StorageProcessingMessage(); - message.setMessage("For " + entity.getIdDt().toUnqualifiedVersionless().getValue() + " added " + presenceCount.getAddCount() + " and removed " + presenceCount.getRemoveCount() + " resource search parameter presence entries"); + message.setMessage( + "For " + entity.getIdDt().toUnqualifiedVersionless().getValue() + " added " + + presenceCount.getAddCount() + " and removed " + presenceCount.getRemoveCount() + + " resource search parameter presence entries"); HookParams params = new HookParams() - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest) - .add(StorageProcessingMessage.class, message); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.JPA_PERFTRACE_INFO, params); + .add(RequestDetails.class, theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest) + .add(StorageProcessingMessage.class, message); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, theRequest, Pointcut.JPA_PERFTRACE_INFO, params); } } - } /* @@ -1157,25 +1271,34 @@ public abstract class BaseHapiFhirDao extends BaseStora } else { // Synchronize search param indexes - AddRemoveCount searchParamAddRemoveCount = myDaoSearchParamSynchronizer.synchronizeSearchParamsToDatabase(newParams, entity, existingParams); + AddRemoveCount searchParamAddRemoveCount = + myDaoSearchParamSynchronizer.synchronizeSearchParamsToDatabase( + newParams, entity, existingParams); newParams.populateResourceTableParamCollections(entity); // Interceptor broadcast: JPA_PERFTRACE_INFO if (!searchParamAddRemoveCount.isEmpty()) { - if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO, myInterceptorBroadcaster, theRequest)) { + if (CompositeInterceptorBroadcaster.hasHooks( + Pointcut.JPA_PERFTRACE_INFO, myInterceptorBroadcaster, theRequest)) { StorageProcessingMessage message = new StorageProcessingMessage(); - message.setMessage("For " + entity.getIdDt().toUnqualifiedVersionless().getValue() + " added " + searchParamAddRemoveCount.getAddCount() + " and removed " + searchParamAddRemoveCount.getRemoveCount() + " resource search parameter index entries"); + message.setMessage("For " + + entity.getIdDt().toUnqualifiedVersionless().getValue() + " added " + + searchParamAddRemoveCount.getAddCount() + " and removed " + + searchParamAddRemoveCount.getRemoveCount() + + " resource search parameter index entries"); HookParams params = new HookParams() - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest) - .add(StorageProcessingMessage.class, message); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.JPA_PERFTRACE_INFO, params); + .add(RequestDetails.class, theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest) + 
.add(StorageProcessingMessage.class, message); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, theRequest, Pointcut.JPA_PERFTRACE_INFO, params); } } // Synchronize composite params - mySearchParamWithInlineReferencesExtractor.storeUniqueComboParameters(newParams, entity, existingParams); + mySearchParamWithInlineReferencesExtractor.storeUniqueComboParameters( + newParams, entity, existingParams); } } @@ -1183,14 +1306,23 @@ public abstract class BaseHapiFhirDao extends BaseStora myJpaStorageResourceParser.updateResourceMetadata(entity, theResource); } - return entity; } - public IBasePersistedResource updateHistoryEntity(RequestDetails theRequest, T theResource, IBasePersistedResource - theEntity, IBasePersistedResource theHistoryEntity, IIdType theResourceId, TransactionDetails theTransactionDetails, boolean isUpdatingCurrent) { + public IBasePersistedResource updateHistoryEntity( + RequestDetails theRequest, + T theResource, + IBasePersistedResource theEntity, + IBasePersistedResource theHistoryEntity, + IIdType theResourceId, + TransactionDetails theTransactionDetails, + boolean isUpdatingCurrent) { Validate.notNull(theEntity); - Validate.isTrue(theResource != null, "Must have either a resource[%s] for resource PID[%s]", theResource != null, theEntity.getPersistentId()); + Validate.isTrue( + theResource != null, + "Must have either a resource[%s] for resource PID[%s]", + theResource != null, + theEntity.getPersistentId()); ourLog.debug("Starting history entity update"); EncodedResource encodedResource = new EncodedResource(); @@ -1208,8 +1340,10 @@ public abstract class BaseHapiFhirDao extends BaseStora notifyInterceptors(theRequest, theResource, oldResource, theTransactionDetails, true); - ResourceTable savedEntity = updateEntity(theRequest, theResource, entity, null, true, false, theTransactionDetails, false, false); - // Have to call populate again for the encodedResource, since using createHistoryEntry() will cause version constraint failure, ie updating the same resource at the same time + ResourceTable savedEntity = updateEntity( + theRequest, theResource, entity, null, true, false, theTransactionDetails, false, false); + // Have to call populate again for the encodedResource, since using createHistoryEntry() will cause version + // constraint failure, ie updating the same resource at the same time encodedResource = populateResourceIntoEntity(theTransactionDetails, theRequest, theResource, entity, true); // For some reason the current version entity is not attached until after using updateEntity historyEntity = ((ResourceTable) readEntity(theResourceId, theRequest)).getCurrentVersionEntity(); @@ -1241,12 +1375,15 @@ public abstract class BaseHapiFhirDao extends BaseStora historyEntity.setUpdated(theTransactionDetails.getTransactionDate()); if (!changed && myStorageSettings.isSuppressUpdatesWithNoChange() && (historyEntity.getVersion() > 1)) { - ourLog.debug("Resource {} has not changed", historyEntity.getIdDt().toUnqualified().getValue()); + ourLog.debug( + "Resource {} has not changed", + historyEntity.getIdDt().toUnqualified().getValue()); myJpaStorageResourceParser.updateResourceMetadata(historyEntity, theResource); return historyEntity; } - if (getStorageSettings().getInlineResourceTextBelowSize() > 0 && encodedResourceString.length() < getStorageSettings().getInlineResourceTextBelowSize()) { + if (getStorageSettings().getInlineResourceTextBelowSize() > 0 + && encodedResourceString.length() < getStorageSettings().getInlineResourceTextBelowSize()) { 
populateEncodedResource(encodedResource, encodedResourceString, null, ResourceEncodingEnum.JSON); } else { populateEncodedResource(encodedResource, null, resourceBinary, encoding); @@ -1266,13 +1403,16 @@ public abstract class BaseHapiFhirDao extends BaseStora return historyEntity; } - private void populateEncodedResource(EncodedResource encodedResource, String encodedResourceString, byte[] theResourceBinary, ResourceEncodingEnum theEncoding) { + private void populateEncodedResource( + EncodedResource encodedResource, + String encodedResourceString, + byte[] theResourceBinary, + ResourceEncodingEnum theEncoding) { encodedResource.setResourceText(encodedResourceString); encodedResource.setResourceBinary(theResourceBinary); encodedResource.setEncoding(theEncoding); } - /** * TODO eventually consider refactoring this to be part of an interceptor. *
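The populateEncodedResource calls above implement the inline-text threshold: a resource whose encoded JSON is shorter than inlineResourceTextBelowSize is stored in the text column, anything larger goes into the binary column. A compact sketch of that decision; Encoded is a simplified stand-in for EncodedResource, and gzip compression of the binary payload is an assumption made for illustration:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPOutputStream;

public class InlineTextThresholdSketch {

    // Simplified stand-in for EncodedResource: either text or binary is populated, never both.
    record Encoded(String resourceText, byte[] resourceBinary, String encoding) {}

    static Encoded encode(String encodedJson, int inlineResourceTextBelowSize) throws IOException {
        if (inlineResourceTextBelowSize > 0 && encodedJson.length() < inlineResourceTextBelowSize) {
            // Small enough: keep the raw JSON in the text column, no binary payload.
            return new Encoded(encodedJson, null, "JSON");
        }
        // Otherwise store a compressed binary payload (gzip assumed here for illustration).
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (GZIPOutputStream gzip = new GZIPOutputStream(buffer)) {
            gzip.write(encodedJson.getBytes(StandardCharsets.UTF_8));
        }
        return new Encoded(null, buffer.toByteArray(), "JSONC");
    }

    public static void main(String[] args) throws IOException {
        Encoded small = encode("{\"resourceType\":\"Patient\"}", 4000);
        System.out.println("inline text: " + small.resourceText());
        Encoded large = encode("{\"resourceType\":\"Patient\"}", 0); // threshold disabled
        System.out.println("binary bytes: " + large.resourceBinary().length + " (" + large.encoding() + ")");
    }
}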

    @@ -1282,17 +1422,24 @@ public abstract class BaseHapiFhirDao extends BaseStora * @param entity the existing entity. */ private void failIfPartitionMismatch(RequestDetails theRequest, ResourceTable entity) { - if (myPartitionSettings.isPartitioningEnabled() && theRequest != null && theRequest.getTenantId() != null && entity.getPartitionId() != null) { + if (myPartitionSettings.isPartitioningEnabled() + && theRequest != null + && theRequest.getTenantId() != null + && entity.getPartitionId() != null) { PartitionEntity partitionEntity = myPartitionLookupSvc.getPartitionByName(theRequest.getTenantId()); - //partitionEntity should never be null - if (partitionEntity != null && !partitionEntity.getId().equals(entity.getPartitionId().getPartitionId())) { - throw new InvalidRequestException(Msg.code(2079) + "Resource " + entity.getResourceType() + "/" + entity.getId() + " is not known"); + // partitionEntity should never be null + if (partitionEntity != null + && !partitionEntity.getId().equals(entity.getPartitionId().getPartitionId())) { + throw new InvalidRequestException(Msg.code(2079) + "Resource " + entity.getResourceType() + "/" + + entity.getId() + " is not known"); } } } - private void createHistoryEntry(RequestDetails theRequest, IBaseResource theResource, ResourceTable theEntity, EncodedResource theChanged) { - boolean versionedTags = getStorageSettings().getTagStorageMode() == JpaStorageSettings.TagStorageModeEnum.VERSIONED; + private void createHistoryEntry( + RequestDetails theRequest, IBaseResource theResource, ResourceTable theEntity, EncodedResource theChanged) { + boolean versionedTags = + getStorageSettings().getTagStorageMode() == JpaStorageSettings.TagStorageModeEnum.VERSIONED; final ResourceHistoryTable historyEntry = theEntity.toHistory(versionedTags); historyEntry.setEncoding(theChanged.getEncoding()); @@ -1313,21 +1460,22 @@ public abstract class BaseHapiFhirDao extends BaseStora } if (myContext.getVersion().getVersion().equals(FhirVersionEnum.DSTU3)) { source = ((IBaseHasExtensions) theResource.getMeta()) - .getExtension() - .stream() - .filter(t -> HapiExtensions.EXT_META_SOURCE.equals(t.getUrl())) - .filter(t -> t.getValue() instanceof IPrimitiveType) - .map(t -> ((IPrimitiveType) t.getValue()).getValueAsString()) - .findFirst() - .orElse(null); + .getExtension().stream() + .filter(t -> HapiExtensions.EXT_META_SOURCE.equals(t.getUrl())) + .filter(t -> t.getValue() instanceof IPrimitiveType) + .map(t -> ((IPrimitiveType) t.getValue()).getValueAsString()) + .findFirst() + .orElse(null); } } String requestId = getRequestId(theRequest, source); source = MetaUtil.cleanProvenanceSourceUriOrEmpty(source); - boolean shouldStoreSource = myStorageSettings.getStoreMetaSourceInformation().isStoreSourceUri(); - boolean shouldStoreRequestId = myStorageSettings.getStoreMetaSourceInformation().isStoreRequestId(); + boolean shouldStoreSource = + myStorageSettings.getStoreMetaSourceInformation().isStoreSourceUri(); + boolean shouldStoreRequestId = + myStorageSettings.getStoreMetaSourceInformation().isStoreRequestId(); boolean haveSource = isNotBlank(source) && shouldStoreSource; boolean haveRequestId = isNotBlank(requestId) && shouldStoreRequestId; if (haveSource || haveRequestId) { @@ -1346,7 +1494,11 @@ public abstract class BaseHapiFhirDao extends BaseStora historyEntry.setSourceUri(persistedSource); } if (theResource != null) { - MetaUtil.populateResourceSource(myFhirContext, shouldStoreSource ? source : null, shouldStoreRequestId ? 
requestId : null , theResource); + MetaUtil.populateResourceSource( + myFhirContext, + shouldStoreSource ? source : null, + shouldStoreRequestId ? requestId : null, + theResource); } myEntityManager.persist(provenance); @@ -1363,13 +1515,24 @@ public abstract class BaseHapiFhirDao extends BaseStora private void validateIncomingResourceTypeMatchesExisting(IBaseResource theResource, BaseHasResource entity) { String resourceType = myContext.getResourceType(theResource); if (!resourceType.equals(entity.getResourceType())) { - throw new UnprocessableEntityException(Msg.code(930) + "Existing resource ID[" + entity.getIdDt().toUnqualifiedVersionless() + "] is of type[" + entity.getResourceType() + "] - Cannot update with [" + resourceType + "]"); + throw new UnprocessableEntityException(Msg.code(930) + "Existing resource ID[" + + entity.getIdDt().toUnqualifiedVersionless() + "] is of type[" + entity.getResourceType() + + "] - Cannot update with [" + resourceType + "]"); } } @Override - public DaoMethodOutcome updateInternal(RequestDetails theRequestDetails, T theResource, String theMatchUrl, boolean thePerformIndexing, boolean theForceUpdateVersion, - IBasePersistedResource theEntity, IIdType theResourceId, @Nullable IBaseResource theOldResource, RestOperationTypeEnum theOperationType, TransactionDetails theTransactionDetails) { + public DaoMethodOutcome updateInternal( + RequestDetails theRequestDetails, + T theResource, + String theMatchUrl, + boolean thePerformIndexing, + boolean theForceUpdateVersion, + IBasePersistedResource theEntity, + IIdType theResourceId, + @Nullable IBaseResource theOldResource, + RestOperationTypeEnum theOperationType, + TransactionDetails theTransactionDetails) { ResourceTable entity = (ResourceTable) theEntity; @@ -1381,14 +1544,25 @@ public abstract class BaseHapiFhirDao extends BaseStora notifyInterceptors(theRequestDetails, theResource, theOldResource, theTransactionDetails, true); // Perform update - ResourceTable savedEntity = updateEntity(theRequestDetails, theResource, entity, null, thePerformIndexing, thePerformIndexing, theTransactionDetails, theForceUpdateVersion, thePerformIndexing); + ResourceTable savedEntity = updateEntity( + theRequestDetails, + theResource, + entity, + null, + thePerformIndexing, + thePerformIndexing, + theTransactionDetails, + theForceUpdateVersion, + thePerformIndexing); /* * If we aren't indexing (meaning we're probably executing a sub-operation within a transaction), * we'll manually increase the version. 
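createHistoryEntry above combines Resource.meta.source and the request ID into one provenance record, and stores each part only when the corresponding StoreMetaSourceInformation flag is enabled. A small sketch of that combination; Provenance and its fields are illustrative stand-ins rather than the HAPI entities, and the "source#requestId" rendering is the convention the cleaned value follows:

public class ProvenanceSketch {

    record Provenance(String sourceUri, String requestId) {}

    /**
     * Returns the provenance to persist, or null when neither part should be stored.
     * Mirrors the shouldStoreSource / shouldStoreRequestId flags in the hunk above.
     */
    static Provenance buildProvenance(
            String metaSource, String requestId, boolean storeSourceUri, boolean storeRequestId) {
        // Strip any "#requestId" suffix a client may have sent in meta.source.
        String cleanedSource = metaSource == null ? "" : metaSource.split("#", 2)[0];
        boolean haveSource = storeSourceUri && !cleanedSource.isBlank();
        boolean haveRequestId = storeRequestId && requestId != null && !requestId.isBlank();
        if (!haveSource && !haveRequestId) {
            return null;
        }
        return new Provenance(haveSource ? cleanedSource : null, haveRequestId ? requestId : null);
    }

    /** Rendering used when the combined value is echoed back in meta.source. */
    static String render(Provenance p) {
        String source = p.sourceUri() == null ? "" : p.sourceUri();
        return p.requestId() == null ? source : source + "#" + p.requestId();
    }

    public static void main(String[] args) {
        Provenance p = buildProvenance("urn:source:lab-system#abc", "req-123", true, true);
        System.out.println(render(p)); // urn:source:lab-system#req-123
        System.out.println(buildProvenance("urn:source:lab-system", "req-123", false, false)); // null
    }
}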
This is important because we want the updated version number * to be reflected in the resource shared with interceptors */ - if (!thePerformIndexing && !savedEntity.isUnchangedInCurrentOperation() && !ourDisableIncrementOnUpdateForUnitTest) { + if (!thePerformIndexing + && !savedEntity.isUnchangedInCurrentOperation() + && !ourDisableIncrementOnUpdateForUnitTest) { if (theResourceId.hasVersionIdPart() == false) { theResourceId = theResourceId.withVersion(Long.toString(savedEntity.getVersion())); } @@ -1418,7 +1592,9 @@ public abstract class BaseHapiFhirDao extends BaseStora wasDeleted = theOldResource.isDeleted(); } - DaoMethodOutcome outcome = toMethodOutcome(theRequestDetails, savedEntity, theResource, theMatchUrl, theOperationType).setCreated(wasDeleted); + DaoMethodOutcome outcome = toMethodOutcome( + theRequestDetails, savedEntity, theResource, theMatchUrl, theOperationType) + .setCreated(wasDeleted); if (!thePerformIndexing) { IIdType id = getContext().getVersion().newIdType(); @@ -1441,18 +1617,25 @@ public abstract class BaseHapiFhirDao extends BaseStora return outcome; } - private void notifyInterceptors(RequestDetails theRequestDetails, T theResource, IBaseResource theOldResource, TransactionDetails theTransactionDetails, boolean isUnchanged) { + private void notifyInterceptors( + RequestDetails theRequestDetails, + T theResource, + IBaseResource theOldResource, + TransactionDetails theTransactionDetails, + boolean isUnchanged) { Pointcut interceptorPointcut = Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED; HookParams hookParams = new HookParams() - .add(IBaseResource.class, theOldResource) - .add(IBaseResource.class, theResource) - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - .add(TransactionDetails.class, theTransactionDetails); + .add(IBaseResource.class, theOldResource) + .add(IBaseResource.class, theResource) + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(TransactionDetails.class, theTransactionDetails); if (!isUnchanged) { - hookParams.add(InterceptorInvocationTimingEnum.class, theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED)); + hookParams.add( + InterceptorInvocationTimingEnum.class, + theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED)); interceptorPointcut = Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED; } @@ -1461,9 +1644,11 @@ public abstract class BaseHapiFhirDao extends BaseStora protected void addPidToResource(IResourceLookup theEntity, IBaseResource theResource) { if (theResource instanceof IAnyResource) { - IDao.RESOURCE_PID.put((IAnyResource) theResource, theEntity.getPersistentId().getId()); + IDao.RESOURCE_PID.put( + (IAnyResource) theResource, theEntity.getPersistentId().getId()); } else if (theResource instanceof IResource) { - IDao.RESOURCE_PID.put((IResource) theResource, theEntity.getPersistentId().getId()); + IDao.RESOURCE_PID.put( + (IResource) theResource, theEntity.getPersistentId().getId()); } } @@ -1514,14 +1699,15 @@ public abstract class BaseHapiFhirDao extends BaseStora if (!isLogicalReference(referencedId)) { if (!referencedId.getValue().contains("?")) { if (!validTypes.contains(referencedId.getResourceType())) { - throw new UnprocessableEntityException(Msg.code(931) + "Invalid reference found at path '" + newPath + "'. 
Resource type '" + referencedId.getResourceType() + "' is not valid for this path"); + throw new UnprocessableEntityException(Msg.code(931) + + "Invalid reference found at path '" + newPath + "'. Resource type '" + + referencedId.getResourceType() + "' is not valid for this path"); } } } } } } - } } } @@ -1529,7 +1715,9 @@ public abstract class BaseHapiFhirDao extends BaseStora protected void validateMetaCount(int theMetaCount) { if (myStorageSettings.getResourceMetaCountHardLimit() != null) { if (theMetaCount > myStorageSettings.getResourceMetaCountHardLimit()) { - throw new UnprocessableEntityException(Msg.code(932) + "Resource contains " + theMetaCount + " meta entries (tag/profile/security label), maximum is " + myStorageSettings.getResourceMetaCountHardLimit()); + throw new UnprocessableEntityException(Msg.code(932) + "Resource contains " + theMetaCount + + " meta entries (tag/profile/security label), maximum is " + + myStorageSettings.getResourceMetaCountHardLimit()); } } } @@ -1566,7 +1754,9 @@ public abstract class BaseHapiFhirDao extends BaseStora } if (tag != null) { - throw new UnprocessableEntityException(Msg.code(933) + "Resource contains the 'subsetted' tag, and must not be stored as it may contain a subset of available data"); + throw new UnprocessableEntityException( + Msg.code(933) + + "Resource contains the 'subsetted' tag, and must not be stored as it may contain a subset of available data"); } if (getStorageSettings().isEnforceReferenceTargetTypes()) { @@ -1575,7 +1765,6 @@ public abstract class BaseHapiFhirDao extends BaseStora } validateMetaCount(totalMetaCount); - } @PostConstruct @@ -1588,7 +1777,11 @@ public abstract class BaseHapiFhirDao extends BaseStora myStorageSettings = theStorageSettings; } - public void populateFullTextFields(final FhirContext theContext, final IBaseResource theResource, ResourceTable theEntity, ResourceIndexedSearchParams theNewParams) { + public void populateFullTextFields( + final FhirContext theContext, + final IBaseResource theResource, + ResourceTable theEntity, + ResourceIndexedSearchParams theNewParams) { if (theEntity.getDeleted() != null) { theEntity.setNarrativeText(null); theEntity.setContentText(null); @@ -1596,7 +1789,8 @@ public abstract class BaseHapiFhirDao extends BaseStora theEntity.setNarrativeText(parseNarrativeTextIntoWords(theResource)); theEntity.setContentText(parseContentTextIntoWords(theContext, theResource)); if (myStorageSettings.isAdvancedHSearchIndexing()) { - ExtendedHSearchIndexData hSearchIndexData = myFulltextSearchSvc.extractLuceneIndexData(theResource, theNewParams); + ExtendedHSearchIndexData hSearchIndexData = + myFulltextSearchSvc.extractLuceneIndexData(theResource, theNewParams); theEntity.setLuceneIndexData(hSearchIndexData); } } @@ -1620,7 +1814,8 @@ public abstract class BaseHapiFhirDao extends BaseStora private final TagDefinition myTagDefinition; private final MemoryCacheService.TagDefinitionCacheKey myKey; - public AddTagDefinitionToCacheAfterCommitSynchronization(MemoryCacheService.TagDefinitionCacheKey theKey, TagDefinition theTagDefinition) { + public AddTagDefinitionToCacheAfterCommitSynchronization( + MemoryCacheService.TagDefinitionCacheKey theKey, TagDefinition theTagDefinition) { myTagDefinition = theTagDefinition; myKey = theKey; } @@ -1633,17 +1828,20 @@ public abstract class BaseHapiFhirDao extends BaseStora @Nonnull public static MemoryCacheService.TagDefinitionCacheKey toTagDefinitionMemoryCacheKey( - TagTypeEnum theTagType, String theScheme, String theTerm, String theVersion, 
Boolean theUserSelected) { - return new MemoryCacheService.TagDefinitionCacheKey(theTagType, theScheme, theTerm, theVersion, theUserSelected); + TagTypeEnum theTagType, String theScheme, String theTerm, String theVersion, Boolean theUserSelected) { + return new MemoryCacheService.TagDefinitionCacheKey( + theTagType, theScheme, theTerm, theVersion, theUserSelected); } @SuppressWarnings("unchecked") public static String parseContentTextIntoWords(FhirContext theContext, IBaseResource theResource) { - Class> stringType = (Class>) theContext.getElementDefinition("string").getImplementingClass(); + Class> stringType = (Class>) + theContext.getElementDefinition("string").getImplementingClass(); StringBuilder retVal = new StringBuilder(); - List> childElements = theContext.newTerser().getAllPopulatedChildElementsOfType(theResource, stringType); + List> childElements = + theContext.newTerser().getAllPopulatedChildElementsOfType(theResource, stringType); for (IPrimitiveType nextType : childElements) { if (stringType.equals(nextType.getClass())) { String nextValue = nextType.getValueAsString(); @@ -1672,7 +1870,11 @@ public abstract class BaseHapiFhirDao extends BaseStora return resourceText; } - public static String encodeResource(IBaseResource theResource, ResourceEncodingEnum theEncoding, List theExcludeElements, FhirContext theContext) { + public static String encodeResource( + IBaseResource theResource, + ResourceEncodingEnum theEncoding, + List theExcludeElements, + FhirContext theContext) { IParser parser = theEncoding.newParser(theContext); parser.setDontEncodeElements(theExcludeElements); return parser.encodeResourceToString(theResource); @@ -1708,7 +1910,6 @@ public abstract class BaseHapiFhirDao extends BaseStora } catch (Exception e) { throw new DataFormatException(Msg.code(934) + "Unable to convert DIV to string", e); } - } return b.toString(); } @@ -1725,5 +1926,4 @@ public abstract class BaseHapiFhirDao extends BaseStora public static void setValidationDisabledForUnitTest(boolean theValidationDisabledForUnitTest) { ourValidationDisabledForUnitTest = theValidationDisabledForUnitTest; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java index b09293a4265..8cfc96ae7cb 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java @@ -138,13 +138,6 @@ import org.springframework.transaction.support.TransactionSynchronization; import org.springframework.transaction.support.TransactionSynchronizationManager; import org.springframework.transaction.support.TransactionTemplate; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import javax.annotation.PostConstruct; -import javax.persistence.LockModeType; -import javax.persistence.NoResultException; -import javax.persistence.TypedQuery; -import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; @@ -159,77 +152,113 @@ import java.util.UUID; import java.util.concurrent.Callable; import java.util.function.Supplier; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import javax.annotation.PostConstruct; +import javax.persistence.LockModeType; +import javax.persistence.NoResultException; +import javax.persistence.TypedQuery; +import 
javax.servlet.http.HttpServletResponse; +import static java.util.Objects.isNull; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; -import static java.util.Objects.isNull; -public abstract class BaseHapiFhirResourceDao extends BaseHapiFhirDao implements IFhirResourceDao { +public abstract class BaseHapiFhirResourceDao extends BaseHapiFhirDao + implements IFhirResourceDao { public static final String BASE_RESOURCE_NAME = "resource"; private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiFhirResourceDao.class); + @Autowired protected IInterceptorBroadcaster myInterceptorBroadcaster; @Autowired protected PlatformTransactionManager myPlatformTransactionManager; + @Autowired(required = false) protected IFulltextSearchSvc mySearchDao; + @Autowired protected HapiTransactionService myTransactionService; + @Autowired private MatchResourceUrlService myMatchResourceUrlService; + @Autowired private SearchBuilderFactory mySearchBuilderFactory; + @Autowired private DaoRegistry myDaoRegistry; + @Autowired private IRequestPartitionHelperSvc myRequestPartitionHelperService; + @Autowired private MatchUrlService myMatchUrlService; + @Autowired private IDeleteExpungeJobSubmitter myDeleteExpungeJobSubmitter; + @Autowired private IJobCoordinator myJobCoordinator; + private IInstanceValidatorModule myInstanceValidator; private String myResourceName; private Class myResourceType; + @Autowired private PersistedJpaBundleProviderFactory myPersistedJpaBundleProviderFactory; + @Autowired private MemoryCacheService myMemoryCacheService; + private TransactionTemplate myTxTemplate; + @Autowired private UrlPartitioner myUrlPartitioner; + @Autowired private ResourceSearchUrlSvc myResourceSearchUrlSvc; + @Autowired private IFhirSystemDao mySystemDao; - public static T invokeStoragePreShowResources(IInterceptorBroadcaster theInterceptorBroadcaster, RequestDetails theRequest, T retVal) { - if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.STORAGE_PRESHOW_RESOURCES, theInterceptorBroadcaster, theRequest)) { + public static T invokeStoragePreShowResources( + IInterceptorBroadcaster theInterceptorBroadcaster, RequestDetails theRequest, T retVal) { + if (CompositeInterceptorBroadcaster.hasHooks( + Pointcut.STORAGE_PRESHOW_RESOURCES, theInterceptorBroadcaster, theRequest)) { SimplePreResourceShowDetails showDetails = new SimplePreResourceShowDetails(retVal); HookParams params = new HookParams() - .add(IPreResourceShowDetails.class, showDetails) - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest); - CompositeInterceptorBroadcaster.doCallHooks(theInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRESHOW_RESOURCES, params); + .add(IPreResourceShowDetails.class, showDetails) + .add(RequestDetails.class, theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest); + CompositeInterceptorBroadcaster.doCallHooks( + theInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRESHOW_RESOURCES, params); //noinspection unchecked - retVal = (T) showDetails.getResource(0);//TODO GGG/JA : getting resource 0 is interesting. We apparently allow null values in the list. Should we? + retVal = (T) showDetails.getResource( + 0); // TODO GGG/JA : getting resource 0 is interesting. We apparently allow null values in the list. + // Should we? 
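invokeStoragePreShowResources above follows the usual hook pattern: check whether any interceptor is registered for the pointcut, build typed HookParams, broadcast, and let an interceptor mask or replace the resource before it is returned. A stripped-down sketch of that broadcast mechanism; the Pointcut, HookParams, Broadcaster and ShowDetails types here are simplified stand-ins, not the real ca.uhn.fhir.interceptor API:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;

public class HookBroadcastSketch {

    enum Pointcut { STORAGE_PRESHOW_RESOURCES }

    /** Typed parameter bag, keyed by class, like HookParams.add(Class, value). */
    static final class HookParams {
        private final Map<Class<?>, Object> values = new HashMap<>();
        <T> HookParams add(Class<T> type, T value) { values.put(type, value); return this; }
        <T> T get(Class<T> type) { return type.cast(values.get(type)); }
    }

    /** Minimal broadcaster: interceptors are consumers registered per pointcut. */
    static final class Broadcaster {
        private final Map<Pointcut, List<Consumer<HookParams>>> hooks = new HashMap<>();
        void register(Pointcut p, Consumer<HookParams> hook) {
            hooks.computeIfAbsent(p, k -> new ArrayList<>()).add(hook);
        }
        boolean hasHooks(Pointcut p) { return hooks.containsKey(p); }
        void callHooks(Pointcut p, HookParams params) {
            hooks.getOrDefault(p, List.of()).forEach(h -> h.accept(params));
        }
    }

    /** Mutable holder so an interceptor can swap the resource, like IPreResourceShowDetails. */
    static final class ShowDetails { String resource; ShowDetails(String r) { resource = r; } }

    static String preShow(Broadcaster broadcaster, String resource) {
        if (!broadcaster.hasHooks(Pointcut.STORAGE_PRESHOW_RESOURCES)) {
            return resource; // nothing registered: skip the object churn entirely
        }
        ShowDetails details = new ShowDetails(resource);
        HookParams params = new HookParams().add(ShowDetails.class, details);
        broadcaster.callHooks(Pointcut.STORAGE_PRESHOW_RESOURCES, params);
        return details.resource; // interceptors may have masked or replaced it
    }

    public static void main(String[] args) {
        Broadcaster b = new Broadcaster();
        b.register(Pointcut.STORAGE_PRESHOW_RESOURCES,
                p -> p.get(ShowDetails.class).resource = "redacted");
        System.out.println(preShow(b, "Patient/123")); // redacted
    }
}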
return retVal; } else { return retVal; } } - public static void invokeStoragePreAccessResources(IInterceptorBroadcaster theInterceptorBroadcaster, RequestDetails theRequest, IIdType theId, IBaseResource theResource) { - if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.STORAGE_PREACCESS_RESOURCES, theInterceptorBroadcaster, theRequest)) { + public static void invokeStoragePreAccessResources( + IInterceptorBroadcaster theInterceptorBroadcaster, + RequestDetails theRequest, + IIdType theId, + IBaseResource theResource) { + if (CompositeInterceptorBroadcaster.hasHooks( + Pointcut.STORAGE_PREACCESS_RESOURCES, theInterceptorBroadcaster, theRequest)) { SimplePreResourceAccessDetails accessDetails = new SimplePreResourceAccessDetails(theResource); HookParams params = new HookParams() - .add(IPreResourceAccessDetails.class, accessDetails) - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest); - CompositeInterceptorBroadcaster.doCallHooks(theInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PREACCESS_RESOURCES, params); + .add(IPreResourceAccessDetails.class, accessDetails) + .add(RequestDetails.class, theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest); + CompositeInterceptorBroadcaster.doCallHooks( + theInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PREACCESS_RESOURCES, params); if (accessDetails.isDontReturnResourceAtIndex(0)) { throw new ResourceNotFoundException(Msg.code(1995) + "Resource " + theId + " is not known"); } @@ -288,13 +317,25 @@ public abstract class BaseHapiFhirResourceDao extends B } @Override - public DaoMethodOutcome create(T theResource, String theIfNoneExist, boolean thePerformIndexing, RequestDetails theRequestDetails, @Nonnull TransactionDetails theTransactionDetails) { - RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequestDetails, theResource, getResourceName()); + public DaoMethodOutcome create( + T theResource, + String theIfNoneExist, + boolean thePerformIndexing, + RequestDetails theRequestDetails, + @Nonnull TransactionDetails theTransactionDetails) { + RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest( + theRequestDetails, theResource, getResourceName()); return myTransactionService - .withRequest(theRequestDetails) - .withTransactionDetails(theTransactionDetails) - .withRequestPartitionId(requestPartitionId) - .execute(tx -> doCreateForPost(theResource, theIfNoneExist, thePerformIndexing, theTransactionDetails, theRequestDetails, requestPartitionId)); + .withRequest(theRequestDetails) + .withTransactionDetails(theTransactionDetails) + .withRequestPartitionId(requestPartitionId) + .execute(tx -> doCreateForPost( + theResource, + theIfNoneExist, + thePerformIndexing, + theTransactionDetails, + theRequestDetails, + requestPartitionId)); } @VisibleForTesting @@ -305,7 +346,13 @@ public abstract class BaseHapiFhirResourceDao extends B /** * Called for FHIR create (POST) operations */ - protected DaoMethodOutcome doCreateForPost(T theResource, String theIfNoneExist, boolean thePerformIndexing, TransactionDetails theTransactionDetails, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) { + protected DaoMethodOutcome doCreateForPost( + T theResource, + String theIfNoneExist, + boolean thePerformIndexing, + TransactionDetails theTransactionDetails, + RequestDetails theRequestDetails, + RequestPartitionId theRequestPartitionId) { if (theResource == 
null) { String msg = getContext().getLocalizer().getMessage(BaseStorageDao.class, "missingBody"); throw new InvalidRequestException(Msg.code(956) + msg); @@ -313,8 +360,11 @@ public abstract class BaseHapiFhirResourceDao extends B if (isNotBlank(theResource.getIdElement().getIdPart())) { if (getContext().getVersion().getVersion().isOlderThan(FhirVersionEnum.DSTU3)) { - String message = getMessageSanitized("failedToCreateWithClientAssignedId", theResource.getIdElement().getIdPart()); - throw new InvalidRequestException(Msg.code(957) + message, createErrorOperationOutcome(message, "processing")); + String message = getMessageSanitized( + "failedToCreateWithClientAssignedId", + theResource.getIdElement().getIdPart()); + throw new InvalidRequestException( + Msg.code(957) + message, createErrorOperationOutcome(message, "processing")); } else { // As of DSTU3, ID and version in the body should be ignored for a create/update theResource.setId(""); @@ -326,14 +376,30 @@ public abstract class BaseHapiFhirResourceDao extends B theResource.setUserData(JpaConstants.RESOURCE_ID_SERVER_ASSIGNED, Boolean.TRUE); } - return doCreateForPostOrPut(theRequestDetails, theResource, theIfNoneExist, true, thePerformIndexing, theRequestPartitionId, RestOperationTypeEnum.CREATE, theTransactionDetails); + return doCreateForPostOrPut( + theRequestDetails, + theResource, + theIfNoneExist, + true, + thePerformIndexing, + theRequestPartitionId, + RestOperationTypeEnum.CREATE, + theTransactionDetails); } /** * Called both for FHIR create (POST) operations (via {@link #doCreateForPost(IBaseResource, String, boolean, TransactionDetails, RequestDetails, RequestPartitionId)} * as well as for FHIR update (PUT) where we're doing a create-with-client-assigned-ID (via {@link #doUpdate(IBaseResource, String, boolean, boolean, RequestDetails, TransactionDetails, RequestPartitionId)}. 
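doCreateForPost above handles a create whose body arrives with an id: before DSTU3 the request is rejected with the failedToCreateWithClientAssignedId message, from DSTU3 on the id is simply discarded and the server assigns its own. A condensed sketch of that branch; FhirVersion, Resource and the exception type are simplified stand-ins for the HAPI equivalents:

public class CreateWithIdSketch {

    enum FhirVersion { DSTU2, DSTU3, R4, R5 }

    // Hypothetical stand-in for an incoming resource carrying an optional client-supplied id.
    static final class Resource {
        String id;
        Resource(String id) { this.id = id; }
    }

    /** Mirrors the POST-create rule in the patch: reject pre-DSTU3, otherwise ignore the id. */
    static void prepareForCreate(Resource resource, FhirVersion version) {
        if (resource.id != null && !resource.id.isBlank()) {
            if (version.ordinal() < FhirVersion.DSTU3.ordinal()) {
                throw new IllegalArgumentException("failedToCreateWithClientAssignedId: " + resource.id);
            }
            // As of DSTU3, ID and version in the body are ignored for a create; the server assigns one.
            resource.id = "";
        }
    }

    public static void main(String[] args) {
        Resource r = new Resource("client-chosen-id");
        prepareForCreate(r, FhirVersion.R4);
        System.out.println("id after create prep: '" + r.id + "'"); // ''
        try {
            prepareForCreate(new Resource("client-chosen-id"), FhirVersion.DSTU2);
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}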
*/ - private DaoMethodOutcome doCreateForPostOrPut(RequestDetails theRequest, T theResource, String theMatchUrl, boolean theProcessMatchUrl, boolean thePerformIndexing, RequestPartitionId theRequestPartitionId, RestOperationTypeEnum theOperationType, TransactionDetails theTransactionDetails) { + private DaoMethodOutcome doCreateForPostOrPut( + RequestDetails theRequest, + T theResource, + String theMatchUrl, + boolean theProcessMatchUrl, + boolean thePerformIndexing, + RequestPartitionId theRequestPartitionId, + RestOperationTypeEnum theOperationType, + TransactionDetails theTransactionDetails) { StopWatch w = new StopWatch(); preProcessResourceForStorage(theResource); @@ -346,9 +412,17 @@ public abstract class BaseHapiFhirResourceDao extends B entity.initializeVersion(); if (isNotBlank(theMatchUrl) && theProcessMatchUrl) { - Set match = myMatchResourceUrlService.processMatchUrl(theMatchUrl, myResourceType, theTransactionDetails, theRequest); + Set match = myMatchResourceUrlService.processMatchUrl( + theMatchUrl, myResourceType, theTransactionDetails, theRequest); if (match.size() > 1) { - String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "transactionOperationWithMultipleMatchFailure", "CREATE", theMatchUrl, match.size()); + String msg = getContext() + .getLocalizer() + .getMessageSanitized( + BaseStorageDao.class, + "transactionOperationWithMultipleMatchFailure", + "CREATE", + theMatchUrl, + match.size()); throw new PreconditionFailedException(Msg.code(958) + msg); } else if (match.size() == 1) { @@ -398,23 +472,43 @@ public abstract class BaseHapiFhirResourceDao extends B Supplier idSupplier = () -> myTxTemplate.execute(tx -> { IIdType retVal = myIdHelperService.translatePidIdToForcedId(myFhirContext, myResourceName, pid); if (!retVal.hasVersionIdPart()) { - Long version = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.RESOURCE_CONDITIONAL_CREATE_VERSION, pid.getId()); + Long version = myMemoryCacheService.getIfPresent( + MemoryCacheService.CacheEnum.RESOURCE_CONDITIONAL_CREATE_VERSION, pid.getId()); if (version == null) { version = myResourceTableDao.findCurrentVersionByPid(pid.getId()); if (version != null) { - myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.RESOURCE_CONDITIONAL_CREATE_VERSION, pid.getId(), version); + myMemoryCacheService.putAfterCommit( + MemoryCacheService.CacheEnum.RESOURCE_CONDITIONAL_CREATE_VERSION, + pid.getId(), + version); } } if (version != null) { - retVal = myFhirContext.getVersion().newIdType().setParts(retVal.getBaseUrl(), retVal.getResourceType(), retVal.getIdPart(), Long.toString(version)); + retVal = myFhirContext + .getVersion() + .newIdType() + .setParts( + retVal.getBaseUrl(), + retVal.getResourceType(), + retVal.getIdPart(), + Long.toString(version)); } } return retVal; }); - DaoMethodOutcome outcome = toMethodOutcomeLazy(theRequest, pid, entitySupplier, idSupplier).setCreated(false).setNop(true); - StorageResponseCodeEnum responseCode = StorageResponseCodeEnum.SUCCESSFUL_CREATE_WITH_CONDITIONAL_MATCH; - String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulCreateConditionalWithMatch", w.getMillisAndRestart(), UrlUtil.sanitizeUrlPart(theMatchUrl)); + DaoMethodOutcome outcome = toMethodOutcomeLazy(theRequest, pid, entitySupplier, idSupplier) + .setCreated(false) + .setNop(true); + StorageResponseCodeEnum responseCode = + StorageResponseCodeEnum.SUCCESSFUL_CREATE_WITH_CONDITIONAL_MATCH; + String msg = getContext() + .getLocalizer() + 
.getMessageSanitized( + BaseStorageDao.class, + "successfulCreateConditionalWithMatch", + w.getMillisAndRestart(), + UrlUtil.sanitizeUrlPart(theMatchUrl)); outcome.setOperationOutcome(createInfoOperationOutcome(msg, responseCode)); return outcome; } @@ -423,7 +517,8 @@ public abstract class BaseHapiFhirResourceDao extends B String resourceIdBeforeStorage = theResource.getIdElement().getIdPart(); boolean resourceHadIdBeforeStorage = isNotBlank(resourceIdBeforeStorage); - boolean resourceIdWasServerAssigned = theResource.getUserData(JpaConstants.RESOURCE_ID_SERVER_ASSIGNED) == Boolean.TRUE; + boolean resourceIdWasServerAssigned = + theResource.getUserData(JpaConstants.RESOURCE_ID_SERVER_ASSIGNED) == Boolean.TRUE; if (resourceHadIdBeforeStorage) { entity.setFhirId(resourceIdBeforeStorage); } @@ -432,19 +527,17 @@ public abstract class BaseHapiFhirResourceDao extends B // Notify interceptor for accepting/rejecting client assigned ids if (!resourceIdWasServerAssigned && resourceHadIdBeforeStorage) { - hookParams = new HookParams() - .add(IBaseResource.class, theResource) - .add(RequestDetails.class, theRequest); + hookParams = new HookParams().add(IBaseResource.class, theResource).add(RequestDetails.class, theRequest); doCallHooks(theTransactionDetails, theRequest, Pointcut.STORAGE_PRESTORAGE_CLIENT_ASSIGNED_ID, hookParams); } // Interceptor call: STORAGE_PRESTORAGE_RESOURCE_CREATED hookParams = new HookParams() - .add(IBaseResource.class, theResource) - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest) - .add(RequestPartitionId.class, theRequestPartitionId) - .add(TransactionDetails.class, theTransactionDetails); + .add(IBaseResource.class, theResource) + .add(RequestDetails.class, theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest) + .add(RequestPartitionId.class, theRequestPartitionId) + .add(TransactionDetails.class, theTransactionDetails); doCallHooks(theTransactionDetails, theRequest, Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED, hookParams); if (resourceHadIdBeforeStorage && !resourceIdWasServerAssigned) { @@ -461,7 +554,16 @@ public abstract class BaseHapiFhirResourceDao extends B // Perform actual DB update // this call will also update the metadata - ResourceTable updatedEntity = updateEntity(theRequest, theResource, entity, null, thePerformIndexing, false, theTransactionDetails, false, thePerformIndexing); + ResourceTable updatedEntity = updateEntity( + theRequest, + theResource, + entity, + null, + thePerformIndexing, + false, + theTransactionDetails, + false, + thePerformIndexing); // Store the resource forced ID if necessary JpaPid jpaPid = JpaPid.fromId(updatedEntity.getResourceId()); @@ -470,7 +572,8 @@ public abstract class BaseHapiFhirResourceDao extends B boolean createForPureNumericIds = true; createForcedIdIfNeeded(entity, resourceIdBeforeStorage, createForPureNumericIds); } else { - boolean createForPureNumericIds = getStorageSettings().getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC; + boolean createForPureNumericIds = getStorageSettings().getResourceClientIdStrategy() + != JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC; createForcedIdIfNeeded(entity, resourceIdBeforeStorage, createForPureNumericIds); } } else { @@ -480,7 +583,8 @@ public abstract class BaseHapiFhirResourceDao extends B break; case ANY: boolean createForPureNumericIds = true; - createForcedIdIfNeeded(updatedEntity, theResource.getIdElement().getIdPart(), createForPureNumericIds); + 
createForcedIdIfNeeded( + updatedEntity, theResource.getIdElement().getIdPart(), createForPureNumericIds); // for client ID mode ANY, we will always have a forced ID. If we ever // stop populating the transient forced ID be warned that we use it // (and expect it to be set correctly) farther below. @@ -494,7 +598,8 @@ public abstract class BaseHapiFhirResourceDao extends B // Pre-cache the resource ID jpaPid.setAssociatedResourceId(entity.getIdType(myFhirContext)); - myIdHelperService.addResolvedPidToForcedId(jpaPid, theRequestPartitionId, getResourceName(), entity.getTransientForcedId(), null); + myIdHelperService.addResolvedPidToForcedId( + jpaPid, theRequestPartitionId, getResourceName(), entity.getTransientForcedId(), null); theTransactionDetails.addResolvedResourceId(jpaPid.getAssociatedResourceId(), jpaPid); theTransactionDetails.addResolvedResource(jpaPid.getAssociatedResourceId(), theResource); @@ -517,16 +622,18 @@ public abstract class BaseHapiFhirResourceDao extends B // Notify JPA interceptors if (!updatedEntity.isUnchangedInCurrentOperation()) { hookParams = new HookParams() - .add(IBaseResource.class, theResource) - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest) - .add(TransactionDetails.class, theTransactionDetails) - .add(InterceptorInvocationTimingEnum.class, theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED)); + .add(IBaseResource.class, theResource) + .add(RequestDetails.class, theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest) + .add(TransactionDetails.class, theTransactionDetails) + .add( + InterceptorInvocationTimingEnum.class, + theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED)); doCallHooks(theTransactionDetails, theRequest, Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED, hookParams); } DaoMethodOutcome outcome = toMethodOutcome(theRequest, entity, theResource, theMatchUrl, theOperationType) - .setCreated(true); + .setCreated(true); if (!thePerformIndexing) { outcome.setId(theResource.getIdElement()); @@ -537,7 +644,8 @@ public abstract class BaseHapiFhirResourceDao extends B return outcome; } - private void createForcedIdIfNeeded(ResourceTable theEntity, String theResourceId, boolean theCreateForPureNumericIds) { + private void createForcedIdIfNeeded( + ResourceTable theEntity, String theResourceId, boolean theCreateForPureNumericIds) { if (isNotBlank(theResourceId) && theEntity.getForcedId() == null) { if (theCreateForPureNumericIds || !IdHelperService.isValidPid(theResourceId)) { ForcedId forcedId = new ForcedId(); @@ -576,13 +684,19 @@ public abstract class BaseHapiFhirResourceDao extends B if (strategy == JpaStorageSettings.ClientIdStrategyEnum.NOT_ALLOWED) { if (!isSystemRequest(theRequest)) { - throw new ResourceNotFoundException(Msg.code(959) + getMessageSanitized("failedToCreateWithClientAssignedIdNotAllowed", theResource.getIdElement().getIdPart())); + throw new ResourceNotFoundException(Msg.code(959) + + getMessageSanitized( + "failedToCreateWithClientAssignedIdNotAllowed", + theResource.getIdElement().getIdPart())); } } if (strategy == JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC) { if (theResource.getIdElement().isIdPartValidLong()) { - throw new InvalidRequestException(Msg.code(960) + getMessageSanitized("failedToCreateWithClientAssignedNumericId", theResource.getIdElement().getIdPart())); + throw new InvalidRequestException(Msg.code(960) + + getMessageSanitized( + 
"failedToCreateWithClientAssignedNumericId", + theResource.getIdElement().getIdPart())); } } } @@ -632,10 +746,11 @@ public abstract class BaseHapiFhirResourceDao extends B } @Override - public DaoMethodOutcome delete(IIdType theId, - DeleteConflictList theDeleteConflicts, - RequestDetails theRequestDetails, - @Nonnull TransactionDetails theTransactionDetails) { + public DaoMethodOutcome delete( + IIdType theId, + DeleteConflictList theDeleteConflicts, + RequestDetails theRequestDetails, + @Nonnull TransactionDetails theTransactionDetails) { validateIdPresentForDelete(theId); validateDeleteEnabled(); @@ -647,11 +762,15 @@ public abstract class BaseHapiFhirResourceDao extends B // if not found, return an outcome anyways. // Because no object actually existed, we'll // just set the id and nothing else - return createMethodOutcomeForResourceId(theId.getValue(), MESSAGE_KEY_DELETE_RESOURCE_NOT_EXISTING, StorageResponseCodeEnum.SUCCESSFUL_DELETE_NOT_FOUND); + return createMethodOutcomeForResourceId( + theId.getValue(), + MESSAGE_KEY_DELETE_RESOURCE_NOT_EXISTING, + StorageResponseCodeEnum.SUCCESSFUL_DELETE_NOT_FOUND); } if (theId.hasVersionIdPart() && Long.parseLong(theId.getVersionIdPart()) != entity.getVersion()) { - throw new ResourceVersionConflictException(Msg.code(961) + "Trying to delete " + theId + " but this is not the current version"); + throw new ResourceVersionConflictException( + Msg.code(961) + "Trying to delete " + theId + " but this is not the current version"); } JpaPid persistentId = JpaPid.fromId(entity.getResourceId()); @@ -659,7 +778,10 @@ public abstract class BaseHapiFhirResourceDao extends B // Don't delete again if it's already deleted if (isDeleted(entity)) { - DaoMethodOutcome outcome = createMethodOutcomeForResourceId(entity.getIdDt().getValue(), MESSAGE_KEY_DELETE_RESOURCE_ALREADY_DELETED, StorageResponseCodeEnum.SUCCESSFUL_DELETE_ALREADY_DELETED); + DaoMethodOutcome outcome = createMethodOutcomeForResourceId( + entity.getIdDt().getValue(), + MESSAGE_KEY_DELETE_RESOURCE_ALREADY_DELETED, + StorageResponseCodeEnum.SUCCESSFUL_DELETE_ALREADY_DELETED); // used to exist, so we'll set the persistent id outcome.setPersistentId(persistentId); @@ -675,13 +797,14 @@ public abstract class BaseHapiFhirResourceDao extends B // Notify IServerOperationInterceptors about pre-action call HookParams hook = new HookParams() - .add(IBaseResource.class, resourceToDelete) - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - .add(TransactionDetails.class, theTransactionDetails); + .add(IBaseResource.class, resourceToDelete) + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(TransactionDetails.class, theTransactionDetails); doCallHooks(theTransactionDetails, theRequestDetails, Pointcut.STORAGE_PRESTORAGE_RESOURCE_DELETED, hook); - myDeleteConflictService.validateOkToDelete(theDeleteConflicts, entity, false, theRequestDetails, theTransactionDetails); + myDeleteConflictService.validateOkToDelete( + theDeleteConflicts, entity, false, theRequestDetails, theTransactionDetails); preDelete(resourceToDelete, entity, theRequestDetails); @@ -690,19 +813,25 @@ public abstract class BaseHapiFhirResourceDao extends B // Notify JPA interceptors HookParams hookParams = new HookParams() - .add(IBaseResource.class, resourceToDelete) - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - 
.add(TransactionDetails.class, theTransactionDetails) - .add(InterceptorInvocationTimingEnum.class, theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED)); - + .add(IBaseResource.class, resourceToDelete) + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(TransactionDetails.class, theTransactionDetails) + .add( + InterceptorInvocationTimingEnum.class, + theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED)); doCallHooks(theTransactionDetails, theRequestDetails, Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED, hookParams); - DaoMethodOutcome outcome = toMethodOutcome(theRequestDetails, savedEntity, resourceToDelete, null, RestOperationTypeEnum.DELETE).setCreated(true); + DaoMethodOutcome outcome = toMethodOutcome( + theRequestDetails, savedEntity, resourceToDelete, null, RestOperationTypeEnum.DELETE) + .setCreated(true); String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulDeletes", 1); - msg += " " + getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulTimingSuffix", w.getMillis()); + msg += " " + + getContext() + .getLocalizer() + .getMessageSanitized(BaseStorageDao.class, "successfulTimingSuffix", w.getMillis()); outcome.setOperationOutcome(createInfoOperationOutcome(msg, StorageResponseCodeEnum.SUCCESSFUL_DELETE)); return outcome; @@ -732,14 +861,25 @@ public abstract class BaseHapiFhirResourceDao extends B * transaction processors */ @Override - public DeleteMethodOutcome deleteByUrl(String theUrl, DeleteConflictList deleteConflicts, RequestDetails theRequestDetails, @Nonnull TransactionDetails theTransactionDetails) { + public DeleteMethodOutcome deleteByUrl( + String theUrl, + DeleteConflictList deleteConflicts, + RequestDetails theRequestDetails, + @Nonnull TransactionDetails theTransactionDetails) { validateDeleteEnabled(); - return myTransactionService.execute(theRequestDetails, theTransactionDetails, tx -> doDeleteByUrl(theUrl, deleteConflicts, theTransactionDetails, theRequestDetails)); + return myTransactionService.execute( + theRequestDetails, + theTransactionDetails, + tx -> doDeleteByUrl(theUrl, deleteConflicts, theTransactionDetails, theRequestDetails)); } @Nonnull - private DeleteMethodOutcome doDeleteByUrl(String theUrl, DeleteConflictList deleteConflicts, TransactionDetails theTransactionDetails, RequestDetails theRequestDetails) { + private DeleteMethodOutcome doDeleteByUrl( + String theUrl, + DeleteConflictList deleteConflicts, + TransactionDetails theTransactionDetails, + RequestDetails theRequestDetails) { ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(theUrl); SearchParameterMap paramMap = resourceSearch.getSearchParameterMap(); paramMap.setLoadSynchronous(true); @@ -748,7 +888,15 @@ public abstract class BaseHapiFhirResourceDao extends B if (resourceIds.size() > 1) { if (!getStorageSettings().isAllowMultipleDelete()) { - throw new PreconditionFailedException(Msg.code(962) + getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "transactionOperationWithMultipleMatchFailure", "DELETE", theUrl, resourceIds.size())); + throw new PreconditionFailedException(Msg.code(962) + + getContext() + .getLocalizer() + .getMessageSanitized( + BaseStorageDao.class, + "transactionOperationWithMultipleMatchFailure", + "DELETE", + theUrl, + resourceIds.size())); } } @@ -768,21 +916,23 @@ public abstract class BaseHapiFhirResourceDao extends B 
ourLog.warn("Unable to process expunge on resource {}", pid); return; } - } } @Nonnull @Override - public <P extends IResourcePersistentId> DeleteMethodOutcome deletePidList(String theUrl, Collection<P> theResourceIds, DeleteConflictList theDeleteConflicts, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) { + public <P extends IResourcePersistentId> DeleteMethodOutcome deletePidList( + String theUrl, + Collection<P>
    theResourceIds, + DeleteConflictList theDeleteConflicts, + RequestDetails theRequestDetails, + TransactionDetails theTransactionDetails) { StopWatch w = new StopWatch(); TransactionDetails transactionDetails = new TransactionDetails(); List deletedResources = new ArrayList<>(); - List> resolvedIds = theResourceIds - .stream() - .map(t -> (IResourcePersistentId) t) - .collect(Collectors.toList()); + List> resolvedIds = + theResourceIds.stream().map(t -> (IResourcePersistentId) t).collect(Collectors.toList()); mySystemDao.preFetchResources(resolvedIds, false); for (P pid : theResourceIds) { @@ -796,13 +946,14 @@ public abstract class BaseHapiFhirResourceDao extends B // Notify IServerOperationInterceptors about pre-action call HookParams hooks = new HookParams() - .add(IBaseResource.class, resourceToDelete) - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - .add(TransactionDetails.class, transactionDetails); + .add(IBaseResource.class, resourceToDelete) + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(TransactionDetails.class, transactionDetails); doCallHooks(transactionDetails, theRequestDetails, Pointcut.STORAGE_PRESTORAGE_RESOURCE_DELETED, hooks); - myDeleteConflictService.validateOkToDelete(theDeleteConflicts, entity, false, theRequestDetails, transactionDetails); + myDeleteConflictService.validateOkToDelete( + theDeleteConflicts, entity, false, theRequestDetails, transactionDetails); // Perform delete @@ -816,27 +967,46 @@ public abstract class BaseHapiFhirResourceDao extends B @Override public void beforeCommit(boolean readOnly) { HookParams hookParams = new HookParams() - .add(IBaseResource.class, resourceToDelete) - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - .add(TransactionDetails.class, transactionDetails) - .add(InterceptorInvocationTimingEnum.class, transactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED)); - doCallHooks(transactionDetails, theRequestDetails, Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED, hookParams); + .add(IBaseResource.class, resourceToDelete) + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(TransactionDetails.class, transactionDetails) + .add( + InterceptorInvocationTimingEnum.class, + transactionDetails.getInvocationTiming( + Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED)); + doCallHooks( + transactionDetails, + theRequestDetails, + Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED, + hookParams); } }); } IBaseOperationOutcome oo; if (deletedResources.isEmpty()) { - String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "unableToDeleteNotFound", theUrl); - oo = createOperationOutcome(OO_SEVERITY_WARN, msg, "not-found", StorageResponseCodeEnum.SUCCESSFUL_DELETE_NOT_FOUND); + String msg = getContext() + .getLocalizer() + .getMessageSanitized(BaseStorageDao.class, "unableToDeleteNotFound", theUrl); + oo = createOperationOutcome( + OO_SEVERITY_WARN, msg, "not-found", StorageResponseCodeEnum.SUCCESSFUL_DELETE_NOT_FOUND); } else { - String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulDeletes", deletedResources.size()); - msg += " " + getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulTimingSuffix", w.getMillis()); + String msg = getContext() + .getLocalizer() + 
.getMessageSanitized(BaseStorageDao.class, "successfulDeletes", deletedResources.size()); + msg += " " + + getContext() + .getLocalizer() + .getMessageSanitized(BaseStorageDao.class, "successfulTimingSuffix", w.getMillis()); oo = createInfoOperationOutcome(msg, StorageResponseCodeEnum.SUCCESSFUL_DELETE); } - ourLog.debug("Processed delete on {} (matched {} resource(s)) in {}ms", theUrl, deletedResources.size(), w.getMillis()); + ourLog.debug( + "Processed delete on {} (matched {} resource(s)) in {}ms", + theUrl, + deletedResources.size(), + w.getMillis()); theTransactionDetails.addDeletedResourceIds(theResourceIds); @@ -846,7 +1016,8 @@ public abstract class BaseHapiFhirResourceDao extends B return retVal; } - protected ResourceTable updateEntityForDelete(RequestDetails theRequest, TransactionDetails theTransactionDetails, ResourceTable theEntity) { + protected ResourceTable updateEntityForDelete( + RequestDetails theRequest, TransactionDetails theTransactionDetails, ResourceTable theEntity) { myResourceSearchUrlSvc.deleteByResId(theEntity.getId()); Date updateTime = new Date(); return updateEntity(theRequest, null, theEntity, updateTime, true, true, theTransactionDetails, false, true); @@ -865,7 +1036,11 @@ public abstract class BaseHapiFhirResourceDao extends B } } - private void doMetaAdd(MT theMetaAdd, BaseHasResource theEntity, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) { + private void doMetaAdd( + MT theMetaAdd, + BaseHasResource theEntity, + RequestDetails theRequestDetails, + TransactionDetails theTransactionDetails) { IBaseResource oldVersion = myJpaStorageResourceParser.toResource(theEntity, false); List tags = toTagList(theMetaAdd); @@ -873,22 +1048,27 @@ public abstract class BaseHapiFhirResourceDao extends B boolean hasTag = false; for (BaseTag next : new ArrayList<>(theEntity.getTags())) { - if (Objects.equals(next.getTag().getTagType(), nextDef.getTagType()) && - Objects.equals(next.getTag().getSystem(), nextDef.getSystem()) && - Objects.equals(next.getTag().getCode(), nextDef.getCode()) && - Objects.equals(next.getTag().getVersion(), nextDef.getVersion()) && - Objects.equals(next.getTag().getUserSelected(), nextDef.getUserSelected())) { + if (Objects.equals(next.getTag().getTagType(), nextDef.getTagType()) + && Objects.equals(next.getTag().getSystem(), nextDef.getSystem()) + && Objects.equals(next.getTag().getCode(), nextDef.getCode()) + && Objects.equals(next.getTag().getVersion(), nextDef.getVersion()) + && Objects.equals(next.getTag().getUserSelected(), nextDef.getUserSelected())) { hasTag = true; break; } } - if (!hasTag) { theEntity.setHasTags(true); - TagDefinition def = getTagOrNull(theTransactionDetails, nextDef.getTagType(), nextDef.getSystem(), - nextDef.getCode(), nextDef.getDisplay(), nextDef.getVersion(), nextDef.getUserSelected()); + TagDefinition def = getTagOrNull( + theTransactionDetails, + nextDef.getTagType(), + nextDef.getSystem(), + nextDef.getCode(), + nextDef.getDisplay(), + nextDef.getVersion(), + nextDef.getUserSelected()); if (def != null) { BaseTag newEntity = theEntity.addTag(def); if (newEntity.getTagId() == null) { @@ -905,26 +1085,31 @@ public abstract class BaseHapiFhirResourceDao extends B // Interceptor call: STORAGE_PRECOMMIT_RESOURCE_UPDATED IBaseResource newVersion = myJpaStorageResourceParser.toResource(theEntity, false); HookParams preStorageParams = new HookParams() - .add(IBaseResource.class, oldVersion) - .add(IBaseResource.class, newVersion) - .add(RequestDetails.class, theRequestDetails) - 
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - .add(TransactionDetails.class, theTransactionDetails); + .add(IBaseResource.class, oldVersion) + .add(IBaseResource.class, newVersion) + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(TransactionDetails.class, theTransactionDetails); myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED, preStorageParams); // Interceptor call: STORAGE_PRECOMMIT_RESOURCE_UPDATED HookParams preCommitParams = new HookParams() - .add(IBaseResource.class, oldVersion) - .add(IBaseResource.class, newVersion) - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - .add(TransactionDetails.class, theTransactionDetails) - .add(InterceptorInvocationTimingEnum.class, theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED)); + .add(IBaseResource.class, oldVersion) + .add(IBaseResource.class, newVersion) + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(TransactionDetails.class, theTransactionDetails) + .add( + InterceptorInvocationTimingEnum.class, + theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED)); myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, preCommitParams); - } - private void doMetaDelete(MT theMetaDel, BaseHasResource theEntity, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) { + private void doMetaDelete( + MT theMetaDel, + BaseHasResource theEntity, + RequestDetails theRequestDetails, + TransactionDetails theTransactionDetails) { // todo mb update hibernate search index if we are storing resources - it assumes inline tags. 
IBaseResource oldVersion = myJpaStorageResourceParser.toResource(theEntity, false); @@ -933,9 +1118,9 @@ public abstract class BaseHapiFhirResourceDao extends B for (TagDefinition nextDef : tags) { for (BaseTag next : new ArrayList(theEntity.getTags())) { - if (ObjectUtil.equals(next.getTag().getTagType(), nextDef.getTagType()) && - ObjectUtil.equals(next.getTag().getSystem(), nextDef.getSystem()) && - ObjectUtil.equals(next.getTag().getCode(), nextDef.getCode())) { + if (ObjectUtil.equals(next.getTag().getTagType(), nextDef.getTagType()) + && ObjectUtil.equals(next.getTag().getSystem(), nextDef.getSystem()) + && ObjectUtil.equals(next.getTag().getCode(), nextDef.getCode())) { myEntityManager.remove(next); theEntity.getTags().remove(next); } @@ -951,23 +1136,24 @@ public abstract class BaseHapiFhirResourceDao extends B // Interceptor call: STORAGE_PRECOMMIT_RESOURCE_UPDATED IBaseResource newVersion = myJpaStorageResourceParser.toResource(theEntity, false); HookParams preStorageParams = new HookParams() - .add(IBaseResource.class, oldVersion) - .add(IBaseResource.class, newVersion) - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - .add(TransactionDetails.class, theTransactionDetails); + .add(IBaseResource.class, oldVersion) + .add(IBaseResource.class, newVersion) + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(TransactionDetails.class, theTransactionDetails); myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED, preStorageParams); HookParams preCommitParams = new HookParams() - .add(IBaseResource.class, oldVersion) - .add(IBaseResource.class, newVersion) - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - .add(TransactionDetails.class, theTransactionDetails) - .add(InterceptorInvocationTimingEnum.class, theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED)); + .add(IBaseResource.class, oldVersion) + .add(IBaseResource.class, newVersion) + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(TransactionDetails.class, theTransactionDetails) + .add( + InterceptorInvocationTimingEnum.class, + theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED)); myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, preCommitParams); - } private void validateExpungeEnabled() { @@ -984,7 +1170,8 @@ public abstract class BaseHapiFhirResourceDao extends B } @Override - public ExpungeOutcome forceExpungeInExistingTransaction(IIdType theId, ExpungeOptions theExpungeOptions, RequestDetails theRequest) { + public ExpungeOutcome forceExpungeInExistingTransaction( + IIdType theId, ExpungeOptions theExpungeOptions, RequestDetails theRequest) { TransactionTemplate txTemplate = new TransactionTemplate(myPlatformTransactionManager); BaseHasResource entity = txTemplate.execute(t -> readEntity(theId, theRequest)); @@ -993,16 +1180,26 @@ public abstract class BaseHapiFhirResourceDao extends B if (theId.hasVersionIdPart()) { BaseHasResource currentVersion; currentVersion = txTemplate.execute(t -> readEntity(theId.toVersionless(), theRequest)); - Validate.notNull(currentVersion, "Current version of resource with ID %s not found in database", theId.toVersionless()); + Validate.notNull( + currentVersion, + "Current version of resource with 
ID %s not found in database", + theId.toVersionless()); if (entity.getVersion() == currentVersion.getVersion()) { - throw new PreconditionFailedException(Msg.code(969) + "Can not perform version-specific expunge of resource " + theId.toUnqualified().getValue() + " as this is the current version"); + throw new PreconditionFailedException( + Msg.code(969) + "Can not perform version-specific expunge of resource " + + theId.toUnqualified().getValue() + " as this is the current version"); } - return myExpungeService.expunge(getResourceName(), JpaPid.fromIdAndVersion(entity.getResourceId(), entity.getVersion()), theExpungeOptions, theRequest); + return myExpungeService.expunge( + getResourceName(), + JpaPid.fromIdAndVersion(entity.getResourceId(), entity.getVersion()), + theExpungeOptions, + theRequest); } - return myExpungeService.expunge(getResourceName(), JpaPid.fromId(entity.getResourceId()), theExpungeOptions, theRequest); + return myExpungeService.expunge( + getResourceName(), JpaPid.fromId(entity.getResourceId()), theExpungeOptions, theRequest); } @Override @@ -1034,13 +1231,15 @@ public abstract class BaseHapiFhirResourceDao extends B public IBundleProvider history(Date theSince, Date theUntil, Integer theOffset, RequestDetails theRequestDetails) { StopWatch w = new StopWatch(); ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forHistory(myResourceName, null); - RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, details); + RequestPartitionId requestPartitionId = + myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, details); IBundleProvider retVal = myTransactionService - .withRequest(theRequestDetails) - .withRequestPartitionId(requestPartitionId) - .execute(() -> { - return myPersistedJpaBundleProviderFactory.history(theRequestDetails, myResourceName, null, theSince, theUntil, theOffset, requestPartitionId); - }); + .withRequest(theRequestDetails) + .withRequestPartitionId(requestPartitionId) + .execute(() -> { + return myPersistedJpaBundleProviderFactory.history( + theRequestDetails, myResourceName, null, theSince, theUntil, theOffset, requestPartitionId); + }); ourLog.debug("Processed history on {} in {}ms", myResourceName, w.getMillisAndRestart()); return retVal; } @@ -1049,46 +1248,60 @@ public abstract class BaseHapiFhirResourceDao extends B * @deprecated Use {@link #history(IIdType, HistorySearchDateRangeParam, RequestDetails)} instead */ @Override - public IBundleProvider history(final IIdType theId, final Date theSince, Date theUntil, Integer theOffset, RequestDetails theRequest) { + public IBundleProvider history( + final IIdType theId, final Date theSince, Date theUntil, Integer theOffset, RequestDetails theRequest) { StopWatch w = new StopWatch(); ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forHistory(myResourceName, theId); - RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequest, details); + RequestPartitionId requestPartitionId = + myRequestPartitionHelperService.determineReadPartitionForRequest(theRequest, details); IBundleProvider retVal = myTransactionService - .withRequest(theRequest) - .withRequestPartitionId(requestPartitionId) - .execute(() -> { - IIdType id = theId.withResourceType(myResourceName).toUnqualifiedVersionless(); - BaseHasResource entity = readEntity(id, true, theRequest, requestPartitionId); + .withRequest(theRequest) + 
.withRequestPartitionId(requestPartitionId) + .execute(() -> { + IIdType id = theId.withResourceType(myResourceName).toUnqualifiedVersionless(); + BaseHasResource entity = readEntity(id, true, theRequest, requestPartitionId); - return myPersistedJpaBundleProviderFactory.history(theRequest, myResourceName, entity.getId(), theSince, theUntil, theOffset, requestPartitionId); - }); + return myPersistedJpaBundleProviderFactory.history( + theRequest, + myResourceName, + entity.getId(), + theSince, + theUntil, + theOffset, + requestPartitionId); + }); ourLog.debug("Processed history on {} in {}ms", theId, w.getMillisAndRestart()); return retVal; } @Override - public IBundleProvider history(final IIdType theId, final HistorySearchDateRangeParam theHistorySearchDateRangeParam, - RequestDetails theRequest) { + public IBundleProvider history( + final IIdType theId, + final HistorySearchDateRangeParam theHistorySearchDateRangeParam, + RequestDetails theRequest) { StopWatch w = new StopWatch(); ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forHistory(myResourceName, theId); - RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequest, details); + RequestPartitionId requestPartitionId = + myRequestPartitionHelperService.determineReadPartitionForRequest(theRequest, details); IBundleProvider retVal = myTransactionService - .withRequest(theRequest) - .withRequestPartitionId(requestPartitionId) - .execute(() -> { - IIdType id = theId.withResourceType(myResourceName).toUnqualifiedVersionless(); - BaseHasResource entity = readEntity(id, true, theRequest, requestPartitionId); + .withRequest(theRequest) + .withRequestPartitionId(requestPartitionId) + .execute(() -> { + IIdType id = theId.withResourceType(myResourceName).toUnqualifiedVersionless(); + BaseHasResource entity = readEntity(id, true, theRequest, requestPartitionId); - return myPersistedJpaBundleProviderFactory.history(theRequest, myResourceName, entity.getId(), - theHistorySearchDateRangeParam.getLowerBoundAsInstant(), - theHistorySearchDateRangeParam.getUpperBoundAsInstant(), - theHistorySearchDateRangeParam.getOffset(), - theHistorySearchDateRangeParam.getHistorySearchType(), - requestPartitionId - ); - }); + return myPersistedJpaBundleProviderFactory.history( + theRequest, + myResourceName, + entity.getId(), + theHistorySearchDateRangeParam.getLowerBoundAsInstant(), + theHistorySearchDateRangeParam.getUpperBoundAsInstant(), + theHistorySearchDateRangeParam.getOffset(), + theHistorySearchDateRangeParam.getHistorySearchType(), + requestPartitionId); + }); ourLog.debug("Processed history on {} in {}ms", theId, w.getMillisAndRestart()); return retVal; @@ -1103,7 +1316,8 @@ public abstract class BaseHapiFhirResourceDao extends B return pagingProvider != null; } - protected void requestReindexForRelatedResources(Boolean theCurrentlyReindexing, List theBase, RequestDetails theRequestDetails) { + protected void requestReindexForRelatedResources( + Boolean theCurrentlyReindexing, List theBase, RequestDetails theRequestDetails) { // Avoid endless loops if (Boolean.TRUE.equals(theCurrentlyReindexing) || shouldSkipReindex(theRequestDetails)) { return; @@ -1117,8 +1331,10 @@ public abstract class BaseHapiFhirResourceDao extends B addAllResourcesTypesToReindex(theBase, theRequestDetails, params); } - ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_REINDEX); - RequestPartitionId requestPartition = 
myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, details); + ReadPartitionIdRequestDetails details = + ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_REINDEX); + RequestPartitionId requestPartition = + myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, details); params.setRequestPartitionId(requestPartition); JobInstanceStartRequest request = new JobInstanceStartRequest(); @@ -1127,7 +1343,6 @@ public abstract class BaseHapiFhirResourceDao extends B myJobCoordinator.startInstance(theRequestDetails, request); ourLog.debug("Started reindex job with parameters {}", params); - } mySearchParamRegistry.requestRefresh(); @@ -1141,24 +1356,23 @@ public abstract class BaseHapiFhirResourceDao extends B return Boolean.parseBoolean(shouldSkip.toString()); } - private void addAllResourcesTypesToReindex(List theBase, RequestDetails theRequestDetails, ReindexJobParameters params) { - theBase - .stream() - .map(t -> t + "?") - .map(url -> myUrlPartitioner.partitionUrl(url, theRequestDetails)) - .forEach(params::addPartitionedUrl); + private void addAllResourcesTypesToReindex( + List theBase, RequestDetails theRequestDetails, ReindexJobParameters params) { + theBase.stream() + .map(t -> t + "?") + .map(url -> myUrlPartitioner.partitionUrl(url, theRequestDetails)) + .forEach(params::addPartitionedUrl); } private boolean isCommonSearchParam(List theBase) { // If the base contains the special resource "Resource", this is a common SP that applies to all resources - return theBase.stream() - .map(String::toLowerCase) - .anyMatch(BASE_RESOURCE_NAME::equals); + return theBase.stream().map(String::toLowerCase).anyMatch(BASE_RESOURCE_NAME::equals); } @Override @Transactional - public MT metaAddOperation(IIdType theResourceId, MT theMetaAdd, RequestDetails theRequest) { + public MT metaAddOperation( + IIdType theResourceId, MT theMetaAdd, RequestDetails theRequest) { TransactionDetails transactionDetails = new TransactionDetails(); StopWatch w = new StopWatch(); @@ -1174,7 +1388,8 @@ public abstract class BaseHapiFhirResourceDao extends B doMetaAdd(theMetaAdd, latestVersion, theRequest, transactionDetails); // Also update history entry - ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(entity.getId(), entity.getVersion()); + ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance( + entity.getId(), entity.getVersion()); doMetaAdd(theMetaAdd, history, theRequest, transactionDetails); } @@ -1187,7 +1402,8 @@ public abstract class BaseHapiFhirResourceDao extends B @Override @Transactional - public MT metaDeleteOperation(IIdType theResourceId, MT theMetaDel, RequestDetails theRequest) { + public MT metaDeleteOperation( + IIdType theResourceId, MT theMetaDel, RequestDetails theRequest) { TransactionDetails transactionDetails = new TransactionDetails(); StopWatch w = new StopWatch(); @@ -1197,13 +1413,15 @@ public abstract class BaseHapiFhirResourceDao extends B } ResourceTable latestVersion = readEntityLatestVersion(theResourceId, theRequest, transactionDetails); - boolean nonVersionedTags = myStorageSettings.getTagStorageMode() != JpaStorageSettings.TagStorageModeEnum.VERSIONED; + boolean nonVersionedTags = + myStorageSettings.getTagStorageMode() != JpaStorageSettings.TagStorageModeEnum.VERSIONED; if (latestVersion.getVersion() != entity.getVersion() || nonVersionedTags) { doMetaDelete(theMetaDel, entity, theRequest, 
transactionDetails); } else { doMetaDelete(theMetaDel, latestVersion, theRequest, transactionDetails); // Also update history entry - ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(entity.getId(), entity.getVersion()); + ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance( + entity.getId(), entity.getVersion()); doMetaDelete(theMetaDel, history, theRequest, transactionDetails); } @@ -1233,7 +1451,8 @@ public abstract class BaseHapiFhirResourceDao extends B @Override @Transactional public MT metaGetOperation(Class theType, RequestDetails theRequestDetails) { - String sql = "SELECT d FROM TagDefinition d WHERE d.myId IN (SELECT DISTINCT t.myTagId FROM ResourceTag t WHERE t.myResourceType = :res_type)"; + String sql = + "SELECT d FROM TagDefinition d WHERE d.myId IN (SELECT DISTINCT t.myTagId FROM ResourceTag t WHERE t.myResourceType = :res_type)"; TypedQuery q = myEntityManager.createQuery(sql, TagDefinition.class); q.setParameter("res_type", myResourceName); List tagDefinitions = q.getResultList(); @@ -1315,16 +1534,18 @@ public abstract class BaseHapiFhirResourceDao extends B validateResourceTypeAndThrowInvalidRequestException(theId); TransactionDetails transactionDetails = new TransactionDetails(); - RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForRead(theRequest, myResourceName, theId); + RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForRead( + theRequest, myResourceName, theId); return myTransactionService - .withRequest(theRequest) - .withTransactionDetails(transactionDetails) - .withRequestPartitionId(requestPartitionId) - .execute(() -> doReadInTransaction(theId, theRequest, theDeletedOk, requestPartitionId)); + .withRequest(theRequest) + .withTransactionDetails(transactionDetails) + .withRequestPartitionId(requestPartitionId) + .execute(() -> doReadInTransaction(theId, theRequest, theDeletedOk, requestPartitionId)); } - private T doReadInTransaction(IIdType theId, RequestDetails theRequest, boolean theDeletedOk, RequestPartitionId theRequestPartitionId) { + private T doReadInTransaction( + IIdType theId, RequestDetails theRequest, boolean theDeletedOk, RequestPartitionId theRequestPartitionId) { assert TransactionSynchronizationManager.isActualTransactionActive(); StopWatch w = new StopWatch(); @@ -1338,7 +1559,7 @@ public abstract class BaseHapiFhirResourceDao extends B throw createResourceGoneException(entity); } } - //If the resolved fhir model is null, we don't need to run pre-access over or pre-show over it. + // If the resolved fhir model is null, we don't need to run pre-access over or pre-show over it. 
if (retVal != null) { invokeStoragePreAccessResources(theId, theRequest, retVal); retVal = invokeStoragePreShowResources(theRequest, retVal); @@ -1359,16 +1580,21 @@ public abstract class BaseHapiFhirResourceDao extends B @Override public BaseHasResource readEntity(IIdType theId, RequestDetails theRequest) { - RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForRead(theRequest, myResourceName, theId); + RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForRead( + theRequest, myResourceName, theId); return myTransactionService - .withRequest(theRequest) - .withRequestPartitionId(requestPartitionId) - .execute(() -> readEntity(theId, true, theRequest, requestPartitionId)); + .withRequest(theRequest) + .withRequestPartitionId(requestPartitionId) + .execute(() -> readEntity(theId, true, theRequest, requestPartitionId)); } @SuppressWarnings("unchecked") @Override - public ReindexOutcome reindex(IResourcePersistentId thePid, ReindexParameters theReindexParameters, RequestDetails theRequest, TransactionDetails theTransactionDetails) { + public ReindexOutcome reindex( + IResourcePersistentId thePid, + ReindexParameters theReindexParameters, + RequestDetails theRequest, + TransactionDetails theTransactionDetails) { ReindexOutcome retVal = new ReindexOutcome(); JpaPid jpaPid = (JpaPid) thePid; @@ -1400,7 +1626,8 @@ public abstract class BaseHapiFhirResourceDao extends B } @SuppressWarnings("unchecked") - private void reindexSearchParameters(ResourceTable entity, ReindexOutcome theReindexOutcome, TransactionDetails theTransactionDetails) { + private void reindexSearchParameters( + ResourceTable entity, ReindexOutcome theReindexOutcome, TransactionDetails theTransactionDetails) { try { T resource = (T) myJpaStorageResourceParser.toResource(entity, false); reindexSearchParameters(resource, entity, theTransactionDetails); @@ -1423,27 +1650,31 @@ public abstract class BaseHapiFhirResourceDao extends B reindexSearchParameters(theResource, theEntity, transactionDetails); } - private void reindexSearchParameters(T theResource, IBasePersistedResource theEntity, TransactionDetails transactionDetails) { + private void reindexSearchParameters( + T theResource, IBasePersistedResource theEntity, TransactionDetails transactionDetails) { ourLog.debug("Indexing resource {} - PID {}", theEntity.getIdDt().getValue(), theEntity.getPersistentId()); if (theResource != null) { CURRENTLY_REINDEXING.put(theResource, Boolean.TRUE); } - ResourceTable resourceTable = updateEntity(null, theResource, theEntity, theEntity.getDeleted(), true, false, transactionDetails, true, false); + ResourceTable resourceTable = updateEntity( + null, theResource, theEntity, theEntity.getDeleted(), true, false, transactionDetails, true, false); if (theResource != null) { CURRENTLY_REINDEXING.put(theResource, null); } } - - private void reindexOptimizeStorage(ResourceTable entity, ReindexParameters.OptimizeStorageModeEnum theOptimizeStorageMode) { + private void reindexOptimizeStorage( + ResourceTable entity, ReindexParameters.OptimizeStorageModeEnum theOptimizeStorageMode) { ResourceHistoryTable historyEntity = entity.getCurrentVersionEntity(); if (historyEntity != null) { reindexOptimizeStorageHistoryEntity(entity, historyEntity); if (theOptimizeStorageMode == ReindexParameters.OptimizeStorageModeEnum.ALL_VERSIONS) { int pageSize = 100; - for (int page = 0; ((long)page * pageSize) < entity.getVersion(); page++) { - Slice historyEntities = 
myResourceHistoryTableDao.findForResourceIdAndReturnEntitiesAndFetchProvenance(PageRequest.of(page, pageSize), entity.getId(), historyEntity.getVersion()); + for (int page = 0; ((long) page * pageSize) < entity.getVersion(); page++) { + Slice historyEntities = + myResourceHistoryTableDao.findForResourceIdAndReturnEntitiesAndFetchProvenance( + PageRequest.of(page, pageSize), entity.getId(), historyEntity.getVersion()); for (ResourceHistoryTable next : historyEntities) { reindexOptimizeStorageHistoryEntity(entity, next); } @@ -1454,12 +1685,17 @@ public abstract class BaseHapiFhirResourceDao extends B private void reindexOptimizeStorageHistoryEntity(ResourceTable entity, ResourceHistoryTable historyEntity) { boolean changed = false; - if (historyEntity.getEncoding() == ResourceEncodingEnum.JSONC || historyEntity.getEncoding() == ResourceEncodingEnum.JSON) { + if (historyEntity.getEncoding() == ResourceEncodingEnum.JSONC + || historyEntity.getEncoding() == ResourceEncodingEnum.JSON) { byte[] resourceBytes = historyEntity.getResource(); if (resourceBytes != null) { String resourceText = decodeResource(resourceBytes, historyEntity.getEncoding()); - if (myStorageSettings.getInlineResourceTextBelowSize() > 0 && resourceText.length() < myStorageSettings.getInlineResourceTextBelowSize()) { - ourLog.debug("Storing text of resource {} version {} as inline VARCHAR", entity.getResourceId(), historyEntity.getVersion()); + if (myStorageSettings.getInlineResourceTextBelowSize() > 0 + && resourceText.length() < myStorageSettings.getInlineResourceTextBelowSize()) { + ourLog.debug( + "Storing text of resource {} version {} as inline VARCHAR", + entity.getResourceId(), + historyEntity.getVersion()); historyEntity.setResourceTextVc(resourceText); historyEntity.setResource(null); historyEntity.setEncoding(ResourceEncodingEnum.JSON); @@ -1479,11 +1715,16 @@ public abstract class BaseHapiFhirResourceDao extends B } } - private BaseHasResource readEntity(IIdType theId, boolean theCheckForForcedId, RequestDetails theRequest, RequestPartitionId requestPartitionId) { + private BaseHasResource readEntity( + IIdType theId, + boolean theCheckForForcedId, + RequestDetails theRequest, + RequestPartitionId requestPartitionId) { validateResourceTypeAndThrowInvalidRequestException(theId); BaseHasResource entity; - JpaPid pid = myIdHelperService.resolveResourcePersistentIds(requestPartitionId, getResourceName(), theId.getIdPart()); + JpaPid pid = myIdHelperService.resolveResourcePersistentIds( + requestPartitionId, getResourceName(), theId.getIdPart()); Set readPartitions = null; if (requestPartitionId.isAllPartitions()) { entity = myEntityManager.find(ResourceTable.class, pid.getId()); @@ -1491,16 +1732,25 @@ public abstract class BaseHapiFhirResourceDao extends B readPartitions = myRequestPartitionHelperService.toReadPartitions(requestPartitionId); if (readPartitions.size() == 1) { if (readPartitions.contains(null)) { - entity = myResourceTableDao.readByPartitionIdNull(pid.getId()).orElse(null); + entity = myResourceTableDao + .readByPartitionIdNull(pid.getId()) + .orElse(null); } else { - entity = myResourceTableDao.readByPartitionId(readPartitions.iterator().next(), pid.getId()).orElse(null); + entity = myResourceTableDao + .readByPartitionId(readPartitions.iterator().next(), pid.getId()) + .orElse(null); } } else { if (readPartitions.contains(null)) { - List readPartitionsWithoutNull = readPartitions.stream().filter(t -> t != null).collect(Collectors.toList()); - entity = 
myResourceTableDao.readByPartitionIdsOrNull(readPartitionsWithoutNull, pid.getId()).orElse(null); + List readPartitionsWithoutNull = + readPartitions.stream().filter(t -> t != null).collect(Collectors.toList()); + entity = myResourceTableDao + .readByPartitionIdsOrNull(readPartitionsWithoutNull, pid.getId()) + .orElse(null); } else { - entity = myResourceTableDao.readByPartitionIds(readPartitions, pid.getId()).orElse(null); + entity = myResourceTableDao + .readByPartitionIds(readPartitions, pid.getId()) + .orElse(null); } } } @@ -1508,7 +1758,10 @@ public abstract class BaseHapiFhirResourceDao extends B // Verify that the resource is for the correct partition if (entity != null && readPartitions != null && entity.getPartitionId() != null) { if (!readPartitions.contains(entity.getPartitionId().getPartitionId())) { - ourLog.debug("Performing a read for PartitionId={} but entity has partition: {}", requestPartitionId, entity.getPartitionId()); + ourLog.debug( + "Performing a read for PartitionId={} but entity has partition: {}", + requestPartitionId, + entity.getPartitionId()); entity = null; } } @@ -1519,7 +1772,14 @@ public abstract class BaseHapiFhirResourceDao extends B if (theId.hasVersionIdPart()) { if (theId.isVersionIdPartValidLong() == false) { - throw new ResourceNotFoundException(Msg.code(978) + getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "invalidVersion", theId.getVersionIdPart(), theId.toUnqualifiedVersionless())); + throw new ResourceNotFoundException(Msg.code(978) + + getContext() + .getLocalizer() + .getMessageSanitized( + BaseStorageDao.class, + "invalidVersion", + theId.getVersionIdPart(), + theId.toUnqualifiedVersionless())); } if (entity.getVersion() != theId.getVersionIdPartAsLong()) { entity = null; @@ -1528,14 +1788,23 @@ public abstract class BaseHapiFhirResourceDao extends B if (entity == null) { if (theId.hasVersionIdPart()) { - TypedQuery q = myEntityManager.createQuery("SELECT t from ResourceHistoryTable t WHERE t.myResourceId = :RID AND t.myResourceType = :RTYP AND t.myResourceVersion = :RVER", ResourceHistoryTable.class); + TypedQuery q = myEntityManager.createQuery( + "SELECT t from ResourceHistoryTable t WHERE t.myResourceId = :RID AND t.myResourceType = :RTYP AND t.myResourceVersion = :RVER", + ResourceHistoryTable.class); q.setParameter("RID", pid.getId()); q.setParameter("RTYP", myResourceName); q.setParameter("RVER", theId.getVersionIdPartAsLong()); try { entity = q.getSingleResult(); } catch (NoResultException e) { - throw new ResourceNotFoundException(Msg.code(979) + getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "invalidVersion", theId.getVersionIdPart(), theId.toUnqualifiedVersionless())); + throw new ResourceNotFoundException(Msg.code(979) + + getContext() + .getLocalizer() + .getMessageSanitized( + BaseStorageDao.class, + "invalidVersion", + theId.getVersionIdPart(), + theId.toUnqualifiedVersionless())); } } } @@ -1550,20 +1819,28 @@ public abstract class BaseHapiFhirResourceDao extends B } @Override - protected IBasePersistedResource readEntityLatestVersion(IResourcePersistentId thePersistentId, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) { + protected IBasePersistedResource readEntityLatestVersion( + IResourcePersistentId thePersistentId, + RequestDetails theRequestDetails, + TransactionDetails theTransactionDetails) { JpaPid jpaPid = (JpaPid) thePersistentId; return myEntityManager.find(ResourceTable.class, jpaPid.getId()); } @Override @Nonnull - protected 
ResourceTable readEntityLatestVersion(IIdType theId, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) { - RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForRead(theRequestDetails, getResourceName(), theId); + protected ResourceTable readEntityLatestVersion( + IIdType theId, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) { + RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForRead( + theRequestDetails, getResourceName(), theId); return readEntityLatestVersion(theId, requestPartitionId, theTransactionDetails); } @Nonnull - private ResourceTable readEntityLatestVersion(IIdType theId, @Nonnull RequestPartitionId theRequestPartitionId, TransactionDetails theTransactionDetails) { + private ResourceTable readEntityLatestVersion( + IIdType theId, + @Nonnull RequestPartitionId theRequestPartitionId, + TransactionDetails theTransactionDetails) { validateResourceTypeAndThrowInvalidRequestException(theId); JpaPid persistentId = null; @@ -1577,7 +1854,8 @@ public abstract class BaseHapiFhirResourceDao extends B } if (persistentId == null) { - persistentId = myIdHelperService.resolveResourcePersistentIds(theRequestPartitionId, getResourceName(), theId.getIdPart()); + persistentId = myIdHelperService.resolveResourcePersistentIds( + theRequestPartitionId, getResourceName(), theId.getIdPart()); } ResourceTable entity = myEntityManager.find(ResourceTable.class, persistentId.getId()); @@ -1597,7 +1875,8 @@ public abstract class BaseHapiFhirResourceDao extends B @Transactional @Override - public void removeTag(IIdType theId, TagTypeEnum theTagType, String theScheme, String theTerm, RequestDetails theRequest) { + public void removeTag( + IIdType theId, TagTypeEnum theTagType, String theScheme, String theTerm, RequestDetails theRequest) { StopWatch w = new StopWatch(); BaseHasResource entity = readEntity(theId, theRequest); if (entity == null) { @@ -1605,9 +1884,9 @@ public abstract class BaseHapiFhirResourceDao extends B } for (BaseTag next : new ArrayList<>(entity.getTags())) { - if (ObjectUtil.equals(next.getTag().getTagType(), theTagType) && - ObjectUtil.equals(next.getTag().getSystem(), theScheme) && - ObjectUtil.equals(next.getTag().getCode(), theTerm)) { + if (ObjectUtil.equals(next.getTag().getTagType(), theTagType) + && ObjectUtil.equals(next.getTag().getSystem(), theScheme) + && ObjectUtil.equals(next.getTag().getCode(), theTerm)) { myEntityManager.remove(next); entity.getTags().remove(next); } @@ -1619,7 +1898,12 @@ public abstract class BaseHapiFhirResourceDao extends B myEntityManager.merge(entity); - ourLog.debug("Processed remove tag {}/{} on {} in {}ms", theScheme, theTerm, theId.getValue(), w.getMillisAndRestart()); + ourLog.debug( + "Processed remove tag {}/{} on {} in {}ms", + theScheme, + theTerm, + theId.getValue(), + w.getMillisAndRestart()); } /** @@ -1639,13 +1923,16 @@ public abstract class BaseHapiFhirResourceDao extends B @Transactional(propagation = Propagation.SUPPORTS) @Override - public IBundleProvider search(final SearchParameterMap theParams, RequestDetails theRequest, HttpServletResponse theServletResponse) { + public IBundleProvider search( + final SearchParameterMap theParams, RequestDetails theRequest, HttpServletResponse theServletResponse) { if (theParams.getSearchContainedMode() == SearchContainedModeEnum.BOTH) { throw new MethodNotAllowedException(Msg.code(983) + "Contained mode 'both' is not currently 
supported"); } - if (theParams.getSearchContainedMode() != SearchContainedModeEnum.FALSE && !myStorageSettings.isIndexOnContainedResources()) { - throw new MethodNotAllowedException(Msg.code(984) + "Searching with _contained mode enabled is not enabled on this server"); + if (theParams.getSearchContainedMode() != SearchContainedModeEnum.FALSE + && !myStorageSettings.isIndexOnContainedResources()) { + throw new MethodNotAllowedException( + Msg.code(984) + "Searching with _contained mode enabled is not enabled on this server"); } translateListSearchParams(theParams); @@ -1657,8 +1944,11 @@ public abstract class BaseHapiFhirResourceDao extends B cacheControlDirective.parse(theRequest.getHeaders(Constants.HEADER_CACHE_CONTROL)); } - RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType(theRequest, getResourceName(), theParams, null); - IBundleProvider retVal = mySearchCoordinatorSvc.registerSearch(this, theParams, getResourceName(), cacheControlDirective, theRequest, requestPartitionId); + RequestPartitionId requestPartitionId = + myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType( + theRequest, getResourceName(), theParams, null); + IBundleProvider retVal = mySearchCoordinatorSvc.registerSearch( + this, theParams, getResourceName(), cacheControlDirective, theRequest, requestPartitionId); if (retVal instanceof PersistedJpaBundleProvider) { PersistedJpaBundleProvider provider = (PersistedJpaBundleProvider) retVal; @@ -1687,7 +1977,11 @@ public abstract class BaseHapiFhirResourceDao extends B for (List orValues : andOrValues) { List orList = new ArrayList<>(); for (IQueryParameterType value : orValues) { - orList.add(new HasParam("List", ListResource.SP_ITEM, BaseResource.SP_RES_ID, value.getValueAsQueryToken(null))); + orList.add(new HasParam( + "List", + ListResource.SP_ITEM, + BaseResource.SP_RES_ID, + value.getValueAsQueryToken(null))); } hasParamValues.add(orList); } @@ -1702,7 +1996,11 @@ public abstract class BaseHapiFhirResourceDao extends B if (theRequest.isSubRequest()) { Integer max = getStorageSettings().getMaximumSearchResultCountInTransaction(); if (max != null) { - Validate.inclusiveBetween(1, Integer.MAX_VALUE, max, "Maximum search result count in transaction ust be a positive integer"); + Validate.inclusiveBetween( + 1, + Integer.MAX_VALUE, + max, + "Maximum search result count in transaction ust be a positive integer"); theParams.setLoadSynchronousUpTo(getStorageSettings().getMaximumSearchResultCountInTransaction()); } } @@ -1720,7 +2018,11 @@ public abstract class BaseHapiFhirResourceDao extends B if (count != null) { Integer maxPageSize = theRequest.getServer().getMaximumPageSize(); if (maxPageSize != null && count > maxPageSize) { - ourLog.info("Reducing {} from {} to {} which is the maximum allowable page size.", Constants.PARAM_COUNT, count, maxPageSize); + ourLog.info( + "Reducing {} from {} to {} which is the maximum allowable page size.", + Constants.PARAM_COUNT, + count, + maxPageSize); count = maxPageSize; } theParams.setCount(count); @@ -1731,37 +2033,43 @@ public abstract class BaseHapiFhirResourceDao extends B } @Override - public List searchForIds(SearchParameterMap theParams, RequestDetails theRequest, @Nullable IBaseResource theConditionalOperationTargetOrNull) { + public List searchForIds( + SearchParameterMap theParams, + RequestDetails theRequest, + @Nullable IBaseResource theConditionalOperationTargetOrNull) { TransactionDetails transactionDetails = new 
TransactionDetails(); - RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType(theRequest, myResourceName, theParams, theConditionalOperationTargetOrNull); + RequestPartitionId requestPartitionId = + myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType( + theRequest, myResourceName, theParams, theConditionalOperationTargetOrNull); return myTransactionService - .withRequest(theRequest) - .withTransactionDetails(transactionDetails) - .withRequestPartitionId(requestPartitionId) - .execute(() -> { - - if(isNull(theParams.getLoadSynchronousUpTo())){ - theParams.setLoadSynchronousUpTo(myStorageSettings.getInternalSynchronousSearchSize()); - } - - ISearchBuilder builder = mySearchBuilderFactory.newSearchBuilder(this, getResourceName(), getResourceType()); - - List ids = new ArrayList<>(); - - String uuid = UUID.randomUUID().toString(); - - SearchRuntimeDetails searchRuntimeDetails = new SearchRuntimeDetails(theRequest, uuid); - try (IResultIterator iter = builder.createQuery(theParams, searchRuntimeDetails, theRequest, requestPartitionId)) { - while (iter.hasNext()) { - ids.add(iter.next()); + .withRequest(theRequest) + .withTransactionDetails(transactionDetails) + .withRequestPartitionId(requestPartitionId) + .execute(() -> { + if (isNull(theParams.getLoadSynchronousUpTo())) { + theParams.setLoadSynchronousUpTo(myStorageSettings.getInternalSynchronousSearchSize()); } - } catch (IOException e) { - ourLog.error("IO failure during database access", e); - } - return ids; - }); + ISearchBuilder builder = + mySearchBuilderFactory.newSearchBuilder(this, getResourceName(), getResourceType()); + + List ids = new ArrayList<>(); + + String uuid = UUID.randomUUID().toString(); + + SearchRuntimeDetails searchRuntimeDetails = new SearchRuntimeDetails(theRequest, uuid); + try (IResultIterator iter = + builder.createQuery(theParams, searchRuntimeDetails, theRequest, requestPartitionId)) { + while (iter.hasNext()) { + ids.add(iter.next()); + } + } catch (IOException e) { + ourLog.error("IO failure during database access", e); + } + + return ids; + }); } protected MT toMetaDt(Class theType, Collection tagDefinitions) { @@ -1772,10 +2080,16 @@ public abstract class BaseHapiFhirResourceDao extends B retVal.addProfile(next.getCode()); break; case SECURITY_LABEL: - retVal.addSecurity().setSystem(next.getSystem()).setCode(next.getCode()).setDisplay(next.getDisplay()); + retVal.addSecurity() + .setSystem(next.getSystem()) + .setCode(next.getCode()) + .setDisplay(next.getDisplay()); break; case TAG: - retVal.addTag().setSystem(next.getSystem()).setCode(next.getCode()).setDisplay(next.getDisplay()); + retVal.addTag() + .setSystem(next.getSystem()) + .setCode(next.getCode()) + .setDisplay(next.getDisplay()); break; } } @@ -1789,7 +2103,8 @@ public abstract class BaseHapiFhirResourceDao extends B retVal.add(new TagDefinition(TagTypeEnum.TAG, next.getSystem(), next.getCode(), next.getDisplay())); } for (IBaseCoding next : theMeta.getSecurity()) { - retVal.add(new TagDefinition(TagTypeEnum.SECURITY_LABEL, next.getSystem(), next.getCode(), next.getDisplay())); + retVal.add( + new TagDefinition(TagTypeEnum.SECURITY_LABEL, next.getSystem(), next.getCode(), next.getDisplay())); } for (IPrimitiveType next : theMeta.getProfile()) { retVal.add(new TagDefinition(TagTypeEnum.PROFILE, BaseHapiFhirDao.NS_JPA_PROFILE, next.getValue(), null)); @@ -1825,12 +2140,19 @@ public abstract class BaseHapiFhirResourceDao extends B } @Override - public 
DaoMethodOutcome update(T theResource, String theMatchUrl, boolean thePerformIndexing, RequestDetails theRequestDetails) { + public DaoMethodOutcome update( + T theResource, String theMatchUrl, boolean thePerformIndexing, RequestDetails theRequestDetails) { return update(theResource, theMatchUrl, thePerformIndexing, false, theRequestDetails, new TransactionDetails()); } @Override - public DaoMethodOutcome update(T theResource, String theMatchUrl, boolean thePerformIndexing, boolean theForceUpdateVersion, RequestDetails theRequest, @Nonnull TransactionDetails theTransactionDetails) { + public DaoMethodOutcome update( + T theResource, + String theMatchUrl, + boolean thePerformIndexing, + boolean theForceUpdateVersion, + RequestDetails theRequest, + @Nonnull TransactionDetails theTransactionDetails) { if (theResource == null) { String msg = getContext().getLocalizer().getMessage(BaseStorageDao.class, "missingBody"); throw new InvalidRequestException(Msg.code(986) + msg); @@ -1849,25 +2171,43 @@ public abstract class BaseHapiFhirResourceDao extends B String id = theResource.getIdElement().getValue(); Runnable onRollback = () -> theResource.getIdElement().setValue(id); - RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequest, theResource, getResourceName()); + RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest( + theRequest, theResource, getResourceName()); Callable updateCallback; - if (myStorageSettings.isUpdateWithHistoryRewriteEnabled() && theRequest != null && theRequest.isRewriteHistory()) { - updateCallback = () -> doUpdateWithHistoryRewrite(theResource, theRequest, theTransactionDetails, requestPartitionId); + if (myStorageSettings.isUpdateWithHistoryRewriteEnabled() + && theRequest != null + && theRequest.isRewriteHistory()) { + updateCallback = () -> + doUpdateWithHistoryRewrite(theResource, theRequest, theTransactionDetails, requestPartitionId); } else { - updateCallback = () -> doUpdate(theResource, theMatchUrl, thePerformIndexing, theForceUpdateVersion, theRequest, theTransactionDetails, requestPartitionId); + updateCallback = () -> doUpdate( + theResource, + theMatchUrl, + thePerformIndexing, + theForceUpdateVersion, + theRequest, + theTransactionDetails, + requestPartitionId); } // Execute the update in a retryable transaction return myTransactionService - .withRequest(theRequest) - .withTransactionDetails(theTransactionDetails) - .withRequestPartitionId(requestPartitionId) - .onRollback(onRollback) - .execute(updateCallback); + .withRequest(theRequest) + .withTransactionDetails(theTransactionDetails) + .withRequestPartitionId(requestPartitionId) + .onRollback(onRollback) + .execute(updateCallback); } - private DaoMethodOutcome doUpdate(T theResource, String theMatchUrl, boolean thePerformIndexing, boolean theForceUpdateVersion, RequestDetails theRequest, TransactionDetails theTransactionDetails, RequestPartitionId theRequestPartitionId) { + private DaoMethodOutcome doUpdate( + T theResource, + String theMatchUrl, + boolean thePerformIndexing, + boolean theForceUpdateVersion, + RequestDetails theRequest, + TransactionDetails theTransactionDetails, + RequestPartitionId theRequestPartitionId) { T resource = theResource; preProcessResourceForStorage(resource); @@ -1878,31 +2218,57 @@ public abstract class BaseHapiFhirResourceDao extends B IIdType resourceId; RestOperationTypeEnum update = RestOperationTypeEnum.UPDATE; if (isNotBlank(theMatchUrl)) { - Set match = 
myMatchResourceUrlService.processMatchUrl(theMatchUrl, myResourceType, theTransactionDetails, theRequest, theResource); + Set match = myMatchResourceUrlService.processMatchUrl( + theMatchUrl, myResourceType, theTransactionDetails, theRequest, theResource); if (match.size() > 1) { - String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "transactionOperationWithMultipleMatchFailure", "UPDATE", theMatchUrl, match.size()); + String msg = getContext() + .getLocalizer() + .getMessageSanitized( + BaseStorageDao.class, + "transactionOperationWithMultipleMatchFailure", + "UPDATE", + theMatchUrl, + match.size()); throw new PreconditionFailedException(Msg.code(988) + msg); } else if (match.size() == 1) { JpaPid pid = match.iterator().next(); entity = myEntityManager.find(ResourceTable.class, pid.getId()); resourceId = entity.getIdDt(); - if (myFhirContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.R4) && resource.getIdElement().getIdPart() != null) { + if (myFhirContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.R4) + && resource.getIdElement().getIdPart() != null) { if (!Objects.equals(resource.getIdElement().getIdPart(), resourceId.getIdPart())) { - String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "transactionOperationWithIdNotMatchFailure", "UPDATE", theMatchUrl); + String msg = getContext() + .getLocalizer() + .getMessageSanitized( + BaseStorageDao.class, + "transactionOperationWithIdNotMatchFailure", + "UPDATE", + theMatchUrl); throw new InvalidRequestException(Msg.code(2279) + msg); } } } else { // assign UUID if no id provided in the request (numeric id mode is handled in doCreateForPostOrPut) - if (!theResource.getIdElement().hasIdPart() && getStorageSettings().getResourceServerIdStrategy() == JpaStorageSettings.IdStrategyEnum.UUID) { + if (!theResource.getIdElement().hasIdPart() + && getStorageSettings().getResourceServerIdStrategy() + == JpaStorageSettings.IdStrategyEnum.UUID) { theResource.setId(UUID.randomUUID().toString()); theResource.setUserData(JpaConstants.RESOURCE_ID_SERVER_ASSIGNED, Boolean.TRUE); } - DaoMethodOutcome outcome = doCreateForPostOrPut(theRequest, resource, theMatchUrl, false, thePerformIndexing, theRequestPartitionId, update, theTransactionDetails); + DaoMethodOutcome outcome = doCreateForPostOrPut( + theRequest, + resource, + theMatchUrl, + false, + thePerformIndexing, + theRequestPartitionId, + update, + theTransactionDetails); // Pre-cache the match URL if (outcome.getPersistentId() != null) { - myMatchResourceUrlService.matchUrlResolved(theTransactionDetails, getResourceName(), theMatchUrl, (JpaPid) outcome.getPersistentId()); + myMatchResourceUrlService.matchUrlResolved( + theTransactionDetails, getResourceName(), theMatchUrl, (JpaPid) outcome.getPersistentId()); } return outcome; @@ -1935,26 +2301,63 @@ public abstract class BaseHapiFhirResourceDao extends B } if (create) { - return doCreateForPostOrPut(theRequest, resource, null, false, thePerformIndexing, theRequestPartitionId, update, theTransactionDetails); + return doCreateForPostOrPut( + theRequest, + resource, + null, + false, + thePerformIndexing, + theRequestPartitionId, + update, + theTransactionDetails); } } // Start - return doUpdateForUpdateOrPatch(theRequest, resourceId, theMatchUrl, thePerformIndexing, theForceUpdateVersion, resource, entity, update, theTransactionDetails); + return doUpdateForUpdateOrPatch( + theRequest, + resourceId, + theMatchUrl, + thePerformIndexing, + theForceUpdateVersion, + 
resource, + entity, + update, + theTransactionDetails); } @Override - protected DaoMethodOutcome doUpdateForUpdateOrPatch(RequestDetails theRequest, IIdType theResourceId, String theMatchUrl, boolean thePerformIndexing, boolean theForceUpdateVersion, T theResource, IBasePersistedResource theEntity, RestOperationTypeEnum theOperationType, TransactionDetails theTransactionDetails) { + protected DaoMethodOutcome doUpdateForUpdateOrPatch( + RequestDetails theRequest, + IIdType theResourceId, + String theMatchUrl, + boolean thePerformIndexing, + boolean theForceUpdateVersion, + T theResource, + IBasePersistedResource theEntity, + RestOperationTypeEnum theOperationType, + TransactionDetails theTransactionDetails) { - // we stored a resource searchUrl at creation time to prevent resource duplication. Let's remove the entry on the + // we stored a resource searchUrl at creation time to prevent resource duplication. Let's remove the entry on + // the // first update but guard against unnecessary trips to the database on subsequent ones. ResourceTable entity = (ResourceTable) theEntity; if (entity.isSearchUrlPresent() && thePerformIndexing) { - myResourceSearchUrlSvc.deleteByResId((Long) theEntity.getPersistentId().getId()); + myResourceSearchUrlSvc.deleteByResId( + (Long) theEntity.getPersistentId().getId()); entity.setSearchUrlPresent(false); } - return super.doUpdateForUpdateOrPatch(theRequest, theResourceId, theMatchUrl, thePerformIndexing, theForceUpdateVersion, theResource, theEntity, theOperationType, theTransactionDetails); + return super.doUpdateForUpdateOrPatch( + theRequest, + theResourceId, + theMatchUrl, + thePerformIndexing, + theForceUpdateVersion, + theResource, + theEntity, + theOperationType, + theTransactionDetails); } /** @@ -1965,7 +2368,11 @@ public abstract class BaseHapiFhirResourceDao extends B * @param theTransactionDetails details of the transaction * @return the outcome of the operation */ - private DaoMethodOutcome doUpdateWithHistoryRewrite(T theResource, RequestDetails theRequest, TransactionDetails theTransactionDetails, RequestPartitionId theRequestPartitionId) { + private DaoMethodOutcome doUpdateWithHistoryRewrite( + T theResource, + RequestDetails theRequest, + TransactionDetails theTransactionDetails, + RequestPartitionId theRequestPartitionId) { StopWatch w = new StopWatch(); // No need for indexing as this will update a non-current version of the resource which will not be searchable @@ -1981,27 +2388,36 @@ public abstract class BaseHapiFhirResourceDao extends B assert resourceId.hasIdPart(); try { - currentEntity = readEntityLatestVersion(resourceId.toVersionless(), theRequestPartitionId, theTransactionDetails); + currentEntity = + readEntityLatestVersion(resourceId.toVersionless(), theRequestPartitionId, theTransactionDetails); if (!resourceId.hasVersionIdPart()) { - throw new InvalidRequestException(Msg.code(2093) + "Invalid resource ID, ID must contain a history version"); + throw new InvalidRequestException( + Msg.code(2093) + "Invalid resource ID, ID must contain a history version"); } entity = readEntity(resourceId, theRequest); validateResourceType(entity); } catch (ResourceNotFoundException e) { - throw new ResourceNotFoundException(Msg.code(2087) + "Resource not found [" + resourceId + "] - Doesn't exist"); + throw new ResourceNotFoundException( + Msg.code(2087) + "Resource not found [" + resourceId + "] - Doesn't exist"); } if (resourceId.hasResourceType() && !resourceId.getResourceType().equals(getResourceName())) { - throw new 
UnprocessableEntityException(Msg.code(2088) + "Invalid resource ID[" + entity.getIdDt().toUnqualifiedVersionless() + "] of type[" + entity.getResourceType() + "] - Does not match expected [" + getResourceName() + "]"); + throw new UnprocessableEntityException( + Msg.code(2088) + "Invalid resource ID[" + entity.getIdDt().toUnqualifiedVersionless() + "] of type[" + + entity.getResourceType() + "] - Does not match expected [" + getResourceName() + "]"); } assert resourceId.hasVersionIdPart(); boolean wasDeleted = isDeleted(entity); entity.setDeleted(null); - boolean isUpdatingCurrent = resourceId.hasVersionIdPart() && Long.parseLong(resourceId.getVersionIdPart()) == currentEntity.getVersion(); - IBasePersistedResource savedEntity = updateHistoryEntity(theRequest, theResource, currentEntity, entity, resourceId, theTransactionDetails, isUpdatingCurrent); - DaoMethodOutcome outcome = toMethodOutcome(theRequest, savedEntity, theResource, null, RestOperationTypeEnum.UPDATE).setCreated(wasDeleted); + boolean isUpdatingCurrent = resourceId.hasVersionIdPart() + && Long.parseLong(resourceId.getVersionIdPart()) == currentEntity.getVersion(); + IBasePersistedResource savedEntity = updateHistoryEntity( + theRequest, theResource, currentEntity, entity, resourceId, theTransactionDetails, isUpdatingCurrent); + DaoMethodOutcome outcome = toMethodOutcome( + theRequest, savedEntity, theResource, null, RestOperationTypeEnum.UPDATE) + .setCreated(wasDeleted); populateOperationOutcomeForUpdate(w, outcome, null, RestOperationTypeEnum.UPDATE); @@ -2010,12 +2426,20 @@ public abstract class BaseHapiFhirResourceDao extends B @Override @Transactional(propagation = Propagation.SUPPORTS) - public MethodOutcome validate(T theResource, IIdType theId, String theRawResource, EncodingEnum theEncoding, ValidationModeEnum theMode, String theProfile, RequestDetails theRequest) { + public MethodOutcome validate( + T theResource, + IIdType theId, + String theRawResource, + EncodingEnum theEncoding, + ValidationModeEnum theMode, + String theProfile, + RequestDetails theRequest) { TransactionDetails transactionDetails = new TransactionDetails(); if (theMode == ValidationModeEnum.DELETE) { if (theId == null || theId.hasIdPart() == false) { - throw new InvalidRequestException(Msg.code(991) + "No ID supplied. ID is required when validating with mode=DELETE"); + throw new InvalidRequestException( + Msg.code(991) + "No ID supplied. 
ID is required when validating with mode=DELETE"); } final ResourceTable entity = readEntityLatestVersion(theId, theRequest, transactionDetails); @@ -2023,7 +2447,8 @@ public abstract class BaseHapiFhirResourceDao extends B // would prevent deletion DeleteConflictList deleteConflicts = new DeleteConflictList(); if (getStorageSettings().isEnforceReferentialIntegrityOnDelete()) { - myDeleteConflictService.validateOkToDelete(deleteConflicts, entity, true, theRequest, new TransactionDetails()); + myDeleteConflictService.validateOkToDelete( + deleteConflicts, entity, true, theRequest, new TransactionDetails()); } DeleteConflictUtil.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts); @@ -2032,21 +2457,21 @@ public abstract class BaseHapiFhirResourceDao extends B } FhirValidator validator = getContext().newValidator(); - validator.setInterceptorBroadcaster(CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest)); + validator.setInterceptorBroadcaster( + CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest)); validator.registerValidatorModule(getInstanceValidator()); validator.registerValidatorModule(new IdChecker(theMode)); IBaseResource resourceToValidateById = null; if (theId != null && theId.hasResourceType() && theId.hasIdPart()) { - Class type = getContext().getResourceDefinition(theId.getResourceType()).getImplementingClass(); + Class type = + getContext().getResourceDefinition(theId.getResourceType()).getImplementingClass(); IFhirResourceDao dao = myDaoRegistry.getResourceDaoOrNull(type); resourceToValidateById = dao.read(theId, theRequest); } - ValidationResult result; - ValidationOptions options = new ValidationOptions() - .addProfileIfNotBlank(theProfile); + ValidationOptions options = new ValidationOptions().addProfileIfNotBlank(theProfile); if (theResource == null) { if (resourceToValidateById != null) { @@ -2090,8 +2515,10 @@ public abstract class BaseHapiFhirResourceDao extends B if (entity.getForcedId() != null) { if (getStorageSettings().getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY) { if (theId.isIdPartValidLong()) { - // This means that the resource with the given numeric ID exists, but it has a "forced ID", meaning that - // as far as the outside world is concerned, the given ID doesn't exist (it's just an internal pointer + // This means that the resource with the given numeric ID exists, but it has a "forced ID", meaning + // that + // as far as the outside world is concerned, the given ID doesn't exist (it's just an internal + // pointer // to the // forced ID) throw new ResourceNotFoundException(Msg.code(2000) + theId); @@ -2106,8 +2533,10 @@ public abstract class BaseHapiFhirResourceDao extends B private void validateResourceTypeAndThrowInvalidRequestException(IIdType theId) { if (theId.hasResourceType() && !theId.getResourceType().equals(myResourceName)) { - // Note- Throw a HAPI FHIR exception here so that hibernate doesn't try to translate it into a database exception - throw new InvalidRequestException(Msg.code(996) + "Incorrect resource type (" + theId.getResourceType() + ") for this DAO, wanted: " + myResourceName); + // Note- Throw a HAPI FHIR exception here so that hibernate doesn't try to translate it into a database + // exception + throw new InvalidRequestException(Msg.code(996) + "Incorrect resource type (" + theId.getResourceType() + + ") for this DAO, wanted: " + myResourceName); } } @@ -2129,22 +2558,23 @@ public abstract 
class BaseHapiFhirResourceDao extends B IBaseResource resource = theCtx.getResource(); if (resource instanceof Parameters) { List params = ((Parameters) resource).getParameter(); - params = params.stream().filter(param -> param.getName().contains("resource")).collect(Collectors.toList()); + params = params.stream() + .filter(param -> param.getName().contains("resource")) + .collect(Collectors.toList()); resource = params.get(0).getResource(); } boolean hasId = resource.getIdElement().hasIdPart(); if (myMode == ValidationModeEnum.CREATE) { if (hasId) { - throw new UnprocessableEntityException(Msg.code(997) + "Resource has an ID - ID must not be populated for a FHIR create"); + throw new UnprocessableEntityException( + Msg.code(997) + "Resource has an ID - ID must not be populated for a FHIR create"); } } else if (myMode == ValidationModeEnum.UPDATE) { if (hasId == false) { - throw new UnprocessableEntityException(Msg.code(998) + "Resource has no ID - ID must be populated for a FHIR update"); + throw new UnprocessableEntityException( + Msg.code(998) + "Resource has no ID - ID must be populated for a FHIR update"); } } - } - } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java index 8112024432e..6696a3b0e52 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java @@ -54,9 +54,14 @@ import org.springframework.context.ApplicationContext; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; import javax.annotation.Nullable; import javax.persistence.EntityManager; -import javax.persistence.LockModeType; import javax.persistence.PersistenceContext; import javax.persistence.PersistenceContextType; import javax.persistence.TypedQuery; @@ -65,14 +70,9 @@ import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.JoinType; import javax.persistence.criteria.Predicate; import javax.persistence.criteria.Root; -import java.util.ArrayList; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; -public abstract class BaseHapiFhirSystemDao extends BaseStorageDao implements IFhirSystemDao { +public abstract class BaseHapiFhirSystemDao extends BaseStorageDao + implements IFhirSystemDao { public static final Predicate[] EMPTY_PREDICATE_ARRAY = new Predicate[0]; private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiFhirSystemDao.class); @@ -80,22 +80,31 @@ public abstract class BaseHapiFhirSystemDao extends B @PersistenceContext(type = PersistenceContextType.TRANSACTION) protected EntityManager myEntityManager; + @Autowired private TransactionProcessor myTransactionProcessor; + @Autowired private ApplicationContext myApplicationContext; + @Autowired private ExpungeService myExpungeService; + @Autowired private IResourceTableDao myResourceTableDao; + @Autowired private PersistedJpaBundleProviderFactory myPersistedJpaBundleProviderFactory; + @Autowired private IResourceTagDao myResourceTagDao; + @Autowired private IInterceptorBroadcaster myInterceptorBroadcaster; + 
@Autowired private IRequestPartitionHelperSvc myRequestPartitionHelperService; + @Autowired private IHapiTransactionService myTransactionService; @@ -128,7 +137,9 @@ public abstract class BaseHapiFhirSystemDao extends B List> counts = myResourceTableDao.getResourceCounts(); for (Map next : counts) { - retVal.put(next.get("type").toString(), Long.parseLong(next.get("count").toString())); + retVal.put( + next.get("type").toString(), + Long.parseLong(next.get("count").toString())); } return retVal; @@ -148,11 +159,13 @@ public abstract class BaseHapiFhirSystemDao extends B public IBundleProvider history(Date theSince, Date theUntil, Integer theOffset, RequestDetails theRequestDetails) { StopWatch w = new StopWatch(); ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forHistory(null, null); - RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, details); + RequestPartitionId requestPartitionId = + myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, details); IBundleProvider retVal = myTransactionService - .withRequest(theRequestDetails) - .withRequestPartitionId(requestPartitionId) - .execute(() -> myPersistedJpaBundleProviderFactory.history(theRequestDetails, null, null, theSince, theUntil, theOffset, requestPartitionId)); + .withRequest(theRequestDetails) + .withRequestPartitionId(requestPartitionId) + .execute(() -> myPersistedJpaBundleProviderFactory.history( + theRequestDetails, null, null, theSince, theUntil, theOffset, requestPartitionId)); ourLog.info("Processed global history in {}ms", w.getMillisAndRestart()); return retVal; } @@ -170,12 +183,10 @@ public abstract class BaseHapiFhirSystemDao extends B } @Override - public

<P extends IResourcePersistentId> void preFetchResources(List<P> theResolvedIds, boolean thePreFetchIndexes) { + public <P extends IResourcePersistentId> void preFetchResources( + List<P>
    theResolvedIds, boolean thePreFetchIndexes) { HapiTransactionService.requireTransaction(); - List pids = theResolvedIds - .stream() - .map(t -> ((JpaPid) t).getId()) - .collect(Collectors.toList()); + List pids = theResolvedIds.stream().map(t -> ((JpaPid) t).getId()).collect(Collectors.toList()); new QueryChunker().chunk(pids, ids -> { @@ -197,82 +208,103 @@ public abstract class BaseHapiFhirSystemDao extends B List entityIds; if (thePreFetchIndexes) { - entityIds = loadedResourceTableEntries.stream().filter(ResourceTable::isParamsStringPopulated).map(ResourceTable::getId).collect(Collectors.toList()); + entityIds = loadedResourceTableEntries.stream() + .filter(ResourceTable::isParamsStringPopulated) + .map(ResourceTable::getId) + .collect(Collectors.toList()); if (entityIds.size() > 0) { preFetchIndexes(entityIds, "string", "myParamsString", null); } - entityIds = loadedResourceTableEntries.stream().filter(ResourceTable::isParamsTokenPopulated).map(ResourceTable::getId).collect(Collectors.toList()); + entityIds = loadedResourceTableEntries.stream() + .filter(ResourceTable::isParamsTokenPopulated) + .map(ResourceTable::getId) + .collect(Collectors.toList()); if (entityIds.size() > 0) { preFetchIndexes(entityIds, "token", "myParamsToken", null); } - entityIds = loadedResourceTableEntries.stream().filter(ResourceTable::isParamsDatePopulated).map(ResourceTable::getId).collect(Collectors.toList()); + entityIds = loadedResourceTableEntries.stream() + .filter(ResourceTable::isParamsDatePopulated) + .map(ResourceTable::getId) + .collect(Collectors.toList()); if (entityIds.size() > 0) { preFetchIndexes(entityIds, "date", "myParamsDate", null); } - entityIds = loadedResourceTableEntries.stream().filter(ResourceTable::isParamsQuantityPopulated).map(ResourceTable::getId).collect(Collectors.toList()); + entityIds = loadedResourceTableEntries.stream() + .filter(ResourceTable::isParamsQuantityPopulated) + .map(ResourceTable::getId) + .collect(Collectors.toList()); if (entityIds.size() > 0) { preFetchIndexes(entityIds, "quantity", "myParamsQuantity", null); } - entityIds = loadedResourceTableEntries.stream().filter(ResourceTable::isHasLinks).map(ResourceTable::getId).collect(Collectors.toList()); + entityIds = loadedResourceTableEntries.stream() + .filter(ResourceTable::isHasLinks) + .map(ResourceTable::getId) + .collect(Collectors.toList()); if (entityIds.size() > 0) { preFetchIndexes(entityIds, "resourceLinks", "myResourceLinks", null); } - entityIds = loadedResourceTableEntries.stream().filter(BaseHasResource::isHasTags).map(ResourceTable::getId).collect(Collectors.toList()); + entityIds = loadedResourceTableEntries.stream() + .filter(BaseHasResource::isHasTags) + .map(ResourceTable::getId) + .collect(Collectors.toList()); if (entityIds.size() > 0) { myResourceTagDao.findByResourceIds(entityIds); preFetchIndexes(entityIds, "tags", "myTags", null); } - entityIds = loadedResourceTableEntries.stream().map(ResourceTable::getId).collect(Collectors.toList()); + entityIds = loadedResourceTableEntries.stream() + .map(ResourceTable::getId) + .collect(Collectors.toList()); if (myStorageSettings.getIndexMissingFields() == JpaStorageSettings.IndexEnabledEnum.ENABLED) { preFetchIndexes(entityIds, "searchParamPresence", "mySearchParamPresents", null); } } - new QueryChunker().chunk(loadedResourceTableEntries, SearchBuilder.getMaximumPageSize() / 2, entries -> { + new QueryChunker() + .chunk(loadedResourceTableEntries, SearchBuilder.getMaximumPageSize() / 2, entries -> { + Map entities = + 
entries.stream().collect(Collectors.toMap(ResourceTable::getId, t -> t)); - Map entities = entries - .stream() - .collect(Collectors.toMap(ResourceTable::getId, t -> t)); - - CriteriaBuilder b = myEntityManager.getCriteriaBuilder(); - CriteriaQuery q = b.createQuery(ResourceHistoryTable.class); - Root from = q.from(ResourceHistoryTable.class); - - from.fetch("myProvenance", JoinType.LEFT); - - List orPredicates = new ArrayList<>(); - for (ResourceTable next : entries) { - Predicate resId = b.equal(from.get("myResourceId"), next.getId()); - Predicate resVer = b.equal(from.get("myResourceVersion"), next.getVersion()); - orPredicates.add(b.and(resId, resVer)); - } - q.where(b.or(orPredicates.toArray(EMPTY_PREDICATE_ARRAY))); - List resultList = myEntityManager.createQuery(q).getResultList(); - for (ResourceHistoryTable next : resultList) { - ResourceTable nextEntity = entities.get(next.getResourceId()); - if (nextEntity != null) { - nextEntity.setCurrentVersionEntity(next); - } - } - - }); + CriteriaBuilder b = myEntityManager.getCriteriaBuilder(); + CriteriaQuery q = b.createQuery(ResourceHistoryTable.class); + Root from = q.from(ResourceHistoryTable.class); + from.fetch("myProvenance", JoinType.LEFT); + List orPredicates = new ArrayList<>(); + for (ResourceTable next : entries) { + Predicate resId = b.equal(from.get("myResourceId"), next.getId()); + Predicate resVer = b.equal(from.get("myResourceVersion"), next.getVersion()); + orPredicates.add(b.and(resId, resVer)); + } + q.where(b.or(orPredicates.toArray(EMPTY_PREDICATE_ARRAY))); + List resultList = + myEntityManager.createQuery(q).getResultList(); + for (ResourceHistoryTable next : resultList) { + ResourceTable nextEntity = entities.get(next.getResourceId()); + if (nextEntity != null) { + nextEntity.setCurrentVersionEntity(next); + } + } + }); } - - }); } - private void preFetchIndexes(List theIds, String typeDesc, String fieldName, @Nullable List theEntityListToPopulate) { + private void preFetchIndexes( + List theIds, + String typeDesc, + String fieldName, + @Nullable List theEntityListToPopulate) { new QueryChunker().chunk(theIds, ids -> { - TypedQuery query = myEntityManager.createQuery("FROM ResourceTable r LEFT JOIN FETCH r." + fieldName + " WHERE r.myId IN ( :IDS )", ResourceTable.class); + TypedQuery query = myEntityManager.createQuery( + "FROM ResourceTable r LEFT JOIN FETCH r." 
+ fieldName + " WHERE r.myId IN ( :IDS )", + ResourceTable.class); query.setParameter("IDS", ids); List indexFetchOutcome = query.getResultList(); ourLog.debug("Pre-fetched {} {}} indexes", indexFetchOutcome.size(), typeDesc); @@ -282,14 +314,12 @@ public abstract class BaseHapiFhirSystemDao extends B }); } - @Nullable @Override protected String getResourceName() { return null; } - @Override protected IInterceptorBroadcaster getInterceptorBroadcaster() { return myInterceptorBroadcaster; @@ -309,5 +339,4 @@ public abstract class BaseHapiFhirSystemDao extends B public void setStorageSettingsForUnitTest(JpaStorageSettings theStorageSettings) { myStorageSettings = theStorageSettings; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/CodingSpy.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/CodingSpy.java index d6c19e50095..897d5d104da 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/CodingSpy.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/CodingSpy.java @@ -26,7 +26,6 @@ import org.hl7.fhir.instance.model.api.IBaseBooleanDatatype; import org.hl7.fhir.instance.model.api.IBaseCoding; import java.lang.reflect.Field; -import java.util.HashMap; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @@ -59,7 +58,8 @@ public class CodingSpy { if (o instanceof Boolean) { return (Boolean) o; } - throw new RuntimeException(Msg.code(2342) + "unsupported type :" + theValue.getClass().getName()); + throw new RuntimeException( + Msg.code(2342) + "unsupported type :" + theValue.getClass().getName()); } catch (IllegalAccessException theException) { // should never happen - all Coding models have this field. throw new RuntimeException(Msg.code(2343) + "illegal access during reflection", theException); @@ -77,5 +77,4 @@ public class CodingSpy { } return result; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoSubscriptionDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoSubscriptionDstu2.java index d166952d3e4..1e557942030 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoSubscriptionDstu2.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FhirResourceDaoSubscriptionDstu2.java @@ -34,7 +34,8 @@ import org.springframework.transaction.PlatformTransactionManager; import java.util.Date; -public class FhirResourceDaoSubscriptionDstu2 extends BaseHapiFhirResourceDao implements IFhirResourceDaoSubscription { +public class FhirResourceDaoSubscriptionDstu2 extends BaseHapiFhirResourceDao + implements IFhirResourceDaoSubscription { @Autowired private ISubscriptionTableDao mySubscriptionTableDao; @@ -50,7 +51,8 @@ public class FhirResourceDaoSubscriptionDstu2 extends BaseHapiFhirResourceDao { @@ -51,10 +50,16 @@ public class FhirSystemDaoDstu2 extends BaseHapiFhirSystemDao { retVal.addProfile(next.getCode()); break; case SECURITY_LABEL: - retVal.addSecurity().setSystem(next.getSystem()).setCode(next.getCode()).setDisplay(next.getDisplay()); + retVal.addSecurity() + .setSystem(next.getSystem()) + .setCode(next.getCode()) + .setDisplay(next.getDisplay()); break; case TAG: - retVal.addTag().setSystem(next.getSystem()).setCode(next.getCode()).setDisplay(next.getDisplay()); + retVal.addTag() + .setSystem(next.getSystem()) + .setCode(next.getCode()) + .setDisplay(next.getDisplay()); break; } } @@ -65,5 +70,4 @@ public class FhirSystemDaoDstu2 extends BaseHapiFhirSystemDao { public 
IBaseBundle processMessage(RequestDetails theRequestDetails, IBaseBundle theMessage) { return JpaResourceDao.throwProcessMessageNotImplemented(); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java index d9f069dcfbe..346b1063e3d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java @@ -71,10 +71,6 @@ import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.support.TransactionTemplate; -import javax.annotation.Nonnull; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.PersistenceContextType; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -82,6 +78,10 @@ import java.util.List; import java.util.Spliterators; import java.util.stream.Collectors; import java.util.stream.StreamSupport; +import javax.annotation.Nonnull; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; +import javax.persistence.PersistenceContextType; import static ca.uhn.fhir.rest.server.BasePagingProvider.DEFAULT_MAX_PAGE_SIZE; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -89,25 +89,35 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; public class FulltextSearchSvcImpl implements IFulltextSearchSvc { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FulltextSearchSvcImpl.class); private static final int DEFAULT_MAX_NON_PAGED_SIZE = 500; - final private ExtendedHSearchSearchBuilder myAdvancedIndexQueryBuilder = new ExtendedHSearchSearchBuilder(); + private final ExtendedHSearchSearchBuilder myAdvancedIndexQueryBuilder = new ExtendedHSearchSearchBuilder(); + @Autowired ISearchParamExtractor mySearchParamExtractor; + @Autowired IIdHelperService myIdHelperService; + @PersistenceContext(type = PersistenceContextType.TRANSACTION) private EntityManager myEntityManager; + @Autowired private PlatformTransactionManager myTxManager; + @Autowired private FhirContext myFhirContext; + @Autowired private ISearchParamRegistry mySearchParamRegistry; + @Autowired private JpaStorageSettings myStorageSettings; + @Autowired private IHSearchSortHelper myExtendedFulltextSortHelper; + @Autowired(required = false) private IHSearchEventListener myHSearchEventListener; + @Autowired private IInterceptorBroadcaster myInterceptorBroadcaster; @@ -121,11 +131,12 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc { } @Override - public ExtendedHSearchIndexData extractLuceneIndexData(IBaseResource theResource, ResourceIndexedSearchParams theNewParams) { + public ExtendedHSearchIndexData extractLuceneIndexData( + IBaseResource theResource, ResourceIndexedSearchParams theNewParams) { String resourceType = myFhirContext.getResourceType(theResource); ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams(resourceType); ExtendedHSearchIndexExtractor extractor = new ExtendedHSearchIndexExtractor( - myStorageSettings, myFhirContext, activeSearchParams, mySearchParamExtractor); + myStorageSettings, myFhirContext, activeSearchParams, mySearchParamExtractor); return extractor.extract(theResource, theNewParams); } @@ -133,9 +144,12 @@ 
public class FulltextSearchSvcImpl implements IFulltextSearchSvc { public boolean supportsSomeOf(SearchParameterMap myParams) { // keep this in sync with the guts of doSearch - boolean requiresHibernateSearchAccess = myParams.containsKey(Constants.PARAM_CONTENT) || myParams.containsKey(Constants.PARAM_TEXT) || myParams.isLastN(); + boolean requiresHibernateSearchAccess = myParams.containsKey(Constants.PARAM_CONTENT) + || myParams.containsKey(Constants.PARAM_TEXT) + || myParams.isLastN(); - requiresHibernateSearchAccess |= myStorageSettings.isAdvancedHSearchIndexing() && myAdvancedIndexQueryBuilder.isSupportsSomeOf(myParams); + requiresHibernateSearchAccess |= + myStorageSettings.isAdvancedHSearchIndexing() && myAdvancedIndexQueryBuilder.isSupportsSomeOf(myParams); return requiresHibernateSearchAccess; } @@ -149,26 +163,33 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc { } @Override - public ISearchQueryExecutor searchNotScrolled(String theResourceName, SearchParameterMap theParams, Integer theMaxResultsToFetch, RequestDetails theRequestDetails) { + public ISearchQueryExecutor searchNotScrolled( + String theResourceName, + SearchParameterMap theParams, + Integer theMaxResultsToFetch, + RequestDetails theRequestDetails) { validateHibernateSearchIsEnabled(); return doSearch(theResourceName, theParams, null, theMaxResultsToFetch, theRequestDetails); } - // keep this in sync with supportsSomeOf(); - private ISearchQueryExecutor doSearch(String theResourceType, SearchParameterMap theParams, - IResourcePersistentId theReferencingPid, Integer theMaxResultsToFetch, RequestDetails theRequestDetails) { + private ISearchQueryExecutor doSearch( + String theResourceType, + SearchParameterMap theParams, + IResourcePersistentId theReferencingPid, + Integer theMaxResultsToFetch, + RequestDetails theRequestDetails) { int offset = theParams.getOffset() == null ? 0 : theParams.getOffset(); int count = getMaxFetchSize(theParams, theMaxResultsToFetch); // perform an offset search instead of a scroll one, which doesn't allow for offset - SearchQueryOptionsStep searchQueryOptionsStep = getSearchQueryOptionsStep(theResourceType, theParams, theReferencingPid); + SearchQueryOptionsStep searchQueryOptionsStep = + getSearchQueryOptionsStep(theResourceType, theParams, theReferencingPid); logQuery(searchQueryOptionsStep, theRequestDetails); List longs = searchQueryOptionsStep.fetchHits(offset, count); - // indicate param was already processed, otherwise queries DB to process it theParams.setOffset(null); return SearchQueryExecutors.from(longs); @@ -187,26 +208,20 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc { return DEFAULT_MAX_NON_PAGED_SIZE; } - private SearchQueryOptionsStep getSearchQueryOptionsStep( - String theResourceType, SearchParameterMap theParams, IResourcePersistentId theReferencingPid) { + String theResourceType, SearchParameterMap theParams, IResourcePersistentId theReferencingPid) { dispatchEvent(IHSearchEventListener.HSearchEventType.SEARCH); - var query = getSearchSession().search(ResourceTable.class) - // The document id is the PK which is pid. We use this instead of _myId to avoid fetching the doc body. - .select( - // adapt the String docRef.id() to the Long that it really is. 
- f -> f.composite( - docRef -> Long.valueOf(docRef.id()), - f.documentReference()) - ) - .where( - f -> buildWhereClause(f, theResourceType, theParams, theReferencingPid) - ); + var query = getSearchSession() + .search(ResourceTable.class) + // The document id is the PK which is pid. We use this instead of _myId to avoid fetching the doc body. + .select( + // adapt the String docRef.id() to the Long that it really is. + f -> f.composite(docRef -> Long.valueOf(docRef.id()), f.documentReference())) + .where(f -> buildWhereClause(f, theResourceType, theParams, theReferencingPid)); if (theParams.getSort() != null) { - query.sort( - f -> myExtendedFulltextSortHelper.getSortClauses(f, theParams.getSort(), theResourceType)); + query.sort(f -> myExtendedFulltextSortHelper.getSortClauses(f, theParams.getSort(), theResourceType)); // indicate parameter was processed theParams.setSort(null); @@ -215,11 +230,14 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc { return query; } - - private PredicateFinalStep buildWhereClause(SearchPredicateFactory f, String theResourceType, - SearchParameterMap theParams, IResourcePersistentId theReferencingPid) { + private PredicateFinalStep buildWhereClause( + SearchPredicateFactory f, + String theResourceType, + SearchParameterMap theParams, + IResourcePersistentId theReferencingPid) { return f.bool(b -> { - ExtendedHSearchClauseBuilder builder = new ExtendedHSearchClauseBuilder(myFhirContext, myStorageSettings, b, f); + ExtendedHSearchClauseBuilder builder = + new ExtendedHSearchClauseBuilder(myFhirContext, myStorageSettings, b, f); /* * Handle _content parameter (resource body content) @@ -253,30 +271,33 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc { * Handle other supported parameters */ if (myStorageSettings.isAdvancedHSearchIndexing() && theParams.getEverythingMode() == null) { - myAdvancedIndexQueryBuilder.addAndConsumeAdvancedQueryClauses(builder, theResourceType, theParams, mySearchParamRegistry); + myAdvancedIndexQueryBuilder.addAndConsumeAdvancedQueryClauses( + builder, theResourceType, theParams, mySearchParamRegistry); } - //DROP EARLY HERE IF BOOL IS EMPTY? + // DROP EARLY HERE IF BOOL IS EMPTY? }); } - @Nonnull private SearchSession getSearchSession() { return Search.session(myEntityManager); } private List convertLongsToResourcePersistentIds(List theLongPids) { - return theLongPids.stream() - .map(JpaPid::fromId) - .collect(Collectors.toList()); + return theLongPids.stream().map(JpaPid::fromId).collect(Collectors.toList()); } @Override - public List everything(String theResourceName, SearchParameterMap theParams, IResourcePersistentId theReferencingPid, RequestDetails theRequestDetails) { + public List everything( + String theResourceName, + SearchParameterMap theParams, + IResourcePersistentId theReferencingPid, + RequestDetails theRequestDetails) { validateHibernateSearchIsEnabled(); // todo mb what about max results here? 
- List retVal = toList(doSearch(null, theParams, theReferencingPid, 10_000,theRequestDetails), 10_000); + List retVal = + toList(doSearch(null, theParams, theReferencingPid, 10_000, theRequestDetails), 10_000); if (theReferencingPid != null) { retVal.add(theReferencingPid); } @@ -301,7 +322,8 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc { return Boolean.FALSE; } catch (Exception e) { ourLog.trace("FullText test failed", e); - ourLog.debug("Hibernate Search (Lucene) appears to be disabled on this server, fulltext will be disabled"); + ourLog.debug( + "Hibernate Search (Lucene) appears to be disabled on this server, fulltext will be disabled"); return Boolean.TRUE; } }); @@ -314,9 +336,12 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc { @Transactional() @Override - public List search(String theResourceName, SearchParameterMap theParams, RequestDetails theRequestDetails) { + public List search( + String theResourceName, SearchParameterMap theParams, RequestDetails theRequestDetails) { validateHibernateSearchIsEnabled(); - return toList(doSearch(theResourceName, theParams, null, DEFAULT_MAX_NON_PAGED_SIZE, theRequestDetails ), DEFAULT_MAX_NON_PAGED_SIZE); + return toList( + doSearch(theResourceName, theParams, null, DEFAULT_MAX_NON_PAGED_SIZE, theRequestDetails), + DEFAULT_MAX_NON_PAGED_SIZE); } /** @@ -324,9 +349,9 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc { */ private List toList(ISearchQueryExecutor theSearchResultStream, long theMaxSize) { return StreamSupport.stream(Spliterators.spliteratorUnknownSize(theSearchResultStream, 0), false) - .map(JpaPid::fromId) - .limit(theMaxSize) - .collect(Collectors.toList()); + .map(JpaPid::fromId) + .limit(theMaxSize) + .collect(Collectors.toList()); } @Transactional() @@ -335,7 +360,8 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc { validateHibernateSearchIsEnabled(); ensureElastic(); - ValueSetAutocompleteSearch autocomplete = new ValueSetAutocompleteSearch(myFhirContext, myStorageSettings, getSearchSession()); + ValueSetAutocompleteSearch autocomplete = + new ValueSetAutocompleteSearch(myFhirContext, myStorageSettings, getSearchSession()); dispatchEvent(IHSearchEventListener.HSearchEventType.SEARCH); return autocomplete.search(theOptions); @@ -349,22 +375,21 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc { */ private void ensureElastic() { try { - getSearchSession().scope(ResourceTable.class) - .aggregation() - .extension(ElasticsearchExtension.get()); + getSearchSession().scope(ResourceTable.class).aggregation().extension(ElasticsearchExtension.get()); } catch (SearchException e) { // unsupported. we are probably running Lucene. - throw new IllegalStateException(Msg.code(2070) + "This operation requires Elasticsearch. Lucene is not supported."); + throw new IllegalStateException( + Msg.code(2070) + "This operation requires Elasticsearch. 
Lucene is not supported."); } - } @Override public List lastN(SearchParameterMap theParams, Integer theMaximumResults) { ensureElastic(); dispatchEvent(IHSearchEventListener.HSearchEventType.SEARCH); - List pidList = new LastNOperation(getSearchSession(), myFhirContext, myStorageSettings, mySearchParamRegistry) - .executeLastN(theParams, theMaximumResults); + List pidList = new LastNOperation( + getSearchSession(), myFhirContext, myStorageSettings, mySearchParamRegistry) + .executeLastN(theParams, theMaximumResults); return convertLongsToResourcePersistentIds(pidList); } @@ -377,53 +402,49 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc { SearchSession session = getSearchSession(); dispatchEvent(IHSearchEventListener.HSearchEventType.SEARCH); List rawResourceDataList = session.search(ResourceTable.class) - .select( - this::buildResourceSelectClause - ) - .where( - f -> f.id().matchingAny(thePids) // matches '_id' from resource index - ).fetchAllHits(); + .select(this::buildResourceSelectClause) + .where( + f -> f.id().matchingAny(thePids) // matches '_id' from resource index + ) + .fetchAllHits(); // order resource projections as per thePids ArrayList pidList = new ArrayList<>(thePids); List orderedAsPidsResourceDataList = rawResourceDataList.stream() - .sorted(Ordering.explicit(pidList).onResultOf(ExtendedHSearchResourceProjection::getPid)).collect(Collectors.toList()); + .sorted(Ordering.explicit(pidList).onResultOf(ExtendedHSearchResourceProjection::getPid)) + .collect(Collectors.toList()); return resourceProjectionsToResources(orderedAsPidsResourceDataList); } - @Nonnull - private List resourceProjectionsToResources(List theResourceDataList) { + private List resourceProjectionsToResources( + List theResourceDataList) { IParser parser = myFhirContext.newJsonParser(); - return theResourceDataList.stream() - .map(p -> p.toResource(parser)) - .collect(Collectors.toList()); + return theResourceDataList.stream().map(p -> p.toResource(parser)).collect(Collectors.toList()); } - private CompositeProjectionOptionsStep buildResourceSelectClause( - SearchProjectionFactory f) { + SearchProjectionFactory f) { return f.composite( - ExtendedHSearchResourceProjection::new, - f.field("myId", Long.class), - f.field("myForcedId", String.class), - f.field("myRawResource", String.class)); + ExtendedHSearchResourceProjection::new, + f.field("myId", Long.class), + f.field("myForcedId", String.class), + f.field("myRawResource", String.class)); } - @Override public long count(String theResourceName, SearchParameterMap theParams) { SearchQueryOptionsStep queryOptionsStep = - getSearchQueryOptionsStep(theResourceName, theParams, null); + getSearchQueryOptionsStep(theResourceName, theParams, null); return queryOptionsStep.fetchTotalHitCount(); } - @Override @Transactional(readOnly = true) - public List searchForResources(String theResourceType, SearchParameterMap theParams, RequestDetails theRequestDetails) { + public List searchForResources( + String theResourceType, SearchParameterMap theParams, RequestDetails theRequestDetails) { int offset = 0; int limit = theParams.getCount() == null ? 
DEFAULT_MAX_PAGE_SIZE : theParams.getCount(); @@ -435,13 +456,13 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc { dispatchEvent(IHSearchEventListener.HSearchEventType.SEARCH); - var query = getSearchSession().search(ResourceTable.class) - .select(this::buildResourceSelectClause) - .where(f -> buildWhereClause(f, theResourceType, theParams, null)); + var query = getSearchSession() + .search(ResourceTable.class) + .select(this::buildResourceSelectClause) + .where(f -> buildWhereClause(f, theResourceType, theParams, null)); if (theParams.getSort() != null) { - query.sort( - f -> myExtendedFulltextSortHelper.getSortClauses(f, theParams.getSort(), theResourceType)); + query.sort(f -> myExtendedFulltextSortHelper.getSortClauses(f, theParams.getSort(), theResourceType)); } logQuery(query, theRequestDetails); @@ -457,32 +478,31 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc { */ @SuppressWarnings("rawtypes") private void logQuery(SearchQueryOptionsStep theQuery, RequestDetails theRequestDetails) { - if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO, myInterceptorBroadcaster, theRequestDetails)) { + if (CompositeInterceptorBroadcaster.hasHooks( + Pointcut.JPA_PERFTRACE_INFO, myInterceptorBroadcaster, theRequestDetails)) { StorageProcessingMessage storageProcessingMessage = new StorageProcessingMessage(); String queryString = theQuery.toQuery().queryString(); storageProcessingMessage.setMessage(queryString); HookParams params = new HookParams() - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - .add(StorageProcessingMessage.class, storageProcessingMessage); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_INFO, params); + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(StorageProcessingMessage.class, storageProcessingMessage); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_INFO, params); } } - @Override public boolean supportsAllOf(SearchParameterMap theParams) { return myAdvancedIndexQueryBuilder.isSupportsAllOf(theParams); } - private void dispatchEvent(IHSearchEventListener.HSearchEventType theEventType) { if (myHSearchEventListener != null) { myHSearchEventListener.hsearchEvent(theEventType); } } - @Override public void deleteIndexedDocumentsByTypeAndId(Class theClazz, List theGivenIds) { SearchSession session = Search.session(myEntityManager); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilder.java index 0c2e06be492..445e9d8860b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilder.java @@ -36,6 +36,12 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; import javax.annotation.Nullable; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; @@ -48,16 +54,9 @@ import javax.persistence.criteria.JoinType; import javax.persistence.criteria.Predicate; import 
javax.persistence.criteria.Root; import javax.persistence.criteria.Subquery; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import java.util.Optional; -import java.util.Set; -import java.util.stream.Collectors; import static ca.uhn.fhir.jpa.util.QueryParameterUtils.toPredicateArray; - /** * The HistoryBuilder is responsible for building history queries */ @@ -68,21 +67,30 @@ public class HistoryBuilder { private final Long myResourceId; private final Date myRangeStartInclusive; private final Date myRangeEndInclusive; + @Autowired protected IInterceptorBroadcaster myInterceptorBroadcaster; + @PersistenceContext(type = PersistenceContextType.TRANSACTION) protected EntityManager myEntityManager; + @Autowired private PartitionSettings myPartitionSettings; + @Autowired private FhirContext myCtx; + @Autowired private IIdHelperService myIdHelperService; /** * Constructor */ - public HistoryBuilder(@Nullable String theResourceType, @Nullable Long theResourceId, @Nullable Date theRangeStartInclusive, @Nullable Date theRangeEndInclusive) { + public HistoryBuilder( + @Nullable String theResourceType, + @Nullable Long theResourceId, + @Nullable Date theRangeStartInclusive, + @Nullable Date theRangeEndInclusive) { myResourceType = theResourceType; myResourceId = theResourceId; myRangeStartInclusive = theRangeStartInclusive; @@ -102,8 +110,12 @@ public class HistoryBuilder { } @SuppressWarnings("OptionalIsPresent") - public List fetchEntities(RequestPartitionId thePartitionId, Integer theOffset, int theFromIndex, - int theToIndex, HistorySearchStyleEnum theHistorySearchStyle) { + public List fetchEntities( + RequestPartitionId thePartitionId, + Integer theOffset, + int theFromIndex, + int theToIndex, + HistorySearchStyleEnum theHistorySearchStyle) { CriteriaBuilder cb = myEntityManager.getCriteriaBuilder(); CriteriaQuery criteriaQuery = cb.createQuery(ResourceHistoryTable.class); Root from = criteriaQuery.from(ResourceHistoryTable.class); @@ -126,8 +138,11 @@ public class HistoryBuilder { List tables = query.getResultList(); if (tables.size() > 0) { - ImmutableListMultimap resourceIdToHistoryEntries = Multimaps.index(tables, ResourceHistoryTable::getResourceId); - Set pids = resourceIdToHistoryEntries.keySet().stream().map(JpaPid::fromId).collect(Collectors.toSet()); + ImmutableListMultimap resourceIdToHistoryEntries = + Multimaps.index(tables, ResourceHistoryTable::getResourceId); + Set pids = resourceIdToHistoryEntries.keySet().stream() + .map(JpaPid::fromId) + .collect(Collectors.toSet()); PersistentIdToForcedIdMap pidToForcedId = myIdHelperService.translatePidsToForcedIds(pids); ourLog.trace("Translated IDs: {}", pidToForcedId.getResourcePersistentIdOptionalMap()); @@ -152,20 +167,28 @@ public class HistoryBuilder { return tables; } - private void addPredicatesToQuery(CriteriaBuilder theCriteriaBuilder, RequestPartitionId thePartitionId, CriteriaQuery theQuery, - Root theFrom, HistorySearchStyleEnum theHistorySearchStyle) { + private void addPredicatesToQuery( + CriteriaBuilder theCriteriaBuilder, + RequestPartitionId thePartitionId, + CriteriaQuery theQuery, + Root theFrom, + HistorySearchStyleEnum theHistorySearchStyle) { List predicates = new ArrayList<>(); if (!thePartitionId.isAllPartitions()) { if (thePartitionId.isDefaultPartition()) { - predicates.add(theCriteriaBuilder.isNull(theFrom.get("myPartitionIdValue").as(Integer.class))); + predicates.add(theCriteriaBuilder.isNull( + theFrom.get("myPartitionIdValue").as(Integer.class))); } else if 
(thePartitionId.hasDefaultPartitionId()) { predicates.add(theCriteriaBuilder.or( - theCriteriaBuilder.isNull(theFrom.get("myPartitionIdValue").as(Integer.class)), - theFrom.get("myPartitionIdValue").as(Integer.class).in(thePartitionId.getPartitionIdsWithoutDefault()) - )); + theCriteriaBuilder.isNull( + theFrom.get("myPartitionIdValue").as(Integer.class)), + theFrom.get("myPartitionIdValue") + .as(Integer.class) + .in(thePartitionId.getPartitionIdsWithoutDefault()))); } else { - predicates.add(theFrom.get("myPartitionIdValue").as(Integer.class).in(thePartitionId.getPartitionIds())); + predicates.add( + theFrom.get("myPartitionIdValue").as(Integer.class).in(thePartitionId.getPartitionIds())); } } @@ -179,14 +202,16 @@ public class HistoryBuilder { } if (myRangeStartInclusive != null) { - if(HistorySearchStyleEnum.AT == theHistorySearchStyle && myResourceId != null) { + if (HistorySearchStyleEnum.AT == theHistorySearchStyle && myResourceId != null) { addPredicateForAtQueryParameter(theCriteriaBuilder, theQuery, theFrom, predicates); } else { - predicates.add(theCriteriaBuilder.greaterThanOrEqualTo(theFrom.get("myUpdated").as(Date.class), myRangeStartInclusive)); + predicates.add(theCriteriaBuilder.greaterThanOrEqualTo( + theFrom.get("myUpdated").as(Date.class), myRangeStartInclusive)); } } if (myRangeEndInclusive != null) { - predicates.add(theCriteriaBuilder.lessThanOrEqualTo(theFrom.get("myUpdated").as(Date.class), myRangeEndInclusive)); + predicates.add(theCriteriaBuilder.lessThanOrEqualTo( + theFrom.get("myUpdated").as(Date.class), myRangeEndInclusive)); } if (predicates.size() > 0) { @@ -194,31 +219,37 @@ public class HistoryBuilder { } } - private void addPredicateForAtQueryParameter(CriteriaBuilder theCriteriaBuilder, CriteriaQuery theQuery, - Root theFrom, List thePredicates) { + private void addPredicateForAtQueryParameter( + CriteriaBuilder theCriteriaBuilder, + CriteriaQuery theQuery, + Root theFrom, + List thePredicates) { Subquery pastDateSubQuery = theQuery.subquery(Date.class); Root subQueryResourceHistory = pastDateSubQuery.from(ResourceHistoryTable.class); - Expression myUpdatedMostRecent = theCriteriaBuilder.max(subQueryResourceHistory.get("myUpdated")).as(Date.class); - Expression myUpdatedMostRecentOrDefault = theCriteriaBuilder.coalesce(myUpdatedMostRecent, - theCriteriaBuilder.literal(myRangeStartInclusive)); + Expression myUpdatedMostRecent = + theCriteriaBuilder.max(subQueryResourceHistory.get("myUpdated")).as(Date.class); + Expression myUpdatedMostRecentOrDefault = + theCriteriaBuilder.coalesce(myUpdatedMostRecent, theCriteriaBuilder.literal(myRangeStartInclusive)); - pastDateSubQuery.select(myUpdatedMostRecentOrDefault) - .where(theCriteriaBuilder.lessThanOrEqualTo(subQueryResourceHistory.get("myUpdated").as(Date.class), myRangeStartInclusive) - , theCriteriaBuilder.equal(subQueryResourceHistory.get("myResourceId"), myResourceId)); + pastDateSubQuery + .select(myUpdatedMostRecentOrDefault) + .where( + theCriteriaBuilder.lessThanOrEqualTo( + subQueryResourceHistory.get("myUpdated").as(Date.class), myRangeStartInclusive), + theCriteriaBuilder.equal(subQueryResourceHistory.get("myResourceId"), myResourceId)); - Predicate updatedDatePredicate = theCriteriaBuilder.greaterThanOrEqualTo(theFrom.get("myUpdated").as(Date.class), - pastDateSubQuery); + Predicate updatedDatePredicate = + theCriteriaBuilder.greaterThanOrEqualTo(theFrom.get("myUpdated").as(Date.class), pastDateSubQuery); thePredicates.add(updatedDatePredicate); } private void 
validateNotSearchingAllPartitions(RequestPartitionId thePartitionId) { if (myPartitionSettings.isPartitioningEnabled()) { if (thePartitionId.isAllPartitions()) { - String msg = myCtx.getLocalizer().getMessage(HistoryBuilder.class, "noSystemOrTypeHistoryForPartitionAwareServer"); + String msg = myCtx.getLocalizer() + .getMessage(HistoryBuilder.class, "noSystemOrTypeHistoryForPartitionAwareServer"); throw new InvalidRequestException(Msg.code(953) + msg); } } } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilderFactory.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilderFactory.java index 51e6ea85d71..31205fdba27 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilderFactory.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilderFactory.java @@ -23,16 +23,24 @@ import ca.uhn.fhir.jpa.config.JpaConfig; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; -import javax.annotation.Nullable; import java.util.Date; +import javax.annotation.Nullable; public class HistoryBuilderFactory { @Autowired private ApplicationContext myApplicationContext; - public HistoryBuilder newHistoryBuilder(@Nullable String theResourceType, @Nullable Long theResourceId, @Nullable Date theRangeStartInclusive, @Nullable Date theRangeEndInclusive) { - return (HistoryBuilder) myApplicationContext.getBean(JpaConfig.HISTORY_BUILDER, theResourceType, theResourceId, theRangeStartInclusive, theRangeEndInclusive); + public HistoryBuilder newHistoryBuilder( + @Nullable String theResourceType, + @Nullable Long theResourceId, + @Nullable Date theRangeStartInclusive, + @Nullable Date theRangeEndInclusive) { + return (HistoryBuilder) myApplicationContext.getBean( + JpaConfig.HISTORY_BUILDER, + theResourceType, + theResourceId, + theRangeStartInclusive, + theRangeEndInclusive); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFulltextSearchSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFulltextSearchSvc.java index 08fc03cbb50..de764c3dfdc 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFulltextSearchSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFulltextSearchSvc.java @@ -34,7 +34,6 @@ import java.util.List; public interface IFulltextSearchSvc { - /** * Search the Lucene/Elastic index for pids using params supported in theParams, * consuming entries from theParams when used to query. @@ -44,8 +43,8 @@ public interface IFulltextSearchSvc { * @param theRequestDetails The request details * @return the pid list for the matchign resources. */ - List search(String theResourceName, SearchParameterMap theParams, RequestDetails theRequestDetails); - + List search( + String theResourceName, SearchParameterMap theParams, RequestDetails theRequestDetails); /** * Query the index for a plain list (non-scrollable) iterator of results. 
@@ -56,7 +55,11 @@ public interface IFulltextSearchSvc { * @param theRequestDetails The request details * @return Iterator of result PIDs */ - ISearchQueryExecutor searchNotScrolled(String theResourceName, SearchParameterMap theParams, Integer theMaxResultsToFetch, RequestDetails theRequestDetails); + ISearchQueryExecutor searchNotScrolled( + String theResourceName, + SearchParameterMap theParams, + Integer theMaxResultsToFetch, + RequestDetails theRequestDetails); /** * Autocomplete search for NIH $expand contextDirection=existing @@ -65,11 +68,16 @@ public interface IFulltextSearchSvc { */ IBaseResource tokenAutocompleteValueSetSearch(ValueSetAutocompleteOptions theOptions); - List everything(String theResourceName, SearchParameterMap theParams, T theReferencingPid, RequestDetails theRequestDetails); + List everything( + String theResourceName, + SearchParameterMap theParams, + T theReferencingPid, + RequestDetails theRequestDetails); boolean isDisabled(); - ExtendedHSearchIndexData extractLuceneIndexData(IBaseResource theResource, ResourceIndexedSearchParams theNewParams); + ExtendedHSearchIndexData extractLuceneIndexData( + IBaseResource theResource, ResourceIndexedSearchParams theNewParams); boolean supportsSomeOf(SearchParameterMap myParams); @@ -81,7 +89,7 @@ public interface IFulltextSearchSvc { * * @param theEntity the fully populated ResourceTable entity */ - void reindex(ResourceTable theEntity); + void reindex(ResourceTable theEntity); List lastN(SearchParameterMap theParams, Integer theMaximumResults); @@ -98,7 +106,8 @@ public interface IFulltextSearchSvc { */ long count(String theResourceName, SearchParameterMap theParams); - List searchForResources(String theResourceType, SearchParameterMap theParams, RequestDetails theRequestDetails); + List searchForResources( + String theResourceType, SearchParameterMap theParams, RequestDetails theRequestDetails); boolean supportsAllOf(SearchParameterMap theParams); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IJpaStorageResourceParser.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IJpaStorageResourceParser.java index 4f67245dcbd..e706c3d6e02 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IJpaStorageResourceParser.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IJpaStorageResourceParser.java @@ -24,8 +24,8 @@ import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity; import ca.uhn.fhir.jpa.model.entity.ResourceTag; import org.hl7.fhir.instance.model.api.IBaseResource; -import javax.annotation.Nullable; import java.util.Collection; +import javax.annotation.Nullable; public interface IJpaStorageResourceParser extends IStorageResourceParser { @@ -33,13 +33,22 @@ public interface IJpaStorageResourceParser extends IStorageResourceParser { * Convert a storage entity into a FHIR resource model instance. This method may return null if the entity is not * completely flushed, including the entities history entries. 
*/ - R toResource(Class theResourceType, IBaseResourceEntity theEntity, Collection theTagList, boolean theForHistoryOperation); + R toResource( + Class theResourceType, + IBaseResourceEntity theEntity, + Collection theTagList, + boolean theForHistoryOperation); /** * Populate the metadata (Resource.meta.*) from a storage entity and other related * objects pulled from the database */ - R populateResourceMetadata(IBaseResourceEntity theEntitySource, boolean theForHistoryOperation, @Nullable Collection tagList, long theVersion, R theResourceTarget); + R populateResourceMetadata( + IBaseResourceEntity theEntitySource, + boolean theForHistoryOperation, + @Nullable Collection tagList, + long theVersion, + R theResourceTarget); /** * Populates a resource model object's metadata (Resource.meta.*) based on the diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IndexedParam.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IndexedParam.java index 532763e83de..0679cfcae41 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IndexedParam.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IndexedParam.java @@ -22,5 +22,4 @@ package ca.uhn.fhir.jpa.dao; public @interface IndexedParam { String path(); - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaPersistedResourceValidationSupport.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaPersistedResourceValidationSupport.java index f5259004527..7935d32684d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaPersistedResourceValidationSupport.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaPersistedResourceValidationSupport.java @@ -52,13 +52,13 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; -import javax.annotation.Nullable; -import javax.annotation.PostConstruct; import java.util.Arrays; import java.util.List; import java.util.Optional; import java.util.concurrent.TimeUnit; import java.util.function.Supplier; +import javax.annotation.Nullable; +import javax.annotation.PostConstruct; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_LOW; @@ -90,7 +90,7 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport // TermReadSvcImpl calls these methods as a part of its "isCodeSystemSupported" calls. 
// We should modify CachingValidationSupport to cache the results of "isXXXSupported" // at which point we could do away with this cache - private Cache myLoadCache = CacheFactory.build(TimeUnit.MINUTES.toMillis(1), 1000); + private Cache myLoadCache = CacheFactory.build(TimeUnit.MINUTES.toMillis(1), 1000); /** * Constructor @@ -103,7 +103,6 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport myNoMatch = myFhirContext.getResourceDefinition("Basic").newInstance(); } - @Override public IBaseResource fetchCodeSystem(String theSystem) { if (TermReadSvcUtil.isLoincUnversionedCodeSystem(theSystem)) { @@ -122,14 +121,13 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport * version is always pointed by the ForcedId for the no-versioned CS */ private Optional getCodeSystemCurrentVersion(UriType theUrl) { - if (!theUrl.getValueAsString().contains(LOINC_LOW)) { - return Optional.empty(); - } + if (!theUrl.getValueAsString().contains(LOINC_LOW)) { + return Optional.empty(); + } return myTermReadSvc.readCodeSystemByForcedId(LOINC_LOW); } - @Override public IBaseResource fetchValueSet(String theSystem) { if (TermReadSvcUtil.isLoincUnversionedValueSet(theSystem)) { @@ -146,16 +144,15 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport */ private Optional getValueSetCurrentVersion(UriType theUrl) { Optional vsIdOpt = TermReadSvcUtil.getValueSetId(theUrl.getValueAsString()); - if (!vsIdOpt.isPresent()) { - return Optional.empty(); - } + if (!vsIdOpt.isPresent()) { + return Optional.empty(); + } IFhirResourceDao valueSetResourceDao = myDaoRegistry.getResourceDao(myValueSetType); IBaseResource valueSet = valueSetResourceDao.read(new IdDt("ValueSet", vsIdOpt.get())); return Optional.ofNullable(valueSet); } - @Override public IBaseResource fetchStructureDefinition(String theUrl) { return fetchResource(myStructureDefinitionType, theUrl); @@ -168,7 +165,9 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport if (!myDaoRegistry.isResourceTypeSupported("StructureDefinition")) { return null; } - IBundleProvider search = myDaoRegistry.getResourceDao("StructureDefinition").search(new SearchParameterMap().setLoadSynchronousUpTo(1000)); + IBundleProvider search = myDaoRegistry + .getResourceDao("StructureDefinition") + .search(new SearchParameterMap().setLoadSynchronousUpTo(1000)); return (List) search.getResources(0, 1000); } @@ -191,17 +190,16 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport private IBaseResource doFetchResource(@Nullable Class theClass, String theUri) { if (theClass == null) { - Supplier[] fetchers = new Supplier[]{ + Supplier[] fetchers = new Supplier[] { () -> doFetchResource(ValueSet.class, theUri), () -> doFetchResource(CodeSystem.class, theUri), () -> doFetchResource(StructureDefinition.class, theUri) }; - return Arrays - .stream(fetchers) - .map(t -> t.get()) - .filter(t -> t != myNoMatch) - .findFirst() - .orElse(myNoMatch); + return Arrays.stream(fetchers) + .map(t -> t.get()) + .filter(t -> t != myNoMatch) + .findFirst() + .orElse(myNoMatch); } IdType id = new IdType(theUri); @@ -238,7 +236,8 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport params.setSort(new SortSpec("_lastUpdated").setOrder(SortOrderEnum.DESC)); search = myDaoRegistry.getResourceDao(resourceName).search(params); - if (search.isEmpty() && myFhirContext.getVersion().getVersion().isOlderThan(FhirVersionEnum.DSTU3)) { + if 
(search.isEmpty() + && myFhirContext.getVersion().getVersion().isOlderThan(FhirVersionEnum.DSTU3)) { params = new SearchParameterMap(); params.setLoadSynchronousUpTo(1); if (versionSeparator != -1) { @@ -250,7 +249,6 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport params.setSort(new SortSpec("_lastUpdated").setOrder(SortOrderEnum.DESC)); search = myDaoRegistry.getResourceDao(resourceName).search(params); } - } break; case "StructureDefinition": { @@ -301,7 +299,8 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport break; } default: - // N.B.: this code assumes that we are searching by canonical URL and that the CanonicalType in question has a URL + // N.B.: this code assumes that we are searching by canonical URL and that the CanonicalType in question + // has a URL SearchParameterMap params = new SearchParameterMap(); params.setLoadSynchronousUpTo(1); params.add("url", new UriParam(theUri)); @@ -327,7 +326,8 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport @PostConstruct public void start() { - myStructureDefinitionType = myFhirContext.getResourceDefinition("StructureDefinition").getImplementingClass(); + myStructureDefinitionType = + myFhirContext.getResourceDefinition("StructureDefinition").getImplementingClass(); myValueSetType = myFhirContext.getResourceDefinition("ValueSet").getImplementingClass(); if (myFhirContext.getVersion().getVersion().isNewerThan(FhirVersionEnum.DSTU2)) { @@ -337,7 +337,6 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport } } - public void clearCaches() { myLoadCache.invalidateAll(); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDao.java index ce178165f1c..3ab37e14008 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDao.java @@ -37,8 +37,8 @@ public class JpaResourceDao extends BaseHapiFhirResourc super(); } - public static IBaseBundle throwProcessMessageNotImplemented() { - throw new NotImplementedOperationException(Msg.code(945) + "This operation is not yet implemented on this server"); + throw new NotImplementedOperationException( + Msg.code(945) + "This operation is not yet implemented on this server"); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoBundle.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoBundle.java index ae7a9a28108..ca73c90cb78 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoBundle.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoBundle.java @@ -27,12 +27,14 @@ import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.instance.model.api.IBaseResource; -import static org.apache.commons.lang3.StringUtils.defaultString; - public class JpaResourceDaoBundle extends BaseHapiFhirResourceDao { @Override - protected void preProcessResourceForStorage(IBaseResource theResource, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails, boolean thePerformIndexing) { + protected void preProcessResourceForStorage( + IBaseResource theResource, + RequestDetails theRequestDetails, + TransactionDetails theTransactionDetails, + boolean 
thePerformIndexing) { super.preProcessResourceForStorage(theResource, theRequestDetails, theTransactionDetails, thePerformIndexing); if (getContext().getVersion().getVersion() == FhirVersionEnum.DSTU2) { @@ -41,7 +43,4 @@ public class JpaResourceDaoBundle extends BaseHapiFhirRes } } } - - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoCodeSystem.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoCodeSystem.java index e642938a39b..e4b128f608e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoCodeSystem.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoCodeSystem.java @@ -51,29 +51,37 @@ import org.hl7.fhir.r4.model.CodeableConcept; import org.hl7.fhir.r4.model.Coding; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nonnull; -import javax.annotation.PostConstruct; import java.util.ArrayList; import java.util.Date; import java.util.List; +import javax.annotation.Nonnull; +import javax.annotation.PostConstruct; import static ca.uhn.fhir.util.DatatypeUtil.toStringValue; import static org.apache.commons.lang3.StringUtils.isNotBlank; -public class JpaResourceDaoCodeSystem extends BaseHapiFhirResourceDao implements IFhirResourceDaoCodeSystem { +public class JpaResourceDaoCodeSystem extends BaseHapiFhirResourceDao + implements IFhirResourceDaoCodeSystem { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(JpaResourceDaoCodeSystem.class); + @Autowired protected ITermCodeSystemStorageSvc myTerminologyCodeSystemStorageSvc; + @Autowired protected IIdHelperService myIdHelperService; + @Autowired protected ITermDeferredStorageSvc myTermDeferredStorageSvc; + @Autowired private IValidationSupport myValidationSupport; + @Autowired private FhirContext myFhirContext; + private FhirTerser myTerser; + @Autowired private VersionCanonicalizer myVersionCanonicalizer; @@ -85,9 +93,12 @@ public class JpaResourceDaoCodeSystem extends BaseHapiF } @Override - public List findCodeSystemIdsContainingSystemAndCode(String theCode, String theSystem, RequestDetails theRequest) { + public List findCodeSystemIdsContainingSystemAndCode( + String theCode, String theSystem, RequestDetails theRequest) { List valueSetIds; - List ids = searchForIds(new SearchParameterMap(org.hl7.fhir.r4.model.CodeSystem.SP_CODE, new TokenParam(theSystem, theCode)), theRequest); + List ids = searchForIds( + new SearchParameterMap(org.hl7.fhir.r4.model.CodeSystem.SP_CODE, new TokenParam(theSystem, theCode)), + theRequest); valueSetIds = new ArrayList<>(); for (IResourcePersistentId next : ids) { IIdType id = myIdHelperService.translatePidIdToForcedId(myFhirContext, "CodeSystem", next); @@ -98,18 +109,34 @@ public class JpaResourceDaoCodeSystem extends BaseHapiF @Nonnull @Override - public IValidationSupport.LookupCodeResult lookupCode(IPrimitiveType theCode, IPrimitiveType theSystem, IBaseCoding theCoding, RequestDetails theRequestDetails) { + public IValidationSupport.LookupCodeResult lookupCode( + IPrimitiveType theCode, + IPrimitiveType theSystem, + IBaseCoding theCoding, + RequestDetails theRequestDetails) { return lookupCode(theCode, theSystem, theCoding, null, theRequestDetails); } @Nonnull @Override - public IValidationSupport.LookupCodeResult lookupCode(IPrimitiveType theCode, IPrimitiveType theSystem, IBaseCoding theCoding, IPrimitiveType theDisplayLanguage, RequestDetails theRequestDetails) { - return doLookupCode(myFhirContext, 
myTerser, myValidationSupport, theCode, theSystem, theCoding, theDisplayLanguage); + public IValidationSupport.LookupCodeResult lookupCode( + IPrimitiveType theCode, + IPrimitiveType theSystem, + IBaseCoding theCoding, + IPrimitiveType theDisplayLanguage, + RequestDetails theRequestDetails) { + return doLookupCode( + myFhirContext, myTerser, myValidationSupport, theCode, theSystem, theCoding, theDisplayLanguage); } @Override - public SubsumesResult subsumes(IPrimitiveType theCodeA, IPrimitiveType theCodeB, IPrimitiveType theSystem, IBaseCoding theCodingA, IBaseCoding theCodingB, RequestDetails theRequestDetails) { + public SubsumesResult subsumes( + IPrimitiveType theCodeA, + IPrimitiveType theCodeB, + IPrimitiveType theSystem, + IBaseCoding theCodingA, + IBaseCoding theCodingB, + RequestDetails theRequestDetails) { return myTerminologySvc.subsumes(theCodeA, theCodeB, theSystem, theCodingA, theCodingB); } @@ -118,18 +145,36 @@ public class JpaResourceDaoCodeSystem extends BaseHapiF super.preDelete(theResourceToDelete, theEntityToDelete, theRequestDetails); myTermDeferredStorageSvc.deleteCodeSystemForResource(theEntityToDelete); - } @Override - public ResourceTable updateEntity(RequestDetails theRequest, IBaseResource theResource, IBasePersistedResource theEntity, Date theDeletedTimestampOrNull, boolean thePerformIndexing, boolean theUpdateVersion, TransactionDetails theTransactionDetails, boolean theForceUpdate, boolean theCreateNewHistoryEntry) { - ResourceTable retVal = super.updateEntity(theRequest, theResource, theEntity, theDeletedTimestampOrNull, thePerformIndexing, theUpdateVersion, theTransactionDetails, theForceUpdate, theCreateNewHistoryEntry); + public ResourceTable updateEntity( + RequestDetails theRequest, + IBaseResource theResource, + IBasePersistedResource theEntity, + Date theDeletedTimestampOrNull, + boolean thePerformIndexing, + boolean theUpdateVersion, + TransactionDetails theTransactionDetails, + boolean theForceUpdate, + boolean theCreateNewHistoryEntry) { + ResourceTable retVal = super.updateEntity( + theRequest, + theResource, + theEntity, + theDeletedTimestampOrNull, + thePerformIndexing, + theUpdateVersion, + theTransactionDetails, + theForceUpdate, + theCreateNewHistoryEntry); if (!retVal.isUnchangedInCurrentOperation()) { org.hl7.fhir.r4.model.CodeSystem cs = myVersionCanonicalizer.codeSystemToCanonical(theResource); addPidToResource(theEntity, cs); - myTerminologyCodeSystemStorageSvc.storeNewCodeSystemVersionIfNeeded(cs, (ResourceTable) theEntity, theRequest); + myTerminologyCodeSystemStorageSvc.storeNewCodeSystemVersionIfNeeded( + cs, (ResourceTable) theEntity, theRequest); } return retVal; @@ -137,10 +182,19 @@ public class JpaResourceDaoCodeSystem extends BaseHapiF @Nonnull @Override - public CodeValidationResult validateCode(IIdType theCodeSystemId, IPrimitiveType theCodeSystemUrl, IPrimitiveType theVersion, IPrimitiveType theCode, IPrimitiveType theDisplay, IBaseCoding theCoding, IBaseDatatype theCodeableConcept, RequestDetails theRequestDetails) { + public CodeValidationResult validateCode( + IIdType theCodeSystemId, + IPrimitiveType theCodeSystemUrl, + IPrimitiveType theVersion, + IPrimitiveType theCode, + IPrimitiveType theDisplay, + IBaseCoding theCoding, + IBaseDatatype theCodeableConcept, + RequestDetails theRequestDetails) { CodeableConcept codeableConcept = myVersionCanonicalizer.codeableConceptToCanonical(theCodeableConcept); - boolean haveCodeableConcept = codeableConcept != null && codeableConcept.getCoding().size() > 0; + boolean 
haveCodeableConcept = + codeableConcept != null && codeableConcept.getCoding().size() > 0; Coding coding = myVersionCanonicalizer.codingToCanonical(theCoding); boolean haveCoding = coding != null && !coding.isEmpty(); @@ -149,10 +203,12 @@ public class JpaResourceDaoCodeSystem extends BaseHapiF boolean haveCode = isNotBlank(code); if (!haveCodeableConcept && !haveCoding && !haveCode) { - throw new InvalidRequestException(Msg.code(906) + "No code, coding, or codeableConcept provided to validate."); + throw new InvalidRequestException( + Msg.code(906) + "No code, coding, or codeableConcept provided to validate."); } if (!LogicUtil.multiXor(haveCodeableConcept, haveCoding, haveCode)) { - throw new InvalidRequestException(Msg.code(907) + "$validate-code can only validate (code) OR (coding) OR (codeableConcept)"); + throw new InvalidRequestException( + Msg.code(907) + "$validate-code can only validate (code) OR (coding) OR (codeableConcept)"); } String codeSystemUrl; @@ -162,7 +218,8 @@ public class JpaResourceDaoCodeSystem extends BaseHapiF } else if (isNotBlank(toStringValue(theCodeSystemUrl))) { codeSystemUrl = toStringValue(theCodeSystemUrl); } else { - throw new InvalidRequestException(Msg.code(908) + "Either CodeSystem ID or CodeSystem identifier must be provided. Unable to validate."); + throw new InvalidRequestException(Msg.code(908) + + "Either CodeSystem ID or CodeSystem identifier must be provided. Unable to validate."); } if (haveCodeableConcept) { @@ -171,13 +228,15 @@ public class JpaResourceDaoCodeSystem extends BaseHapiF Coding nextCoding = codeableConcept.getCoding().get(i); if (nextCoding.hasSystem()) { if (!codeSystemUrl.equalsIgnoreCase(nextCoding.getSystem())) { - throw new InvalidRequestException(Msg.code(909) + "Coding.system '" + nextCoding.getSystem() + "' does not equal with CodeSystem.url '" + codeSystemUrl + "'. Unable to validate."); + throw new InvalidRequestException(Msg.code(909) + "Coding.system '" + nextCoding.getSystem() + + "' does not equal with CodeSystem.url '" + codeSystemUrl + "'. Unable to validate."); } codeSystemUrl = nextCoding.getSystem(); } code = nextCoding.getCode(); String display = nextCoding.getDisplay(); - CodeValidationResult nextValidation = codeSystemValidateCode(codeSystemUrl, toStringValue(theVersion), code, display); + CodeValidationResult nextValidation = + codeSystemValidateCode(codeSystemUrl, toStringValue(theVersion), code, display); anyValidation = nextValidation; if (nextValidation.isOk()) { return nextValidation; @@ -187,7 +246,8 @@ public class JpaResourceDaoCodeSystem extends BaseHapiF } else if (haveCoding) { if (coding.hasSystem()) { if (!codeSystemUrl.equalsIgnoreCase(coding.getSystem())) { - throw new InvalidRequestException(Msg.code(910) + "Coding.system '" + coding.getSystem() + "' does not equal with CodeSystem.url '" + codeSystemUrl + "'. Unable to validate."); + throw new InvalidRequestException(Msg.code(910) + "Coding.system '" + coding.getSystem() + + "' does not equal with CodeSystem.url '" + codeSystemUrl + "'. 
Unable to validate."); } codeSystemUrl = coding.getSystem(); } @@ -198,35 +258,48 @@ public class JpaResourceDaoCodeSystem extends BaseHapiF String display = toStringValue(theDisplay); return codeSystemValidateCode(codeSystemUrl, toStringValue(theVersion), code, display); } - } - private CodeValidationResult codeSystemValidateCode(String theCodeSystemUrl, String theVersion, String theCode, String theDisplay) { + private CodeValidationResult codeSystemValidateCode( + String theCodeSystemUrl, String theVersion, String theCode, String theDisplay) { ValidationSupportContext context = new ValidationSupportContext(myValidationSupport); ConceptValidationOptions options = new ConceptValidationOptions(); options.setValidateDisplay(isNotBlank(theDisplay)); String codeSystemUrl = createVersionedSystemIfVersionIsPresent(theCodeSystemUrl, theVersion); - CodeValidationResult retVal = myValidationSupport.validateCode(context, options, codeSystemUrl, theCode, theDisplay, null); + CodeValidationResult retVal = + myValidationSupport.validateCode(context, options, codeSystemUrl, theCode, theDisplay, null); if (retVal == null) { retVal = new CodeValidationResult(); - retVal.setMessage("Terminology service was unable to provide validation for " + codeSystemUrl + "#" + theCode); + retVal.setMessage( + "Terminology service was unable to provide validation for " + codeSystemUrl + "#" + theCode); } return retVal; } - public static IValidationSupport.LookupCodeResult doLookupCode(FhirContext theFhirContext, FhirTerser theFhirTerser, IValidationSupport theValidationSupport, IPrimitiveType theCode, IPrimitiveType theSystem, IBaseCoding theCoding, IPrimitiveType theDisplayLanguage) { - boolean haveCoding = theCoding != null && isNotBlank(extractCodingSystem(theCoding)) && isNotBlank(extractCodingCode(theCoding)); + public static IValidationSupport.LookupCodeResult doLookupCode( + FhirContext theFhirContext, + FhirTerser theFhirTerser, + IValidationSupport theValidationSupport, + IPrimitiveType theCode, + IPrimitiveType theSystem, + IBaseCoding theCoding, + IPrimitiveType theDisplayLanguage) { + boolean haveCoding = theCoding != null + && isNotBlank(extractCodingSystem(theCoding)) + && isNotBlank(extractCodingCode(theCoding)); boolean haveCode = theCode != null && theCode.isEmpty() == false; boolean haveSystem = theSystem != null && theSystem.isEmpty() == false; boolean haveDisplayLanguage = theDisplayLanguage != null && theDisplayLanguage.isEmpty() == false; if (!haveCoding && !(haveSystem && haveCode)) { - throw new InvalidRequestException(Msg.code(1126) + "No code, coding, or codeableConcept provided to validate"); + throw new InvalidRequestException( + Msg.code(1126) + "No code, coding, or codeableConcept provided to validate"); } if (!LogicUtil.multiXor(haveCoding, (haveSystem && haveCode)) || (haveSystem != haveCode)) { - throw new InvalidRequestException(Msg.code(1127) + "$lookup can only validate (system AND code) OR (coding.system AND coding.code)"); + throw new InvalidRequestException( + Msg.code(1127) + "$lookup can only validate (system AND code) OR (coding.system AND coding.code)"); } String code; @@ -253,11 +326,11 @@ public class JpaResourceDaoCodeSystem extends BaseHapiF if (theValidationSupport.isCodeSystemSupported(new ValidationSupportContext(theValidationSupport), system)) { ourLog.info("Code system {} is supported", system); - IValidationSupport.LookupCodeResult retVal = theValidationSupport.lookupCode(new ValidationSupportContext(theValidationSupport), system, code, displayLanguage); + 
IValidationSupport.LookupCodeResult retVal = theValidationSupport.lookupCode( + new ValidationSupportContext(theValidationSupport), system, code, displayLanguage); if (retVal != null) { return retVal; } - } // We didn't find it.. @@ -272,7 +345,8 @@ public class JpaResourceDaoCodeSystem extends BaseHapiF return theCoding.getCode(); } - private static String extractCodingVersion(FhirContext theFhirContext, FhirTerser theFhirTerser, IBaseCoding theCoding) { + private static String extractCodingVersion( + FhirContext theFhirContext, FhirTerser theFhirTerser, IBaseCoding theCoding) { if (theFhirContext.getVersion().getVersion().isOlderThan(FhirVersionEnum.DSTU3)) { return null; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoComposition.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoComposition.java index d7a4429e912..1ff023172a6 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoComposition.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoComposition.java @@ -31,13 +31,21 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.servlet.http.HttpServletRequest; import java.util.Collections; +import javax.servlet.http.HttpServletRequest; -public class JpaResourceDaoComposition extends BaseHapiFhirResourceDao implements IFhirResourceDaoComposition { +public class JpaResourceDaoComposition extends BaseHapiFhirResourceDao + implements IFhirResourceDaoComposition { @Override - public IBundleProvider getDocumentForComposition(HttpServletRequest theServletRequest, IIdType theId, IPrimitiveType theCount, IPrimitiveType theOffset, DateRangeParam theLastUpdate, SortSpec theSort, RequestDetails theRequestDetails) { + public IBundleProvider getDocumentForComposition( + HttpServletRequest theServletRequest, + IIdType theId, + IPrimitiveType theCount, + IPrimitiveType theOffset, + DateRangeParam theLastUpdate, + SortSpec theSort, + RequestDetails theRequestDetails) { SearchParameterMap paramMap = new SearchParameterMap(); if (theCount != null) { paramMap.setCount(theCount.getValue()); @@ -54,4 +62,3 @@ public class JpaResourceDaoComposition extends BaseHapi return search(paramMap, theRequestDetails); } } - diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoConceptMap.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoConceptMap.java index e9bc7edcac7..758c9a204fb 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoConceptMap.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoConceptMap.java @@ -35,24 +35,45 @@ import org.springframework.beans.factory.annotation.Autowired; import java.util.Date; -public class JpaResourceDaoConceptMap extends JpaResourceDao implements IFhirResourceDaoConceptMap { +public class JpaResourceDaoConceptMap extends JpaResourceDao + implements IFhirResourceDaoConceptMap { @Autowired private ITermConceptMappingSvc myTermConceptMappingSvc; + @Autowired private IValidationSupport myValidationSupport; + @Autowired private VersionCanonicalizer myVersionCanonicalizer; @Override - public TranslateConceptResults translate(TranslationRequest theTranslationRequest, RequestDetails theRequestDetails) { + public TranslateConceptResults translate( + TranslationRequest theTranslationRequest, RequestDetails 
theRequestDetails) { IValidationSupport.TranslateCodeRequest translateCodeRequest = theTranslationRequest.asTranslateCodeRequest(); return myValidationSupport.translateConcept(translateCodeRequest); } @Override - public ResourceTable updateEntity(RequestDetails theRequestDetails, IBaseResource theResource, IBasePersistedResource theEntity, Date theDeletedTimestampOrNull, boolean thePerformIndexing, - boolean theUpdateVersion, TransactionDetails theTransactionDetails, boolean theForceUpdate, boolean theCreateNewHistoryEntry) { - ResourceTable retVal = super.updateEntity(theRequestDetails, theResource, theEntity, theDeletedTimestampOrNull, thePerformIndexing, theUpdateVersion, theTransactionDetails, theForceUpdate, theCreateNewHistoryEntry); + public ResourceTable updateEntity( + RequestDetails theRequestDetails, + IBaseResource theResource, + IBasePersistedResource theEntity, + Date theDeletedTimestampOrNull, + boolean thePerformIndexing, + boolean theUpdateVersion, + TransactionDetails theTransactionDetails, + boolean theForceUpdate, + boolean theCreateNewHistoryEntry) { + ResourceTable retVal = super.updateEntity( + theRequestDetails, + theResource, + theEntity, + theDeletedTimestampOrNull, + thePerformIndexing, + theUpdateVersion, + theTransactionDetails, + theForceUpdate, + theCreateNewHistoryEntry); boolean entityWasSaved = !retVal.isUnchangedInCurrentOperation(); boolean shouldProcessUpdate = entityWasSaved && thePerformIndexing; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoEncounter.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoEncounter.java index 87bb3cc46ef..9c365a43b7d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoEncounter.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoEncounter.java @@ -31,24 +31,33 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.servlet.http.HttpServletRequest; import java.util.Collections; +import javax.servlet.http.HttpServletRequest; -public class JpaResourceDaoEncounter extends BaseHapiFhirResourceDao implements IFhirResourceDaoEncounter { +public class JpaResourceDaoEncounter extends BaseHapiFhirResourceDao + implements IFhirResourceDaoEncounter { @Override - public IBundleProvider encounterInstanceEverything(HttpServletRequest theServletRequest, IIdType theId, IPrimitiveType theCount, IPrimitiveType theOffset, DateRangeParam theLastUpdated, SortSpec theSort) { + public IBundleProvider encounterInstanceEverything( + HttpServletRequest theServletRequest, + IIdType theId, + IPrimitiveType theCount, + IPrimitiveType theOffset, + DateRangeParam theLastUpdated, + SortSpec theSort) { SearchParameterMap paramMap = new SearchParameterMap(); if (theCount != null) { paramMap.setCount(theCount.getValue()); } if (theOffset != null) { - throw new IllegalArgumentException(Msg.code(1128) + "Everything operation does not support offset searching"); + throw new IllegalArgumentException( + Msg.code(1128) + "Everything operation does not support offset searching"); } -// paramMap.setRevIncludes(Collections.singleton(IResource.INCLUDE_ALL.asRecursive())); + // paramMap.setRevIncludes(Collections.singleton(IResource.INCLUDE_ALL.asRecursive())); paramMap.setIncludes(Collections.singleton(IBaseResource.INCLUDE_ALL.asRecursive())); - paramMap.setEverythingMode(theId != null ? 
EverythingModeEnum.ENCOUNTER_INSTANCE : EverythingModeEnum.ENCOUNTER_TYPE); + paramMap.setEverythingMode( + theId != null ? EverythingModeEnum.ENCOUNTER_INSTANCE : EverythingModeEnum.ENCOUNTER_TYPE); paramMap.setSort(theSort); paramMap.setLastUpdated(theLastUpdated); if (theId != null) { @@ -59,8 +68,12 @@ public class JpaResourceDaoEncounter extends BaseHapiFh } @Override - public IBundleProvider encounterTypeEverything(HttpServletRequest theServletRequest, IPrimitiveType theCount, IPrimitiveType theOffset, DateRangeParam theLastUpdated, SortSpec theSort) { + public IBundleProvider encounterTypeEverything( + HttpServletRequest theServletRequest, + IPrimitiveType theCount, + IPrimitiveType theOffset, + DateRangeParam theLastUpdated, + SortSpec theSort) { return encounterInstanceEverything(theServletRequest, null, theCount, theOffset, theLastUpdated, theSort); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoObservation.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoObservation.java index 13e938f1c9b..6181c2dedfc 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoObservation.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoObservation.java @@ -42,30 +42,44 @@ import org.hl7.fhir.r4.model.Observation; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.support.TransactionTemplate; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.PersistenceContextType; -import javax.servlet.http.HttpServletResponse; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.TreeMap; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; +import javax.persistence.PersistenceContextType; +import javax.servlet.http.HttpServletResponse; -public class JpaResourceDaoObservation extends BaseHapiFhirResourceDao implements IFhirResourceDaoObservation { +public class JpaResourceDaoObservation extends BaseHapiFhirResourceDao + implements IFhirResourceDaoObservation { @PersistenceContext(type = PersistenceContextType.TRANSACTION) protected EntityManager myEntityManager; + @Autowired ObservationLastNIndexPersistSvc myObservationLastNIndexPersistSvc; + @Autowired private IRequestPartitionHelperSvc myRequestPartitionHelperService; @Override - public IBundleProvider observationsLastN(SearchParameterMap theSearchParameterMap, RequestDetails theRequestDetails, HttpServletResponse theServletResponse) { + public IBundleProvider observationsLastN( + SearchParameterMap theSearchParameterMap, + RequestDetails theRequestDetails, + HttpServletResponse theServletResponse) { updateSearchParamsForLastn(theSearchParameterMap, theRequestDetails); - RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType(theRequestDetails, getResourceName(), theSearchParameterMap, null); - return mySearchCoordinatorSvc.registerSearch(this, theSearchParameterMap, getResourceName(), new CacheControlDirective().parse(theRequestDetails.getHeaders(Constants.HEADER_CACHE_CONTROL)), theRequestDetails, requestPartitionId); + RequestPartitionId requestPartitionId = + myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType( + theRequestDetails, getResourceName(), theSearchParameterMap, null); + return mySearchCoordinatorSvc.registerSearch( + this, + 
theSearchParameterMap, + getResourceName(), + new CacheControlDirective().parse(theRequestDetails.getHeaders(Constants.HEADER_CACHE_CONTROL)), + theRequestDetails, + requestPartitionId); } private String getEffectiveParamName() { @@ -85,18 +99,48 @@ public class JpaResourceDaoObservation extends BaseHapi } @Override - public ResourceTable updateEntity(RequestDetails theRequest, IBaseResource theResource, IBasePersistedResource theEntity, Date theDeletedTimestampOrNull, boolean thePerformIndexing, - boolean theUpdateVersion, TransactionDetails theTransactionDetails, boolean theForceUpdate, boolean theCreateNewHistoryEntry) { - return updateObservationEntity(theRequest, theResource, theEntity, theDeletedTimestampOrNull, - thePerformIndexing, theUpdateVersion, theTransactionDetails, theForceUpdate, - theCreateNewHistoryEntry); + public ResourceTable updateEntity( + RequestDetails theRequest, + IBaseResource theResource, + IBasePersistedResource theEntity, + Date theDeletedTimestampOrNull, + boolean thePerformIndexing, + boolean theUpdateVersion, + TransactionDetails theTransactionDetails, + boolean theForceUpdate, + boolean theCreateNewHistoryEntry) { + return updateObservationEntity( + theRequest, + theResource, + theEntity, + theDeletedTimestampOrNull, + thePerformIndexing, + theUpdateVersion, + theTransactionDetails, + theForceUpdate, + theCreateNewHistoryEntry); } - protected ResourceTable updateObservationEntity(RequestDetails theRequest, IBaseResource theResource, IBasePersistedResource theEntity, - Date theDeletedTimestampOrNull, boolean thePerformIndexing, boolean theUpdateVersion, - TransactionDetails theTransactionDetails, boolean theForceUpdate, boolean theCreateNewHistoryEntry) { - ResourceTable retVal = super.updateEntity(theRequest, theResource, theEntity, theDeletedTimestampOrNull, thePerformIndexing, theUpdateVersion, - theTransactionDetails, theForceUpdate, theCreateNewHistoryEntry); + protected ResourceTable updateObservationEntity( + RequestDetails theRequest, + IBaseResource theResource, + IBasePersistedResource theEntity, + Date theDeletedTimestampOrNull, + boolean thePerformIndexing, + boolean theUpdateVersion, + TransactionDetails theTransactionDetails, + boolean theForceUpdate, + boolean theCreateNewHistoryEntry) { + ResourceTable retVal = super.updateEntity( + theRequest, + theResource, + theEntity, + theDeletedTimestampOrNull, + thePerformIndexing, + theUpdateVersion, + theTransactionDetails, + theForceUpdate, + theCreateNewHistoryEntry); if (getStorageSettings().isLastNEnabled()) { if (!retVal.isUnchangedInCurrentOperation()) { @@ -112,32 +156,44 @@ public class JpaResourceDaoObservation extends BaseHapi return retVal; } - protected void updateSearchParamsForLastn(SearchParameterMap theSearchParameterMap, RequestDetails theRequestDetails) { + protected void updateSearchParamsForLastn( + SearchParameterMap theSearchParameterMap, RequestDetails theRequestDetails) { if (!isPagingProviderDatabaseBacked(theRequestDetails)) { theSearchParameterMap.setLoadSynchronous(true); } theSearchParameterMap.setLastN(true); SortSpec effectiveDtm = new SortSpec(getEffectiveParamName()).setOrder(SortOrderEnum.DESC); - SortSpec observationCode = new SortSpec(getCodeParamName()).setOrder(SortOrderEnum.ASC).setChain(effectiveDtm); - if (theSearchParameterMap.containsKey(getSubjectParamName()) || theSearchParameterMap.containsKey(getPatientParamName())) { + SortSpec observationCode = + new SortSpec(getCodeParamName()).setOrder(SortOrderEnum.ASC).setChain(effectiveDtm); + if 
(theSearchParameterMap.containsKey(getSubjectParamName()) + || theSearchParameterMap.containsKey(getPatientParamName())) { - new TransactionTemplate(myPlatformTransactionManager).executeWithoutResult(tx -> fixSubjectParamsOrderForLastn(theSearchParameterMap, theRequestDetails)); + new TransactionTemplate(myPlatformTransactionManager) + .executeWithoutResult( + tx -> fixSubjectParamsOrderForLastn(theSearchParameterMap, theRequestDetails)); - theSearchParameterMap.setSort(new SortSpec(getSubjectParamName()).setOrder(SortOrderEnum.ASC).setChain(observationCode)); + theSearchParameterMap.setSort(new SortSpec(getSubjectParamName()) + .setOrder(SortOrderEnum.ASC) + .setChain(observationCode)); } else { theSearchParameterMap.setSort(observationCode); } } - private void fixSubjectParamsOrderForLastn(SearchParameterMap theSearchParameterMap, RequestDetails theRequestDetails) { - // Need to ensure that the patient/subject parameters are sorted in the SearchParameterMap to ensure correct ordering of - // the output. The reason for this is that observations are indexed by patient/subject forced ID, but then ordered in the + private void fixSubjectParamsOrderForLastn( + SearchParameterMap theSearchParameterMap, RequestDetails theRequestDetails) { + // Need to ensure that the patient/subject parameters are sorted in the SearchParameterMap to ensure correct + // ordering of + // the output. The reason for this is that observations are indexed by patient/subject forced ID, but then + // ordered in the // final result set by subject/patient resource PID. TreeMap orderedSubjectReferenceMap = new TreeMap<>(); if (theSearchParameterMap.containsKey(getSubjectParamName())) { - RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType(theRequestDetails, getResourceName(), theSearchParameterMap, null); + RequestPartitionId requestPartitionId = + myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType( + theRequestDetails, getResourceName(), theSearchParameterMap, null); List> patientParams = new ArrayList<>(); if (theSearchParameterMap.get(getPatientParamName()) != null) { @@ -151,10 +207,12 @@ public class JpaResourceDaoObservation extends BaseHapi for (IQueryParameterType nextOr : nextPatientList) { if (nextOr instanceof ReferenceParam) { ReferenceParam ref = (ReferenceParam) nextOr; - JpaPid pid = myIdHelperService.resolveResourcePersistentIds(requestPartitionId, ref.getResourceType(), ref.getIdPart()); + JpaPid pid = myIdHelperService.resolveResourcePersistentIds( + requestPartitionId, ref.getResourceType(), ref.getIdPart()); orderedSubjectReferenceMap.put(pid.getId(), nextOr); } else { - throw new IllegalArgumentException(Msg.code(942) + "Invalid token type (expecting ReferenceParam): " + nextOr.getClass()); + throw new IllegalArgumentException( + Msg.code(942) + "Invalid token type (expecting ReferenceParam): " + nextOr.getClass()); } } } @@ -164,10 +222,10 @@ public class JpaResourceDaoObservation extends BaseHapi // Subject PIDs ordered - so create 'OR' list of subjects for lastN operation ReferenceOrListParam orList = new ReferenceOrListParam(); - orderedSubjectReferenceMap.keySet().forEach(key -> orList.addOr((ReferenceParam) orderedSubjectReferenceMap.get(key))); + orderedSubjectReferenceMap + .keySet() + .forEach(key -> orList.addOr((ReferenceParam) orderedSubjectReferenceMap.get(key))); theSearchParameterMap.add(getSubjectParamName(), orList); } - } - } diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoPatient.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoPatient.java index d3e57be01fa..d2256fee29a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoPatient.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoPatient.java @@ -43,31 +43,34 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; -import javax.servlet.http.HttpServletRequest; import java.util.Arrays; import java.util.Collections; +import javax.servlet.http.HttpServletRequest; -public class JpaResourceDaoPatient extends BaseHapiFhirResourceDao implements IFhirResourceDaoPatient { +public class JpaResourceDaoPatient extends BaseHapiFhirResourceDao + implements IFhirResourceDaoPatient { @Autowired private IRequestPartitionHelperSvc myPartitionHelperSvc; - private IBundleProvider doEverythingOperation(TokenOrListParam theIds, - IPrimitiveType theCount, - IPrimitiveType theOffset, - DateRangeParam theLastUpdated, - SortSpec theSort, - StringAndListParam theContent, - StringAndListParam theNarrative, - StringAndListParam theFilter, - StringAndListParam theTypes, - RequestDetails theRequest) { + private IBundleProvider doEverythingOperation( + TokenOrListParam theIds, + IPrimitiveType theCount, + IPrimitiveType theOffset, + DateRangeParam theLastUpdated, + SortSpec theSort, + StringAndListParam theContent, + StringAndListParam theNarrative, + StringAndListParam theFilter, + StringAndListParam theTypes, + RequestDetails theRequest) { SearchParameterMap paramMap = new SearchParameterMap(); if (theCount != null) { paramMap.setCount(theCount.getValue()); } if (theOffset != null) { - throw new IllegalArgumentException(Msg.code(1106) + "Everything operation does not support offset searching"); + throw new IllegalArgumentException( + Msg.code(1106) + "Everything operation does not support offset searching"); } if (theContent != null) { paramMap.add(Constants.PARAM_CONTENT, theContent); @@ -81,7 +84,10 @@ public class JpaResourceDaoPatient extends BaseHapiFhir paramMap.setIncludes(Collections.singleton(IResource.INCLUDE_ALL.asRecursive())); } - paramMap.setEverythingMode(theIds != null && theIds.getValuesAsQueryTokens().size() == 1 ? EverythingModeEnum.PATIENT_INSTANCE : EverythingModeEnum.PATIENT_TYPE); + paramMap.setEverythingMode( + theIds != null && theIds.getValuesAsQueryTokens().size() == 1 + ? 
EverythingModeEnum.PATIENT_INSTANCE + : EverythingModeEnum.PATIENT_TYPE); paramMap.setSort(theSort); paramMap.setLastUpdated(theLastUpdated); if (theIds != null) { @@ -98,26 +104,55 @@ public class JpaResourceDaoPatient extends BaseHapiFhir paramMap.setLoadSynchronous(true); } - RequestPartitionId requestPartitionId = myPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequest, getResourceName(), paramMap, null); - return mySearchCoordinatorSvc.registerSearch(this, - paramMap, - getResourceName(), - new CacheControlDirective().parse(theRequest.getHeaders(Constants.HEADER_CACHE_CONTROL)), - theRequest, - requestPartitionId); + RequestPartitionId requestPartitionId = myPartitionHelperSvc.determineReadPartitionForRequestForSearchType( + theRequest, getResourceName(), paramMap, null); + return mySearchCoordinatorSvc.registerSearch( + this, + paramMap, + getResourceName(), + new CacheControlDirective().parse(theRequest.getHeaders(Constants.HEADER_CACHE_CONTROL)), + theRequest, + requestPartitionId); } @Override @Transactional(propagation = Propagation.SUPPORTS) - public IBundleProvider patientInstanceEverything(HttpServletRequest theServletRequest, RequestDetails theRequestDetails, PatientEverythingParameters theQueryParams, IIdType theId) { + public IBundleProvider patientInstanceEverything( + HttpServletRequest theServletRequest, + RequestDetails theRequestDetails, + PatientEverythingParameters theQueryParams, + IIdType theId) { TokenOrListParam id = new TokenOrListParam().add(new TokenParam(theId.getIdPart())); - return doEverythingOperation(id, theQueryParams.getCount(), theQueryParams.getOffset(), theQueryParams.getLastUpdated(), theQueryParams.getSort(), theQueryParams.getContent(), theQueryParams.getNarrative(), theQueryParams.getFilter(), theQueryParams.getTypes(), theRequestDetails); + return doEverythingOperation( + id, + theQueryParams.getCount(), + theQueryParams.getOffset(), + theQueryParams.getLastUpdated(), + theQueryParams.getSort(), + theQueryParams.getContent(), + theQueryParams.getNarrative(), + theQueryParams.getFilter(), + theQueryParams.getTypes(), + theRequestDetails); } @Override @Transactional(propagation = Propagation.SUPPORTS) - public IBundleProvider patientTypeEverything(HttpServletRequest theServletRequest, RequestDetails theRequestDetails, PatientEverythingParameters theQueryParams, TokenOrListParam theId) { - return doEverythingOperation(theId, theQueryParams.getCount(), theQueryParams.getOffset(), theQueryParams.getLastUpdated(), theQueryParams.getSort(), theQueryParams.getContent(), theQueryParams.getNarrative(), theQueryParams.getFilter(), theQueryParams.getTypes(), theRequestDetails); + public IBundleProvider patientTypeEverything( + HttpServletRequest theServletRequest, + RequestDetails theRequestDetails, + PatientEverythingParameters theQueryParams, + TokenOrListParam theId) { + return doEverythingOperation( + theId, + theQueryParams.getCount(), + theQueryParams.getOffset(), + theQueryParams.getLastUpdated(), + theQueryParams.getSort(), + theQueryParams.getContent(), + theQueryParams.getNarrative(), + theQueryParams.getFilter(), + theQueryParams.getTypes(), + theRequestDetails); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoSearchParameter.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoSearchParameter.java index 566feac6713..816ebb1b0f7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoSearchParameter.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoSearchParameter.java @@ -20,13 +20,12 @@ package ca.uhn.fhir.jpa.dao; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoSearchParameter; -import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.dao.validation.SearchParameterDaoValidator; +import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.hapi.converters.canonical.VersionCanonicalizer; import com.google.common.annotations.VisibleForTesting; import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.r5.model.CodeType; import org.hl7.fhir.r5.model.Enumeration; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.support.TransactionSynchronization; @@ -36,7 +35,8 @@ import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; -public class JpaResourceDaoSearchParameter extends BaseHapiFhirResourceDao implements IFhirResourceDaoSearchParameter { +public class JpaResourceDaoSearchParameter extends BaseHapiFhirResourceDao + implements IFhirResourceDaoSearchParameter { private final AtomicBoolean myCacheReloadTriggered = new AtomicBoolean(false); @@ -70,12 +70,14 @@ public class JpaResourceDaoSearchParameter extends Base // N.B. Don't do this on the canonicalized version Boolean reindex = theResource != null ? CURRENTLY_REINDEXING.get(theResource) : null; - org.hl7.fhir.r5.model.SearchParameter searchParameter = myVersionCanonicalizer.searchParameterToCanonical(theResource); - List base = theResource != null ? searchParameter.getBase().stream().map(Enumeration::getCode).collect(Collectors.toList()) : null; + org.hl7.fhir.r5.model.SearchParameter searchParameter = + myVersionCanonicalizer.searchParameterToCanonical(theResource); + List base = theResource != null + ? 
searchParameter.getBase().stream().map(Enumeration::getCode).collect(Collectors.toList()) + : null; requestReindexForRelatedResources(reindex, base, theRequestDetails); } - @Override protected void postPersist(ResourceTable theEntity, T theResource, RequestDetails theRequestDetails) { super.postPersist(theEntity, theResource, theRequestDetails); @@ -102,7 +104,8 @@ public class JpaResourceDaoSearchParameter extends Base } public void validateSearchParam(IBaseResource theResource) { - org.hl7.fhir.r5.model.SearchParameter searchParameter = myVersionCanonicalizer.searchParameterToCanonical(theResource); + org.hl7.fhir.r5.model.SearchParameter searchParameter = + myVersionCanonicalizer.searchParameterToCanonical(theResource); mySearchParameterDaoValidator.validate(searchParameter); } @@ -112,7 +115,7 @@ public class JpaResourceDaoSearchParameter extends Base } @VisibleForTesting - public void setSearchParameterDaoValidatorForUnitTest(SearchParameterDaoValidator theSearchParameterDaoValidator){ + public void setSearchParameterDaoValidatorForUnitTest(SearchParameterDaoValidator theSearchParameterDaoValidator) { mySearchParameterDaoValidator = theSearchParameterDaoValidator; } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoStructureDefinition.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoStructureDefinition.java index 61042ea28de..d8e1c4efd3b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoStructureDefinition.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoStructureDefinition.java @@ -26,16 +26,17 @@ import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBaseResource; import org.springframework.beans.factory.annotation.Autowired; -public class JpaResourceDaoStructureDefinition extends BaseHapiFhirResourceDao implements IFhirResourceDaoStructureDefinition { +public class JpaResourceDaoStructureDefinition extends BaseHapiFhirResourceDao + implements IFhirResourceDaoStructureDefinition { @Autowired private IValidationSupport myValidationSupport; @Override public T generateSnapshot(T theInput, String theUrl, String theWebUrl, String theName) { - T output = (T) myValidationSupport.generateSnapshot(new ValidationSupportContext(myValidationSupport), theInput, theUrl, theWebUrl, theName); + T output = (T) myValidationSupport.generateSnapshot( + new ValidationSupportContext(myValidationSupport), theInput, theUrl, theWebUrl, theName); Validate.notNull(output); return output; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoValueSet.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoValueSet.java index fa3191d08fd..8f508d56455 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoValueSet.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoValueSet.java @@ -54,11 +54,14 @@ import static ca.uhn.fhir.jpa.provider.ValueSetOperationProvider.createValueSetE import static ca.uhn.fhir.util.DatatypeUtil.toStringValue; import static org.apache.commons.lang3.StringUtils.isNotBlank; -public class JpaResourceDaoValueSet extends BaseHapiFhirResourceDao implements IFhirResourceDaoValueSet { +public class JpaResourceDaoValueSet extends BaseHapiFhirResourceDao + implements IFhirResourceDaoValueSet { @Autowired private IValidationSupport myValidationSupport; + @Autowired private VersionCanonicalizer 
myVersionCanonicalizer; + @Autowired(required = false) private IFulltextSearchSvc myFulltextSearch; @@ -71,18 +74,32 @@ public class JpaResourceDaoValueSet extends BaseHapiFhi @SuppressWarnings("unchecked") @Override public T expandByIdentifier(String theUri, ValueSetExpansionOptions theOptions) { - IValidationSupport.ValueSetExpansionOutcome expansionOutcome = myValidationSupport.expandValueSet(new ValidationSupportContext(myValidationSupport), theOptions, theUri); + IValidationSupport.ValueSetExpansionOutcome expansionOutcome = myValidationSupport.expandValueSet( + new ValidationSupportContext(myValidationSupport), theOptions, theUri); return extractValueSetOrThrowException(expansionOutcome); } @Override public T expand(T theSource, ValueSetExpansionOptions theOptions) { - IValidationSupport.ValueSetExpansionOutcome expansionOutcome = myValidationSupport.expandValueSet(new ValidationSupportContext(myValidationSupport), theOptions, theSource); + IValidationSupport.ValueSetExpansionOutcome expansionOutcome = myValidationSupport.expandValueSet( + new ValidationSupportContext(myValidationSupport), theOptions, theSource); return extractValueSetOrThrowException(expansionOutcome); } @Override - public T expand(IIdType theId, T theValueSet, IPrimitiveType theUrl, IPrimitiveType theValueSetVersion, IPrimitiveType theFilter, IPrimitiveType theContext, IPrimitiveType theContextDirection, IPrimitiveType theOffset, IPrimitiveType theCount, IPrimitiveType theDisplayLanguage, IPrimitiveType theIncludeHierarchy, RequestDetails theRequestDetails) { + public T expand( + IIdType theId, + T theValueSet, + IPrimitiveType theUrl, + IPrimitiveType theValueSetVersion, + IPrimitiveType theFilter, + IPrimitiveType theContext, + IPrimitiveType theContextDirection, + IPrimitiveType theOffset, + IPrimitiveType theCount, + IPrimitiveType theDisplayLanguage, + IPrimitiveType theIncludeHierarchy, + RequestDetails theRequestDetails) { boolean haveId = theId != null && theId.hasIdPart(); boolean haveIdentifier = theUrl != null && isNotBlank(theUrl.getValue()); boolean haveValueSet = theValueSet != null && !theValueSet.isEmpty(); @@ -90,13 +107,17 @@ public class JpaResourceDaoValueSet extends BaseHapiFhi boolean haveContextDirection = theContextDirection != null && !theContextDirection.isEmpty(); boolean haveContext = theContext != null && !theContext.isEmpty(); - boolean isAutocompleteExtension = haveContext && haveContextDirection && "existing".equals(theContextDirection.getValue()); + boolean isAutocompleteExtension = + haveContext && haveContextDirection && "existing".equals(theContextDirection.getValue()); if (isAutocompleteExtension) { // this is a funky extension for NIH. Do our own thing and return. 
- ValueSetAutocompleteOptions options = ValueSetAutocompleteOptions.validateAndParseOptions(myStorageSettings, theContext, theFilter, theCount, theId, theUrl, theValueSet); + ValueSetAutocompleteOptions options = ValueSetAutocompleteOptions.validateAndParseOptions( + myStorageSettings, theContext, theFilter, theCount, theId, theUrl, theValueSet); if (myFulltextSearch == null || myFulltextSearch.isDisabled()) { - throw new InvalidRequestException(Msg.code(2083) + " Autocomplete is not supported on this server, as the fulltext search service is not configured."); + throw new InvalidRequestException( + Msg.code(2083) + + " Autocomplete is not supported on this server, as the fulltext search service is not configured."); } else { return (T) myFulltextSearch.tokenAutocompleteValueSetSearch(options); } @@ -105,25 +126,35 @@ public class JpaResourceDaoValueSet extends BaseHapiFhi if (!haveId && !haveIdentifier && !haveValueSet) { if (myFhirContext.getVersion().getVersion() == FhirVersionEnum.DSTU2) { // "url" parameter is called "identifier" in DSTU2 - throw new InvalidRequestException(Msg.code(1130) + "$expand operation at the type level (no ID specified) requires an identifier or a valueSet as a part of the request"); + throw new InvalidRequestException( + Msg.code(1130) + + "$expand operation at the type level (no ID specified) requires an identifier or a valueSet as a part of the request"); } - throw new InvalidRequestException(Msg.code(1133) + "$expand operation at the type level (no ID specified) requires a url or a valueSet as a part of the request."); + throw new InvalidRequestException( + Msg.code(1133) + + "$expand operation at the type level (no ID specified) requires a url or a valueSet as a part of the request."); } if (!LogicUtil.multiXor(haveId, haveIdentifier, haveValueSet)) { if (myFhirContext.getVersion().getVersion() == FhirVersionEnum.DSTU2) { // "url" parameter is called "identifier" in DSTU2 - throw new InvalidRequestException(Msg.code(1131) + "$expand must EITHER be invoked at the type level, or have an identifier specified, or have a ValueSet specified. Can not combine these options."); + throw new InvalidRequestException( + Msg.code(1131) + + "$expand must EITHER be invoked at the type level, or have an identifier specified, or have a ValueSet specified. Can not combine these options."); } - throw new InvalidRequestException(Msg.code(1134) + "$expand must EITHER be invoked at the instance level, or have a url specified, or have a ValueSet specified. Can not combine these options."); + throw new InvalidRequestException( + Msg.code(1134) + + "$expand must EITHER be invoked at the instance level, or have a url specified, or have a ValueSet specified. 
Can not combine these options."); } - ValueSetExpansionOptions options = createValueSetExpansionOptions(myStorageSettings, theOffset, theCount, theIncludeHierarchy, theFilter, theDisplayLanguage); + ValueSetExpansionOptions options = createValueSetExpansionOptions( + myStorageSettings, theOffset, theCount, theIncludeHierarchy, theFilter, theDisplayLanguage); IValidationSupport.ValueSetExpansionOutcome outcome; if (haveId) { IBaseResource valueSet = read(theId, theRequestDetails); - outcome = myValidationSupport.expandValueSet(new ValidationSupportContext(myValidationSupport), options, valueSet); + outcome = myValidationSupport.expandValueSet( + new ValidationSupportContext(myValidationSupport), options, valueSet); } else if (haveIdentifier) { String url; if (haveValueSetVersion) { @@ -131,9 +162,11 @@ public class JpaResourceDaoValueSet extends BaseHapiFhi } else { url = theUrl.getValue(); } - outcome = myValidationSupport.expandValueSet(new ValidationSupportContext(myValidationSupport), options, url); + outcome = + myValidationSupport.expandValueSet(new ValidationSupportContext(myValidationSupport), options, url); } else { - outcome = myValidationSupport.expandValueSet(new ValidationSupportContext(myValidationSupport), options, theValueSet); + outcome = myValidationSupport.expandValueSet( + new ValidationSupportContext(myValidationSupport), options, theValueSet); } return extractValueSetOrThrowException(outcome); @@ -142,7 +175,8 @@ public class JpaResourceDaoValueSet extends BaseHapiFhi @SuppressWarnings("unchecked") private T extractValueSetOrThrowException(IValidationSupport.ValueSetExpansionOutcome outcome) { if (outcome == null) { - throw new InternalErrorException(Msg.code(2028) + "No validation support module was able to expand the given valueset"); + throw new InternalErrorException( + Msg.code(2028) + "No validation support module was able to expand the given valueset"); } if (outcome.getError() != null) { @@ -153,12 +187,19 @@ public class JpaResourceDaoValueSet extends BaseHapiFhi } @Override - public IValidationSupport.CodeValidationResult validateCode(IPrimitiveType theValueSetIdentifier, IIdType theValueSetId, IPrimitiveType theCode, - IPrimitiveType theSystem, IPrimitiveType theDisplay, IBaseCoding theCoding, - IBaseDatatype theCodeableConcept, RequestDetails theRequestDetails) { + public IValidationSupport.CodeValidationResult validateCode( + IPrimitiveType theValueSetIdentifier, + IIdType theValueSetId, + IPrimitiveType theCode, + IPrimitiveType theSystem, + IPrimitiveType theDisplay, + IBaseCoding theCoding, + IBaseDatatype theCodeableConcept, + RequestDetails theRequestDetails) { CodeableConcept codeableConcept = myVersionCanonicalizer.codeableConceptToCanonical(theCodeableConcept); - boolean haveCodeableConcept = codeableConcept != null && codeableConcept.getCoding().size() > 0; + boolean haveCodeableConcept = + codeableConcept != null && codeableConcept.getCoding().size() > 0; Coding canonicalCodingToValidate = myVersionCanonicalizer.codingToCanonical((IBaseCoding) theCoding); boolean haveCoding = canonicalCodingToValidate != null && !canonicalCodingToValidate.isEmpty(); @@ -166,16 +207,19 @@ public class JpaResourceDaoValueSet extends BaseHapiFhi boolean haveCode = theCode != null && !theCode.isEmpty(); if (!haveCodeableConcept && !haveCoding && !haveCode) { - throw new InvalidRequestException(Msg.code(899) + "No code, coding, or codeableConcept provided to validate"); + throw new InvalidRequestException( + Msg.code(899) + "No code, coding, or codeableConcept 
provided to validate"); } if (!LogicUtil.multiXor(haveCodeableConcept, haveCoding, haveCode)) { - throw new InvalidRequestException(Msg.code(900) + "$validate-code can only validate (system AND code) OR (coding) OR (codeableConcept)"); + throw new InvalidRequestException(Msg.code(900) + + "$validate-code can only validate (system AND code) OR (coding) OR (codeableConcept)"); } String valueSetIdentifier; if (theValueSetId != null) { IBaseResource valueSet = read(theValueSetId, theRequestDetails); - StringBuilder valueSetIdentifierBuilder = new StringBuilder(CommonCodeSystemsTerminologyService.getValueSetUrl(myFhirContext, valueSet)); + StringBuilder valueSetIdentifierBuilder = + new StringBuilder(CommonCodeSystemsTerminologyService.getValueSetUrl(myFhirContext, valueSet)); String valueSetVersion = CommonCodeSystemsTerminologyService.getValueSetVersion(myFhirContext, valueSet); if (valueSetVersion != null) { valueSetIdentifierBuilder.append("|").append(valueSetVersion); @@ -184,18 +228,22 @@ public class JpaResourceDaoValueSet extends BaseHapiFhi } else if (isNotBlank(toStringValue(theValueSetIdentifier))) { valueSetIdentifier = toStringValue(theValueSetIdentifier); } else { - throw new InvalidRequestException(Msg.code(901) + "Either ValueSet ID or ValueSet identifier or system and code must be provided. Unable to validate."); + throw new InvalidRequestException( + Msg.code(901) + + "Either ValueSet ID or ValueSet identifier or system and code must be provided. Unable to validate."); } if (haveCodeableConcept) { IValidationSupport.CodeValidationResult anyValidation = null; for (int i = 0; i < codeableConcept.getCoding().size(); i++) { Coding nextCoding = codeableConcept.getCoding().get(i); - String system = createVersionedSystemIfVersionIsPresent(nextCoding.getSystem(), nextCoding.getVersion()); + String system = + createVersionedSystemIfVersionIsPresent(nextCoding.getSystem(), nextCoding.getVersion()); String code = nextCoding.getCode(); String display = nextCoding.getDisplay(); - IValidationSupport.CodeValidationResult nextValidation = validateCode(system, code, display, valueSetIdentifier); + IValidationSupport.CodeValidationResult nextValidation = + validateCode(system, code, display, valueSetIdentifier); anyValidation = nextValidation; if (nextValidation.isOk()) { return nextValidation; @@ -203,7 +251,8 @@ public class JpaResourceDaoValueSet extends BaseHapiFhi } return anyValidation; } else if (haveCoding) { - String system = createVersionedSystemIfVersionIsPresent(canonicalCodingToValidate.getSystem(), canonicalCodingToValidate.getVersion()); + String system = createVersionedSystemIfVersionIsPresent( + canonicalCodingToValidate.getSystem(), canonicalCodingToValidate.getVersion()); String code = canonicalCodingToValidate.getCode(); String display = canonicalCodingToValidate.getDisplay(); return validateCode(system, code, display, valueSetIdentifier); @@ -215,24 +264,44 @@ public class JpaResourceDaoValueSet extends BaseHapiFhi } } - private IValidationSupport.CodeValidationResult validateCode(String theSystem, String theCode, String theDisplay, String theValueSetIdentifier) { + private IValidationSupport.CodeValidationResult validateCode( + String theSystem, String theCode, String theDisplay, String theValueSetIdentifier) { ValidationSupportContext context = new ValidationSupportContext(myValidationSupport); ConceptValidationOptions options = new ConceptValidationOptions(); options.setValidateDisplay(isNotBlank(theDisplay)); - IValidationSupport.CodeValidationResult result = 
myValidationSupport.validateCode(context, options, theSystem, theCode, theDisplay, theValueSetIdentifier); + IValidationSupport.CodeValidationResult result = myValidationSupport.validateCode( + context, options, theSystem, theCode, theDisplay, theValueSetIdentifier); if (result == null) { result = new IValidationSupport.CodeValidationResult(); - result.setMessage("Validator is unable to provide validation for " + theCode + "#" + theSystem + " - Unknown or unusable ValueSet[" + theValueSetIdentifier + "]"); + result.setMessage("Validator is unable to provide validation for " + theCode + "#" + theSystem + + " - Unknown or unusable ValueSet[" + theValueSetIdentifier + "]"); } return result; } @Override - public ResourceTable updateEntity(RequestDetails theRequestDetails, IBaseResource theResource, IBasePersistedResource theEntity, Date theDeletedTimestampOrNull, boolean thePerformIndexing, - boolean theUpdateVersion, TransactionDetails theTransactionDetails, boolean theForceUpdate, boolean theCreateNewHistoryEntry) { - ResourceTable retVal = super.updateEntity(theRequestDetails, theResource, theEntity, theDeletedTimestampOrNull, thePerformIndexing, theUpdateVersion, theTransactionDetails, theForceUpdate, theCreateNewHistoryEntry); + public ResourceTable updateEntity( + RequestDetails theRequestDetails, + IBaseResource theResource, + IBasePersistedResource theEntity, + Date theDeletedTimestampOrNull, + boolean thePerformIndexing, + boolean theUpdateVersion, + TransactionDetails theTransactionDetails, + boolean theForceUpdate, + boolean theCreateNewHistoryEntry) { + ResourceTable retVal = super.updateEntity( + theRequestDetails, + theResource, + theEntity, + theDeletedTimestampOrNull, + thePerformIndexing, + theUpdateVersion, + theTransactionDetails, + theForceUpdate, + theCreateNewHistoryEntry); if (getStorageSettings().isPreExpandValueSets() && !retVal.isUnchangedInCurrentOperation()) { if (retVal.getDeleted() == null) { @@ -245,6 +314,4 @@ public class JpaResourceDaoValueSet extends BaseHapiFhi return retVal; } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java index 776a9b6bc4b..6dfa2930cc9 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java @@ -65,12 +65,12 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.List; +import javax.annotation.Nullable; import static ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.decodeResource; import static java.util.Objects.nonNull; @@ -79,16 +79,22 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; public class JpaStorageResourceParser implements IJpaStorageResourceParser { public static final LenientErrorHandler LENIENT_ERROR_HANDLER = new LenientErrorHandler(false).disableAllErrors(); private static final Logger ourLog = LoggerFactory.getLogger(JpaStorageResourceParser.class); + @Autowired private FhirContext myFhirContext; + @Autowired private JpaStorageSettings myStorageSettings; + @Autowired private IResourceHistoryTableDao myResourceHistoryTableDao; + @Autowired private PartitionSettings myPartitionSettings; + @Autowired private 
IPartitionLookupSvc myPartitionLookupSvc; + @Autowired private ExternallyStoredResourceServiceRegistry myExternallyStoredResourceServiceRegistry; @@ -100,14 +106,17 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser { } @Override - public R toResource(Class theResourceType, IBaseResourceEntity theEntity, Collection theTagList, boolean theForHistoryOperation) { + public R toResource( + Class theResourceType, + IBaseResourceEntity theEntity, + Collection theTagList, + boolean theForHistoryOperation) { // 1. get resource, it's encoding and the tags if any byte[] resourceBytes; String resourceText; ResourceEncodingEnum resourceEncoding; - @Nullable - Collection tagList = Collections.emptyList(); + @Nullable Collection tagList = Collections.emptyList(); long version; String provenanceSourceUri = null; String provenanceRequestId = null; @@ -150,7 +159,8 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser { while (history == null) { if (version > 1L) { version--; - history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theEntity.getId(), version); + history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance( + theEntity.getId(), version); } else { return null; } @@ -222,11 +232,13 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser { return retVal; } - private void populateResourcePartitionInformation(IBaseResourceEntity theEntity, R retVal) { + private void populateResourcePartitionInformation( + IBaseResourceEntity theEntity, R retVal) { if (myPartitionSettings.isPartitioningEnabled()) { PartitionablePartitionId partitionId = theEntity.getPartitionId(); if (partitionId != null && partitionId.getPartitionId() != null) { - PartitionEntity persistedPartition = myPartitionLookupSvc.getPartitionById(partitionId.getPartitionId()); + PartitionEntity persistedPartition = + myPartitionLookupSvc.getPartitionById(partitionId.getPartitionId()); retVal.setUserData(Constants.RESOURCE_PARTITION_ID, persistedPartition.toRequestPartitionId()); } else { retVal.setUserData(Constants.RESOURCE_PARTITION_ID, null); @@ -235,7 +247,11 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser { } @SuppressWarnings("unchecked") - private R parseResource(IBaseResourceEntity theEntity, ResourceEncodingEnum theResourceEncoding, String theDecodedResourceText, Class theResourceType) { + private R parseResource( + IBaseResourceEntity theEntity, + ResourceEncodingEnum theResourceEncoding, + String theDecodedResourceText, + Class theResourceType) { R retVal; if (theResourceEncoding == ResourceEncodingEnum.ESR) { @@ -245,12 +261,14 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser { String address = theDecodedResourceText.substring(colonIndex + 1); Validate.notBlank(providerId, "No provider ID in ESR address: %s", theDecodedResourceText); Validate.notBlank(address, "No address in ESR address: %s", theDecodedResourceText); - IExternallyStoredResourceService provider = myExternallyStoredResourceServiceRegistry.getProvider(providerId); + IExternallyStoredResourceService provider = + myExternallyStoredResourceServiceRegistry.getProvider(providerId); retVal = (R) provider.fetchResource(address); } else if (theResourceEncoding != ResourceEncodingEnum.DEL) { - IParser parser = new TolerantJsonParser(getContext(theEntity.getFhirVersion()), LENIENT_ERROR_HANDLER, theEntity.getId()); + IParser parser = new TolerantJsonParser( + getContext(theEntity.getFhirVersion()), 
LENIENT_ERROR_HANDLER, theEntity.getId()); try { retVal = parser.parseResource(theResourceType, theDecodedResourceText); @@ -273,14 +291,16 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser { } else { - retVal = (R) myFhirContext.getResourceDefinition(theEntity.getResourceType()).newInstance(); - + retVal = (R) myFhirContext + .getResourceDefinition(theEntity.getResourceType()) + .newInstance(); } return retVal; } @SuppressWarnings("unchecked") - private Class determineTypeToParse(Class theResourceType, @Nullable Collection tagList) { + private Class determineTypeToParse( + Class theResourceType, @Nullable Collection tagList) { Class resourceType = theResourceType; if (tagList != null) { if (myFhirContext.hasDefaultTypeForProfile()) { @@ -304,19 +324,31 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser { @SuppressWarnings("unchecked") @Override - public R populateResourceMetadata(IBaseResourceEntity theEntitySource, boolean theForHistoryOperation, @Nullable Collection tagList, long theVersion, R theResourceTarget) { + public R populateResourceMetadata( + IBaseResourceEntity theEntitySource, + boolean theForHistoryOperation, + @Nullable Collection tagList, + long theVersion, + R theResourceTarget) { if (theResourceTarget instanceof IResource) { IResource res = (IResource) theResourceTarget; - theResourceTarget = (R) populateResourceMetadataHapi(theEntitySource, tagList, theForHistoryOperation, res, theVersion); + theResourceTarget = + (R) populateResourceMetadataHapi(theEntitySource, tagList, theForHistoryOperation, res, theVersion); } else { IAnyResource res = (IAnyResource) theResourceTarget; - theResourceTarget = populateResourceMetadataRi(theEntitySource, tagList, theForHistoryOperation, res, theVersion); + theResourceTarget = + populateResourceMetadataRi(theEntitySource, tagList, theForHistoryOperation, res, theVersion); } return theResourceTarget; } @SuppressWarnings("unchecked") - private R populateResourceMetadataHapi(IBaseResourceEntity theEntity, @Nullable Collection theTagList, boolean theForHistoryOperation, R res, Long theVersion) { + private R populateResourceMetadataHapi( + IBaseResourceEntity theEntity, + @Nullable Collection theTagList, + boolean theForHistoryOperation, + R res, + Long theVersion) { R retVal = res; if (theEntity.getDeleted() != null) { res = (R) myFhirContext.getResourceDefinition(res).newInstance(); @@ -357,7 +389,8 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser { profiles.add(new IdDt(nextTag.getCode())); break; case SECURITY_LABEL: - IBaseCoding secLabel = (IBaseCoding) myFhirContext.getVersion().newCodingDt(); + IBaseCoding secLabel = + (IBaseCoding) myFhirContext.getVersion().newCodingDt(); secLabel.setSystem(nextTag.getSystem()); secLabel.setCode(nextTag.getCode()); secLabel.setDisplay(nextTag.getDisplay()); @@ -390,7 +423,12 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser { } @SuppressWarnings("unchecked") - private R populateResourceMetadataRi(IBaseResourceEntity theEntity, @Nullable Collection theTagList, boolean theForHistoryOperation, IAnyResource res, Long theVersion) { + private R populateResourceMetadataRi( + IBaseResourceEntity theEntity, + @Nullable Collection theTagList, + boolean theForHistoryOperation, + IAnyResource res, + Long theVersion) { R retVal = (R) res; if (theEntity.getDeleted() != null) { res = (IAnyResource) myFhirContext.getResourceDefinition(res).newInstance(); @@ -486,7 +524,8 @@ public class 
JpaStorageResourceParser implements IJpaStorageResourceParser { return FhirContext.forCached(theVersion); } - private static String decodedResourceText(byte[] resourceBytes, String resourceText, ResourceEncodingEnum resourceEncoding) { + private static String decodedResourceText( + byte[] resourceBytes, String resourceText, ResourceEncodingEnum resourceEncoding) { String decodedResourceText; if (resourceText != null) { decodedResourceText = resourceText; @@ -503,5 +542,4 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser { } return retVal; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/ObservationLastNIndexPersistSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/ObservationLastNIndexPersistSvc.java index eef5557c9b7..fa8b6fb3274 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/ObservationLastNIndexPersistSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/ObservationLastNIndexPersistSvc.java @@ -63,38 +63,52 @@ public class ObservationLastNIndexPersistSvc { return; } - List subjectReferenceElement = mySearchParameterExtractor.extractValues("Observation.subject", theResource); + List subjectReferenceElement = + mySearchParameterExtractor.extractValues("Observation.subject", theResource); String subjectId = subjectReferenceElement.stream() - .map(refElement -> mySearchParameterExtractor.extractReferenceLinkFromResource(refElement, "Observation.subject")) - .filter(Objects::nonNull) - .map(PathAndRef::getRef) - .filter(Objects::nonNull) - .map(subjectRef -> subjectRef.getReferenceElement().getValue()) - .filter(Objects::nonNull) - .findFirst().orElse(null); + .map(refElement -> + mySearchParameterExtractor.extractReferenceLinkFromResource(refElement, "Observation.subject")) + .filter(Objects::nonNull) + .map(PathAndRef::getRef) + .filter(Objects::nonNull) + .map(subjectRef -> subjectRef.getReferenceElement().getValue()) + .filter(Objects::nonNull) + .findFirst() + .orElse(null); Date effectiveDtm = null; - List effectiveDateElement = mySearchParameterExtractor.extractValues("Observation.effective", theResource); + List effectiveDateElement = + mySearchParameterExtractor.extractValues("Observation.effective", theResource); if (effectiveDateElement.size() > 0) { - effectiveDtm = mySearchParameterExtractor.extractDateFromResource(effectiveDateElement.get(0), "Observation.effective"); + effectiveDtm = mySearchParameterExtractor.extractDateFromResource( + effectiveDateElement.get(0), "Observation.effective"); } - List observationCodeCodeableConcepts = mySearchParameterExtractor.extractValues("Observation.code", theResource); + List observationCodeCodeableConcepts = + mySearchParameterExtractor.extractValues("Observation.code", theResource); // Only index for lastn if Observation has a code if (observationCodeCodeableConcepts.size() == 0) { return; } - List observationCategoryCodeableConcepts = mySearchParameterExtractor.extractValues("Observation.category", theResource); - - createOrUpdateIndexedObservation(theResource, effectiveDtm, subjectId, observationCodeCodeableConcepts, observationCategoryCodeableConcepts); + List observationCategoryCodeableConcepts = + mySearchParameterExtractor.extractValues("Observation.category", theResource); + createOrUpdateIndexedObservation( + theResource, + effectiveDtm, + subjectId, + observationCodeCodeableConcepts, + observationCategoryCodeableConcepts); } - private void createOrUpdateIndexedObservation(IBaseResource theResource, Date 
theEffectiveDtm, String theSubjectId, - List theObservationCodeCodeableConcepts, - List theObservationCategoryCodeableConcepts) { + private void createOrUpdateIndexedObservation( + IBaseResource theResource, + Date theEffectiveDtm, + String theSubjectId, + List theObservationCodeCodeableConcepts, + List theObservationCategoryCodeableConcepts) { String resourcePID = theResource.getIdElement().getIdPart(); // Determine if an index already exists for Observation: @@ -118,7 +132,6 @@ public class ObservationLastNIndexPersistSvc { addCategoriesToObservationIndex(theObservationCategoryCodeableConcepts, indexedObservation); myElasticsearchSvc.createOrUpdateObservationIndex(resourcePID, indexedObservation); - } private String encodeResource(IBaseResource theResource) { @@ -126,23 +139,24 @@ public class ObservationLastNIndexPersistSvc { return parser.encodeResourceToString(theResource); } - private void addCodeToObservationIndex(List theObservationCodeCodeableConcepts, - ObservationJson theIndexedObservation) { + private void addCodeToObservationIndex( + List theObservationCodeCodeableConcepts, ObservationJson theIndexedObservation) { // Determine if a Normalized ID was created previously for Observation Code - String existingObservationCodeNormalizedId = getCodeCodeableConceptId(theObservationCodeCodeableConcepts.get(0)); + String existingObservationCodeNormalizedId = + getCodeCodeableConceptId(theObservationCodeCodeableConcepts.get(0)); // Create/update normalized Observation Code index record CodeJson codeableConceptField = - getCodeCodeableConcept(theObservationCodeCodeableConcepts.get(0), - existingObservationCodeNormalizedId); + getCodeCodeableConcept(theObservationCodeCodeableConcepts.get(0), existingObservationCodeNormalizedId); - myElasticsearchSvc.createOrUpdateObservationCodeIndex(codeableConceptField.getCodeableConceptId(), codeableConceptField); + myElasticsearchSvc.createOrUpdateObservationCodeIndex( + codeableConceptField.getCodeableConceptId(), codeableConceptField); theIndexedObservation.setCode(codeableConceptField); } - private void addCategoriesToObservationIndex(List observationCategoryCodeableConcepts, - ObservationJson indexedObservation) { + private void addCategoriesToObservationIndex( + List observationCategoryCodeableConcepts, ObservationJson indexedObservation) { // Build CodeableConcept entities for Observation.Category List categoryCodeableConceptEntities = new ArrayList<>(); for (IBase categoryCodeableConcept : observationCategoryCodeableConcepts) { @@ -183,17 +197,18 @@ public class ObservationLastNIndexPersistSvc { Optional codeCodeableConceptIdOptional = Optional.empty(); for (IBase nextCoding : codings) { - ResourceIndexedSearchParamToken param = mySearchParameterExtractor.createSearchParamForCoding("Observation", - new RuntimeSearchParam(null, null, "code", null, null, null, - null, null, null, null), - nextCoding); + ResourceIndexedSearchParamToken param = mySearchParameterExtractor.createSearchParamForCoding( + "Observation", + new RuntimeSearchParam(null, null, "code", null, null, null, null, null, null, null), + nextCoding); if (param != null) { String system = param.getSystem(); String code = param.getValue(); String text = mySearchParameterExtractor.getDisplayTextForCoding(nextCoding); String codeSystemHash = String.valueOf(CodeSystemHash.hashCodeSystem(system, code)); - CodeJson codeCodeableConceptDocument = myElasticsearchSvc.getObservationCodeDocument(codeSystemHash, text); + CodeJson codeCodeableConceptDocument = + 
myElasticsearchSvc.getObservationCodeDocument(codeSystemHash, text); if (codeCodeableConceptDocument != null) { codeCodeableConceptIdOptional = Optional.of(codeCodeableConceptDocument.getCodeableConceptId()); break; @@ -205,9 +220,10 @@ public class ObservationLastNIndexPersistSvc { } private void addCategoryCoding(IBase theValue, CodeJson theCategoryCodeableConcept) { - ResourceIndexedSearchParamToken param = mySearchParameterExtractor.createSearchParamForCoding("Observation", - new RuntimeSearchParam(null, null, "category", null, null, null, null, null, null, null), - theValue); + ResourceIndexedSearchParamToken param = mySearchParameterExtractor.createSearchParamForCoding( + "Observation", + new RuntimeSearchParam(null, null, "category", null, null, null, null, null, null, null), + theValue); if (param != null) { String system = param.getSystem(); String code = param.getValue(); @@ -217,9 +233,10 @@ public class ObservationLastNIndexPersistSvc { } private void addCodeCoding(IBase theValue, CodeJson theObservationCode) { - ResourceIndexedSearchParamToken param = mySearchParameterExtractor.createSearchParamForCoding("Observation", - new RuntimeSearchParam(null, null, "code", null, null, null, null, null, null, null), - theValue); + ResourceIndexedSearchParamToken param = mySearchParameterExtractor.createSearchParamForCoding( + "Observation", + new RuntimeSearchParam(null, null, "code", null, null, null, null, null, null, null), + theValue); if (param != null) { String system = param.getSystem(); String code = param.getValue(); @@ -234,10 +251,10 @@ public class ObservationLastNIndexPersistSvc { return; } - ObservationJson deletedObservationLastNEntity = myElasticsearchSvc.getObservationDocument(theEntity.getIdDt().getIdPart()); + ObservationJson deletedObservationLastNEntity = + myElasticsearchSvc.getObservationDocument(theEntity.getIdDt().getIdPart()); if (deletedObservationLastNEntity != null) { myElasticsearchSvc.deleteObservationDocument(deletedObservationLastNEntity.getIdentifier()); } } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TolerantJsonParser.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TolerantJsonParser.java index ec95f9d9861..1ae25cae3fa 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TolerantJsonParser.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TolerantJsonParser.java @@ -32,9 +32,9 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nullable; import java.math.BigDecimal; import java.util.Objects; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.defaultString; @@ -79,7 +79,8 @@ public class TolerantJsonParser extends JsonParser { */ String msg = defaultString(e.getMessage(), ""); - if (msg.contains("Unexpected character ('.' (code 46))") || msg.contains("Invalid numeric value: Leading zeroes not allowed")) { + if (msg.contains("Unexpected character ('.' 
(code 46))") + || msg.contains("Invalid numeric value: Leading zeroes not allowed")) { Gson gson = new Gson(); JsonObject object = gson.fromJson(theMessageString, JsonObject.class); @@ -87,20 +88,30 @@ public class TolerantJsonParser extends JsonParser { T parsed = super.parseResource(theResourceType, corrected); - myContext.newTerser().visit(parsed, (theElement, theContainingElementPath, theChildDefinitionPath, theElementDefinitionPath) -> { + myContext + .newTerser() + .visit( + parsed, + (theElement, + theContainingElementPath, + theChildDefinitionPath, + theElementDefinitionPath) -> { + BaseRuntimeElementDefinition def = + theElementDefinitionPath.get(theElementDefinitionPath.size() - 1); + if (def.getName().equals("decimal")) { + IPrimitiveType decimal = (IPrimitiveType) theElement; + String oldValue = decimal.getValueAsString(); + String newValue = decimal.getValue().toPlainString(); + ourLog.warn( + "Correcting invalid previously saved decimal number for Resource[pid={}] - Was {} and now is {}", + Objects.isNull(myResourcePid) ? "" : myResourcePid, + oldValue, + newValue); + decimal.setValueAsString(newValue); + } - BaseRuntimeElementDefinition def = theElementDefinitionPath.get(theElementDefinitionPath.size() - 1); - if (def.getName().equals("decimal")) { - IPrimitiveType decimal = (IPrimitiveType) theElement; - String oldValue = decimal.getValueAsString(); - String newValue = decimal.getValue().toPlainString(); - ourLog.warn("Correcting invalid previously saved decimal number for Resource[pid={}] - Was {} and now is {}", - Objects.isNull(myResourcePid) ? "" : myResourcePid, oldValue, newValue); - decimal.setValueAsString(newValue); - } - - return true; - }); + return true; + }); return parsed; } @@ -109,7 +120,8 @@ public class TolerantJsonParser extends JsonParser { } } - public static TolerantJsonParser createWithLenientErrorHandling(FhirContext theContext, @Nullable Long theResourcePid) { + public static TolerantJsonParser createWithLenientErrorHandling( + FhirContext theContext, @Nullable Long theResourcePid) { LenientErrorHandler errorHandler = new LenientErrorHandler(false).disableAllErrors(); return new TolerantJsonParser(theContext, errorHandler, theResourcePid); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessor.java index c243c31db79..16d58ec0be6 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessor.java @@ -55,6 +55,15 @@ import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.IdentityHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.regex.Pattern; +import java.util.stream.Collectors; import javax.annotation.Nullable; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; @@ -66,15 +75,6 @@ import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.Predicate; import javax.persistence.criteria.Root; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashSet; -import java.util.IdentityHashMap; -import java.util.List; -import java.util.Map; -import 
java.util.Set; -import java.util.regex.Pattern; -import java.util.stream.Collectors; import static ca.uhn.fhir.util.UrlUtil.determineResourceTypeInResourceUrl; import static org.apache.commons.lang3.StringUtils.countMatches; @@ -84,28 +84,37 @@ public class TransactionProcessor extends BaseTransactionProcessor { public static final Pattern SINGLE_PARAMETER_MATCH_URL_PATTERN = Pattern.compile("^[^?]+[?][a-z0-9-]+=[^&,]+$"); private static final Logger ourLog = LoggerFactory.getLogger(TransactionProcessor.class); + @Autowired private ApplicationContext myApplicationContext; + @PersistenceContext(type = PersistenceContextType.TRANSACTION) private EntityManager myEntityManager; + @Autowired(required = false) private HapiFhirHibernateJpaDialect myHapiFhirHibernateJpaDialect; + @Autowired private IIdHelperService myIdHelperService; + @Autowired private PartitionSettings myPartitionSettings; + @Autowired private JpaStorageSettings myStorageSettings; + @Autowired private FhirContext myFhirContext; + @Autowired private MatchResourceUrlService myMatchResourceUrlService; + @Autowired private MatchUrlService myMatchUrlService; + @Autowired private IRequestPartitionHelperSvc myRequestPartitionSvc; - public void setEntityManagerForUnitTest(EntityManager theEntityManager) { myEntityManager = theEntityManager; } @@ -128,10 +137,18 @@ public class TransactionProcessor extends BaseTransactionProcessor { super.setStorageSettings(theStorageSettings); } - @Override - protected EntriesToProcessMap doTransactionWriteOperations(final RequestDetails theRequest, String theActionName, TransactionDetails theTransactionDetails, Set theAllIds, - IdSubstitutionMap theIdSubstitutions, Map theIdToPersistedOutcome, IBaseBundle theResponse, IdentityHashMap theOriginalRequestOrder, List theEntries, StopWatch theTransactionStopWatch) { + protected EntriesToProcessMap doTransactionWriteOperations( + final RequestDetails theRequest, + String theActionName, + TransactionDetails theTransactionDetails, + Set theAllIds, + IdSubstitutionMap theIdSubstitutions, + Map theIdToPersistedOutcome, + IBaseBundle theResponse, + IdentityHashMap theOriginalRequestOrder, + List theEntries, + StopWatch theTransactionStopWatch) { ITransactionProcessorVersionAdapter versionAdapter = getVersionAdapter(); RequestPartitionId requestPartitionId = null; @@ -146,10 +163,24 @@ public class TransactionProcessor extends BaseTransactionProcessor { preFetch(theTransactionDetails, theEntries, versionAdapter, requestPartitionId); } - return super.doTransactionWriteOperations(theRequest, theActionName, theTransactionDetails, theAllIds, theIdSubstitutions, theIdToPersistedOutcome, theResponse, theOriginalRequestOrder, theEntries, theTransactionStopWatch); + return super.doTransactionWriteOperations( + theRequest, + theActionName, + theTransactionDetails, + theAllIds, + theIdSubstitutions, + theIdToPersistedOutcome, + theResponse, + theOriginalRequestOrder, + theEntries, + theTransactionStopWatch); } - private void preFetch(TransactionDetails theTransactionDetails, List theEntries, ITransactionProcessorVersionAdapter theVersionAdapter, RequestPartitionId theRequestPartitionId) { + private void preFetch( + TransactionDetails theTransactionDetails, + List theEntries, + ITransactionProcessorVersionAdapter theVersionAdapter, + RequestPartitionId theRequestPartitionId) { Set foundIds = new HashSet<>(); List idsToPreFetch = new ArrayList<>(); @@ -157,18 +188,26 @@ public class TransactionProcessor extends BaseTransactionProcessor { * Pre-Fetch any resources that are 
referred to normally by ID, e.g. * regular FHIR updates within the transaction. */ - preFetchResourcesById(theTransactionDetails, theEntries, theVersionAdapter, theRequestPartitionId, foundIds, idsToPreFetch); + preFetchResourcesById( + theTransactionDetails, theEntries, theVersionAdapter, theRequestPartitionId, foundIds, idsToPreFetch); /* * Pre-resolve any conditional URLs we can */ - preFetchConditionalUrls(theTransactionDetails, theEntries, theVersionAdapter, theRequestPartitionId, idsToPreFetch); + preFetchConditionalUrls( + theTransactionDetails, theEntries, theVersionAdapter, theRequestPartitionId, idsToPreFetch); IFhirSystemDao systemDao = myApplicationContext.getBean(IFhirSystemDao.class); systemDao.preFetchResources(JpaPid.fromLongList(idsToPreFetch), true); } - private void preFetchResourcesById(TransactionDetails theTransactionDetails, List theEntries, ITransactionProcessorVersionAdapter theVersionAdapter, RequestPartitionId theRequestPartitionId, Set foundIds, List idsToPreFetch) { + private void preFetchResourcesById( + TransactionDetails theTransactionDetails, + List theEntries, + ITransactionProcessorVersionAdapter theVersionAdapter, + RequestPartitionId theRequestPartitionId, + Set foundIds, + List idsToPreFetch) { List idsToPreResolve = new ArrayList<>(); for (IBase nextEntry : theEntries) { IBaseResource resource = theVersionAdapter.getResource(nextEntry); @@ -184,12 +223,15 @@ public class TransactionProcessor extends BaseTransactionProcessor { } } } - List outcome = myIdHelperService.resolveResourcePersistentIdsWithCache(theRequestPartitionId, idsToPreResolve) - .stream().collect(Collectors.toList()); + List outcome = + myIdHelperService.resolveResourcePersistentIdsWithCache(theRequestPartitionId, idsToPreResolve).stream() + .collect(Collectors.toList()); for (JpaPid next : outcome) { - foundIds.add(next.getAssociatedResourceId().toUnqualifiedVersionless().getValue()); + foundIds.add( + next.getAssociatedResourceId().toUnqualifiedVersionless().getValue()); theTransactionDetails.addResolvedResourceId(next.getAssociatedResourceId(), next); - if (myStorageSettings.getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY || !next.getAssociatedResourceId().isIdPartValidLong()) { + if (myStorageSettings.getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY + || !next.getAssociatedResourceId().isIdPartValidLong()) { idsToPreFetch.add(next.getId()); } } @@ -200,7 +242,12 @@ public class TransactionProcessor extends BaseTransactionProcessor { } } - private void preFetchConditionalUrls(TransactionDetails theTransactionDetails, List theEntries, ITransactionProcessorVersionAdapter theVersionAdapter, RequestPartitionId theRequestPartitionId, List idsToPreFetch) { + private void preFetchConditionalUrls( + TransactionDetails theTransactionDetails, + List theEntries, + ITransactionProcessorVersionAdapter theVersionAdapter, + RequestPartitionId theRequestPartitionId, + List idsToPreFetch) { List searchParameterMapsToResolve = new ArrayList<>(); for (IBase nextEntry : theEntries) { IBaseResource resource = theVersionAdapter.getResource(nextEntry); @@ -215,16 +262,21 @@ public class TransactionProcessor extends BaseTransactionProcessor { if (("PUT".equals(verb) || "PATCH".equals(verb)) && requestUrl != null && requestUrl.contains("?")) { preFetchConditionalUrl(resourceType, requestUrl, true, idsToPreFetch, searchParameterMapsToResolve); } else if ("POST".equals(verb) && requestIfNoneExist != null && requestIfNoneExist.contains("?")) { - 
preFetchConditionalUrl(resourceType, requestIfNoneExist, false, idsToPreFetch, searchParameterMapsToResolve); + preFetchConditionalUrl( + resourceType, requestIfNoneExist, false, idsToPreFetch, searchParameterMapsToResolve); } if (myStorageSettings.isAllowInlineMatchUrlReferences()) { - List references = myFhirContext.newTerser().getAllResourceReferences(resource); + List references = + myFhirContext.newTerser().getAllResourceReferences(resource); for (ResourceReferenceInfo next : references) { - String referenceUrl = next.getResourceReference().getReferenceElement().getValue(); + String referenceUrl = next.getResourceReference() + .getReferenceElement() + .getValue(); String refResourceType = determineResourceTypeInResourceUrl(myFhirContext, referenceUrl); if (refResourceType != null) { - preFetchConditionalUrl(refResourceType, referenceUrl, false, idsToPreFetch, searchParameterMapsToResolve); + preFetchConditionalUrl( + refResourceType, referenceUrl, false, idsToPreFetch, searchParameterMapsToResolve); } } } @@ -232,8 +284,11 @@ public class TransactionProcessor extends BaseTransactionProcessor { } new QueryChunker() - .chunk(searchParameterMapsToResolve, 100, map -> - preFetchSearchParameterMaps(theTransactionDetails, theRequestPartitionId, map, idsToPreFetch)); + .chunk( + searchParameterMapsToResolve, + 100, + map -> preFetchSearchParameterMaps( + theTransactionDetails, theRequestPartitionId, map, idsToPreFetch)); } /** @@ -244,7 +299,11 @@ public class TransactionProcessor extends BaseTransactionProcessor { * pre-loaded (ie. fetch the actual resource body since we're presumably * going to update it and will need to see its current state eventually) */ - private void preFetchSearchParameterMaps(TransactionDetails theTransactionDetails, RequestPartitionId theRequestPartitionId, List theInputParameters, List theOutputPidsToLoadFully) { + private void preFetchSearchParameterMaps( + TransactionDetails theTransactionDetails, + RequestPartitionId theRequestPartitionId, + List theInputParameters, + List theOutputPidsToLoadFully) { Set systemAndValueHashes = new HashSet<>(); Set valueHashes = new HashSet<>(); for (MatchUrlToResolve next : theInputParameters) { @@ -254,24 +313,37 @@ public class TransactionProcessor extends BaseTransactionProcessor { IQueryParameterType param = andList.get(0).get(0); if (param instanceof TokenParam) { - buildHashPredicateFromTokenParam((TokenParam) param, theRequestPartitionId, next, systemAndValueHashes, valueHashes); + buildHashPredicateFromTokenParam( + (TokenParam) param, theRequestPartitionId, next, systemAndValueHashes, valueHashes); } } - } - preFetchSearchParameterMapsToken("myHashSystemAndValue", systemAndValueHashes, theTransactionDetails, theRequestPartitionId, theInputParameters, theOutputPidsToLoadFully); - preFetchSearchParameterMapsToken("myHashValue", valueHashes, theTransactionDetails, theRequestPartitionId, theInputParameters, theOutputPidsToLoadFully); + preFetchSearchParameterMapsToken( + "myHashSystemAndValue", + systemAndValueHashes, + theTransactionDetails, + theRequestPartitionId, + theInputParameters, + theOutputPidsToLoadFully); + preFetchSearchParameterMapsToken( + "myHashValue", + valueHashes, + theTransactionDetails, + theRequestPartitionId, + theInputParameters, + theOutputPidsToLoadFully); - //For each SP Map which did not return a result, tag it as not found. + // For each SP Map which did not return a result, tag it as not found. 
if (!valueHashes.isEmpty() || !systemAndValueHashes.isEmpty()) { theInputParameters.stream() - // No matches - .filter(match -> !match.myResolved) - .forEach(match -> { - ourLog.debug("Was unable to match url {} from database", match.myRequestUrl); - theTransactionDetails.addResolvedMatchUrl(myFhirContext, match.myRequestUrl, TransactionDetails.NOT_FOUND); - }); + // No matches + .filter(match -> !match.myResolved) + .forEach(match -> { + ourLog.debug("Was unable to match url {} from database", match.myRequestUrl); + theTransactionDetails.addResolvedMatchUrl( + myFhirContext, match.myRequestUrl, TransactionDetails.NOT_FOUND); + }); } } @@ -282,27 +354,42 @@ public class TransactionProcessor extends BaseTransactionProcessor { * Note that we do a tuple query for only 2 columns in order to ensure that we can get by with only * the data in the index (ie no need to load the actual table rows). */ - private void preFetchSearchParameterMapsToken(String theIndexColumnName, Set theHashesForIndexColumn, TransactionDetails theTransactionDetails, RequestPartitionId theRequestPartitionId, List theInputParameters, List theOutputPidsToLoadFully) { + private void preFetchSearchParameterMapsToken( + String theIndexColumnName, + Set theHashesForIndexColumn, + TransactionDetails theTransactionDetails, + RequestPartitionId theRequestPartitionId, + List theInputParameters, + List theOutputPidsToLoadFully) { if (!theHashesForIndexColumn.isEmpty()) { - ListMultimap hashToSearchMap = buildHashToSearchMap(theInputParameters, theIndexColumnName); + ListMultimap hashToSearchMap = + buildHashToSearchMap(theInputParameters, theIndexColumnName); CriteriaBuilder cb = myEntityManager.getCriteriaBuilder(); CriteriaQuery cq = cb.createTupleQuery(); Root from = cq.from(ResourceIndexedSearchParamToken.class); - cq.multiselect(from.get("myResourcePid").as(Long.class), from.get(theIndexColumnName).as(Long.class)); + cq.multiselect( + from.get("myResourcePid").as(Long.class), + from.get(theIndexColumnName).as(Long.class)); Predicate masterPredicate; if (theHashesForIndexColumn.size() == 1) { - masterPredicate = cb.equal(from.get(theIndexColumnName).as(Long.class), theHashesForIndexColumn.iterator().next()); + masterPredicate = cb.equal( + from.get(theIndexColumnName).as(Long.class), + theHashesForIndexColumn.iterator().next()); } else { masterPredicate = from.get(theIndexColumnName).as(Long.class).in(theHashesForIndexColumn); } - if (myPartitionSettings.isPartitioningEnabled() && !myPartitionSettings.isIncludePartitionInSearchHashes()) { + if (myPartitionSettings.isPartitioningEnabled() + && !myPartitionSettings.isIncludePartitionInSearchHashes()) { if (theRequestPartitionId.isDefaultPartition()) { - Predicate partitionIdCriteria = cb.isNull(from.get("myPartitionIdValue").as(Integer.class)); + Predicate partitionIdCriteria = + cb.isNull(from.get("myPartitionIdValue").as(Integer.class)); masterPredicate = cb.and(partitionIdCriteria, masterPredicate); } else if (!theRequestPartitionId.isAllPartitions()) { - Predicate partitionIdCriteria = from.get("myPartitionIdValue").as(Integer.class).in(theRequestPartitionId.getPartitionIds()); + Predicate partitionIdCriteria = from.get("myPartitionIdValue") + .as(Integer.class) + .in(theRequestPartitionId.getPartitionIds()); masterPredicate = cb.and(partitionIdCriteria, masterPredicate); } } @@ -336,12 +423,16 @@ public class TransactionProcessor extends BaseTransactionProcessor { if (matchUrl.myShouldPreFetchResourceBody) { theOutputPidsToLoadFully.add(nextResourcePid); } - 
myMatchResourceUrlService.matchUrlResolved(theTransactionDetails, matchUrl.myResourceDefinition.getName(), matchUrl.myRequestUrl, JpaPid.fromId(nextResourcePid)); - theTransactionDetails.addResolvedMatchUrl(myFhirContext, matchUrl.myRequestUrl, JpaPid.fromId(nextResourcePid)); + myMatchResourceUrlService.matchUrlResolved( + theTransactionDetails, + matchUrl.myResourceDefinition.getName(), + matchUrl.myRequestUrl, + JpaPid.fromId(nextResourcePid)); + theTransactionDetails.addResolvedMatchUrl( + myFhirContext, matchUrl.myRequestUrl, JpaPid.fromId(nextResourcePid)); matchUrl.setResolved(true); }); } - } } @@ -365,7 +456,12 @@ public class TransactionProcessor extends BaseTransactionProcessor { * @param theOutputIdsToPreFetch This will be populated with any resource PIDs that need to be pre-fetched * @param theOutputSearchParameterMapsToResolve This will be populated with any {@link SearchParameterMap} instances corresponding to match URLs we need to resolve */ - private void preFetchConditionalUrl(String theResourceType, String theRequestUrl, boolean theShouldPreFetchResourceBody, List theOutputIdsToPreFetch, List theOutputSearchParameterMapsToResolve) { + private void preFetchConditionalUrl( + String theResourceType, + String theRequestUrl, + boolean theShouldPreFetchResourceBody, + List theOutputIdsToPreFetch, + List theOutputSearchParameterMapsToResolve) { JpaPid cachedId = myMatchResourceUrlService.processMatchUrlUsingCacheOnly(theResourceType, theRequestUrl); if (cachedId != null) { if (theShouldPreFetchResourceBody) { @@ -373,18 +469,22 @@ public class TransactionProcessor extends BaseTransactionProcessor { } } else if (SINGLE_PARAMETER_MATCH_URL_PATTERN.matcher(theRequestUrl).matches()) { RuntimeResourceDefinition resourceDefinition = myFhirContext.getResourceDefinition(theResourceType); - SearchParameterMap matchUrlSearchMap = myMatchUrlService.translateMatchUrl(theRequestUrl, resourceDefinition); - theOutputSearchParameterMapsToResolve.add(new MatchUrlToResolve(theRequestUrl, matchUrlSearchMap, resourceDefinition, theShouldPreFetchResourceBody)); + SearchParameterMap matchUrlSearchMap = + myMatchUrlService.translateMatchUrl(theRequestUrl, resourceDefinition); + theOutputSearchParameterMapsToResolve.add(new MatchUrlToResolve( + theRequestUrl, matchUrlSearchMap, resourceDefinition, theShouldPreFetchResourceBody)); } } - private RequestPartitionId getSinglePartitionForAllEntriesOrNull(RequestDetails theRequest, List theEntries, ITransactionProcessorVersionAdapter versionAdapter) { + private RequestPartitionId getSinglePartitionForAllEntriesOrNull( + RequestDetails theRequest, List theEntries, ITransactionProcessorVersionAdapter versionAdapter) { RequestPartitionId retVal = null; Set requestPartitionIdsForAllEntries = new HashSet<>(); for (IBase nextEntry : theEntries) { IBaseResource resource = versionAdapter.getResource(nextEntry); if (resource != null) { - RequestPartitionId requestPartition = myRequestPartitionSvc.determineCreatePartitionForRequest(theRequest, resource, myFhirContext.getResourceType(resource)); + RequestPartitionId requestPartition = myRequestPartitionSvc.determineCreatePartitionForRequest( + theRequest, resource, myFhirContext.getResourceType(resource)); requestPartitionIdsForAllEntries.add(requestPartition); } } @@ -399,20 +499,36 @@ public class TransactionProcessor extends BaseTransactionProcessor { * If neither are available, it returns null. 
*/ @Nullable - private void buildHashPredicateFromTokenParam(TokenParam theTokenParam, RequestPartitionId theRequestPartitionId, MatchUrlToResolve theMatchUrl, Set theSysAndValuePredicates, Set theValuePredicates) { + private void buildHashPredicateFromTokenParam( + TokenParam theTokenParam, + RequestPartitionId theRequestPartitionId, + MatchUrlToResolve theMatchUrl, + Set theSysAndValuePredicates, + Set theValuePredicates) { if (isNotBlank(theTokenParam.getValue()) && isNotBlank(theTokenParam.getSystem())) { - theMatchUrl.myHashSystemAndValue = ResourceIndexedSearchParamToken.calculateHashSystemAndValue(myPartitionSettings, theRequestPartitionId, theMatchUrl.myResourceDefinition.getName(), theMatchUrl.myMatchUrlSearchMap.keySet().iterator().next(), theTokenParam.getSystem(), theTokenParam.getValue()); + theMatchUrl.myHashSystemAndValue = ResourceIndexedSearchParamToken.calculateHashSystemAndValue( + myPartitionSettings, + theRequestPartitionId, + theMatchUrl.myResourceDefinition.getName(), + theMatchUrl.myMatchUrlSearchMap.keySet().iterator().next(), + theTokenParam.getSystem(), + theTokenParam.getValue()); theSysAndValuePredicates.add(theMatchUrl.myHashSystemAndValue); } else if (isNotBlank(theTokenParam.getValue())) { - theMatchUrl.myHashValue = ResourceIndexedSearchParamToken.calculateHashValue(myPartitionSettings, theRequestPartitionId, theMatchUrl.myResourceDefinition.getName(), theMatchUrl.myMatchUrlSearchMap.keySet().iterator().next(), theTokenParam.getValue()); + theMatchUrl.myHashValue = ResourceIndexedSearchParamToken.calculateHashValue( + myPartitionSettings, + theRequestPartitionId, + theMatchUrl.myResourceDefinition.getName(), + theMatchUrl.myMatchUrlSearchMap.keySet().iterator().next(), + theTokenParam.getValue()); theValuePredicates.add(theMatchUrl.myHashValue); } - } - private ListMultimap buildHashToSearchMap(List searchParameterMapsToResolve, String theIndex) { + private ListMultimap buildHashToSearchMap( + List searchParameterMapsToResolve, String theIndex) { ListMultimap hashToSearch = ArrayListMultimap.create(); - //Build a lookup map so we don't have to iterate over the searches repeatedly. + // Build a lookup map so we don't have to iterate over the searches repeatedly. 
for (MatchUrlToResolve nextSearchParameterMap : searchParameterMapsToResolve) { if (nextSearchParameterMap.myHashSystemAndValue != null && theIndex.equals("myHashSystemAndValue")) { hashToSearch.put(nextSearchParameterMap.myHashSystemAndValue, nextSearchParameterMap); @@ -440,10 +556,17 @@ public class TransactionProcessor extends BaseTransactionProcessor { StopWatch sw = new StopWatch(); myEntityManager.flush(); - ourLog.debug("Session flush took {}ms for {} inserts and {} updates", sw.getMillis(), insertionCount, updateCount); + ourLog.debug( + "Session flush took {}ms for {} inserts and {} updates", + sw.getMillis(), + insertionCount, + updateCount); } catch (PersistenceException e) { if (myHapiFhirHibernateJpaDialect != null) { - List types = theIdToPersistedOutcome.keySet().stream().filter(t -> t != null).map(t -> t.getResourceType()).collect(Collectors.toList()); + List types = theIdToPersistedOutcome.keySet().stream() + .filter(t -> t != null) + .map(t -> t.getResourceType()) + .collect(Collectors.toList()); String message = "Error flushing transaction with resource types: " + types; throw myHapiFhirHibernateJpaDialect.translate(e, message); } @@ -476,7 +599,11 @@ public class TransactionProcessor extends BaseTransactionProcessor { private Long myHashValue; private Long myHashSystemAndValue; - public MatchUrlToResolve(String theRequestUrl, SearchParameterMap theMatchUrlSearchMap, RuntimeResourceDefinition theResourceDefinition, boolean theShouldPreFetchResourceBody) { + public MatchUrlToResolve( + String theRequestUrl, + SearchParameterMap theMatchUrlSearchMap, + RuntimeResourceDefinition theResourceDefinition, + boolean theShouldPreFetchResourceBody) { myRequestUrl = theRequestUrl; myMatchUrlSearchMap = theMatchUrlSearchMap; myResourceDefinition = theResourceDefinition; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessorVersionAdapterDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessorVersionAdapterDstu2.java index f0c2747cdef..c618e97c37d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessorVersionAdapterDstu2.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessorVersionAdapterDstu2.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.jpa.dao; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.IResource; import ca.uhn.fhir.model.api.TemporalPrecisionEnum; import ca.uhn.fhir.model.dstu2.resource.Bundle; @@ -32,14 +32,14 @@ import ca.uhn.fhir.model.dstu2.valueset.IssueTypeEnum; import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import org.hl7.fhir.exceptions.FHIRException; -import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; import org.hl7.fhir.instance.model.api.IBaseResource; import java.util.Date; import java.util.List; -public class TransactionProcessorVersionAdapterDstu2 implements ITransactionProcessorVersionAdapter { +public class TransactionProcessorVersionAdapterDstu2 + implements ITransactionProcessorVersionAdapter { @Override public void setResponseStatus(Bundle.Entry theBundleEntry, String theStatus) { theBundleEntry.getResponse().setStatus(theStatus); @@ -72,9 +72,9 @@ public class TransactionProcessorVersionAdapterDstu2 implements ITransactionProc public void 
populateEntryWithOperationOutcome(BaseServerResponseException theCaughtEx, Bundle.Entry theEntry) { OperationOutcome oo = new OperationOutcome(); oo.addIssue() - .setSeverity(IssueSeverityEnum.ERROR) - .setDiagnostics(theCaughtEx.getMessage()) - .setCode(IssueTypeEnum.EXCEPTION); + .setSeverity(IssueSeverityEnum.ERROR) + .setDiagnostics(theCaughtEx.getMessage()) + .setCode(IssueTypeEnum.EXCEPTION); theEntry.setResource(oo); } @@ -173,5 +173,4 @@ public class TransactionProcessorVersionAdapterDstu2 implements ITransactionProc public void setRequestUrl(Bundle.Entry theEntry, String theUrl) { theEntry.getRequest().setUrl(theUrl); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBatch2JobInstanceRepository.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBatch2JobInstanceRepository.java index 0335705d2c3..ee575c64478 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBatch2JobInstanceRepository.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBatch2JobInstanceRepository.java @@ -31,11 +31,16 @@ import java.util.Date; import java.util.List; import java.util.Set; -public interface IBatch2JobInstanceRepository extends JpaRepository, IHapiFhirJpaRepository { +public interface IBatch2JobInstanceRepository + extends JpaRepository, IHapiFhirJpaRepository { @Modifying - @Query("UPDATE Batch2JobInstanceEntity e SET e.myStatus = :status WHERE e.myId = :id and e.myStatus IN ( :prior_states )") - int updateInstanceStatusIfIn(@Param("id") String theInstanceId, @Param("status") StatusEnum theNewState, @Param("prior_states") Set thePriorStates); + @Query( + "UPDATE Batch2JobInstanceEntity e SET e.myStatus = :status WHERE e.myId = :id and e.myStatus IN ( :prior_states )") + int updateInstanceStatusIfIn( + @Param("id") String theInstanceId, + @Param("status") StatusEnum theNewState, + @Param("prior_states") Set thePriorStates); @Modifying @Query("UPDATE Batch2JobInstanceEntity e SET e.myUpdateTime = :updated WHERE e.myId = :id") @@ -49,43 +54,40 @@ public interface IBatch2JobInstanceRepository extends JpaRepository findInstancesByJobIdParamsAndStatus( - @Param("defId") String theDefinitionId, - @Param("params") String theParams, - @Param("stats") Set theStatus, - Pageable thePageable - ); + @Param("defId") String theDefinitionId, + @Param("params") String theParams, + @Param("stats") Set theStatus, + Pageable thePageable); @Query("SELECT b from Batch2JobInstanceEntity b WHERE b.myDefinitionId = :defId AND b.myParamsJson = :params") List findInstancesByJobIdAndParams( - @Param("defId") String theDefinitionId, - @Param("params") String theParams, - Pageable thePageable - ); + @Param("defId") String theDefinitionId, @Param("params") String theParams, Pageable thePageable); @Query("SELECT b from Batch2JobInstanceEntity b WHERE b.myStatus = :status") - List findInstancesByJobStatus( - @Param("status") StatusEnum theState, - Pageable thePageable - ); + List findInstancesByJobStatus(@Param("status") StatusEnum theState, Pageable thePageable); @Query("SELECT count(b) from Batch2JobInstanceEntity b WHERE b.myStatus = :status") - Integer findTotalJobsOfStatus( - @Param("status") StatusEnum theState - ); + Integer findTotalJobsOfStatus(@Param("status") StatusEnum theState); - @Query("SELECT b from Batch2JobInstanceEntity b WHERE b.myDefinitionId = :defId AND b.myStatus IN( :stats ) AND b.myEndTime < :cutoff") + @Query( + "SELECT b from Batch2JobInstanceEntity b WHERE b.myDefinitionId = :defId AND b.myStatus IN( 
:stats ) AND b.myEndTime < :cutoff") List findInstancesByJobIdAndStatusAndExpiry( - @Param("defId") String theDefinitionId, - @Param("stats") Set theStatus, - @Param("cutoff") Date theCutoff, - Pageable thePageable - ); + @Param("defId") String theDefinitionId, + @Param("stats") Set theStatus, + @Param("cutoff") Date theCutoff, + Pageable thePageable); - @Query("SELECT e FROM Batch2JobInstanceEntity e WHERE e.myDefinitionId = :jobDefinitionId AND e.myStatus IN :statuses") - List fetchInstancesByJobDefinitionIdAndStatus(@Param("jobDefinitionId") String theJobDefinitionId, @Param("statuses") Set theIncompleteStatuses, Pageable thePageRequest); + @Query( + "SELECT e FROM Batch2JobInstanceEntity e WHERE e.myDefinitionId = :jobDefinitionId AND e.myStatus IN :statuses") + List fetchInstancesByJobDefinitionIdAndStatus( + @Param("jobDefinitionId") String theJobDefinitionId, + @Param("statuses") Set theIncompleteStatuses, + Pageable thePageRequest); @Query("SELECT e FROM Batch2JobInstanceEntity e WHERE e.myDefinitionId = :jobDefinitionId") - List findInstancesByJobDefinitionId(@Param("jobDefinitionId") String theJobDefinitionId, Pageable thePageRequest); + List findInstancesByJobDefinitionId( + @Param("jobDefinitionId") String theJobDefinitionId, Pageable thePageRequest); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBatch2WorkChunkRepository.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBatch2WorkChunkRepository.java index 65dd6982f2d..8e6fea0339d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBatch2WorkChunkRepository.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBatch2WorkChunkRepository.java @@ -33,53 +33,83 @@ import java.util.List; import java.util.Set; import java.util.stream.Stream; -public interface IBatch2WorkChunkRepository extends JpaRepository, IHapiFhirJpaRepository { +public interface IBatch2WorkChunkRepository + extends JpaRepository, IHapiFhirJpaRepository { // NOTE we need a stable sort so paging is reliable. // Warning: mySequence is not unique - it is reset for every chunk. So we also sort by myId. - @Query("SELECT e FROM Batch2WorkChunkEntity e WHERE e.myInstanceId = :instanceId ORDER BY e.mySequence ASC, e.myId ASC") + @Query( + "SELECT e FROM Batch2WorkChunkEntity e WHERE e.myInstanceId = :instanceId ORDER BY e.mySequence ASC, e.myId ASC") List fetchChunks(Pageable thePageRequest, @Param("instanceId") String theInstanceId); /** * A projection query to avoid fetching the CLOB over the wire. * Otherwise, the same as fetchChunks. 
*/ - @Query("SELECT new Batch2WorkChunkEntity(" + - "e.myId, e.mySequence, e.myJobDefinitionId, e.myJobDefinitionVersion, e.myInstanceId, e.myTargetStepId, e.myStatus," + - "e.myCreateTime, e.myStartTime, e.myUpdateTime, e.myEndTime," + - "e.myErrorMessage, e.myErrorCount, e.myRecordsProcessed, e.myWarningMessage" + - ") FROM Batch2WorkChunkEntity e WHERE e.myInstanceId = :instanceId ORDER BY e.mySequence ASC, e.myId ASC") + @Query("SELECT new Batch2WorkChunkEntity(" + + "e.myId, e.mySequence, e.myJobDefinitionId, e.myJobDefinitionVersion, e.myInstanceId, e.myTargetStepId, e.myStatus," + + "e.myCreateTime, e.myStartTime, e.myUpdateTime, e.myEndTime," + + "e.myErrorMessage, e.myErrorCount, e.myRecordsProcessed, e.myWarningMessage" + + ") FROM Batch2WorkChunkEntity e WHERE e.myInstanceId = :instanceId ORDER BY e.mySequence ASC, e.myId ASC") List fetchChunksNoData(Pageable thePageRequest, @Param("instanceId") String theInstanceId); - @Query("SELECT DISTINCT e.myStatus from Batch2WorkChunkEntity e where e.myInstanceId = :instanceId AND e.myTargetStepId = :stepId") - Set getDistinctStatusesForStep(@Param("instanceId") String theInstanceId, @Param("stepId") String theStepId); + @Query( + "SELECT DISTINCT e.myStatus from Batch2WorkChunkEntity e where e.myInstanceId = :instanceId AND e.myTargetStepId = :stepId") + Set getDistinctStatusesForStep( + @Param("instanceId") String theInstanceId, @Param("stepId") String theStepId); - @Query("SELECT e FROM Batch2WorkChunkEntity e WHERE e.myInstanceId = :instanceId AND e.myTargetStepId = :targetStepId ORDER BY e.mySequence ASC") - Stream fetchChunksForStep(@Param("instanceId") String theInstanceId, @Param("targetStepId") String theTargetStepId); + @Query( + "SELECT e FROM Batch2WorkChunkEntity e WHERE e.myInstanceId = :instanceId AND e.myTargetStepId = :targetStepId ORDER BY e.mySequence ASC") + Stream fetchChunksForStep( + @Param("instanceId") String theInstanceId, @Param("targetStepId") String theTargetStepId); @Modifying - @Query("UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myEndTime = :et, " + - "e.myRecordsProcessed = :rp, e.myErrorCount = e.myErrorCount + :errorRetries, e.mySerializedData = null, " + - "e.myWarningMessage = :warningMessage WHERE e.myId = :id") - void updateChunkStatusAndClearDataForEndSuccess(@Param("id") String theChunkId, @Param("et") Date theEndTime, - @Param("rp") int theRecordsProcessed, @Param("errorRetries") int theErrorRetries, @Param("status") WorkChunkStatusEnum theInProgress, @Param("warningMessage") String theWarningMessage); + @Query("UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myEndTime = :et, " + + "e.myRecordsProcessed = :rp, e.myErrorCount = e.myErrorCount + :errorRetries, e.mySerializedData = null, " + + "e.myWarningMessage = :warningMessage WHERE e.myId = :id") + void updateChunkStatusAndClearDataForEndSuccess( + @Param("id") String theChunkId, + @Param("et") Date theEndTime, + @Param("rp") int theRecordsProcessed, + @Param("errorRetries") int theErrorRetries, + @Param("status") WorkChunkStatusEnum theInProgress, + @Param("warningMessage") String theWarningMessage); @Modifying - @Query("UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myEndTime = :et, e.mySerializedData = null, e.myErrorMessage = :em WHERE e.myId IN(:ids)") - void updateAllChunksForInstanceStatusClearDataAndSetError(@Param("ids") List theChunkIds, @Param("et") Date theEndTime, @Param("status") WorkChunkStatusEnum theInProgress, @Param("em") String theError); + @Query( + "UPDATE Batch2WorkChunkEntity e SET 
e.myStatus = :status, e.myEndTime = :et, e.mySerializedData = null, e.myErrorMessage = :em WHERE e.myId IN(:ids)") + void updateAllChunksForInstanceStatusClearDataAndSetError( + @Param("ids") List theChunkIds, + @Param("et") Date theEndTime, + @Param("status") WorkChunkStatusEnum theInProgress, + @Param("em") String theError); @Modifying - @Query("UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myEndTime = :et, e.myErrorMessage = :em, e.myErrorCount = e.myErrorCount + 1 WHERE e.myId = :id") - int updateChunkStatusAndIncrementErrorCountForEndError(@Param("id") String theChunkId, @Param("et") Date theEndTime, @Param("em") String theErrorMessage, @Param("status") WorkChunkStatusEnum theInProgress); + @Query( + "UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myEndTime = :et, e.myErrorMessage = :em, e.myErrorCount = e.myErrorCount + 1 WHERE e.myId = :id") + int updateChunkStatusAndIncrementErrorCountForEndError( + @Param("id") String theChunkId, + @Param("et") Date theEndTime, + @Param("em") String theErrorMessage, + @Param("status") WorkChunkStatusEnum theInProgress); @Modifying - @Query("UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myStartTime = :st WHERE e.myId = :id AND e.myStatus IN :startStatuses") - int updateChunkStatusForStart(@Param("id") String theChunkId, @Param("st") Date theStartedTime, @Param("status") WorkChunkStatusEnum theInProgress, @Param("startStatuses") Collection theStartStatuses); + @Query( + "UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myStartTime = :st WHERE e.myId = :id AND e.myStatus IN :startStatuses") + int updateChunkStatusForStart( + @Param("id") String theChunkId, + @Param("st") Date theStartedTime, + @Param("status") WorkChunkStatusEnum theInProgress, + @Param("startStatuses") Collection theStartStatuses); @Modifying @Query("DELETE FROM Batch2WorkChunkEntity e WHERE e.myInstanceId = :instanceId") int deleteAllForInstance(@Param("instanceId") String theInstanceId); - @Query("SELECT e.myId from Batch2WorkChunkEntity e where e.myInstanceId = :instanceId AND e.myTargetStepId = :stepId AND e.myStatus = :status") - List fetchAllChunkIdsForStepWithStatus(@Param("instanceId")String theInstanceId, @Param("stepId")String theStepId, @Param("status") WorkChunkStatusEnum theStatus); + @Query( + "SELECT e.myId from Batch2WorkChunkEntity e where e.myInstanceId = :instanceId AND e.myTargetStepId = :stepId AND e.myStatus = :status") + List fetchAllChunkIdsForStepWithStatus( + @Param("instanceId") String theInstanceId, + @Param("stepId") String theStepId, + @Param("status") WorkChunkStatusEnum theStatus); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBinaryStorageEntityDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBinaryStorageEntityDao.java index 5c11209fca7..8bd1e974df7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBinaryStorageEntityDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBinaryStorageEntityDao.java @@ -30,7 +30,8 @@ import java.util.Optional; public interface IBinaryStorageEntityDao extends JpaRepository, IHapiFhirJpaRepository { @Query("SELECT e FROM BinaryStorageEntity e WHERE e.myBlobId = :blob_id AND e.myResourceId = :resource_id") - Optional findByIdAndResourceId(@Param("blob_id") String theBlobId, @Param("resource_id") String theResourceId); + Optional findByIdAndResourceId( + @Param("blob_id") String theBlobId, @Param("resource_id") String theResourceId); @Modifying @Query("DELETE 
FROM BinaryStorageEntity t WHERE t.myBlobId = :pid") diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobFileDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobFileDao.java index acdfa0e2c02..c220c454d44 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobFileDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobFileDao.java @@ -34,12 +34,14 @@ public interface IBulkImportJobFileDao extends JpaRepository findAllForJob(@Param("jobId") String theJobId); @Query("SELECT f FROM BulkImportJobFileEntity f WHERE f.myJob = :job AND f.myFileSequence = :fileIndex") - Optional findForJob(@Param("job") BulkImportJobEntity theJob, @Param("fileIndex") int theFileIndex); + Optional findForJob( + @Param("job") BulkImportJobEntity theJob, @Param("fileIndex") int theFileIndex); - @Query("SELECT f.myFileDescription FROM BulkImportJobFileEntity f WHERE f.myJob = :job AND f.myFileSequence = :fileIndex") - Optional findFileDescriptionForJob(@Param("job") BulkImportJobEntity theJob, @Param("fileIndex") int theFileIndex); + @Query( + "SELECT f.myFileDescription FROM BulkImportJobFileEntity f WHERE f.myJob = :job AND f.myFileSequence = :fileIndex") + Optional findFileDescriptionForJob( + @Param("job") BulkImportJobEntity theJob, @Param("fileIndex") int theFileIndex); @Query("SELECT f.myId FROM BulkImportJobFileEntity f WHERE f.myJob.myJobId = :jobId ORDER BY f.myFileSequence ASC") List findAllIdsForJob(@Param("jobId") String theJobId); - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IHapiFhirJpaRepository.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IHapiFhirJpaRepository.java index 26be1126fa3..f9a46f69e62 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IHapiFhirJpaRepository.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IHapiFhirJpaRepository.java @@ -23,5 +23,4 @@ import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; @Transactional(propagation = Propagation.MANDATORY) -public interface IHapiFhirJpaRepository { -} +public interface IHapiFhirJpaRepository {} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IMdmLinkJpaRepository.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IMdmLinkJpaRepository.java index 82bf4ac5d04..b49d848cb1e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IMdmLinkJpaRepository.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IMdmLinkJpaRepository.java @@ -34,33 +34,38 @@ import java.util.List; import java.util.Optional; @Repository -public interface IMdmLinkJpaRepository extends RevisionRepository,JpaRepository, IHapiFhirJpaRepository { +public interface IMdmLinkJpaRepository + extends RevisionRepository, JpaRepository, IHapiFhirJpaRepository { @Modifying @Query("DELETE FROM MdmLink f WHERE myGoldenResourcePid = :pid OR mySourcePid = :pid") int deleteWithAnyReferenceToPid(@Param("pid") Long thePid); @Modifying - @Query("DELETE FROM MdmLink f WHERE (myGoldenResourcePid = :pid OR mySourcePid = :pid) AND myMatchResult <> :matchResult") - int deleteWithAnyReferenceToPidAndMatchResultNot(@Param("pid") Long thePid, @Param("matchResult") MdmMatchResultEnum theMatchResult); + @Query( + "DELETE FROM MdmLink f WHERE (myGoldenResourcePid = :pid OR 
mySourcePid = :pid) AND myMatchResult <> :matchResult") + int deleteWithAnyReferenceToPidAndMatchResultNot( + @Param("pid") Long thePid, @Param("matchResult") MdmMatchResultEnum theMatchResult); @Modifying @Query("DELETE FROM MdmLink f WHERE myGoldenResourcePid IN (:goldenPids) OR mySourcePid IN (:goldenPids)") void deleteLinksWithAnyReferenceToPids(@Param("goldenPids") List theResourcePids); - @Query("SELECT ml2.myGoldenResourcePid as goldenPid, ml2.mySourcePid as sourcePid FROM MdmLink ml2 " + - "WHERE ml2.myMatchResult=:matchResult " + - "AND ml2.myGoldenResourcePid IN (" + - "SELECT ml.myGoldenResourcePid FROM MdmLink ml " + - "INNER JOIN ResourceLink hrl " + - "ON hrl.myTargetResourcePid=ml.mySourcePid " + - "AND hrl.mySourceResourcePid=:groupPid " + - "AND hrl.mySourcePath='Group.member.entity' " + - "AND hrl.myTargetResourceType='Patient'" + - ")") - List expandPidsFromGroupPidGivenMatchResult(@Param("groupPid") Long theGroupPid, @Param("matchResult") MdmMatchResultEnum theMdmMatchResultEnum); + @Query("SELECT ml2.myGoldenResourcePid as goldenPid, ml2.mySourcePid as sourcePid FROM MdmLink ml2 " + + "WHERE ml2.myMatchResult=:matchResult " + + "AND ml2.myGoldenResourcePid IN (" + + "SELECT ml.myGoldenResourcePid FROM MdmLink ml " + + "INNER JOIN ResourceLink hrl " + + "ON hrl.myTargetResourcePid=ml.mySourcePid " + + "AND hrl.mySourceResourcePid=:groupPid " + + "AND hrl.mySourcePath='Group.member.entity' " + + "AND hrl.myTargetResourceType='Patient'" + + ")") + List expandPidsFromGroupPidGivenMatchResult( + @Param("groupPid") Long theGroupPid, @Param("matchResult") MdmMatchResultEnum theMdmMatchResultEnum); @Query("SELECT ml FROM MdmLink ml WHERE ml.mySourcePid = :sourcePid AND ml.myMatchResult = :matchResult") - Optional findBySourcePidAndMatchResult(@Param("sourcePid") Long theSourcePid, @Param("matchResult") MdmMatchResultEnum theMatch); + Optional findBySourcePidAndMatchResult( + @Param("sourcePid") Long theSourcePid, @Param("matchResult") MdmMatchResultEnum theMatch); interface MdmPidTuple { Long getGoldenPid(); @@ -68,21 +73,32 @@ public interface IMdmLinkJpaRepository extends RevisionRepository expandPidsBySourcePidAndMatchResult(@Param("sourcePid") Long theSourcePid, @Param("matchResult") MdmMatchResultEnum theMdmMatchResultEnum); + @Query("SELECT ml.myGoldenResourcePid as goldenPid, ml.mySourcePid as sourcePid " + "FROM MdmLink ml " + + "INNER JOIN MdmLink ml2 " + + "on ml.myGoldenResourcePid=ml2.myGoldenResourcePid " + + "WHERE ml2.mySourcePid=:sourcePid " + + "AND ml2.myMatchResult=:matchResult " + + "AND ml.myMatchResult=:matchResult") + List expandPidsBySourcePidAndMatchResult( + @Param("sourcePid") Long theSourcePid, @Param("matchResult") MdmMatchResultEnum theMdmMatchResultEnum); - @Query("SELECT ml.myGoldenResourcePid as goldenPid, ml.mySourcePid as sourcePid FROM MdmLink ml WHERE ml.myGoldenResourcePid = :goldenPid and ml.myMatchResult = :matchResult") - List expandPidsByGoldenResourcePidAndMatchResult(@Param("goldenPid") Long theSourcePid, @Param("matchResult") MdmMatchResultEnum theMdmMatchResultEnum); + @Query( + "SELECT ml.myGoldenResourcePid as goldenPid, ml.mySourcePid as sourcePid FROM MdmLink ml WHERE ml.myGoldenResourcePid = :goldenPid and ml.myMatchResult = :matchResult") + List expandPidsByGoldenResourcePidAndMatchResult( + @Param("goldenPid") Long theSourcePid, @Param("matchResult") MdmMatchResultEnum theMdmMatchResultEnum); - @Query("SELECT ml.myId FROM MdmLink ml WHERE ml.myMdmSourceType = :resourceName AND ml.myCreated <= :highThreshold ORDER BY 
ml.myCreated DESC") - List findPidByResourceNameAndThreshold(@Param("resourceName") String theResourceName, @Param("highThreshold") Date theHighThreshold, Pageable thePageable); + @Query( + "SELECT ml.myId FROM MdmLink ml WHERE ml.myMdmSourceType = :resourceName AND ml.myCreated <= :highThreshold ORDER BY ml.myCreated DESC") + List findPidByResourceNameAndThreshold( + @Param("resourceName") String theResourceName, + @Param("highThreshold") Date theHighThreshold, + Pageable thePageable); - @Query("SELECT ml.myId FROM MdmLink ml WHERE ml.myMdmSourceType = :resourceName AND ml.myCreated <= :highThreshold AND ml.myPartitionIdValue IN :partitionId ORDER BY ml.myCreated DESC") - List findPidByResourceNameAndThresholdAndPartitionId(@Param("resourceName") String theResourceName, @Param("highThreshold") Date theHighThreshold, @Param("partitionId") List thePartitionIds, Pageable thePageable); + @Query( + "SELECT ml.myId FROM MdmLink ml WHERE ml.myMdmSourceType = :resourceName AND ml.myCreated <= :highThreshold AND ml.myPartitionIdValue IN :partitionId ORDER BY ml.myCreated DESC") + List findPidByResourceNameAndThresholdAndPartitionId( + @Param("resourceName") String theResourceName, + @Param("highThreshold") Date theHighThreshold, + @Param("partitionId") List thePartitionIds, + Pageable thePageable); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/INpmPackageDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/INpmPackageDao.java index 6170e838db7..6df63c7d233 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/INpmPackageDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/INpmPackageDao.java @@ -30,5 +30,4 @@ public interface INpmPackageDao extends JpaRepository, I @Query("SELECT p FROM NpmPackageEntity p WHERE p.myPackageId = :id") Optional findByPackageId(@Param("id") String thePackageId); - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/INpmPackageVersionDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/INpmPackageVersionDao.java index 73105f35895..26d4076401e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/INpmPackageVersionDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/INpmPackageVersionDao.java @@ -34,11 +34,14 @@ public interface INpmPackageVersionDao extends JpaRepository findByPackageId(@Param("id") String thePackageId); @Query("SELECT p FROM NpmPackageVersionEntity p WHERE p.myPackageId = :id AND p.myVersionId = :version") - Optional findByPackageIdAndVersion(@Param("id") String thePackageId, @Param("version") String thePackageVersion); + Optional findByPackageIdAndVersion( + @Param("id") String thePackageId, @Param("version") String thePackageVersion); /** * Uses a "like" expression on the version ID */ - @Query("SELECT p.myVersionId FROM NpmPackageVersionEntity p WHERE p.myPackageId = :id AND p.myVersionId like :version") - List findVersionIdsByPackageIdAndLikeVersion(@Param("id") String theId, @Param("version") String thePartialVersionString); + @Query( + "SELECT p.myVersionId FROM NpmPackageVersionEntity p WHERE p.myPackageId = :id AND p.myVersionId like :version") + List findVersionIdsByPackageIdAndLikeVersion( + @Param("id") String theId, @Param("version") String thePartialVersionString); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/INpmPackageVersionResourceDao.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/INpmPackageVersionResourceDao.java index 1fec9e2044b..50b61acf3dd 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/INpmPackageVersionResourceDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/INpmPackageVersionResourceDao.java @@ -27,14 +27,28 @@ import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; -public interface INpmPackageVersionResourceDao extends JpaRepository, IHapiFhirJpaRepository { +public interface INpmPackageVersionResourceDao + extends JpaRepository, IHapiFhirJpaRepository { - @Query("SELECT e FROM NpmPackageVersionResourceEntity e WHERE e.myResourceType = :resourceType AND e.myFhirVersion = :fhirVersion AND e.myPackageVersion.myCurrentVersion = true") - Slice findCurrentVersionByResourceType(Pageable thePage, @Param("fhirVersion") FhirVersionEnum theFhirVersion, @Param("resourceType") String theResourceType); + @Query( + "SELECT e FROM NpmPackageVersionResourceEntity e WHERE e.myResourceType = :resourceType AND e.myFhirVersion = :fhirVersion AND e.myPackageVersion.myCurrentVersion = true") + Slice findCurrentVersionByResourceType( + Pageable thePage, + @Param("fhirVersion") FhirVersionEnum theFhirVersion, + @Param("resourceType") String theResourceType); - @Query("SELECT e FROM NpmPackageVersionResourceEntity e WHERE e.myCanonicalUrl = :url AND e.myFhirVersion = :fhirVersion AND e.myPackageVersion.myCurrentVersion = true") - Slice findCurrentVersionByCanonicalUrl(Pageable thePage, @Param("fhirVersion") FhirVersionEnum theFhirVersion, @Param("url") String theCanonicalUrl); + @Query( + "SELECT e FROM NpmPackageVersionResourceEntity e WHERE e.myCanonicalUrl = :url AND e.myFhirVersion = :fhirVersion AND e.myPackageVersion.myCurrentVersion = true") + Slice findCurrentVersionByCanonicalUrl( + Pageable thePage, + @Param("fhirVersion") FhirVersionEnum theFhirVersion, + @Param("url") String theCanonicalUrl); - @Query("SELECT e FROM NpmPackageVersionResourceEntity e WHERE e.myCanonicalUrl = :url AND e.myCanonicalVersion = :version AND e.myFhirVersion = :fhirVersion AND e.myPackageVersion.myCurrentVersion = true") - Slice findCurrentVersionByCanonicalUrlAndVersion(Pageable theOf, @Param("fhirVersion") FhirVersionEnum theFhirVersion, @Param("url") String theCanonicalUrl, @Param("version") String theCanonicalVersion); + @Query( + "SELECT e FROM NpmPackageVersionResourceEntity e WHERE e.myCanonicalUrl = :url AND e.myCanonicalVersion = :version AND e.myFhirVersion = :fhirVersion AND e.myPackageVersion.myCurrentVersion = true") + Slice findCurrentVersionByCanonicalUrlAndVersion( + Pageable theOf, + @Param("fhirVersion") FhirVersionEnum theFhirVersion, + @Param("url") String theCanonicalUrl, + @Param("version") String theCanonicalVersion); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IPartitionDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IPartitionDao.java index 5b8921aa292..578e4e5fff7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IPartitionDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IPartitionDao.java @@ -30,5 +30,4 @@ public interface IPartitionDao extends JpaRepository, @Query("SELECT p FROM PartitionEntity p WHERE p.myName = :name") Optional findForName(@Param("name") String theName); - } diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryProvenanceDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryProvenanceDao.java index 0fece322308..c793ba355d6 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryProvenanceDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryProvenanceDao.java @@ -25,10 +25,10 @@ import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; -public interface IResourceHistoryProvenanceDao extends JpaRepository, IHapiFhirJpaRepository { +public interface IResourceHistoryProvenanceDao + extends JpaRepository, IHapiFhirJpaRepository { @Modifying @Query("DELETE FROM ResourceHistoryProvenanceEntity t WHERE t.myId = :pid") void deleteByPid(@Param("pid") Long theId); - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTableDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTableDao.java index 0c212e8bb4a..bd65315bf30 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTableDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTableDao.java @@ -38,41 +38,45 @@ public interface IResourceHistoryTableDao extends JpaRepository findAllVersionsForResourceIdInOrder(@Param("resId") Long theId); - @Query("SELECT t FROM ResourceHistoryTable t LEFT OUTER JOIN FETCH t.myProvenance WHERE t.myResourceId = :id AND t.myResourceVersion = :version") - ResourceHistoryTable findForIdAndVersionAndFetchProvenance(@Param("id") long theId, @Param("version") long theVersion); + @Query( + "SELECT t FROM ResourceHistoryTable t LEFT OUTER JOIN FETCH t.myProvenance WHERE t.myResourceId = :id AND t.myResourceVersion = :version") + ResourceHistoryTable findForIdAndVersionAndFetchProvenance( + @Param("id") long theId, @Param("version") long theVersion); - @Query("SELECT t.myId FROM ResourceHistoryTable t WHERE t.myResourceId = :resId AND t.myResourceVersion != :dontWantVersion") - Slice findForResourceId(Pageable thePage, @Param("resId") Long theId, @Param("dontWantVersion") Long theDontWantVersion); + @Query( + "SELECT t.myId FROM ResourceHistoryTable t WHERE t.myResourceId = :resId AND t.myResourceVersion != :dontWantVersion") + Slice findForResourceId( + Pageable thePage, @Param("resId") Long theId, @Param("dontWantVersion") Long theDontWantVersion); - @Query("SELECT t FROM ResourceHistoryTable t LEFT OUTER JOIN FETCH t.myProvenance WHERE t.myResourceId = :resId AND t.myResourceVersion != :dontWantVersion") - Slice findForResourceIdAndReturnEntitiesAndFetchProvenance(Pageable thePage, @Param("resId") Long theId, @Param("dontWantVersion") Long theDontWantVersion); + @Query( + "SELECT t FROM ResourceHistoryTable t LEFT OUTER JOIN FETCH t.myProvenance WHERE t.myResourceId = :resId AND t.myResourceVersion != :dontWantVersion") + Slice findForResourceIdAndReturnEntitiesAndFetchProvenance( + Pageable thePage, @Param("resId") Long theId, @Param("dontWantVersion") Long theDontWantVersion); - @Query("" + - "SELECT v.myId FROM ResourceHistoryTable v " + - "LEFT OUTER JOIN ResourceTable t ON (v.myResourceId = t.myId) " + - "WHERE v.myResourceVersion != t.myVersion AND " + - "t.myId = :resId") + @Query("" + "SELECT v.myId FROM ResourceHistoryTable v " + + "LEFT OUTER JOIN ResourceTable 
t ON (v.myResourceId = t.myId) " + + "WHERE v.myResourceVersion != t.myVersion AND " + + "t.myId = :resId") Slice findIdsOfPreviousVersionsOfResourceId(Pageable thePage, @Param("resId") Long theResourceId); - @Query("" + - "SELECT v.myId FROM ResourceHistoryTable v " + - "LEFT OUTER JOIN ResourceTable t ON (v.myResourceId = t.myId) " + - "WHERE v.myResourceVersion != t.myVersion AND " + - "t.myResourceType = :restype") + @Query("" + "SELECT v.myId FROM ResourceHistoryTable v " + + "LEFT OUTER JOIN ResourceTable t ON (v.myResourceId = t.myId) " + + "WHERE v.myResourceVersion != t.myVersion AND " + + "t.myResourceType = :restype") Slice findIdsOfPreviousVersionsOfResources(Pageable thePage, @Param("restype") String theResourceName); - @Query("" + - "SELECT v.myId FROM ResourceHistoryTable v " + - "LEFT OUTER JOIN ResourceTable t ON (v.myResourceId = t.myId) " + - "WHERE v.myResourceVersion != t.myVersion") + @Query("" + "SELECT v.myId FROM ResourceHistoryTable v " + + "LEFT OUTER JOIN ResourceTable t ON (v.myResourceId = t.myId) " + + "WHERE v.myResourceVersion != t.myVersion") Slice findIdsOfPreviousVersionsOfResources(Pageable thePage); @Modifying - @Query("UPDATE ResourceHistoryTable r SET r.myResourceVersion = :newVersion WHERE r.myResourceId = :id AND r.myResourceVersion = :oldVersion") - void updateVersion(@Param("id") long theId, @Param("oldVersion") long theOldVersion, @Param("newVersion") long theNewVersion); + @Query( + "UPDATE ResourceHistoryTable r SET r.myResourceVersion = :newVersion WHERE r.myResourceId = :id AND r.myResourceVersion = :oldVersion") + void updateVersion( + @Param("id") long theId, @Param("oldVersion") long theOldVersion, @Param("newVersion") long theNewVersion); @Modifying @Query("DELETE FROM ResourceHistoryTable t WHERE t.myId = :pid") void deleteByPid(@Param("pid") Long theId); - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTagDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTagDao.java index d5824b92e8d..bf9e951a91d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTagDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceHistoryTagDao.java @@ -25,12 +25,9 @@ import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; -import java.util.List; - public interface IResourceHistoryTagDao extends JpaRepository, IHapiFhirJpaRepository { @Modifying @Query("DELETE FROM ResourceHistoryTag t WHERE t.myResourceHistoryPid = :historyPid") void deleteByPid(@Param("historyPid") Long theResourceHistoryTablePid); - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedComboTokensNonUniqueDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedComboTokensNonUniqueDao.java index e8dc3678264..1129a8d5605 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedComboTokensNonUniqueDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedComboTokensNonUniqueDao.java @@ -25,7 +25,8 @@ import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; -public interface IResourceIndexedComboTokensNonUniqueDao extends JpaRepository { +public interface 
IResourceIndexedComboTokensNonUniqueDao + extends JpaRepository { @Modifying @Query("DELETE FROM ResourceIndexedComboTokenNonUnique t WHERE t.myResourceId = :res_id") diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamCoordsDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamCoordsDao.java index b781005af0f..749bee48f9e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamCoordsDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamCoordsDao.java @@ -19,14 +19,14 @@ */ package ca.uhn.fhir.jpa.dao.data; -import org.springframework.data.jpa.repository.JpaRepository; - import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamCoords; +import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; -public interface IResourceIndexedSearchParamCoordsDao extends JpaRepository, IHapiFhirJpaRepository { +public interface IResourceIndexedSearchParamCoordsDao + extends JpaRepository, IHapiFhirJpaRepository { @Modifying @Query("delete from ResourceIndexedSearchParamCoords t WHERE t.myResourcePid = :resid") void deleteByResourceId(@Param("resid") Long theResourcePid); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamDateDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamDateDao.java index ef5b19c44ec..4dbf111cbf7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamDateDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamDateDao.java @@ -27,7 +27,8 @@ import org.springframework.data.repository.query.Param; import java.util.List; -public interface IResourceIndexedSearchParamDateDao extends JpaRepository, IHapiFhirJpaRepository { +public interface IResourceIndexedSearchParamDateDao + extends JpaRepository, IHapiFhirJpaRepository { @Modifying @Query("delete from ResourceIndexedSearchParamDate t WHERE t.myResourcePid = :resid") void deleteByResourceId(@Param("resid") Long theResourcePid); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamNumberDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamNumberDao.java index 10d9b9686fd..4c25fdc75dc 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamNumberDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamNumberDao.java @@ -19,14 +19,14 @@ */ package ca.uhn.fhir.jpa.dao.data; -import org.springframework.data.jpa.repository.JpaRepository; - import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamNumber; +import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; -public interface IResourceIndexedSearchParamNumberDao extends JpaRepository, IHapiFhirJpaRepository { +public interface IResourceIndexedSearchParamNumberDao + extends JpaRepository, IHapiFhirJpaRepository { @Modifying @Query("delete from 
ResourceIndexedSearchParamNumber t WHERE t.myResourcePid = :resid") void deleteByResourceId(@Param("resid") Long theResourcePid); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamQuantityDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamQuantityDao.java index 0c990c5193c..5bc55760377 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamQuantityDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamQuantityDao.java @@ -19,14 +19,14 @@ */ package ca.uhn.fhir.jpa.dao.data; -import org.springframework.data.jpa.repository.JpaRepository; - import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantity; +import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; -public interface IResourceIndexedSearchParamQuantityDao extends JpaRepository, IHapiFhirJpaRepository { +public interface IResourceIndexedSearchParamQuantityDao + extends JpaRepository, IHapiFhirJpaRepository { @Modifying @Query("delete from ResourceIndexedSearchParamQuantity t WHERE t.myResourcePid = :resid") void deleteByResourceId(@Param("resid") Long theResourcePid); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamQuantityNormalizedDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamQuantityNormalizedDao.java index 132c4e5f2d2..aca6cbd19c0 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamQuantityNormalizedDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamQuantityNormalizedDao.java @@ -21,13 +21,12 @@ package ca.uhn.fhir.jpa.dao.data; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantityNormalized; import org.springframework.data.jpa.repository.JpaRepository; - -import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantity; import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; -public interface IResourceIndexedSearchParamQuantityNormalizedDao extends JpaRepository, IHapiFhirJpaRepository { +public interface IResourceIndexedSearchParamQuantityNormalizedDao + extends JpaRepository, IHapiFhirJpaRepository { @Modifying @Query("delete from ResourceIndexedSearchParamQuantityNormalized t WHERE t.myResourcePid = :resid") void deleteByResourceId(@Param("resid") Long theResourcePid); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamStringDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamStringDao.java index 6575b948b7d..55b55c0e446 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamStringDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamStringDao.java @@ -27,7 +27,8 @@ import org.springframework.data.repository.query.Param; import java.util.List; -public interface IResourceIndexedSearchParamStringDao extends JpaRepository, IHapiFhirJpaRepository { +public interface IResourceIndexedSearchParamStringDao 
+ extends JpaRepository, IHapiFhirJpaRepository { @Modifying @Query("DELETE FROM ResourceIndexedSearchParamString t WHERE t.myResourcePid = :resId") diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamTokenDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamTokenDao.java index 880f57d89a7..e8a63f50e51 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamTokenDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamTokenDao.java @@ -25,7 +25,8 @@ import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; -public interface IResourceIndexedSearchParamTokenDao extends JpaRepository, IHapiFhirJpaRepository { +public interface IResourceIndexedSearchParamTokenDao + extends JpaRepository, IHapiFhirJpaRepository { @Query("select count(*) from ResourceIndexedSearchParamToken t WHERE t.myResourcePid = :resid") int countForResourceId(@Param("resid") Long theResourcePid); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamUriDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamUriDao.java index ba2e0b7cc09..d8d7c423d14 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamUriDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceIndexedSearchParamUriDao.java @@ -27,8 +27,9 @@ import org.springframework.data.repository.query.Param; import java.util.Collection; -public interface IResourceIndexedSearchParamUriDao extends JpaRepository, IHapiFhirJpaRepository { - +public interface IResourceIndexedSearchParamUriDao + extends JpaRepository, IHapiFhirJpaRepository { + @Query("SELECT DISTINCT p.myUri FROM ResourceIndexedSearchParamUri p WHERE p.myHashIdentity = :hash_identity") public Collection findAllByHashIdentity(@Param("hash_identity") long theHashIdentity); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceLinkDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceLinkDao.java index d97b05d834f..96593b37665 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceLinkDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceLinkDao.java @@ -43,8 +43,7 @@ public interface IResourceLinkDao extends JpaRepository, IHa * Loads a collection of ResourceLink entities by PID, but also eagerly fetches * the target resources and their forced IDs */ - @Query("SELECT t FROM ResourceLink t LEFT JOIN FETCH t.myTargetResource tr LEFT JOIN FETCH tr.myForcedId WHERE t.myId in :pids") + @Query( + "SELECT t FROM ResourceLink t LEFT JOIN FETCH t.myTargetResource tr LEFT JOIN FETCH tr.myForcedId WHERE t.myId in :pids") List findByPidAndFetchTargetDetails(@Param("pids") List thePids); - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceReindexJobDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceReindexJobDao.java index af8c1d3e874..4a120087589 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceReindexJobDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceReindexJobDao.java @@ 
-62,5 +62,4 @@ public interface IResourceReindexJobDao extends JpaRepository, IHapiFhirJpaRepository{ +public interface IResourceSearchUrlDao extends JpaRepository, IHapiFhirJpaRepository { @Modifying @Query("DELETE FROM ResourceSearchUrlEntity s WHERE (s.myCreatedTime < :cutoff)") @@ -37,5 +36,4 @@ public interface IResourceSearchUrlDao extends JpaRepository, IHapiFhirJpaRepository { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceTableDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceTableDao.java index 55fa3f8982c..3696314341e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceTableDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceTableDao.java @@ -44,42 +44,68 @@ public interface IResourceTableDao extends JpaRepository, I @Query("SELECT t.myId FROM ResourceTable t WHERE t.myResourceType = :restype AND t.myDeleted IS NOT NULL") Slice findIdsOfDeletedResourcesOfType(Pageable thePageable, @Param("restype") String theResourceName); - @Query("SELECT t.myId FROM ResourceTable t WHERE t.myId = :resid AND t.myResourceType = :restype AND t.myDeleted IS NOT NULL") - Slice findIdsOfDeletedResourcesOfType(Pageable thePageable, @Param("resid") Long theResourceId, @Param("restype") String theResourceName); + @Query( + "SELECT t.myId FROM ResourceTable t WHERE t.myId = :resid AND t.myResourceType = :restype AND t.myDeleted IS NOT NULL") + Slice findIdsOfDeletedResourcesOfType( + Pageable thePageable, @Param("resid") Long theResourceId, @Param("restype") String theResourceName); - @Query("SELECT t.myResourceType as type, COUNT(t.myResourceType) as count FROM ResourceTable t GROUP BY t.myResourceType") + @Query( + "SELECT t.myResourceType as type, COUNT(t.myResourceType) as count FROM ResourceTable t GROUP BY t.myResourceType") List> getResourceCounts(); - @Query("SELECT t.myId FROM ResourceTable t WHERE t.myUpdated >= :low AND t.myUpdated <= :high ORDER BY t.myUpdated DESC") - Slice findIdsOfResourcesWithinUpdatedRangeOrderedFromNewest(Pageable thePage, @Param("low") Date theLow, @Param("high") Date theHigh); + @Query( + "SELECT t.myId FROM ResourceTable t WHERE t.myUpdated >= :low AND t.myUpdated <= :high ORDER BY t.myUpdated DESC") + Slice findIdsOfResourcesWithinUpdatedRangeOrderedFromNewest( + Pageable thePage, @Param("low") Date theLow, @Param("high") Date theHigh); - @Query("SELECT t.myId FROM ResourceTable t WHERE t.myUpdated >= :low AND t.myUpdated <= :high ORDER BY t.myUpdated ASC") - Slice findIdsOfResourcesWithinUpdatedRangeOrderedFromOldest(Pageable thePage, @Param("low") Date theLow, @Param("high") Date theHigh); + @Query( + "SELECT t.myId FROM ResourceTable t WHERE t.myUpdated >= :low AND t.myUpdated <= :high ORDER BY t.myUpdated ASC") + Slice findIdsOfResourcesWithinUpdatedRangeOrderedFromOldest( + Pageable thePage, @Param("low") Date theLow, @Param("high") Date theHigh); /** * @return List of arrays containing [PID, resourceType, lastUpdated] */ - @Query("SELECT t.myId, t.myResourceType, t.myUpdated FROM ResourceTable t WHERE t.myUpdated >= :low AND t.myUpdated <= :high ORDER BY t.myUpdated ASC") - Slice findIdsTypesAndUpdateTimesOfResourcesWithinUpdatedRangeOrderedFromOldest(Pageable thePage, @Param("low") Date theLow, @Param("high") Date theHigh); + @Query( + "SELECT t.myId, t.myResourceType, t.myUpdated FROM ResourceTable t WHERE t.myUpdated >= :low AND t.myUpdated <= :high ORDER BY t.myUpdated ASC") + Slice 
findIdsTypesAndUpdateTimesOfResourcesWithinUpdatedRangeOrderedFromOldest( + Pageable thePage, @Param("low") Date theLow, @Param("high") Date theHigh); /** * @return List of arrays containing [PID, resourceType, lastUpdated] */ - @Query("SELECT t.myId, t.myResourceType, t.myUpdated FROM ResourceTable t WHERE t.myUpdated >= :low AND t.myUpdated <= :high AND t.myPartitionIdValue IN (:partition_ids) ORDER BY t.myUpdated ASC") - Slice findIdsTypesAndUpdateTimesOfResourcesWithinUpdatedRangeOrderedFromOldestForPartitionIds(Pageable thePage, @Param("low") Date theLow, @Param("high") Date theHigh, @Param("partition_ids") List theRequestPartitionIds); + @Query( + "SELECT t.myId, t.myResourceType, t.myUpdated FROM ResourceTable t WHERE t.myUpdated >= :low AND t.myUpdated <= :high AND t.myPartitionIdValue IN (:partition_ids) ORDER BY t.myUpdated ASC") + Slice findIdsTypesAndUpdateTimesOfResourcesWithinUpdatedRangeOrderedFromOldestForPartitionIds( + Pageable thePage, + @Param("low") Date theLow, + @Param("high") Date theHigh, + @Param("partition_ids") List theRequestPartitionIds); /** * @return List of arrays containing [PID, resourceType, lastUpdated] */ - @Query("SELECT t.myId, t.myResourceType, t.myUpdated FROM ResourceTable t WHERE t.myUpdated >= :low AND t.myUpdated <= :high ORDER BY t.myUpdated ASC") - Slice findIdsTypesAndUpdateTimesOfResourcesWithinUpdatedRangeOrderedFromOldestForDefaultPartition(Pageable thePage, @Param("low") Date theLow, @Param("high") Date theHigh); + @Query( + "SELECT t.myId, t.myResourceType, t.myUpdated FROM ResourceTable t WHERE t.myUpdated >= :low AND t.myUpdated <= :high ORDER BY t.myUpdated ASC") + Slice findIdsTypesAndUpdateTimesOfResourcesWithinUpdatedRangeOrderedFromOldestForDefaultPartition( + Pageable thePage, @Param("low") Date theLow, @Param("high") Date theHigh); // TODO in the future, consider sorting by pid as well so batch jobs process in the same order across restarts - @Query("SELECT t.myId FROM ResourceTable t WHERE t.myUpdated >= :low AND t.myUpdated <= :high AND t.myPartitionIdValue = :partition_id ORDER BY t.myUpdated ASC") - Slice findIdsOfPartitionedResourcesWithinUpdatedRangeOrderedFromOldest(Pageable thePage, @Param("low") Date theLow, @Param("high") Date theHigh, @Param("partition_id") Integer theRequestPartitionId); + @Query( + "SELECT t.myId FROM ResourceTable t WHERE t.myUpdated >= :low AND t.myUpdated <= :high AND t.myPartitionIdValue = :partition_id ORDER BY t.myUpdated ASC") + Slice findIdsOfPartitionedResourcesWithinUpdatedRangeOrderedFromOldest( + Pageable thePage, + @Param("low") Date theLow, + @Param("high") Date theHigh, + @Param("partition_id") Integer theRequestPartitionId); - @Query("SELECT t.myId FROM ResourceTable t WHERE t.myUpdated >= :low AND t.myUpdated <= :high AND t.myResourceType = :restype ORDER BY t.myUpdated ASC") - Slice findIdsOfResourcesWithinUpdatedRangeOrderedFromOldest(Pageable thePage, @Param("restype") String theResourceType, @Param("low") Date theLow, @Param("high") Date theHigh); + @Query( + "SELECT t.myId FROM ResourceTable t WHERE t.myUpdated >= :low AND t.myUpdated <= :high AND t.myResourceType = :restype ORDER BY t.myUpdated ASC") + Slice findIdsOfResourcesWithinUpdatedRangeOrderedFromOldest( + Pageable thePage, + @Param("restype") String theResourceType, + @Param("low") Date theLow, + @Param("high") Date theHigh); @Modifying @Query("UPDATE ResourceTable t SET t.myIndexStatus = :status WHERE t.myId = :id") @@ -104,21 +130,26 @@ public interface IResourceTableDao extends JpaRepository, I * This method 
returns a Collection where each row is an element in the collection. Each element in the collection * is an object array, where the order matters (the array represents columns returned by the query). Be careful if you change this query in any way. */ - @Query("SELECT t.myResourceType, t.myId, t.myDeleted FROM ResourceTable t WHERE t.myId IN (:pid) AND t.myPartitionIdValue IN :partition_id") - Collection findLookupFieldsByResourcePidInPartitionIds(@Param("pid") List thePids, @Param("partition_id") Collection thePartitionId); + @Query( + "SELECT t.myResourceType, t.myId, t.myDeleted FROM ResourceTable t WHERE t.myId IN (:pid) AND t.myPartitionIdValue IN :partition_id") + Collection findLookupFieldsByResourcePidInPartitionIds( + @Param("pid") List thePids, @Param("partition_id") Collection thePartitionId); /** * This method returns a Collection where each row is an element in the collection. Each element in the collection * is an object array, where the order matters (the array represents columns returned by the query). Be careful if you change this query in any way. */ - @Query("SELECT t.myResourceType, t.myId, t.myDeleted FROM ResourceTable t WHERE t.myId IN (:pid) AND (t.myPartitionIdValue IS NULL OR t.myPartitionIdValue IN :partition_id)") - Collection findLookupFieldsByResourcePidInPartitionIdsOrNullPartition(@Param("pid") List thePids, @Param("partition_id") Collection thePartitionId); + @Query( + "SELECT t.myResourceType, t.myId, t.myDeleted FROM ResourceTable t WHERE t.myId IN (:pid) AND (t.myPartitionIdValue IS NULL OR t.myPartitionIdValue IN :partition_id)") + Collection findLookupFieldsByResourcePidInPartitionIdsOrNullPartition( + @Param("pid") List thePids, @Param("partition_id") Collection thePartitionId); /** * This method returns a Collection where each row is an element in the collection. Each element in the collection * is an object array, where the order matters (the array represents columns returned by the query). Be careful if you change this query in any way. 
*/ - @Query("SELECT t.myResourceType, t.myId, t.myDeleted FROM ResourceTable t WHERE t.myId IN (:pid) AND t.myPartitionIdValue IS NULL") + @Query( + "SELECT t.myResourceType, t.myId, t.myDeleted FROM ResourceTable t WHERE t.myId IN (:pid) AND t.myPartitionIdValue IS NULL") Collection findLookupFieldsByResourcePidInPartitionNull(@Param("pid") List thePids); @Query("SELECT t.myVersion FROM ResourceTable t WHERE t.myId = :pid") @@ -134,17 +165,24 @@ public interface IResourceTableDao extends JpaRepository, I @Query("SELECT t.myId, t.myResourceType, t.myVersion FROM ResourceTable t WHERE t.myId IN ( :pid )") Collection getResourceVersionsForPid(@Param("pid") List pid); - @Query("SELECT t FROM ResourceTable t LEFT JOIN FETCH t.myForcedId WHERE t.myPartitionId.myPartitionId IS NULL AND t.myId = :pid") + @Query( + "SELECT t FROM ResourceTable t LEFT JOIN FETCH t.myForcedId WHERE t.myPartitionId.myPartitionId IS NULL AND t.myId = :pid") Optional readByPartitionIdNull(@Param("pid") Long theResourceId); - @Query("SELECT t FROM ResourceTable t LEFT JOIN FETCH t.myForcedId WHERE t.myPartitionId.myPartitionId = :partitionId AND t.myId = :pid") - Optional readByPartitionId(@Param("partitionId") int thePartitionId, @Param("pid") Long theResourceId); + @Query( + "SELECT t FROM ResourceTable t LEFT JOIN FETCH t.myForcedId WHERE t.myPartitionId.myPartitionId = :partitionId AND t.myId = :pid") + Optional readByPartitionId( + @Param("partitionId") int thePartitionId, @Param("pid") Long theResourceId); - @Query("SELECT t FROM ResourceTable t LEFT JOIN FETCH t.myForcedId WHERE (t.myPartitionId.myPartitionId IS NULL OR t.myPartitionId.myPartitionId IN (:partitionIds)) AND t.myId = :pid") - Optional readByPartitionIdsOrNull(@Param("partitionIds") Collection thrValues, @Param("pid") Long theResourceId); + @Query( + "SELECT t FROM ResourceTable t LEFT JOIN FETCH t.myForcedId WHERE (t.myPartitionId.myPartitionId IS NULL OR t.myPartitionId.myPartitionId IN (:partitionIds)) AND t.myId = :pid") + Optional readByPartitionIdsOrNull( + @Param("partitionIds") Collection thrValues, @Param("pid") Long theResourceId); - @Query("SELECT t FROM ResourceTable t LEFT JOIN FETCH t.myForcedId WHERE t.myPartitionId.myPartitionId IN (:partitionIds) AND t.myId = :pid") - Optional readByPartitionIds(@Param("partitionIds") Collection thrValues, @Param("pid") Long theResourceId); + @Query( + "SELECT t FROM ResourceTable t LEFT JOIN FETCH t.myForcedId WHERE t.myPartitionId.myPartitionId IN (:partitionIds) AND t.myId = :pid") + Optional readByPartitionIds( + @Param("partitionIds") Collection thrValues, @Param("pid") Long theResourceId); @Query("SELECT t FROM ResourceTable t LEFT JOIN FETCH t.myForcedId WHERE t.myId IN :pids") List findAllByIdAndLoadForcedIds(@Param("pids") List thePids); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceTagDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceTagDao.java index d2484b721cf..f7a5a3b88c5 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceTagDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IResourceTagDao.java @@ -19,22 +19,19 @@ */ package ca.uhn.fhir.jpa.dao.data; -import java.util.Collection; - +import ca.uhn.fhir.jpa.model.entity.ResourceTag; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Query; import 
org.springframework.data.repository.query.Param; -import ca.uhn.fhir.jpa.model.entity.ResourceTag; +import java.util.Collection; public interface IResourceTagDao extends JpaRepository, IHapiFhirJpaRepository { - @Query("" + - "SELECT t FROM ResourceTag t " + - "INNER JOIN FETCH t.myTag td " + - "WHERE t.myResourceId in (:pids)") + @Query("" + "SELECT t FROM ResourceTag t " + "INNER JOIN FETCH t.myTag td " + "WHERE t.myResourceId in (:pids)") Collection findByResourceIds(@Param("pids") Collection pids); @Modifying @Query("delete from ResourceTag t WHERE t.myResourceId = :resId") - void deleteByResourceId(@Param("resId") Long theResourcePid);} + void deleteByResourceId(@Param("resId") Long theResourcePid); +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchDao.java index aa666741938..6a9fa7fd5ab 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchDao.java @@ -36,14 +36,19 @@ public interface ISearchDao extends JpaRepository, IHapiFhirJpaRep @Query("SELECT s FROM Search s LEFT OUTER JOIN FETCH s.myIncludes WHERE s.myUuid = :uuid") Optional findByUuidAndFetchIncludes(@Param("uuid") String theUuid); - @Query("SELECT s.myId FROM Search s WHERE (s.myCreated < :cutoff) AND (s.myExpiryOrNull IS NULL OR s.myExpiryOrNull < :now) AND (s.myDeleted IS NULL OR s.myDeleted = FALSE)") + @Query( + "SELECT s.myId FROM Search s WHERE (s.myCreated < :cutoff) AND (s.myExpiryOrNull IS NULL OR s.myExpiryOrNull < :now) AND (s.myDeleted IS NULL OR s.myDeleted = FALSE)") Slice findWhereCreatedBefore(@Param("cutoff") Date theCutoff, @Param("now") Date theNow, Pageable thePage); @Query("SELECT s.myId FROM Search s WHERE s.myDeleted = TRUE") Slice findDeleted(Pageable thePage); - @Query("SELECT s FROM Search s WHERE s.myResourceType = :type AND s.mySearchQueryStringHash = :hash AND (s.myCreated > :cutoff) AND s.myDeleted = FALSE AND s.myStatus <> 'FAILED'") - Collection findWithCutoffOrExpiry(@Param("type") String theResourceType, @Param("hash") int theHashCode, @Param("cutoff") Date theCreatedCutoff); + @Query( + "SELECT s FROM Search s WHERE s.myResourceType = :type AND s.mySearchQueryStringHash = :hash AND (s.myCreated > :cutoff) AND s.myDeleted = FALSE AND s.myStatus <> 'FAILED'") + Collection findWithCutoffOrExpiry( + @Param("type") String theResourceType, + @Param("hash") int theHashCode, + @Param("cutoff") Date theCreatedCutoff); @Query("SELECT COUNT(s) FROM Search s WHERE s.myDeleted = TRUE") int countDeleted(); @@ -55,5 +60,4 @@ public interface ISearchDao extends JpaRepository, IHapiFhirJpaRep @Modifying @Query("DELETE FROM Search s WHERE s.myId = :pid") void deleteByPid(@Param("pid") Long theId); - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchIncludeDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchIncludeDao.java index 87c60aa5a85..9312d300f0a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchIncludeDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchIncludeDao.java @@ -26,8 +26,8 @@ import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; public interface ISearchIncludeDao extends JpaRepository, IHapiFhirJpaRepository { - + @Modifying - @Query(value="DELETE FROM SearchInclude r WHERE 
r.mySearchPid = :search") + @Query(value = "DELETE FROM SearchInclude r WHERE r.mySearchPid = :search") void deleteForSearch(@Param("search") Long theSearchPid); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchParamPresentDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchParamPresentDao.java index bea83fc61e4..7ced0fd721a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchParamPresentDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISearchParamPresentDao.java @@ -36,5 +36,4 @@ public interface ISearchParamPresentDao extends JpaRepository, IHapiFhirJpaRepository { - - @Query(value="SELECT r.myResourcePid FROM SearchResult r WHERE r.mySearchPid = :search ORDER BY r.myOrder ASC") + + @Query(value = "SELECT r.myResourcePid FROM SearchResult r WHERE r.mySearchPid = :search ORDER BY r.myOrder ASC") Slice findWithSearchPid(@Param("search") Long theSearchPid, Pageable thePage); - @Query(value="SELECT r.myResourcePid FROM SearchResult r WHERE r.mySearchPid = :search") + @Query(value = "SELECT r.myResourcePid FROM SearchResult r WHERE r.mySearchPid = :search") List findWithSearchPidOrderIndependent(@Param("search") Long theSearchPid); - @Query(value="SELECT r.myId FROM SearchResult r WHERE r.mySearchPid = :search") + @Query(value = "SELECT r.myId FROM SearchResult r WHERE r.mySearchPid = :search") Slice findForSearch(Pageable thePage, @Param("search") Long theSearchPid); @Modifying diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISubscriptionTableDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISubscriptionTableDao.java index 9280dd85a0d..e356a04b79b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISubscriptionTableDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ISubscriptionTableDao.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.jpa.dao.data; -import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.entity.SubscriptionTable; +import ca.uhn.fhir.jpa.model.entity.ResourceTable; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Query; @@ -34,5 +34,4 @@ public interface ISubscriptionTableDao extends JpaRepository, IHapiFhirJpaRepository { // nothing diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermCodeSystemDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermCodeSystemDao.java index bbac84fd706..23373bc2f7d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermCodeSystemDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermCodeSystemDao.java @@ -19,15 +19,14 @@ */ package ca.uhn.fhir.jpa.dao.data; +import ca.uhn.fhir.jpa.entity.TermCodeSystem; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; -import ca.uhn.fhir.jpa.entity.TermCodeSystem; - import java.util.Optional; -public interface ITermCodeSystemDao extends JpaRepository, IHapiFhirJpaRepository { +public interface ITermCodeSystemDao extends JpaRepository, IHapiFhirJpaRepository { @Query("SELECT cs FROM TermCodeSystem cs WHERE cs.myCodeSystemUri = :code_system_uri") TermCodeSystem findByCodeSystemUri(@Param("code_system_uri") String 
theCodeSystemUri); @@ -37,5 +36,4 @@ public interface ITermCodeSystemDao extends JpaRepository @Query("SELECT cs FROM TermCodeSystem cs WHERE cs.myCurrentVersion.myId = :csv_pid") Optional findWithCodeSystemVersionAsCurrentVersion(@Param("csv_pid") Long theCodeSystemVersionPid); - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermCodeSystemVersionDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermCodeSystemVersionDao.java index a3c71cb5ed9..a3124f7d4cc 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermCodeSystemVersionDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermCodeSystemVersionDao.java @@ -37,20 +37,28 @@ public interface ITermCodeSystemVersionDao extends JpaRepository findSortedPidsByCodeSystemPid(@Param("codesystem_pid") Long theCodeSystemPid); - @Query("SELECT cs FROM TermCodeSystemVersion cs WHERE cs.myCodeSystemPid = :codesystem_pid AND cs.myCodeSystemVersionId = :codesystem_version_id") - TermCodeSystemVersion findByCodeSystemPidAndVersion(@Param("codesystem_pid") Long theCodeSystemPid, @Param("codesystem_version_id") String theCodeSystemVersionId); + @Query( + "SELECT cs FROM TermCodeSystemVersion cs WHERE cs.myCodeSystemPid = :codesystem_pid AND cs.myCodeSystemVersionId = :codesystem_version_id") + TermCodeSystemVersion findByCodeSystemPidAndVersion( + @Param("codesystem_pid") Long theCodeSystemPid, + @Param("codesystem_version_id") String theCodeSystemVersionId); - @Query("SELECT tcsv FROM TermCodeSystemVersion tcsv INNER JOIN FETCH TermCodeSystem tcs on tcs.myPid = tcsv.myCodeSystemPid " + - "WHERE tcs.myCodeSystemUri = :code_system_uri AND tcsv.myCodeSystemVersionId = :codesystem_version_id") - TermCodeSystemVersion findByCodeSystemUriAndVersion(@Param("code_system_uri") String theCodeSystemUri, @Param("codesystem_version_id") String theCodeSystemVersionId); + @Query( + "SELECT tcsv FROM TermCodeSystemVersion tcsv INNER JOIN FETCH TermCodeSystem tcs on tcs.myPid = tcsv.myCodeSystemPid " + + "WHERE tcs.myCodeSystemUri = :code_system_uri AND tcsv.myCodeSystemVersionId = :codesystem_version_id") + TermCodeSystemVersion findByCodeSystemUriAndVersion( + @Param("code_system_uri") String theCodeSystemUri, + @Param("codesystem_version_id") String theCodeSystemVersionId); - @Query("SELECT cs FROM TermCodeSystemVersion cs WHERE cs.myCodeSystemPid = :codesystem_pid AND cs.myCodeSystemVersionId IS NULL") + @Query( + "SELECT cs FROM TermCodeSystemVersion cs WHERE cs.myCodeSystemPid = :codesystem_pid AND cs.myCodeSystemVersionId IS NULL") TermCodeSystemVersion findByCodeSystemPidVersionIsNull(@Param("codesystem_pid") Long theCodeSystemPid); @Query("SELECT cs FROM TermCodeSystemVersion cs WHERE cs.myResourcePid = :resource_id") List findByCodeSystemResourcePid(@Param("resource_id") Long theCodeSystemResourcePid); - @Query("SELECT cs FROM TermCodeSystemVersion cs WHERE cs.myCodeSystemHavingThisVersionAsCurrentVersionIfAny.myResource.myId = :resource_id") - TermCodeSystemVersion findCurrentVersionForCodeSystemResourcePid(@Param("resource_id") Long theCodeSystemResourcePid); - + @Query( + "SELECT cs FROM TermCodeSystemVersion cs WHERE cs.myCodeSystemHavingThisVersionAsCurrentVersionIfAny.myResource.myId = :resource_id") + TermCodeSystemVersion findCurrentVersionForCodeSystemResourcePid( + @Param("resource_id") Long theCodeSystemResourcePid); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptDao.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptDao.java index 2aab148ac93..bc04905e77f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptDao.java @@ -33,24 +33,23 @@ import java.util.Optional; public interface ITermConceptDao extends JpaRepository, IHapiFhirJpaRepository { - @Query("SELECT t FROM TermConcept t " + - "LEFT JOIN FETCH t.myDesignations d " + - "WHERE t.myId IN :pids") + @Query("SELECT t FROM TermConcept t " + "LEFT JOIN FETCH t.myDesignations d " + "WHERE t.myId IN :pids") List fetchConceptsAndDesignationsByPid(@Param("pids") List thePids); - @Query("SELECT t FROM TermConcept t " + - "LEFT JOIN FETCH t.myDesignations d " + - "WHERE t.myCodeSystemVersionPid = :pid") + @Query("SELECT t FROM TermConcept t " + "LEFT JOIN FETCH t.myDesignations d " + + "WHERE t.myCodeSystemVersionPid = :pid") List fetchConceptsAndDesignationsByVersionPid(@Param("pid") Long theCodeSystemVersionPid); @Query("SELECT COUNT(t) FROM TermConcept t WHERE t.myCodeSystem.myId = :cs_pid") Integer countByCodeSystemVersion(@Param("cs_pid") Long thePid); @Query("SELECT c FROM TermConcept c WHERE c.myCodeSystemVersionPid = :csv_pid AND c.myCode = :code") - Optional findByCodeSystemAndCode(@Param("csv_pid") Long theCodeSystemVersionPid, @Param("code") String theCode); + Optional findByCodeSystemAndCode( + @Param("csv_pid") Long theCodeSystemVersionPid, @Param("code") String theCode); @Query("FROM TermConcept WHERE myCodeSystemVersionPid = :csv_pid AND myCode in (:codeList)") - List findByCodeSystemAndCodeList(@Param("csv_pid") Long theCodeSystem, @Param("codeList") List theCodeList); + List findByCodeSystemAndCodeList( + @Param("csv_pid") Long theCodeSystem, @Param("codeList") List theCodeList); @Modifying @Query("DELETE FROM TermConcept WHERE myCodeSystem.myId = :cs_pid") @@ -61,5 +60,4 @@ public interface ITermConceptDao extends JpaRepository, IHapi @Query("SELECT t FROM TermConcept t WHERE t.myIndexStatus = null") Page findResourcesRequiringReindexing(Pageable thePageRequest); - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptDesignationDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptDesignationDao.java index 706fb8292e7..045cf22868f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptDesignationDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptDesignationDao.java @@ -25,10 +25,10 @@ import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; -public interface ITermConceptDesignationDao extends JpaRepository, IHapiFhirJpaRepository { +public interface ITermConceptDesignationDao + extends JpaRepository, IHapiFhirJpaRepository { @Modifying @Query("DELETE FROM TermConceptDesignation WHERE myCodeSystemVersion.myId = :csv_pid") int deleteByCodeSystemVersion(@Param("csv_pid") Long thePid); - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptMapDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptMapDao.java index f23f3a77f7d..d8057478595 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptMapDao.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptMapDao.java @@ -19,16 +19,15 @@ */ package ca.uhn.fhir.jpa.dao.data; -import java.util.List; -import java.util.Optional; - +import ca.uhn.fhir.jpa.entity.TermConceptMap; import org.springframework.data.domain.Pageable; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; -import ca.uhn.fhir.jpa.entity.TermConceptMap; +import java.util.List; +import java.util.Optional; public interface ITermConceptMapDao extends JpaRepository, IHapiFhirJpaRepository { @Query("DELETE FROM TermConceptMap cm WHERE cm.myId = :pid") @@ -42,14 +41,18 @@ public interface ITermConceptMapDao extends JpaRepository, @Deprecated @Query("SELECT cm FROM TermConceptMap cm WHERE cm.myUrl = :url and cm.myVersion is null") Optional findTermConceptMapByUrl(@Param("url") String theUrl); - + @Query("SELECT cm FROM TermConceptMap cm WHERE cm.myUrl = :url and cm.myVersion is null") Optional findTermConceptMapByUrlAndNullVersion(@Param("url") String theUrl); // Note that last updated version is considered current version. - @Query(value="SELECT cm FROM TermConceptMap cm INNER JOIN ResourceTable r ON r.myId = cm.myResourcePid WHERE cm.myUrl = :url ORDER BY r.myUpdated DESC") - List getTermConceptMapEntitiesByUrlOrderByMostRecentUpdate(Pageable thePage, @Param("url") String theUrl); + @Query( + value = + "SELECT cm FROM TermConceptMap cm INNER JOIN ResourceTable r ON r.myId = cm.myResourcePid WHERE cm.myUrl = :url ORDER BY r.myUpdated DESC") + List getTermConceptMapEntitiesByUrlOrderByMostRecentUpdate( + Pageable thePage, @Param("url") String theUrl); @Query("SELECT cm FROM TermConceptMap cm WHERE cm.myUrl = :url AND cm.myVersion = :version") - Optional findTermConceptMapByUrlAndVersion(@Param("url") String theUrl, @Param("version") String theVersion); + Optional findTermConceptMapByUrlAndVersion( + @Param("url") String theUrl, @Param("version") String theVersion); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptMapGroupElementDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptMapGroupElementDao.java index 1a5044cb82d..f5753c02978 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptMapGroupElementDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptMapGroupElementDao.java @@ -25,7 +25,8 @@ import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; -public interface ITermConceptMapGroupElementDao extends JpaRepository, IHapiFhirJpaRepository { +public interface ITermConceptMapGroupElementDao + extends JpaRepository, IHapiFhirJpaRepository { @Query("DELETE FROM TermConceptMapGroupElement e WHERE e.myId = :pid") @Modifying void deleteTermConceptMapGroupElementById(@Param("pid") Long theId); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptMapGroupElementTargetDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptMapGroupElementTargetDao.java index 67a976c4013..a2f0aed109a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptMapGroupElementTargetDao.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptMapGroupElementTargetDao.java @@ -25,7 +25,8 @@ import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; -public interface ITermConceptMapGroupElementTargetDao extends JpaRepository, IHapiFhirJpaRepository { +public interface ITermConceptMapGroupElementTargetDao + extends JpaRepository, IHapiFhirJpaRepository { @Query("DELETE FROM TermConceptMapGroupElementTarget t WHERE t.myId = :pid") @Modifying void deleteTermConceptMapGroupElementTargetById(@Param("pid") Long theId); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptParentChildLinkDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptParentChildLinkDao.java index 90496eb66eb..925b7250a70 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptParentChildLinkDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptParentChildLinkDao.java @@ -27,7 +27,8 @@ import org.springframework.data.repository.query.Param; import java.util.Collection; -public interface ITermConceptParentChildLinkDao extends JpaRepository, IHapiFhirJpaRepository { +public interface ITermConceptParentChildLinkDao + extends JpaRepository, IHapiFhirJpaRepository { @Query("SELECT t.myParentPid FROM TermConceptParentChildLink t WHERE t.myChildPid = :child_pid") Collection findAllWithChild(@Param("child_pid") Long theConceptPid); @@ -35,5 +36,4 @@ public interface ITermConceptParentChildLinkDao extends JpaRepository findByTermValueSetIdSystemOnly(Pageable thePage, @Param("pid") Long theValueSetId, @Param("system_url") String theSystem); + List findByTermValueSetIdSystemOnly( + Pageable thePage, @Param("pid") Long theValueSetId, @Param("system_url") String theSystem); - @Query("SELECT vsc FROM TermValueSetConcept vsc WHERE vsc.myValueSetPid = :pid AND vsc.mySystem = :system_url AND vsc.myCode = :codeval") - Optional findByTermValueSetIdSystemAndCode(@Param("pid") Long theValueSetId, @Param("system_url") String theSystem, @Param("codeval") String theCode); + @Query( + "SELECT vsc FROM TermValueSetConcept vsc WHERE vsc.myValueSetPid = :pid AND vsc.mySystem = :system_url AND vsc.myCode = :codeval") + Optional findByTermValueSetIdSystemAndCode( + @Param("pid") Long theValueSetId, @Param("system_url") String theSystem, @Param("codeval") String theCode); - @Query("SELECT vsc FROM TermValueSetConcept vsc WHERE vsc.myValueSetPid = :pid AND vsc.mySystem = :system_url AND vsc.mySystemVer = :system_version AND vsc.myCode = :codeval") - Optional findByTermValueSetIdSystemAndCodeWithVersion(@Param("pid") Long theValueSetId, @Param("system_url") String theSystem, @Param("system_version") String theSystemVersion, @Param("codeval") String theCode); + @Query( + "SELECT vsc FROM TermValueSetConcept vsc WHERE vsc.myValueSetPid = :pid AND vsc.mySystem = :system_url AND vsc.mySystemVer = :system_version AND vsc.myCode = :codeval") + Optional findByTermValueSetIdSystemAndCodeWithVersion( + @Param("pid") Long theValueSetId, + @Param("system_url") String theSystem, + @Param("system_version") String theSystemVersion, + @Param("codeval") String theCode); - @Query("SELECT vsc FROM TermValueSetConcept vsc WHERE vsc.myValueSet.myResourcePid = :resource_pid AND vsc.myCode = :codeval") - List findByValueSetResourcePidAndCode(@Param("resource_pid") Long theValueSetId, 
@Param("codeval") String theCode); + @Query( + "SELECT vsc FROM TermValueSetConcept vsc WHERE vsc.myValueSet.myResourcePid = :resource_pid AND vsc.myCode = :codeval") + List findByValueSetResourcePidAndCode( + @Param("resource_pid") Long theValueSetId, @Param("codeval") String theCode); - @Query("SELECT vsc FROM TermValueSetConcept vsc WHERE vsc.myValueSet.myResourcePid = :resource_pid AND vsc.mySystem = :system_url AND vsc.myCode = :codeval") - Optional findByValueSetResourcePidSystemAndCode(@Param("resource_pid") Long theValueSetId, @Param("system_url") String theSystem, @Param("codeval") String theCode); + @Query( + "SELECT vsc FROM TermValueSetConcept vsc WHERE vsc.myValueSet.myResourcePid = :resource_pid AND vsc.mySystem = :system_url AND vsc.myCode = :codeval") + Optional findByValueSetResourcePidSystemAndCode( + @Param("resource_pid") Long theValueSetId, + @Param("system_url") String theSystem, + @Param("codeval") String theCode); - @Query("SELECT vsc FROM TermValueSetConcept vsc WHERE vsc.myValueSet.myResourcePid = :resource_pid AND vsc.mySystem = :system_url AND vsc.mySystemVer = :system_version AND vsc.myCode = :codeval") - Optional findByValueSetResourcePidSystemAndCodeWithVersion(@Param("resource_pid") Long theValueSetId, @Param("system_url") String theSystem, @Param("system_version") String theSystemVersion, @Param("codeval") String theCode); + @Query( + "SELECT vsc FROM TermValueSetConcept vsc WHERE vsc.myValueSet.myResourcePid = :resource_pid AND vsc.mySystem = :system_url AND vsc.mySystemVer = :system_version AND vsc.myCode = :codeval") + Optional findByValueSetResourcePidSystemAndCodeWithVersion( + @Param("resource_pid") Long theValueSetId, + @Param("system_url") String theSystem, + @Param("system_version") String theSystemVersion, + @Param("codeval") String theCode); @Query("SELECT vsc.myId FROM TermValueSetConcept vsc WHERE vsc.myValueSetPid = :pid ORDER BY vsc.myId") List findIdsByTermValueSetId(@Param("pid") Long theValueSetId); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDesignationDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDesignationDao.java index a3690d32193..1b9ba470420 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDesignationDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptDesignationDao.java @@ -25,7 +25,8 @@ import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; -public interface ITermValueSetConceptDesignationDao extends JpaRepository, IHapiFhirJpaRepository { +public interface ITermValueSetConceptDesignationDao + extends JpaRepository, IHapiFhirJpaRepository { @Query("SELECT COUNT(vscd) FROM TermValueSetConceptDesignation vscd WHERE vscd.myValueSetPid = :pid") Integer countByTermValueSetId(@Param("pid") Long theValueSetId); @@ -33,5 +34,4 @@ public interface ITermValueSetConceptDesignationDao extends JpaRepository, IHapiFhirJpaRepository { +public interface ITermValueSetConceptViewDao + extends JpaRepository, IHapiFhirJpaRepository { - @Query("SELECT v FROM TermValueSetConceptView v WHERE v.myConceptValueSetPid = :pid AND v.myConceptOrder >= :from AND v.myConceptOrder < :to ORDER BY v.myConceptOrder") - List findByTermValueSetId(@Param("from") int theFrom, @Param("to") int theTo, @Param("pid") Long theValueSetId); - - @Query("SELECT v FROM 
TermValueSetConceptView v WHERE v.myConceptValueSetPid = :pid AND LOWER(v.myConceptDisplay) LIKE :display ORDER BY v.myConceptOrder") - List findByTermValueSetId(@Param("pid") Long theValueSetId, @Param("display") String theDisplay); + @Query( + "SELECT v FROM TermValueSetConceptView v WHERE v.myConceptValueSetPid = :pid AND v.myConceptOrder >= :from AND v.myConceptOrder < :to ORDER BY v.myConceptOrder") + List findByTermValueSetId( + @Param("from") int theFrom, @Param("to") int theTo, @Param("pid") Long theValueSetId); + @Query( + "SELECT v FROM TermValueSetConceptView v WHERE v.myConceptValueSetPid = :pid AND LOWER(v.myConceptDisplay) LIKE :display ORDER BY v.myConceptOrder") + List findByTermValueSetId( + @Param("pid") Long theValueSetId, @Param("display") String theDisplay); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptViewOracleDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptViewOracleDao.java index 8c8d6d84457..82e5e67c2a3 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptViewOracleDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetConceptViewOracleDao.java @@ -24,13 +24,17 @@ import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; -import java.io.Serializable; import java.util.List; -public interface ITermValueSetConceptViewOracleDao extends JpaRepository, IHapiFhirJpaRepository { - @Query("SELECT v FROM TermValueSetConceptViewOracle v WHERE v.myConceptValueSetPid = :pid AND v.myConceptOrder >= :from AND v.myConceptOrder < :to ORDER BY v.myConceptOrder") - List findByTermValueSetId(@Param("from") int theFrom, @Param("to") int theTo, @Param("pid") Long theValueSetId); +public interface ITermValueSetConceptViewOracleDao + extends JpaRepository, IHapiFhirJpaRepository { + @Query( + "SELECT v FROM TermValueSetConceptViewOracle v WHERE v.myConceptValueSetPid = :pid AND v.myConceptOrder >= :from AND v.myConceptOrder < :to ORDER BY v.myConceptOrder") + List findByTermValueSetId( + @Param("from") int theFrom, @Param("to") int theTo, @Param("pid") Long theValueSetId); - @Query("SELECT v FROM TermValueSetConceptViewOracle v WHERE v.myConceptValueSetPid = :pid AND LOWER(v.myConceptDisplay) LIKE :display ORDER BY v.myConceptOrder") - List findByTermValueSetId(@Param("pid") Long theValueSetId, @Param("display") String theDisplay); + @Query( + "SELECT v FROM TermValueSetConceptViewOracle v WHERE v.myConceptValueSetPid = :pid AND LOWER(v.myConceptDisplay) LIKE :display ORDER BY v.myConceptOrder") + List findByTermValueSetId( + @Param("pid") Long theValueSetId, @Param("display") String theDisplay); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetDao.java index 5c0b19056b5..568c19574ae 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermValueSetDao.java @@ -41,23 +41,27 @@ public interface ITermValueSetDao extends JpaRepository, IHa Optional findByUrl(@Param("url") String theUrl); @Query("SELECT vs FROM TermValueSet vs WHERE vs.myExpansionStatus = :expansion_status") - Slice findByExpansionStatus(Pageable pageable, @Param("expansion_status") 
TermValueSetPreExpansionStatusEnum theExpansionStatus); + Slice findByExpansionStatus( + Pageable pageable, @Param("expansion_status") TermValueSetPreExpansionStatusEnum theExpansionStatus); - @Query(value="SELECT vs FROM TermValueSet vs INNER JOIN ResourceTable r ON r.myId = vs.myResourcePid WHERE vs.myUrl = :url ORDER BY r.myUpdated DESC") + @Query( + value = + "SELECT vs FROM TermValueSet vs INNER JOIN ResourceTable r ON r.myId = vs.myResourcePid WHERE vs.myUrl = :url ORDER BY r.myUpdated DESC") List findTermValueSetByUrl(Pageable thePage, @Param("url") String theUrl); /** * The current TermValueSet is not necessarily the last uploaded anymore, but the current VS resource * is pointed by a specific ForcedId, so we locate current ValueSet as the one pointing to current VS resource */ - @Query(value="SELECT vs FROM ForcedId f, TermValueSet vs where f.myForcedId = :forcedId and vs.myResource = f.myResource") + @Query( + value = + "SELECT vs FROM ForcedId f, TermValueSet vs where f.myForcedId = :forcedId and vs.myResource = f.myResource") Optional findTermValueSetByForcedId(@Param("forcedId") String theForcedId); @Query("SELECT vs FROM TermValueSet vs WHERE vs.myUrl = :url AND vs.myVersion IS NULL") Optional findTermValueSetByUrlAndNullVersion(@Param("url") String theUrl); @Query("SELECT vs FROM TermValueSet vs WHERE vs.myUrl = :url AND vs.myVersion = :version") - Optional findTermValueSetByUrlAndVersion(@Param("url") String theUrl, @Param("version") String theVersion); - - + Optional findTermValueSetByUrlAndVersion( + @Param("url") String theUrl, @Param("version") String theVersion); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/custom/IForcedIdDaoImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/custom/IForcedIdDaoImpl.java index a00004d685c..9f271a15df2 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/custom/IForcedIdDaoImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/custom/IForcedIdDaoImpl.java @@ -21,10 +21,10 @@ package ca.uhn.fhir.jpa.dao.data.custom; import org.springframework.stereotype.Component; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; import java.util.Collection; import java.util.List; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; @Component // Don't change the name of this class. Spring Data requires the name to match. @@ -39,7 +39,8 @@ public class IForcedIdDaoImpl implements IForcedIdQueries { * is an object array, where the order matters (the array represents columns returned by the query). * Deleted resources are not filtered. */ - public Collection findAndResolveByForcedIdWithNoTypeIncludeDeleted(String theResourceType, Collection theForcedIds) { + public Collection findAndResolveByForcedIdWithNoTypeIncludeDeleted( + String theResourceType, Collection theForcedIds) { return findAndResolveByForcedIdWithNoType(theResourceType, theForcedIds, false); } @@ -48,22 +49,23 @@ public class IForcedIdDaoImpl implements IForcedIdQueries { * is an object array, where the order matters (the array represents columns returned by the query). * Deleted resources are optionally filtered. Be careful if you change this query in any way. 
*/ - public Collection findAndResolveByForcedIdWithNoType(String theResourceType, Collection theForcedIds, boolean theExcludeDeleted) { - String query = "" + - "SELECT " + - " f.myResourceType, f.myResourcePid, f.myForcedId, t.myDeleted " + - "FROM ForcedId f " + - "JOIN ResourceTable t ON t.myId = f.myResourcePid " + - "WHERE f.myResourceType = :resource_type AND f.myForcedId IN ( :forced_id )"; + public Collection findAndResolveByForcedIdWithNoType( + String theResourceType, Collection theForcedIds, boolean theExcludeDeleted) { + String query = "" + "SELECT " + + " f.myResourceType, f.myResourcePid, f.myForcedId, t.myDeleted " + + "FROM ForcedId f " + + "JOIN ResourceTable t ON t.myId = f.myResourcePid " + + "WHERE f.myResourceType = :resource_type AND f.myForcedId IN ( :forced_id )"; if (theExcludeDeleted) { query += " AND t.myDeleted IS NULL"; } - return myEntityManager.createQuery(query) - .setParameter("resource_type", theResourceType) - .setParameter("forced_id", theForcedIds) - .getResultList(); + return myEntityManager + .createQuery(query) + .setParameter("resource_type", theResourceType) + .setParameter("forced_id", theForcedIds) + .getResultList(); } /** @@ -71,49 +73,27 @@ public class IForcedIdDaoImpl implements IForcedIdQueries { * is an object array, where the order matters (the array represents columns returned by the query). * Deleted resources are optionally filtered. Be careful if you change this query in any way. */ - public Collection findAndResolveByForcedIdWithNoTypeInPartition(String theResourceType, Collection theForcedIds, Collection thePartitionId, boolean theExcludeDeleted) { - String query = "" + - "SELECT " + - " f.myResourceType, f.myResourcePid, f.myForcedId, t.myDeleted " + - "FROM ForcedId f " + - "JOIN ResourceTable t ON t.myId = f.myResourcePid " + - "WHERE f.myResourceType = :resource_type AND f.myForcedId IN ( :forced_id ) AND f.myPartitionIdValue IN ( :partition_id )"; - + public Collection findAndResolveByForcedIdWithNoTypeInPartition( + String theResourceType, + Collection theForcedIds, + Collection thePartitionId, + boolean theExcludeDeleted) { + String query = "" + "SELECT " + + " f.myResourceType, f.myResourcePid, f.myForcedId, t.myDeleted " + + "FROM ForcedId f " + + "JOIN ResourceTable t ON t.myId = f.myResourcePid " + + "WHERE f.myResourceType = :resource_type AND f.myForcedId IN ( :forced_id ) AND f.myPartitionIdValue IN ( :partition_id )"; if (theExcludeDeleted) { query += " AND t.myDeleted IS NULL"; } - return myEntityManager.createQuery(query) - .setParameter("resource_type", theResourceType) - .setParameter("forced_id", theForcedIds) - .setParameter("partition_id", thePartitionId) - .getResultList(); - } - - - /** - * This method returns a Collection where each row is an element in the collection. Each element in the collection - * is an object array, where the order matters (the array represents columns returned by the query). - * Deleted resources are optionally filtered. Be careful if you change this query in any way. 
- */ - public Collection findAndResolveByForcedIdWithNoTypeInPartitionNull(String theResourceType, Collection theForcedIds, boolean theExcludeDeleted) { - String query = "" + - "SELECT " + - " f.myResourceType, f.myResourcePid, f.myForcedId, t.myDeleted " + - "FROM ForcedId f " + - "JOIN ResourceTable t ON t.myId = f.myResourcePid " + - "WHERE f.myResourceType = :resource_type AND f.myForcedId IN ( :forced_id ) AND f.myPartitionIdValue IS NULL"; - - - if (theExcludeDeleted) { - query += " AND t.myDeleted IS NULL"; - } - - return myEntityManager.createQuery(query) - .setParameter("resource_type", theResourceType) - .setParameter("forced_id", theForcedIds) - .getResultList(); + return myEntityManager + .createQuery(query) + .setParameter("resource_type", theResourceType) + .setParameter("forced_id", theForcedIds) + .setParameter("partition_id", thePartitionId) + .getResultList(); } /** @@ -121,22 +101,50 @@ public class IForcedIdDaoImpl implements IForcedIdQueries { * is an object array, where the order matters (the array represents columns returned by the query). * Deleted resources are optionally filtered. Be careful if you change this query in any way. */ - public Collection findAndResolveByForcedIdWithNoTypeInPartitionIdOrNullPartitionId(String theResourceType, Collection theForcedIds, List thePartitionIdsWithoutDefault, boolean theExcludeDeleted) { - String query = "" + - "SELECT " + - " f.myResourceType, f.myResourcePid, f.myForcedId, t.myDeleted " + - "FROM ForcedId f " + - "JOIN ResourceTable t ON t.myId = f.myResourcePid " + - "WHERE f.myResourceType = :resource_type AND f.myForcedId IN ( :forced_id ) AND (f.myPartitionIdValue IS NULL OR f.myPartitionIdValue IN ( :partition_id ))"; + public Collection findAndResolveByForcedIdWithNoTypeInPartitionNull( + String theResourceType, Collection theForcedIds, boolean theExcludeDeleted) { + String query = "" + "SELECT " + + " f.myResourceType, f.myResourcePid, f.myForcedId, t.myDeleted " + + "FROM ForcedId f " + + "JOIN ResourceTable t ON t.myId = f.myResourcePid " + + "WHERE f.myResourceType = :resource_type AND f.myForcedId IN ( :forced_id ) AND f.myPartitionIdValue IS NULL"; if (theExcludeDeleted) { query += " AND t.myDeleted IS NULL"; } - return myEntityManager.createQuery(query) - .setParameter("resource_type", theResourceType) - .setParameter("forced_id", theForcedIds) - .setParameter("partition_id", thePartitionIdsWithoutDefault) - .getResultList(); + return myEntityManager + .createQuery(query) + .setParameter("resource_type", theResourceType) + .setParameter("forced_id", theForcedIds) + .getResultList(); + } + + /** + * This method returns a Collection where each row is an element in the collection. Each element in the collection + * is an object array, where the order matters (the array represents columns returned by the query). + * Deleted resources are optionally filtered. Be careful if you change this query in any way. 
+ */ + public Collection findAndResolveByForcedIdWithNoTypeInPartitionIdOrNullPartitionId( + String theResourceType, + Collection theForcedIds, + List thePartitionIdsWithoutDefault, + boolean theExcludeDeleted) { + String query = "" + "SELECT " + + " f.myResourceType, f.myResourcePid, f.myForcedId, t.myDeleted " + + "FROM ForcedId f " + + "JOIN ResourceTable t ON t.myId = f.myResourcePid " + + "WHERE f.myResourceType = :resource_type AND f.myForcedId IN ( :forced_id ) AND (f.myPartitionIdValue IS NULL OR f.myPartitionIdValue IN ( :partition_id ))"; + + if (theExcludeDeleted) { + query += " AND t.myDeleted IS NULL"; + } + + return myEntityManager + .createQuery(query) + .setParameter("resource_type", theResourceType) + .setParameter("forced_id", theForcedIds) + .setParameter("partition_id", thePartitionIdsWithoutDefault) + .getResultList(); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/custom/IForcedIdQueries.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/custom/IForcedIdQueries.java index 7c83509e1de..7c5bf73b4d4 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/custom/IForcedIdQueries.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/custom/IForcedIdQueries.java @@ -29,34 +29,44 @@ public interface IForcedIdQueries { * is an object array, where the order matters (the array represents columns returned by the query). * Deleted resources should not be filtered. */ - Collection findAndResolveByForcedIdWithNoTypeIncludeDeleted(String theResourceType, Collection theForcedIds); + Collection findAndResolveByForcedIdWithNoTypeIncludeDeleted( + String theResourceType, Collection theForcedIds); /** * This method returns a Collection where each row is an element in the collection. Each element in the collection * is an object array, where the order matters (the array represents columns returned by the query). * Deleted resources are optionally filtered. */ - Collection findAndResolveByForcedIdWithNoType(String theResourceType, Collection theForcedIds, boolean theExcludeDeleted); + Collection findAndResolveByForcedIdWithNoType( + String theResourceType, Collection theForcedIds, boolean theExcludeDeleted); /** * This method returns a Collection where each row is an element in the collection. Each element in the collection * is an object array, where the order matters (the array represents columns returned by the query). * Deleted resources are optionally filtered. */ - Collection findAndResolveByForcedIdWithNoTypeInPartition(String theResourceType, Collection theForcedIds, Collection thePartitionId, boolean theExcludeDeleted); + Collection findAndResolveByForcedIdWithNoTypeInPartition( + String theResourceType, + Collection theForcedIds, + Collection thePartitionId, + boolean theExcludeDeleted); /** * This method returns a Collection where each row is an element in the collection. Each element in the collection * is an object array, where the order matters (the array represents columns returned by the query). * Deleted resources are optionally filtered. */ - Collection findAndResolveByForcedIdWithNoTypeInPartitionNull(String theResourceType, Collection theForcedIds, boolean theExcludeDeleted); + Collection findAndResolveByForcedIdWithNoTypeInPartitionNull( + String theResourceType, Collection theForcedIds, boolean theExcludeDeleted); /** * This method returns a Collection where each row is an element in the collection. 
Each element in the collection * is an object array, where the order matters (the array represents columns returned by the query). * Deleted resources are optionally filtered. */ - Collection findAndResolveByForcedIdWithNoTypeInPartitionIdOrNullPartitionId(String theNextResourceType, Collection theNextIds, List thePartitionIdsWithoutDefault, boolean theExcludeDeleted); - + Collection findAndResolveByForcedIdWithNoTypeInPartitionIdOrNullPartitionId( + String theNextResourceType, + Collection theNextIds, + List thePartitionIdsWithoutDefault, + boolean theExcludeDeleted); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoSubscriptionDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoSubscriptionDstu3.java index c84ecbcdf44..9f9730e3456 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoSubscriptionDstu3.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoSubscriptionDstu3.java @@ -34,7 +34,8 @@ import org.springframework.beans.factory.annotation.Autowired; import java.util.Date; -public class FhirResourceDaoSubscriptionDstu3 extends BaseHapiFhirResourceDao implements IFhirResourceDaoSubscription { +public class FhirResourceDaoSubscriptionDstu3 extends BaseHapiFhirResourceDao + implements IFhirResourceDaoSubscription { @Autowired private ISubscriptionTableDao mySubscriptionTableDao; @@ -47,7 +48,8 @@ public class FhirResourceDaoSubscriptionDstu3 extends BaseHapiFhirResourceDao { @@ -50,10 +50,16 @@ public class FhirSystemDaoDstu3 extends BaseHapiFhirSystemDao { retVal.addProfile(next.getCode()); break; case SECURITY_LABEL: - retVal.addSecurity().setSystem(next.getSystem()).setCode(next.getCode()).setDisplay(next.getDisplay()); + retVal.addSecurity() + .setSystem(next.getSystem()) + .setCode(next.getCode()) + .setDisplay(next.getDisplay()); break; case TAG: - retVal.addTag().setSystem(next.getSystem()).setCode(next.getCode()).setDisplay(next.getDisplay()); + retVal.addTag() + .setSystem(next.getSystem()) + .setCode(next.getCode()) + .setDisplay(next.getDisplay()); break; } } @@ -64,6 +70,4 @@ public class FhirSystemDaoDstu3 extends BaseHapiFhirSystemDao { public IBaseBundle processMessage(RequestDetails theRequestDetails, IBaseBundle theMessage) { return JpaResourceDao.throwProcessMessageNotImplemented(); } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java index a5d055739e0..34f247edce7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java @@ -84,6 +84,8 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Propagation; +import java.util.List; +import java.util.concurrent.atomic.AtomicInteger; import javax.annotation.Nullable; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; @@ -91,20 +93,23 @@ import javax.persistence.PersistenceContextType; import javax.persistence.TypedQuery; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; -import java.util.List; -import java.util.concurrent.atomic.AtomicInteger; @Service public 
class ExpungeEverythingService implements IExpungeEverythingService { private static final Logger ourLog = LoggerFactory.getLogger(ExpungeEverythingService.class); + @PersistenceContext(type = PersistenceContextType.TRANSACTION) protected EntityManager myEntityManager; + @Autowired protected IInterceptorBroadcaster myInterceptorBroadcaster; + @Autowired private HapiTransactionService myTxService; + @Autowired private MemoryCacheService myMemoryCacheService; + @Autowired private IRequestPartitionHelperSvc myRequestPartitionHelperSvc; @@ -117,76 +122,126 @@ public class ExpungeEverythingService implements IExpungeEverythingService { // Notify Interceptors about pre-action call HookParams hooks = new HookParams() - .add(AtomicInteger.class, counter) - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRESTORAGE_EXPUNGE_EVERYTHING, hooks); + .add(AtomicInteger.class, counter) + .add(RequestDetails.class, theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRESTORAGE_EXPUNGE_EVERYTHING, hooks); ourLog.info("BEGINNING GLOBAL $expunge"); Propagation propagation = Propagation.REQUIRES_NEW; - ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_EXPUNGE); - RequestPartitionId requestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequest, details); + ReadPartitionIdRequestDetails details = + ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_EXPUNGE); + RequestPartitionId requestPartitionId = + myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequest, details); - myTxService.withRequest(theRequest).withPropagation(propagation).withRequestPartitionId(requestPartitionId).execute(() -> { - counter.addAndGet(doExpungeEverythingQuery("UPDATE " + TermCodeSystem.class.getSimpleName() + " d SET d.myCurrentVersion = null")); - }); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, Batch2WorkChunkEntity.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, Batch2JobInstanceEntity.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, NpmPackageVersionResourceEntity.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, NpmPackageVersionEntity.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, NpmPackageEntity.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SearchParamPresentEntity.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, BulkImportJobFileEntity.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, BulkImportJobEntity.class, requestPartitionId)); + myTxService + .withRequest(theRequest) + .withPropagation(propagation) + .withRequestPartitionId(requestPartitionId) + .execute(() -> { + counter.addAndGet(doExpungeEverythingQuery( + "UPDATE " + TermCodeSystem.class.getSimpleName() + " d SET d.myCurrentVersion = null")); + }); + counter.addAndGet( + expungeEverythingByTypeWithoutPurging(theRequest, Batch2WorkChunkEntity.class, requestPartitionId)); + 
counter.addAndGet( + expungeEverythingByTypeWithoutPurging(theRequest, Batch2JobInstanceEntity.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging( + theRequest, NpmPackageVersionResourceEntity.class, requestPartitionId)); + counter.addAndGet( + expungeEverythingByTypeWithoutPurging(theRequest, NpmPackageVersionEntity.class, requestPartitionId)); + counter.addAndGet( + expungeEverythingByTypeWithoutPurging(theRequest, NpmPackageEntity.class, requestPartitionId)); + counter.addAndGet( + expungeEverythingByTypeWithoutPurging(theRequest, SearchParamPresentEntity.class, requestPartitionId)); + counter.addAndGet( + expungeEverythingByTypeWithoutPurging(theRequest, BulkImportJobFileEntity.class, requestPartitionId)); + counter.addAndGet( + expungeEverythingByTypeWithoutPurging(theRequest, BulkImportJobEntity.class, requestPartitionId)); counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ForcedId.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamDate.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamNumber.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamQuantity.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamQuantityNormalized.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamString.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamToken.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamUri.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamCoords.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedComboStringUnique.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedComboTokenNonUnique.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging( + theRequest, ResourceIndexedSearchParamDate.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging( + theRequest, ResourceIndexedSearchParamNumber.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging( + theRequest, ResourceIndexedSearchParamQuantity.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging( + theRequest, ResourceIndexedSearchParamQuantityNormalized.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging( + theRequest, ResourceIndexedSearchParamString.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging( + theRequest, ResourceIndexedSearchParamToken.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging( + theRequest, ResourceIndexedSearchParamUri.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging( + theRequest, ResourceIndexedSearchParamCoords.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging( + theRequest, ResourceIndexedComboStringUnique.class, requestPartitionId)); + 
counter.addAndGet(expungeEverythingByTypeWithoutPurging( + theRequest, ResourceIndexedComboTokenNonUnique.class, requestPartitionId)); counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceLink.class, requestPartitionId)); counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SearchResult.class, requestPartitionId)); counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SearchInclude.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermValueSetConceptDesignation.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermValueSetConcept.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging( + theRequest, TermValueSetConceptDesignation.class, requestPartitionId)); + counter.addAndGet( + expungeEverythingByTypeWithoutPurging(theRequest, TermValueSetConcept.class, requestPartitionId)); counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermValueSet.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptParentChildLink.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMapGroupElementTarget.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMapGroupElement.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMapGroup.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging( + theRequest, TermConceptParentChildLink.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging( + theRequest, TermConceptMapGroupElementTarget.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging( + theRequest, TermConceptMapGroupElement.class, requestPartitionId)); + counter.addAndGet( + expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMapGroup.class, requestPartitionId)); counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMap.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptProperty.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptDesignation.class, requestPartitionId)); + counter.addAndGet( + expungeEverythingByTypeWithoutPurging(theRequest, TermConceptProperty.class, requestPartitionId)); + counter.addAndGet( + expungeEverythingByTypeWithoutPurging(theRequest, TermConceptDesignation.class, requestPartitionId)); counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConcept.class, requestPartitionId)); - myTxService.withRequest(theRequest).withPropagation(propagation).withRequestPartitionId(requestPartitionId).execute(() -> { - for (TermCodeSystem next : myEntityManager.createQuery("SELECT c FROM " + TermCodeSystem.class.getName() + " c", TermCodeSystem.class).getResultList()) { - next.setCurrentVersion(null); - myEntityManager.merge(next); - } - }); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermCodeSystemVersion.class, requestPartitionId)); + myTxService + .withRequest(theRequest) + .withPropagation(propagation) + .withRequestPartitionId(requestPartitionId) + .execute(() -> { + for (TermCodeSystem next : myEntityManager + .createQuery("SELECT c FROM " + TermCodeSystem.class.getName() + " c", 
TermCodeSystem.class) + .getResultList()) { + next.setCurrentVersion(null); + myEntityManager.merge(next); + } + }); + counter.addAndGet( + expungeEverythingByTypeWithoutPurging(theRequest, TermCodeSystemVersion.class, requestPartitionId)); counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermCodeSystem.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SubscriptionTable.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceHistoryTag.class, requestPartitionId)); + counter.addAndGet( + expungeEverythingByTypeWithoutPurging(theRequest, SubscriptionTable.class, requestPartitionId)); + counter.addAndGet( + expungeEverythingByTypeWithoutPurging(theRequest, ResourceHistoryTag.class, requestPartitionId)); counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceTag.class, requestPartitionId)); counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TagDefinition.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceHistoryProvenanceEntity.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceHistoryTable.class, requestPartitionId)); - counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceSearchUrlEntity.class, requestPartitionId)); + counter.addAndGet(expungeEverythingByTypeWithoutPurging( + theRequest, ResourceHistoryProvenanceEntity.class, requestPartitionId)); + counter.addAndGet( + expungeEverythingByTypeWithoutPurging(theRequest, ResourceHistoryTable.class, requestPartitionId)); + counter.addAndGet( + expungeEverythingByTypeWithoutPurging(theRequest, ResourceSearchUrlEntity.class, requestPartitionId)); int counterBefore = counter.get(); counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceTable.class, requestPartitionId)); counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, PartitionEntity.class, requestPartitionId)); deletedResourceEntityCount = counter.get() - counterBefore; - myTxService.withRequest(theRequest).withPropagation(propagation).withRequestPartitionId(requestPartitionId).execute(() -> { - counter.addAndGet(doExpungeEverythingQuery("DELETE from " + Search.class.getSimpleName() + " d")); - }); + myTxService + .withRequest(theRequest) + .withPropagation(propagation) + .withRequestPartitionId(requestPartitionId) + .execute(() -> { + counter.addAndGet(doExpungeEverythingQuery("DELETE from " + Search.class.getSimpleName() + " d")); + }); purgeAllCaches(); @@ -202,23 +257,28 @@ public class ExpungeEverythingService implements IExpungeEverythingService { myMemoryCacheService.invalidateAllCaches(); } - private int expungeEverythingByTypeWithoutPurging(RequestDetails theRequest, Class theEntityType, RequestPartitionId theRequestPartitionId) { + private int expungeEverythingByTypeWithoutPurging( + RequestDetails theRequest, Class theEntityType, RequestPartitionId theRequestPartitionId) { int outcome = 0; while (true) { StopWatch sw = new StopWatch(); - int count = myTxService.withRequest(theRequest).withPropagation(Propagation.REQUIRES_NEW).withRequestPartitionId(theRequestPartitionId).execute(() -> { - CriteriaBuilder cb = myEntityManager.getCriteriaBuilder(); - CriteriaQuery cq = cb.createQuery(theEntityType); - cq.from(theEntityType); - TypedQuery query = myEntityManager.createQuery(cq); - query.setMaxResults(1000); - List results = query.getResultList(); - for (Object 
result : results) { - myEntityManager.remove(result); - } - return results.size(); - }); + int count = myTxService + .withRequest(theRequest) + .withPropagation(Propagation.REQUIRES_NEW) + .withRequestPartitionId(theRequestPartitionId) + .execute(() -> { + CriteriaBuilder cb = myEntityManager.getCriteriaBuilder(); + CriteriaQuery cq = cb.createQuery(theEntityType); + cq.from(theEntityType); + TypedQuery query = myEntityManager.createQuery(cq); + query.setMaxResults(1000); + List results = query.getResultList(); + for (Object result : results) { + myEntityManager.remove(result); + } + return results.size(); + }); outcome += count; if (count == 0) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/JpaResourceExpungeService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/JpaResourceExpungeService.java index 49a6b8a64b2..54d2785cc46 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/JpaResourceExpungeService.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/JpaResourceExpungeService.java @@ -78,56 +78,80 @@ public class JpaResourceExpungeService implements IResourceExpungeService findHistoricalVersionsOfNonDeletedResources(String theResourceName, JpaPid theJpaPid, int theRemainingCount) { + public List findHistoricalVersionsOfNonDeletedResources( + String theResourceName, JpaPid theJpaPid, int theRemainingCount) { if (isEmptyQuery(theRemainingCount)) { return Collections.EMPTY_LIST; } @@ -137,7 +161,8 @@ public class JpaResourceExpungeService implements IResourceExpungeService ids; if (theJpaPid != null && theJpaPid.getId() != null) { if (theJpaPid.getVersion() != null) { - ids = toSlice(myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theJpaPid.getId(), theJpaPid.getVersion())); + ids = toSlice(myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance( + theJpaPid.getId(), theJpaPid.getVersion())); } else { ids = myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResourceId(page, theJpaPid.getId()); } @@ -154,7 +179,8 @@ public class JpaResourceExpungeService implements IResourceExpungeService findHistoricalVersionsOfDeletedResources(String theResourceName, JpaPid theResourceId, int theRemainingCount) { + public List findHistoricalVersionsOfDeletedResources( + String theResourceName, JpaPid theResourceId, int theRemainingCount) { if (isEmptyQuery(theRemainingCount)) { return Collections.EMPTY_LIST; } @@ -163,7 +189,11 @@ public class JpaResourceExpungeService implements IResourceExpungeService ids; if (theResourceId != null) { ids = myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceId.getId(), theResourceName); - ourLog.info("Expunging {} deleted resources of type[{}] and ID[{}]", ids.getNumberOfElements(), theResourceName, theResourceId); + ourLog.info( + "Expunging {} deleted resources of type[{}] and ID[{}]", + ids.getNumberOfElements(), + theResourceName, + theResourceId); } else { if (theResourceName != null) { ids = myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceName); @@ -178,9 +208,10 @@ public class JpaResourceExpungeService implements IResourceExpungeService theResourceIds, AtomicInteger theRemainingCount) { + public void expungeCurrentVersionOfResources( + RequestDetails theRequestDetails, List theResourceIds, AtomicInteger theRemainingCount) { for (JpaPid next : theResourceIds) { - expungeCurrentVersionOfResource(theRequestDetails,(next).getId(), theRemainingCount); + 
expungeCurrentVersionOfResource(theRequestDetails, (next).getId(), theRemainingCount); if (expungeLimitReached(theRemainingCount)) { return; } @@ -201,15 +232,18 @@ public class JpaResourceExpungeService implements IResourceExpungeService theResourceIds, AtomicInteger theRemainingCount) { + public void expungeHistoricalVersionsOfIds( + RequestDetails theRequestDetails, List theResourceIds, AtomicInteger theRemainingCount) { List pids = JpaPid.toLongList(theResourceIds); List resourcesToDelete = myResourceTableDao.findAllByIdAndLoadForcedIds(pids); @@ -249,7 +290,8 @@ public class JpaResourceExpungeService implements IResourceExpungeService theHistoricalIds, AtomicInteger theRemainingCount) { + public void expungeHistoricalVersions( + RequestDetails theRequestDetails, List theHistoricalIds, AtomicInteger theRemainingCount) { for (JpaPid next : theHistoricalIds) { expungeHistoricalVersion(theRequestDetails, (next).getId(), theRemainingCount); if (expungeLimitReached(theRemainingCount)) { @@ -258,15 +300,18 @@ public class JpaResourceExpungeService implements IResourceExpungeService versionIds = myResourceHistoryTableDao.findForResourceId(page, theResource.getId(), theResource.getVersion()); - ourLog.debug("Found {} versions of resource {} to expunge", versionIds.getNumberOfElements(), theResource.getIdDt().getValue()); + Slice versionIds = + myResourceHistoryTableDao.findForResourceId(page, theResource.getId(), theResource.getVersion()); + ourLog.debug( + "Found {} versions of resource {} to expunge", + versionIds.getNumberOfElements(), + theResource.getIdDt().getValue()); for (Long nextVersionId : versionIds) { expungeHistoricalVersion(theRequestDetails, nextVersionId, theRemainingCount); if (expungeLimitReached(theRemainingCount)) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ResourceForeignKey.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ResourceForeignKey.java index 81170edc775..3409e511b3a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ResourceForeignKey.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ResourceForeignKey.java @@ -42,24 +42,21 @@ public class ResourceForeignKey { ResourceForeignKey that = (ResourceForeignKey) theO; return new EqualsBuilder() - .append(table, that.table) - .append(key, that.key) - .isEquals(); + .append(table, that.table) + .append(key, that.key) + .isEquals(); } @Override public int hashCode() { - return new HashCodeBuilder(17, 37) - .append(table) - .append(key) - .toHashCode(); + return new HashCodeBuilder(17, 37).append(table).append(key).toHashCode(); } @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("table", table) - .append("key", key) - .toString(); + .append("table", table) + .append("key", key) + .toString(); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ResourceTableFKProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ResourceTableFKProvider.java index e5b74026fe5..fe5c2d80804 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ResourceTableFKProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ResourceTableFKProvider.java @@ -23,9 +23,9 @@ import ca.uhn.fhir.mdm.api.IMdmSettings; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; -import javax.annotation.Nonnull; 
import java.util.ArrayList;
import java.util.List;
+import javax.annotation.Nonnull;
@Service
public class ResourceTableFKProvider {
@@ -40,10 +40,10 @@ public class ResourceTableFKProvider {
// SELECT FKTABLE_NAME, FKCOLUMN_NAME FROM CROSS_REFERENCES WHERE PKTABLE_NAME = 'HFJ_RESOURCE'
// Add some secondary related records that don't have foreign keys
- retval.add(new ResourceForeignKey("HFJ_HISTORY_TAG", "RES_ID"));//NOT covered by index.
+ retval.add(new ResourceForeignKey("HFJ_HISTORY_TAG", "RES_ID")); // NOT covered by index.
retval.add(new ResourceForeignKey("HFJ_RES_VER_PROV", "RES_PID"));
- //These have the possibility of touching all resource types.
+ // These have the possibility of touching all resource types.
retval.add(new ResourceForeignKey("HFJ_FORCED_ID", "RESOURCE_PID"));
retval.add(new ResourceForeignKey("HFJ_IDX_CMP_STRING_UNIQ", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_IDX_CMB_TOK_NU", "RES_ID"));
@@ -64,7 +64,7 @@ public class ResourceTableFKProvider {
retval.add(new ResourceForeignKey("MPI_LINK", "TARGET_PID"));
retval.add(new ResourceForeignKey("MPI_LINK", "PERSON_PID"));
- //These only touch certain resource types.
+ // These only touch certain resource types.
retval.add(new ResourceForeignKey("TRM_CODESYSTEM_VER", "RES_ID"));
retval.add(new ResourceForeignKey("TRM_CODESYSTEM", "RES_ID"));
retval.add(new ResourceForeignKey("TRM_VALUESET", "RES_ID"));
@@ -76,10 +76,11 @@ public class ResourceTableFKProvider {
return retval;
}
+ @Nonnull
public List getResourceForeignKeysByResourceType(String theResourceType) {
List retval = new ArrayList<>();
- //These have the possibility of touching all resource types.
+ // These have the possibility of touching all resource types.
retval.add(new ResourceForeignKey("HFJ_HISTORY_TAG", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_RES_VER_PROV", "RES_PID"));
retval.add(new ResourceForeignKey("HFJ_FORCED_ID", "RESOURCE_PID"));
@@ -88,8 +89,8 @@ public class ResourceTableFKProvider {
retval.add(new ResourceForeignKey("HFJ_RES_LINK", "SRC_RESOURCE_ID"));
retval.add(new ResourceForeignKey("HFJ_RES_LINK", "TARGET_RESOURCE_ID"));
retval.add(new ResourceForeignKey("HFJ_RES_PARAM_PRESENT", "RES_ID"));
- retval.add(new ResourceForeignKey("HFJ_RES_TAG", "RES_ID"));//TODO GGG: Res_ID + TAG_ID? is that enough?
- retval.add(new ResourceForeignKey("HFJ_RES_VER", "RES_ID"));//TODO GGG: RES_ID + updated? is that enough?
+ retval.add(new ResourceForeignKey("HFJ_RES_TAG", "RES_ID")); // TODO GGG: Res_ID + TAG_ID? is that enough?
+ retval.add(new ResourceForeignKey("HFJ_RES_VER", "RES_ID")); // TODO GGG: RES_ID + updated? is that enough?
retval.add(new ResourceForeignKey("HFJ_SPIDX_COORDS", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_SPIDX_DATE", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_SPIDX_NUMBER", "RES_ID"));
@@ -99,33 +100,35 @@ public class ResourceTableFKProvider {
retval.add(new ResourceForeignKey("HFJ_SPIDX_TOKEN", "RES_ID"));
retval.add(new ResourceForeignKey("HFJ_SPIDX_URI", "RES_ID"));
- if (myMdmSettings != null && myMdmSettings.isEnabled()) {
- retval.add(new ResourceForeignKey("MPI_LINK", "GOLDEN_RESOURCE_PID"));//NOT covered by index.
- retval.add(new ResourceForeignKey("MPI_LINK", "TARGET_PID"));//Possibly covered, partial index
- retval.add(new ResourceForeignKey("MPI_LINK", "PERSON_PID"));//TODO GGG: I don't even think we need this...
this field is deprecated, and the deletion is covered by GOLDEN_RESOURCE_PID + if (myMdmSettings != null && myMdmSettings.isEnabled()) { + retval.add(new ResourceForeignKey("MPI_LINK", "GOLDEN_RESOURCE_PID")); // NOT covered by index. + retval.add(new ResourceForeignKey("MPI_LINK", "TARGET_PID")); // Possibly covered, partial index + retval.add(new ResourceForeignKey( + "MPI_LINK", + "PERSON_PID")); // TODO GGG: I don't even think we need this... this field is deprecated, and the + // deletion is covered by GOLDEN_RESOURCE_PID } switch (theResourceType.toLowerCase()) { case "binary": - retval.add(new ResourceForeignKey("NPM_PACKAGE_VER", "BINARY_RES_ID"));//Not covered - retval.add(new ResourceForeignKey("NPM_PACKAGE_VER_RES", "BINARY_RES_ID"));//Not covered + retval.add(new ResourceForeignKey("NPM_PACKAGE_VER", "BINARY_RES_ID")); // Not covered + retval.add(new ResourceForeignKey("NPM_PACKAGE_VER_RES", "BINARY_RES_ID")); // Not covered break; case "subscription": - retval.add(new ResourceForeignKey("HFJ_SUBSCRIPTION_STATS", "RES_ID"));//Covered by index. + retval.add(new ResourceForeignKey("HFJ_SUBSCRIPTION_STATS", "RES_ID")); // Covered by index. break; case "codesystem": - retval.add(new ResourceForeignKey("TRM_CODESYSTEM_VER", "RES_ID"));//Not covered - retval.add(new ResourceForeignKey("TRM_CODESYSTEM", "RES_ID"));//Not covered + retval.add(new ResourceForeignKey("TRM_CODESYSTEM_VER", "RES_ID")); // Not covered + retval.add(new ResourceForeignKey("TRM_CODESYSTEM", "RES_ID")); // Not covered break; case "valueset": - retval.add(new ResourceForeignKey("TRM_VALUESET", "RES_ID"));//Not covered + retval.add(new ResourceForeignKey("TRM_VALUESET", "RES_ID")); // Not covered break; case "conceptmap": - retval.add(new ResourceForeignKey("TRM_CONCEPT_MAP", "RES_ID"));//Not covered + retval.add(new ResourceForeignKey("TRM_CONCEPT_MAP", "RES_ID")); // Not covered break; default: } return retval; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/DaoSearchParamSynchronizer.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/DaoSearchParamSynchronizer.java index fd6eab9da66..458f5a428d5 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/DaoSearchParamSynchronizer.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/DaoSearchParamSynchronizer.java @@ -26,20 +26,23 @@ import ca.uhn.fhir.jpa.util.AddRemoveCount; import com.google.common.annotations.VisibleForTesting; import org.springframework.stereotype.Service; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.PersistenceContextType; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.List; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; +import javax.persistence.PersistenceContextType; @Service public class DaoSearchParamSynchronizer { @PersistenceContext(type = PersistenceContextType.TRANSACTION) protected EntityManager myEntityManager; - public AddRemoveCount synchronizeSearchParamsToDatabase(ResourceIndexedSearchParams theParams, ResourceTable theEntity, ResourceIndexedSearchParams existingParams) { + public AddRemoveCount synchronizeSearchParamsToDatabase( + ResourceIndexedSearchParams theParams, + ResourceTable theEntity, + ResourceIndexedSearchParams existingParams) { AddRemoveCount retVal = new AddRemoveCount(); synchronize(theEntity, retVal, theParams.myStringParams, 
existingParams.myStringParams); @@ -64,7 +67,11 @@ public class DaoSearchParamSynchronizer { myEntityManager = theEntityManager; } - private void synchronize(ResourceTable theEntity, AddRemoveCount theAddRemoveCount, Collection theNewParams, Collection theExistingParams) { + private void synchronize( + ResourceTable theEntity, + AddRemoveCount theAddRemoveCount, + Collection theNewParams, + Collection theExistingParams) { Collection newParams = theNewParams; for (T next : newParams) { next.setPartitionId(theEntity.getPartitionId()); @@ -105,7 +112,8 @@ public class DaoSearchParamSynchronizer { * @param theIndexesToRemove The rows that would be removed * @param theIndexesToAdd The rows that would be added */ - private void tryToReuseIndexEntities(List theIndexesToRemove, List theIndexesToAdd) { + private void tryToReuseIndexEntities( + List theIndexesToRemove, List theIndexesToAdd) { for (int addIndex = 0; addIndex < theIndexesToAdd.size(); addIndex++) { // If there are no more rows to remove, there's nothing we can reuse @@ -125,7 +133,6 @@ public class DaoSearchParamSynchronizer { } } - public static List subtract(Collection theSubtractFrom, Collection theToSubtract) { assert theSubtractFrom != theToSubtract || (theSubtractFrom.isEmpty()); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java index 2ce5071b423..d50b3444702 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java @@ -55,17 +55,6 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.support.TransactionSynchronizationManager; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.PersistenceContextType; -import javax.persistence.Tuple; -import javax.persistence.TypedQuery; -import javax.persistence.criteria.CriteriaBuilder; -import javax.persistence.criteria.CriteriaQuery; -import javax.persistence.criteria.Predicate; -import javax.persistence.criteria.Root; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -78,6 +67,17 @@ import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; +import javax.persistence.PersistenceContextType; +import javax.persistence.Tuple; +import javax.persistence.TypedQuery; +import javax.persistence.criteria.CriteriaBuilder; +import javax.persistence.criteria.CriteriaQuery; +import javax.persistence.criteria.Predicate; +import javax.persistence.criteria.Root; import static ca.uhn.fhir.jpa.search.builder.predicate.BaseJoiningPredicateBuilder.replaceDefaultPartitionIdIfNonNull; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -103,20 +103,28 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; public class IdHelperService implements IIdHelperService { public static final Predicate[] EMPTY_PREDICATE_ARRAY = new Predicate[0]; public static final String RESOURCE_PID = "RESOURCE_PID"; + @Autowired protected IForcedIdDao myForcedIdDao; + @Autowired 
protected IResourceTableDao myResourceTableDao; + @Autowired private JpaStorageSettings myStorageSettings; + @Autowired private FhirContext myFhirCtx; + @Autowired private MemoryCacheService myMemoryCacheService; + @PersistenceContext(type = PersistenceContextType.TRANSACTION) private EntityManager myEntityManager; + @Autowired private PartitionSettings myPartitionSettings; + private boolean myDontCheckActiveTransactionForUnitTest; @VisibleForTesting @@ -132,7 +140,9 @@ public class IdHelperService implements IIdHelperService { */ @Override @Nonnull - public IResourceLookup resolveResourceIdentity(@Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theResourceId) throws ResourceNotFoundException { + public IResourceLookup resolveResourceIdentity( + @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theResourceId) + throws ResourceNotFoundException { return resolveResourceIdentity(theRequestPartitionId, theResourceType, theResourceId, false); } @@ -145,7 +155,12 @@ public class IdHelperService implements IIdHelperService { */ @Override @Nonnull - public IResourceLookup resolveResourceIdentity(@Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theResourceId, boolean theExcludeDeleted) throws ResourceNotFoundException { + public IResourceLookup resolveResourceIdentity( + @Nonnull RequestPartitionId theRequestPartitionId, + String theResourceType, + String theResourceId, + boolean theExcludeDeleted) + throws ResourceNotFoundException { assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive(); assert theRequestPartitionId != null; @@ -153,9 +168,8 @@ public class IdHelperService implements IIdHelperService { theResourceId = theResourceId.substring(theResourceId.indexOf("/") + 1); } IdDt id = new IdDt(theResourceType, theResourceId); - Map>> matches = translateForcedIdToPids(theRequestPartitionId, - Collections.singletonList(id), - theExcludeDeleted); + Map>> matches = + translateForcedIdToPids(theRequestPartitionId, Collections.singletonList(id), theExcludeDeleted); // We only pass 1 input in so only 0..1 will come back if (matches.isEmpty() || !matches.containsKey(theResourceId)) { @@ -181,9 +195,8 @@ public class IdHelperService implements IIdHelperService { */ @Override @Nonnull - public Map resolveResourcePersistentIds(@Nonnull RequestPartitionId theRequestPartitionId, - String theResourceType, - List theIds) { + public Map resolveResourcePersistentIds( + @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, List theIds) { return resolveResourcePersistentIds(theRequestPartitionId, theResourceType, theIds, false); } @@ -194,10 +207,11 @@ public class IdHelperService implements IIdHelperService { */ @Override @Nonnull - public Map resolveResourcePersistentIds(@Nonnull RequestPartitionId theRequestPartitionId, - String theResourceType, - List theIds, - boolean theExcludeDeleted) { + public Map resolveResourcePersistentIds( + @Nonnull RequestPartitionId theRequestPartitionId, + String theResourceType, + List theIds, + boolean theExcludeDeleted) { assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive(); Validate.notNull(theIds, "theIds cannot be null"); Validate.isTrue(!theIds.isEmpty(), "theIds must not be empty"); @@ -214,20 +228,23 @@ public class IdHelperService implements IIdHelperService { // is a forced id // we must resolve! 
if (myStorageSettings.isDeleteEnabled()) { - retVal = resolveResourceIdentity(theRequestPartitionId, theResourceType, id, theExcludeDeleted).getPersistentId(); + retVal = resolveResourceIdentity(theRequestPartitionId, theResourceType, id, theExcludeDeleted) + .getPersistentId(); retVals.put(id, retVal); } else { // fetch from cache... adding to cache if not available String key = toForcedIdToPidKey(theRequestPartitionId, theResourceType, id); - retVal = myMemoryCacheService.getThenPutAfterCommit(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, t -> { - List ids = Collections.singletonList(new IdType(theResourceType, id)); - // fetches from cache using a function that checks cache first... - List resolvedIds = resolveResourcePersistentIdsWithCache(theRequestPartitionId, ids); - if (resolvedIds.isEmpty()) { - throw new ResourceNotFoundException(Msg.code(1100) + ids.get(0)); - } - return resolvedIds.get(0); - }); + retVal = myMemoryCacheService.getThenPutAfterCommit( + MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, t -> { + List ids = Collections.singletonList(new IdType(theResourceType, id)); + // fetches from cache using a function that checks cache first... + List resolvedIds = + resolveResourcePersistentIdsWithCache(theRequestPartitionId, ids); + if (resolvedIds.isEmpty()) { + throw new ResourceNotFoundException(Msg.code(1100) + ids.get(0)); + } + return resolvedIds.get(0); + }); retVals.put(id, retVal); } } @@ -243,7 +260,8 @@ public class IdHelperService implements IIdHelperService { */ @Override @Nonnull - public JpaPid resolveResourcePersistentIds(@Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theId) { + public JpaPid resolveResourcePersistentIds( + @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theId) { return resolveResourcePersistentIds(theRequestPartitionId, theResourceType, theId, false); } @@ -254,13 +272,15 @@ public class IdHelperService implements IIdHelperService { * @throws ResourceNotFoundException If the ID can not be found */ @Override - public JpaPid resolveResourcePersistentIds(@Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theId, boolean theExcludeDeleted){ + public JpaPid resolveResourcePersistentIds( + @Nonnull RequestPartitionId theRequestPartitionId, + String theResourceType, + String theId, + boolean theExcludeDeleted) { Validate.notNull(theId, "theId must not be null"); - Map retVal = resolveResourcePersistentIds(theRequestPartitionId, - theResourceType, - Collections.singletonList(theId), - theExcludeDeleted); + Map retVal = resolveResourcePersistentIds( + theRequestPartitionId, theResourceType, Collections.singletonList(theId), theExcludeDeleted); return retVal.get(theId); // should be only one } @@ -273,11 +293,13 @@ public class IdHelperService implements IIdHelperService { */ @Override public boolean idRequiresForcedId(String theId) { - return myStorageSettings.getResourceClientIdStrategy() == JpaStorageSettings.ClientIdStrategyEnum.ANY || !isValidPid(theId); + return myStorageSettings.getResourceClientIdStrategy() == JpaStorageSettings.ClientIdStrategyEnum.ANY + || !isValidPid(theId); } @Nonnull - private String toForcedIdToPidKey(@Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theId) { + private String toForcedIdToPidKey( + @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theId) { return RequestPartitionId.stringifyForKey(theRequestPartitionId) + "/" + theResourceType + "/" + 
theId; } @@ -289,7 +311,8 @@ public class IdHelperService implements IIdHelperService { */ @Override @Nonnull - public List resolveResourcePersistentIdsWithCache(RequestPartitionId theRequestPartitionId, List theIds) { + public List resolveResourcePersistentIdsWithCache( + RequestPartitionId theRequestPartitionId, List theIds) { boolean onlyForcedIds = false; return resolveResourcePersistentIdsWithCache(theRequestPartitionId, theIds, onlyForcedIds); } @@ -304,7 +327,8 @@ public class IdHelperService implements IIdHelperService { */ @Override @Nonnull - public List resolveResourcePersistentIdsWithCache(RequestPartitionId theRequestPartitionId, List theIds, boolean theOnlyForcedIds) { + public List resolveResourcePersistentIdsWithCache( + RequestPartitionId theRequestPartitionId, List theIds, boolean theOnlyForcedIds) { assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive(); List retVal = new ArrayList<>(theIds.size()); @@ -338,13 +362,18 @@ public class IdHelperService implements IIdHelperService { idsToCheck.add(nextId); } - new QueryChunker().chunk(idsToCheck, SearchBuilder.getMaximumPageSize() / 2, ids -> doResolvePersistentIds(theRequestPartitionId, ids, retVal)); + new QueryChunker() + .chunk( + idsToCheck, + SearchBuilder.getMaximumPageSize() / 2, + ids -> doResolvePersistentIds(theRequestPartitionId, ids, retVal)); } return retVal; } - private void doResolvePersistentIds(RequestPartitionId theRequestPartitionId, List theIds, List theOutputListToPopulate) { + private void doResolvePersistentIds( + RequestPartitionId theRequestPartitionId, List theIds, List theOutputListToPopulate) { CriteriaBuilder cb = myEntityManager.getCriteriaBuilder(); CriteriaQuery criteriaQuery = cb.createTupleQuery(); Root from = criteriaQuery.from(ForcedId.class); @@ -357,10 +386,9 @@ public class IdHelperService implements IIdHelperService { * performant. */ criteriaQuery.multiselect( - from.get("myResourcePid").as(Long.class), - from.get("myResourceType").as(String.class), - from.get("myForcedId").as(String.class) - ); + from.get("myResourcePid").as(Long.class), + from.get("myResourceType").as(String.class), + from.get("myForcedId").as(String.class)); List predicates = new ArrayList<>(theIds.size()); for (IIdType next : theIds) { @@ -404,20 +432,24 @@ public class IdHelperService implements IIdHelperService { * 2. If it is default partition and default partition id is null, then return predicate for null partition. * 3. If the requested partition search is not all partition, return the request partition as predicate. 
*/ - private Optional getOptionalPartitionPredicate(RequestPartitionId theRequestPartitionId, CriteriaBuilder cb, Root from) { + private Optional getOptionalPartitionPredicate( + RequestPartitionId theRequestPartitionId, CriteriaBuilder cb, Root from) { if (myPartitionSettings.isAllowUnqualifiedCrossPartitionReference()) { return Optional.empty(); } else if (theRequestPartitionId.isDefaultPartition() && myPartitionSettings.getDefaultPartitionId() == null) { - Predicate partitionIdCriteria = cb.isNull(from.get("myPartitionIdValue").as(Integer.class)); + Predicate partitionIdCriteria = + cb.isNull(from.get("myPartitionIdValue").as(Integer.class)); return Optional.of(partitionIdCriteria); } else if (!theRequestPartitionId.isAllPartitions()) { List partitionIds = theRequestPartitionId.getPartitionIds(); partitionIds = replaceDefaultPartitionIdIfNonNull(myPartitionSettings, partitionIds); if (partitionIds.size() > 1) { - Predicate partitionIdCriteria = from.get("myPartitionIdValue").as(Integer.class).in(partitionIds); + Predicate partitionIdCriteria = + from.get("myPartitionIdValue").as(Integer.class).in(partitionIds); return Optional.of(partitionIdCriteria); - } else if (partitionIds.size() == 1){ - Predicate partitionIdCriteria = cb.equal(from.get("myPartitionIdValue").as(Integer.class), partitionIds.get(0)); + } else if (partitionIds.size() == 1) { + Predicate partitionIdCriteria = + cb.equal(from.get("myPartitionIdValue").as(Integer.class), partitionIds.get(0)); return Optional.of(partitionIdCriteria); } } @@ -454,13 +486,18 @@ public class IdHelperService implements IIdHelperService { @Override public Optional translatePidIdToForcedIdWithCache(JpaPid theId) { - return myMemoryCacheService.get(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, theId.getId(), pid -> myForcedIdDao.findByResourcePid(pid).map(ForcedId::asTypedFhirResourceId)); + return myMemoryCacheService.get( + MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, + theId.getId(), + pid -> myForcedIdDao.findByResourcePid(pid).map(ForcedId::asTypedFhirResourceId)); } private ListMultimap organizeIdsByResourceType(Collection theIds) { - ListMultimap typeToIds = MultimapBuilder.hashKeys().arrayListValues().build(); + ListMultimap typeToIds = + MultimapBuilder.hashKeys().arrayListValues().build(); for (IIdType nextId : theIds) { - if (myStorageSettings.getResourceClientIdStrategy() == JpaStorageSettings.ClientIdStrategyEnum.ANY || !isValidPid(nextId)) { + if (myStorageSettings.getResourceClientIdStrategy() == JpaStorageSettings.ClientIdStrategyEnum.ANY + || !isValidPid(nextId)) { if (nextId.hasResourceType()) { typeToIds.put(nextId.getResourceType(), nextId.getIdPart()); } else { @@ -471,7 +508,8 @@ public class IdHelperService implements IIdHelperService { return typeToIds; } - private Map>> translateForcedIdToPids(@Nonnull RequestPartitionId theRequestPartitionId, Collection theId, boolean theExcludeDeleted) { + private Map>> translateForcedIdToPids( + @Nonnull RequestPartitionId theRequestPartitionId, Collection theId, boolean theExcludeDeleted) { assert theRequestPartitionId != null; theId.forEach(id -> Validate.isTrue(id.hasIdPart())); @@ -484,11 +522,10 @@ public class IdHelperService implements IIdHelperService { RequestPartitionId requestPartitionId = replaceDefault(theRequestPartitionId); if (myStorageSettings.getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY) { - List pids = theId - .stream() - .filter(t -> isValidPid(t)) - .map(t -> t.getIdPartAsLong()) - .collect(Collectors.toList()); + List pids = 
theId.stream() + .filter(t -> isValidPid(t)) + .map(t -> t.getIdPartAsLong()) + .collect(Collectors.toList()); if (!pids.isEmpty()) { resolvePids(requestPartitionId, pids, retVal); } @@ -504,7 +541,8 @@ public class IdHelperService implements IIdHelperService { for (Iterator forcedIdIterator = nextIds.iterator(); forcedIdIterator.hasNext(); ) { String nextForcedId = forcedIdIterator.next(); String nextKey = nextResourceType + "/" + nextForcedId; - IResourceLookup cachedLookup = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, nextKey); + IResourceLookup cachedLookup = + myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, nextKey); if (cachedLookup != null) { forcedIdIterator.remove(); if (!retVal.containsKey(nextForcedId)) { @@ -520,14 +558,21 @@ public class IdHelperService implements IIdHelperService { assert isNotBlank(nextResourceType); if (requestPartitionId.isAllPartitions()) { - views = myForcedIdDao.findAndResolveByForcedIdWithNoType(nextResourceType, nextIds, theExcludeDeleted); + views = myForcedIdDao.findAndResolveByForcedIdWithNoType( + nextResourceType, nextIds, theExcludeDeleted); } else { if (requestPartitionId.isDefaultPartition()) { - views = myForcedIdDao.findAndResolveByForcedIdWithNoTypeInPartitionNull(nextResourceType, nextIds, theExcludeDeleted); + views = myForcedIdDao.findAndResolveByForcedIdWithNoTypeInPartitionNull( + nextResourceType, nextIds, theExcludeDeleted); } else if (requestPartitionId.hasDefaultPartitionId()) { - views = myForcedIdDao.findAndResolveByForcedIdWithNoTypeInPartitionIdOrNullPartitionId(nextResourceType, nextIds, requestPartitionId.getPartitionIdsWithoutDefault(), theExcludeDeleted); + views = myForcedIdDao.findAndResolveByForcedIdWithNoTypeInPartitionIdOrNullPartitionId( + nextResourceType, + nextIds, + requestPartitionId.getPartitionIdsWithoutDefault(), + theExcludeDeleted); } else { - views = myForcedIdDao.findAndResolveByForcedIdWithNoTypeInPartition(nextResourceType, nextIds, requestPartitionId.getPartitionIds(), theExcludeDeleted); + views = myForcedIdDao.findAndResolveByForcedIdWithNoTypeInPartition( + nextResourceType, nextIds, requestPartitionId.getPartitionIds(), theExcludeDeleted); } } @@ -549,7 +594,6 @@ public class IdHelperService implements IIdHelperService { } } } - } return retVal; @@ -558,23 +602,25 @@ public class IdHelperService implements IIdHelperService { RequestPartitionId replaceDefault(RequestPartitionId theRequestPartitionId) { if (myPartitionSettings.getDefaultPartitionId() != null) { if (!theRequestPartitionId.isAllPartitions() && theRequestPartitionId.hasDefaultPartitionId()) { - List partitionIds = theRequestPartitionId - .getPartitionIds() - .stream() - .map(t -> t == null ? myPartitionSettings.getDefaultPartitionId() : t) - .collect(Collectors.toList()); + List partitionIds = theRequestPartitionId.getPartitionIds().stream() + .map(t -> t == null ? 
myPartitionSettings.getDefaultPartitionId() : t) + .collect(Collectors.toList()); return RequestPartitionId.fromPartitionIds(partitionIds); } } return theRequestPartitionId; } - private void resolvePids(@Nonnull RequestPartitionId theRequestPartitionId, List thePidsToResolve, Map>> theTargets) { + private void resolvePids( + @Nonnull RequestPartitionId theRequestPartitionId, + List thePidsToResolve, + Map>> theTargets) { if (!myStorageSettings.isDeleteEnabled()) { for (Iterator forcedIdIterator = thePidsToResolve.iterator(); forcedIdIterator.hasNext(); ) { Long nextPid = forcedIdIterator.next(); String nextKey = Long.toString(nextPid); - IResourceLookup cachedLookup = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, nextKey); + IResourceLookup cachedLookup = + myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, nextKey); if (cachedLookup != null) { forcedIdIterator.remove(); if (!theTargets.containsKey(nextKey)) { @@ -593,26 +639,27 @@ public class IdHelperService implements IIdHelperService { if (theRequestPartitionId.isDefaultPartition()) { lookup = myResourceTableDao.findLookupFieldsByResourcePidInPartitionNull(thePidsToResolve); } else if (theRequestPartitionId.hasDefaultPartitionId()) { - lookup = myResourceTableDao.findLookupFieldsByResourcePidInPartitionIdsOrNullPartition(thePidsToResolve, theRequestPartitionId.getPartitionIdsWithoutDefault()); + lookup = myResourceTableDao.findLookupFieldsByResourcePidInPartitionIdsOrNullPartition( + thePidsToResolve, theRequestPartitionId.getPartitionIdsWithoutDefault()); } else { - lookup = myResourceTableDao.findLookupFieldsByResourcePidInPartitionIds(thePidsToResolve, theRequestPartitionId.getPartitionIds()); + lookup = myResourceTableDao.findLookupFieldsByResourcePidInPartitionIds( + thePidsToResolve, theRequestPartitionId.getPartitionIds()); } } - lookup - .stream() - .map(t -> new JpaResourceLookup((String) t[0], (Long) t[1], (Date) t[2])) - .forEach(t -> { - String id = t.getPersistentId().toString(); - if (!theTargets.containsKey(id)) { - theTargets.put(id, new ArrayList<>()); - } - theTargets.get(id).add(t); - if (!myStorageSettings.isDeleteEnabled()) { - String nextKey = t.getPersistentId().toString(); - myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, nextKey, t); - } - }); - + lookup.stream() + .map(t -> new JpaResourceLookup((String) t[0], (Long) t[1], (Date) t[2])) + .forEach(t -> { + String id = t.getPersistentId().toString(); + if (!theTargets.containsKey(id)) { + theTargets.put(id, new ArrayList<>()); + } + theTargets.get(id).add(t); + if (!myStorageSettings.isDeleteEnabled()) { + String nextKey = t.getPersistentId().toString(); + myMemoryCacheService.putAfterCommit( + MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, nextKey, t); + } + }); } } @@ -620,12 +667,11 @@ public class IdHelperService implements IIdHelperService { public PersistentIdToForcedIdMap translatePidsToForcedIds(Set theResourceIds) { assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive(); Set thePids = theResourceIds.stream().map(JpaPid::getId).collect(Collectors.toSet()); - Map> retVal = new HashMap<>(myMemoryCacheService.getAllPresent(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, thePids)); + Map> retVal = new HashMap<>( + myMemoryCacheService.getAllPresent(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, thePids)); - List remainingPids = thePids - .stream() - .filter(t -> !retVal.containsKey(t)) - 
.collect(Collectors.toList()); + List remainingPids = + thePids.stream().filter(t -> !retVal.containsKey(t)).collect(Collectors.toList()); new QueryChunker().chunk(remainingPids, t -> { List forcedIds = myForcedIdDao.findAllByResourcePid(t); @@ -634,24 +680,21 @@ public class IdHelperService implements IIdHelperService { Long nextResourcePid = forcedId.getResourceId(); Optional nextForcedId = Optional.of(forcedId.asTypedFhirResourceId()); retVal.put(nextResourcePid, nextForcedId); - myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, nextResourcePid, nextForcedId); + myMemoryCacheService.putAfterCommit( + MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, nextResourcePid, nextForcedId); } }); - remainingPids = thePids - .stream() - .filter(t -> !retVal.containsKey(t)) - .collect(Collectors.toList()); + remainingPids = thePids.stream().filter(t -> !retVal.containsKey(t)).collect(Collectors.toList()); for (Long nextResourcePid : remainingPids) { retVal.put(nextResourcePid, Optional.empty()); - myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, nextResourcePid, Optional.empty()); + myMemoryCacheService.putAfterCommit( + MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, nextResourcePid, Optional.empty()); } Map> convertRetVal = new HashMap<>(); - retVal.forEach( - (k, v) -> { - convertRetVal.put(JpaPid.fromId(k), v); - } - ); + retVal.forEach((k, v) -> { + convertRetVal.put(JpaPid.fromId(k), v); + }); return new PersistentIdToForcedIdMap(convertRetVal); } @@ -659,17 +702,26 @@ public class IdHelperService implements IIdHelperService { * Pre-cache a PID-to-Resource-ID mapping for later retrieval by {@link #translatePidsToForcedIds(Set)} and related methods */ @Override - public void addResolvedPidToForcedId(JpaPid theJpaPid, @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, @Nullable String theForcedId, @Nullable Date theDeletedAt) { + public void addResolvedPidToForcedId( + JpaPid theJpaPid, + @Nonnull RequestPartitionId theRequestPartitionId, + String theResourceType, + @Nullable String theForcedId, + @Nullable Date theDeletedAt) { if (theForcedId != null) { if (theJpaPid.getAssociatedResourceId() == null) { populateAssociatedResourceId(theResourceType, theForcedId, theJpaPid); } - myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, theJpaPid.getId(), Optional.of(theResourceType + "/" + theForcedId)); + myMemoryCacheService.putAfterCommit( + MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, + theJpaPid.getId(), + Optional.of(theResourceType + "/" + theForcedId)); String key = toForcedIdToPidKey(theRequestPartitionId, theResourceType, theForcedId); myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, theJpaPid); } else { - myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, theJpaPid.getId(), Optional.empty()); + myMemoryCacheService.putAfterCommit( + MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, theJpaPid.getId(), Optional.empty()); } if (!myStorageSettings.isDeleteEnabled()) { @@ -677,7 +729,6 @@ public class IdHelperService implements IIdHelperService { String nextKey = theJpaPid.toString(); myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, nextKey, lookup); } - } @VisibleForTesting @@ -700,7 +751,8 @@ public class IdHelperService implements IIdHelperService { @Override @Nonnull - public List getPidsOrThrowException(@Nonnull RequestPartitionId theRequestPartitionId, List theIds) { + 
public List getPidsOrThrowException(
+ @Nonnull RequestPartitionId theRequestPartitionId, List theIds) {
List resourcePersistentIds = resolveResourcePersistentIdsWithCache(theRequestPartitionId, theIds);
return resourcePersistentIds;
}
@@ -739,7 +791,10 @@ public class IdHelperService implements IIdHelperService {
public JpaPid getPidOrThrowException(@Nonnull IAnyResource theResource) {
Long theResourcePID = (Long) theResource.getUserData(RESOURCE_PID);
if (theResourcePID == null) {
- throw new IllegalStateException(Msg.code(2108) + String.format("Unable to find %s in the user data for %s with ID %s", RESOURCE_PID, theResource, theResource.getId()));
+ throw new IllegalStateException(Msg.code(2108)
+ + String.format(
+ "Unable to find %s in the user data for %s with ID %s",
+ RESOURCE_PID, theResource, theResource.getId()));
}
return JpaPid.fromId(theResourcePID);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/SearchParamWithInlineReferencesExtractor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/SearchParamWithInlineReferencesExtractor.java
index dc1b4b77905..c77d2e206ce 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/SearchParamWithInlineReferencesExtractor.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/SearchParamWithInlineReferencesExtractor.java
@@ -45,28 +45,36 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.stream.Collectors;
import javax.annotation.Nullable;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.stream.Collectors;
@Service
@Lazy
-public class SearchParamWithInlineReferencesExtractor extends BaseSearchParamWithInlineReferencesExtractor implements ISearchParamWithInlineReferencesExtractor {
- private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SearchParamWithInlineReferencesExtractor.class);
+public class SearchParamWithInlineReferencesExtractor extends BaseSearchParamWithInlineReferencesExtractor
+ implements ISearchParamWithInlineReferencesExtractor {
+ private static final org.slf4j.Logger ourLog =
+ org.slf4j.LoggerFactory.getLogger(SearchParamWithInlineReferencesExtractor.class);
+
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
protected EntityManager myEntityManager;
+
@Autowired
private ISearchParamRegistry mySearchParamRegistry;
+
@Autowired
private SearchParamExtractorService mySearchParamExtractorService;
+
@Autowired
private DaoSearchParamSynchronizer myDaoSearchParamSynchronizer;
+
@Autowired
private IResourceIndexedComboStringUniqueDao myResourceIndexedCompositeStringUniqueDao;
+
@Autowired
private PartitionSettings myPartitionSettings;
@@ -85,29 +93,48 @@ public class SearchParamWithInlineReferencesExtractor extends BaseSearchParamWit
mySearchParamRegistry = theSearchParamRegistry;
}
- public void populateFromResource(RequestPartitionId theRequestPartitionId, ResourceIndexedSearchParams theParams, TransactionDetails theTransactionDetails, ResourceTable theEntity, IBaseResource theResource, ResourceIndexedSearchParams theExistingParams, RequestDetails theRequest, boolean thePerformIndexing) {
+ public void populateFromResource(
+ RequestPartitionId
theRequestPartitionId, + ResourceIndexedSearchParams theParams, + TransactionDetails theTransactionDetails, + ResourceTable theEntity, + IBaseResource theResource, + ResourceIndexedSearchParams theExistingParams, + RequestDetails theRequest, + boolean thePerformIndexing) { if (thePerformIndexing) { // Perform inline match URL substitution extractInlineReferences(theRequest, theResource, theTransactionDetails); } - mySearchParamExtractorService.extractFromResource(theRequestPartitionId, theRequest, theParams, theExistingParams, theEntity, theResource, theTransactionDetails, thePerformIndexing, ISearchParamExtractor.ALL_PARAMS); + mySearchParamExtractorService.extractFromResource( + theRequestPartitionId, + theRequest, + theParams, + theExistingParams, + theEntity, + theResource, + theTransactionDetails, + thePerformIndexing, + ISearchParamExtractor.ALL_PARAMS); /* * If the existing resource already has links and those match links we still want, use them instead of removing them and re adding them */ - for (Iterator existingLinkIter = theExistingParams.getResourceLinks().iterator(); existingLinkIter.hasNext(); ) { + for (Iterator existingLinkIter = + theExistingParams.getResourceLinks().iterator(); + existingLinkIter.hasNext(); ) { ResourceLink nextExisting = existingLinkIter.next(); if (theParams.myLinks.remove(nextExisting)) { existingLinkIter.remove(); theParams.myLinks.add(nextExisting); } } - } @Nullable - private Collection findParameterIndexes(ResourceIndexedSearchParams theParams, RuntimeSearchParam nextCompositeOf) { + private Collection findParameterIndexes( + ResourceIndexedSearchParams theParams, RuntimeSearchParam nextCompositeOf) { Collection paramsListForCompositePart = null; switch (nextCompositeOf.getParamType()) { case NUMBER: @@ -135,10 +162,9 @@ public class SearchParamWithInlineReferencesExtractor extends BaseSearchParamWit break; } if (paramsListForCompositePart != null) { - paramsListForCompositePart = paramsListForCompositePart - .stream() - .filter(t -> t.getParamName().equals(nextCompositeOf.getName())) - .collect(Collectors.toList()); + paramsListForCompositePart = paramsListForCompositePart.stream() + .filter(t -> t.getParamName().equals(nextCompositeOf.getName())) + .collect(Collectors.toList()); } return paramsListForCompositePart; } @@ -148,29 +174,48 @@ public class SearchParamWithInlineReferencesExtractor extends BaseSearchParamWit myDaoSearchParamSynchronizer = theDaoSearchParamSynchronizer; } - public void storeUniqueComboParameters(ResourceIndexedSearchParams theParams, ResourceTable theEntity, ResourceIndexedSearchParams theExistingParams) { + public void storeUniqueComboParameters( + ResourceIndexedSearchParams theParams, + ResourceTable theEntity, + ResourceIndexedSearchParams theExistingParams) { /* * String Uniques */ if (myStorageSettings.isUniqueIndexesEnabled()) { - for (ResourceIndexedComboStringUnique next : DaoSearchParamSynchronizer.subtract(theExistingParams.myComboStringUniques, theParams.myComboStringUniques)) { + for (ResourceIndexedComboStringUnique next : DaoSearchParamSynchronizer.subtract( + theExistingParams.myComboStringUniques, theParams.myComboStringUniques)) { ourLog.debug("Removing unique index: {}", next); myEntityManager.remove(next); theEntity.getParamsComboStringUnique().remove(next); } boolean haveNewStringUniqueParams = false; - for (ResourceIndexedComboStringUnique next : DaoSearchParamSynchronizer.subtract(theParams.myComboStringUniques, theExistingParams.myComboStringUniques)) { + for (ResourceIndexedComboStringUnique 
next : DaoSearchParamSynchronizer.subtract( + theParams.myComboStringUniques, theExistingParams.myComboStringUniques)) { if (myStorageSettings.isUniqueIndexesCheckedBeforeSave()) { - ResourceIndexedComboStringUnique existing = myResourceIndexedCompositeStringUniqueDao.findByQueryString(next.getIndexString()); + ResourceIndexedComboStringUnique existing = + myResourceIndexedCompositeStringUniqueDao.findByQueryString(next.getIndexString()); if (existing != null) { String searchParameterId = "(unknown)"; if (next.getSearchParameterId() != null) { - searchParameterId = next.getSearchParameterId().toUnqualifiedVersionless().getValue(); + searchParameterId = next.getSearchParameterId() + .toUnqualifiedVersionless() + .getValue(); } - String msg = myFhirContext.getLocalizer().getMessage(BaseHapiFhirDao.class, "uniqueIndexConflictFailure", theEntity.getResourceType(), next.getIndexString(), existing.getResource().getIdDt().toUnqualifiedVersionless().getValue(), searchParameterId); + String msg = myFhirContext + .getLocalizer() + .getMessage( + BaseHapiFhirDao.class, + "uniqueIndexConflictFailure", + theEntity.getResourceType(), + next.getIndexString(), + existing.getResource() + .getIdDt() + .toUnqualifiedVersionless() + .getValue(), + searchParameterId); // Use ResourceVersionConflictException here because the HapiTransactionService // catches this and can retry it if needed @@ -181,7 +226,8 @@ public class SearchParamWithInlineReferencesExtractor extends BaseSearchParamWit myEntityManager.persist(next); haveNewStringUniqueParams = true; } - theEntity.setParamsComboStringUniquePresent(theParams.myComboStringUniques.size() > 0 || haveNewStringUniqueParams); + theEntity.setParamsComboStringUniquePresent( + theParams.myComboStringUniques.size() > 0 || haveNewStringUniqueParams); } } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmExpansionCacheSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmExpansionCacheSvc.java index 4a83c67885f..bf3f53cd339 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmExpansionCacheSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmExpansionCacheSvc.java @@ -51,7 +51,7 @@ public class MdmExpansionCacheSvc { ourLog.debug(buildLogMessage("About to lookup cached resource ID " + theSourceId)); String goldenResourceId = mySourceToGoldenIdCache.get(theSourceId); - //A golden resources' golden resource ID is itself. + // A golden resources' golden resource ID is itself. 
if (StringUtils.isBlank(goldenResourceId)) { if (mySourceToGoldenIdCache.containsValue(theSourceId)) { goldenResourceId = theSourceId; @@ -75,14 +75,14 @@ public class MdmExpansionCacheSvc { StringBuilder builder = new StringBuilder(); builder.append(message); if (ourLog.isDebugEnabled() || theAddCacheContentContent) { - builder.append("\n") - .append("Current cache content is:") - .append("\n"); - mySourceToGoldenIdCache.entrySet().stream().forEach(entry -> builder.append(entry.getKey()).append(" -> ").append(entry.getValue()).append("\n")); + builder.append("\n").append("Current cache content is:").append("\n"); + mySourceToGoldenIdCache.entrySet().stream().forEach(entry -> builder.append(entry.getKey()) + .append(" -> ") + .append(entry.getValue()) + .append("\n")); return builder.toString(); } return builder.toString(); - } /** diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkDaoJpaImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkDaoJpaImpl.java index b7b3572145c..948f9bbb9a6 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkDaoJpaImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/mdm/MdmLinkDaoJpaImpl.java @@ -57,6 +57,12 @@ import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Pageable; import org.springframework.data.history.Revisions; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Date; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.persistence.EntityManager; import javax.persistence.TypedQuery; @@ -68,12 +74,6 @@ import javax.persistence.criteria.Path; import javax.persistence.criteria.Predicate; import javax.persistence.criteria.Root; import javax.validation.constraints.NotNull; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.List; -import java.util.Optional; -import java.util.stream.Collectors; import static ca.uhn.fhir.mdm.api.MdmQuerySearchParameters.GOLDEN_RESOURCE_NAME; import static ca.uhn.fhir.mdm.api.MdmQuerySearchParameters.GOLDEN_RESOURCE_PID_NAME; @@ -88,10 +88,13 @@ public class MdmLinkDaoJpaImpl implements IMdmLinkDao { @Autowired IMdmLinkJpaRepository myMdmLinkDao; + @Autowired protected EntityManager myEntityManager; + @Autowired private IIdHelperService myIdHelperService; + @Autowired private AuditReader myAuditReader; @@ -106,47 +109,55 @@ public class MdmLinkDaoJpaImpl implements IMdmLinkDao { } @Override - public List> expandPidsFromGroupPidGivenMatchResult(JpaPid theGroupPid, MdmMatchResultEnum theMdmMatchResultEnum) { - return myMdmLinkDao.expandPidsFromGroupPidGivenMatchResult((theGroupPid).getId(), theMdmMatchResultEnum) - .stream() - .map(this::daoTupleToMdmTuple) - .collect(Collectors.toList()); + public List> expandPidsFromGroupPidGivenMatchResult( + JpaPid theGroupPid, MdmMatchResultEnum theMdmMatchResultEnum) { + return myMdmLinkDao + .expandPidsFromGroupPidGivenMatchResult((theGroupPid).getId(), theMdmMatchResultEnum) + .stream() + .map(this::daoTupleToMdmTuple) + .collect(Collectors.toList()); } private MdmPidTuple daoTupleToMdmTuple(IMdmLinkJpaRepository.MdmPidTuple theMdmPidTuple) { - return MdmPidTuple.fromGoldenAndSource(JpaPid.fromId(theMdmPidTuple.getGoldenPid()), JpaPid.fromId(theMdmPidTuple.getSourcePid())); + return MdmPidTuple.fromGoldenAndSource( + JpaPid.fromId(theMdmPidTuple.getGoldenPid()), 
JpaPid.fromId(theMdmPidTuple.getSourcePid())); } @Override - public List> expandPidsBySourcePidAndMatchResult(JpaPid theSourcePid, MdmMatchResultEnum theMdmMatchResultEnum) { - return myMdmLinkDao.expandPidsBySourcePidAndMatchResult((theSourcePid).getId(), theMdmMatchResultEnum) - .stream() - .map(this::daoTupleToMdmTuple) - .collect(Collectors.toList()); + public List> expandPidsBySourcePidAndMatchResult( + JpaPid theSourcePid, MdmMatchResultEnum theMdmMatchResultEnum) { + return myMdmLinkDao.expandPidsBySourcePidAndMatchResult((theSourcePid).getId(), theMdmMatchResultEnum).stream() + .map(this::daoTupleToMdmTuple) + .collect(Collectors.toList()); } @Override - public List> expandPidsByGoldenResourcePidAndMatchResult(JpaPid theSourcePid, MdmMatchResultEnum theMdmMatchResultEnum) { - return myMdmLinkDao.expandPidsByGoldenResourcePidAndMatchResult((theSourcePid).getId(), theMdmMatchResultEnum) - .stream() - .map(this::daoTupleToMdmTuple) - .collect(Collectors.toList()); + public List> expandPidsByGoldenResourcePidAndMatchResult( + JpaPid theSourcePid, MdmMatchResultEnum theMdmMatchResultEnum) { + return myMdmLinkDao + .expandPidsByGoldenResourcePidAndMatchResult((theSourcePid).getId(), theMdmMatchResultEnum) + .stream() + .map(this::daoTupleToMdmTuple) + .collect(Collectors.toList()); } @Override - public List findPidByResourceNameAndThreshold(String theResourceName, Date theHighThreshold, Pageable thePageable) { - return myMdmLinkDao.findPidByResourceNameAndThreshold(theResourceName,theHighThreshold, thePageable) - .stream() - .map(JpaPid::fromId) - .collect(Collectors.toList()); + public List findPidByResourceNameAndThreshold( + String theResourceName, Date theHighThreshold, Pageable thePageable) { + return myMdmLinkDao.findPidByResourceNameAndThreshold(theResourceName, theHighThreshold, thePageable).stream() + .map(JpaPid::fromId) + .collect(Collectors.toList()); } @Override - public List findPidByResourceNameAndThresholdAndPartitionId(String theResourceName, Date theHighThreshold, List thePartitionIds, Pageable thePageable) { - return myMdmLinkDao.findPidByResourceNameAndThresholdAndPartitionId(theResourceName,theHighThreshold, thePartitionIds, thePageable) - .stream() - .map(JpaPid::fromId) - .collect(Collectors.toList()); + public List findPidByResourceNameAndThresholdAndPartitionId( + String theResourceName, Date theHighThreshold, List thePartitionIds, Pageable thePageable) { + return myMdmLinkDao + .findPidByResourceNameAndThresholdAndPartitionId( + theResourceName, theHighThreshold, thePartitionIds, thePageable) + .stream() + .map(JpaPid::fromId) + .collect(Collectors.toList()); } @Override @@ -165,7 +176,6 @@ public class MdmLinkDaoJpaImpl implements IMdmLinkDao { myMdmLinkDao.deleteAll(theLinks); } - @Override public List findAll(Example theExample) { return myMdmLinkDao.findAll(theExample); @@ -203,24 +213,28 @@ public class MdmLinkDaoJpaImpl implements IMdmLinkDao { @Override public MdmLink validateMdmLink(IMdmLink theMdmLink) throws UnprocessableEntityException { - if (theMdmLink instanceof MdmLink){ - return (MdmLink) theMdmLink; - } - else { + if (theMdmLink instanceof MdmLink) { + return (MdmLink) theMdmLink; + } else { throw new UnprocessableEntityException(Msg.code(2109) + "Unprocessable MdmLink implementation"); } } @Override @Deprecated - public Page search(IIdType theGoldenResourceId, IIdType theSourceId, MdmMatchResultEnum theMatchResult, - MdmLinkSourceEnum theLinkSource, MdmPageRequest thePageRequest, List thePartitionIds) { + public Page search( + IIdType 
theGoldenResourceId, + IIdType theSourceId, + MdmMatchResultEnum theMatchResult, + MdmLinkSourceEnum theLinkSource, + MdmPageRequest thePageRequest, + List thePartitionIds) { MdmQuerySearchParameters mdmQuerySearchParameters = new MdmQuerySearchParameters(thePageRequest) - .setGoldenResourceId(theGoldenResourceId) - .setSourceId(theSourceId) - .setMatchResult(theMatchResult) - .setLinkSource(theLinkSource) - .setPartitionIds(thePartitionIds); + .setGoldenResourceId(theGoldenResourceId) + .setSourceId(theSourceId) + .setMatchResult(theMatchResult) + .setLinkSource(theLinkSource) + .setPartitionIds(thePartitionIds); return search(mdmQuerySearchParameters); } @@ -234,71 +248,86 @@ public class MdmLinkDaoJpaImpl implements IMdmLinkDao { List andPredicates = buildPredicates(theParams, criteriaBuilder, from); Predicate finalQuery = criteriaBuilder.and(andPredicates.toArray(new Predicate[0])); - if ( ! orderList.isEmpty()) { + if (!orderList.isEmpty()) { criteriaQuery.orderBy(orderList); } TypedQuery typedQuery = myEntityManager.createQuery(criteriaQuery.where(finalQuery)); CriteriaQuery countQuery = criteriaBuilder.createQuery(Long.class); - countQuery.select(criteriaBuilder.count(countQuery.from(MdmLink.class))) - .where(finalQuery); + countQuery.select(criteriaBuilder.count(countQuery.from(MdmLink.class))).where(finalQuery); Long totalResults = myEntityManager.createQuery(countQuery).getSingleResult(); MdmPageRequest pageRequest = theParams.getPageRequest(); List result = typedQuery - .setFirstResult(pageRequest.getOffset()) - .setMaxResults(pageRequest.getCount()) - .getResultList(); + .setFirstResult(pageRequest.getOffset()) + .setMaxResults(pageRequest.getCount()) + .getResultList(); - return new PageImpl<>(result, - PageRequest.of(pageRequest.getPage(), pageRequest.getCount()), - totalResults); + return new PageImpl<>(result, PageRequest.of(pageRequest.getPage(), pageRequest.getCount()), totalResults); } @NotNull - private List buildPredicates(MdmQuerySearchParameters theParams, CriteriaBuilder criteriaBuilder, Root from) { + private List buildPredicates( + MdmQuerySearchParameters theParams, CriteriaBuilder criteriaBuilder, Root from) { List andPredicates = new ArrayList<>(); if (theParams.getGoldenResourceId() != null) { - Predicate goldenResourcePredicate = criteriaBuilder.equal(from.get(GOLDEN_RESOURCE_PID_NAME).as(Long.class), (myIdHelperService.getPidOrThrowException(RequestPartitionId.allPartitions(), theParams.getGoldenResourceId())).getId()); + Predicate goldenResourcePredicate = criteriaBuilder.equal( + from.get(GOLDEN_RESOURCE_PID_NAME).as(Long.class), + (myIdHelperService.getPidOrThrowException( + RequestPartitionId.allPartitions(), theParams.getGoldenResourceId())) + .getId()); andPredicates.add(goldenResourcePredicate); } if (theParams.getSourceId() != null) { - Predicate sourceIdPredicate = criteriaBuilder.equal(from.get(SOURCE_PID_NAME).as(Long.class), (myIdHelperService.getPidOrThrowException(RequestPartitionId.allPartitions(), theParams.getSourceId())).getId()); + Predicate sourceIdPredicate = criteriaBuilder.equal( + from.get(SOURCE_PID_NAME).as(Long.class), + (myIdHelperService.getPidOrThrowException( + RequestPartitionId.allPartitions(), theParams.getSourceId())) + .getId()); andPredicates.add(sourceIdPredicate); } if (theParams.getMatchResult() != null) { - Predicate matchResultPredicate = criteriaBuilder.equal(from.get(MATCH_RESULT_NAME).as(MdmMatchResultEnum.class), theParams.getMatchResult()); + Predicate matchResultPredicate = criteriaBuilder.equal( + 
from.get(MATCH_RESULT_NAME).as(MdmMatchResultEnum.class), theParams.getMatchResult()); andPredicates.add(matchResultPredicate); } if (theParams.getLinkSource() != null) { - Predicate linkSourcePredicate = criteriaBuilder.equal(from.get(LINK_SOURCE_NAME).as(MdmLinkSourceEnum.class), theParams.getLinkSource()); + Predicate linkSourcePredicate = criteriaBuilder.equal( + from.get(LINK_SOURCE_NAME).as(MdmLinkSourceEnum.class), theParams.getLinkSource()); andPredicates.add(linkSourcePredicate); } if (!CollectionUtils.isEmpty(theParams.getPartitionIds())) { - Expression exp = from.get(PARTITION_ID_NAME).get(PARTITION_ID_NAME).as(Integer.class); + Expression exp = + from.get(PARTITION_ID_NAME).get(PARTITION_ID_NAME).as(Integer.class); Predicate linkSourcePredicate = exp.in(theParams.getPartitionIds()); andPredicates.add(linkSourcePredicate); } if (theParams.getResourceType() != null) { - Predicate resourceTypePredicate = criteriaBuilder.equal(from.get(GOLDEN_RESOURCE_NAME).get(RESOURCE_TYPE_NAME).as(String.class), theParams.getResourceType()); + Predicate resourceTypePredicate = criteriaBuilder.equal( + from.get(GOLDEN_RESOURCE_NAME).get(RESOURCE_TYPE_NAME).as(String.class), + theParams.getResourceType()); andPredicates.add(resourceTypePredicate); } return andPredicates; } - private List getOrderList(MdmQuerySearchParameters theParams, CriteriaBuilder criteriaBuilder, Root from) { + private List getOrderList( + MdmQuerySearchParameters theParams, CriteriaBuilder criteriaBuilder, Root from) { if (CollectionUtils.isEmpty(theParams.getSort())) { return Collections.emptyList(); } - return theParams.getSort().stream().map(sortSpec -> { - Path path = from.get(sortSpec.getParamName()); - return sortSpec.getOrder() == SortOrderEnum.DESC ? criteriaBuilder.desc(path) : criteriaBuilder.asc(path); - }) - .collect(Collectors.toList()); + return theParams.getSort().stream() + .map(sortSpec -> { + Path path = from.get(sortSpec.getParamName()); + return sortSpec.getOrder() == SortOrderEnum.DESC + ? 
criteriaBuilder.desc(path) + : criteriaBuilder.asc(path); + }) + .collect(Collectors.toList()); } @Override @@ -308,7 +337,8 @@ public class MdmLinkDaoJpaImpl implements IMdmLinkDao { @Override public void deleteLinksWithAnyReferenceToPids(List theResourcePersistentIds) { - List goldenResourcePids = theResourcePersistentIds.stream().map(JpaPid::getId).collect(Collectors.toList()); + List goldenResourcePids = + theResourcePersistentIds.stream().map(JpaPid::getId).collect(Collectors.toList()); // Split into chunks of 500 so older versions of Oracle don't run into issues (500 = 1000 / 2 since the dao // method uses the list twice in the sql predicate) List> chunks = ListUtils.partition(goldenResourcePids, 500); @@ -329,48 +359,56 @@ public class MdmLinkDaoJpaImpl implements IMdmLinkDao { } @Override - public List> getHistoryForIds(MdmHistorySearchParameters theMdmHistorySearchParameters) { + public List> getHistoryForIds( + MdmHistorySearchParameters theMdmHistorySearchParameters) { final AuditQueryCreator auditQueryCreator = myAuditReader.createQuery(); try { - final AuditCriterion goldenResourceIdCriterion = AuditEntity.property(GOLDEN_RESOURCE_PID_NAME).in(convertToLongIds(theMdmHistorySearchParameters.getGoldenResourceIds())); - final AuditCriterion resourceIdCriterion = AuditEntity.property(SOURCE_PID_NAME).in(convertToLongIds(theMdmHistorySearchParameters.getSourceIds())); + final AuditCriterion goldenResourceIdCriterion = AuditEntity.property(GOLDEN_RESOURCE_PID_NAME) + .in(convertToLongIds(theMdmHistorySearchParameters.getGoldenResourceIds())); + final AuditCriterion resourceIdCriterion = AuditEntity.property(SOURCE_PID_NAME) + .in(convertToLongIds(theMdmHistorySearchParameters.getSourceIds())); final AuditCriterion goldenResourceAndOrResourceIdCriterion; - if (! theMdmHistorySearchParameters.getGoldenResourceIds().isEmpty() && ! theMdmHistorySearchParameters.getSourceIds().isEmpty()) { + if (!theMdmHistorySearchParameters.getGoldenResourceIds().isEmpty() + && !theMdmHistorySearchParameters.getSourceIds().isEmpty()) { goldenResourceAndOrResourceIdCriterion = AuditEntity.or(goldenResourceIdCriterion, resourceIdCriterion); - } else if (! theMdmHistorySearchParameters.getGoldenResourceIds().isEmpty()) { + } else if (!theMdmHistorySearchParameters.getGoldenResourceIds().isEmpty()) { goldenResourceAndOrResourceIdCriterion = goldenResourceIdCriterion; - } else if (! 
theMdmHistorySearchParameters.getSourceIds().isEmpty()) { + } else if (!theMdmHistorySearchParameters.getSourceIds().isEmpty()) { goldenResourceAndOrResourceIdCriterion = resourceIdCriterion; } else { - throw new IllegalArgumentException(Msg.code(2298) + "$mdm-link-history Golden resource and source query IDs cannot both be empty."); + throw new IllegalArgumentException(Msg.code(2298) + + "$mdm-link-history Golden resource and source query IDs cannot both be empty."); } @SuppressWarnings("unchecked") - final List mdmLinksWithRevisions = auditQueryCreator.forRevisionsOfEntity(MdmLink.class, false, false) - .add(goldenResourceAndOrResourceIdCriterion) - .addOrder(AuditEntity.property(GOLDEN_RESOURCE_PID_NAME).asc()) - .addOrder(AuditEntity.property(SOURCE_PID_NAME).asc()) - .addOrder(AuditEntity.revisionNumber().desc()) - .getResultList(); + final List mdmLinksWithRevisions = auditQueryCreator + .forRevisionsOfEntity(MdmLink.class, false, false) + .add(goldenResourceAndOrResourceIdCriterion) + .addOrder(AuditEntity.property(GOLDEN_RESOURCE_PID_NAME).asc()) + .addOrder(AuditEntity.property(SOURCE_PID_NAME).asc()) + .addOrder(AuditEntity.revisionNumber().desc()) + .getResultList(); return mdmLinksWithRevisions.stream() - .map(this::buildRevisionFromObjectArray) - .collect(Collectors.toUnmodifiableList()); + .map(this::buildRevisionFromObjectArray) + .collect(Collectors.toUnmodifiableList()); } catch (IllegalStateException exception) { ourLog.error("got an Exception when trying to invoke Envers:", exception); - throw new IllegalStateException(Msg.code(2291) + "Hibernate envers AuditReader is returning Service is not yet initialized but front-end validation has not caught the error that envers is disabled"); + throw new IllegalStateException( + Msg.code(2291) + + "Hibernate envers AuditReader is returning Service is not yet initialized but front-end validation has not caught the error that envers is disabled"); } } @Nonnull private List convertToLongIds(List theMdmHistorySearchParameters) { return theMdmHistorySearchParameters.stream() - .map(id -> myIdHelperService.getPidOrThrowException(RequestPartitionId.allPartitions(), id)) - .map(JpaPid::getId) - .collect(Collectors.toUnmodifiableList()); + .map(id -> myIdHelperService.getPidOrThrowException(RequestPartitionId.allPartitions(), id)) + .map(JpaPid::getId) + .collect(Collectors.toUnmodifiableList()); } @SuppressWarnings("unchecked") @@ -385,7 +423,8 @@ public class MdmLinkDaoJpaImpl implements IMdmLinkDao { final HapiFhirEnversRevision revision = (HapiFhirEnversRevision) revisionUncast; - return new MdmLinkWithRevision<>((MdmLink) mdmLinkUncast, - new EnversRevision((RevisionType)revisionTypeUncast, revision.getRev(), revision.getRevtstmp())); + return new MdmLinkWithRevision<>( + (MdmLink) mdmLinkUncast, + new EnversRevision((RevisionType) revisionTypeUncast, revision.getRev(), revision.getRevtstmp())); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/predicate/SearchFilterParser.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/predicate/SearchFilterParser.java index ce64f87ed3b..4efb51e1bc8 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/predicate/SearchFilterParser.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/predicate/SearchFilterParser.java @@ -29,9 +29,11 @@ import java.util.regex.Pattern; public class SearchFilterParser { - private static final String XML_DATE_PATTERN = 
"[0-9]{4}(-(0[1-9]|1[0-2])(-(0[0-9]|[1-2][0-9]|3[0-1])(T([01][0-9]|2[0-3]):[0-5][0-9]:([0-5][0-9]|60)(\\.[0-9]+)?(Z|([+\\-])((0[0-9]|1[0-3]):[0-5][0-9]|14:00))?)?)?)?"; + private static final String XML_DATE_PATTERN = + "[0-9]{4}(-(0[1-9]|1[0-2])(-(0[0-9]|[1-2][0-9]|3[0-1])(T([01][0-9]|2[0-3]):[0-5][0-9]:([0-5][0-9]|60)(\\.[0-9]+)?(Z|([+\\-])((0[0-9]|1[0-3]):[0-5][0-9]|14:00))?)?)?)?"; private static final Pattern XML_DATE_MATCHER = Pattern.compile(XML_DATE_PATTERN); - private static final List CODES_CompareOperation = Arrays.asList("eq", "ne", "co", "sw", "ew", "gt", "lt", "ge", "le", "pr", "po", "ss", "sb", "in", "re"); + private static final List CODES_CompareOperation = + Arrays.asList("eq", "ne", "co", "sw", "ew", "gt", "lt", "ge", "le", "pr", "po", "ss", "sb", "in", "re"); private static final List CODES_LogicalOperation = Arrays.asList("and", "or", "not"); private String original = null; private int cursor; @@ -51,9 +53,9 @@ public class SearchFilterParser { if (cursor >= original.length()) { result = FilterLexType.fsltEnded; } else { - if (((original.charAt(cursor) >= 'a') && (original.charAt(cursor) <= 'z')) || - ((original.charAt(cursor) >= 'A') && (original.charAt(cursor) <= 'Z')) || - (original.charAt(cursor) == '_')) { + if (((original.charAt(cursor) >= 'a') && (original.charAt(cursor) <= 'z')) + || ((original.charAt(cursor) >= 'A') && (original.charAt(cursor) <= 'Z')) + || (original.charAt(cursor) == '_')) { result = FilterLexType.fsltName; } else if ((original.charAt(cursor) >= '0') && (original.charAt(cursor) <= '9')) { result = FilterLexType.fsltNumber; @@ -70,9 +72,8 @@ public class SearchFilterParser { } else if (original.charAt(cursor) == ']') { result = FilterLexType.fsltCloseSq; } else { - throw new FilterSyntaxException(Msg.code(1052) + String.format("Unknown Character \"%s\" at %d", - peekCh(), - cursor)); + throw new FilterSyntaxException( + Msg.code(1052) + String.format("Unknown Character \"%s\" at %d", peekCh(), cursor)); } } return result; @@ -95,16 +96,15 @@ public class SearchFilterParser { int i = cursor; do { i++; - } while ((i <= original.length() - 1) && - (((original.charAt(i) >= 'a') && (original.charAt(i) <= 'z')) || - ((original.charAt(i) >= 'A') && (original.charAt(i) <= 'Z')) || - ((original.charAt(i) >= '0') && (original.charAt(i) <= '9')) || - (original.charAt(i) == '-') || - (original.charAt(i) == '_') || - (original.charAt(i) == ':'))); + } while ((i <= original.length() - 1) + && (((original.charAt(i) >= 'a') && (original.charAt(i) <= 'z')) + || ((original.charAt(i) >= 'A') && (original.charAt(i) <= 'Z')) + || ((original.charAt(i) >= '0') && (original.charAt(i) <= '9')) + || (original.charAt(i) == '-') + || (original.charAt(i) == '_') + || (original.charAt(i) == ':'))); - result = original.substring(cursor, - i/* - cursor*/); + result = original.substring(cursor, i /* - cursor*/); cursor = i; return result; } @@ -115,13 +115,12 @@ public class SearchFilterParser { int i = cursor; do { i++; - } while ((i <= original.length() - 1) && - (original.charAt(i) > 32) && - (!StringUtils.isWhitespace(original.substring(i, i + 1))) && - (original.charAt(i) != ')') && - (original.charAt(i) != ']')); - result = original.substring(cursor, - i/* - cursor*/); + } while ((i <= original.length() - 1) + && (original.charAt(i) > 32) + && (!StringUtils.isWhitespace(original.substring(i, i + 1))) + && (original.charAt(i) != ')') + && (original.charAt(i) != ']')); + result = original.substring(cursor, i /* - cursor*/); cursor = i; return result; } @@ -132,60 +131,59 
@@ public class SearchFilterParser { int i = cursor; do { i++; - } while ((i <= original.length() - 1) && - (((original.charAt(i) >= '0') && (original.charAt(i) <= '9')) || - (original.charAt(i) == '.') || - (original.charAt(i) == '-') || - (original.charAt(i) == ':') || - (original.charAt(i) == '+') || - (original.charAt(i) == 'T'))); - result = original.substring(cursor, - i/* - cursor*/); + } while ((i <= original.length() - 1) + && (((original.charAt(i) >= '0') && (original.charAt(i) <= '9')) + || (original.charAt(i) == '.') + || (original.charAt(i) == '-') + || (original.charAt(i) == ':') + || (original.charAt(i) == '+') + || (original.charAt(i) == 'T'))); + result = original.substring(cursor, i /* - cursor*/); cursor = i; return result; } private String consumeString() throws FilterSyntaxException { -// int l = 0; + // int l = 0; cursor++; StringBuilder str = new StringBuilder(original.length()); -// setLength(result, length(original)); // can't be longer than that + // setLength(result, length(original)); // can't be longer than that while ((cursor <= original.length()) && (original.charAt(cursor) != '"')) { -// l++; + // l++; if (original.charAt(cursor) != '\\') { str.append(original.charAt(cursor)); -// str.setCharAt(l, original.charAt(cursor)); + // str.setCharAt(l, original.charAt(cursor)); } else { cursor++; if (original.charAt(cursor) == '"') { str.append('"'); -// str.setCharAt(l, '"'); + // str.setCharAt(l, '"'); } else if (original.charAt(cursor) == 't') { str.append('\t'); -// str.setCharAt(l, '\t'); + // str.setCharAt(l, '\t'); } else if (original.charAt(cursor) == 'r') { str.append('\r'); -// str.setCharAt(l, '\r'); + // str.setCharAt(l, '\r'); } else if (original.charAt(cursor) == 'n') { str.append('\n'); -// str.setCharAt(l, '\n'); + // str.setCharAt(l, '\n'); } else { - throw new FilterSyntaxException(Msg.code(1053) + String.format("Unknown escape sequence at %d", - cursor)); + throw new FilterSyntaxException( + Msg.code(1053) + String.format("Unknown escape sequence at %d", cursor)); } } cursor++; } -// SetLength(result, l); + // SetLength(result, l); if ((cursor > original.length()) || (original.charAt(cursor) != '"')) { - throw new FilterSyntaxException(Msg.code(1054) + String.format("Problem with string termination at %d", - cursor)); + throw new FilterSyntaxException( + Msg.code(1054) + String.format("Problem with string termination at %d", cursor)); } if (str.length() == 0) { - throw new FilterSyntaxException(Msg.code(1055) + String.format("Problem with string at %d cannot be empty", - cursor)); + throw new FilterSyntaxException( + Msg.code(1055) + String.format("Problem with string at %d cannot be empty", cursor)); } cursor++; @@ -196,8 +194,8 @@ public class SearchFilterParser { BaseFilter result = parseOpen(); if (cursor < original.length()) { - throw new FilterSyntaxException(Msg.code(1056) + String.format("Expression did not terminate at %d", - cursor)); + throw new FilterSyntaxException( + Msg.code(1056) + String.format("Expression did not terminate at %d", cursor)); } return result; } @@ -212,20 +210,20 @@ public class SearchFilterParser { grp = new FilterParameterGroup(); grp.setContained(parseOpen()); if (peek() != FilterLexType.fsltClose) { - throw new FilterSyntaxException(Msg.code(1057) + String.format("Expected ')' at %d but found %s", - cursor, - peekCh())); + throw new FilterSyntaxException( + Msg.code(1057) + String.format("Expected ')' at %d but found %s", cursor, peekCh())); } cursor++; FilterLexType lexType = peek(); if (lexType == 
FilterLexType.fsltName) { result = parseLogical(grp); - } else if ((lexType == FilterLexType.fsltEnded) || (lexType == FilterLexType.fsltClose) || (lexType == FilterLexType.fsltCloseSq)) { + } else if ((lexType == FilterLexType.fsltEnded) + || (lexType == FilterLexType.fsltClose) + || (lexType == FilterLexType.fsltCloseSq)) { result = grp; } else { - throw new FilterSyntaxException(Msg.code(1058) + String.format("Unexpected Character %s at %d", - peekCh(), - cursor)); + throw new FilterSyntaxException( + Msg.code(1058) + String.format("Unexpected Character %s at %d", peekCh(), cursor)); } } else { s = consumeName(); @@ -248,9 +246,7 @@ public class SearchFilterParser { } else { s = consumeName(); if ((!s.equals("or")) && (!s.equals("and")) && (!s.equals("not"))) { - throw new FilterSyntaxException(Msg.code(1059) + String.format("Unexpected Name %s at %d", - s, - cursor)); + throw new FilterSyntaxException(Msg.code(1059) + String.format("Unexpected Name %s at %d", s, cursor)); } logical = new FilterLogical(); @@ -277,9 +273,8 @@ public class SearchFilterParser { cursor++; result.setFilter(parseOpen()); if (peek() != FilterLexType.fsltCloseSq) { - throw new FilterSyntaxException(Msg.code(1060) + String.format("Expected ']' at %d but found %s", - cursor, - peekCh())); + throw new FilterSyntaxException( + Msg.code(1060) + String.format("Expected ']' at %d but found %s", cursor, peekCh())); } cursor++; } @@ -287,15 +282,13 @@ public class SearchFilterParser { if (peek() == FilterLexType.fsltDot) { cursor++; if (peek() != FilterLexType.fsltName) { - throw new FilterSyntaxException(Msg.code(1061) + String.format("Unexpected Character %s at %d", - peekCh(), - cursor)); + throw new FilterSyntaxException( + Msg.code(1061) + String.format("Unexpected Character %s at %d", peekCh(), cursor)); } result.setNext(parsePath(consumeName())); } else if (result.getFilter() != null) { - throw new FilterSyntaxException(Msg.code(1062) + String.format("Expected '.' at %d but found %s", - cursor, - peekCh())); + throw new FilterSyntaxException( + Msg.code(1062) + String.format("Expected '.' 
at %d but found %s", cursor, peekCh())); } return result; @@ -311,16 +304,13 @@ public class SearchFilterParser { filter.setParamPath(parsePath(name)); if (peek() != FilterLexType.fsltName) { - throw new FilterSyntaxException(Msg.code(1063) + String.format("Unexpected Character %s at %d", - peekCh(), - cursor)); + throw new FilterSyntaxException( + Msg.code(1063) + String.format("Unexpected Character %s at %d", peekCh(), cursor)); } s = consumeName(); int index = CODES_CompareOperation.indexOf(s); if (index == -1) { - throw new FilterSyntaxException(Msg.code(1064) + String.format("Unknown operation %s at %d", - s, - cursor)); + throw new FilterSyntaxException(Msg.code(1064) + String.format("Unknown operation %s at %d", s, cursor)); } filter.setOperation(CompareOperation.values()[index]); @@ -335,38 +325,42 @@ public class SearchFilterParser { filter.setValue(consumeString()); filter.setValueType(FilterValueType.string); } else { - throw new FilterSyntaxException(Msg.code(1065) + String.format("Unexpected Character %s at %d", - peekCh(), - cursor)); + throw new FilterSyntaxException( + Msg.code(1065) + String.format("Unexpected Character %s at %d", peekCh(), cursor)); } // check operation / value type results if (filter.getOperation() == CompareOperation.pr) { - if ((filter.getValue().compareToIgnoreCase("true") != 0) && - (filter.getValue().compareToIgnoreCase("false") != 0)) { - throw new FilterSyntaxException(Msg.code(1066) + String.format("Value %s not valid for operation %s at %d", - filter.getValue(), - CODES_CompareOperation.get(filter.getOperation().ordinal()), - cursor)); + if ((filter.getValue().compareToIgnoreCase("true") != 0) + && (filter.getValue().compareToIgnoreCase("false") != 0)) { + throw new FilterSyntaxException(Msg.code(1066) + + String.format( + "Value %s not valid for operation %s at %d", + filter.getValue(), + CODES_CompareOperation.get(filter.getOperation().ordinal()), + cursor)); } } else if (filter.getOperation() == CompareOperation.po) { if (!isDate(filter.getValue())) { - throw new FilterSyntaxException(Msg.code(1067) + String.format("Value %s not valid for operation %s at %d", - filter.getValue(), - CODES_CompareOperation.get(filter.getOperation().ordinal()), - cursor)); + throw new FilterSyntaxException(Msg.code(1067) + + String.format( + "Value %s not valid for operation %s at %d", + filter.getValue(), + CODES_CompareOperation.get(filter.getOperation().ordinal()), + cursor)); } } lexType = peek(); if (lexType == FilterLexType.fsltName) { result = parseLogical(filter); - } else if ((lexType == FilterLexType.fsltEnded) || (lexType == FilterLexType.fsltClose) || (lexType == FilterLexType.fsltCloseSq)) { + } else if ((lexType == FilterLexType.fsltEnded) + || (lexType == FilterLexType.fsltClose) + || (lexType == FilterLexType.fsltCloseSq)) { result = filter; } else { - throw new FilterSyntaxException(Msg.code(1068) + String.format("Unexpected Character %s at %d", - peekCh(), - cursor)); + throw new FilterSyntaxException( + Msg.code(1068) + String.format("Unexpected Character %s at %d", peekCh(), cursor)); } return result; } @@ -422,7 +416,7 @@ public class SearchFilterParser { fsltCloseSq } - abstract public static class BaseFilter { + public abstract static class BaseFilter { private FilterItemType itemType; @@ -496,7 +490,6 @@ public class SearchFilterParser { FContained = value; } - @Override public String toString() { @@ -521,7 +514,6 @@ public class SearchFilterParser { FParamPath = value; } - public CompareOperation getOperation() { return FOperation; @@ 
-555,9 +547,11 @@ public class SearchFilterParser { @Override public String toString() { if (FValueType == FilterValueType.string) { - return getParamPath().toString() + " " + CODES_CompareOperation.get(getOperation().ordinal()) + " \"" + getValue() + "\""; + return getParamPath().toString() + " " + + CODES_CompareOperation.get(getOperation().ordinal()) + " \"" + getValue() + "\""; } else { - return getParamPath().toString() + " " + CODES_CompareOperation.get(getOperation().ordinal()) + " " + getValue(); + return getParamPath().toString() + " " + + CODES_CompareOperation.get(getOperation().ordinal()) + " " + getValue(); } } } @@ -568,7 +562,6 @@ public class SearchFilterParser { private FilterLogicalOperation FOperation; private BaseFilter FFilter2; - public BaseFilter getFilter1() { return FFilter1; @@ -601,7 +594,8 @@ public class SearchFilterParser { @Override public String toString() { - return FFilter1.toString() + " " + CODES_LogicalOperation.get(getOperation().ordinal()) + " " + FFilter2.toString(); + return FFilter1.toString() + " " + + CODES_LogicalOperation.get(getOperation().ordinal()) + " " + FFilter2.toString(); } } @@ -611,7 +605,7 @@ public class SearchFilterParser { } } - static public BaseFilter parse(String expression) throws FilterSyntaxException { + public static BaseFilter parse(String expression) throws FilterSyntaxException { SearchFilterParser parser = new SearchFilterParser(); parser.original = expression; parser.cursor = 0; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoSubscriptionR4.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoSubscriptionR4.java index 5b30e1c696d..38be25e1239 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoSubscriptionR4.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoSubscriptionR4.java @@ -34,7 +34,8 @@ import org.springframework.beans.factory.annotation.Autowired; import java.util.Date; -public class FhirResourceDaoSubscriptionR4 extends BaseHapiFhirResourceDao implements IFhirResourceDaoSubscription { +public class FhirResourceDaoSubscriptionR4 extends BaseHapiFhirResourceDao + implements IFhirResourceDaoSubscription { @Autowired private ISubscriptionTableDao mySubscriptionTableDao; @@ -47,7 +48,8 @@ public class FhirResourceDaoSubscriptionR4 extends BaseHapiFhirResourceDao { @@ -47,7 +47,6 @@ public class FhirSystemDaoR4 extends BaseHapiFhirSystemDao { return JpaResourceDao.throwProcessMessageNotImplemented(); } - protected Meta toMeta(Collection tagDefinitions) { Meta retVal = new Meta(); for (TagDefinition next : tagDefinitions) { @@ -56,14 +55,19 @@ public class FhirSystemDaoR4 extends BaseHapiFhirSystemDao { retVal.addProfile(next.getCode()); break; case SECURITY_LABEL: - retVal.addSecurity().setSystem(next.getSystem()).setCode(next.getCode()).setDisplay(next.getDisplay()); + retVal.addSecurity() + .setSystem(next.getSystem()) + .setCode(next.getCode()) + .setDisplay(next.getDisplay()); break; case TAG: - retVal.addTag().setSystem(next.getSystem()).setCode(next.getCode()).setDisplay(next.getDisplay()); + retVal.addTag() + .setSystem(next.getSystem()) + .setCode(next.getCode()) + .setDisplay(next.getDisplay()); break; } } return retVal; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4b/FhirResourceDaoSubscriptionR4B.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4b/FhirResourceDaoSubscriptionR4B.java index 
c12fec750e6..f2c7719138b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4b/FhirResourceDaoSubscriptionR4B.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4b/FhirResourceDaoSubscriptionR4B.java @@ -27,11 +27,12 @@ import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4b.model.Subscription; -public class FhirResourceDaoSubscriptionR4B extends BaseHapiFhirResourceDao implements IFhirResourceDaoSubscription { +public class FhirResourceDaoSubscriptionR4B extends BaseHapiFhirResourceDao + implements IFhirResourceDaoSubscription { @Override - public Long getSubscriptionTablePidForSubscriptionResource(IIdType theId, RequestDetails theRequest, TransactionDetails theTransactionDetails) { + public Long getSubscriptionTablePidForSubscriptionResource( + IIdType theId, RequestDetails theRequest, TransactionDetails theTransactionDetails) { throw new UnsupportedOperationException(Msg.code(2150)); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4b/FhirSystemDaoR4B.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4b/FhirSystemDaoR4B.java index 681fe318d0f..8867aa89d13 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4b/FhirSystemDaoR4B.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4b/FhirSystemDaoR4B.java @@ -27,9 +27,9 @@ import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.r4b.model.Bundle; import org.hl7.fhir.r4b.model.Meta; -import javax.persistence.TypedQuery; import java.util.Collection; import java.util.List; +import javax.persistence.TypedQuery; public class FhirSystemDaoR4B extends BaseHapiFhirSystemDao { @@ -49,7 +49,6 @@ public class FhirSystemDaoR4B extends BaseHapiFhirSystemDao { return JpaResourceDao.throwProcessMessageNotImplemented(); } - protected Meta toMeta(Collection tagDefinitions) { Meta retVal = new Meta(); for (TagDefinition next : tagDefinitions) { @@ -58,15 +57,19 @@ public class FhirSystemDaoR4B extends BaseHapiFhirSystemDao { retVal.addProfile(next.getCode()); break; case SECURITY_LABEL: - retVal.addSecurity().setSystem(next.getSystem()).setCode(next.getCode()).setDisplay(next.getDisplay()); + retVal.addSecurity() + .setSystem(next.getSystem()) + .setCode(next.getCode()) + .setDisplay(next.getDisplay()); break; case TAG: - retVal.addTag().setSystem(next.getSystem()).setCode(next.getCode()).setDisplay(next.getDisplay()); + retVal.addTag() + .setSystem(next.getSystem()) + .setCode(next.getCode()) + .setDisplay(next.getDisplay()); break; } } return retVal; } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4b/TransactionProcessorVersionAdapterR4B.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4b/TransactionProcessorVersionAdapterR4B.java index 74fc3d8832e..f50d692e1f8 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4b/TransactionProcessorVersionAdapterR4B.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r4b/TransactionProcessorVersionAdapterR4B.java @@ -34,7 +34,8 @@ import org.hl7.fhir.r4b.model.Resource; import java.util.Date; import java.util.List; -public class TransactionProcessorVersionAdapterR4B implements ITransactionProcessorVersionAdapter { +public class TransactionProcessorVersionAdapterR4B + implements ITransactionProcessorVersionAdapter { @Override public void setResponseStatus(Bundle.BundleEntryComponent theBundleEntry, String 
theStatus) { theBundleEntry.getResponse().setStatus(theStatus); @@ -64,12 +65,13 @@ public class TransactionProcessorVersionAdapterR4B implements ITransactionProces } @Override - public void populateEntryWithOperationOutcome(BaseServerResponseException theCaughtEx, Bundle.BundleEntryComponent theEntry) { + public void populateEntryWithOperationOutcome( + BaseServerResponseException theCaughtEx, Bundle.BundleEntryComponent theEntry) { OperationOutcome oo = new OperationOutcome(); oo.addIssue() - .setSeverity(OperationOutcome.IssueSeverity.ERROR) - .setDiagnostics(theCaughtEx.getMessage()) - .setCode(OperationOutcome.IssueType.EXCEPTION); + .setSeverity(OperationOutcome.IssueSeverity.ERROR) + .setDiagnostics(theCaughtEx.getMessage()) + .setCode(OperationOutcome.IssueType.EXCEPTION); theEntry.getResponse().setOutcome(oo); } @@ -114,7 +116,6 @@ public class TransactionProcessorVersionAdapterR4B implements ITransactionProces return theEntry.getFullUrl(); } - @Override public void setFullUrl(Bundle.BundleEntryComponent theEntry, String theFullUrl) { theEntry.setFullUrl(theFullUrl); @@ -169,5 +170,4 @@ public class TransactionProcessorVersionAdapterR4B implements ITransactionProces public void setRequestUrl(Bundle.BundleEntryComponent theEntry, String theUrl) { theEntry.getRequest().setUrl(theUrl); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoSubscriptionR5.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoSubscriptionR5.java index 270390828f6..ff7f8e2dcd5 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoSubscriptionR5.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoSubscriptionR5.java @@ -34,7 +34,8 @@ import org.springframework.beans.factory.annotation.Autowired; import java.util.Date; -public class FhirResourceDaoSubscriptionR5 extends BaseHapiFhirResourceDao implements IFhirResourceDaoSubscription { +public class FhirResourceDaoSubscriptionR5 extends BaseHapiFhirResourceDao + implements IFhirResourceDaoSubscription { @Autowired private ISubscriptionTableDao mySubscriptionTableDao; @@ -47,7 +48,8 @@ public class FhirResourceDaoSubscriptionR5 extends BaseHapiFhirResourceDao { @@ -49,7 +49,6 @@ public class FhirSystemDaoR5 extends BaseHapiFhirSystemDao { return JpaResourceDao.throwProcessMessageNotImplemented(); } - protected Meta toMeta(Collection tagDefinitions) { Meta retVal = new Meta(); for (TagDefinition next : tagDefinitions) { @@ -58,15 +57,19 @@ public class FhirSystemDaoR5 extends BaseHapiFhirSystemDao { retVal.addProfile(next.getCode()); break; case SECURITY_LABEL: - retVal.addSecurity().setSystem(next.getSystem()).setCode(next.getCode()).setDisplay(next.getDisplay()); + retVal.addSecurity() + .setSystem(next.getSystem()) + .setCode(next.getCode()) + .setDisplay(next.getDisplay()); break; case TAG: - retVal.addTag().setSystem(next.getSystem()).setCode(next.getCode()).setDisplay(next.getDisplay()); + retVal.addTag() + .setSystem(next.getSystem()) + .setCode(next.getCode()) + .setDisplay(next.getDisplay()); break; } } return retVal; } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/TransactionProcessorVersionAdapterR5.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/TransactionProcessorVersionAdapterR5.java index c1eca2d61a3..cf02d612e3e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/TransactionProcessorVersionAdapterR5.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/r5/TransactionProcessorVersionAdapterR5.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.jpa.dao.r5; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.dao.ITransactionProcessorVersionAdapter; import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; @@ -34,7 +34,8 @@ import org.hl7.fhir.r5.model.Resource; import java.util.Date; import java.util.List; -public class TransactionProcessorVersionAdapterR5 implements ITransactionProcessorVersionAdapter { +public class TransactionProcessorVersionAdapterR5 + implements ITransactionProcessorVersionAdapter { @Override public void setResponseStatus(Bundle.BundleEntryComponent theBundleEntry, String theStatus) { theBundleEntry.getResponse().setStatus(theStatus); @@ -64,12 +65,13 @@ public class TransactionProcessorVersionAdapterR5 implements ITransactionProcess } @Override - public void populateEntryWithOperationOutcome(BaseServerResponseException theCaughtEx, Bundle.BundleEntryComponent theEntry) { + public void populateEntryWithOperationOutcome( + BaseServerResponseException theCaughtEx, Bundle.BundleEntryComponent theEntry) { OperationOutcome oo = new OperationOutcome(); oo.addIssue() - .setSeverity(OperationOutcome.IssueSeverity.ERROR) - .setDiagnostics(theCaughtEx.getMessage()) - .setCode(OperationOutcome.IssueType.EXCEPTION); + .setSeverity(OperationOutcome.IssueSeverity.ERROR) + .setDiagnostics(theCaughtEx.getMessage()) + .setCode(OperationOutcome.IssueType.EXCEPTION); theEntry.getResponse().setOutcome(oo); } @@ -114,7 +116,6 @@ public class TransactionProcessorVersionAdapterR5 implements ITransactionProcess return theEntry.getFullUrl(); } - @Override public void setFullUrl(Bundle.BundleEntryComponent theEntry, String theFullUrl) { theEntry.setFullUrl(theFullUrl); @@ -169,5 +170,4 @@ public class TransactionProcessorVersionAdapterR5 implements ITransactionProcess public void setRequestUrl(Bundle.BundleEntryComponent theEntry, String theUrl) { theEntry.getRequest().setUrl(theUrl); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ExtendedHSearchClauseBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ExtendedHSearchClauseBuilder.java index c30be87bf7e..8dde509dbb6 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ExtendedHSearchClauseBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ExtendedHSearchClauseBuilder.java @@ -22,8 +22,8 @@ package ca.uhn.fhir.jpa.dao.search; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.RuntimeSearchParam; import ca.uhn.fhir.i18n.Msg; -import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel; +import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.jpa.model.util.UcumServiceUtil; import ca.uhn.fhir.jpa.search.HapiHSearchAnalysisConfigurers; import ca.uhn.fhir.model.api.IQueryParameterType; @@ -56,7 +56,6 @@ import org.hibernate.search.engine.search.predicate.dsl.WildcardPredicateOptions import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; import java.math.BigDecimal; import java.time.Instant; import java.util.Arrays; @@ -67,6 +66,7 @@ import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; +import 
javax.annotation.Nonnull; import static ca.uhn.fhir.jpa.dao.search.PathContext.joinPath; import static ca.uhn.fhir.jpa.model.search.HSearchIndexWriter.IDX_STRING_EXACT; @@ -99,10 +99,14 @@ public class ExtendedHSearchClauseBuilder { public final StorageSettings myStorageSettings; final PathContext myRootContext; - final List ordinalSearchPrecisions = Arrays.asList(TemporalPrecisionEnum.YEAR, TemporalPrecisionEnum.MONTH, TemporalPrecisionEnum.DAY); + final List ordinalSearchPrecisions = + Arrays.asList(TemporalPrecisionEnum.YEAR, TemporalPrecisionEnum.MONTH, TemporalPrecisionEnum.DAY); - public ExtendedHSearchClauseBuilder(FhirContext myFhirContext, StorageSettings theStorageSettings, - BooleanPredicateClausesStep theRootClause, SearchPredicateFactory thePredicateFactory) { + public ExtendedHSearchClauseBuilder( + FhirContext myFhirContext, + StorageSettings theStorageSettings, + BooleanPredicateClausesStep theRootClause, + SearchPredicateFactory thePredicateFactory) { this.myFhirContext = myFhirContext; this.myStorageSettings = theStorageSettings; this.myRootClause = theRootClause; @@ -124,7 +128,8 @@ public class ExtendedHSearchClauseBuilder { String nextValueTrimmed; if (nextOr instanceof StringParam) { StringParam nextOrString = (StringParam) nextOr; - nextValueTrimmed = StringUtils.defaultString(nextOrString.getValue()).trim(); + nextValueTrimmed = + StringUtils.defaultString(nextOrString.getValue()).trim(); } else if (nextOr instanceof TokenParam) { TokenParam nextOrToken = (TokenParam) nextOr; nextValueTrimmed = nextOrToken.getValue(); @@ -135,7 +140,8 @@ public class ExtendedHSearchClauseBuilder { nextValueTrimmed = nextValueTrimmed.substring(0, nextValueTrimmed.indexOf("/_history")); } } else { - throw new IllegalArgumentException(Msg.code(1088) + "Unsupported full-text param type: " + nextOr.getClass()); + throw new IllegalArgumentException( + Msg.code(1088) + "Unsupported full-text param type: " + nextOr.getClass()); } if (isNotBlank(nextValueTrimmed)) { terms.add(nextValueTrimmed); @@ -144,7 +150,6 @@ public class ExtendedHSearchClauseBuilder { return terms; } - public void addTokenUnmodifiedSearch(String theSearchParamName, List> theAndOrTerms) { if (CollectionUtils.isEmpty(theAndOrTerms)) { return; @@ -154,13 +159,12 @@ public class ExtendedHSearchClauseBuilder { ourLog.debug("addTokenUnmodifiedSearch {} {}", theSearchParamName, nextAnd); List clauses = nextAnd.stream() - .map(orTerm -> buildTokenUnmodifiedMatchOn(orTerm, spContext)) - .collect(Collectors.toList()); + .map(orTerm -> buildTokenUnmodifiedMatchOn(orTerm, spContext)) + .collect(Collectors.toList()); PredicateFinalStep finalClause = spContext.orPredicateOrSingle(clauses); myRootClause.must(finalClause); } - } private PathContext contextForFlatSP(String theSearchParamName) { @@ -174,21 +178,35 @@ public class ExtendedHSearchClauseBuilder { TokenParam token = (TokenParam) orTerm; if (StringUtils.isBlank(token.getSystem())) { // bare value - return thePathContext.match().field(joinPath(pathPrefix, INDEX_TYPE_TOKEN, TOKEN_CODE)).matching(token.getValue()); + return thePathContext + .match() + .field(joinPath(pathPrefix, INDEX_TYPE_TOKEN, TOKEN_CODE)) + .matching(token.getValue()); } else if (StringUtils.isBlank(token.getValue())) { // system without value - return thePathContext.match().field(joinPath(pathPrefix, INDEX_TYPE_TOKEN, TOKEN_SYSTEM)).matching(token.getSystem()); + return thePathContext + .match() + .field(joinPath(pathPrefix, INDEX_TYPE_TOKEN, TOKEN_SYSTEM)) + .matching(token.getSystem()); } else { // 
system + value - return thePathContext.match().field(joinPath(pathPrefix, INDEX_TYPE_TOKEN, TOKEN_SYSTEM_CODE)).matching(token.getValueAsQueryToken(this.myFhirContext)); + return thePathContext + .match() + .field(joinPath(pathPrefix, INDEX_TYPE_TOKEN, TOKEN_SYSTEM_CODE)) + .matching(token.getValueAsQueryToken(this.myFhirContext)); } } else if (orTerm instanceof StringParam) { - // MB I don't quite understand why FhirResourceDaoR4SearchNoFtTest.testSearchByIdParamWrongType() uses String but here we are + // MB I don't quite understand why FhirResourceDaoR4SearchNoFtTest.testSearchByIdParamWrongType() uses + // String but here we are StringParam string = (StringParam) orTerm; // treat a string as a code with no system (like _id) - return thePathContext.match().field(joinPath(pathPrefix, INDEX_TYPE_TOKEN, TOKEN_CODE)).matching(string.getValue()); + return thePathContext + .match() + .field(joinPath(pathPrefix, INDEX_TYPE_TOKEN, TOKEN_CODE)) + .matching(string.getValue()); } else { - throw new IllegalArgumentException(Msg.code(1089) + "Unexpected param type for token search-param: " + orTerm.getClass().getName()); + throw new IllegalArgumentException(Msg.code(1089) + "Unexpected param type for token search-param: " + + orTerm.getClass().getName()); } } @@ -198,8 +216,8 @@ public class ExtendedHSearchClauseBuilder { } String fieldName; switch (theSearchParamName) { - // _content and _text were here first, and don't obey our mapping. - // Leave them as-is for backwards compatibility. + // _content and _text were here first, and don't obey our mapping. + // Leave them as-is for backwards compatibility. case Constants.PARAM_CONTENT: fieldName = "myContentText"; break; @@ -215,21 +233,21 @@ public class ExtendedHSearchClauseBuilder { Set orTerms = TermHelper.makePrefixSearchTerm(extractOrStringParams(nextOrList)); ourLog.debug("addStringTextSearch {}, {}", theSearchParamName, orTerms); if (!orTerms.isEmpty()) { - String query = orTerms.stream() - .map(s -> "( " + s + " )") - .collect(Collectors.joining(" | ")); + String query = orTerms.stream().map(s -> "( " + s + " )").collect(Collectors.joining(" | ")); myRootClause.must(myRootContext - .simpleQueryString() - .field(fieldName) - .matching(query) - .defaultOperator(BooleanOperator.AND)); // term value may contain multiple tokens. Require all of them to be present. + .simpleQueryString() + .field(fieldName) + .matching(query) + .defaultOperator( + BooleanOperator + .AND)); // term value may contain multiple tokens. Require all of them to be + // present. 
} else { ourLog.warn("No Terms found in query parameter {}", nextOrList); } } } - public void addStringExactSearch(String theSearchParamName, List> theStringAndOrTerms) { String fieldPath = joinPath(SEARCH_PARAM_ROOT, theSearchParamName, INDEX_TYPE_STRING, IDX_STRING_EXACT); @@ -237,25 +255,24 @@ public class ExtendedHSearchClauseBuilder { Set terms = extractOrStringParams(nextAnd); ourLog.debug("addStringExactSearch {} {}", theSearchParamName, terms); List orTerms = terms.stream() - .map(s -> myRootContext.match().field(fieldPath).matching(s)) - .collect(Collectors.toList()); + .map(s -> myRootContext.match().field(fieldPath).matching(s)) + .collect(Collectors.toList()); myRootClause.must(myRootContext.orPredicateOrSingle(orTerms)); } } - public void addStringContainsSearch(String theSearchParamName, List> theStringAndOrTerms) { + public void addStringContainsSearch( + String theSearchParamName, List> theStringAndOrTerms) { String fieldPath = joinPath(SEARCH_PARAM_ROOT, theSearchParamName, INDEX_TYPE_STRING, IDX_STRING_NORMALIZED); for (List nextAnd : theStringAndOrTerms) { Set terms = extractOrStringParams(nextAnd); ourLog.debug("addStringContainsSearch {} {}", theSearchParamName, terms); List orTerms = terms.stream() - // wildcard is a term-level query, so queries aren't analyzed. Do our own normalization first. - .map(this::normalize) - .map(s -> myRootContext - .wildcard().field(fieldPath) - .matching("*" + s + "*")) - .collect(Collectors.toList()); + // wildcard is a term-level query, so queries aren't analyzed. Do our own normalization first. + .map(this::normalize) + .map(s -> myRootContext.wildcard().field(fieldPath).matching("*" + s + "*")) + .collect(Collectors.toList()); myRootClause.must(myRootContext.orPredicateOrSingle(orTerms)); } @@ -273,36 +290,39 @@ public class ExtendedHSearchClauseBuilder { return StringUtil.normalizeStringForSearchIndexing(theString).toLowerCase(Locale.ROOT); } - public void addStringUnmodifiedSearch(String theSearchParamName, List> theStringAndOrTerms) { + public void addStringUnmodifiedSearch( + String theSearchParamName, List> theStringAndOrTerms) { PathContext context = contextForFlatSP(theSearchParamName); for (List nextOrList : theStringAndOrTerms) { Set terms = extractOrStringParams(nextOrList); ourLog.debug("addStringUnmodifiedSearch {} {}", theSearchParamName, terms); List orTerms = terms.stream() - .map(s -> - buildStringUnmodifiedClause(s, context)) - .collect(Collectors.toList()); + .map(s -> buildStringUnmodifiedClause(s, context)) + .collect(Collectors.toList()); myRootClause.must(context.orPredicateOrSingle(orTerms)); } } private WildcardPredicateOptionsStep buildStringUnmodifiedClause(String theString, PathContext theContext) { - return theContext.wildcard() - .field(joinPath(theContext.getContextPath(), INDEX_TYPE_STRING, IDX_STRING_NORMALIZED)) - // wildcard is a term-level query, so it isn't analyzed. Do our own case-folding to match the normStringAnalyzer - .matching(normalize(theString) + "*"); + return theContext + .wildcard() + .field(joinPath(theContext.getContextPath(), INDEX_TYPE_STRING, IDX_STRING_NORMALIZED)) + // wildcard is a term-level query, so it isn't analyzed. 
Do our own case-folding to match the + // normStringAnalyzer + .matching(normalize(theString) + "*"); } - public void addReferenceUnchainedSearch(String theSearchParamName, List> theReferenceAndOrTerms) { + public void addReferenceUnchainedSearch( + String theSearchParamName, List> theReferenceAndOrTerms) { String fieldPath = joinPath(SEARCH_PARAM_ROOT, theSearchParamName, "reference", "value"); for (List nextAnd : theReferenceAndOrTerms) { Set terms = extractOrStringParams(nextAnd); ourLog.trace("reference unchained search {}", terms); List orTerms = terms.stream() - .map(s -> myRootContext.match().field(fieldPath).matching(s)) - .collect(Collectors.toList()); + .map(s -> myRootContext.match().field(fieldPath).matching(s)) + .collect(Collectors.toList()); myRootClause.must(myRootContext.orPredicateOrSingle(orTerms)); } @@ -389,8 +409,8 @@ public class ExtendedHSearchClauseBuilder { PathContext spContext = contextForFlatSP(theSearchParamName); List clauses = nextOrList.stream() - .map(d -> buildDateTermClause(d, spContext)) - .collect(Collectors.toList()); + .map(d -> buildDateTermClause(d, spContext)) + .collect(Collectors.toList()); myRootClause.must(myRootContext.orPredicateOrSingle(clauses)); } @@ -400,8 +420,8 @@ public class ExtendedHSearchClauseBuilder { DateParam dateParam = (DateParam) theQueryParameter; boolean isOrdinalSearch = ordinalSearchPrecisions.contains(dateParam.getPrecision()); return isOrdinalSearch - ? generateDateOrdinalSearchTerms(dateParam, theSpContext) - : generateDateInstantSearchTerms(dateParam, theSpContext); + ? generateDateOrdinalSearchTerms(dateParam, theSpContext) + : generateDateInstantSearchTerms(dateParam, theSpContext); } private PredicateFinalStep generateDateOrdinalSearchTerms(DateParam theDateParam, PathContext theSpContext) { @@ -425,9 +445,8 @@ public class ExtendedHSearchClauseBuilder { if (Objects.isNull(prefix) || prefix == ParamPrefixEnum.EQUAL) { // For equality prefix we would like the date to fall between the lower and upper bound List predicateSteps = Arrays.asList( - theSpContext.range().field(lowerOrdinalField).atLeast(lowerBoundAsOrdinal), - theSpContext.range().field(upperOrdinalField).atMost(upperBoundAsOrdinal) - ); + theSpContext.range().field(lowerOrdinalField).atLeast(lowerBoundAsOrdinal), + theSpContext.range().field(upperOrdinalField).atMost(upperBoundAsOrdinal)); BooleanPredicateClausesStep booleanStep = theSpContext.bool(); predicateSteps.forEach(booleanStep::must); return booleanStep; @@ -443,15 +462,15 @@ public class ExtendedHSearchClauseBuilder { return theSpContext.range().field(lowerOrdinalField).atMost(lowerBoundAsOrdinal); } else if (ParamPrefixEnum.NOT_EQUAL == prefix) { List predicateSteps = Arrays.asList( - theSpContext.range().field(upperOrdinalField).lessThan(lowerBoundAsOrdinal), - theSpContext.range().field(lowerOrdinalField).greaterThan(upperBoundAsOrdinal) - ); + theSpContext.range().field(upperOrdinalField).lessThan(lowerBoundAsOrdinal), + theSpContext.range().field(lowerOrdinalField).greaterThan(upperBoundAsOrdinal)); BooleanPredicateClausesStep booleanStep = theSpContext.bool(); predicateSteps.forEach(booleanStep::should); booleanStep.minimumShouldMatchNumber(1); return booleanStep; } - throw new IllegalArgumentException(Msg.code(2255) + "Date search param does not support prefix of type: " + prefix); + throw new IllegalArgumentException( + Msg.code(2255) + "Date search param does not support prefix of type: " + prefix); } private PredicateFinalStep generateDateInstantSearchTerms(DateParam theDateParam, 
PathContext theSpContext) { @@ -462,9 +481,8 @@ public class ExtendedHSearchClauseBuilder { if (ParamPrefixEnum.NOT_EQUAL == prefix) { Instant dateInstant = theDateParam.getValue().toInstant(); List predicateSteps = Arrays.asList( - theSpContext.range().field(upperInstantField).lessThan(dateInstant), - theSpContext.range().field(lowerInstantField).greaterThan(dateInstant) - ); + theSpContext.range().field(upperInstantField).lessThan(dateInstant), + theSpContext.range().field(lowerInstantField).greaterThan(dateInstant)); BooleanPredicateClausesStep booleanStep = theSpContext.bool(); predicateSteps.forEach(booleanStep::should); booleanStep.minimumShouldMatchNumber(1); @@ -473,32 +491,53 @@ public class ExtendedHSearchClauseBuilder { // Consider lower and upper bounds for building range predicates DateRangeParam dateRange = new DateRangeParam(theDateParam); - Instant lowerBoundAsInstant = Optional.ofNullable(dateRange.getLowerBound()).map(param -> param.getValue().toInstant()).orElse(null); - Instant upperBoundAsInstant = Optional.ofNullable(dateRange.getUpperBound()).map(param -> param.getValue().toInstant()).orElse(null); + Instant lowerBoundAsInstant = Optional.ofNullable(dateRange.getLowerBound()) + .map(param -> param.getValue().toInstant()) + .orElse(null); + Instant upperBoundAsInstant = Optional.ofNullable(dateRange.getUpperBound()) + .map(param -> param.getValue().toInstant()) + .orElse(null); if (prefix == ParamPrefixEnum.EQUAL) { // For equality prefix we would like the date to fall between the lower and upper bound List predicateSteps = Arrays.asList( - ((SearchPredicateFactory) theSpContext).range().field(lowerInstantField).atLeast(lowerBoundAsInstant), - ((SearchPredicateFactory) theSpContext).range().field(upperInstantField).atMost(upperBoundAsInstant) - ); + ((SearchPredicateFactory) theSpContext) + .range() + .field(lowerInstantField) + .atLeast(lowerBoundAsInstant), + ((SearchPredicateFactory) theSpContext) + .range() + .field(upperInstantField) + .atMost(upperBoundAsInstant)); BooleanPredicateClausesStep booleanStep = ((SearchPredicateFactory) theSpContext).bool(); predicateSteps.forEach(booleanStep::must); return booleanStep; } else if (ParamPrefixEnum.GREATERTHAN == prefix || ParamPrefixEnum.STARTS_AFTER == prefix) { - return ((SearchPredicateFactory) theSpContext).range().field(upperInstantField).greaterThan(lowerBoundAsInstant); + return ((SearchPredicateFactory) theSpContext) + .range() + .field(upperInstantField) + .greaterThan(lowerBoundAsInstant); } else if (ParamPrefixEnum.GREATERTHAN_OR_EQUALS == prefix) { - return ((SearchPredicateFactory) theSpContext).range().field(upperInstantField).atLeast(lowerBoundAsInstant); + return ((SearchPredicateFactory) theSpContext) + .range() + .field(upperInstantField) + .atLeast(lowerBoundAsInstant); } else if (ParamPrefixEnum.LESSTHAN == prefix || ParamPrefixEnum.ENDS_BEFORE == prefix) { - return ((SearchPredicateFactory) theSpContext).range().field(lowerInstantField).lessThan(upperBoundAsInstant); + return ((SearchPredicateFactory) theSpContext) + .range() + .field(lowerInstantField) + .lessThan(upperBoundAsInstant); } else if (ParamPrefixEnum.LESSTHAN_OR_EQUALS == prefix) { - return ((SearchPredicateFactory) theSpContext).range().field(lowerInstantField).atMost(upperBoundAsInstant); + return ((SearchPredicateFactory) theSpContext) + .range() + .field(lowerInstantField) + .atMost(upperBoundAsInstant); } - throw new IllegalArgumentException(Msg.code(2256) + "Date search param does not support prefix of type: " + prefix); + throw 
new IllegalArgumentException( + Msg.code(2256) + "Date search param does not support prefix of type: " + prefix); } - /** * Differences with DB search: * _ is not all-normalized-or-all-not. Each parameter is applied on quantity or normalized quantity depending on UCUM fitness @@ -507,25 +546,26 @@ public class ExtendedHSearchClauseBuilder { * Strategy: For each parameter, if it can be canonicalized, it is, and used against 'normalized-value-quantity' index * otherwise it is applied as-is to 'value-quantity' */ - public void addQuantityUnmodifiedSearch(String theSearchParamName, List> theQuantityAndOrTerms) { + public void addQuantityUnmodifiedSearch( + String theSearchParamName, List> theQuantityAndOrTerms) { for (List nextOrList : theQuantityAndOrTerms) { // we build quantity predicates in a nested context so we can match units and systems with values. - PredicateFinalStep nestedClause = myRootContext.buildPredicateInNestedContext( - theSearchParamName, - nextedContext -> { - List orClauses = nextOrList.stream() - .map(quantityTerm -> buildQuantityTermClause(quantityTerm, nextedContext)) - .collect(Collectors.toList()); + PredicateFinalStep nestedClause = + myRootContext.buildPredicateInNestedContext(theSearchParamName, nextedContext -> { + List orClauses = nextOrList.stream() + .map(quantityTerm -> buildQuantityTermClause(quantityTerm, nextedContext)) + .collect(Collectors.toList()); - return nextedContext.orPredicateOrSingle(orClauses); - }); + return nextedContext.orPredicateOrSingle(orClauses); + }); myRootClause.must(nestedClause); } } - private BooleanPredicateClausesStep buildQuantityTermClause(IQueryParameterType theQueryParameter, PathContext thePathContext) { + private BooleanPredicateClausesStep buildQuantityTermClause( + IQueryParameterType theQueryParameter, PathContext thePathContext) { BooleanPredicateClausesStep quantityClause = ((SearchPredicateFactory) thePathContext).bool(); @@ -533,15 +573,18 @@ public class ExtendedHSearchClauseBuilder { ParamPrefixEnum activePrefix = qtyParam.getPrefix() == null ? 
ParamPrefixEnum.EQUAL : qtyParam.getPrefix(); String quantityElement = joinPath(thePathContext.getContextPath(), INDEX_TYPE_QUANTITY); - if (myStorageSettings.getNormalizedQuantitySearchLevel() == NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_SUPPORTED) { + if (myStorageSettings.getNormalizedQuantitySearchLevel() + == NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_SUPPORTED) { QuantityParam canonicalQty = UcumServiceUtil.toCanonicalQuantityOrNull(qtyParam); if (canonicalQty != null) { String valueFieldPath = joinPath(quantityElement, QTY_VALUE_NORM); - quantityClause.must(buildNumericClause(valueFieldPath, activePrefix, canonicalQty.getValue(), thePathContext)); - quantityClause.must(((SearchPredicateFactory) thePathContext).match() - .field(joinPath(quantityElement, QTY_CODE_NORM)) - .matching(canonicalQty.getUnits())); + quantityClause.must( + buildNumericClause(valueFieldPath, activePrefix, canonicalQty.getValue(), thePathContext)); + quantityClause.must(((SearchPredicateFactory) thePathContext) + .match() + .field(joinPath(quantityElement, QTY_CODE_NORM)) + .matching(canonicalQty.getUnits())); return quantityClause; } } @@ -550,22 +593,23 @@ public class ExtendedHSearchClauseBuilder { quantityClause.must(buildNumericClause(valueFieldPath, activePrefix, qtyParam.getValue(), thePathContext)); - if ( isNotBlank(qtyParam.getSystem()) ) { - quantityClause.must( - ((SearchPredicateFactory) thePathContext).match() - .field(joinPath(quantityElement, QTY_SYSTEM)).matching(qtyParam.getSystem()) ); + if (isNotBlank(qtyParam.getSystem())) { + quantityClause.must(((SearchPredicateFactory) thePathContext) + .match() + .field(joinPath(quantityElement, QTY_SYSTEM)) + .matching(qtyParam.getSystem())); } - if ( isNotBlank(qtyParam.getUnits()) ) { - quantityClause.must( - ((SearchPredicateFactory) thePathContext).match() - .field(joinPath(quantityElement, QTY_CODE)).matching(qtyParam.getUnits()) ); + if (isNotBlank(qtyParam.getUnits())) { + quantityClause.must(((SearchPredicateFactory) thePathContext) + .match() + .field(joinPath(quantityElement, QTY_CODE)) + .matching(qtyParam.getUnits())); } return quantityClause; } - /** * Shared helper between quantity and number * @param valueFieldPath The path leading to index node @@ -575,7 +619,8 @@ public class ExtendedHSearchClauseBuilder { * @return a query predicate applying the prefix to the value */ @Nonnull - private PredicateFinalStep buildNumericClause(String valueFieldPath, ParamPrefixEnum thePrefix, BigDecimal theNumberValue, PathContext thePathContext) { + private PredicateFinalStep buildNumericClause( + String valueFieldPath, ParamPrefixEnum thePrefix, BigDecimal theNumberValue, PathContext thePathContext) { PredicateFinalStep predicate = null; double value = theNumberValue.doubleValue(); @@ -584,44 +629,62 @@ public class ExtendedHSearchClauseBuilder { ParamPrefixEnum activePrefix = thePrefix == null ? 
ParamPrefixEnum.EQUAL : thePrefix; switch (activePrefix) { - // searches for resource quantity between passed param value +/- 10% + // searches for resource quantity between passed param value +/- 10% case APPROXIMATE: - predicate = ((SearchPredicateFactory) thePathContext).range().field(valueFieldPath) - .between(value-approxTolerance, value+approxTolerance); + predicate = ((SearchPredicateFactory) thePathContext) + .range() + .field(valueFieldPath) + .between(value - approxTolerance, value + approxTolerance); break; - // searches for resource quantity between passed param value +/- 5% + // searches for resource quantity between passed param value +/- 5% case EQUAL: - predicate = ((SearchPredicateFactory) thePathContext).range().field(valueFieldPath) - .between(range.getLeft().doubleValue(), range.getRight().doubleValue()); + predicate = ((SearchPredicateFactory) thePathContext) + .range() + .field(valueFieldPath) + .between(range.getLeft().doubleValue(), range.getRight().doubleValue()); break; - // searches for resource quantity > param value + // searches for resource quantity > param value case GREATERTHAN: - case STARTS_AFTER: // treated as GREATERTHAN because search doesn't handle ranges - predicate = ((SearchPredicateFactory) thePathContext).range().field(valueFieldPath).greaterThan(value); + case STARTS_AFTER: // treated as GREATERTHAN because search doesn't handle ranges + predicate = ((SearchPredicateFactory) thePathContext) + .range() + .field(valueFieldPath) + .greaterThan(value); break; - // searches for resource quantity not < param value + // searches for resource quantity not < param value case GREATERTHAN_OR_EQUALS: - predicate = ((SearchPredicateFactory) thePathContext).range().field(valueFieldPath).atLeast(value); + predicate = ((SearchPredicateFactory) thePathContext) + .range() + .field(valueFieldPath) + .atLeast(value); break; - // searches for resource quantity < param value + // searches for resource quantity < param value case LESSTHAN: - case ENDS_BEFORE: // treated as LESSTHAN because search doesn't handle ranges - predicate = ((SearchPredicateFactory) thePathContext).range().field(valueFieldPath).lessThan(value); + case ENDS_BEFORE: // treated as LESSTHAN because search doesn't handle ranges + predicate = ((SearchPredicateFactory) thePathContext) + .range() + .field(valueFieldPath) + .lessThan(value); break; - // searches for resource quantity not > param value + // searches for resource quantity not > param value case LESSTHAN_OR_EQUALS: - predicate = ((SearchPredicateFactory) thePathContext).range().field(valueFieldPath).atMost(value); + predicate = ((SearchPredicateFactory) thePathContext) + .range() + .field(valueFieldPath) + .atMost(value); break; - // NOT_EQUAL: searches for resource quantity not between passed param value +/- 5% + // NOT_EQUAL: searches for resource quantity not between passed param value +/- 5% case NOT_EQUAL: - RangePredicateOptionsStep negRange = ((SearchPredicateFactory) thePathContext).range() - .field(valueFieldPath).between(range.getLeft().doubleValue(), range.getRight().doubleValue()); + RangePredicateOptionsStep negRange = ((SearchPredicateFactory) thePathContext) + .range() + .field(valueFieldPath) + .between(range.getLeft().doubleValue(), range.getRight().doubleValue()); predicate = ((SearchPredicateFactory) thePathContext).bool().mustNot(negRange); break; } @@ -629,8 +692,8 @@ public class ExtendedHSearchClauseBuilder { return predicate; } - - public void addUriUnmodifiedSearch(String theParamName, List> 
theUriUnmodifiedAndOrTerms) { + public void addUriUnmodifiedSearch( + String theParamName, List> theUriUnmodifiedAndOrTerms) { PathContext spContext = this.contextForFlatSP(theParamName); for (List nextOrList : theUriUnmodifiedAndOrTerms) { @@ -641,24 +704,25 @@ public class ExtendedHSearchClauseBuilder { } private PredicateFinalStep buildURIClause(List theOrList, PathContext thePathContext) { - List orTerms = theOrList.stream() - .map(p -> ((UriParam) p).getValue()) - .collect(Collectors.toList()); + List orTerms = + theOrList.stream().map(p -> ((UriParam) p).getValue()).collect(Collectors.toList()); - return ((SearchPredicateFactory) thePathContext).terms() - .field(joinPath(thePathContext.getContextPath(), URI_VALUE)) - .matchingAny(orTerms); + return ((SearchPredicateFactory) thePathContext) + .terms() + .field(joinPath(thePathContext.getContextPath(), URI_VALUE)) + .matchingAny(orTerms); } - public void addNumberUnmodifiedSearch(String theParamName, List> theNumberUnmodifiedAndOrTerms) { + public void addNumberUnmodifiedSearch( + String theParamName, List> theNumberUnmodifiedAndOrTerms) { PathContext pathContext = contextForFlatSP(theParamName); String fieldPath = joinPath(SEARCH_PARAM_ROOT, theParamName, NUMBER_VALUE); for (List nextOrList : theNumberUnmodifiedAndOrTerms) { List orTerms = nextOrList.stream() - .map(NumberParam.class::cast) - .map(orTerm -> buildNumericClause(fieldPath, orTerm.getPrefix(), orTerm.getValue(), pathContext)) - .collect(Collectors.toList()); + .map(NumberParam.class::cast) + .map(orTerm -> buildNumericClause(fieldPath, orTerm.getPrefix(), orTerm.getValue(), pathContext)) + .collect(Collectors.toList()); myRootClause.must(pathContext.orPredicateOrSingle(orTerms)); } @@ -667,28 +731,30 @@ public class ExtendedHSearchClauseBuilder { private PredicateFinalStep buildNumericClause(IQueryParameterType theValue, PathContext thePathContext) { NumberParam p = (NumberParam) theValue; - return buildNumericClause(joinPath(thePathContext.getContextPath(), NUMBER_VALUE), p.getPrefix(), p.getValue(), thePathContext); + return buildNumericClause( + joinPath(thePathContext.getContextPath(), NUMBER_VALUE), p.getPrefix(), p.getValue(), thePathContext); } - public void addCompositeUnmodifiedSearch(RuntimeSearchParam theSearchParam, List theSubSearchParams, List> theCompositeAndOrTerms) { + public void addCompositeUnmodifiedSearch( + RuntimeSearchParam theSearchParam, + List theSubSearchParams, + List> theCompositeAndOrTerms) { for (List nextOrList : theCompositeAndOrTerms) { // The index data for each extracted element is stored in a separate nested HSearch document. // Create a nested parent node for all component predicates. // Each can share this nested beacuse all nested docs share a parent id. 
- PredicateFinalStep nestedClause = myRootContext.buildPredicateInNestedContext( - theSearchParam.getName(), - nestedContext -> { - List orClauses = - nextOrList.stream() - .map(term -> computeCompositeTermClause(theSearchParam, theSubSearchParams, (CompositeParam) term, nestedContext)) - .collect(Collectors.toList()); + PredicateFinalStep nestedClause = + myRootContext.buildPredicateInNestedContext(theSearchParam.getName(), nestedContext -> { + List orClauses = nextOrList.stream() + .map(term -> computeCompositeTermClause( + theSearchParam, theSubSearchParams, (CompositeParam) term, nestedContext)) + .collect(Collectors.toList()); - return nestedContext.orPredicateOrSingle(orClauses); - }); + return nestedContext.orPredicateOrSingle(orClauses); + }); myRootClause.must(nestedClause); - } } @@ -700,13 +766,26 @@ public class ExtendedHSearchClauseBuilder { * @param theCompositeQueryParam the query param values * @param theCompositeContext the root of the nested SP query. */ - private PredicateFinalStep computeCompositeTermClause(RuntimeSearchParam theSearchParam, List theSubSearchParams, CompositeParam theCompositeQueryParam, PathContext theCompositeContext) { + private PredicateFinalStep computeCompositeTermClause( + RuntimeSearchParam theSearchParam, + List theSubSearchParams, + CompositeParam theCompositeQueryParam, + PathContext theCompositeContext) { Validate.notNull(theSearchParam); Validate.notNull(theSubSearchParams); Validate.notNull(theCompositeQueryParam); - Validate.isTrue(theSubSearchParams.size() == 2, "Hapi only supports composite search parameters with 2 components. %s %d", theSearchParam.getName(), theSubSearchParams.size()); + Validate.isTrue( + theSubSearchParams.size() == 2, + "Hapi only supports composite search parameters with 2 components. %s %d", + theSearchParam.getName(), + theSubSearchParams.size()); List values = theCompositeQueryParam.getValues(); - Validate.isTrue(theSubSearchParams.size() == values.size(), "Different number of query components than defined. %s %d %d", theSearchParam.getName(), theSubSearchParams.size(), values.size()); + Validate.isTrue( + theSubSearchParams.size() == values.size(), + "Different number of query components than defined. %s %d %d", + theSearchParam.getName(), + theSubSearchParams.size(), + values.size()); // The index data for each extracted element is stored in a separate nested HSearch document. 
@@ -740,14 +819,17 @@ public class ExtendedHSearchClauseBuilder { default: break; - } - Validate.notNull(subMatch, "Unsupported composite type in %s: %s %s", theSearchParam.getName(), component.getName(), component.getParamType()); + Validate.notNull( + subMatch, + "Unsupported composite type in %s: %s %s", + theSearchParam.getName(), + component.getName(), + component.getParamType()); compositeClause.must(subMatch); } return compositeClause; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ExtendedHSearchIndexExtractor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ExtendedHSearchIndexExtractor.java index 5694e6a422c..8cb8ed8d9b0 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ExtendedHSearchIndexExtractor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ExtendedHSearchIndexExtractor.java @@ -40,13 +40,13 @@ import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseCoding; import org.hl7.fhir.instance.model.api.IBaseResource; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -62,8 +62,11 @@ public class ExtendedHSearchIndexExtractor { private final ResourceSearchParams myParams; private final ISearchParamExtractor mySearchParamExtractor; - public ExtendedHSearchIndexExtractor(JpaStorageSettings theJpaStorageSettings, FhirContext theContext, ResourceSearchParams theActiveParams, - ISearchParamExtractor theSearchParamExtractor) { + public ExtendedHSearchIndexExtractor( + JpaStorageSettings theJpaStorageSettings, + FhirContext theContext, + ResourceSearchParams theActiveParams, + ISearchParamExtractor theSearchParamExtractor) { myJpaStorageSettings = theJpaStorageSettings; myContext = theContext; myParams = theActiveParams; @@ -84,37 +87,37 @@ public class ExtendedHSearchIndexExtractor { extractAutocompleteTokens(theResource, retVal); theNewParams.myStringParams.stream() - .filter(nextParam -> !nextParam.isMissing()) - .forEach(nextParam -> retVal.addStringIndexData(nextParam.getParamName(), nextParam.getValueExact())); + .filter(nextParam -> !nextParam.isMissing()) + .forEach(nextParam -> retVal.addStringIndexData(nextParam.getParamName(), nextParam.getValueExact())); theNewParams.myTokenParams.stream() - .filter(nextParam -> !nextParam.isMissing()) - .forEach(nextParam -> retVal.addTokenIndexDataIfNotPresent(nextParam.getParamName(), nextParam.getSystem(), nextParam.getValue())); + .filter(nextParam -> !nextParam.isMissing()) + .forEach(nextParam -> retVal.addTokenIndexDataIfNotPresent( + nextParam.getParamName(), nextParam.getSystem(), nextParam.getValue())); theNewParams.myNumberParams.stream() - .filter(nextParam -> !nextParam.isMissing()) - .forEach(nextParam -> retVal.addNumberIndexDataIfNotPresent(nextParam.getParamName(), nextParam.getValue())); + .filter(nextParam -> !nextParam.isMissing()) + .forEach(nextParam -> + retVal.addNumberIndexDataIfNotPresent(nextParam.getParamName(), nextParam.getValue())); theNewParams.myDateParams.stream() - .filter(nextParam -> !nextParam.isMissing()) - .forEach(nextParam -> retVal.addDateIndexData(nextParam.getParamName(), convertDate(nextParam))); + .filter(nextParam -> !nextParam.isMissing()) + .forEach(nextParam -> 
retVal.addDateIndexData(nextParam.getParamName(), convertDate(nextParam))); theNewParams.myQuantityParams.stream() - .filter(nextParam -> !nextParam.isMissing()) - .forEach(nextParam -> retVal.addQuantityIndexData(nextParam.getParamName(), convertQuantity(nextParam))); + .filter(nextParam -> !nextParam.isMissing()) + .forEach( + nextParam -> retVal.addQuantityIndexData(nextParam.getParamName(), convertQuantity(nextParam))); theNewParams.myUriParams.stream() - .filter(nextParam -> !nextParam.isMissing()) - .forEach(nextParam -> retVal.addUriIndexData(nextParam.getParamName(), nextParam.getUri())); + .filter(nextParam -> !nextParam.isMissing()) + .forEach(nextParam -> retVal.addUriIndexData(nextParam.getParamName(), nextParam.getUri())); - theResource.getMeta().getTag().forEach(tag -> - retVal.addTokenIndexData("_tag", tag)); + theResource.getMeta().getTag().forEach(tag -> retVal.addTokenIndexData("_tag", tag)); - theResource.getMeta().getSecurity().forEach(sec -> - retVal.addTokenIndexData("_security", sec)); + theResource.getMeta().getSecurity().forEach(sec -> retVal.addTokenIndexData("_security", sec)); - theResource.getMeta().getProfile().forEach(prof -> - retVal.addUriIndexData("_profile", prof.getValue())); + theResource.getMeta().getProfile().forEach(prof -> retVal.addUriIndexData("_profile", prof.getValue())); String source = MetaUtil.getSource(myContext, theResource.getMeta()); if (isNotBlank(source)) { @@ -122,16 +125,20 @@ public class ExtendedHSearchIndexExtractor { } theNewParams.myCompositeParams.forEach(nextParam -> - retVal.addCompositeIndexData(nextParam.getSearchParamName(), buildCompositeIndexData(nextParam))); - + retVal.addCompositeIndexData(nextParam.getSearchParamName(), buildCompositeIndexData(nextParam))); if (theResource.getMeta().getLastUpdated() != null) { - int ordinal = ResourceIndexedSearchParamDate.calculateOrdinalValue(theResource.getMeta().getLastUpdated()).intValue(); - retVal.addDateIndexData("_lastUpdated", theResource.getMeta().getLastUpdated(), ordinal, - theResource.getMeta().getLastUpdated(), ordinal); + int ordinal = ResourceIndexedSearchParamDate.calculateOrdinalValue( + theResource.getMeta().getLastUpdated()) + .intValue(); + retVal.addDateIndexData( + "_lastUpdated", + theResource.getMeta().getLastUpdated(), + ordinal, + theResource.getMeta().getLastUpdated(), + ordinal); } - if (!theNewParams.myLinks.isEmpty()) { // awkwardly, links are indexed by jsonpath, not by search param. 
@@ -145,8 +152,8 @@ public class ExtendedHSearchIndexExtractor { nextPath = nextPath.toLowerCase(Locale.ROOT); linkPathToParamName - .computeIfAbsent(nextPath, (p) -> new ArrayList<>()) - .add(nextParamName); + .computeIfAbsent(nextPath, (p) -> new ArrayList<>()) + .add(nextParamName); } } @@ -159,7 +166,8 @@ public class ExtendedHSearchIndexExtractor { // Case 1: Resource Type and Resource ID is known // Case 2: Resource is unknown and referred by canonical url reference if (!Strings.isNullOrEmpty(nextLink.getTargetResourceId())) { - qualifiedTargetResourceId = nextLink.getTargetResourceType() + "/" + nextLink.getTargetResourceId(); + qualifiedTargetResourceId = + nextLink.getTargetResourceType() + "/" + nextLink.getTargetResourceId(); } else if (!Strings.isNullOrEmpty(nextLink.getTargetResourceUrl())) { qualifiedTargetResourceId = nextLink.getTargetResourceUrl(); } @@ -172,7 +180,8 @@ public class ExtendedHSearchIndexExtractor { } @Nonnull - private CompositeSearchIndexData buildCompositeIndexData(ResourceIndexedSearchParamComposite theSearchParamComposite) { + private CompositeSearchIndexData buildCompositeIndexData( + ResourceIndexedSearchParamComposite theSearchParamComposite) { return new HSearchCompositeSearchIndexDataImpl(theSearchParamComposite); } @@ -182,11 +191,11 @@ public class ExtendedHSearchIndexExtractor { private void extractAutocompleteTokens(IBaseResource theResource, ExtendedHSearchIndexData theRetVal) { // we need to re-index token params to match up display with codes. myParams.values().stream() - .filter(p -> p.getParamType() == RestSearchParameterTypeEnum.TOKEN) - // TODO it would be nice to reuse TokenExtractor - .forEach(p -> mySearchParamExtractor.extractValues(p.getPath(), theResource) - .forEach(nextValue -> indexTokenValue(theRetVal, p, nextValue) - )); + .filter(p -> p.getParamType() == RestSearchParameterTypeEnum.TOKEN) + // TODO it would be nice to reuse TokenExtractor + .forEach(p -> mySearchParamExtractor + .extractValues(p.getPath(), theResource) + .forEach(nextValue -> indexTokenValue(theRetVal, p, nextValue))); } private void indexTokenValue(ExtendedHSearchIndexData theRetVal, RuntimeSearchParam p, IBase nextValue) { @@ -199,15 +208,15 @@ public class ExtendedHSearchIndexExtractor { case "Coding": addToken_Coding(theRetVal, spName, (IBaseCoding) nextValue); break; - // TODO share this with TokenExtractor and introduce a ITokenIndexer interface. - // Ignore unknown types for now. - // This is just for autocomplete, and we are focused on Observation.code, category, combo-code, etc. -// case "Identifier": -// mySearchParamExtractor.addToken_Identifier(myResourceTypeName, params, searchParam, value); -// break; -// case "ContactPoint": -// mySearchParamExtractor.addToken_ContactPoint(myResourceTypeName, params, searchParam, value); -// break; + // TODO share this with TokenExtractor and introduce a ITokenIndexer interface. + // Ignore unknown types for now. + // This is just for autocomplete, and we are focused on Observation.code, category, combo-code, etc. 
+ // case "Identifier": + // mySearchParamExtractor.addToken_Identifier(myResourceTypeName, params, searchParam, value); + // break; + // case "ContactPoint": + // mySearchParamExtractor.addToken_ContactPoint(myResourceTypeName, params, searchParam, value); + // break; default: break; } @@ -226,12 +235,18 @@ public class ExtendedHSearchIndexExtractor { @Nonnull public static DateSearchIndexData convertDate(ResourceIndexedSearchParamDate nextParam) { - return new DateSearchIndexData(nextParam.getValueLow(), nextParam.getValueLowDateOrdinal(), nextParam.getValueHigh(), nextParam.getValueHighDateOrdinal()); + return new DateSearchIndexData( + nextParam.getValueLow(), + nextParam.getValueLowDateOrdinal(), + nextParam.getValueHigh(), + nextParam.getValueHighDateOrdinal()); } @Nonnull public static QuantitySearchIndexData convertQuantity(ResourceIndexedSearchParamQuantity nextParam) { - return new QuantitySearchIndexData(nextParam.getUnits(), nextParam.getSystem(), nextParam.getValue().doubleValue()); + return new QuantitySearchIndexData( + nextParam.getUnits(), + nextParam.getSystem(), + nextParam.getValue().doubleValue()); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ExtendedHSearchResourceProjection.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ExtendedHSearchResourceProjection.java index ea07da24fad..99d8c010bb6 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ExtendedHSearchResourceProjection.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ExtendedHSearchResourceProjection.java @@ -23,7 +23,6 @@ import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.parser.IParser; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBaseResource; /** diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ExtendedHSearchSearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ExtendedHSearchSearchBuilder.java index 1e200074e57..ae806e9f0bc 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ExtendedHSearchSearchBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ExtendedHSearchSearchBuilder.java @@ -24,8 +24,8 @@ import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil; import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.rest.api.Constants; -import ca.uhn.fhir.rest.param.CompositeParam; import ca.uhn.fhir.rest.api.SearchContainedModeEnum; +import ca.uhn.fhir.rest.param.CompositeParam; import ca.uhn.fhir.rest.param.DateParam; import ca.uhn.fhir.rest.param.NumberParam; import ca.uhn.fhir.rest.param.QuantityParam; @@ -39,13 +39,14 @@ import com.google.common.collect.Sets; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.BooleanUtils; import org.apache.commons.lang3.StringUtils; -import static ca.uhn.fhir.rest.api.Constants.PARAMQUALIFIER_MISSING; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Set; +import static ca.uhn.fhir.rest.api.Constants.PARAMQUALIFIER_MISSING; + /** * Search builder for HSearch for token, string, and reference parameters. */ @@ -61,39 +62,42 @@ public class ExtendedHSearchSearchBuilder { * Are any of the queries supported by our indexing? 
*/ public boolean isSupportsSomeOf(SearchParameterMap myParams) { - return - myParams.getSort() != null || - myParams.getLastUpdated() != null || - myParams.entrySet().stream() - .filter(e -> !ourUnsafeSearchParmeters.contains(e.getKey())) - // each and clause may have a different modifier, so split down to the ORs - .flatMap(andList -> andList.getValue().stream()) - .flatMap(Collection::stream) - .anyMatch(this::isParamTypeSupported); + return myParams.getSort() != null + || myParams.getLastUpdated() != null + || myParams.entrySet().stream() + .filter(e -> !ourUnsafeSearchParmeters.contains(e.getKey())) + // each and clause may have a different modifier, so split down to the ORs + .flatMap(andList -> andList.getValue().stream()) + .flatMap(Collection::stream) + .anyMatch(this::isParamTypeSupported); } /** * Are all the queries supported by our indexing? */ public boolean isSupportsAllOf(SearchParameterMap myParams) { - return - CollectionUtils.isEmpty( myParams.getRevIncludes() ) && // ??? - CollectionUtils.isEmpty( myParams.getIncludes() ) && // ??? - myParams.getEverythingMode() == null && // ??? - BooleanUtils.isFalse( myParams.isDeleteExpunge() ) && // ??? + return CollectionUtils.isEmpty(myParams.getRevIncludes()) + && // ??? + CollectionUtils.isEmpty(myParams.getIncludes()) + && // ??? + myParams.getEverythingMode() == null + && // ??? + BooleanUtils.isFalse(myParams.isDeleteExpunge()) + && // ??? - // not yet supported in HSearch - myParams.getNearDistanceParam() == null && // ??? + // not yet supported in HSearch + myParams.getNearDistanceParam() == null + && // ??? - // not yet supported in HSearch - myParams.getSearchContainedMode() == SearchContainedModeEnum.FALSE && // ??? - - myParams.entrySet().stream() - .filter(e -> !ourUnsafeSearchParmeters.contains(e.getKey())) - // each and clause may have a different modifier, so split down to the ORs - .flatMap(andList -> andList.getValue().stream()) - .flatMap(Collection::stream) - .allMatch(this::isParamTypeSupported); + // not yet supported in HSearch + myParams.getSearchContainedMode() == SearchContainedModeEnum.FALSE + && // ??? + myParams.entrySet().stream() + .filter(e -> !ourUnsafeSearchParmeters.contains(e.getKey())) + // each and clause may have a different modifier, so split down to the ORs + .flatMap(andList -> andList.getValue().stream()) + .flatMap(Collection::stream) + .allMatch(this::isParamTypeSupported); } /** @@ -114,7 +118,7 @@ public class ExtendedHSearchSearchBuilder { } } else if (param instanceof StringParam) { switch (modifier) { - // we support string:text, string:contains, string:exact, and unmodified string. + // we support string:text, string:contains, string:exact, and unmodified string. case Constants.PARAMQUALIFIER_STRING_TEXT: case Constants.PARAMQUALIFIER_STRING_EXACT: case Constants.PARAMQUALIFIER_STRING_CONTAINS: @@ -127,7 +131,7 @@ public class ExtendedHSearchSearchBuilder { return modifier.equals(EMPTY_MODIFIER); } else if (param instanceof CompositeParam) { - switch(modifier) { + switch (modifier) { case PARAMQUALIFIER_MISSING: return false; default: @@ -135,7 +139,7 @@ public class ExtendedHSearchSearchBuilder { } } else if (param instanceof ReferenceParam) { - //We cannot search by chain. + // We cannot search by chain. 
if (((ReferenceParam) param).getChain() != null) { return false; } @@ -161,7 +165,11 @@ public class ExtendedHSearchSearchBuilder { } } - public void addAndConsumeAdvancedQueryClauses(ExtendedHSearchClauseBuilder builder, String theResourceType, SearchParameterMap theParams, ISearchParamRegistry theSearchParamRegistry) { + public void addAndConsumeAdvancedQueryClauses( + ExtendedHSearchClauseBuilder builder, + String theResourceType, + SearchParameterMap theParams, + ISearchParamRegistry theSearchParamRegistry) { // copy the keys to avoid concurrent modification error ArrayList paramNames = compileParamNames(theParams); for (String nextParam : paramNames) { @@ -177,21 +185,26 @@ public class ExtendedHSearchSearchBuilder { // NOTE - keep this in sync with isParamSupported() above. switch (activeParam.getParamType()) { case TOKEN: - List> tokenTextAndOrTerms = theParams.removeByNameAndModifier(nextParam, Constants.PARAMQUALIFIER_TOKEN_TEXT); + List> tokenTextAndOrTerms = + theParams.removeByNameAndModifier(nextParam, Constants.PARAMQUALIFIER_TOKEN_TEXT); builder.addStringTextSearch(nextParam, tokenTextAndOrTerms); - List> tokenUnmodifiedAndOrTerms = theParams.removeByNameUnmodified(nextParam); + List> tokenUnmodifiedAndOrTerms = + theParams.removeByNameUnmodified(nextParam); builder.addTokenUnmodifiedSearch(nextParam, tokenUnmodifiedAndOrTerms); break; case STRING: - List> stringTextAndOrTerms = theParams.removeByNameAndModifier(nextParam, Constants.PARAMQUALIFIER_TOKEN_TEXT); + List> stringTextAndOrTerms = + theParams.removeByNameAndModifier(nextParam, Constants.PARAMQUALIFIER_TOKEN_TEXT); builder.addStringTextSearch(nextParam, stringTextAndOrTerms); - List> stringExactAndOrTerms = theParams.removeByNameAndModifier(nextParam, Constants.PARAMQUALIFIER_STRING_EXACT); + List> stringExactAndOrTerms = + theParams.removeByNameAndModifier(nextParam, Constants.PARAMQUALIFIER_STRING_EXACT); builder.addStringExactSearch(nextParam, stringExactAndOrTerms); - List> stringContainsAndOrTerms = theParams.removeByNameAndModifier(nextParam, Constants.PARAMQUALIFIER_STRING_CONTAINS); + List> stringContainsAndOrTerms = + theParams.removeByNameAndModifier(nextParam, Constants.PARAMQUALIFIER_STRING_CONTAINS); builder.addStringContainsSearch(nextParam, stringContainsAndOrTerms); List> stringAndOrTerms = theParams.removeByNameUnmodified(nextParam); @@ -210,19 +223,23 @@ public class ExtendedHSearchSearchBuilder { case DATE: List> dateAndOrTerms = nextParam.equalsIgnoreCase("_lastupdated") - ? getLastUpdatedAndOrList(theParams) : theParams.removeByNameUnmodified(nextParam); + ? getLastUpdatedAndOrList(theParams) + : theParams.removeByNameUnmodified(nextParam); builder.addDateUnmodifiedSearch(nextParam, dateAndOrTerms); break; case COMPOSITE: List> compositeAndOrTerms = theParams.removeByNameUnmodified(nextParam); - // RuntimeSearchParam only points to the subs by reference. Resolve here while we have ISearchParamRegistry - List subSearchParams = JpaParamUtil.resolveCompositeComponentsDeclaredOrder(theSearchParamRegistry, activeParam); + // RuntimeSearchParam only points to the subs by reference. 
Resolve here while we have + // ISearchParamRegistry + List subSearchParams = + JpaParamUtil.resolveCompositeComponentsDeclaredOrder(theSearchParamRegistry, activeParam); builder.addCompositeUnmodifiedSearch(activeParam, subSearchParams, compositeAndOrTerms); break; case URI: - List> uriUnmodifiedAndOrTerms = theParams.removeByNameUnmodified(nextParam); + List> uriUnmodifiedAndOrTerms = + theParams.removeByNameUnmodified(nextParam); builder.addUriUnmodifiedSearch(nextParam, uriUnmodifiedAndOrTerms); break; @@ -237,13 +254,12 @@ public class ExtendedHSearchSearchBuilder { } } - private List> getLastUpdatedAndOrList(SearchParameterMap theParams) { DateParam activeBound = theParams.getLastUpdated().getLowerBound() != null - ? theParams.getLastUpdated().getLowerBound() - : theParams.getLastUpdated().getUpperBound(); + ? theParams.getLastUpdated().getLowerBound() + : theParams.getLastUpdated().getUpperBound(); - List> result = List.of( List.of(activeBound) ); + List> result = List.of(List.of(activeBound)); // indicate parameter was processed theParams.setLastUpdated(null); @@ -251,7 +267,6 @@ public class ExtendedHSearchSearchBuilder { return result; } - /** * Param name list is not only the params.keySet, but also the "special" parameters extracted from input * (as _lastUpdated when the input myLastUpdated field is not null, etc). @@ -265,5 +280,4 @@ public class ExtendedHSearchSearchBuilder { return nameList; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/HSearchCompositeSearchIndexDataImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/HSearchCompositeSearchIndexDataImpl.java index 7e8bf9ed86e..981b3aa9a34 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/HSearchCompositeSearchIndexDataImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/HSearchCompositeSearchIndexDataImpl.java @@ -46,7 +46,6 @@ class HSearchCompositeSearchIndexDataImpl implements CompositeSearchIndexData { mySearchParamComposite = theSearchParamComposite; } - /** * Write a nested index document for this composite. * We use a nested document to support correlation queries on the same parent element for @@ -54,42 +53,42 @@ class HSearchCompositeSearchIndexDataImpl implements CompositeSearchIndexData { * * Example for component-code-value-quantity, which composes * component-code and component-value-quantity: -
    - { "nsp: {
    -	 "component-code-value-quantity": [
    -		 {
    -			 "component-code": {
    -				 "token": {
    -					 "code": "8480-6",
    -					 "system": "http://loinc.org",
    -					 "code-system": "http://loinc.org|8480-6"
    -				 }
    -			 },
    -			 "component-value-quantity": {
    -				 "quantity": {
    -					 "code": "mmHg",
    -					 "value": 60.0
    -				 }
    -	 		 }
    -		 },
    -		 {
    -			 "component-code": {
    -				 "token": {
    -					 "code": "3421-5",
    -					 "system": "http://loinc.org",
    -					 "code-system": "http://loinc.org|3421-5"
    -				 }
    -			 },
    -			 "component-value-quantity": {
    -				 "quantity": {
    -					 "code": "mmHg",
    -					 "value": 100.0
    -				 }
    -			 }
    -		 }
    -	 ]
    - }}
    - 
    + *
     +	 * { "nsp": {
    +	 * "component-code-value-quantity": [
    +	 * {
    +	 * "component-code": {
    +	 * "token": {
    +	 * "code": "8480-6",
    +	 * "system": "http://loinc.org",
    +	 * "code-system": "http://loinc.org|8480-6"
    +	 * }
    +	 * },
    +	 * "component-value-quantity": {
    +	 * "quantity": {
    +	 * "code": "mmHg",
    +	 * "value": 60.0
    +	 * }
    +	 * }
    +	 * },
    +	 * {
    +	 * "component-code": {
    +	 * "token": {
    +	 * "code": "3421-5",
    +	 * "system": "http://loinc.org",
    +	 * "code-system": "http://loinc.org|3421-5"
    +	 * }
    +	 * },
    +	 * "component-value-quantity": {
    +	 * "quantity": {
    +	 * "code": "mmHg",
    +	 * "value": 100.0
    +	 * }
    +	 * }
    +	 * }
    +	 * ]
    +	 * }}
    +	 * 
    * * @param theRoot our cache wrapper around the root HSearch DocumentElement */ @@ -97,9 +96,8 @@ class HSearchCompositeSearchIndexDataImpl implements CompositeSearchIndexData { public void writeIndexEntry(HSearchIndexWriter theHSearchIndexWriter, HSearchElementCache theRoot) { // optimization - An empty sub-component will never match. // Storing the rest only wastes resources - boolean hasAnEmptyComponent = - mySearchParamComposite.getComponents().stream() - .anyMatch(c->c.getParamIndexValues().isEmpty()); + boolean hasAnEmptyComponent = mySearchParamComposite.getComponents().stream() + .anyMatch(c -> c.getParamIndexValues().isEmpty()); if (hasAnEmptyComponent) { return; @@ -110,7 +108,6 @@ class HSearchCompositeSearchIndexDataImpl implements CompositeSearchIndexData { // we want to re-use the `token`, `quantity` nodes for multiple values. DocumentElement compositeRoot = nestedParamRoot.addObject(mySearchParamComposite.getSearchParamName()); - for (ResourceIndexedSearchParamComposite.Component subParam : mySearchParamComposite.getComponents()) { // Write the various index nodes. // Note: we don't support modifiers with composites, so we don't bother to index :of-type, :text, etc. @@ -119,58 +116,64 @@ class HSearchCompositeSearchIndexDataImpl implements CompositeSearchIndexData { case DATE: DocumentElement dateElement = subParamElement.addObject("dt"); subParam.getParamIndexValues().stream() - .flatMap(o->ObjectUtil.castIfInstanceof(o, ResourceIndexedSearchParamDate.class).stream()) - .map(ExtendedHSearchIndexExtractor::convertDate) - .forEach(d-> theHSearchIndexWriter.writeDateFields(dateElement, d)); + .flatMap(o -> ObjectUtil.castIfInstanceof(o, ResourceIndexedSearchParamDate.class).stream()) + .map(ExtendedHSearchIndexExtractor::convertDate) + .forEach(d -> theHSearchIndexWriter.writeDateFields(dateElement, d)); break; case QUANTITY: - DocumentElement quantityElement = subParamElement.addObject(HSearchIndexWriter.INDEX_TYPE_QUANTITY); + DocumentElement quantityElement = subParamElement.addObject(HSearchIndexWriter.INDEX_TYPE_QUANTITY); subParam.getParamIndexValues().stream() - .flatMap(o->ObjectUtil.castIfInstanceof(o, ResourceIndexedSearchParamQuantity.class).stream()) - .map(ExtendedHSearchIndexExtractor::convertQuantity) - .forEach(q-> theHSearchIndexWriter.writeQuantityFields(quantityElement, q)); + .flatMap(o -> + ObjectUtil.castIfInstanceof(o, ResourceIndexedSearchParamQuantity.class).stream()) + .map(ExtendedHSearchIndexExtractor::convertQuantity) + .forEach(q -> theHSearchIndexWriter.writeQuantityFields(quantityElement, q)); break; case STRING: - DocumentElement stringElement = subParamElement.addObject("string"); + DocumentElement stringElement = subParamElement.addObject("string"); subParam.getParamIndexValues().stream() - .flatMap(o->ObjectUtil.castIfInstanceof(o, ResourceIndexedSearchParamString.class).stream()) - .forEach(risps-> theHSearchIndexWriter.writeBasicStringFields(stringElement, risps.getValueExact())); + .flatMap(o -> + ObjectUtil.castIfInstanceof(o, ResourceIndexedSearchParamString.class).stream()) + .forEach(risps -> + theHSearchIndexWriter.writeBasicStringFields(stringElement, risps.getValueExact())); break; case TOKEN: - DocumentElement tokenElement = subParamElement.addObject("token"); + DocumentElement tokenElement = subParamElement.addObject("token"); subParam.getParamIndexValues().stream() - .flatMap(o->ObjectUtil.castIfInstanceof(o, ResourceIndexedSearchParamToken.class).stream()) - .forEach(rispt-> 
theHSearchIndexWriter.writeTokenFields(tokenElement, new Tag(rispt.getSystem(), rispt.getValue()))); + .flatMap( + o -> ObjectUtil.castIfInstanceof(o, ResourceIndexedSearchParamToken.class).stream()) + .forEach(rispt -> theHSearchIndexWriter.writeTokenFields( + tokenElement, new Tag(rispt.getSystem(), rispt.getValue()))); break; case URI: subParam.getParamIndexValues().stream() - .flatMap(o->ObjectUtil.castIfInstanceof(o, ResourceIndexedSearchParamUri.class).stream()) - .forEach(rispu->theHSearchIndexWriter.writeUriFields(subParamElement, rispu.getUri())); + .flatMap(o -> ObjectUtil.castIfInstanceof(o, ResourceIndexedSearchParamUri.class).stream()) + .forEach(rispu -> theHSearchIndexWriter.writeUriFields(subParamElement, rispu.getUri())); break; case NUMBER: subParam.getParamIndexValues().stream() - .flatMap(o->ObjectUtil.castIfInstanceof(o, ResourceIndexedSearchParamNumber.class).stream()) - .forEach(rispn->theHSearchIndexWriter.writeNumberFields(subParamElement, rispn.getValue())); + .flatMap(o -> + ObjectUtil.castIfInstanceof(o, ResourceIndexedSearchParamNumber.class).stream()) + .forEach(rispn -> + theHSearchIndexWriter.writeNumberFields(subParamElement, rispn.getValue())); break; case COMPOSITE: - assert false: "composite components can't be composite"; + assert false : "composite components can't be composite"; break; case REFERENCE: break; - // unsupported + // unsupported case SPECIAL: case HAS: break; } } } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/HSearchSortHelperImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/HSearchSortHelperImpl.java index 82e1b1b6447..46ce0d9007b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/HSearchSortHelperImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/HSearchSortHelperImpl.java @@ -37,9 +37,9 @@ import java.util.Optional; import java.util.stream.Collectors; import static ca.uhn.fhir.jpa.model.search.HSearchIndexWriter.IDX_STRING_LOWER; +import static ca.uhn.fhir.jpa.model.search.HSearchIndexWriter.INDEX_TYPE_QUANTITY; import static ca.uhn.fhir.jpa.model.search.HSearchIndexWriter.NESTED_SEARCH_PARAM_ROOT; import static ca.uhn.fhir.jpa.model.search.HSearchIndexWriter.NUMBER_VALUE; -import static ca.uhn.fhir.jpa.model.search.HSearchIndexWriter.INDEX_TYPE_QUANTITY; import static ca.uhn.fhir.jpa.model.search.HSearchIndexWriter.QTY_VALUE; import static ca.uhn.fhir.jpa.model.search.HSearchIndexWriter.QTY_VALUE_NORM; import static ca.uhn.fhir.jpa.model.search.HSearchIndexWriter.SEARCH_PARAM_ROOT; @@ -53,18 +53,19 @@ public class HSearchSortHelperImpl implements IHSearchSortHelper { /** Indicates which HSearch properties must be sorted for each RestSearchParameterTypeEnum **/ private Map> mySortPropertyListMap = Map.of( - RestSearchParameterTypeEnum.STRING, List.of(SEARCH_PARAM_ROOT + ".*.string." 
+ IDX_STRING_LOWER), - RestSearchParameterTypeEnum.TOKEN, List.of( - String.join(".", NESTED_SEARCH_PARAM_ROOT, "*", "token", "system"), - String.join(".", NESTED_SEARCH_PARAM_ROOT, "*", "token", "code") ), - RestSearchParameterTypeEnum.REFERENCE, List.of(SEARCH_PARAM_ROOT + ".*.reference.value"), - RestSearchParameterTypeEnum.DATE, List.of(SEARCH_PARAM_ROOT + ".*.dt.lower"), - RestSearchParameterTypeEnum.QUANTITY, List.of( - String.join(".", NESTED_SEARCH_PARAM_ROOT, "*", INDEX_TYPE_QUANTITY, QTY_VALUE_NORM), - String.join(".", NESTED_SEARCH_PARAM_ROOT, "*", INDEX_TYPE_QUANTITY, QTY_VALUE) ), - RestSearchParameterTypeEnum.URI, List.of(SEARCH_PARAM_ROOT + ".*." + URI_VALUE), - RestSearchParameterTypeEnum.NUMBER, List.of(SEARCH_PARAM_ROOT + ".*." + NUMBER_VALUE) - ); + RestSearchParameterTypeEnum.STRING, List.of(SEARCH_PARAM_ROOT + ".*.string." + IDX_STRING_LOWER), + RestSearchParameterTypeEnum.TOKEN, + List.of( + String.join(".", NESTED_SEARCH_PARAM_ROOT, "*", "token", "system"), + String.join(".", NESTED_SEARCH_PARAM_ROOT, "*", "token", "code")), + RestSearchParameterTypeEnum.REFERENCE, List.of(SEARCH_PARAM_ROOT + ".*.reference.value"), + RestSearchParameterTypeEnum.DATE, List.of(SEARCH_PARAM_ROOT + ".*.dt.lower"), + RestSearchParameterTypeEnum.QUANTITY, + List.of( + String.join(".", NESTED_SEARCH_PARAM_ROOT, "*", INDEX_TYPE_QUANTITY, QTY_VALUE_NORM), + String.join(".", NESTED_SEARCH_PARAM_ROOT, "*", INDEX_TYPE_QUANTITY, QTY_VALUE)), + RestSearchParameterTypeEnum.URI, List.of(SEARCH_PARAM_ROOT + ".*." + URI_VALUE), + RestSearchParameterTypeEnum.NUMBER, List.of(SEARCH_PARAM_ROOT + ".*." + NUMBER_VALUE)); private final ISearchParamRegistry mySearchParamRegistry; @@ -76,13 +77,14 @@ public class HSearchSortHelperImpl implements IHSearchSortHelper { * Builds and returns sort clauses for received sort parameters */ @Override - public SortFinalStep getSortClauses(SearchSortFactory theSortFactory, SortSpec theSortParams, String theResourceType) { + public SortFinalStep getSortClauses( + SearchSortFactory theSortFactory, SortSpec theSortParams, String theResourceType) { var sortStep = theSortFactory.composite(); Optional sortClauseOpt = getSortClause(theSortFactory, theSortParams, theResourceType); sortClauseOpt.ifPresent(sortStep::add); SortSpec nextParam = theSortParams.getChain(); - while( nextParam != null ) { + while (nextParam != null) { sortClauseOpt = getSortClause(theSortFactory, nextParam, theResourceType); sortClauseOpt.ifPresent(sortStep::add); @@ -92,7 +94,6 @@ public class HSearchSortHelperImpl implements IHSearchSortHelper { return sortStep; } - /** * Builds sort clauses for the received SortSpec by * _ finding out the corresponding RestSearchParameterTypeEnum for the parameter @@ -103,8 +104,8 @@ public class HSearchSortHelperImpl implements IHSearchSortHelper { Optional getSortClause(SearchSortFactory theF, SortSpec theSortSpec, String theResourceType) { Optional paramTypeOpt = getParamType(theResourceType, theSortSpec.getParamName()); if (paramTypeOpt.isEmpty()) { - ourLog.warn("Sprt parameter type couldn't be determined for parameter: " + theSortSpec.getParamName() + - ". Result will not be properly sorted"); + ourLog.warn("Sprt parameter type couldn't be determined for parameter: " + theSortSpec.getParamName() + + ". 
Result will not be properly sorted"); return Optional.empty(); } List paramFieldNameList = getSortPropertyList(paramTypeOpt.get(), theSortSpec.getParamName()); @@ -124,13 +125,12 @@ public class HSearchSortHelperImpl implements IHSearchSortHelper { } // field could have no value - sortFinalStep.add( sortStep.missing().last() ); + sortFinalStep.add(sortStep.missing().last()); } return Optional.of(sortFinalStep); } - /** * Finds out and returns the parameter type for each parameter name */ @@ -145,7 +145,6 @@ public class HSearchSortHelperImpl implements IHSearchSortHelper { return Optional.of(searchParam.getParamType()); } - /** * Retrieves the generic property names (* instead of parameter name) from the configured map and * replaces the '*' segment by theParamName before returning the final property name list @@ -154,11 +153,8 @@ public class HSearchSortHelperImpl implements IHSearchSortHelper { List getSortPropertyList(RestSearchParameterTypeEnum theParamType, String theParamName) { List paramFieldNameList = mySortPropertyListMap.get(theParamType); // replace '*' names segment by theParamName - return paramFieldNameList.stream().map(s -> s.replace("*", theParamName)).collect(Collectors.toList()); + return paramFieldNameList.stream() + .map(s -> s.replace("*", theParamName)) + .collect(Collectors.toList()); } - - - - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/IHSearchSortHelper.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/IHSearchSortHelper.java index a95d8a39e29..d8e7e6a187a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/IHSearchSortHelper.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/IHSearchSortHelper.java @@ -29,5 +29,4 @@ import org.hibernate.search.engine.search.sort.dsl.SortFinalStep; public interface IHSearchSortHelper { SortFinalStep getSortClauses(SearchSortFactory theSortFactory, SortSpec theSort, String theResourceType); - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/LastNAggregation.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/LastNAggregation.java index e31928baa81..8b2e463f980 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/LastNAggregation.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/LastNAggregation.java @@ -25,11 +25,11 @@ import com.google.gson.JsonObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; import java.util.stream.StreamSupport; +import javax.annotation.Nonnull; import static ca.uhn.fhir.jpa.model.search.HSearchIndexWriter.SEARCH_PARAM_ROOT; @@ -60,42 +60,43 @@ public class LastNAggregation { */ public JsonObject toAggregation() { JsonObject lastNAggregation = myJsonParser.fromJson( - "{" + - " \"terms\":{" + - " \"field\":\"" + SP_CODE_TOKEN_CODE_AND_SYSTEM + "\"," + - " \"size\":10000," + - " \"min_doc_count\":1" + - " }," + - " \"aggs\":{" + - " \"" + MOST_RECENT_EFFECTIVE_SUB_AGGREGATION + "\":{" + - " \"top_hits\":{" + - " \"size\":" + myLastNMax + "," + - " \"sort\":[" + - " {" + - " \"" + SP_DATE_DT_UPPER + "\":{" + - " \"order\":\"desc\"" + - " }" + - " }" + - " ]," + - " \"_source\":[" + - " \"myId\"" + - " ]" + - " }" + - " }" + - " }" + - "}", JsonObject.class); + "{" + " \"terms\":{" + + " \"field\":\"" + + SP_CODE_TOKEN_CODE_AND_SYSTEM + "\"," + " 
\"size\":10000," + + " \"min_doc_count\":1" + + " }," + + " \"aggs\":{" + + " \"" + + MOST_RECENT_EFFECTIVE_SUB_AGGREGATION + "\":{" + " \"top_hits\":{" + + " \"size\":" + + myLastNMax + "," + " \"sort\":[" + + " {" + + " \"" + + SP_DATE_DT_UPPER + "\":{" + " \"order\":\"desc\"" + + " }" + + " }" + + " ]," + + " \"_source\":[" + + " \"myId\"" + + " ]" + + " }" + + " }" + + " }" + + "}", + JsonObject.class); if (myAggregateOnSubject) { lastNAggregation = myJsonParser.fromJson( - "{" + - " \"terms\": {" + - " \"field\": \"" + SP_SUBJECT + "\"," + - " \"size\": 10000," + - " \"min_doc_count\": 1" + - " }," + - " \"aggs\": {" + - " \"" + GROUP_BY_CODE_SYSTEM_SUB_AGGREGATION + "\": " + myJsonParser.toJson(lastNAggregation) + "" + - " }" + - "}", JsonObject.class); + "{" + " \"terms\": {" + + " \"field\": \"" + + SP_SUBJECT + "\"," + " \"size\": 10000," + + " \"min_doc_count\": 1" + + " }," + + " \"aggs\": {" + + " \"" + + GROUP_BY_CODE_SYSTEM_SUB_AGGREGATION + "\": " + myJsonParser.toJson(lastNAggregation) + "" + + " }" + + "}", + JsonObject.class); } return lastNAggregation; } @@ -174,20 +175,25 @@ public class LastNAggregation { // was it grouped by subject? if (myAggregateOnSubject) { - resultBuckets = StreamSupport.stream(theAggregationResult.getAsJsonArray("buckets").spliterator(), false) - .map(bucket -> bucket.getAsJsonObject().getAsJsonObject(GROUP_BY_CODE_SYSTEM_SUB_AGGREGATION)); + resultBuckets = StreamSupport.stream( + theAggregationResult.getAsJsonArray("buckets").spliterator(), false) + .map(bucket -> bucket.getAsJsonObject().getAsJsonObject(GROUP_BY_CODE_SYSTEM_SUB_AGGREGATION)); } return resultBuckets - .flatMap(grouping -> StreamSupport.stream(grouping.getAsJsonArray("buckets").spliterator(), false)) - .flatMap(bucket -> { - JsonArray hits = bucket.getAsJsonObject() - .getAsJsonObject(MOST_RECENT_EFFECTIVE_SUB_AGGREGATION) - .getAsJsonObject("hits") - .getAsJsonArray("hits"); - return StreamSupport.stream(hits.spliterator(), false); - }) - .map(hit -> hit.getAsJsonObject().getAsJsonObject("_source").get("myId").getAsLong()) - .collect(Collectors.toList()); + .flatMap(grouping -> + StreamSupport.stream(grouping.getAsJsonArray("buckets").spliterator(), false)) + .flatMap(bucket -> { + JsonArray hits = bucket.getAsJsonObject() + .getAsJsonObject(MOST_RECENT_EFFECTIVE_SUB_AGGREGATION) + .getAsJsonObject("hits") + .getAsJsonArray("hits"); + return StreamSupport.stream(hits.spliterator(), false); + }) + .map(hit -> hit.getAsJsonObject() + .getAsJsonObject("_source") + .get("myId") + .getAsLong()) + .collect(Collectors.toList()); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/LastNOperation.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/LastNOperation.java index 2bd9e98ee84..1a61ba5f780 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/LastNOperation.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/LastNOperation.java @@ -20,8 +20,8 @@ package ca.uhn.fhir.jpa.dao.search; import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.jpa.model.entity.ResourceTable; +import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.searchparam.util.LastNParameterHelper; import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; @@ -42,7 +42,10 @@ public class LastNOperation { private final ISearchParamRegistry mySearchParamRegistry; private final 
ExtendedHSearchSearchBuilder myExtendedHSearchSearchBuilder = new ExtendedHSearchSearchBuilder(); - public LastNOperation(SearchSession theSession, FhirContext theFhirContext, StorageSettings theStorageSettings, + public LastNOperation( + SearchSession theSession, + FhirContext theFhirContext, + StorageSettings theStorageSettings, ISearchParamRegistry theSearchParamRegistry) { mySession = theSession; myFhirContext = theFhirContext; @@ -52,19 +55,23 @@ public class LastNOperation { public List executeLastN(SearchParameterMap theParams, Integer theMaximumResults) { boolean lastNGroupedBySubject = isLastNGroupedBySubject(theParams); - LastNAggregation lastNAggregation = new LastNAggregation(getLastNMaxParamValue(theParams), lastNGroupedBySubject); + LastNAggregation lastNAggregation = + new LastNAggregation(getLastNMaxParamValue(theParams), lastNGroupedBySubject); AggregationKey observationsByCodeKey = AggregationKey.of("lastN_aggregation"); - SearchResult result = mySession.search(ResourceTable.class) - .extension(ElasticsearchExtension.get()) - .where(f -> f.bool(b -> { - // Must match observation type - b.must(f.match().field("myResourceType").matching(OBSERVATION_RES_TYPE)); - ExtendedHSearchClauseBuilder builder = new ExtendedHSearchClauseBuilder(myFhirContext, myStorageSettings, b, f); - myExtendedHSearchSearchBuilder.addAndConsumeAdvancedQueryClauses(builder, OBSERVATION_RES_TYPE, theParams.clone(), mySearchParamRegistry); - })) - .aggregation(observationsByCodeKey, f -> f.fromJson(lastNAggregation.toAggregation())) - .fetch(0); + SearchResult result = mySession + .search(ResourceTable.class) + .extension(ElasticsearchExtension.get()) + .where(f -> f.bool(b -> { + // Must match observation type + b.must(f.match().field("myResourceType").matching(OBSERVATION_RES_TYPE)); + ExtendedHSearchClauseBuilder builder = + new ExtendedHSearchClauseBuilder(myFhirContext, myStorageSettings, b, f); + myExtendedHSearchSearchBuilder.addAndConsumeAdvancedQueryClauses( + builder, OBSERVATION_RES_TYPE, theParams.clone(), mySearchParamRegistry); + })) + .aggregation(observationsByCodeKey, f -> f.fromJson(lastNAggregation.toAggregation())) + .fetch(0); JsonObject resultAggregation = result.aggregation(observationsByCodeKey); List pidList = lastNAggregation.extractResourceIds(resultAggregation); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/PathContext.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/PathContext.java index 4c6eaad77e9..eb87d87dcfd 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/PathContext.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/PathContext.java @@ -19,15 +19,13 @@ */ package ca.uhn.fhir.jpa.dao.search; -import org.apache.commons.lang3.Validate; import org.hibernate.search.engine.search.predicate.dsl.*; import org.hibernate.search.util.common.annotation.Incubating; -import javax.annotation.Nonnull; import java.util.List; -import java.util.Objects; import java.util.function.Consumer; import java.util.function.Function; +import javax.annotation.Nonnull; import static ca.uhn.fhir.jpa.dao.search.ExtendedHSearchClauseBuilder.PATH_JOINER; import static ca.uhn.fhir.jpa.model.search.HSearchIndexWriter.NESTED_SEARCH_PARAM_ROOT; @@ -45,14 +43,16 @@ class PathContext implements SearchPredicateFactory { private final BooleanPredicateClausesStep myRootClause; private final SearchPredicateFactory myPredicateFactory; - PathContext(String thePrefix, BooleanPredicateClausesStep 
theClause, SearchPredicateFactory thePredicateFactory) { + PathContext( + String thePrefix, BooleanPredicateClausesStep theClause, SearchPredicateFactory thePredicateFactory) { myRootClause = theClause; myPredicateFactory = thePredicateFactory; myPathPrefix = thePrefix; } @Nonnull - static PathContext buildRootContext(BooleanPredicateClausesStep theRootClause, SearchPredicateFactory thePredicateFactory) { + static PathContext buildRootContext( + BooleanPredicateClausesStep theRootClause, SearchPredicateFactory thePredicateFactory) { return new PathContext("", theRootClause, thePredicateFactory); } @@ -69,14 +69,17 @@ class PathContext implements SearchPredicateFactory { return new PathContext(path, myRootClause, myPredicateFactory); } - public PredicateFinalStep buildPredicateInNestedContext(String theSubPath, Function f) { + public PredicateFinalStep buildPredicateInNestedContext( + String theSubPath, Function f) { String nestedRootPath = joinPath(NESTED_SEARCH_PARAM_ROOT, theSubPath); NestedPredicateOptionsStep orListPredicate = myPredicateFactory - .nested().objectField(nestedRootPath) - .nest(nestedRootPredicateFactory -> { - PathContext nestedCompositeSPContext = new PathContext(nestedRootPath, myRootClause, nestedRootPredicateFactory); - return f.apply(nestedCompositeSPContext); - }); + .nested() + .objectField(nestedRootPath) + .nest(nestedRootPredicateFactory -> { + PathContext nestedCompositeSPContext = + new PathContext(nestedRootPath, myRootClause, nestedRootPredicateFactory); + return f.apply(nestedCompositeSPContext); + }); return orListPredicate; } @@ -103,7 +106,6 @@ class PathContext implements SearchPredicateFactory { return finalClause; } - // implement SearchPredicateFactory public MatchAllPredicateOptionsStep matchAll() { @@ -185,7 +187,6 @@ class PathContext implements SearchPredicateFactory { return myPredicateFactory.toAbsolutePath(relativeFieldPath); } - // HSearch uses a dotted path // Some private static helpers that can be inlined. 
@Nonnull diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ResourceNotFoundInIndexException.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ResourceNotFoundInIndexException.java index 8b912b5a790..412200d3b32 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ResourceNotFoundInIndexException.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ResourceNotFoundInIndexException.java @@ -33,5 +33,4 @@ public class ResourceNotFoundInIndexException extends IllegalStateException { public ResourceNotFoundInIndexException(String theString, Throwable theCause) { super(theString, theCause); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/SearchScrollQueryExecutorAdaptor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/SearchScrollQueryExecutorAdaptor.java index 33104ff1cd6..94295e0b03e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/SearchScrollQueryExecutorAdaptor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/SearchScrollQueryExecutorAdaptor.java @@ -20,7 +20,6 @@ package ca.uhn.fhir.jpa.dao.search; import ca.uhn.fhir.jpa.search.builder.ISearchQueryExecutor; -import org.hibernate.search.engine.backend.common.DocumentReference; import org.hibernate.search.engine.search.query.SearchScroll; import org.hibernate.search.engine.search.query.SearchScrollResult; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/TermHelper.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/TermHelper.java index 974f2315e83..a40a9f6e590 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/TermHelper.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/TermHelper.java @@ -28,8 +28,7 @@ import java.util.stream.Collectors; public class TermHelper { /** characters which indicate the string parameter is a simple query string */ - private static final char[] simpleQuerySyntaxCharacters = new char[] { '+', '|', '"', '(', ')', '~' }; - + private static final char[] simpleQuerySyntaxCharacters = new char[] {'+', '|', '"', '(', ')', '~'}; /** * Each input set element is: @@ -38,40 +37,36 @@ public class TermHelper { */ public static Set makePrefixSearchTerm(Set theStringSet) { return theStringSet.stream() - .map(s -> isToLeftUntouched(s) || isQuoted(s) ? s : suffixTokensWithStar(s) ) - .collect(Collectors.toSet()); + .map(s -> isToLeftUntouched(s) || isQuoted(s) ? 
s : suffixTokensWithStar(s)) + .collect(Collectors.toSet()); } - private static String suffixTokensWithStar(String theStr) { StringBuilder sb = new StringBuilder(); - Arrays.stream(theStr.trim().split(" ")) - .forEach(s -> sb.append(s).append("* ")); + Arrays.stream(theStr.trim().split(" ")).forEach(s -> sb.append(s).append("* ")); return sb.toString().trim(); } - private static boolean isQuoted(String theS) { - return ( theS.startsWith("\"") && theS.endsWith("\"") ) || - ( theS.startsWith("'") && theS.endsWith("'") ); + return (theS.startsWith("\"") && theS.endsWith("\"")) || (theS.startsWith("'") && theS.endsWith("'")); } - /** * Returns true when the input string is recognized as Lucene Simple Query Syntax * @see "https://lucene.apache.org/core/8_11_2/queryparser/org/apache/lucene/queryparser/simple/SimpleQueryParser.html" */ static boolean isToLeftUntouched(String theString) { // remove backslashed * and - characters from string before checking, as those shouldn't be considered - if (theString.startsWith("-")) { return true; } // it is SimpleQuerySyntax + if (theString.startsWith("-")) { + return true; + } // it is SimpleQuerySyntax - if (theString.endsWith("*")) { return true; } // it is SimpleQuerySyntax + if (theString.endsWith("*")) { + return true; + } // it is SimpleQuerySyntax return StringUtils.containsAny(theString, simpleQuerySyntaxCharacters); } - - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/package-info.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/package-info.java index 28c76576c5d..5114004307e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/package-info.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/package-info.java @@ -53,4 +53,3 @@ * Activated by {@link ca.uhn.fhir.jpa.api.config.JpaStorageSettings#setAdvancedHSearchIndexing(boolean)}. 
*/ package ca.uhn.fhir.jpa.dao.search; - diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/DeleteConflictFinderService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/DeleteConflictFinderService.java index 50d94d7ece1..1e52953446b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/DeleteConflictFinderService.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/DeleteConflictFinderService.java @@ -23,11 +23,11 @@ import ca.uhn.fhir.jpa.model.entity.ResourceLink; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import org.springframework.stereotype.Service; +import java.util.List; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import javax.persistence.PersistenceContextType; import javax.persistence.TypedQuery; -import java.util.List; @Service public class DeleteConflictFinderService { @@ -35,7 +35,8 @@ public class DeleteConflictFinderService { protected EntityManager myEntityManager; List findConflicts(ResourceTable theEntity, int maxResults) { - TypedQuery query = myEntityManager.createQuery("SELECT l FROM ResourceLink l WHERE l.myTargetResourcePid = :target_pid", ResourceLink.class); + TypedQuery query = myEntityManager.createQuery( + "SELECT l FROM ResourceLink l WHERE l.myTargetResourcePid = :target_pid", ResourceLink.class); query.setParameter("target_pid", theEntity.getId()); query.setMaxResults(maxResults); return query.getResultList(); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/DeleteConflictOutcome.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/DeleteConflictOutcome.java index b37c2cf0927..cabd50e6e91 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/DeleteConflictOutcome.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/DeleteConflictOutcome.java @@ -34,5 +34,4 @@ public class DeleteConflictOutcome { myShouldRetryCount = theShouldRetryCount; return this; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/DeleteConflictService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/DeleteConflictService.java index 8edae514c5d..fbb5e95ff2d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/DeleteConflictService.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/DeleteConflictService.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.jpa.delete; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.api.HookParams; import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.interceptor.api.Pointcut; @@ -52,28 +52,47 @@ public class DeleteConflictService { public static final int FIRST_QUERY_RESULT_COUNT = 1; private static final Logger ourLog = LoggerFactory.getLogger(DeleteConflictService.class); public static int MAX_RETRY_ATTEMPTS = 10; - public static String MAX_RETRY_ATTEMPTS_EXCEEDED_MSG = "Requested delete operation stopped before all conflicts were handled. May need to increase the configured Maximum Delete Conflict Query Count."; + public static String MAX_RETRY_ATTEMPTS_EXCEEDED_MSG = + "Requested delete operation stopped before all conflicts were handled. 
May need to increase the configured Maximum Delete Conflict Query Count."; + @Autowired protected IResourceLinkDao myResourceLinkDao; + @Autowired protected IInterceptorBroadcaster myInterceptorBroadcaster; + @Autowired DeleteConflictFinderService myDeleteConflictFinderService; + @Autowired JpaStorageSettings myStorageSettings; + @Autowired private FhirContext myFhirContext; - private DeleteConflictOutcome findAndHandleConflicts(RequestDetails theRequest, DeleteConflictList theDeleteConflicts, ResourceTable theEntity, boolean theForValidate, int theMinQueryResultCount, TransactionDetails theTransactionDetails) { + private DeleteConflictOutcome findAndHandleConflicts( + RequestDetails theRequest, + DeleteConflictList theDeleteConflicts, + ResourceTable theEntity, + boolean theForValidate, + int theMinQueryResultCount, + TransactionDetails theTransactionDetails) { List resultList = myDeleteConflictFinderService.findConflicts(theEntity, theMinQueryResultCount); if (resultList.isEmpty()) { return null; } - return handleConflicts(theRequest, theDeleteConflicts, theEntity, theForValidate, resultList, theTransactionDetails); + return handleConflicts( + theRequest, theDeleteConflicts, theEntity, theForValidate, resultList, theTransactionDetails); } - private DeleteConflictOutcome handleConflicts(RequestDetails theRequest, DeleteConflictList theDeleteConflicts, ResourceTable theEntity, boolean theForValidate, List theResultList, TransactionDetails theTransactionDetails) { + private DeleteConflictOutcome handleConflicts( + RequestDetails theRequest, + DeleteConflictList theDeleteConflicts, + ResourceTable theEntity, + boolean theForValidate, + List theResultList, + TransactionDetails theTransactionDetails) { if (!myStorageSettings.isEnforceReferentialIntegrityOnDelete() && !theForValidate) { ourLog.debug("Deleting {} resource dependencies which can no longer be satisfied", theResultList.size()); myResourceLinkDao.deleteAll(theResultList); @@ -88,14 +107,16 @@ public class DeleteConflictService { // Notify Interceptors about pre-action call HookParams hooks = new HookParams() - .add(DeleteConflictList.class, theDeleteConflicts) - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest) - .add(TransactionDetails.class, theTransactionDetails); - return (DeleteConflictOutcome) CompositeInterceptorBroadcaster.doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRESTORAGE_DELETE_CONFLICTS, hooks); + .add(DeleteConflictList.class, theDeleteConflicts) + .add(RequestDetails.class, theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest) + .add(TransactionDetails.class, theTransactionDetails); + return (DeleteConflictOutcome) CompositeInterceptorBroadcaster.doCallHooksAndReturnObject( + myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRESTORAGE_DELETE_CONFLICTS, hooks); } - private void addConflictsToList(DeleteConflictList theDeleteConflicts, ResourceTable theEntity, List theResultList) { + private void addConflictsToList( + DeleteConflictList theDeleteConflicts, ResourceTable theEntity, List theResultList) { for (ResourceLink link : theResultList) { IdDt targetId = theEntity.getIdDt(); IdDt sourceId = link.getSourceResource().getIdDt(); @@ -110,30 +131,50 @@ public class DeleteConflictService { } } - public int validateOkToDelete(DeleteConflictList theDeleteConflicts, ResourceTable theEntity, boolean theForValidate, RequestDetails theRequest, TransactionDetails theTransactionDetails) { + public int 
validateOkToDelete( + DeleteConflictList theDeleteConflicts, + ResourceTable theEntity, + boolean theForValidate, + RequestDetails theRequest, + TransactionDetails theTransactionDetails) { // We want the list of resources that are marked to be the same list even as we // drill into conflict resolution stacks.. this allows us to not get caught by // circular references DeleteConflictList newConflicts = new DeleteConflictList(theDeleteConflicts); - // In most cases, there will be no hooks, and so we only need to check if there is at least FIRST_QUERY_RESULT_COUNT conflict and populate that. - // Only in the case where there is a hook do we need to go back and collect larger batches of conflicts for processing. + // In most cases, there will be no hooks, and so we only need to check if there is at least + // FIRST_QUERY_RESULT_COUNT conflict and populate that. + // Only in the case where there is a hook do we need to go back and collect larger batches of conflicts for + // processing. - DeleteConflictOutcome outcome = findAndHandleConflicts(theRequest, newConflicts, theEntity, theForValidate, FIRST_QUERY_RESULT_COUNT, theTransactionDetails); + DeleteConflictOutcome outcome = findAndHandleConflicts( + theRequest, newConflicts, theEntity, theForValidate, FIRST_QUERY_RESULT_COUNT, theTransactionDetails); int retryCount = 0; while (outcome != null) { int shouldRetryCount = Math.min(outcome.getShouldRetryCount(), MAX_RETRY_ATTEMPTS); if (!(retryCount < shouldRetryCount)) break; newConflicts = new DeleteConflictList(newConflicts); - outcome = findAndHandleConflicts(theRequest, newConflicts, theEntity, theForValidate, myStorageSettings.getMaximumDeleteConflictQueryCount(), theTransactionDetails); + outcome = findAndHandleConflicts( + theRequest, + newConflicts, + theEntity, + theForValidate, + myStorageSettings.getMaximumDeleteConflictQueryCount(), + theTransactionDetails); ++retryCount; } theDeleteConflicts.addAll(newConflicts); if (retryCount >= MAX_RETRY_ATTEMPTS && !theDeleteConflicts.isEmpty()) { IBaseOperationOutcome oo = OperationOutcomeUtil.newInstance(myFhirContext); - OperationOutcomeUtil.addIssue(myFhirContext, oo, BaseStorageDao.OO_SEVERITY_ERROR, MAX_RETRY_ATTEMPTS_EXCEEDED_MSG, null, "processing"); + OperationOutcomeUtil.addIssue( + myFhirContext, + oo, + BaseStorageDao.OO_SEVERITY_ERROR, + MAX_RETRY_ATTEMPTS_EXCEEDED_MSG, + null, + "processing"); throw new ResourceVersionConflictException(Msg.code(821) + MAX_RETRY_ATTEMPTS_EXCEEDED_MSG, oo); } return retryCount; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/ThreadSafeResourceDeleterSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/ThreadSafeResourceDeleterSvc.java index dd947f7ea58..e69ca58f3d4 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/ThreadSafeResourceDeleterSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/ThreadSafeResourceDeleterSvc.java @@ -56,7 +56,8 @@ public class ThreadSafeResourceDeleterSvc { public static final long RETRY_BACKOFF_PERIOD = 100L; public static final int RETRY_MAX_ATTEMPTS = 4; - private static final String REQ_DET_KEY_IN_NEW_TRANSACTION = ThreadSafeResourceDeleterSvc.class.getName() + "REQ_DET_KEY_IN_NEW_TRANSACTION"; + private static final String REQ_DET_KEY_IN_NEW_TRANSACTION = + ThreadSafeResourceDeleterSvc.class.getName() + "REQ_DET_KEY_IN_NEW_TRANSACTION"; private static final Logger ourLog = LoggerFactory.getLogger(ThreadSafeResourceDeleterSvc.class); private final DaoRegistry myDaoRegistry; 
private final IInterceptorBroadcaster myInterceptorBroadcaster; @@ -64,7 +65,10 @@ public class ThreadSafeResourceDeleterSvc { private final RetryTemplate myRetryTemplate = getRetryTemplate(); private final IHapiTransactionService myTransactionService; - public ThreadSafeResourceDeleterSvc(DaoRegistry theDaoRegistry, IInterceptorBroadcaster theInterceptorBroadcaster, IHapiTransactionService theTransactionService) { + public ThreadSafeResourceDeleterSvc( + DaoRegistry theDaoRegistry, + IInterceptorBroadcaster theInterceptorBroadcaster, + IHapiTransactionService theTransactionService) { myDaoRegistry = theDaoRegistry; myInterceptorBroadcaster = theInterceptorBroadcaster; myTransactionService = theTransactionService; @@ -73,7 +77,8 @@ public class ThreadSafeResourceDeleterSvc { /** * @return number of resources that were successfully deleted */ - public Integer delete(RequestDetails theRequest, DeleteConflictList theConflictList, TransactionDetails theTransactionDetails) { + public Integer delete( + RequestDetails theRequest, DeleteConflictList theConflictList, TransactionDetails theTransactionDetails) { Integer retVal = 0; List cascadeDeleteIdCache = CascadingDeleteInterceptor.getCascadedDeletesList(theRequest, true); @@ -83,7 +88,8 @@ public class ThreadSafeResourceDeleterSvc { if (!cascadeDeleteIdCache.contains(nextSourceId)) { cascadeDeleteIdCache.add(nextSourceId); - retVal += handleNextSource(theRequest, theConflictList, theTransactionDetails, next, nextSource, nextSourceId); + retVal += handleNextSource( + theRequest, theConflictList, theTransactionDetails, next, nextSource, nextSourceId); } } @@ -93,12 +99,17 @@ public class ThreadSafeResourceDeleterSvc { /** * @return number of resources that were successfully deleted */ - private Integer handleNextSource(RequestDetails theRequest, DeleteConflictList theConflictList, TransactionDetails theTransactionDetails, DeleteConflict next, IdDt nextSource, String nextSourceId) { + private Integer handleNextSource( + RequestDetails theRequest, + DeleteConflictList theConflictList, + TransactionDetails theTransactionDetails, + DeleteConflict next, + IdDt nextSource, + String nextSourceId) { IFhirResourceDao dao = myDaoRegistry.getResourceDao(nextSource.getResourceType()); // We will retry deletes on any occurrence of ResourceVersionConflictException up to RETRY_MAX_ATTEMPTS return myRetryTemplate.execute(retryContext -> { - String previousNewTransactionValue = null; if (theRequest != null) { previousNewTransactionValue = (String) theRequest.getUserData().get(REQ_DET_KEY_IN_NEW_TRANSACTION); @@ -120,10 +131,10 @@ public class ThreadSafeResourceDeleterSvc { } myTransactionService - .withRequest(theRequest) - .withTransactionDetails(theTransactionDetails) - .withPropagation(propagation) - .execute(() -> doDelete(theRequest, theConflictList, theTransactionDetails, nextSource, dao)); + .withRequest(theRequest) + .withTransactionDetails(theTransactionDetails) + .withPropagation(propagation) + .execute(() -> doDelete(theRequest, theConflictList, theTransactionDetails, nextSource, dao)); return 1; } catch (ResourceGoneException exception) { @@ -132,25 +143,29 @@ public class ThreadSafeResourceDeleterSvc { if (theRequest != null) { theRequest.getUserData().put(REQ_DET_KEY_IN_NEW_TRANSACTION, previousNewTransactionValue); } - } return 0; }); } - private DaoMethodOutcome doDelete(RequestDetails theRequest, DeleteConflictList - theConflictList, TransactionDetails theTransactionDetails, IdDt nextSource, IFhirResourceDao dao) { + private DaoMethodOutcome 
doDelete( + RequestDetails theRequest, + DeleteConflictList theConflictList, + TransactionDetails theTransactionDetails, + IdDt nextSource, + IFhirResourceDao dao) { // Interceptor call: STORAGE_CASCADE_DELETE // Remove the version so we grab the latest version to delete IBaseResource resource = dao.read(nextSource.toVersionless(), theRequest); HookParams params = new HookParams() - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest) - .add(DeleteConflictList.class, theConflictList) - .add(IBaseResource.class, resource); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_CASCADE_DELETE, params); + .add(RequestDetails.class, theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest) + .add(DeleteConflictList.class, theConflictList) + .add(IBaseResource.class, resource); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_CASCADE_DELETE, params); return dao.delete(resource.getIdElement(), theConflictList, theRequest, theTransactionDetails); } @@ -162,7 +177,8 @@ public class ThreadSafeResourceDeleterSvc { fixedBackOffPolicy.setBackOffPeriod(RETRY_BACKOFF_PERIOD); retryTemplate.setBackOffPolicy(fixedBackOffPolicy); - final SimpleRetryPolicy retryPolicy = new SimpleRetryPolicy(RETRY_MAX_ATTEMPTS, Collections.singletonMap(ResourceVersionConflictException.class, true)); + final SimpleRetryPolicy retryPolicy = new SimpleRetryPolicy( + RETRY_MAX_ATTEMPTS, Collections.singletonMap(ResourceVersionConflictException.class, true)); retryTemplate.setRetryPolicy(retryPolicy); return retryTemplate; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/batch2/DeleteExpungeSqlBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/batch2/DeleteExpungeSqlBuilder.java index 9966815c9a0..0bc2a272449 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/batch2/DeleteExpungeSqlBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/batch2/DeleteExpungeSqlBuilder.java @@ -31,13 +31,12 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class DeleteExpungeSqlBuilder { private static final Logger ourLog = LoggerFactory.getLogger(DeleteExpungeSqlBuilder.class); @@ -46,16 +45,20 @@ public class DeleteExpungeSqlBuilder { private final IIdHelperService myIdHelper; private final IResourceLinkDao myResourceLinkDao; - public DeleteExpungeSqlBuilder(ResourceTableFKProvider theResourceTableFKProvider, JpaStorageSettings theStorageSettings, IIdHelperService theIdHelper, IResourceLinkDao theResourceLinkDao) { + public DeleteExpungeSqlBuilder( + ResourceTableFKProvider theResourceTableFKProvider, + JpaStorageSettings theStorageSettings, + IIdHelperService theIdHelper, + IResourceLinkDao theResourceLinkDao) { myResourceTableFKProvider = theResourceTableFKProvider; myStorageSettings = theStorageSettings; myIdHelper = theIdHelper; myResourceLinkDao = theResourceLinkDao; } - @Nonnull - DeleteExpungeSqlResult convertPidsToDeleteExpungeSql(List theJpaPids, boolean theCascade, Integer theCascadeMaxRounds) { + DeleteExpungeSqlResult convertPidsToDeleteExpungeSql( + List 
theJpaPids, boolean theCascade, Integer theCascadeMaxRounds) { Set pids = JpaPid.toLongSet(theJpaPids); validateOkToDeleteAndExpunge(pids, theCascade, theCascadeMaxRounds); @@ -125,27 +128,45 @@ public class DeleteExpungeSqlBuilder { ResourceLink firstConflict = conflictResourceLinks.get(0); - //NB-GGG: We previously instantiated these ID values from firstConflict.getSourceResource().getIdDt(), but in a situation where we - //actually had to run delete conflict checks in multiple partitions, the executor service starts its own sessions on a per thread basis, and by the time - //we arrive here, those sessions are closed. So instead, we resolve them from PIDs, which are eagerly loaded. - String sourceResourceId = myIdHelper.resourceIdFromPidOrThrowException(JpaPid.fromId(firstConflict.getSourceResourcePid()), firstConflict.getSourceResourceType()).toVersionless().getValue(); - String targetResourceId = myIdHelper.resourceIdFromPidOrThrowException(JpaPid.fromId(firstConflict.getTargetResourcePid()), firstConflict.getTargetResourceType()).toVersionless().getValue(); + // NB-GGG: We previously instantiated these ID values from firstConflict.getSourceResource().getIdDt(), but in a + // situation where we + // actually had to run delete conflict checks in multiple partitions, the executor service starts its own + // sessions on a per thread basis, and by the time + // we arrive here, those sessions are closed. So instead, we resolve them from PIDs, which are eagerly loaded. + String sourceResourceId = myIdHelper + .resourceIdFromPidOrThrowException( + JpaPid.fromId(firstConflict.getSourceResourcePid()), firstConflict.getSourceResourceType()) + .toVersionless() + .getValue(); + String targetResourceId = myIdHelper + .resourceIdFromPidOrThrowException( + JpaPid.fromId(firstConflict.getTargetResourcePid()), firstConflict.getTargetResourceType()) + .toVersionless() + .getValue(); - throw new InvalidRequestException(Msg.code(822) + "DELETE with _expunge=true failed. Unable to delete " + - targetResourceId + " because " + sourceResourceId + " refers to it via the path " + firstConflict.getSourcePath()); + throw new InvalidRequestException( + Msg.code(822) + "DELETE with _expunge=true failed. Unable to delete " + targetResourceId + " because " + + sourceResourceId + " refers to it via the path " + firstConflict.getSourcePath()); } - public void findResourceLinksWithTargetPidIn(List theAllTargetPids, List theSomeTargetPids, List theConflictResourceLinks) { + public void findResourceLinksWithTargetPidIn( + List theAllTargetPids, + List theSomeTargetPids, + List theConflictResourceLinks) { List allTargetPidsAsLongs = JpaPid.toLongList(theAllTargetPids); List someTargetPidsAsLongs = JpaPid.toLongList(theSomeTargetPids); - // We only need to find one conflict, so if we found one already in an earlier partition run, we can skip the rest of the searches + // We only need to find one conflict, so if we found one already in an earlier partition run, we can skip the + // rest of the searches if (theConflictResourceLinks.isEmpty()) { - List conflictResourceLinks = myResourceLinkDao.findWithTargetPidIn(someTargetPidsAsLongs).stream() - // Filter out resource links for which we are planning to delete the source. - // theAllTargetPids contains a list of all the pids we are planning to delete. So we only want - // to consider a link to be a conflict if the source of that link is not in theAllTargetPids. 
- .filter(link -> !allTargetPidsAsLongs.contains(link.getSourceResourcePid())) - .collect(Collectors.toList()); + List conflictResourceLinks = + myResourceLinkDao.findWithTargetPidIn(someTargetPidsAsLongs).stream() + // Filter out resource links for which we are planning to delete the source. + // theAllTargetPids contains a list of all the pids we are planning to delete. So we only + // want + // to consider a link to be a conflict if the source of that link is not in + // theAllTargetPids. + .filter(link -> !allTargetPidsAsLongs.contains(link.getSourceResourcePid())) + .collect(Collectors.toList()); // We do this in two steps to avoid lock contention on this synchronized list theConflictResourceLinks.addAll(conflictResourceLinks); @@ -153,13 +174,12 @@ public class DeleteExpungeSqlBuilder { } private String deleteRecordsByColumnSql(String thePidListString, ResourceForeignKey theResourceForeignKey) { - return "DELETE FROM " + theResourceForeignKey.table + " WHERE " + theResourceForeignKey.key + " IN " + thePidListString; + return "DELETE FROM " + theResourceForeignKey.table + " WHERE " + theResourceForeignKey.key + " IN " + + thePidListString; } - public static class DeleteExpungeSqlResult { - private final List mySqlStatements; private final int myRecordCount; @@ -176,5 +196,4 @@ public class DeleteExpungeSqlBuilder { return myRecordCount; } } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/batch2/DeleteExpungeSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/batch2/DeleteExpungeSvcImpl.java index 5bf0725652b..9e7e7ff4474 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/batch2/DeleteExpungeSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/delete/batch2/DeleteExpungeSvcImpl.java @@ -27,9 +27,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import javax.persistence.EntityManager; import java.util.List; import java.util.stream.Collectors; +import javax.persistence.EntityManager; public class DeleteExpungeSvcImpl implements IDeleteExpungeSvc { private static final Logger ourLog = LoggerFactory.getLogger(DeleteExpungeSvcImpl.class); @@ -38,7 +38,10 @@ public class DeleteExpungeSvcImpl implements IDeleteExpungeSvc { private final DeleteExpungeSqlBuilder myDeleteExpungeSqlBuilder; private final IFulltextSearchSvc myFullTextSearchSvc; - public DeleteExpungeSvcImpl(EntityManager theEntityManager, DeleteExpungeSqlBuilder theDeleteExpungeSqlBuilder, @Autowired(required = false) IFulltextSearchSvc theFullTextSearchSvc) { + public DeleteExpungeSvcImpl( + EntityManager theEntityManager, + DeleteExpungeSqlBuilder theDeleteExpungeSqlBuilder, + @Autowired(required = false) IFulltextSearchSvc theFullTextSearchSvc) { myEntityManager = theEntityManager; myDeleteExpungeSqlBuilder = theDeleteExpungeSqlBuilder; myFullTextSearchSvc = theFullTextSearchSvc; @@ -46,7 +49,8 @@ public class DeleteExpungeSvcImpl implements IDeleteExpungeSvc { @Override public int deleteExpunge(List theJpaPids, boolean theCascade, Integer theCascadeMaxRounds) { - DeleteExpungeSqlBuilder.DeleteExpungeSqlResult sqlResult = myDeleteExpungeSqlBuilder.convertPidsToDeleteExpungeSql(theJpaPids, theCascade, theCascadeMaxRounds); + DeleteExpungeSqlBuilder.DeleteExpungeSqlResult sqlResult = + myDeleteExpungeSqlBuilder.convertPidsToDeleteExpungeSql(theJpaPids, theCascade, theCascadeMaxRounds); List sqlList = sqlResult.getSqlStatements(); ourLog.debug("Executing {} 
delete expunge sql commands", sqlList.size()); @@ -58,7 +62,7 @@ public class DeleteExpungeSvcImpl implements IDeleteExpungeSvc { ourLog.info("{} records deleted", totalDeleted); clearHibernateSearchIndex(theJpaPids); - + // TODO KHS instead of logging progress, produce result chunks that get aggregated into a delete expunge report return sqlResult.getRecordCount(); } @@ -74,11 +78,10 @@ public class DeleteExpungeSvcImpl implements IDeleteExpungeSvc { */ private void clearHibernateSearchIndex(List thePersistentIds) { if (myFullTextSearchSvc != null && !myFullTextSearchSvc.isDisabled()) { - List objectIds = thePersistentIds.stream().map(JpaPid::getId).collect(Collectors.toList()); + List objectIds = + thePersistentIds.stream().map(JpaPid::getId).collect(Collectors.toList()); myFullTextSearchSvc.deleteIndexedDocumentsByTypeAndId(ResourceTable.class, objectIds); ourLog.info("Cleared Hibernate Search indexes."); } } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Batch2JobInstanceEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Batch2JobInstanceEntity.java index cc878330f93..dd9205a8f30 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Batch2JobInstanceEntity.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Batch2JobInstanceEntity.java @@ -24,6 +24,8 @@ import ca.uhn.fhir.batch2.model.StatusEnum; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; +import java.io.Serializable; +import java.util.Date; import javax.persistence.Basic; import javax.persistence.Column; import javax.persistence.Entity; @@ -37,8 +39,6 @@ import javax.persistence.Table; import javax.persistence.Temporal; import javax.persistence.TemporalType; import javax.persistence.Version; -import java.io.Serializable; -import java.util.Date; import static ca.uhn.fhir.batch2.model.JobDefinition.ID_MAX_LENGTH; import static ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity.ERROR_MSG_MAX_LENGTH; @@ -46,9 +46,9 @@ import static ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity.WARNING_MSG_MAX_LENGT import static org.apache.commons.lang3.StringUtils.left; @Entity -@Table(name = "BT2_JOB_INSTANCE", indexes = { - @Index(name = "IDX_BT2JI_CT", columnList = "CREATE_TIME") -}) +@Table( + name = "BT2_JOB_INSTANCE", + indexes = {@Index(name = "IDX_BT2JI_CT", columnList = "CREATE_TIME")}) public class Batch2JobInstanceEntity implements Serializable { public static final int STATUS_MAX_LENGTH = 20; @@ -89,31 +89,44 @@ public class Batch2JobInstanceEntity implements Serializable { @Column(name = "JOB_CANCELLED", nullable = false) private boolean myCancelled; + @Column(name = "FAST_TRACKING", nullable = true) private Boolean myFastTracking; + @Column(name = "PARAMS_JSON", length = PARAMS_JSON_MAX_LENGTH, nullable = true) private String myParamsJson; + @Lob @Column(name = "PARAMS_JSON_LOB", nullable = true) private String myParamsJsonLob; + @Column(name = "CMB_RECS_PROCESSED", nullable = true) private Integer myCombinedRecordsProcessed; + @Column(name = "CMB_RECS_PER_SEC", nullable = true) private Double myCombinedRecordsProcessedPerSecond; + @Column(name = "TOT_ELAPSED_MILLIS", nullable = true) private Integer myTotalElapsedMillis; + @Column(name = "WORK_CHUNKS_PURGED", nullable = false) private boolean myWorkChunksPurged; + @Column(name = "PROGRESS_PCT") private double myProgress; + @Column(name = "ERROR_MSG", length = ERROR_MSG_MAX_LENGTH, nullable = true) private String myErrorMessage; + 
@Column(name = "ERROR_COUNT") private int myErrorCount; + @Column(name = "EST_REMAINING", length = TIME_REMAINING_LENGTH, nullable = true) private String myEstimatedTimeRemaining; + @Column(name = "CUR_GATED_STEP_ID", length = ID_MAX_LENGTH, nullable = true) private String myCurrentGatedStepId; + @Column(name = "WARNING_MSG", length = WARNING_MSG_MAX_LENGTH, nullable = true) private String myWarningMessages; @@ -306,26 +319,26 @@ public class Batch2JobInstanceEntity implements Serializable { @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("id", myId) - .append("definitionId", myDefinitionId) - .append("definitionVersion", myDefinitionVersion) - .append("errorCount", myErrorCount) - .append("createTime", myCreateTime) - .append("startTime", myStartTime) - .append("endTime", myEndTime) - .append("updateTime", myUpdateTime) - .append("status", myStatus) - .append("cancelled", myCancelled) - .append("combinedRecordsProcessed", myCombinedRecordsProcessed) - .append("combinedRecordsProcessedPerSecond", myCombinedRecordsProcessedPerSecond) - .append("totalElapsedMillis", myTotalElapsedMillis) - .append("workChunksPurged", myWorkChunksPurged) - .append("progress", myProgress) - .append("errorMessage", myErrorMessage) - .append("estimatedTimeRemaining", myEstimatedTimeRemaining) - .append("report", myReport) - .append("warningMessages", myWarningMessages) - .toString(); + .append("id", myId) + .append("definitionId", myDefinitionId) + .append("definitionVersion", myDefinitionVersion) + .append("errorCount", myErrorCount) + .append("createTime", myCreateTime) + .append("startTime", myStartTime) + .append("endTime", myEndTime) + .append("updateTime", myUpdateTime) + .append("status", myStatus) + .append("cancelled", myCancelled) + .append("combinedRecordsProcessed", myCombinedRecordsProcessed) + .append("combinedRecordsProcessedPerSecond", myCombinedRecordsProcessedPerSecond) + .append("totalElapsedMillis", myTotalElapsedMillis) + .append("workChunksPurged", myWorkChunksPurged) + .append("progress", myProgress) + .append("errorMessage", myErrorMessage) + .append("estimatedTimeRemaining", myEstimatedTimeRemaining) + .append("report", myReport) + .append("warningMessages", myWarningMessages) + .toString(); } /** diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Batch2WorkChunkEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Batch2WorkChunkEntity.java index 6cfc382d434..e65c8814faa 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Batch2WorkChunkEntity.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Batch2WorkChunkEntity.java @@ -23,6 +23,8 @@ import ca.uhn.fhir.batch2.model.WorkChunkStatusEnum; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; +import java.io.Serializable; +import java.util.Date; import javax.persistence.Basic; import javax.persistence.Column; import javax.persistence.Entity; @@ -39,80 +41,110 @@ import javax.persistence.Table; import javax.persistence.Temporal; import javax.persistence.TemporalType; import javax.persistence.Version; -import java.io.Serializable; -import java.util.Date; import static ca.uhn.fhir.batch2.model.JobDefinition.ID_MAX_LENGTH; import static ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity.STATUS_MAX_LENGTH; import static org.apache.commons.lang3.StringUtils.left; @Entity -@Table(name = "BT2_WORK_CHUNK", indexes = { - @Index(name = 
"IDX_BT2WC_II_SEQ", columnList = "INSTANCE_ID,SEQ") -}) +@Table( + name = "BT2_WORK_CHUNK", + indexes = {@Index(name = "IDX_BT2WC_II_SEQ", columnList = "INSTANCE_ID,SEQ")}) public class Batch2WorkChunkEntity implements Serializable { public static final int ERROR_MSG_MAX_LENGTH = 500; public static final int WARNING_MSG_MAX_LENGTH = 4000; private static final long serialVersionUID = -6202771941965780558L; + @Id @Column(name = "ID", length = ID_MAX_LENGTH) private String myId; + @Column(name = "SEQ", nullable = false) private int mySequence; + @Column(name = "CREATE_TIME", nullable = false) @Temporal(TemporalType.TIMESTAMP) private Date myCreateTime; + @Column(name = "START_TIME", nullable = true) @Temporal(TemporalType.TIMESTAMP) private Date myStartTime; + @Column(name = "END_TIME", nullable = true) @Temporal(TemporalType.TIMESTAMP) private Date myEndTime; + @Version @Column(name = "UPDATE_TIME", nullable = true) @Temporal(TemporalType.TIMESTAMP) private Date myUpdateTime; + @Column(name = "RECORDS_PROCESSED", nullable = true) private Integer myRecordsProcessed; + @Column(name = "DEFINITION_ID", length = ID_MAX_LENGTH, nullable = false) private String myJobDefinitionId; + @Column(name = "DEFINITION_VER", length = ID_MAX_LENGTH, nullable = false) private int myJobDefinitionVersion; + @Column(name = "TGT_STEP_ID", length = ID_MAX_LENGTH, nullable = false) private String myTargetStepId; + @Lob @Basic(fetch = FetchType.LAZY) @Column(name = "CHUNK_DATA", nullable = true, length = Integer.MAX_VALUE - 1) private String mySerializedData; + @Column(name = "STAT", length = STATUS_MAX_LENGTH, nullable = false) @Enumerated(EnumType.STRING) private WorkChunkStatusEnum myStatus; + @ManyToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "INSTANCE_ID", insertable = false, updatable = false, foreignKey = @ForeignKey(name = "FK_BT2WC_INSTANCE")) + @JoinColumn( + name = "INSTANCE_ID", + insertable = false, + updatable = false, + foreignKey = @ForeignKey(name = "FK_BT2WC_INSTANCE")) private Batch2JobInstanceEntity myInstance; + @Column(name = "INSTANCE_ID", length = ID_MAX_LENGTH, nullable = false) private String myInstanceId; + @Column(name = "ERROR_MSG", length = ERROR_MSG_MAX_LENGTH, nullable = true) private String myErrorMessage; + @Column(name = "ERROR_COUNT", nullable = false) private int myErrorCount; + @Column(name = "WARNING_MSG", length = WARNING_MSG_MAX_LENGTH, nullable = true) private String myWarningMessage; /** * Default constructor for Hibernate. */ - public Batch2WorkChunkEntity() { - } + public Batch2WorkChunkEntity() {} /** * Projection constructor for no-data path. 
*/ - public Batch2WorkChunkEntity(String theId, int theSequence, String theJobDefinitionId, int theJobDefinitionVersion, - String theInstanceId, String theTargetStepId, WorkChunkStatusEnum theStatus, - Date theCreateTime, Date theStartTime, Date theUpdateTime, Date theEndTime, - String theErrorMessage, int theErrorCount, Integer theRecordsProcessed, String theWarningMessage) { + public Batch2WorkChunkEntity( + String theId, + int theSequence, + String theJobDefinitionId, + int theJobDefinitionVersion, + String theInstanceId, + String theTargetStepId, + WorkChunkStatusEnum theStatus, + Date theCreateTime, + Date theStartTime, + Date theUpdateTime, + Date theEndTime, + String theErrorMessage, + int theErrorCount, + Integer theRecordsProcessed, + String theWarningMessage) { myId = theId; mySequence = theSequence; myJobDefinitionId = theJobDefinitionId; @@ -265,23 +297,22 @@ public class Batch2WorkChunkEntity implements Serializable { @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("id", myId) - .append("instanceId", myInstanceId) - .append("sequence", mySequence) - .append("errorCount", myErrorCount) - .append("jobDefinitionId", myJobDefinitionId) - .append("jobDefinitionVersion", myJobDefinitionVersion) - .append("createTime", myCreateTime) - .append("startTime", myStartTime) - .append("endTime", myEndTime) - .append("updateTime", myUpdateTime) - .append("recordsProcessed", myRecordsProcessed) - .append("targetStepId", myTargetStepId) - .append("serializedData", mySerializedData) - .append("status", myStatus) - .append("errorMessage", myErrorMessage) - .append("warningMessage", myWarningMessage) - .toString(); + .append("id", myId) + .append("instanceId", myInstanceId) + .append("sequence", mySequence) + .append("errorCount", myErrorCount) + .append("jobDefinitionId", myJobDefinitionId) + .append("jobDefinitionVersion", myJobDefinitionVersion) + .append("createTime", myCreateTime) + .append("startTime", myStartTime) + .append("endTime", myEndTime) + .append("updateTime", myUpdateTime) + .append("recordsProcessed", myRecordsProcessed) + .append("targetStepId", myTargetStepId) + .append("serializedData", mySerializedData) + .append("status", myStatus) + .append("errorMessage", myErrorMessage) + .append("warningMessage", myWarningMessage) + .toString(); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportCollectionEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportCollectionEntity.java index 73ae67256f2..7903eab9004 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportCollectionEntity.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportCollectionEntity.java @@ -21,6 +21,9 @@ package ca.uhn.fhir.jpa.entity; import ca.uhn.fhir.jpa.model.entity.ResourceTable; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collection; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.FetchType; @@ -34,9 +37,6 @@ import javax.persistence.OneToMany; import javax.persistence.SequenceGenerator; import javax.persistence.Table; import javax.persistence.Version; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Collection; /* * These classes are no longer needed. 
@@ -55,16 +55,25 @@ public class BulkExportCollectionEntity implements Serializable { @SequenceGenerator(name = "SEQ_BLKEXCOL_PID", sequenceName = "SEQ_BLKEXCOL_PID") @Column(name = "PID") private Long myId; + @ManyToOne() - @JoinColumn(name = "JOB_PID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name = "FK_BLKEXCOL_JOB")) + @JoinColumn( + name = "JOB_PID", + referencedColumnName = "PID", + nullable = false, + foreignKey = @ForeignKey(name = "FK_BLKEXCOL_JOB")) private BulkExportJobEntity myJob; + @Column(name = "RES_TYPE", length = ResourceTable.RESTYPE_LEN, nullable = false) private String myResourceType; + @Column(name = "TYPE_FILTER", length = 1000, nullable = true) private String myFilter; + @Version @Column(name = "OPTLOCK", nullable = false) private int myVersion; + @OneToMany(fetch = FetchType.LAZY, mappedBy = "myCollection") private Collection myFiles; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportCollectionFileEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportCollectionFileEntity.java index 5a811de58d9..a257166230e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportCollectionFileEntity.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportCollectionFileEntity.java @@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.entity; import ca.uhn.fhir.jpa.model.entity.ForcedId; +import java.io.Serializable; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.FetchType; @@ -32,8 +33,6 @@ import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import javax.persistence.SequenceGenerator; import javax.persistence.Table; -import java.io.Serializable; - /* * These classes are no longer needed. 
@@ -54,7 +53,11 @@ public class BulkExportCollectionFileEntity implements Serializable { private Long myId; @ManyToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "COLLECTION_PID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name="FK_BLKEXCOLFILE_COLLECT")) + @JoinColumn( + name = "COLLECTION_PID", + referencedColumnName = "PID", + nullable = false, + foreignKey = @ForeignKey(name = "FK_BLKEXCOLFILE_COLLECT")) private BulkExportCollectionEntity myCollection; @Column(name = "RES_ID", length = ForcedId.MAX_FORCED_ID_LENGTH, nullable = false) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java index 9271f5247bf..a9bf341724a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java @@ -23,10 +23,12 @@ import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; import org.hl7.fhir.r5.model.InstantType; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Date; import javax.persistence.Column; import javax.persistence.Entity; -import javax.persistence.EnumType; -import javax.persistence.Enumerated; import javax.persistence.FetchType; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; @@ -39,16 +41,11 @@ import javax.persistence.Temporal; import javax.persistence.TemporalType; import javax.persistence.UniqueConstraint; import javax.persistence.Version; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Date; import static ca.uhn.fhir.rest.api.Constants.UUID_LENGTH; import static org.apache.commons.lang3.StringUtils.isNotBlank; import static org.apache.commons.lang3.StringUtils.left; - /* * These classes are no longer needed. 
* Metadata on the job is contained in the job itself @@ -57,11 +54,10 @@ import static org.apache.commons.lang3.StringUtils.left; * See the BulkExportAppCtx for job details */ @Entity -@Table(name = BulkExportJobEntity.HFJ_BLK_EXPORT_JOB, uniqueConstraints = { - @UniqueConstraint(name = "IDX_BLKEX_JOB_ID", columnNames = "JOB_ID") -}, indexes = { - @Index(name = "IDX_BLKEX_EXPTIME", columnList = "EXP_TIME") -}) +@Table( + name = BulkExportJobEntity.HFJ_BLK_EXPORT_JOB, + uniqueConstraints = {@UniqueConstraint(name = "IDX_BLKEX_JOB_ID", columnNames = "JOB_ID")}, + indexes = {@Index(name = "IDX_BLKEX_EXPTIME", columnList = "EXP_TIME")}) @Deprecated public class BulkExportJobEntity implements Serializable { @@ -69,6 +65,7 @@ public class BulkExportJobEntity implements Serializable { public static final int STATUS_MESSAGE_LEN = 500; public static final String JOB_ID = "JOB_ID"; public static final String HFJ_BLK_EXPORT_JOB = "HFJ_BLK_EXPORT_JOB"; + @Id @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKEXJOB_PID") @SequenceGenerator(name = "SEQ_BLKEXJOB_PID", sequenceName = "SEQ_BLKEXJOB_PID") @@ -80,25 +77,33 @@ public class BulkExportJobEntity implements Serializable { @Column(name = "JOB_STATUS", length = 10, nullable = false) private String myStatus; + @Temporal(TemporalType.TIMESTAMP) @Column(name = "CREATED_TIME", nullable = false) private Date myCreated; + @Temporal(TemporalType.TIMESTAMP) @Column(name = "STATUS_TIME", nullable = false) private Date myStatusTime; + @Temporal(TemporalType.TIMESTAMP) @Column(name = "EXP_TIME", nullable = true) private Date myExpiry; + @Column(name = "REQUEST", nullable = false, length = REQUEST_LENGTH) private String myRequest; + @OneToMany(fetch = FetchType.LAZY, mappedBy = "myJob", orphanRemoval = false) private Collection myCollections; + @Version @Column(name = "OPTLOCK", nullable = false) private int myVersion; + @Temporal(TemporalType.TIMESTAMP) @Column(name = "EXP_SINCE", nullable = true) private Date mySince; + @Column(name = "STATUS_MESSAGE", nullable = true, length = STATUS_MESSAGE_LEN) private String myStatusMessage; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobEntity.java index 95d3bf9eb94..f3151ea2ba4 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobEntity.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobEntity.java @@ -23,6 +23,8 @@ import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson; import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum; import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum; +import java.io.Serializable; +import java.util.Date; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.EnumType; @@ -36,20 +38,19 @@ import javax.persistence.Temporal; import javax.persistence.TemporalType; import javax.persistence.UniqueConstraint; import javax.persistence.Version; -import java.io.Serializable; -import java.util.Date; import static ca.uhn.fhir.rest.api.Constants.UUID_LENGTH; import static org.apache.commons.lang3.StringUtils.left; @Entity -@Table(name = BulkImportJobEntity.HFJ_BLK_IMPORT_JOB, uniqueConstraints = { - @UniqueConstraint(name = "IDX_BLKIM_JOB_ID", columnNames = "JOB_ID") -}) +@Table( + name = BulkImportJobEntity.HFJ_BLK_IMPORT_JOB, + uniqueConstraints = {@UniqueConstraint(name = "IDX_BLKIM_JOB_ID", columnNames = "JOB_ID")}) public 
class BulkImportJobEntity implements Serializable { public static final String HFJ_BLK_IMPORT_JOB = "HFJ_BLK_IMPORT_JOB"; public static final String JOB_ID = "JOB_ID"; + @Id @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKIMJOB_PID") @SequenceGenerator(name = "SEQ_BLKIMJOB_PID", sequenceName = "SEQ_BLKIMJOB_PID") @@ -58,24 +59,32 @@ public class BulkImportJobEntity implements Serializable { @Column(name = JOB_ID, length = UUID_LENGTH, nullable = false, updatable = false) private String myJobId; + @Column(name = "JOB_DESC", nullable = true, length = BulkExportJobEntity.STATUS_MESSAGE_LEN) private String myJobDescription; + @Enumerated(EnumType.STRING) @Column(name = "JOB_STATUS", length = 10, nullable = false) private BulkImportJobStatusEnum myStatus; + @Version @Column(name = "OPTLOCK", nullable = false) private int myVersion; + @Column(name = "FILE_COUNT", nullable = false) private int myFileCount; + @Temporal(TemporalType.TIMESTAMP) @Column(name = "STATUS_TIME", nullable = false) private Date myStatusTime; + @Column(name = "STATUS_MESSAGE", nullable = true, length = BulkExportJobEntity.STATUS_MESSAGE_LEN) private String myStatusMessage; + @Column(name = "ROW_PROCESSING_MODE", length = 20, nullable = false, updatable = false) @Enumerated(EnumType.STRING) private JobFileRowProcessingModeEnum myRowProcessingMode; + @Column(name = "BATCH_SIZE", nullable = false, updatable = false) private int myBatchSize; @@ -144,9 +153,9 @@ public class BulkImportJobEntity implements Serializable { public BulkImportJobJson toJson() { return new BulkImportJobJson() - .setProcessingMode(getRowProcessingMode()) - .setFileCount(getFileCount()) - .setJobDescription(getJobDescription()); + .setProcessingMode(getRowProcessingMode()) + .setFileCount(getFileCount()) + .setJobDescription(getJobDescription()); } public int getBatchSize() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobFileEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobFileEntity.java index c73f914076b..11a2998989f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobFileEntity.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobFileEntity.java @@ -21,6 +21,8 @@ package ca.uhn.fhir.jpa.entity; import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson; +import java.io.Serializable; +import java.nio.charset.StandardCharsets; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.ForeignKey; @@ -33,18 +35,17 @@ import javax.persistence.Lob; import javax.persistence.ManyToOne; import javax.persistence.SequenceGenerator; import javax.persistence.Table; -import java.io.Serializable; -import java.nio.charset.StandardCharsets; import static org.apache.commons.lang3.StringUtils.left; @Entity -@Table(name = "HFJ_BLK_IMPORT_JOBFILE", indexes = { - @Index(name = "IDX_BLKIM_JOBFILE_JOBID", columnList = "JOB_PID") -}) +@Table( + name = "HFJ_BLK_IMPORT_JOBFILE", + indexes = {@Index(name = "IDX_BLKIM_JOBFILE_JOBID", columnList = "JOB_PID")}) public class BulkImportJobFileEntity implements Serializable { public static final int MAX_DESCRIPTION_LENGTH = 500; + @Id @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKIMJOBFILE_PID") @SequenceGenerator(name = "SEQ_BLKIMJOBFILE_PID", sequenceName = "SEQ_BLKIMJOBFILE_PID") @@ -52,16 +53,23 @@ public class BulkImportJobFileEntity implements Serializable { private Long myId; @ManyToOne - 
@JoinColumn(name = "JOB_PID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name = "FK_BLKIMJOBFILE_JOB")) + @JoinColumn( + name = "JOB_PID", + referencedColumnName = "PID", + nullable = false, + foreignKey = @ForeignKey(name = "FK_BLKIMJOBFILE_JOB")) private BulkImportJobEntity myJob; @Column(name = "FILE_SEQ", nullable = false) private int myFileSequence; + @Column(name = "FILE_DESCRIPTION", nullable = true, length = MAX_DESCRIPTION_LENGTH) private String myFileDescription; + @Lob @Column(name = "JOB_CONTENTS", nullable = false) private byte[] myContents; + @Column(name = "TENANT_NAME", nullable = true, length = PartitionEntity.MAX_NAME_LENGTH) private String myTenantName; @@ -97,11 +105,8 @@ public class BulkImportJobFileEntity implements Serializable { myContents = theContents.getBytes(StandardCharsets.UTF_8); } - public BulkImportJobFileJson toJson() { - return new BulkImportJobFileJson() - .setContents(getContents()) - .setTenantName(getTenantName()); + return new BulkImportJobFileJson().setContents(getContents()).setTenantName(getTenantName()); } public String getTenantName() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/HapiFhirEnversRevision.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/HapiFhirEnversRevision.java index 1a7d60ac22c..9daa452e844 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/HapiFhirEnversRevision.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/HapiFhirEnversRevision.java @@ -24,6 +24,8 @@ import org.hibernate.envers.RevisionEntity; import org.hibernate.envers.RevisionNumber; import org.hibernate.envers.RevisionTimestamp; +import java.io.Serializable; +import java.util.Date; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; @@ -31,8 +33,6 @@ import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.SequenceGenerator; import javax.persistence.Table; -import java.io.Serializable; -import java.util.Date; /** * This class exists strictly to override the default names used to generate Hibernate Envers revision table. 
@@ -84,8 +84,8 @@ public class HapiFhirEnversRevision implements Serializable { @Override public String toString() { return new ToStringBuilder(this) - .append("myRev", myRev) - .append("myRevtstmp", myRevtstmp) - .toString(); + .append("myRev", myRev) + .append("myRevtstmp", myRevtstmp) + .toString(); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/MdmLink.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/MdmLink.java index 140b012c3a7..08be50ccfa9 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/MdmLink.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/MdmLink.java @@ -30,6 +30,7 @@ import org.hibernate.envers.AuditTable; import org.hibernate.envers.Audited; import org.hibernate.envers.NotAudited; +import java.util.Date; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.EnumType; @@ -47,19 +48,23 @@ import javax.persistence.Table; import javax.persistence.Temporal; import javax.persistence.TemporalType; import javax.persistence.UniqueConstraint; -import java.util.Date; @Entity -@Table(name = "MPI_LINK", uniqueConstraints = { - // TODO GGG DROP this index, and instead use the below one - @UniqueConstraint(name = "IDX_EMPI_PERSON_TGT", columnNames = {"PERSON_PID", "TARGET_PID"}), - //TODO GGG Should i make individual indices for PERSON/TARGET? -}, indexes = { - @Index(name = "IDX_EMPI_MATCH_TGT_VER", columnList = "MATCH_RESULT, TARGET_PID, VERSION"), - // v---- this one - @Index(name = "IDX_EMPI_GR_TGT", columnList = "GOLDEN_RESOURCE_PID, TARGET_PID"), - @Index(name = "FK_EMPI_LINK_TARGET", columnList = "TARGET_PID") -}) +@Table( + name = "MPI_LINK", + uniqueConstraints = { + // TODO GGG DROP this index, and instead use the below one + @UniqueConstraint( + name = "IDX_EMPI_PERSON_TGT", + columnNames = {"PERSON_PID", "TARGET_PID"}), + // TODO GGG Should i make individual indices for PERSON/TARGET? 
+ }, + indexes = { + @Index(name = "IDX_EMPI_MATCH_TGT_VER", columnList = "MATCH_RESULT, TARGET_PID, VERSION"), + // v---- this one + @Index(name = "IDX_EMPI_GR_TGT", columnList = "GOLDEN_RESOURCE_PID, TARGET_PID"), + @Index(name = "FK_EMPI_LINK_TARGET", columnList = "TARGET_PID") + }) @Audited // This is the table name generated by default by envers, but we set it explicitly for clarity @AuditTable("MPI_LINK_AUD") @@ -75,30 +80,57 @@ public class MdmLink extends AuditableBasePartitionable implements IMdmLink myIncludes; + @Temporal(TemporalType.TIMESTAMP) @Column(name = "LAST_UPDATED_HIGH", nullable = true, insertable = true, updatable = false) private Date myLastUpdatedHigh; + @Temporal(TemporalType.TIMESTAMP) @Column(name = "LAST_UPDATED_LOW", nullable = true, insertable = true, updatable = false) private Date myLastUpdatedLow; + @Column(name = "NUM_FOUND", nullable = false) private int myNumFound; + @Column(name = "NUM_BLOCKED", nullable = true) private Integer myNumBlocked; + @Column(name = "PREFERRED_PAGE_SIZE", nullable = true) private Integer myPreferredPageSize; + @Column(name = "RESOURCE_ID", nullable = true) private Long myResourceId; + @Column(name = "RESOURCE_TYPE", length = 200, nullable = true) private String myResourceType; /** @@ -130,22 +146,29 @@ public class Search implements ICachedSearchDetails, Serializable { @Basic(fetch = FetchType.LAZY) @Column(name = "SEARCH_QUERY_STRING", nullable = true, updatable = false, length = MAX_SEARCH_QUERY_STRING) private String mySearchQueryString; + @Column(name = "SEARCH_QUERY_STRING_HASH", nullable = true, updatable = false) private Integer mySearchQueryStringHash; + @Enumerated(EnumType.ORDINAL) @Column(name = "SEARCH_TYPE", nullable = false) private SearchTypeEnum mySearchType; + @Enumerated(EnumType.STRING) @Column(name = "SEARCH_STATUS", nullable = false, length = 10) private SearchStatusEnum myStatus; + @Column(name = "TOTAL_COUNT", nullable = true) private Integer myTotalCount; + @Column(name = SEARCH_UUID, length = SEARCH_UUID_COLUMN_LENGTH, nullable = false, updatable = false) private String myUuid; + @SuppressWarnings("unused") @Version @Column(name = "OPTLOCK_VERSION", nullable = true) private Integer myVersion; + @Lob @Column(name = "SEARCH_PARAM_MAP", nullable = true) private byte[] mySearchParameterMap; @@ -187,15 +210,15 @@ public class Search implements ICachedSearchDetails, Serializable { @Override public String toString() { return new ToStringBuilder(this) - .append("myLastUpdatedHigh", myLastUpdatedHigh) - .append("myLastUpdatedLow", myLastUpdatedLow) - .append("myNumFound", myNumFound) - .append("myNumBlocked", myNumBlocked) - .append("myStatus", myStatus) - .append("myTotalCount", myTotalCount) - .append("myUuid", myUuid) - .append("myVersion", myVersion) - .toString(); + .append("myLastUpdatedHigh", myLastUpdatedHigh) + .append("myLastUpdatedLow", myLastUpdatedLow) + .append("myNumFound", myNumFound) + .append("myNumBlocked", myNumBlocked) + .append("myStatus", myStatus) + .append("myTotalCount", myTotalCount) + .append("myUuid", myUuid) + .append("myVersion", myVersion) + .toString(); } public int getNumBlocked() { @@ -456,7 +479,8 @@ public class Search implements ICachedSearchDetails, Serializable { } @Nonnull - public static String createSearchQueryStringForStorage(@Nonnull String theSearchQueryString, @Nonnull RequestPartitionId theRequestPartitionId) { + public static String createSearchQueryStringForStorage( + @Nonnull String theSearchQueryString, @Nonnull RequestPartitionId theRequestPartitionId) { 
String searchQueryString = theSearchQueryString; if (!theRequestPartitionId.isAllPartitions()) { searchQueryString = RequestPartitionId.stringifyForKey(theRequestPartitionId) + " " + searchQueryString; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/SearchInclude.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/SearchInclude.java index fa7c03dd5a5..8cf79736d7e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/SearchInclude.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/SearchInclude.java @@ -20,7 +20,6 @@ package ca.uhn.fhir.jpa.entity; import java.io.Serializable; - import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.ForeignKey; @@ -33,12 +32,12 @@ import javax.persistence.ManyToOne; import javax.persistence.SequenceGenerator; import javax.persistence.Table; -//@formatter:off +// @formatter:off @Entity -@Table(name = "HFJ_SEARCH_INCLUDE", indexes = { - @Index(name = "FK_SEARCHINC_SEARCH", columnList = "SEARCH_PID") -}) -//@formatter:on +@Table( + name = "HFJ_SEARCH_INCLUDE", + indexes = {@Index(name = "FK_SEARCHINC_SEARCH", columnList = "SEARCH_PID")}) +// @formatter:on public class SearchInclude implements Serializable { private static final long serialVersionUID = 1L; @@ -60,10 +59,16 @@ public class SearchInclude implements Serializable { private String myInclude; @ManyToOne - @JoinColumn(name = "SEARCH_PID", referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_SEARCHINC_SEARCH"), insertable = true, updatable = false, nullable = false) + @JoinColumn( + name = "SEARCH_PID", + referencedColumnName = "PID", + foreignKey = @ForeignKey(name = "FK_SEARCHINC_SEARCH"), + insertable = true, + updatable = false, + nullable = false) private Search mySearch; - @Column(name="SEARCH_PID", insertable=false, updatable=false, nullable=false) + @Column(name = "SEARCH_PID", insertable = false, updatable = false, nullable = false) private Long mySearchPid; @Column(name = "INC_RECURSE", insertable = true, updatable = false, nullable = false) @@ -109,5 +114,4 @@ public class SearchInclude implements Serializable { public boolean isRecurse() { return myRecurse; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/SearchResult.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/SearchResult.java index cb0174eed88..5dc807554eb 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/SearchResult.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/SearchResult.java @@ -22,13 +22,17 @@ package ca.uhn.fhir.jpa.entity; import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.builder.ToStringBuilder; -import javax.persistence.*; import java.io.Serializable; +import javax.persistence.*; @Entity -@Table(name = "HFJ_SEARCH_RESULT", uniqueConstraints = { - @UniqueConstraint(name = "IDX_SEARCHRES_ORDER", columnNames = {"SEARCH_PID", "SEARCH_ORDER"}) -}) +@Table( + name = "HFJ_SEARCH_RESULT", + uniqueConstraints = { + @UniqueConstraint( + name = "IDX_SEARCHRES_ORDER", + columnNames = {"SEARCH_PID", "SEARCH_ORDER"}) + }) public class SearchResult implements Serializable { private static final long serialVersionUID = 1L; @@ -38,10 +42,13 @@ public class SearchResult implements Serializable { @Id @Column(name = "PID") private Long myId; + @Column(name = "SEARCH_ORDER", nullable = false, insertable = true, updatable = false) private int myOrder; + @Column(name = "RESOURCE_PID", 
insertable = true, updatable = false, nullable = false) private Long myResourcePid; + @Column(name = "SEARCH_PID", insertable = true, updatable = false, nullable = false) private Long mySearchPid; @@ -63,10 +70,10 @@ public class SearchResult implements Serializable { @Override public String toString() { return new ToStringBuilder(this) - .append("search", mySearchPid) - .append("order", myOrder) - .append("resourcePid", myResourcePid) - .toString(); + .append("search", mySearchPid) + .append("order", myOrder) + .append("resourcePid", myResourcePid) + .toString(); } @Override diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/SearchTypeEnum.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/SearchTypeEnum.java index 28106a86125..f34a7da2b14 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/SearchTypeEnum.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/SearchTypeEnum.java @@ -20,9 +20,7 @@ package ca.uhn.fhir.jpa.entity; public enum SearchTypeEnum { - EVERYTHING, SEARCH, - HISTORY, - + HISTORY, } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/SubscriptionTable.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/SubscriptionTable.java index 9888c6a392d..63b8f6a6588 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/SubscriptionTable.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/SubscriptionTable.java @@ -21,13 +21,17 @@ package ca.uhn.fhir.jpa.entity; import ca.uhn.fhir.jpa.model.entity.ResourceTable; -import javax.persistence.*; import java.util.Date; +import javax.persistence.*; @Entity -@Table(name = "HFJ_SUBSCRIPTION_STATS", uniqueConstraints = { - @UniqueConstraint(name = "IDX_SUBSC_RESID", columnNames = {"RES_ID"}), -}) +@Table( + name = "HFJ_SUBSCRIPTION_STATS", + uniqueConstraints = { + @UniqueConstraint( + name = "IDX_SUBSC_RESID", + columnNames = {"RES_ID"}), + }) public class SubscriptionTable { @Temporal(TemporalType.TIMESTAMP) @@ -44,9 +48,12 @@ public class SubscriptionTable { private Long myResId; @OneToOne() - @JoinColumn(name = "RES_ID", insertable = true, updatable = false, referencedColumnName = "RES_ID", - foreignKey = @ForeignKey(name = "FK_SUBSC_RESOURCE_ID") - ) + @JoinColumn( + name = "RES_ID", + insertable = true, + updatable = false, + referencedColumnName = "RES_ID", + foreignKey = @ForeignKey(name = "FK_SUBSC_RESOURCE_ID")) private ResourceTable mySubscriptionResource; /** @@ -56,7 +63,6 @@ public class SubscriptionTable { super(); } - public Date getCreated() { return myCreated; } @@ -76,5 +82,4 @@ public class SubscriptionTable { public void setSubscriptionResource(ResourceTable theSubscriptionResource) { mySubscriptionResource = theSubscriptionResource; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermCodeSystem.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermCodeSystem.java index ab19934b21c..8c4a26b821a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermCodeSystem.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermCodeSystem.java @@ -26,44 +26,64 @@ import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; +import java.io.Serializable; import javax.annotation.Nonnull; import javax.persistence.*; -import java.io.Serializable; import static 
org.apache.commons.lang3.StringUtils.left; import static org.apache.commons.lang3.StringUtils.length; -//@formatter:off -@Table(name = "TRM_CODESYSTEM", uniqueConstraints = { - @UniqueConstraint(name = "IDX_CS_CODESYSTEM", columnNames = {"CODE_SYSTEM_URI"}) -}, indexes = { - @Index(name = "FK_TRMCODESYSTEM_RES", columnList = "RES_ID"), - @Index(name = "FK_TRMCODESYSTEM_CURVER", columnList = "CURRENT_VERSION_PID") -}) +// @formatter:off +@Table( + name = "TRM_CODESYSTEM", + uniqueConstraints = { + @UniqueConstraint( + name = "IDX_CS_CODESYSTEM", + columnNames = {"CODE_SYSTEM_URI"}) + }, + indexes = { + @Index(name = "FK_TRMCODESYSTEM_RES", columnList = "RES_ID"), + @Index(name = "FK_TRMCODESYSTEM_CURVER", columnList = "CURRENT_VERSION_PID") + }) @Entity() -//@formatter:on +// @formatter:on public class TermCodeSystem implements Serializable { public static final int MAX_URL_LENGTH = 200; private static final long serialVersionUID = 1L; private static final int MAX_NAME_LENGTH = 200; + @Column(name = "CODE_SYSTEM_URI", nullable = false, length = MAX_URL_LENGTH) private String myCodeSystemUri; @OneToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "CURRENT_VERSION_PID", referencedColumnName = "PID", nullable = true, foreignKey = @ForeignKey(name = "FK_TRMCODESYSTEM_CURVER")) + @JoinColumn( + name = "CURRENT_VERSION_PID", + referencedColumnName = "PID", + nullable = true, + foreignKey = @ForeignKey(name = "FK_TRMCODESYSTEM_CURVER")) private TermCodeSystemVersion myCurrentVersion; + @Column(name = "CURRENT_VERSION_PID", nullable = true, insertable = false, updatable = false) private Long myCurrentVersionPid; + @Id() @SequenceGenerator(name = "SEQ_CODESYSTEM_PID", sequenceName = "SEQ_CODESYSTEM_PID") @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_CODESYSTEM_PID") @Column(name = "PID") private Long myPid; + @OneToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "RES_ID", referencedColumnName = "RES_ID", nullable = false, updatable = true, foreignKey = @ForeignKey(name = "FK_TRMCODESYSTEM_RES")) + @JoinColumn( + name = "RES_ID", + referencedColumnName = "RES_ID", + nullable = false, + updatable = true, + foreignKey = @ForeignKey(name = "FK_TRMCODESYSTEM_RES")) private ResourceTable myResource; + @Column(name = "RES_ID", insertable = false, updatable = false) private Long myResourcePid; + @Column(name = "CS_NAME", nullable = true, length = MAX_NAME_LENGTH) private String myName; @@ -104,8 +124,10 @@ public class TermCodeSystem implements Serializable { public TermCodeSystem setCodeSystemUri(@Nonnull String theCodeSystemUri) { ValidateUtil.isNotBlankOrThrowIllegalArgument(theCodeSystemUri, "theCodeSystemUri must not be null or empty"); - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theCodeSystemUri, MAX_URL_LENGTH, - "URI exceeds maximum length (" + MAX_URL_LENGTH + "): " + length(theCodeSystemUri)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theCodeSystemUri, + MAX_URL_LENGTH, + "URI exceeds maximum length (" + MAX_URL_LENGTH + "): " + length(theCodeSystemUri)); myCodeSystemUri = theCodeSystemUri; return this; } @@ -149,7 +171,6 @@ public class TermCodeSystem implements Serializable { b.append("currentVersionPid", myCurrentVersionPid); b.append("resourcePid", myResourcePid); b.append("name", myName); - return b - .toString(); + return b.toString(); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermCodeSystemVersion.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermCodeSystemVersion.java index 
2766ca27485..cfb5bb36e10 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermCodeSystemVersion.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermCodeSystemVersion.java @@ -26,6 +26,9 @@ import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collection; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.FetchType; @@ -41,21 +44,21 @@ import javax.persistence.OneToOne; import javax.persistence.SequenceGenerator; import javax.persistence.Table; import javax.persistence.UniqueConstraint; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Collection; import static org.apache.commons.lang3.StringUtils.length; -@Table(name = "TRM_CODESYSTEM_VER", - // Note, we used to have a constraint named IDX_CSV_RESOURCEPID_AND_VER (don't reuse this) - uniqueConstraints = { - @UniqueConstraint(name = TermCodeSystemVersion.IDX_CODESYSTEM_AND_VER, columnNames = {"CODESYSTEM_PID", "CS_VERSION_ID"}) -}, -indexes = { - @Index(name = "FK_CODESYSVER_RES_ID", columnList = "RES_ID"), - @Index(name = "FK_CODESYSVER_CS_ID", columnList = "CODESYSTEM_PID") -}) +@Table( + name = "TRM_CODESYSTEM_VER", + // Note, we used to have a constraint named IDX_CSV_RESOURCEPID_AND_VER (don't reuse this) + uniqueConstraints = { + @UniqueConstraint( + name = TermCodeSystemVersion.IDX_CODESYSTEM_AND_VER, + columnNames = {"CODESYSTEM_PID", "CS_VERSION_ID"}) + }, + indexes = { + @Index(name = "FK_CODESYSVER_RES_ID", columnList = "RES_ID"), + @Index(name = "FK_CODESYSVER_CS_ID", columnList = "CODESYSTEM_PID") + }) @Entity() public class TermCodeSystemVersion implements Serializable { public static final String IDX_CODESYSTEM_AND_VER = "IDX_CODESYSTEM_AND_VER"; @@ -72,7 +75,12 @@ public class TermCodeSystemVersion implements Serializable { private Long myId; @OneToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "RES_ID", referencedColumnName = "RES_ID", nullable = false, updatable = false, foreignKey = @ForeignKey(name = "FK_CODESYSVER_RES_ID")) + @JoinColumn( + name = "RES_ID", + referencedColumnName = "RES_ID", + nullable = false, + updatable = false, + foreignKey = @ForeignKey(name = "FK_CODESYSVER_RES_ID")) private ResourceTable myResource; @Column(name = "RES_ID", nullable = false, insertable = false, updatable = false) @@ -86,7 +94,11 @@ public class TermCodeSystemVersion implements Serializable { * issued. It should be made non-nullable at some point. 
*/ @ManyToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "CODESYSTEM_PID", referencedColumnName = "PID", nullable = true, foreignKey = @ForeignKey(name = "FK_CODESYSVER_CS_ID")) + @JoinColumn( + name = "CODESYSTEM_PID", + referencedColumnName = "PID", + nullable = true, + foreignKey = @ForeignKey(name = "FK_CODESYSVER_CS_ID")) private TermCodeSystem myCodeSystem; @Column(name = "CODESYSTEM_PID", insertable = false, updatable = false) @@ -106,7 +118,6 @@ public class TermCodeSystemVersion implements Serializable { super(); } - public TermCodeSystem getCodeSystem() { return myCodeSystem; } @@ -122,8 +133,9 @@ public class TermCodeSystemVersion implements Serializable { public TermCodeSystemVersion setCodeSystemVersionId(String theCodeSystemVersionId) { ValidateUtil.isNotTooLongOrThrowIllegalArgument( - theCodeSystemVersionId, MAX_VERSION_LENGTH, - "Version ID exceeds maximum length (" + MAX_VERSION_LENGTH + "): " + length(theCodeSystemVersionId)); + theCodeSystemVersionId, + MAX_VERSION_LENGTH, + "Version ID exceeds maximum length (" + MAX_VERSION_LENGTH + "): " + length(theCodeSystemVersionId)); myCodeSystemVersionId = theCodeSystemVersionId; return this; } @@ -166,9 +178,9 @@ public class TermCodeSystemVersion implements Serializable { TermCodeSystemVersion that = (TermCodeSystemVersion) theO; return new EqualsBuilder() - .append(myCodeSystemVersionId, that.myCodeSystemVersionId) - .append(myCodeSystemPid, that.myCodeSystemPid) - .isEquals(); + .append(myCodeSystemVersionId, that.myCodeSystemVersionId) + .append(myCodeSystemPid, that.myCodeSystemPid) + .isEquals(); } @Override @@ -185,8 +197,9 @@ public class TermCodeSystemVersion implements Serializable { public void setCodeSystemDisplayName(String theCodeSystemDisplayName) { ValidateUtil.isNotTooLongOrThrowIllegalArgument( - theCodeSystemDisplayName, MAX_VERSION_LENGTH, - "Version ID exceeds maximum length (" + MAX_VERSION_LENGTH + "): " + length(theCodeSystemDisplayName)); + theCodeSystemDisplayName, + MAX_VERSION_LENGTH, + "Version ID exceeds maximum length (" + MAX_VERSION_LENGTH + "): " + length(theCodeSystemDisplayName)); myCodeSystemDisplayName = theCodeSystemDisplayName; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java index 68052caaa08..1d615c4758f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java @@ -39,6 +39,14 @@ import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed; import org.hibernate.search.mapper.pojo.mapping.definition.annotation.PropertyBinding; import org.hl7.fhir.r4.model.Coding; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Date; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.persistence.Column; import javax.persistence.Entity; @@ -59,26 +67,23 @@ import javax.persistence.Table; import javax.persistence.Temporal; import javax.persistence.TemporalType; import javax.persistence.UniqueConstraint; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Date; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.stream.Collectors; import static org.apache.commons.lang3.StringUtils.left; import 
static org.apache.commons.lang3.StringUtils.length; @Entity -@Indexed(routingBinder=@RoutingBinderRef(type = DeferConceptIndexingRoutingBinder.class)) -@Table(name = "TRM_CONCEPT", uniqueConstraints = { - @UniqueConstraint(name = "IDX_CONCEPT_CS_CODE", columnNames = {"CODESYSTEM_PID", "CODEVAL"}) -}, indexes = { - @Index(name = "IDX_CONCEPT_INDEXSTATUS", columnList = "INDEX_STATUS"), - @Index(name = "IDX_CONCEPT_UPDATED", columnList = "CONCEPT_UPDATED") -}) +@Indexed(routingBinder = @RoutingBinderRef(type = DeferConceptIndexingRoutingBinder.class)) +@Table( + name = "TRM_CONCEPT", + uniqueConstraints = { + @UniqueConstraint( + name = "IDX_CONCEPT_CS_CODE", + columnNames = {"CODESYSTEM_PID", "CODEVAL"}) + }, + indexes = { + @Index(name = "IDX_CONCEPT_INDEXSTATUS", columnList = "INDEX_STATUS"), + @Index(name = "IDX_CONCEPT_UPDATED", columnList = "CONCEPT_UPDATED") + }) public class TermConcept implements Serializable { public static final int MAX_CODE_LENGTH = 500; public static final int MAX_DESC_LENGTH = 400; @@ -86,11 +91,18 @@ public class TermConcept implements Serializable { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TermConcept.class); private static final long serialVersionUID = 1L; - @OneToMany(fetch = FetchType.LAZY, mappedBy = "myParent", cascade = {}) + @OneToMany( + fetch = FetchType.LAZY, + mappedBy = "myParent", + cascade = {}) private List myChildren; @Column(name = "CODEVAL", nullable = false, length = MAX_CODE_LENGTH) - @FullTextField(name = "myCode", searchable = Searchable.YES, projectable = Projectable.YES, analyzer = "exactAnalyzer") + @FullTextField( + name = "myCode", + searchable = Searchable.YES, + projectable = Projectable.YES, + analyzer = "exactAnalyzer") private String myCode; @Temporal(TemporalType.TIMESTAMP) @@ -98,7 +110,10 @@ public class TermConcept implements Serializable { private Date myUpdated; @ManyToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "CODESYSTEM_PID", referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPT_PID_CS_PID")) + @JoinColumn( + name = "CODESYSTEM_PID", + referencedColumnName = "PID", + foreignKey = @ForeignKey(name = "FK_CONCEPT_PID_CS_PID")) private TermCodeSystemVersion myCodeSystem; @Column(name = "CODESYSTEM_PID", insertable = false, updatable = false) @@ -106,11 +121,31 @@ public class TermConcept implements Serializable { private long myCodeSystemVersionPid; @Column(name = "DISPLAY", nullable = true, length = MAX_DESC_LENGTH) - @FullTextField(name = "myDisplay", searchable = Searchable.YES, projectable = Projectable.YES, analyzer = "standardAnalyzer") - @FullTextField(name = "myDisplayEdgeNGram", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompleteEdgeAnalyzer") - @FullTextField(name = "myDisplayWordEdgeNGram", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompleteWordEdgeAnalyzer") - @FullTextField(name = "myDisplayNGram", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompleteNGramAnalyzer") - @FullTextField(name = "myDisplayPhonetic", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompletePhoneticAnalyzer") + @FullTextField( + name = "myDisplay", + searchable = Searchable.YES, + projectable = Projectable.YES, + analyzer = "standardAnalyzer") + @FullTextField( + name = "myDisplayEdgeNGram", + searchable = Searchable.YES, + projectable = Projectable.NO, + analyzer = "autocompleteEdgeAnalyzer") + @FullTextField( + name = "myDisplayWordEdgeNGram", + searchable = 
Searchable.YES, + projectable = Projectable.NO, + analyzer = "autocompleteWordEdgeAnalyzer") + @FullTextField( + name = "myDisplayNGram", + searchable = Searchable.YES, + projectable = Projectable.NO, + analyzer = "autocompleteNGramAnalyzer") + @FullTextField( + name = "myDisplayPhonetic", + searchable = Searchable.YES, + projectable = Projectable.NO, + analyzer = "autocompletePhoneticAnalyzer") private String myDisplay; @OneToMany(mappedBy = "myConcept", orphanRemoval = false, fetch = FetchType.LAZY) @@ -132,10 +167,17 @@ public class TermConcept implements Serializable { @Lob @Column(name = "PARENT_PIDS", nullable = true) - @FullTextField(name = "myParentPids", searchable = Searchable.YES, projectable = Projectable.YES, analyzer = "conceptParentPidsAnalyzer") + @FullTextField( + name = "myParentPids", + searchable = Searchable.YES, + projectable = Projectable.YES, + analyzer = "conceptParentPidsAnalyzer") private String myParentPids; - @OneToMany(cascade = {}, fetch = FetchType.LAZY, mappedBy = "myChild") + @OneToMany( + cascade = {}, + fetch = FetchType.LAZY, + mappedBy = "myChild") private List myParents; @Column(name = "CODE_SEQUENCE", nullable = true) @@ -183,7 +225,10 @@ public class TermConcept implements Serializable { return designation; } - private TermConceptProperty addProperty(@Nonnull TermConceptPropertyTypeEnum thePropertyType, @Nonnull String thePropertyName, @Nonnull String thePropertyValue) { + private TermConceptProperty addProperty( + @Nonnull TermConceptPropertyTypeEnum thePropertyType, + @Nonnull String thePropertyName, + @Nonnull String thePropertyValue) { Validate.notBlank(thePropertyName); TermConceptProperty property = new TermConceptProperty(); @@ -199,10 +244,14 @@ public class TermConcept implements Serializable { return property; } - public TermConceptProperty addPropertyCoding(@Nonnull String thePropertyName, @Nonnull String thePropertyCodeSystem, @Nonnull String thePropertyCode, String theDisplayName) { + public TermConceptProperty addPropertyCoding( + @Nonnull String thePropertyName, + @Nonnull String thePropertyCodeSystem, + @Nonnull String thePropertyCode, + String theDisplayName) { return addProperty(TermConceptPropertyTypeEnum.CODING, thePropertyName, thePropertyCode) - .setCodeSystem(thePropertyCodeSystem) - .setDisplay(theDisplayName); + .setCodeSystem(thePropertyCodeSystem) + .setDisplay(theDisplayName); } public TermConceptProperty addPropertyString(@Nonnull String thePropertyName, @Nonnull String thePropertyValue) { @@ -239,8 +288,8 @@ public class TermConcept implements Serializable { public TermConcept setCode(@Nonnull String theCode) { ValidateUtil.isNotBlankOrThrowIllegalArgument(theCode, "theCode must not be null or empty"); - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theCode, MAX_CODE_LENGTH, - "Code exceeds maximum length (" + MAX_CODE_LENGTH + "): " + length(theCode)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theCode, MAX_CODE_LENGTH, "Code exceeds maximum length (" + MAX_CODE_LENGTH + "): " + length(theCode)); myCode = theCode; return this; } @@ -440,7 +489,8 @@ public class TermConcept implements Serializable { retVal.add(new IValidationSupport.StringConceptProperty(next.getKey(), next.getValue())); break; case CODING: - retVal.add(new IValidationSupport.CodingConceptProperty(next.getKey(), next.getCodeSystem(), next.getValue(), next.getDisplay())); + retVal.add(new IValidationSupport.CodingConceptProperty( + next.getKey(), next.getCodeSystem(), next.getValue(), next.getDisplay())); break; default: throw new 
IllegalStateException(Msg.code(830) + "Don't know how to handle " + next.getType()); @@ -455,5 +505,4 @@ public class TermConcept implements Serializable { public List getChildCodes() { return getChildren().stream().map(TermConceptParentChildLink::getChild).collect(Collectors.toList()); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptDesignation.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptDesignation.java index 2d57606274d..c430577e061 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptDesignation.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptDesignation.java @@ -23,6 +23,7 @@ import ca.uhn.fhir.util.ValidateUtil; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; +import java.io.Serializable; import javax.annotation.Nonnull; import javax.persistence.Column; import javax.persistence.Entity; @@ -36,17 +37,20 @@ import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import javax.persistence.SequenceGenerator; import javax.persistence.Table; -import java.io.Serializable; import static org.apache.commons.lang3.StringUtils.left; import static org.apache.commons.lang3.StringUtils.length; @Entity -@Table(name = "TRM_CONCEPT_DESIG", uniqueConstraints = { }, indexes = { - // must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index automatically - @Index(name = "FK_CONCEPTDESIG_CONCEPT", columnList = "CONCEPT_PID", unique = false), - @Index(name = "FK_CONCEPTDESIG_CSV", columnList = "CS_VER_PID") -}) +@Table( + name = "TRM_CONCEPT_DESIG", + uniqueConstraints = {}, + indexes = { + // must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index + // automatically + @Index(name = "FK_CONCEPTDESIG_CONCEPT", columnList = "CONCEPT_PID", unique = false), + @Index(name = "FK_CONCEPTDESIG_CSV", columnList = "CS_VER_PID") + }) public class TermConceptDesignation implements Serializable { private static final long serialVersionUID = 1L; @@ -54,21 +58,30 @@ public class TermConceptDesignation implements Serializable { public static final int MAX_VAL_LENGTH = 2000; @ManyToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "CONCEPT_PID", referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPTDESIG_CONCEPT")) + @JoinColumn( + name = "CONCEPT_PID", + referencedColumnName = "PID", + foreignKey = @ForeignKey(name = "FK_CONCEPTDESIG_CONCEPT")) private TermConcept myConcept; + @Id() @SequenceGenerator(name = "SEQ_CONCEPT_DESIG_PID", sequenceName = "SEQ_CONCEPT_DESIG_PID") @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_CONCEPT_DESIG_PID") @Column(name = "PID") private Long myId; + @Column(name = "LANG", nullable = true, length = MAX_LENGTH) private String myLanguage; + @Column(name = "USE_SYSTEM", nullable = true, length = MAX_LENGTH) private String myUseSystem; + @Column(name = "USE_CODE", nullable = true, length = MAX_LENGTH) private String myUseCode; + @Column(name = "USE_DISPLAY", nullable = true, length = MAX_LENGTH) private String myUseDisplay; + @Column(name = "VAL", nullable = false, length = MAX_VAL_LENGTH) private String myValue; /** @@ -77,7 +90,11 @@ public class TermConceptDesignation implements Serializable { * @since 3.5.0 */ @ManyToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "CS_VER_PID", nullable = true, referencedColumnName = "PID", foreignKey = @ForeignKey(name = 
"FK_CONCEPTDESIG_CSV")) + @JoinColumn( + name = "CS_VER_PID", + nullable = true, + referencedColumnName = "PID", + foreignKey = @ForeignKey(name = "FK_CONCEPTDESIG_CSV")) private TermCodeSystemVersion myCodeSystemVersion; public String getLanguage() { @@ -85,8 +102,10 @@ public class TermConceptDesignation implements Serializable { } public TermConceptDesignation setLanguage(String theLanguage) { - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theLanguage, MAX_LENGTH, - "Language exceeds maximum length (" + MAX_LENGTH + "): " + length(theLanguage)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theLanguage, + MAX_LENGTH, + "Language exceeds maximum length (" + MAX_LENGTH + "): " + length(theLanguage)); myLanguage = theLanguage; return this; } @@ -96,8 +115,8 @@ public class TermConceptDesignation implements Serializable { } public TermConceptDesignation setUseCode(String theUseCode) { - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theUseCode, MAX_LENGTH, - "Use code exceeds maximum length (" + MAX_LENGTH + "): " + length(theUseCode)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theUseCode, MAX_LENGTH, "Use code exceeds maximum length (" + MAX_LENGTH + "): " + length(theUseCode)); myUseCode = theUseCode; return this; } @@ -116,8 +135,10 @@ public class TermConceptDesignation implements Serializable { } public TermConceptDesignation setUseSystem(String theUseSystem) { - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theUseSystem, MAX_LENGTH, - "Use system exceeds maximum length (" + MAX_LENGTH + "): " + length(theUseSystem)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theUseSystem, + MAX_LENGTH, + "Use system exceeds maximum length (" + MAX_LENGTH + "): " + length(theUseSystem)); myUseSystem = theUseSystem; return this; } @@ -128,8 +149,8 @@ public class TermConceptDesignation implements Serializable { public TermConceptDesignation setValue(@Nonnull String theValue) { ValidateUtil.isNotBlankOrThrowIllegalArgument(theValue, "theValue must not be null or empty"); - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theValue, MAX_VAL_LENGTH, - "Value exceeds maximum length (" + MAX_VAL_LENGTH + "): " + length(theValue)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theValue, MAX_VAL_LENGTH, "Value exceeds maximum length (" + MAX_VAL_LENGTH + "): " + length(theValue)); myValue = theValue; return this; } @@ -144,7 +165,6 @@ public class TermConceptDesignation implements Serializable { return this; } - public Long getPid() { return myId; } @@ -152,13 +172,13 @@ public class TermConceptDesignation implements Serializable { @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("conceptPid", myConcept.getId()) - .append("pid", myId) - .append("language", myLanguage) - .append("useSystem", myUseSystem) - .append("useCode", myUseCode) - .append("useDisplay", myUseDisplay) - .append("value", myValue) - .toString(); + .append("conceptPid", myConcept.getId()) + .append("pid", myId) + .append("language", myLanguage) + .append("useSystem", myUseSystem) + .append("useCode", myUseCode) + .append("useDisplay", myUseDisplay) + .append("value", myValue) + .toString(); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMap.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMap.java index b46f9a9f637..0b0a511f232 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMap.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMap.java @@ -24,21 +24,27 @@ import ca.uhn.fhir.util.ValidateUtil; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; -import javax.annotation.Nonnull; -import javax.persistence.*; import java.io.Serializable; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nonnull; +import javax.persistence.*; import static org.apache.commons.lang3.StringUtils.length; @Entity -@Table(name = "TRM_CONCEPT_MAP", uniqueConstraints = { - @UniqueConstraint(name = "IDX_CONCEPT_MAP_URL", columnNames = {"URL", "VER"}) -}, indexes = { - // must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index automatically - @Index(name = "FK_TRMCONCEPTMAP_RES", columnList = "RES_ID") -}) +@Table( + name = "TRM_CONCEPT_MAP", + uniqueConstraints = { + @UniqueConstraint( + name = "IDX_CONCEPT_MAP_URL", + columnNames = {"URL", "VER"}) + }, + indexes = { + // must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index + // automatically + @Index(name = "FK_TRMCONCEPTMAP_RES", columnList = "RES_ID") + }) public class TermConceptMap implements Serializable { private static final long serialVersionUID = 1L; @@ -59,7 +65,12 @@ public class TermConceptMap implements Serializable { private Long myId; @OneToOne() - @JoinColumn(name = "RES_ID", referencedColumnName = "RES_ID", nullable = false, updatable = false, foreignKey = @ForeignKey(name = "FK_TRMCONCEPTMAP_RES")) + @JoinColumn( + name = "RES_ID", + referencedColumnName = "RES_ID", + nullable = false, + updatable = false, + foreignKey = @ForeignKey(name = "FK_TRMCONCEPTMAP_RES")) private ResourceTable myResource; @Column(name = "RES_ID", insertable = false, updatable = false) @@ -76,7 +87,7 @@ public class TermConceptMap implements Serializable { @Column(name = "VER", nullable = true, length = MAX_VER_LENGTH) private String myVersion; - + @OneToMany(mappedBy = "myConceptMap") private List myConceptMapGroups; @@ -115,8 +126,10 @@ public class TermConceptMap implements Serializable { } public TermConceptMap setSource(String theSource) { - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theSource, TermValueSet.MAX_URL_LENGTH, - "Source exceeds maximum length (" + TermValueSet.MAX_URL_LENGTH + "): " + length(theSource)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theSource, + TermValueSet.MAX_URL_LENGTH, + "Source exceeds maximum length (" + TermValueSet.MAX_URL_LENGTH + "): " + length(theSource)); mySource = theSource; return this; } @@ -126,8 +139,10 @@ public class TermConceptMap implements Serializable { } public TermConceptMap setTarget(String theTarget) { - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theTarget, TermValueSet.MAX_URL_LENGTH, - "Target exceeds maximum length (" + TermValueSet.MAX_URL_LENGTH + "): " + length(theTarget)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theTarget, + TermValueSet.MAX_URL_LENGTH, + "Target exceeds maximum length (" + TermValueSet.MAX_URL_LENGTH + "): " + length(theTarget)); myTarget = theTarget; return this; } @@ -138,8 +153,8 @@ public class TermConceptMap implements Serializable { public TermConceptMap setUrl(@Nonnull String theUrl) { ValidateUtil.isNotBlankOrThrowIllegalArgument(theUrl, "theUrl must not be null or empty"); - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theUrl, MAX_URL_LENGTH, - "URL exceeds maximum length (" + MAX_URL_LENGTH + "): " + length(theUrl)); + 
ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theUrl, MAX_URL_LENGTH, "URL exceeds maximum length (" + MAX_URL_LENGTH + "): " + length(theUrl)); myUrl = theUrl; return this; } @@ -149,23 +164,28 @@ public class TermConceptMap implements Serializable { } public TermConceptMap setVersion(String theVersion) { - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theVersion, MAX_VER_LENGTH, - "Version exceeds maximum length (" + MAX_VER_LENGTH + "): " + length(theVersion)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theVersion, + MAX_VER_LENGTH, + "Version exceeds maximum length (" + MAX_VER_LENGTH + "): " + length(theVersion)); myVersion = theVersion; return this; } - + @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("myId", myId) - .append(myResource != null ? ("myResource=" + myResource.toString()) : ("myResource=(null)")) - .append("myResourcePid", myResourcePid) - .append("mySource", mySource) - .append("myTarget", myTarget) - .append("myUrl", myUrl) - .append("myVersion", myVersion) - .append(myConceptMapGroups != null ? ("myConceptMapGroups - size=" + myConceptMapGroups.size()) : ("myConceptMapGroups=(null)")) - .toString(); + .append("myId", myId) + .append(myResource != null ? ("myResource=" + myResource.toString()) : ("myResource=(null)")) + .append("myResourcePid", myResourcePid) + .append("mySource", mySource) + .append("myTarget", myTarget) + .append("myUrl", myUrl) + .append("myVersion", myVersion) + .append( + myConceptMapGroups != null + ? ("myConceptMapGroups - size=" + myConceptMapGroups.size()) + : ("myConceptMapGroups=(null)")) + .toString(); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMapGroup.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMapGroup.java index 41332118ac5..4a8aaab630e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMapGroup.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMapGroup.java @@ -23,19 +23,18 @@ import ca.uhn.fhir.util.ValidateUtil; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; -import javax.annotation.Nonnull; -import javax.persistence.*; import java.io.Serializable; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nonnull; +import javax.persistence.*; import static org.apache.commons.lang3.StringUtils.length; @Entity -@Table(name = "TRM_CONCEPT_MAP_GROUP", -indexes = { - @Index(name = "FK_TCMGROUP_CONCEPTMAP", columnList = "CONCEPT_MAP_PID") -}) +@Table( + name = "TRM_CONCEPT_MAP_GROUP", + indexes = {@Index(name = "FK_TCMGROUP_CONCEPTMAP", columnList = "CONCEPT_MAP_PID")}) public class TermConceptMapGroup implements Serializable { private static final long serialVersionUID = 1L; @@ -46,7 +45,11 @@ public class TermConceptMapGroup implements Serializable { private Long myId; @ManyToOne() - @JoinColumn(name = "CONCEPT_MAP_PID", nullable = false, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_TCMGROUP_CONCEPTMAP")) + @JoinColumn( + name = "CONCEPT_MAP_PID", + nullable = false, + referencedColumnName = "PID", + foreignKey = @ForeignKey(name = "FK_TCMGROUP_CONCEPTMAP")) private TermConceptMap myConceptMap; @Column(name = "SOURCE_URL", nullable = false, length = TermCodeSystem.MAX_URL_LENGTH) @@ -64,13 +67,13 @@ public class TermConceptMapGroup implements Serializable { @OneToMany(mappedBy = "myConceptMapGroup") 
private List myConceptMapGroupElements; - @Column(name= "CONCEPT_MAP_URL", nullable = true, length = TermConceptMap.MAX_URL_LENGTH) + @Column(name = "CONCEPT_MAP_URL", nullable = true, length = TermConceptMap.MAX_URL_LENGTH) private String myConceptMapUrl; - @Column(name= "SOURCE_VS", nullable = true, length = TermValueSet.MAX_URL_LENGTH) + @Column(name = "SOURCE_VS", nullable = true, length = TermValueSet.MAX_URL_LENGTH) private String mySourceValueSet; - @Column(name= "TARGET_VS", nullable = true, length = TermValueSet.MAX_URL_LENGTH) + @Column(name = "TARGET_VS", nullable = true, length = TermValueSet.MAX_URL_LENGTH) private String myTargetValueSet; public TermConceptMap getConceptMap() { @@ -107,8 +110,10 @@ public class TermConceptMapGroup implements Serializable { public TermConceptMapGroup setSource(@Nonnull String theSource) { ValidateUtil.isNotBlankOrThrowIllegalArgument(theSource, "theSource must not be null or empty"); - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theSource, TermCodeSystem.MAX_URL_LENGTH, - "Source exceeds maximum length (" + TermCodeSystem.MAX_URL_LENGTH + "): " + length(theSource)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theSource, + TermCodeSystem.MAX_URL_LENGTH, + "Source exceeds maximum length (" + TermCodeSystem.MAX_URL_LENGTH + "): " + length(theSource)); this.mySource = theSource; return this; } @@ -125,8 +130,11 @@ public class TermConceptMapGroup implements Serializable { } public TermConceptMapGroup setSourceVersion(String theSourceVersion) { - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theSourceVersion, TermCodeSystemVersion.MAX_VERSION_LENGTH, - "Source version ID exceeds maximum length (" + TermCodeSystemVersion.MAX_VERSION_LENGTH + "): " + length(theSourceVersion)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theSourceVersion, + TermCodeSystemVersion.MAX_VERSION_LENGTH, + "Source version ID exceeds maximum length (" + TermCodeSystemVersion.MAX_VERSION_LENGTH + "): " + + length(theSourceVersion)); mySourceVersion = theSourceVersion; return this; } @@ -137,8 +145,10 @@ public class TermConceptMapGroup implements Serializable { public TermConceptMapGroup setTarget(@Nonnull String theTarget) { ValidateUtil.isNotBlankOrThrowIllegalArgument(theTarget, "theTarget must not be null or empty"); - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theTarget, TermCodeSystem.MAX_URL_LENGTH, - "Target exceeds maximum length (" + TermCodeSystem.MAX_URL_LENGTH + "): " + length(theTarget)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theTarget, + TermCodeSystem.MAX_URL_LENGTH, + "Target exceeds maximum length (" + TermCodeSystem.MAX_URL_LENGTH + "): " + length(theTarget)); this.myTarget = theTarget; return this; } @@ -155,8 +165,11 @@ public class TermConceptMapGroup implements Serializable { } public TermConceptMapGroup setTargetVersion(String theTargetVersion) { - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theTargetVersion, TermCodeSystemVersion.MAX_VERSION_LENGTH, - "Target version ID exceeds maximum length (" + TermCodeSystemVersion.MAX_VERSION_LENGTH + "): " + length(theTargetVersion)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theTargetVersion, + TermCodeSystemVersion.MAX_VERSION_LENGTH, + "Target version ID exceeds maximum length (" + TermCodeSystemVersion.MAX_VERSION_LENGTH + "): " + + length(theTargetVersion)); myTargetVersion = theTargetVersion; return this; } @@ -164,16 +177,19 @@ public class TermConceptMapGroup implements Serializable { @Override public String toString() { return new 
ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("myId", myId) - .append(myConceptMap != null ? ("myConceptMap - id=" + myConceptMap.getId()) : ("myConceptMap=(null)")) - .append("mySource", mySource) - .append("mySourceVersion", mySourceVersion) - .append("myTarget", myTarget) - .append("myTargetVersion", myTargetVersion) - .append(myConceptMapGroupElements != null ? ("myConceptMapGroupElements - size=" + myConceptMapGroupElements.size()) : ("myConceptMapGroupElements=(null)")) - .append("myConceptMapUrl", this.getConceptMapUrl()) - .append("mySourceValueSet", this.getSourceValueSet()) - .append("myTargetValueSet", this.getTargetValueSet()) - .toString(); + .append("myId", myId) + .append(myConceptMap != null ? ("myConceptMap - id=" + myConceptMap.getId()) : ("myConceptMap=(null)")) + .append("mySource", mySource) + .append("mySourceVersion", mySourceVersion) + .append("myTarget", myTarget) + .append("myTargetVersion", myTargetVersion) + .append( + myConceptMapGroupElements != null + ? ("myConceptMapGroupElements - size=" + myConceptMapGroupElements.size()) + : ("myConceptMapGroupElements=(null)")) + .append("myConceptMapUrl", this.getConceptMapUrl()) + .append("mySourceValueSet", this.getSourceValueSet()) + .append("myTargetValueSet", this.getTargetValueSet()) + .toString(); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMapGroupElement.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMapGroupElement.java index e21e71ae239..9c5aae83d37 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMapGroupElement.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMapGroupElement.java @@ -25,20 +25,22 @@ import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; -import javax.annotation.Nonnull; -import javax.persistence.*; import java.io.Serializable; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nonnull; +import javax.persistence.*; import static org.apache.commons.lang3.StringUtils.left; import static org.apache.commons.lang3.StringUtils.length; @Entity -@Table(name = "TRM_CONCEPT_MAP_GRP_ELEMENT", indexes = { - @Index(name = "IDX_CNCPT_MAP_GRP_CD", columnList = "SOURCE_CODE"), - @Index(name = "FK_TCMGELEMENT_GROUP", columnList = "CONCEPT_MAP_GROUP_PID") -}) +@Table( + name = "TRM_CONCEPT_MAP_GRP_ELEMENT", + indexes = { + @Index(name = "IDX_CNCPT_MAP_GRP_CD", columnList = "SOURCE_CODE"), + @Index(name = "FK_TCMGELEMENT_GROUP", columnList = "CONCEPT_MAP_GROUP_PID") + }) public class TermConceptMapGroupElement implements Serializable { private static final long serialVersionUID = 1L; @@ -49,7 +51,11 @@ public class TermConceptMapGroupElement implements Serializable { private Long myId; @ManyToOne() - @JoinColumn(name = "CONCEPT_MAP_GROUP_PID", nullable = false, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_TCMGELEMENT_GROUP")) + @JoinColumn( + name = "CONCEPT_MAP_GROUP_PID", + nullable = false, + referencedColumnName = "PID", + foreignKey = @ForeignKey(name = "FK_TCMGELEMENT_GROUP")) private TermConceptMapGroup myConceptMapGroup; @Column(name = "SOURCE_CODE", nullable = false, length = TermConcept.MAX_CODE_LENGTH) @@ -79,8 +85,10 @@ public class TermConceptMapGroupElement implements Serializable { public TermConceptMapGroupElement setCode(@Nonnull String theCode) { 
ValidateUtil.isNotBlankOrThrowIllegalArgument(theCode, "theCode must not be null or empty"); - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theCode, TermConcept.MAX_CODE_LENGTH, - "Code exceeds maximum length (" + TermConcept.MAX_CODE_LENGTH + "): " + length(theCode)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theCode, + TermConcept.MAX_CODE_LENGTH, + "Code exceeds maximum length (" + TermConcept.MAX_CODE_LENGTH + "): " + length(theCode)); myCode = theCode; return this; } @@ -152,33 +160,39 @@ public class TermConceptMapGroupElement implements Serializable { TermConceptMapGroupElement that = (TermConceptMapGroupElement) o; return new EqualsBuilder() - .append(getCode(), that.getCode()) - .append(getSystem(), that.getSystem()) - .append(getSystemVersion(), that.getSystemVersion()) - .isEquals(); + .append(getCode(), that.getCode()) + .append(getSystem(), that.getSystem()) + .append(getSystemVersion(), that.getSystemVersion()) + .isEquals(); } @Override public int hashCode() { return new HashCodeBuilder(17, 37) - .append(getCode()) - .append(getSystem()) - .append(getSystemVersion()) - .toHashCode(); + .append(getCode()) + .append(getSystem()) + .append(getSystemVersion()) + .toHashCode(); } @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("myId", myId) - .append(myConceptMapGroup != null ? ("myConceptMapGroup - id=" + myConceptMapGroup.getId()) : ("myConceptMapGroup=(null)")) - .append("myCode", myCode) - .append("myDisplay", myDisplay) - .append(myConceptMapGroupElementTargets != null ? ("myConceptMapGroupElementTargets - size=" + myConceptMapGroupElementTargets.size()) : ("myConceptMapGroupElementTargets=(null)")) - .append("myConceptMapUrl", this.getConceptMapUrl()) - .append("mySystem", this.getSystem()) - .append("mySystemVersion", this.getSystemVersion()) - .append("myValueSet", this.getValueSet()) - .toString(); + .append("myId", myId) + .append( + myConceptMapGroup != null + ? ("myConceptMapGroup - id=" + myConceptMapGroup.getId()) + : ("myConceptMapGroup=(null)")) + .append("myCode", myCode) + .append("myDisplay", myDisplay) + .append( + myConceptMapGroupElementTargets != null + ? 
("myConceptMapGroupElementTargets - size=" + myConceptMapGroupElementTargets.size()) + : ("myConceptMapGroupElementTargets=(null)")) + .append("myConceptMapUrl", this.getConceptMapUrl()) + .append("mySystem", this.getSystem()) + .append("mySystemVersion", this.getSystemVersion()) + .append("myValueSet", this.getValueSet()) + .toString(); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMapGroupElementTarget.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMapGroupElementTarget.java index 3a66c5ec9b8..fd51394c985 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMapGroupElementTarget.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMapGroupElementTarget.java @@ -26,17 +26,19 @@ import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; import org.hl7.fhir.r4.model.Enumerations.ConceptMapEquivalence; +import java.io.Serializable; import javax.annotation.Nonnull; import javax.persistence.*; -import java.io.Serializable; import static org.apache.commons.lang3.StringUtils.length; @Entity -@Table(name = "TRM_CONCEPT_MAP_GRP_ELM_TGT", indexes = { - @Index(name = "IDX_CNCPT_MP_GRP_ELM_TGT_CD", columnList = "TARGET_CODE"), - @Index(name = "FK_TCMGETARGET_ELEMENT", columnList = "CONCEPT_MAP_GRP_ELM_PID") -}) +@Table( + name = "TRM_CONCEPT_MAP_GRP_ELM_TGT", + indexes = { + @Index(name = "IDX_CNCPT_MP_GRP_ELM_TGT_CD", columnList = "TARGET_CODE"), + @Index(name = "FK_TCMGETARGET_ELEMENT", columnList = "CONCEPT_MAP_GRP_ELM_PID") + }) public class TermConceptMapGroupElementTarget implements Serializable { private static final long serialVersionUID = 1L; @@ -49,7 +51,11 @@ public class TermConceptMapGroupElementTarget implements Serializable { private Long myId; @ManyToOne() - @JoinColumn(name = "CONCEPT_MAP_GRP_ELM_PID", nullable = false, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_TCMGETARGET_ELEMENT")) + @JoinColumn( + name = "CONCEPT_MAP_GRP_ELM_PID", + nullable = false, + referencedColumnName = "PID", + foreignKey = @ForeignKey(name = "FK_TCMGETARGET_ELEMENT")) private TermConceptMapGroupElement myConceptMapGroupElement; @Column(name = "TARGET_CODE", nullable = false, length = TermConcept.MAX_CODE_LENGTH) @@ -80,8 +86,10 @@ public class TermConceptMapGroupElementTarget implements Serializable { public TermConceptMapGroupElementTarget setCode(@Nonnull String theCode) { ValidateUtil.isNotBlankOrThrowIllegalArgument(theCode, "theCode must not be null or empty"); - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theCode, TermConcept.MAX_CODE_LENGTH, - "Code exceeds maximum length (" + TermConcept.MAX_CODE_LENGTH + "): " + length(theCode)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theCode, + TermConcept.MAX_CODE_LENGTH, + "Code exceeds maximum length (" + TermConcept.MAX_CODE_LENGTH + "): " + length(theCode)); myCode = theCode; return this; } @@ -96,7 +104,10 @@ public class TermConceptMapGroupElementTarget implements Serializable { public String getConceptMapUrl() { if (myConceptMapUrl == null) { - myConceptMapUrl = getConceptMapGroupElement().getConceptMapGroup().getConceptMap().getUrl(); + myConceptMapUrl = getConceptMapGroupElement() + .getConceptMapGroup() + .getConceptMap() + .getUrl(); } return myConceptMapUrl; } @@ -153,35 +164,38 @@ public class TermConceptMapGroupElementTarget implements Serializable { TermConceptMapGroupElementTarget that = 
(TermConceptMapGroupElementTarget) o; return new EqualsBuilder() - .append(getCode(), that.getCode()) - .append(getEquivalence(), that.getEquivalence()) - .append(getSystem(), that.getSystem()) - .append(getSystemVersion(), that.getSystemVersion()) - .isEquals(); + .append(getCode(), that.getCode()) + .append(getEquivalence(), that.getEquivalence()) + .append(getSystem(), that.getSystem()) + .append(getSystemVersion(), that.getSystemVersion()) + .isEquals(); } @Override public int hashCode() { return new HashCodeBuilder(17, 37) - .append(getCode()) - .append(getEquivalence()) - .append(getSystem()) - .append(getSystemVersion()) - .toHashCode(); + .append(getCode()) + .append(getEquivalence()) + .append(getSystem()) + .append(getSystemVersion()) + .toHashCode(); } @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("myId", myId) - .append(myConceptMapGroupElement != null ? ("myConceptMapGroupElement - id=" + myConceptMapGroupElement.getId()) : ("myConceptMapGroupElement=(null)")) - .append("myCode", myCode) - .append("myDisplay", myDisplay) - .append("myEquivalence", myEquivalence.toCode()) - .append("myConceptMapUrl", this.getConceptMapUrl()) - .append("mySystem", this.getSystem()) - .append("mySystemVersion", this.getSystemVersion()) - .append("myValueSet", this.getValueSet()) - .toString(); + .append("myId", myId) + .append( + myConceptMapGroupElement != null + ? ("myConceptMapGroupElement - id=" + myConceptMapGroupElement.getId()) + : ("myConceptMapGroupElement=(null)")) + .append("myCode", myCode) + .append("myDisplay", myDisplay) + .append("myEquivalence", myEquivalence.toCode()) + .append("myConceptMapUrl", this.getConceptMapUrl()) + .append("mySystem", this.getSystem()) + .append("mySystemVersion", this.getSystemVersion()) + .append("myValueSet", this.getValueSet()) + .toString(); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptParentChildLink.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptParentChildLink.java index a1a536f456a..74ff7f63040 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptParentChildLink.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptParentChildLink.java @@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.entity; import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField; +import java.io.Serializable; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.EnumType; @@ -35,20 +36,26 @@ import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import javax.persistence.SequenceGenerator; import javax.persistence.Table; -import java.io.Serializable; @Entity -@Table(name = "TRM_CONCEPT_PC_LINK", indexes = { - // must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index automatically - @Index(name = "FK_TERM_CONCEPTPC_CHILD", columnList = "CHILD_PID", unique = false), - @Index(name = "FK_TERM_CONCEPTPC_PARENT", columnList = "PARENT_PID", unique = false), - @Index(name = "FK_TERM_CONCEPTPC_CS", columnList = "CODESYSTEM_PID") -}) +@Table( + name = "TRM_CONCEPT_PC_LINK", + indexes = { + // must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index + // automatically + @Index(name = "FK_TERM_CONCEPTPC_CHILD", columnList = "CHILD_PID", unique = false), + @Index(name = "FK_TERM_CONCEPTPC_PARENT", columnList = "PARENT_PID", unique = 
false), + @Index(name = "FK_TERM_CONCEPTPC_CS", columnList = "CODESYSTEM_PID") + }) public class TermConceptParentChildLink implements Serializable { private static final long serialVersionUID = 1L; @ManyToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "CHILD_PID", nullable = false, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_TERM_CONCEPTPC_CHILD")) + @JoinColumn( + name = "CHILD_PID", + nullable = false, + referencedColumnName = "PID", + foreignKey = @ForeignKey(name = "FK_TERM_CONCEPTPC_CHILD")) private TermConcept myChild; @Column(name = "CHILD_PID", insertable = false, updatable = false) @@ -62,8 +69,14 @@ public class TermConceptParentChildLink implements Serializable { @FullTextField(name = "myCodeSystemVersionPid") private long myCodeSystemVersionPid; - @ManyToOne(fetch = FetchType.LAZY, cascade = {}) - @JoinColumn(name = "PARENT_PID", nullable = false, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_TERM_CONCEPTPC_PARENT")) + @ManyToOne( + fetch = FetchType.LAZY, + cascade = {}) + @JoinColumn( + name = "PARENT_PID", + nullable = false, + referencedColumnName = "PID", + foreignKey = @ForeignKey(name = "FK_TERM_CONCEPTPC_PARENT")) private TermConcept myParent; @Column(name = "PARENT_PID", insertable = false, updatable = false) @@ -81,30 +94,20 @@ public class TermConceptParentChildLink implements Serializable { @Override public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; TermConceptParentChildLink other = (TermConceptParentChildLink) obj; if (myChild == null) { - if (other.myChild != null) - return false; - } else if (!myChild.equals(other.myChild)) - return false; + if (other.myChild != null) return false; + } else if (!myChild.equals(other.myChild)) return false; if (myCodeSystem == null) { - if (other.myCodeSystem != null) - return false; - } else if (!myCodeSystem.equals(other.myCodeSystem)) - return false; + if (other.myCodeSystem != null) return false; + } else if (!myCodeSystem.equals(other.myCodeSystem)) return false; if (myParent == null) { - if (other.myParent != null) - return false; - } else if (!myParent.equals(other.myParent)) - return false; - if (myRelationshipType != other.myRelationshipType) - return false; + if (other.myParent != null) return false; + } else if (!myParent.equals(other.myParent)) return false; + if (myRelationshipType != other.myRelationshipType) return false; return true; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptProperty.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptProperty.java index 656ae55a231..ca7a104bf31 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptProperty.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptProperty.java @@ -31,6 +31,8 @@ import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextFi import org.hibernate.search.mapper.pojo.mapping.definition.annotation.GenericField; import org.hibernate.validator.constraints.NotBlank; +import java.io.Serializable; +import java.nio.charset.StandardCharsets; import javax.annotation.Nonnull; import javax.persistence.Column; import javax.persistence.Entity; @@ -45,25 +47,30 @@ import javax.persistence.Lob; import javax.persistence.ManyToOne; import 
javax.persistence.SequenceGenerator; import javax.persistence.Table; -import java.io.Serializable; -import java.nio.charset.StandardCharsets; import static org.apache.commons.lang3.StringUtils.left; import static org.apache.commons.lang3.StringUtils.length; @Entity -@Table(name = "TRM_CONCEPT_PROPERTY", uniqueConstraints = { }, indexes = { - // must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index automatically - @Index(name = "FK_CONCEPTPROP_CONCEPT", columnList = "CONCEPT_PID", unique = false), - @Index(name = "FK_CONCEPTPROP_CSV", columnList = "CS_VER_PID") -}) +@Table( + name = "TRM_CONCEPT_PROPERTY", + uniqueConstraints = {}, + indexes = { + // must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index + // automatically + @Index(name = "FK_CONCEPTPROP_CONCEPT", columnList = "CONCEPT_PID", unique = false), + @Index(name = "FK_CONCEPTPROP_CSV", columnList = "CS_VER_PID") + }) public class TermConceptProperty implements Serializable { public static final int MAX_PROPTYPE_ENUM_LENGTH = 6; private static final long serialVersionUID = 1L; private static final int MAX_LENGTH = 500; @ManyToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "CONCEPT_PID", referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPTPROP_CONCEPT")) + @JoinColumn( + name = "CONCEPT_PID", + referencedColumnName = "PID", + foreignKey = @ForeignKey(name = "FK_CONCEPTPROP_CONCEPT")) private TermConcept myConcept; /** @@ -72,7 +79,11 @@ public class TermConceptProperty implements Serializable { * @since 3.5.0 */ @ManyToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "CS_VER_PID", nullable = true, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPTPROP_CSV")) + @JoinColumn( + name = "CS_VER_PID", + nullable = true, + referencedColumnName = "PID", + foreignKey = @ForeignKey(name = "FK_CONCEPTPROP_CSV")) private TermCodeSystemVersion myCodeSystemVersion; @Id() @@ -129,8 +140,10 @@ public class TermConceptProperty implements Serializable { * Relevant only for properties of type {@link TermConceptPropertyTypeEnum#CODING} */ public TermConceptProperty setCodeSystem(String theCodeSystem) { - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theCodeSystem, MAX_LENGTH, - "Property code system exceeds maximum length (" + MAX_LENGTH + "): " + length(theCodeSystem)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theCodeSystem, + MAX_LENGTH, + "Property code system exceeds maximum length (" + MAX_LENGTH + "): " + length(theCodeSystem)); myCodeSystem = theCodeSystem; return this; } @@ -156,8 +169,8 @@ public class TermConceptProperty implements Serializable { public TermConceptProperty setKey(@Nonnull String theKey) { ValidateUtil.isNotBlankOrThrowIllegalArgument(theKey, "theKey must not be null or empty"); - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theKey, MAX_LENGTH, - "Code exceeds maximum length (" + MAX_LENGTH + "): " + length(theKey)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theKey, MAX_LENGTH, "Code exceeds maximum length (" + MAX_LENGTH + "): " + length(theKey)); myKey = theKey; return this; } @@ -235,10 +248,10 @@ public class TermConceptProperty implements Serializable { @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("conceptPid", myConcept.getId()) - .append("key", myKey) - .append("value", getValue()) - .toString(); + .append("conceptPid", myConcept.getId()) + .append("key", myKey) + .append("value", getValue()) + 
.toString(); } @Override @@ -254,23 +267,23 @@ public class TermConceptProperty implements Serializable { TermConceptProperty that = (TermConceptProperty) theO; return new EqualsBuilder() - .append(myKey, that.myKey) - .append(myValue, that.myValue) - .append(myType, that.myType) - .append(myCodeSystem, that.myCodeSystem) - .append(myDisplay, that.myDisplay) - .isEquals(); + .append(myKey, that.myKey) + .append(myValue, that.myValue) + .append(myType, that.myType) + .append(myCodeSystem, that.myCodeSystem) + .append(myDisplay, that.myDisplay) + .isEquals(); } @Override public int hashCode() { return new HashCodeBuilder(17, 37) - .append(myKey) - .append(myValue) - .append(myType) - .append(myCodeSystem) - .append(myDisplay) - .toHashCode(); + .append(myKey) + .append(myValue) + .append(myType) + .append(myCodeSystem) + .append(myDisplay) + .toHashCode(); } public Long getPid() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptPropertyBinder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptPropertyBinder.java index 0045edbc795..9ae787549a6 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptPropertyBinder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptPropertyBinder.java @@ -38,7 +38,6 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; */ public class TermConceptPropertyBinder implements PropertyBinder { - public static final String CONCEPT_PROPERTY_PREFIX_NAME = "P:"; private static final Logger ourLog = LoggerFactory.getLogger(TermConceptPropertyBinder.class); @@ -48,12 +47,14 @@ public class TermConceptPropertyBinder implements PropertyBinder { thePropertyBindingContext.dependencies().use("myKey").use("myValue"); IndexSchemaElement indexSchemaElement = thePropertyBindingContext.indexSchemaElement(); - //In order to support dynamic fields, we have to use field templates. We _must_ define the template at bootstrap time and cannot - //create them adhoc. https://docs.jboss.org/hibernate/search/6.0/reference/en-US/html_single/#mapper-orm-bridge-index-field-dsl-dynamic - indexSchemaElement.fieldTemplate("propTemplate", IndexFieldTypeFactory::asString) - .matchingPathGlob(CONCEPT_PROPERTY_PREFIX_NAME + "*") - .multiValued(); - + // In order to support dynamic fields, we have to use field templates. We _must_ define the template at + // bootstrap time and cannot + // create them adhoc. 
+ // https://docs.jboss.org/hibernate/search/6.0/reference/en-US/html_single/#mapper-orm-bridge-index-field-dsl-dynamic + indexSchemaElement + .fieldTemplate("propTemplate", IndexFieldTypeFactory::asString) + .matchingPathGlob(CONCEPT_PROPERTY_PREFIX_NAME + "*") + .multiValued(); thePropertyBindingContext.bridge(Collection.class, new TermConceptPropertyBridge()); } @@ -62,7 +63,10 @@ public class TermConceptPropertyBinder implements PropertyBinder { private static class TermConceptPropertyBridge implements PropertyBridge { @Override - public void write(DocumentElement theDocument, Collection theObject, PropertyBridgeWriteContext thePropertyBridgeWriteContext) { + public void write( + DocumentElement theDocument, + Collection theObject, + PropertyBridgeWriteContext thePropertyBridgeWriteContext) { @SuppressWarnings("unchecked") Collection properties = (Collection) theObject; @@ -70,10 +74,15 @@ public class TermConceptPropertyBinder implements PropertyBinder { if (properties != null) { for (TermConceptProperty next : properties) { theDocument.addValue(CONCEPT_PROPERTY_PREFIX_NAME + next.getKey(), next.getValue()); - ourLog.trace("Adding Prop: {}{} -- {}", CONCEPT_PROPERTY_PREFIX_NAME, next.getKey(), next.getValue()); + ourLog.trace( + "Adding Prop: {}{} -- {}", CONCEPT_PROPERTY_PREFIX_NAME, next.getKey(), next.getValue()); if (next.getType() == TermConceptPropertyTypeEnum.CODING && isNotBlank(next.getDisplay())) { theDocument.addValue(CONCEPT_PROPERTY_PREFIX_NAME + next.getKey(), next.getDisplay()); - ourLog.trace("Adding multivalue Prop: {}{} -- {}", CONCEPT_PROPERTY_PREFIX_NAME, next.getKey(), next.getDisplay()); + ourLog.trace( + "Adding multivalue Prop: {}{} -- {}", + CONCEPT_PROPERTY_PREFIX_NAME, + next.getKey(), + next.getDisplay()); } } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSet.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSet.java index 658c1710ef7..80060822983 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSet.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSet.java @@ -26,6 +26,10 @@ import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; import org.hibernate.annotations.ColumnDefault; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; import javax.annotation.Nonnull; import javax.persistence.Column; import javax.persistence.Entity; @@ -46,22 +50,22 @@ import javax.persistence.Temporal; import javax.persistence.TemporalType; import javax.persistence.Transient; import javax.persistence.UniqueConstraint; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; import static org.apache.commons.lang3.StringUtils.left; import static org.apache.commons.lang3.StringUtils.length; -@Table(name = "TRM_VALUESET", uniqueConstraints = { - @UniqueConstraint(name = "IDX_VALUESET_URL", columnNames = {"URL", "VER"}) - }, - indexes = { - // must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index automatically - @Index(name = "FK_TRMVALUESET_RES", columnList = "RES_ID" ) - } -) +@Table( + name = "TRM_VALUESET", + uniqueConstraints = { + @UniqueConstraint( + name = "IDX_VALUESET_URL", + columnNames = {"URL", "VER"}) + }, + indexes = { + // must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index + // automatically + 
@Index(name = "FK_TRMVALUESET_RES", columnList = "RES_ID") + }) @Entity() public class TermValueSet implements Serializable { public static final int MAX_EXPANSION_STATUS_LENGTH = 50; @@ -69,6 +73,7 @@ public class TermValueSet implements Serializable { public static final int MAX_URL_LENGTH = 200; public static final int MAX_VER_LENGTH = 200; private static final long serialVersionUID = 1L; + @Id() @SequenceGenerator(name = "SEQ_VALUESET_PID", sequenceName = "SEQ_VALUESET_PID") @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_VALUESET_PID") @@ -82,7 +87,12 @@ public class TermValueSet implements Serializable { private String myVersion; @OneToOne() - @JoinColumn(name = "RES_ID", referencedColumnName = "RES_ID", nullable = false, updatable = false, foreignKey = @ForeignKey(name = "FK_TRMVALUESET_RES")) + @JoinColumn( + name = "RES_ID", + referencedColumnName = "RES_ID", + nullable = false, + updatable = false, + foreignKey = @ForeignKey(name = "FK_TRMVALUESET_RES")) private ResourceTable myResource; @Column(name = "RES_ID", insertable = false, updatable = false) @@ -138,8 +148,8 @@ public class TermValueSet implements Serializable { public TermValueSet setUrl(@Nonnull String theUrl) { ValidateUtil.isNotBlankOrThrowIllegalArgument(theUrl, "theUrl must not be null or empty"); - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theUrl, MAX_URL_LENGTH, - "URL exceeds maximum length (" + MAX_URL_LENGTH + "): " + length(theUrl)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theUrl, MAX_URL_LENGTH, "URL exceeds maximum length (" + MAX_URL_LENGTH + "): " + length(theUrl)); myUrl = theUrl; return this; } @@ -225,8 +235,10 @@ public class TermValueSet implements Serializable { } public TermValueSet setVersion(String theVersion) { - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theVersion, MAX_VER_LENGTH, - "Version exceeds maximum length (" + MAX_VER_LENGTH + "): " + length(theVersion)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theVersion, + MAX_VER_LENGTH, + "Version exceeds maximum length (" + MAX_VER_LENGTH + "): " + length(theVersion)); myVersion = theVersion; return this; } @@ -239,9 +251,7 @@ public class TermValueSet implements Serializable { TermValueSet that = (TermValueSet) theO; - return new EqualsBuilder() - .append(getUrl(), that.getUrl()) - .isEquals(); + return new EqualsBuilder().append(getUrl(), that.getUrl()).isEquals(); } @Override @@ -255,15 +265,15 @@ public class TermValueSet implements Serializable { @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("id", myId) - .append("url", myUrl) - .append(myResource != null ? ("resource=" + myResource.toString()) : ("resource=(null)")) - .append("resourcePid", myResourcePid) - .append("name", myName) - .append(myConcepts != null ? ("concepts - size=" + myConcepts.size()) : ("concepts=(null)")) - .append("totalConcepts", myTotalConcepts) - .append("totalConceptDesignations", myTotalConceptDesignations) - .append("expansionStatus", myExpansionStatus) - .toString(); + .append("id", myId) + .append("url", myUrl) + .append(myResource != null ? ("resource=" + myResource.toString()) : ("resource=(null)")) + .append("resourcePid", myResourcePid) + .append("name", myName) + .append(myConcepts != null ? 
("concepts - size=" + myConcepts.size()) : ("concepts=(null)")) + .append("totalConcepts", myTotalConcepts) + .append("totalConceptDesignations", myTotalConceptDesignations) + .append("expansionStatus", myExpansionStatus) + .toString(); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConcept.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConcept.java index c9836aaa69f..9f3a6a7b434 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConcept.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConcept.java @@ -25,6 +25,9 @@ import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.List; import javax.annotation.Nonnull; import javax.persistence.Column; import javax.persistence.Entity; @@ -41,9 +44,6 @@ import javax.persistence.SequenceGenerator; import javax.persistence.Table; import javax.persistence.Transient; import javax.persistence.UniqueConstraint; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.List; import static org.apache.commons.lang3.StringUtils.left; import static org.apache.commons.lang3.StringUtils.length; @@ -52,10 +52,16 @@ import static org.apache.commons.lang3.StringUtils.length; * DM 2019-08-01 - Do not use IDX_VALUESET_CONCEPT_CS_CD or IDX_VALUESET_CONCEPT_CS_CODE; this was previously used as an index so reusing the name will * bork up migration tasks. */ -@Table(name = "TRM_VALUESET_CONCEPT", uniqueConstraints = { - @UniqueConstraint(name = "IDX_VS_CONCEPT_CSCD", columnNames = {"VALUESET_PID", "SYSTEM_URL", "CODEVAL"}), - @UniqueConstraint(name = "IDX_VS_CONCEPT_ORDER", columnNames = {"VALUESET_PID", "VALUESET_ORDER"}) -}) +@Table( + name = "TRM_VALUESET_CONCEPT", + uniqueConstraints = { + @UniqueConstraint( + name = "IDX_VS_CONCEPT_CSCD", + columnNames = {"VALUESET_PID", "SYSTEM_URL", "CODEVAL"}), + @UniqueConstraint( + name = "IDX_VS_CONCEPT_ORDER", + columnNames = {"VALUESET_PID", "VALUESET_ORDER"}) + }) @Entity() public class TermValueSetConcept implements Serializable { private static final long serialVersionUID = 1L; @@ -67,7 +73,11 @@ public class TermValueSetConcept implements Serializable { private Long myId; @ManyToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "VALUESET_PID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name = "FK_TRM_VALUESET_PID")) + @JoinColumn( + name = "VALUESET_PID", + referencedColumnName = "PID", + nullable = false, + foreignKey = @ForeignKey(name = "FK_TRM_VALUESET_PID")) private TermValueSet myValueSet; @Column(name = "VALUESET_PID", insertable = false, updatable = false, nullable = false) @@ -161,8 +171,10 @@ public class TermValueSetConcept implements Serializable { public TermValueSetConcept setSystem(@Nonnull String theSystem) { ValidateUtil.isNotBlankOrThrowIllegalArgument(theSystem, "theSystem must not be null or empty"); - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theSystem, TermCodeSystem.MAX_URL_LENGTH, - "System exceeds maximum length (" + TermCodeSystem.MAX_URL_LENGTH + "): " + length(theSystem)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theSystem, + TermCodeSystem.MAX_URL_LENGTH, + "System exceeds maximum length (" + TermCodeSystem.MAX_URL_LENGTH + "): " + length(theSystem)); mySystem = theSystem; return this; } @@ 
-172,8 +184,11 @@ public class TermValueSetConcept implements Serializable { } public TermValueSetConcept setSystemVersion(String theSystemVersion) { - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theSystemVersion, TermCodeSystemVersion.MAX_VERSION_LENGTH, - "System version exceeds maximum length (" + TermCodeSystemVersion.MAX_VERSION_LENGTH + "): " + length(theSystemVersion)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theSystemVersion, + TermCodeSystemVersion.MAX_VERSION_LENGTH, + "System version exceeds maximum length (" + TermCodeSystemVersion.MAX_VERSION_LENGTH + "): " + + length(theSystemVersion)); mySystemVer = theSystemVersion; return this; } @@ -184,8 +199,10 @@ public class TermValueSetConcept implements Serializable { public TermValueSetConcept setCode(@Nonnull String theCode) { ValidateUtil.isNotBlankOrThrowIllegalArgument(theCode, "theCode must not be null or empty"); - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theCode, TermConcept.MAX_CODE_LENGTH, - "Code exceeds maximum length (" + TermConcept.MAX_CODE_LENGTH + "): " + length(theCode)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theCode, + TermConcept.MAX_CODE_LENGTH, + "Code exceeds maximum length (" + TermConcept.MAX_CODE_LENGTH + "): " + length(theCode)); myCode = theCode; return this; } @@ -216,20 +233,20 @@ public class TermValueSetConcept implements Serializable { TermValueSetConcept that = (TermValueSetConcept) theO; return new EqualsBuilder() - .append(myValueSetPid, that.myValueSetPid) - .append(getSystem(), that.getSystem()) - .append(getCode(), that.getCode()) - .isEquals(); + .append(myValueSetPid, that.myValueSetPid) + .append(getSystem(), that.getSystem()) + .append(getCode(), that.getCode()) + .isEquals(); } @Override public int hashCode() { if (myHashCode == null) { myHashCode = new HashCodeBuilder(17, 37) - .append(myValueSetPid) - .append(getSystem()) - .append(getCode()) - .toHashCode(); + .append(myValueSetPid) + .append(getSystem()) + .append(getCode()) + .toHashCode(); } return myHashCode; } @@ -237,18 +254,18 @@ public class TermValueSetConcept implements Serializable { @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("id", myId) - .append("order", myOrder) - .append("system", mySystem) - .append("code", myCode) - .append("valueSet", myValueSet != null ? myValueSet.getId() : "(null)") - .append("valueSetPid", myValueSetPid) - .append("valueSetUrl", this.getValueSetUrl()) - .append("valueSetName", this.getValueSetName()) - .append("display", myDisplay) - .append("designationCount", myDesignations != null ? myDesignations.size() : "(null)") - .append("parentPids", mySourceConceptDirectParentPids) - .toString(); + .append("id", myId) + .append("order", myOrder) + .append("system", mySystem) + .append("code", myCode) + .append("valueSet", myValueSet != null ? myValueSet.getId() : "(null)") + .append("valueSetPid", myValueSetPid) + .append("valueSetUrl", this.getValueSetUrl()) + .append("valueSetName", this.getValueSetName()) + .append("display", myDisplay) + .append("designationCount", myDesignations != null ? 
myDesignations.size() : "(null)") + .append("parentPids", mySourceConceptDirectParentPids) + .toString(); } public Long getIndexStatus() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptDesignation.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptDesignation.java index 0a4c84c6f50..0e7bb151c6f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptDesignation.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptDesignation.java @@ -25,18 +25,21 @@ import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; +import java.io.Serializable; import javax.annotation.Nonnull; import javax.persistence.*; -import java.io.Serializable; import static org.apache.commons.lang3.StringUtils.left; import static org.apache.commons.lang3.StringUtils.length; -@Table(name = "TRM_VALUESET_C_DESIGNATION", indexes = { - // must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index automatically - @Index(name = "FK_TRM_VALUESET_CONCEPT_PID", columnList = "VALUESET_CONCEPT_PID", unique = false), - @Index(name = "FK_TRM_VSCD_VS_PID", columnList = "VALUESET_PID") -}) +@Table( + name = "TRM_VALUESET_C_DESIGNATION", + indexes = { + // must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index + // automatically + @Index(name = "FK_TRM_VALUESET_CONCEPT_PID", columnList = "VALUESET_CONCEPT_PID", unique = false), + @Index(name = "FK_TRM_VSCD_VS_PID", columnList = "VALUESET_PID") + }) @Entity() public class TermValueSetConceptDesignation implements Serializable { private static final long serialVersionUID = 1L; @@ -48,14 +51,22 @@ public class TermValueSetConceptDesignation implements Serializable { private Long myId; @ManyToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "VALUESET_CONCEPT_PID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name = "FK_TRM_VALUESET_CONCEPT_PID")) + @JoinColumn( + name = "VALUESET_CONCEPT_PID", + referencedColumnName = "PID", + nullable = false, + foreignKey = @ForeignKey(name = "FK_TRM_VALUESET_CONCEPT_PID")) private TermValueSetConcept myConcept; @Column(name = "VALUESET_CONCEPT_PID", insertable = false, updatable = false, nullable = false) private Long myConceptPid; @ManyToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "VALUESET_PID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name = "FK_TRM_VSCD_VS_PID")) + @JoinColumn( + name = "VALUESET_PID", + referencedColumnName = "PID", + nullable = false, + foreignKey = @ForeignKey(name = "FK_TRM_VSCD_VS_PID")) private TermValueSet myValueSet; @Column(name = "VALUESET_PID", insertable = false, updatable = false, nullable = false) @@ -128,8 +139,10 @@ public class TermValueSetConceptDesignation implements Serializable { } public TermValueSetConceptDesignation setLanguage(String theLanguage) { - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theLanguage, TermConceptDesignation.MAX_LENGTH, - "Language exceeds maximum length (" + TermConceptDesignation.MAX_LENGTH + "): " + length(theLanguage)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theLanguage, + TermConceptDesignation.MAX_LENGTH, + "Language exceeds maximum length (" + TermConceptDesignation.MAX_LENGTH + "): " + length(theLanguage)); myLanguage = theLanguage; return this; } @@ -139,8 
+152,11 @@ public class TermValueSetConceptDesignation implements Serializable { } public TermValueSetConceptDesignation setUseSystem(String theUseSystem) { - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theUseSystem, TermConceptDesignation.MAX_LENGTH, - "Use system exceeds maximum length (" + TermConceptDesignation.MAX_LENGTH + "): " + length(theUseSystem)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theUseSystem, + TermConceptDesignation.MAX_LENGTH, + "Use system exceeds maximum length (" + TermConceptDesignation.MAX_LENGTH + "): " + + length(theUseSystem)); myUseSystem = theUseSystem; return this; } @@ -150,8 +166,10 @@ public class TermValueSetConceptDesignation implements Serializable { } public TermValueSetConceptDesignation setUseCode(String theUseCode) { - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theUseCode, TermConceptDesignation.MAX_LENGTH, - "Use code exceeds maximum length (" + TermConceptDesignation.MAX_LENGTH + "): " + length(theUseCode)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theUseCode, + TermConceptDesignation.MAX_LENGTH, + "Use code exceeds maximum length (" + TermConceptDesignation.MAX_LENGTH + "): " + length(theUseCode)); myUseCode = theUseCode; return this; } @@ -171,8 +189,10 @@ public class TermValueSetConceptDesignation implements Serializable { public TermValueSetConceptDesignation setValue(@Nonnull String theValue) { ValidateUtil.isNotBlankOrThrowIllegalArgument(theValue, "theValue must not be null or empty"); - ValidateUtil.isNotTooLongOrThrowIllegalArgument(theValue, TermConceptDesignation.MAX_VAL_LENGTH, - "Value exceeds maximum length (" + TermConceptDesignation.MAX_VAL_LENGTH + "): " + length(theValue)); + ValidateUtil.isNotTooLongOrThrowIllegalArgument( + theValue, + TermConceptDesignation.MAX_VAL_LENGTH, + "Value exceeds maximum length (" + TermConceptDesignation.MAX_VAL_LENGTH + "): " + length(theValue)); myValue = theValue; return this; } @@ -186,24 +206,24 @@ public class TermValueSetConceptDesignation implements Serializable { TermValueSetConceptDesignation that = (TermValueSetConceptDesignation) theO; return new EqualsBuilder() - .append(getLanguage(), that.getLanguage()) - .append(getUseSystem(), that.getUseSystem()) - .append(getUseCode(), that.getUseCode()) - .append(getUseDisplay(), that.getUseDisplay()) - .append(getValue(), that.getValue()) - .isEquals(); + .append(getLanguage(), that.getLanguage()) + .append(getUseSystem(), that.getUseSystem()) + .append(getUseCode(), that.getUseCode()) + .append(getUseDisplay(), that.getUseDisplay()) + .append(getValue(), that.getValue()) + .isEquals(); } @Override public int hashCode() { if (myHashCode == null) { myHashCode = new HashCodeBuilder(17, 37) - .append(getLanguage()) - .append(getUseSystem()) - .append(getUseCode()) - .append(getUseDisplay()) - .append(getValue()) - .toHashCode(); + .append(getLanguage()) + .append(getUseSystem()) + .append(getUseCode()) + .append(getUseDisplay()) + .append(getValue()) + .toHashCode(); } return myHashCode; } @@ -211,18 +231,18 @@ public class TermValueSetConceptDesignation implements Serializable { @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("myId", myId) - .append(myConcept != null ? ("myConcept - id=" + myConcept.getId()) : ("myConcept=(null)")) - .append("myConceptPid", myConceptPid) - .append(myValueSet != null ? 
("myValueSet - id=" + myValueSet.getId()) : ("myValueSet=(null)")) - .append("myValueSetPid", myValueSetPid) - .append("myValueSetUrl", this.getValueSetUrl()) - .append("myValueSetName", this.getValueSetName()) - .append("myLanguage", myLanguage) - .append("myUseSystem", myUseSystem) - .append("myUseCode", myUseCode) - .append("myUseDisplay", myUseDisplay) - .append("myValue", myValue) - .toString(); + .append("myId", myId) + .append(myConcept != null ? ("myConcept - id=" + myConcept.getId()) : ("myConcept=(null)")) + .append("myConceptPid", myConceptPid) + .append(myValueSet != null ? ("myValueSet - id=" + myValueSet.getId()) : ("myValueSet=(null)")) + .append("myValueSetPid", myValueSetPid) + .append("myValueSetUrl", this.getValueSetUrl()) + .append("myValueSetName", this.getValueSetName()) + .append("myLanguage", myLanguage) + .append("myUseSystem", myUseSystem) + .append("myUseCode", myUseCode) + .append("myUseDisplay", myUseDisplay) + .append("myValue", myValue) + .toString(); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptView.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptView.java index dfe3691c3ac..ca1d099e66d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptView.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptView.java @@ -25,42 +25,40 @@ import org.apache.commons.io.IOUtils; import org.hibernate.annotations.Immutable; import org.hibernate.annotations.Subselect; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.Id; -import javax.persistence.Lob; import java.io.IOException; import java.io.Reader; import java.io.Serializable; import java.sql.Clob; import java.sql.SQLException; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.Id; +import javax.persistence.Lob; @Entity @Immutable @Subselect( - /* - * Note about the CONCAT function below- We need a primary key (an @Id) column - * because hibernate won't allow the view the function without it, but - */ - "SELECT CONCAT_WS(' ', vsc.PID, vscd.PID) AS PID, " + - " vsc.PID AS CONCEPT_PID, " + - " vsc.VALUESET_PID AS CONCEPT_VALUESET_PID, " + - " vsc.VALUESET_ORDER AS CONCEPT_VALUESET_ORDER, " + - " vsc.SYSTEM_URL AS CONCEPT_SYSTEM_URL, " + - " vsc.CODEVAL AS CONCEPT_CODEVAL, " + - " vsc.DISPLAY AS CONCEPT_DISPLAY, " + - " vsc.SYSTEM_VER AS SYSTEM_VER, " + - " vsc.SOURCE_PID AS SOURCE_PID, " + - " vsc.SOURCE_DIRECT_PARENT_PIDS AS SOURCE_DIRECT_PARENT_PIDS, " + - " vscd.PID AS DESIGNATION_PID, " + - " vscd.LANG AS DESIGNATION_LANG, " + - " vscd.USE_SYSTEM AS DESIGNATION_USE_SYSTEM, " + - " vscd.USE_CODE AS DESIGNATION_USE_CODE, " + - " vscd.USE_DISPLAY AS DESIGNATION_USE_DISPLAY, " + - " vscd.VAL AS DESIGNATION_VAL " + - "FROM TRM_VALUESET_CONCEPT vsc " + - "LEFT OUTER JOIN TRM_VALUESET_C_DESIGNATION vscd ON vsc.PID = vscd.VALUESET_CONCEPT_PID" -) + /* + * Note about the CONCAT function below- We need a primary key (an @Id) column + * because hibernate won't allow the view the function without it, but + */ + "SELECT CONCAT_WS(' ', vsc.PID, vscd.PID) AS PID, " + " vsc.PID AS CONCEPT_PID, " + + " vsc.VALUESET_PID AS CONCEPT_VALUESET_PID, " + + " vsc.VALUESET_ORDER AS CONCEPT_VALUESET_ORDER, " + + " vsc.SYSTEM_URL AS CONCEPT_SYSTEM_URL, " + + " vsc.CODEVAL AS CONCEPT_CODEVAL, " + + " vsc.DISPLAY AS CONCEPT_DISPLAY, " + + " vsc.SYSTEM_VER AS SYSTEM_VER, " + + " vsc.SOURCE_PID AS 
SOURCE_PID, " + + " vsc.SOURCE_DIRECT_PARENT_PIDS AS SOURCE_DIRECT_PARENT_PIDS, " + + " vscd.PID AS DESIGNATION_PID, " + + " vscd.LANG AS DESIGNATION_LANG, " + + " vscd.USE_SYSTEM AS DESIGNATION_USE_SYSTEM, " + + " vscd.USE_CODE AS DESIGNATION_USE_CODE, " + + " vscd.USE_DISPLAY AS DESIGNATION_USE_DISPLAY, " + + " vscd.VAL AS DESIGNATION_VAL " + + "FROM TRM_VALUESET_CONCEPT vsc " + + "LEFT OUTER JOIN TRM_VALUESET_C_DESIGNATION vscd ON vsc.PID = vscd.VALUESET_CONCEPT_PID") public class TermValueSetConceptView implements Serializable, ITermValueSetConceptView { private static final long serialVersionUID = 1L; @@ -86,7 +84,7 @@ public class TermValueSetConceptView implements Serializable, ITermValueSetConce @Column(name = "CONCEPT_DISPLAY", length = TermConcept.MAX_DESC_LENGTH) private String myConceptDisplay; - @Column(name="SYSTEM_VER", length = TermCodeSystemVersion.MAX_VERSION_LENGTH) + @Column(name = "SYSTEM_VER", length = TermCodeSystemVersion.MAX_VERSION_LENGTH) private String myConceptSystemVersion; @Column(name = "DESIGNATION_PID") @@ -185,5 +183,4 @@ public class TermValueSetConceptView implements Serializable, ITermValueSetConce public String getConceptSystemVersion() { return myConceptSystemVersion; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptViewOracle.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptViewOracle.java index 841bc8db97f..5419c8affaf 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptViewOracle.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetConceptViewOracle.java @@ -25,42 +25,41 @@ import org.apache.commons.io.IOUtils; import org.hibernate.annotations.Immutable; import org.hibernate.annotations.Subselect; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.Id; -import javax.persistence.Lob; import java.io.IOException; import java.io.Reader; import java.io.Serializable; import java.sql.Clob; import java.sql.SQLException; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.Id; +import javax.persistence.Lob; @Entity @Immutable @Subselect( - /* - * Note about the CONCAT function below- We need a primary key (an @Id) column - * because hibernate won't allow the view the function without it, but - */ - "SELECT CONCAT(vsc.PID, CONCAT(' ', vscd.PID)) AS PID, " + - " vsc.PID AS CONCEPT_PID, " + - " vsc.VALUESET_PID AS CONCEPT_VALUESET_PID, " + - " vsc.VALUESET_ORDER AS CONCEPT_VALUESET_ORDER, " + - " vsc.SYSTEM_URL AS CONCEPT_SYSTEM_URL, " + - " vsc.CODEVAL AS CONCEPT_CODEVAL, " + - " vsc.DISPLAY AS CONCEPT_DISPLAY, " + - " vsc.SYSTEM_VER AS SYSTEM_VER, " + - " vsc.SOURCE_PID AS SOURCE_PID, " + - " vsc.SOURCE_DIRECT_PARENT_PIDS AS SOURCE_DIRECT_PARENT_PIDS, " + - " vscd.PID AS DESIGNATION_PID, " + - " vscd.LANG AS DESIGNATION_LANG, " + - " vscd.USE_SYSTEM AS DESIGNATION_USE_SYSTEM, " + - " vscd.USE_CODE AS DESIGNATION_USE_CODE, " + - " vscd.USE_DISPLAY AS DESIGNATION_USE_DISPLAY, " + - " vscd.VAL AS DESIGNATION_VAL " + - "FROM TRM_VALUESET_CONCEPT vsc " + - "LEFT OUTER JOIN TRM_VALUESET_C_DESIGNATION vscd ON vsc.PID = vscd.VALUESET_CONCEPT_PID" -) + /* + * Note about the CONCAT function below- We need a primary key (an @Id) column + * because hibernate won't allow the view the function without it, but + */ + "SELECT CONCAT(vsc.PID, CONCAT(' ', vscd.PID)) AS PID, " + + " vsc.PID AS CONCEPT_PID, " + + " vsc.VALUESET_PID AS 
CONCEPT_VALUESET_PID, " + + " vsc.VALUESET_ORDER AS CONCEPT_VALUESET_ORDER, " + + " vsc.SYSTEM_URL AS CONCEPT_SYSTEM_URL, " + + " vsc.CODEVAL AS CONCEPT_CODEVAL, " + + " vsc.DISPLAY AS CONCEPT_DISPLAY, " + + " vsc.SYSTEM_VER AS SYSTEM_VER, " + + " vsc.SOURCE_PID AS SOURCE_PID, " + + " vsc.SOURCE_DIRECT_PARENT_PIDS AS SOURCE_DIRECT_PARENT_PIDS, " + + " vscd.PID AS DESIGNATION_PID, " + + " vscd.LANG AS DESIGNATION_LANG, " + + " vscd.USE_SYSTEM AS DESIGNATION_USE_SYSTEM, " + + " vscd.USE_CODE AS DESIGNATION_USE_CODE, " + + " vscd.USE_DISPLAY AS DESIGNATION_USE_DISPLAY, " + + " vscd.VAL AS DESIGNATION_VAL " + + "FROM TRM_VALUESET_CONCEPT vsc " + + "LEFT OUTER JOIN TRM_VALUESET_C_DESIGNATION vscd ON vsc.PID = vscd.VALUESET_CONCEPT_PID") public class TermValueSetConceptViewOracle implements Serializable, ITermValueSetConceptView { private static final long serialVersionUID = 1L; @@ -86,7 +85,7 @@ public class TermValueSetConceptViewOracle implements Serializable, ITermValueSe @Column(name = "CONCEPT_DISPLAY", length = TermConcept.MAX_DESC_LENGTH) private String myConceptDisplay; - @Column(name="SYSTEM_VER", length = TermCodeSystemVersion.MAX_VERSION_LENGTH) + @Column(name = "SYSTEM_VER", length = TermCodeSystemVersion.MAX_VERSION_LENGTH) private String myConceptSystemVersion; @Column(name = "DESIGNATION_PID") diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnum.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnum.java index 12314cbf725..49690393473 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnum.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSetPreExpansionStatusEnum.java @@ -33,9 +33,12 @@ public enum TermValueSetPreExpansionStatusEnum { */ NOT_EXPANDED("notExpanded", "The ValueSet is waiting to be picked up and pre-expanded by a scheduled task."), - EXPANSION_IN_PROGRESS("expansionInProgress", "The ValueSet has been picked up by a scheduled task and pre-expansion is in progress."), + EXPANSION_IN_PROGRESS( + "expansionInProgress", + "The ValueSet has been picked up by a scheduled task and pre-expansion is in progress."), EXPANDED("expanded", "The ValueSet has been picked up by a scheduled task and pre-expansion is complete."), - FAILED_TO_EXPAND("failedToExpand", "The ValueSet has been picked up by a scheduled task and pre-expansion has failed."); + FAILED_TO_EXPAND( + "failedToExpand", "The ValueSet has been picked up by a scheduled task and pre-expansion has failed."); private static Map ourValues; private String myCode; @@ -56,7 +59,8 @@ public enum TermValueSetPreExpansionStatusEnum { public static TermValueSetPreExpansionStatusEnum fromCode(String theCode) { if (ourValues == null) { - HashMap values = new HashMap(); + HashMap values = + new HashMap(); for (TermValueSetPreExpansionStatusEnum next : values()) { values.put(next.getCode(), next); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceAddressMetadataKey.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceAddressMetadataKey.java index 553deb06c7b..79335b1b4b5 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceAddressMetadataKey.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceAddressMetadataKey.java @@ -21,12 +21,14 @@ package ca.uhn.fhir.jpa.esr; import 
ca.uhn.fhir.model.api.ResourceMetadataKeyEnum; -public class ExternallyStoredResourceAddressMetadataKey extends ResourceMetadataKeyEnum { +public class ExternallyStoredResourceAddressMetadataKey + extends ResourceMetadataKeyEnum { /** * Singleton instance */ - public static final ExternallyStoredResourceAddressMetadataKey INSTANCE = new ExternallyStoredResourceAddressMetadataKey(); + public static final ExternallyStoredResourceAddressMetadataKey INSTANCE = + new ExternallyStoredResourceAddressMetadataKey(); /** * Constructor @@ -34,5 +36,4 @@ public class ExternallyStoredResourceAddressMetadataKey extends ResourceMetadata private ExternallyStoredResourceAddressMetadataKey() { super("ExternallyStoredResourceAddress", ExternallyStoredResourceAddress.class); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceServiceRegistry.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceServiceRegistry.java index cd75867d117..a8a5caa06c0 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceServiceRegistry.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceServiceRegistry.java @@ -21,9 +21,9 @@ package ca.uhn.fhir.jpa.esr; import org.apache.commons.lang3.Validate; -import javax.annotation.Nonnull; import java.util.HashMap; import java.util.Map; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.defaultString; @@ -39,7 +39,10 @@ public class ExternallyStoredResourceServiceRegistry { */ public void registerProvider(@Nonnull IExternallyStoredResourceService theProvider) { String id = defaultString(theProvider.getId()); - Validate.isTrue(id.matches(VALID_ID_PATTERN), "Invalid provider ID (must match pattern " + VALID_ID_PATTERN + "): %s", id); + Validate.isTrue( + id.matches(VALID_ID_PATTERN), + "Invalid provider ID (must match pattern " + VALID_ID_PATTERN + "): %s", + id); Validate.isTrue(!myIdToProvider.containsKey(id), "Already have a provider with ID: %s", id); myIdToProvider.put(id, theProvider); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/package-info.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/package-info.java index c54429b0743..2ee115ee4df 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/package-info.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/package-info.java @@ -31,4 +31,3 @@ * @since 6.6.0 */ package ca.uhn.fhir.jpa.esr; - diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/graphql/GraphQLProviderWithIntrospection.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/graphql/GraphQLProviderWithIntrospection.java index 6f3e2264eb7..a1f918a9018 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/graphql/GraphQLProviderWithIntrospection.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/graphql/GraphQLProviderWithIntrospection.java @@ -58,8 +58,6 @@ import org.hl7.fhir.utilities.graphql.IGraphQLStorageServices; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.io.IOException; import java.io.Writer; import java.util.ArrayList; @@ -70,6 +68,8 @@ import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static 
ca.uhn.fhir.util.MessageSupplier.msg; @@ -84,7 +84,12 @@ public class GraphQLProviderWithIntrospection extends GraphQLProvider { /** * Constructor */ - public GraphQLProviderWithIntrospection(FhirContext theFhirContext, IValidationSupport theValidationSupport, IGraphQLStorageServices theIGraphQLStorageServices, ISearchParamRegistry theSearchParamRegistry, IDaoRegistry theDaoRegistry) { + public GraphQLProviderWithIntrospection( + FhirContext theFhirContext, + IValidationSupport theValidationSupport, + IGraphQLStorageServices theIGraphQLStorageServices, + ISearchParamRegistry theSearchParamRegistry, + IDaoRegistry theDaoRegistry) { super(theFhirContext, theValidationSupport, theIGraphQLStorageServices); mySearchParamRegistry = theSearchParamRegistry; @@ -100,14 +105,21 @@ public class GraphQLProviderWithIntrospection extends GraphQLProvider { } @Override - public String processGraphQlPostRequest(ServletRequestDetails theServletRequestDetails, RequestDetails theRequestDetails, IIdType theId, String theQueryBody) { + public String processGraphQlPostRequest( + ServletRequestDetails theServletRequestDetails, + RequestDetails theRequestDetails, + IIdType theId, + String theQueryBody) { if (theQueryBody.contains("__schema")) { EnumSet operations; if (theId != null) { - throw new InvalidRequestException(Msg.code(2035) + "GraphQL introspection not supported at instance level. Please try at server- or instance- level."); + throw new InvalidRequestException( + Msg.code(2035) + + "GraphQL introspection not supported at instance level. Please try at server- or instance- level."); } - operations = EnumSet.of(GraphQLSchemaGenerator.FHIROperationType.READ, GraphQLSchemaGenerator.FHIROperationType.SEARCH); + operations = EnumSet.of( + GraphQLSchemaGenerator.FHIROperationType.READ, GraphQLSchemaGenerator.FHIROperationType.SEARCH); Collection resourceTypes; if (theRequestDetails.getResourceName() != null) { @@ -119,10 +131,7 @@ public class GraphQLProviderWithIntrospection extends GraphQLProvider { resourceTypes.add(next); } } - resourceTypes = resourceTypes - .stream() - .sorted() - .collect(Collectors.toList()); + resourceTypes = resourceTypes.stream().sorted().collect(Collectors.toList()); } return generateSchema(theQueryBody, resourceTypes, operations); @@ -131,7 +140,10 @@ public class GraphQLProviderWithIntrospection extends GraphQLProvider { } } - private String generateSchema(String theQueryBody, Collection theResourceTypes, EnumSet theOperations) { + private String generateSchema( + String theQueryBody, + Collection theResourceTypes, + EnumSet theOperations) { final StringBuilder schemaBuilder = new StringBuilder(); try (Writer writer = new StringBuilderWriter(schemaBuilder)) { @@ -140,36 +152,37 @@ public class GraphQLProviderWithIntrospection extends GraphQLProvider { myGenerator.generateTypes(writer, theOperations); // Fix up a few things that are missing from the generated schema - writer - .append("\ninterface Element {") - .append("\n id: ID") - .append("\n}") - .append("\n"); -// writer -// .append("\ninterface Quantity {\n") -// .append("id: String\n") -// .append("extension: [Extension]\n") -// .append("value: decimal _value: ElementBase\n") -// .append("comparator: code _comparator: ElementBase\n") -// .append("unit: String _unit: ElementBase\n") -// .append("system: uri _system: ElementBase\n") -// .append("code: code _code: ElementBase\n") -// .append("\n}") -// .append("\n"); + writer.append("\ninterface Element {") + .append("\n id: ID") + .append("\n}") + .append("\n"); + // writer + 
// .append("\ninterface Quantity {\n") + // .append("id: String\n") + // .append("extension: [Extension]\n") + // .append("value: decimal _value: ElementBase\n") + // .append("comparator: code _comparator: ElementBase\n") + // .append("unit: String _unit: ElementBase\n") + // .append("system: uri _system: ElementBase\n") + // .append("code: code _code: ElementBase\n") + // .append("\n}") + // .append("\n"); -// writer -// .append("\ntype Resource {") -// .append("\n id: [token]" + "\n}") -// .append("\n"); -// writer -// .append("\ninput ResourceInput {") -// .append("\n id: [token]" + "\n}") -// .append("\n"); + // writer + // .append("\ntype Resource {") + // .append("\n id: [token]" + "\n}") + // .append("\n"); + // writer + // .append("\ninput ResourceInput {") + // .append("\n id: [token]" + "\n}") + // .append("\n"); // Generate schemas for the resource types for (String nextResourceType : theResourceTypes) { StructureDefinition sd = fetchStructureDefinition(nextResourceType); - List parameters = toR5SearchParams(mySearchParamRegistry.getActiveSearchParams(nextResourceType).values()); + List parameters = toR5SearchParams(mySearchParamRegistry + .getActiveSearchParams(nextResourceType) + .values()); myGenerator.generateResource(writer, sd, parameters, theOperations); } @@ -177,15 +190,16 @@ public class GraphQLProviderWithIntrospection extends GraphQLProvider { writer.append("\ntype Query {"); for (String nextResourceType : theResourceTypes) { if (theOperations.contains(GraphQLSchemaGenerator.FHIROperationType.READ)) { - writer - .append("\n ") - .append(nextResourceType) - .append("(id: String): ") - .append(nextResourceType) - .append("\n"); + writer.append("\n ") + .append(nextResourceType) + .append("(id: String): ") + .append(nextResourceType) + .append("\n"); } if (theOperations.contains(GraphQLSchemaGenerator.FHIROperationType.SEARCH)) { - List parameters = toR5SearchParams(mySearchParamRegistry.getActiveSearchParams(nextResourceType).values()); + List parameters = toR5SearchParams(mySearchParamRegistry + .getActiveSearchParams(nextResourceType) + .values()); myGenerator.generateListAccessQuery(writer, parameters, nextResourceType); myGenerator.generateConnectionAccessQuery(writer, parameters, nextResourceType); } @@ -213,14 +227,16 @@ public class GraphQLProviderWithIntrospection extends GraphQLProvider { // Skip GraphQL built-in types continue; } - runtimeWiringBuilder.scalar(new GraphQLScalarType.Builder().name(next).coercing(new GraphqlStringCoercing()).build()); + runtimeWiringBuilder.scalar(new GraphQLScalarType.Builder() + .name(next) + .coercing(new GraphqlStringCoercing()) + .build()); } for (InterfaceTypeDefinition next : typeDefinitionRegistry.getTypes(InterfaceTypeDefinition.class)) { TypeResolver resolver = new TypeResolverProxy(); - TypeRuntimeWiring wiring = TypeRuntimeWiring - .newTypeWiring(next.getName()) - .typeResolver(resolver) + TypeRuntimeWiring wiring = TypeRuntimeWiring.newTypeWiring(next.getName()) + .typeResolver(resolver) .build(); runtimeWiringBuilder.type(wiring); } @@ -234,7 +250,6 @@ public class GraphQLProviderWithIntrospection extends GraphQLProvider { Map data = executionResult.toSpecification(); Gson gson = new GsonBuilder().create(); return gson.toJson(data); - } @Nonnull @@ -291,9 +306,9 @@ public class GraphQLProviderWithIntrospection extends GraphQLProvider { @Nonnull private StructureDefinition fetchStructureDefinition(String resourceName) { - StructureDefinition retVal = myContext.fetchResource(StructureDefinition.class, 
"http://hl7.org/fhir/StructureDefinition/" + resourceName); + StructureDefinition retVal = myContext.fetchResource( + StructureDefinition.class, "http://hl7.org/fhir/StructureDefinition/" + resourceName); Validate.notNull(retVal); return retVal; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/CascadingDeleteInterceptor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/CascadingDeleteInterceptor.java index c539f2fdf85..a460257089e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/CascadingDeleteInterceptor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/CascadingDeleteInterceptor.java @@ -41,10 +41,10 @@ import org.hl7.fhir.r4.model.OperationOutcome; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static ca.uhn.fhir.jpa.delete.DeleteConflictService.MAX_RETRY_ATTEMPTS; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -73,8 +73,10 @@ public class CascadingDeleteInterceptor { public static final int CASCADING_DELETE_INTERCEPTOR_ORDER = 1; private static final Logger ourLog = LoggerFactory.getLogger(CascadingDeleteInterceptor.class); - private static final String CASCADED_DELETES_KEY = CascadingDeleteInterceptor.class.getName() + "_CASCADED_DELETES_KEY"; - private static final String CASCADED_DELETES_FAILED_KEY = CascadingDeleteInterceptor.class.getName() + "_CASCADED_DELETES_FAILED_KEY"; + private static final String CASCADED_DELETES_KEY = + CascadingDeleteInterceptor.class.getName() + "_CASCADED_DELETES_KEY"; + private static final String CASCADED_DELETES_FAILED_KEY = + CascadingDeleteInterceptor.class.getName() + "_CASCADED_DELETES_FAILED_KEY"; private final DaoRegistry myDaoRegistry; private final IInterceptorBroadcaster myInterceptorBroadcaster; @@ -86,7 +88,11 @@ public class CascadingDeleteInterceptor { * * @param theDaoRegistry The DAO registry (must not be null) */ - public CascadingDeleteInterceptor(@Nonnull FhirContext theFhirContext, @Nonnull DaoRegistry theDaoRegistry, @Nonnull IInterceptorBroadcaster theInterceptorBroadcaster, @Nonnull ThreadSafeResourceDeleterSvc theThreadSafeResourceDeleterSvc) { + public CascadingDeleteInterceptor( + @Nonnull FhirContext theFhirContext, + @Nonnull DaoRegistry theDaoRegistry, + @Nonnull IInterceptorBroadcaster theInterceptorBroadcaster, + @Nonnull ThreadSafeResourceDeleterSvc theThreadSafeResourceDeleterSvc) { Validate.notNull(theDaoRegistry, "theDaoRegistry must not be null"); Validate.notNull(theInterceptorBroadcaster, "theInterceptorBroadcaster must not be null"); Validate.notNull(theFhirContext, "theFhirContext must not be null"); @@ -99,7 +105,8 @@ public class CascadingDeleteInterceptor { } @Hook(value = Pointcut.STORAGE_PRESTORAGE_DELETE_CONFLICTS, order = CASCADING_DELETE_INTERCEPTOR_ORDER) - public DeleteConflictOutcome handleDeleteConflicts(DeleteConflictList theConflictList, RequestDetails theRequest, TransactionDetails theTransactionDetails) { + public DeleteConflictOutcome handleDeleteConflicts( + DeleteConflictList theConflictList, RequestDetails theRequest, TransactionDetails theTransactionDetails) { ourLog.debug("Have delete conflicts: {}", theConflictList); if (shouldCascade(theRequest) == DeleteCascadeModeEnum.NONE) { @@ -133,7 +140,8 @@ public class CascadingDeleteInterceptor { public void 
outgoingFailureOperationOutcome(RequestDetails theRequestDetails, IBaseOperationOutcome theResponse) { if (theRequestDetails != null) { - String failedDeleteMessage = (String) theRequestDetails.getUserData().get(CASCADED_DELETES_FAILED_KEY); + String failedDeleteMessage = + (String) theRequestDetails.getUserData().get(CASCADED_DELETES_FAILED_KEY); if (isNotBlank(failedDeleteMessage)) { FhirContext ctx = theRequestDetails.getFhirContext(); String severity = OperationOutcome.IssueSeverity.INFORMATION.toCode(); @@ -141,13 +149,12 @@ public class CascadingDeleteInterceptor { String details = failedDeleteMessage; OperationOutcomeUtil.addIssue(ctx, theResponse, severity, details, null, code); } - } } - @Hook(Pointcut.SERVER_OUTGOING_RESPONSE) - public void outgoingResponse(RequestDetails theRequestDetails, ResponseDetails theResponseDetails, IBaseResource theResponse) { + public void outgoingResponse( + RequestDetails theRequestDetails, ResponseDetails theResponseDetails, IBaseResource theResponse) { if (theRequestDetails != null) { // Successful delete list @@ -159,16 +166,16 @@ public class CascadingDeleteInterceptor { IBaseOperationOutcome oo = (IBaseOperationOutcome) theResponse; String severity = OperationOutcome.IssueSeverity.INFORMATION.toCode(); String code = OperationOutcome.IssueType.INFORMATIONAL.toCode(); - String details = ctx.getLocalizer().getMessage(CascadingDeleteInterceptor.class, "successMsg", deleteList.size(), deleteList); + String details = ctx.getLocalizer() + .getMessage( + CascadingDeleteInterceptor.class, "successMsg", deleteList.size(), deleteList); OperationOutcomeUtil.addIssue(ctx, oo, severity, details, null, code); } } } - } } - /** * Subclasses may override * diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/ForceOffsetSearchModeInterceptor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/ForceOffsetSearchModeInterceptor.java index b9f2b794187..ad24b2adf86 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/ForceOffsetSearchModeInterceptor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/ForceOffsetSearchModeInterceptor.java @@ -64,6 +64,4 @@ public class ForceOffsetSearchModeInterceptor { theMap.setCount(myDefaultCount); } } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/JpaConsentContextServices.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/JpaConsentContextServices.java index 2e858093a1c..d3d6b7f1577 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/JpaConsentContextServices.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/JpaConsentContextServices.java @@ -21,5 +21,4 @@ package ca.uhn.fhir.jpa.interceptor; import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices; -public class JpaConsentContextServices implements IConsentContextServices { -} +public class JpaConsentContextServices implements IConsentContextServices {} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/JpaPreResourceAccessDetails.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/JpaPreResourceAccessDetails.java index 7399b8b2419..2904ad587d9 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/JpaPreResourceAccessDetails.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/JpaPreResourceAccessDetails.java @@ -25,10 +25,10 @@ import 
ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails; import ca.uhn.fhir.util.ICallable; import org.hl7.fhir.instance.model.api.IBaseResource; -import javax.annotation.concurrent.NotThreadSafe; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import javax.annotation.concurrent.NotThreadSafe; /** * THIS CLASS IS NOT THREAD SAFE @@ -41,7 +41,8 @@ public class JpaPreResourceAccessDetails implements IPreResourceAccessDetails { private final ICallable mySearchBuilderSupplier; private List myResources; - public JpaPreResourceAccessDetails(List theResourcePids, ICallable theSearchBuilderSupplier) { + public JpaPreResourceAccessDetails( + List theResourcePids, ICallable theSearchBuilderSupplier) { myResourcePids = theResourcePids; myBlocked = new boolean[myResourcePids.size()]; mySearchBuilderSupplier = theSearchBuilderSupplier; @@ -56,7 +57,9 @@ public class JpaPreResourceAccessDetails implements IPreResourceAccessDetails { public IBaseResource getResource(int theIndex) { if (myResources == null) { myResources = new ArrayList<>(myResourcePids.size()); - mySearchBuilderSupplier.call().loadResourcesByPid(myResourcePids, Collections.emptySet(), myResources, false, null); + mySearchBuilderSupplier + .call() + .loadResourcesByPid(myResourcePids, Collections.emptySet(), myResources, false, null); } return myResources.get(theIndex); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/OverridePathBasedReferentialIntegrityForDeletesInterceptor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/OverridePathBasedReferentialIntegrityForDeletesInterceptor.java index 004f18c6a62..cb7cd53159c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/OverridePathBasedReferentialIntegrityForDeletesInterceptor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/OverridePathBasedReferentialIntegrityForDeletesInterceptor.java @@ -54,11 +54,13 @@ import java.util.Set; @Interceptor public class OverridePathBasedReferentialIntegrityForDeletesInterceptor { - private static final Logger ourLog = LoggerFactory.getLogger(OverridePathBasedReferentialIntegrityForDeletesInterceptor.class); + private static final Logger ourLog = + LoggerFactory.getLogger(OverridePathBasedReferentialIntegrityForDeletesInterceptor.class); private final Set myPaths = new HashSet<>(); @Autowired private FhirContext myFhirContext; + @Autowired private DaoRegistry myDaoRegistry; @@ -98,30 +100,36 @@ public class OverridePathBasedReferentialIntegrityForDeletesInterceptor { /** * Interceptor hook method. Do not invoke directly. 
*/ - @Hook(value = Pointcut.STORAGE_PRESTORAGE_DELETE_CONFLICTS, order = CascadingDeleteInterceptor.OVERRIDE_PATH_BASED_REF_INTEGRITY_INTERCEPTOR_ORDER) + @Hook( + value = Pointcut.STORAGE_PRESTORAGE_DELETE_CONFLICTS, + order = CascadingDeleteInterceptor.OVERRIDE_PATH_BASED_REF_INTEGRITY_INTERCEPTOR_ORDER) public void handleDeleteConflicts(DeleteConflictList theDeleteConflictList, RequestDetails requestDetails) { for (DeleteConflict nextConflict : theDeleteConflictList) { - ourLog.info("Ignoring referential integrity deleting {} - Referred to from {} at path {}", nextConflict.getTargetId(), nextConflict.getSourceId(), nextConflict.getSourcePath()); + ourLog.info( + "Ignoring referential integrity deleting {} - Referred to from {} at path {}", + nextConflict.getTargetId(), + nextConflict.getSourceId(), + nextConflict.getSourcePath()); IdDt sourceId = nextConflict.getSourceId(); IdDt targetId = nextConflict.getTargetId(); String targetIdValue = targetId.toVersionless().getValue(); - IBaseResource sourceResource = myDaoRegistry.getResourceDao(sourceId.getResourceType()).read(sourceId, requestDetails); + IBaseResource sourceResource = + myDaoRegistry.getResourceDao(sourceId.getResourceType()).read(sourceId, requestDetails); IFhirPath fhirPath = myFhirContext.newFhirPath(); for (String nextPath : myPaths) { List selections = fhirPath.evaluate(sourceResource, nextPath, IBaseReference.class); for (IBaseReference nextSelection : selections) { - String selectionTargetValue = nextSelection.getReferenceElement().toVersionless().getValue(); + String selectionTargetValue = + nextSelection.getReferenceElement().toVersionless().getValue(); if (Objects.equals(targetIdValue, selectionTargetValue)) { theDeleteConflictList.setResourceIdToIgnoreConflict(nextConflict.getTargetId()); break; } } - } - } } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/PerformanceTracingLoggingInterceptor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/PerformanceTracingLoggingInterceptor.java index 264d1afa90d..9bde4bd1656 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/PerformanceTracingLoggingInterceptor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/PerformanceTracingLoggingInterceptor.java @@ -64,32 +64,56 @@ public class PerformanceTracingLoggingInterceptor { @Hook(value = Pointcut.JPA_PERFTRACE_SEARCH_FIRST_RESULT_LOADED) public void searchFirstResultLoaded(SearchRuntimeDetails theOutcome) { - log("Initial query result returned in {} for query {}", theOutcome.getQueryStopwatch(), theOutcome.getSearchUuid()); + log( + "Initial query result returned in {} for query {}", + theOutcome.getQueryStopwatch(), + theOutcome.getSearchUuid()); } @Hook(value = Pointcut.JPA_PERFTRACE_SEARCH_SELECT_COMPLETE) public void searchSelectComplete(SearchRuntimeDetails theOutcome) { - log("SqlQuery found {} matches in {} for query {}", theOutcome.getFoundMatchesCount(), theOutcome.getQueryStopwatch(), theOutcome.getSearchUuid()); + log( + "SqlQuery found {} matches in {} for query {}", + theOutcome.getFoundMatchesCount(), + theOutcome.getQueryStopwatch(), + theOutcome.getSearchUuid()); } @Hook(value = Pointcut.JPA_PERFTRACE_SEARCH_COMPLETE) public void searchComplete(SearchRuntimeDetails theOutcome) { - log("SqlQuery {} is complete in {} - Found {} matches", theOutcome.getSearchUuid(), theOutcome.getQueryStopwatch(), theOutcome.getFoundMatchesCount()); + log( + "SqlQuery {} is complete in {} - Found {} matches", + 
theOutcome.getSearchUuid(), + theOutcome.getQueryStopwatch(), + theOutcome.getFoundMatchesCount()); } @Hook(value = Pointcut.JPA_PERFTRACE_SEARCH_PASS_COMPLETE) public void searchPassComplete(SearchRuntimeDetails theOutcome) { - log("SqlQuery {} pass complete and set to status {} in {} - Found {} matches", theOutcome.getSearchUuid(), theOutcome.getSearchStatus(), theOutcome.getQueryStopwatch(), theOutcome.getFoundMatchesCount()); + log( + "SqlQuery {} pass complete and set to status {} in {} - Found {} matches", + theOutcome.getSearchUuid(), + theOutcome.getSearchStatus(), + theOutcome.getQueryStopwatch(), + theOutcome.getFoundMatchesCount()); } @Hook(value = Pointcut.JPA_PERFTRACE_SEARCH_FAILED) public void searchFailed(SearchRuntimeDetails theOutcome) { - log("SqlQuery {} failed in {} - Found {} matches", theOutcome.getSearchUuid(), theOutcome.getQueryStopwatch(), theOutcome.getFoundMatchesCount()); + log( + "SqlQuery {} failed in {} - Found {} matches", + theOutcome.getSearchUuid(), + theOutcome.getQueryStopwatch(), + theOutcome.getFoundMatchesCount()); } @Hook(value = Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE) public void indexSearchQueryComplete(SearchRuntimeDetails theOutcome) { - log("Index query for {} completed in {} - Found {} matches", theOutcome.getSearchUuid(), theOutcome.getQueryStopwatch(), theOutcome.getFoundIndexMatchesCount()); + log( + "Index query for {} completed in {} - Found {} matches", + theOutcome.getSearchUuid(), + theOutcome.getQueryStopwatch(), + theOutcome.getFoundIndexMatchesCount()); } @Hook(value = Pointcut.JPA_PERFTRACE_INFO) @@ -105,5 +129,4 @@ public class PerformanceTracingLoggingInterceptor { private void log(String theMessage, Object... theArgs) { LogUtil.log(myLog, myLevel, theMessage, theArgs); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/TransactionConcurrencySemaphoreInterceptor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/TransactionConcurrencySemaphoreInterceptor.java index cb3e4bca51a..f21a8f4b6c2 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/TransactionConcurrencySemaphoreInterceptor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/interceptor/TransactionConcurrencySemaphoreInterceptor.java @@ -19,7 +19,6 @@ */ package ca.uhn.fhir.jpa.interceptor; - import ca.uhn.fhir.interceptor.api.Hook; import ca.uhn.fhir.interceptor.api.Interceptor; import ca.uhn.fhir.interceptor.api.Pointcut; @@ -49,7 +48,8 @@ import java.util.stream.Collectors; public class TransactionConcurrencySemaphoreInterceptor { private static final Logger ourLog = LoggerFactory.getLogger(TransactionConcurrencySemaphoreInterceptor.class); - private static final String HELD_SEMAPHORES = TransactionConcurrencySemaphoreInterceptor.class.getName() + "_HELD_SEMAPHORES"; + private static final String HELD_SEMAPHORES = + TransactionConcurrencySemaphoreInterceptor.class.getName() + "_HELD_SEMAPHORES"; private final Cache mySemaphoreCache; private final MemoryCacheService myMemoryCacheService; private boolean myLogWaits; @@ -78,21 +78,33 @@ public class TransactionConcurrencySemaphoreInterceptor { } @Hook(Pointcut.STORAGE_TRANSACTION_WRITE_OPERATIONS_PRE) - public void pre(TransactionDetails theTransactionDetails, TransactionWriteOperationsDetails theWriteOperationsDetails) { + public void pre( + TransactionDetails theTransactionDetails, TransactionWriteOperationsDetails theWriteOperationsDetails) { List heldSemaphores = new ArrayList<>(); Map 
pendingAndHeldSemaphores = new HashMap<>(); AtomicBoolean locked = new AtomicBoolean(false); try { - acquireSemaphoresForUrlList(locked, heldSemaphores, pendingAndHeldSemaphores, theWriteOperationsDetails.getUpdateRequestUrls(), false); - acquireSemaphoresForUrlList(locked, heldSemaphores, pendingAndHeldSemaphores, theWriteOperationsDetails.getConditionalCreateRequestUrls(), true); + acquireSemaphoresForUrlList( + locked, + heldSemaphores, + pendingAndHeldSemaphores, + theWriteOperationsDetails.getUpdateRequestUrls(), + false); + acquireSemaphoresForUrlList( + locked, + heldSemaphores, + pendingAndHeldSemaphores, + theWriteOperationsDetails.getConditionalCreateRequestUrls(), + true); pendingAndHeldSemaphores.keySet().removeIf(k -> pendingAndHeldSemaphores.get(k) == null); if (!pendingAndHeldSemaphores.isEmpty()) { if (isLogWaits()) { - ourLog.info("Waiting to acquire write semaphore for URLs:{}{}", - (pendingAndHeldSemaphores.size() > 1 ? "\n * " : ""), - (pendingAndHeldSemaphores.keySet().stream().sorted().collect(Collectors.joining("\n * ")))); + ourLog.info( + "Waiting to acquire write semaphore for URLs:{}{}", + (pendingAndHeldSemaphores.size() > 1 ? "\n * " : ""), + (pendingAndHeldSemaphores.keySet().stream().sorted().collect(Collectors.joining("\n * ")))); } for (Map.Entry nextEntry : pendingAndHeldSemaphores.entrySet()) { Semaphore nextSemaphore = nextEntry.getValue(); @@ -101,7 +113,10 @@ public class TransactionConcurrencySemaphoreInterceptor { ourLog.trace("Acquired semaphore {} on request URL: {}", nextSemaphore, nextEntry.getKey()); heldSemaphores.add(nextSemaphore); } else { - ourLog.warn("Timed out waiting for semaphore {} on request URL: {}", nextSemaphore, nextEntry.getKey()); + ourLog.warn( + "Timed out waiting for semaphore {} on request URL: {}", + nextSemaphore, + nextEntry.getKey()); break; } } catch (InterruptedException e) { @@ -111,7 +126,7 @@ public class TransactionConcurrencySemaphoreInterceptor { } } - theTransactionDetails.putUserData(HELD_SEMAPHORES, heldSemaphores); + theTransactionDetails.putUserData(HELD_SEMAPHORES, heldSemaphores); } finally { if (locked.get()) { @@ -120,7 +135,12 @@ public class TransactionConcurrencySemaphoreInterceptor { } } - private void acquireSemaphoresForUrlList(AtomicBoolean theLocked, List theHeldSemaphores, Map thePendingAndHeldSemaphores, List urls, boolean isConditionalCreates) { + private void acquireSemaphoresForUrlList( + AtomicBoolean theLocked, + List theHeldSemaphores, + Map thePendingAndHeldSemaphores, + List urls, + boolean isConditionalCreates) { for (String nextUrl : urls) { if (isConditionalCreates) { @@ -144,7 +164,7 @@ public class TransactionConcurrencySemaphoreInterceptor { ourLog.trace("Acquired semaphore {} on request URL: {}", semaphore, nextUrl); theHeldSemaphores.add(semaphore); thePendingAndHeldSemaphores.put(nextUrl, null); - } else { + } else { thePendingAndHeldSemaphores.put(nextUrl, semaphore); } } @@ -172,6 +192,4 @@ public class TransactionConcurrencySemaphoreInterceptor { public long countSemaphores() { return mySemaphoreCache.estimatedSize(); } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java index 84f2f0a6e47..0107943e2ee 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java @@ -59,19 +59,15 @@ import static ca.uhn.fhir.rest.api.Constants.UUID_LENGTH; public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // H2, Derby, MariaDB, and MySql automatically add indexes to foreign keys - public static final DriverTypeEnum[] NON_AUTOMATIC_FK_INDEX_PLATFORMS = new DriverTypeEnum[]{ - DriverTypeEnum.POSTGRES_9_4, DriverTypeEnum.ORACLE_12C, DriverTypeEnum.MSSQL_2012}; + public static final DriverTypeEnum[] NON_AUTOMATIC_FK_INDEX_PLATFORMS = + new DriverTypeEnum[] {DriverTypeEnum.POSTGRES_9_4, DriverTypeEnum.ORACLE_12C, DriverTypeEnum.MSSQL_2012}; private final Set myFlags; - /** * Constructor */ public HapiFhirJpaMigrationTasks(Set theFlags) { - myFlags = theFlags - .stream() - .map(FlagEnum::fromCommandLineValue) - .collect(Collectors.toSet()); + myFlags = theFlags.stream().map(FlagEnum::fromCommandLineValue).collect(Collectors.toSet()); init330(); // 20180114 - 20180329 init340(); // 20180401 - 20180528 @@ -106,202 +102,190 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { Builder.BuilderWithTableName resourceTable = version.onTable("HFJ_RESOURCE"); resourceTable - .addIndex("20230502.1", "IDX_RES_RESID_UPDATED") - .unique(false) - .online(true) - .withColumns("RES_ID", "RES_UPDATED", "PARTITION_ID"); + .addIndex("20230502.1", "IDX_RES_RESID_UPDATED") + .unique(false) + .online(true) + .withColumns("RES_ID", "RES_UPDATED", "PARTITION_ID"); Builder.BuilderWithTableName tagDefTable = version.onTable("HFJ_TAG_DEF"); tagDefTable.dropIndex("20230505.1", "IDX_TAGDEF_TYPESYSCODEVERUS"); tagDefTable.dropIndex("20230505.2", "IDX_TAG_DEF_TP_CD_SYS"); tagDefTable - .addIndex("20230505.3", "IDX_TAG_DEF_TP_CD_SYS") - .unique(false) - .online(false) - .withColumns("TAG_TYPE", "TAG_CODE", "TAG_SYSTEM", "TAG_ID", "TAG_VERSION", "TAG_USER_SELECTED"); + .addIndex("20230505.3", "IDX_TAG_DEF_TP_CD_SYS") + .unique(false) + .online(false) + .withColumns("TAG_TYPE", "TAG_CODE", "TAG_SYSTEM", "TAG_ID", "TAG_VERSION", "TAG_USER_SELECTED"); - // This migration is failing in Oracle because there is already an index created on column RES_VER_PID since it is a primary key. + // This migration is failing in Oracle because there is already an index created on column RES_VER_PID since it + // is a primary key. 
// IDX_RESVERPROV_RESVER_PID is removed in 20230523.1 - version - .onTable("HFJ_RES_VER_PROV") - .addIndex("20230510.1", "IDX_RESVERPROV_RESVER_PID") - .unique(false) - .withColumns("RES_VER_PID") - .failureAllowed(); + version.onTable("HFJ_RES_VER_PROV") + .addIndex("20230510.1", "IDX_RESVERPROV_RESVER_PID") + .unique(false) + .withColumns("RES_VER_PID") + .failureAllowed(); - version - .onTable("HFJ_RES_VER_PROV") - .addIndex("20230510.2", "IDX_RESVERPROV_RES_PID") - .unique(false) - .withColumns("RES_PID"); + version.onTable("HFJ_RES_VER_PROV") + .addIndex("20230510.2", "IDX_RESVERPROV_RES_PID") + .unique(false) + .withColumns("RES_PID"); - version - .onTable(ResourceHistoryTable.HFJ_RES_VER) - .addColumn("20230510.4", "SOURCE_URI") - .nullable() - .type(ColumnTypeEnum.STRING, 100); - version - .onTable(ResourceHistoryTable.HFJ_RES_VER) - .addColumn("20230510.5", "REQUEST_ID") - .nullable() - .type(ColumnTypeEnum.STRING, 16); + version.onTable(ResourceHistoryTable.HFJ_RES_VER) + .addColumn("20230510.4", "SOURCE_URI") + .nullable() + .type(ColumnTypeEnum.STRING, 100); + version.onTable(ResourceHistoryTable.HFJ_RES_VER) + .addColumn("20230510.5", "REQUEST_ID") + .nullable() + .type(ColumnTypeEnum.STRING, 16); - version - .onTable("HFJ_RES_VER_PROV") - .addForeignKey("20230510.6", "FK_RESVERPROV_RES_PID") - .toColumn("RES_PID") - .references("HFJ_RESOURCE", "RES_ID"); + version.onTable("HFJ_RES_VER_PROV") + .addForeignKey("20230510.6", "FK_RESVERPROV_RES_PID") + .toColumn("RES_PID") + .references("HFJ_RESOURCE", "RES_ID"); - version - .onTable("HFJ_RES_VER_PROV") - .dropIndex("20230523.1", "IDX_RESVERPROV_RESVER_PID"); + version.onTable("HFJ_RES_VER_PROV").dropIndex("20230523.1", "IDX_RESVERPROV_RESVER_PID"); // add warning message to batch job instance - version - .onTable("BT2_WORK_CHUNK") - .addColumn("20230524.1", "WARNING_MSG") - .nullable() - .type(ColumnTypeEnum.CLOB) - .doNothing(); // the migration below is the better implementation + version.onTable("BT2_WORK_CHUNK") + .addColumn("20230524.1", "WARNING_MSG") + .nullable() + .type(ColumnTypeEnum.CLOB) + .doNothing(); // the migration below is the better implementation - version - .onTable("BT2_JOB_INSTANCE") - .addColumn("20230524.2", "WARNING_MSG") - .nullable() - .type(ColumnTypeEnum.CLOB) - .doNothing(); // the migration below is the better implementation + version.onTable("BT2_JOB_INSTANCE") + .addColumn("20230524.2", "WARNING_MSG") + .nullable() + .type(ColumnTypeEnum.CLOB) + .doNothing(); // the migration below is the better implementation // adding indexes to foreign keys // this makes our table scans more efficient, // but it also makes us more stable // Oracle does not like unindexed foreign keys version.onTable("NPM_PACKAGE_VER") - .addIndex("20230609.3", "FK_NPM_PKV_PKG") - .unique(false) - .withColumns("PACKAGE_PID") - .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + .addIndex("20230609.3", "FK_NPM_PKV_PKG") + .unique(false) + .withColumns("PACKAGE_PID") + .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); version.onTable("NPM_PACKAGE_VER") - .addIndex("20230609.4", "FK_NPM_PKV_RESID") - .unique(false) - .withColumns("BINARY_RES_ID") - .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + .addIndex("20230609.4", "FK_NPM_PKV_RESID") + .unique(false) + .withColumns("BINARY_RES_ID") + .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); version.onTable("NPM_PACKAGE_VER_RES") - .addIndex("20230609.5", "FK_NPM_PACKVERRES_PACKVER") - .unique(false) - .withColumns("PACKVER_PID") - 
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + .addIndex("20230609.5", "FK_NPM_PACKVERRES_PACKVER") + .unique(false) + .withColumns("PACKVER_PID") + .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); version.onTable("NPM_PACKAGE_VER_RES") - .addIndex("20230609.6", "FK_NPM_PKVR_RESID") - .unique(false) - .withColumns("BINARY_RES_ID") - .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + .addIndex("20230609.6", "FK_NPM_PKVR_RESID") + .unique(false) + .withColumns("BINARY_RES_ID") + .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); version.onTable("MPI_LINK") - .addIndex("20230609.7", "FK_EMPI_LINK_TARGET") - .unique(false) - .withColumns("TARGET_PID") - .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + .addIndex("20230609.7", "FK_EMPI_LINK_TARGET") + .unique(false) + .withColumns("TARGET_PID") + .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); version.onTable("TRM_CODESYSTEM") - .addIndex("20230609.8", "FK_TRMCODESYSTEM_RES") - .unique(false) - .withColumns("RES_ID") - .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + .addIndex("20230609.8", "FK_TRMCODESYSTEM_RES") + .unique(false) + .withColumns("RES_ID") + .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); version.onTable("TRM_CODESYSTEM") - .addIndex("20230609.9", "FK_TRMCODESYSTEM_CURVER") - .unique(false) - .withColumns("CURRENT_VERSION_PID") - .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + .addIndex("20230609.9", "FK_TRMCODESYSTEM_CURVER") + .unique(false) + .withColumns("CURRENT_VERSION_PID") + .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); version.onTable("TRM_CODESYSTEM_VER") - .addIndex("20230609.10", "FK_CODESYSVER_RES_ID") - .unique(false) - .withColumns("RES_ID") - .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + .addIndex("20230609.10", "FK_CODESYSVER_RES_ID") + .unique(false) + .withColumns("RES_ID") + .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); version.onTable("TRM_CODESYSTEM_VER") - .addIndex("20230609.11", "FK_CODESYSVER_CS_ID") - .unique(false) - .withColumns("CODESYSTEM_PID") - .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + .addIndex("20230609.11", "FK_CODESYSVER_CS_ID") + .unique(false) + .withColumns("CODESYSTEM_PID") + .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); version.onTable("TRM_CONCEPT_PC_LINK") - .addIndex("20230609.12", "FK_TERM_CONCEPTPC_CS") - .unique(false) - .withColumns("CODESYSTEM_PID") - .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + .addIndex("20230609.12", "FK_TERM_CONCEPTPC_CS") + .unique(false) + .withColumns("CODESYSTEM_PID") + .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); version.onTable("TRM_CONCEPT_PROPERTY") - .addIndex("20230609.13", "FK_CONCEPTPROP_CSV") - .unique(false) - .withColumns("CS_VER_PID") - .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + .addIndex("20230609.13", "FK_CONCEPTPROP_CSV") + .unique(false) + .withColumns("CS_VER_PID") + .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); version.onTable("TRM_VALUESET") - .addIndex("20230609.14", "FK_TRMVALUESET_RES") - .unique(false) - .withColumns("RES_ID") - .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + .addIndex("20230609.14", "FK_TRMVALUESET_RES") + .unique(false) + .withColumns("RES_ID") + .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); version.onTable("TRM_VALUESET_C_DESIGNATION") - .addIndex("20230609.15", "FK_TRM_VSCD_VS_PID") - .unique(false) - .withColumns("VALUESET_PID") - 
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + .addIndex("20230609.15", "FK_TRM_VSCD_VS_PID") + .unique(false) + .withColumns("VALUESET_PID") + .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); version.onTable("TRM_CONCEPT_MAP") - .addIndex("20230609.17", "FK_TRMCONCEPTMAP_RES") - .unique(false) - .withColumns("RES_ID") - .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + .addIndex("20230609.17", "FK_TRMCONCEPTMAP_RES") + .unique(false) + .withColumns("RES_ID") + .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); version.onTable("TRM_CONCEPT_DESIG") - .addIndex("20230609.18", "FK_CONCEPTDESIG_CSV") - .unique(false) - .withColumns("CS_VER_PID") - .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + .addIndex("20230609.18", "FK_CONCEPTDESIG_CSV") + .unique(false) + .withColumns("CS_VER_PID") + .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); version.onTable("TRM_CONCEPT_MAP_GROUP") - .addIndex("20230609.19", "FK_TCMGROUP_CONCEPTMAP") - .unique(false) - .withColumns("CONCEPT_MAP_PID") - .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + .addIndex("20230609.19", "FK_TCMGROUP_CONCEPTMAP") + .unique(false) + .withColumns("CONCEPT_MAP_PID") + .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); version.onTable("TRM_CONCEPT_MAP_GRP_ELEMENT") - .addIndex("20230609.20", "FK_TCMGELEMENT_GROUP") - .unique(false) - .withColumns("CONCEPT_MAP_GROUP_PID") - .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + .addIndex("20230609.20", "FK_TCMGELEMENT_GROUP") + .unique(false) + .withColumns("CONCEPT_MAP_GROUP_PID") + .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT") - .addIndex("20230609.21", "FK_TCMGETARGET_ELEMENT") - .unique(false) - .withColumns("CONCEPT_MAP_GRP_ELM_PID") - .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + .addIndex("20230609.21", "FK_TCMGETARGET_ELEMENT") + .unique(false) + .withColumns("CONCEPT_MAP_GRP_ELM_PID") + .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); // add warning message to batch job instance using limited varchar column to store - version - .onTable("BT2_WORK_CHUNK") - .dropColumn("20230622.1", "WARNING_MSG") - .failureAllowed(); + version.onTable("BT2_WORK_CHUNK") + .dropColumn("20230622.1", "WARNING_MSG") + .failureAllowed(); - version - .onTable("BT2_WORK_CHUNK") - .addColumn("20230622.2", "WARNING_MSG") - .nullable() - .type(ColumnTypeEnum.STRING, 4000); + version.onTable("BT2_WORK_CHUNK") + .addColumn("20230622.2", "WARNING_MSG") + .nullable() + .type(ColumnTypeEnum.STRING, 4000); - version - .onTable("BT2_JOB_INSTANCE") - .dropColumn("20230622.3", "WARNING_MSG") - .failureAllowed(); + version.onTable("BT2_JOB_INSTANCE") + .dropColumn("20230622.3", "WARNING_MSG") + .failureAllowed(); - version - .onTable("BT2_JOB_INSTANCE") - .addColumn("20230622.4", "WARNING_MSG") - .nullable() - .type(ColumnTypeEnum.STRING, 4000); + version.onTable("BT2_JOB_INSTANCE") + .addColumn("20230622.4", "WARNING_MSG") + .nullable() + .type(ColumnTypeEnum.STRING, 4000); } protected void init660() { @@ -309,68 +293,74 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // fix Postgres clob types - that stupid oid driver problem is still there // BT2_JOB_INSTANCE.PARAMS_JSON_LOB - version.onTable("BT2_JOB_INSTANCE") - .migratePostgresTextClobToBinaryClob("20230208.1", "PARAMS_JSON_LOB"); + version.onTable("BT2_JOB_INSTANCE").migratePostgresTextClobToBinaryClob("20230208.1", "PARAMS_JSON_LOB"); // 
BT2_JOB_INSTANCE.REPORT - version.onTable("BT2_JOB_INSTANCE") - .migratePostgresTextClobToBinaryClob("20230208.2", "REPORT"); + version.onTable("BT2_JOB_INSTANCE").migratePostgresTextClobToBinaryClob("20230208.2", "REPORT"); // BT2_WORK_CHUNK.CHUNK_DATA - version.onTable("BT2_WORK_CHUNK") - .migratePostgresTextClobToBinaryClob("20230208.3", "CHUNK_DATA"); + version.onTable("BT2_WORK_CHUNK").migratePostgresTextClobToBinaryClob("20230208.3", "CHUNK_DATA"); { Builder.BuilderWithTableName tagDefTable = version.onTable("HFJ_TAG_DEF"); // add columns - tagDefTable - .addColumn("20230209.1", "TAG_VERSION") - .nullable() - .type(ColumnTypeEnum.STRING, 30); - tagDefTable - .addColumn("20230209.2", "TAG_USER_SELECTED") - .nullable() - .type(ColumnTypeEnum.BOOLEAN); + tagDefTable.addColumn("20230209.1", "TAG_VERSION").nullable().type(ColumnTypeEnum.STRING, 30); + tagDefTable.addColumn("20230209.2", "TAG_USER_SELECTED").nullable().type(ColumnTypeEnum.BOOLEAN); // Update indexing tagDefTable.dropIndex("20230209.3", "IDX_TAGDEF_TYPESYSCODE"); tagDefTable.dropIndex("20230209.4", "IDX_TAGDEF_TYPESYSCODEVERUS"); Map addTagDefConstraint = new HashMap<>(); - addTagDefConstraint.put(DriverTypeEnum.H2_EMBEDDED, "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODEVERUS UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM, TAG_VERSION, TAG_USER_SELECTED)"); - addTagDefConstraint.put(DriverTypeEnum.MARIADB_10_1, "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODEVERUS UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM, TAG_VERSION, TAG_USER_SELECTED)"); - addTagDefConstraint.put(DriverTypeEnum.MSSQL_2012, "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODEVERUS UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM, TAG_VERSION, TAG_USER_SELECTED)"); - addTagDefConstraint.put(DriverTypeEnum.MYSQL_5_7, "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODEVERUS UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM, TAG_VERSION, TAG_USER_SELECTED)"); - addTagDefConstraint.put(DriverTypeEnum.ORACLE_12C, "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODEVERUS UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM, TAG_VERSION, TAG_USER_SELECTED)"); - addTagDefConstraint.put(DriverTypeEnum.POSTGRES_9_4, "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODEVERUS UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM, TAG_VERSION, TAG_USER_SELECTED)"); + addTagDefConstraint.put( + DriverTypeEnum.H2_EMBEDDED, + "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODEVERUS UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM, TAG_VERSION, TAG_USER_SELECTED)"); + addTagDefConstraint.put( + DriverTypeEnum.MARIADB_10_1, + "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODEVERUS UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM, TAG_VERSION, TAG_USER_SELECTED)"); + addTagDefConstraint.put( + DriverTypeEnum.MSSQL_2012, + "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODEVERUS UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM, TAG_VERSION, TAG_USER_SELECTED)"); + addTagDefConstraint.put( + DriverTypeEnum.MYSQL_5_7, + "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODEVERUS UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM, TAG_VERSION, TAG_USER_SELECTED)"); + addTagDefConstraint.put( + DriverTypeEnum.ORACLE_12C, + "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODEVERUS UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM, TAG_VERSION, TAG_USER_SELECTED)"); + addTagDefConstraint.put( + DriverTypeEnum.POSTGRES_9_4, + "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODEVERUS UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM, TAG_VERSION, TAG_USER_SELECTED)"); 
version.executeRawSql("20230209.5", addTagDefConstraint); } - version - .onTable(Search.HFJ_SEARCH) - .addColumn("20230215.1", Search.SEARCH_UUID) - .nullable() - .type(ColumnTypeEnum.STRING, Search.SEARCH_UUID_COLUMN_LENGTH); - version - .onTable(BulkImportJobEntity.HFJ_BLK_IMPORT_JOB) - .addColumn("20230215.2", BulkImportJobEntity.JOB_ID) - .nullable() - .type(ColumnTypeEnum.STRING, UUID_LENGTH); - version - .onTable(BulkExportJobEntity.HFJ_BLK_EXPORT_JOB) - .addColumn("20230215.3", BulkExportJobEntity.JOB_ID) - .nullable() - .type(ColumnTypeEnum.STRING, UUID_LENGTH); + version.onTable(Search.HFJ_SEARCH) + .addColumn("20230215.1", Search.SEARCH_UUID) + .nullable() + .type(ColumnTypeEnum.STRING, Search.SEARCH_UUID_COLUMN_LENGTH); + version.onTable(BulkImportJobEntity.HFJ_BLK_IMPORT_JOB) + .addColumn("20230215.2", BulkImportJobEntity.JOB_ID) + .nullable() + .type(ColumnTypeEnum.STRING, UUID_LENGTH); + version.onTable(BulkExportJobEntity.HFJ_BLK_EXPORT_JOB) + .addColumn("20230215.3", BulkExportJobEntity.JOB_ID) + .nullable() + .type(ColumnTypeEnum.STRING, UUID_LENGTH); - - Builder.BuilderAddTableByColumns resSearchUrlTable = version.addTableByColumns("20230227.1", "HFJ_RES_SEARCH_URL", "RES_SEARCH_URL"); + Builder.BuilderAddTableByColumns resSearchUrlTable = + version.addTableByColumns("20230227.1", "HFJ_RES_SEARCH_URL", "RES_SEARCH_URL"); resSearchUrlTable.addColumn("RES_SEARCH_URL").nonNullable().type(ColumnTypeEnum.STRING, 768); resSearchUrlTable.addColumn("RES_ID").nonNullable().type(ColumnTypeEnum.LONG); resSearchUrlTable.addColumn("CREATED_TIME").nonNullable().type(ColumnTypeEnum.DATE_TIMESTAMP); - resSearchUrlTable.addIndex("20230227.2", "IDX_RESSEARCHURL_RES").unique(false).withColumns("RES_ID"); - resSearchUrlTable.addIndex("20230227.3", "IDX_RESSEARCHURL_TIME").unique(false).withColumns("CREATED_TIME"); + resSearchUrlTable + .addIndex("20230227.2", "IDX_RESSEARCHURL_RES") + .unique(false) + .withColumns("RES_ID"); + resSearchUrlTable + .addIndex("20230227.3", "IDX_RESSEARCHURL_TIME") + .unique(false) + .withColumns("CREATED_TIME"); { // string search index @@ -378,25 +368,26 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // add res_id to indentity to speed up sorts. stringTable - .addIndex("20230303.1", "IDX_SP_STRING_HASH_IDENT_V2") - .unique(false) - .online(true) - .withColumns("HASH_IDENTITY", "RES_ID", "PARTITION_ID"); + .addIndex("20230303.1", "IDX_SP_STRING_HASH_IDENT_V2") + .unique(false) + .online(true) + .withColumns("HASH_IDENTITY", "RES_ID", "PARTITION_ID"); stringTable.dropIndexOnline("20230303.2", "IDX_SP_STRING_HASH_IDENT"); // add hash_norm to res_id to speed up joins on a second string. 
stringTable - .addIndex("20230303.3", "IDX_SP_STRING_RESID_V2") - .unique(false) - .online(true) - .withColumns("RES_ID", "HASH_NORM_PREFIX", "PARTITION_ID"); + .addIndex("20230303.3", "IDX_SP_STRING_RESID_V2") + .unique(false) + .online(true) + .withColumns("RES_ID", "HASH_NORM_PREFIX", "PARTITION_ID"); // drop and recreate FK_SPIDXSTR_RESOURCE since it will be useing the old IDX_SP_STRING_RESID stringTable.dropForeignKey("20230303.4", "FK_SPIDXSTR_RESOURCE", "HFJ_RESOURCE"); stringTable.dropIndexOnline("20230303.5", "IDX_SP_STRING_RESID"); - stringTable.addForeignKey("20230303.6", "FK_SPIDXSTR_RESOURCE") - .toColumn("RES_ID").references("HFJ_RESOURCE", "RES_ID"); - + stringTable + .addForeignKey("20230303.6", "FK_SPIDXSTR_RESOURCE") + .toColumn("RES_ID") + .references("HFJ_RESOURCE", "RES_ID"); } final String revColumnName = "REV"; @@ -407,12 +398,14 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { { version.addIdGenerator("20230306.1", "SEQ_HFJ_REVINFO"); - final Builder.BuilderAddTableByColumns enversRevInfo = version.addTableByColumns("20230306.2", enversRevisionTable, revColumnName); + final Builder.BuilderAddTableByColumns enversRevInfo = + version.addTableByColumns("20230306.2", enversRevisionTable, revColumnName); enversRevInfo.addColumn(revColumnName).nonNullable().type(ColumnTypeEnum.LONG); enversRevInfo.addColumn(revTstmpColumnName).nullable().type(ColumnTypeEnum.LONG); - final Builder.BuilderAddTableByColumns empiLink = version.addTableByColumns("20230306.6", enversMpiLinkAuditTable, "PID", revColumnName); + final Builder.BuilderAddTableByColumns empiLink = + version.addTableByColumns("20230306.6", enversMpiLinkAuditTable, "PID", revColumnName); empiLink.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG); empiLink.addColumn("REV").nonNullable().type(ColumnTypeEnum.LONG); @@ -432,59 +425,53 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { empiLink.addColumn("VECTOR").nullable().type(ColumnTypeEnum.LONG); empiLink.addColumn("SCORE").nullable().type(ColumnTypeEnum.FLOAT); - // N.B. It's impossible to rename a foreign key in a Hibernate Envers audit table, and the schema migration unit test will fail if we try to drop and recreate it + // N.B. It's impossible to rename a foreign key in a Hibernate Envers audit table, and the schema migration + // unit test will fail if we try to drop and recreate it empiLink.addForeignKey("20230306.7", "FKAOW7NXNCLOEC419ARS0FPP58M") - .toColumn(revColumnName) - .references(enversRevisionTable, revColumnName); + .toColumn(revColumnName) + .references(enversRevisionTable, revColumnName); } { // The pre-release already contains the long version of this column - // We do this becausea doing a modifyColumn on Postgres (and possibly other RDBMS's) will fail with a nasty error: - // column "revtstmp" cannot be cast automatically to type timestamp without time zone Hint: You might need to specify "USING revtstmp::timestamp without time zone". - version - .onTable(enversRevisionTable) - .dropColumn("20230316.1", revTstmpColumnName); + // We do this becausea doing a modifyColumn on Postgres (and possibly other RDBMS's) will fail with a nasty + // error: + // column "revtstmp" cannot be cast automatically to type timestamp without time zone Hint: You might need + // to specify "USING revtstmp::timestamp without time zone". 
+ version.onTable(enversRevisionTable).dropColumn("20230316.1", revTstmpColumnName); - version - .onTable(enversRevisionTable) - .addColumn("20230316.2", revTstmpColumnName) - .nullable() - .type(ColumnTypeEnum.DATE_TIMESTAMP); + version.onTable(enversRevisionTable) + .addColumn("20230316.2", revTstmpColumnName) + .nullable() + .type(ColumnTypeEnum.DATE_TIMESTAMP); // New columns from AuditableBasePartitionable - version - .onTable(enversMpiLinkAuditTable) - .addColumn("20230316.3", "PARTITION_ID") - .nullable() - .type(ColumnTypeEnum.INT); + version.onTable(enversMpiLinkAuditTable) + .addColumn("20230316.3", "PARTITION_ID") + .nullable() + .type(ColumnTypeEnum.INT); - version - .onTable(enversMpiLinkAuditTable) - .addColumn("20230316.4", "PARTITION_DATE") - .nullable() - .type(ColumnTypeEnum.DATE_ONLY); + version.onTable(enversMpiLinkAuditTable) + .addColumn("20230316.4", "PARTITION_DATE") + .nullable() + .type(ColumnTypeEnum.DATE_ONLY); } - version - .onTable(ResourceTable.HFJ_RESOURCE) - .addColumn("20230323.1", "SEARCH_URL_PRESENT") - .nullable() - .type(ColumnTypeEnum.BOOLEAN); - + version.onTable(ResourceTable.HFJ_RESOURCE) + .addColumn("20230323.1", "SEARCH_URL_PRESENT") + .nullable() + .type(ColumnTypeEnum.BOOLEAN); { Builder.BuilderWithTableName uriTable = version.onTable("HFJ_SPIDX_URI"); - uriTable - .addIndex("20230324.1", "IDX_SP_URI_HASH_URI_V2") - .unique(true) - .online(true) - .withColumns("HASH_URI", "RES_ID", "PARTITION_ID"); - uriTable - .addIndex("20230324.2", "IDX_SP_URI_HASH_IDENTITY_V2") - .unique(true) - .online(true) - .withColumns("HASH_IDENTITY", "SP_URI", "RES_ID", "PARTITION_ID"); + uriTable.addIndex("20230324.1", "IDX_SP_URI_HASH_URI_V2") + .unique(true) + .online(true) + .withColumns("HASH_URI", "RES_ID", "PARTITION_ID"); + uriTable.addIndex("20230324.2", "IDX_SP_URI_HASH_IDENTITY_V2") + .unique(true) + .online(true) + .withColumns("HASH_IDENTITY", "SP_URI", "RES_ID", "PARTITION_ID"); uriTable.dropIndex("20230324.3", "IDX_SP_URI_RESTYPE_NAME"); uriTable.dropIndex("20230324.4", "IDX_SP_URI_UPDATED"); uriTable.dropIndex("20230324.5", "IDX_SP_URI"); @@ -492,132 +479,127 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { uriTable.dropIndex("20230324.7", "IDX_SP_URI_HASH_IDENTITY"); } + version.onTable("HFJ_SPIDX_COORDS").dropIndex("20230325.1", "IDX_SP_COORDS_HASH"); version.onTable("HFJ_SPIDX_COORDS") - .dropIndex("20230325.1", "IDX_SP_COORDS_HASH"); - version.onTable("HFJ_SPIDX_COORDS") - .addIndex("20230325.2", "IDX_SP_COORDS_HASH_V2") - .unique(false) - .online(true) - .withColumns("HASH_IDENTITY", "SP_LATITUDE", "SP_LONGITUDE", "RES_ID", "PARTITION_ID"); - + .addIndex("20230325.2", "IDX_SP_COORDS_HASH_V2") + .unique(false) + .online(true) + .withColumns("HASH_IDENTITY", "SP_LATITUDE", "SP_LONGITUDE", "RES_ID", "PARTITION_ID"); // Postgres tuning. 
- String postgresTuningStatementsAll = ClasspathUtil.loadResource("ca/uhn/fhir/jpa/docs/database/hapifhirpostgres94-init01.sql"); - List postgresTuningStatements = Arrays - .stream(postgresTuningStatementsAll.split("\\n")) - .map(StringUtils::trim) - .filter(StringUtils::isNotBlank) - .filter(t -> !t.startsWith("--")) - .collect(Collectors.toList()); + String postgresTuningStatementsAll = + ClasspathUtil.loadResource("ca/uhn/fhir/jpa/docs/database/hapifhirpostgres94-init01.sql"); + List postgresTuningStatements = Arrays.stream(postgresTuningStatementsAll.split("\\n")) + .map(StringUtils::trim) + .filter(StringUtils::isNotBlank) + .filter(t -> !t.startsWith("--")) + .collect(Collectors.toList()); version.executeRawSqls("20230402.1", Map.of(DriverTypeEnum.POSTGRES_9_4, postgresTuningStatements)); // Use an unlimited length text column for RES_TEXT_VC - version - .onTable("HFJ_RES_VER") - .modifyColumn("20230421.1", "RES_TEXT_VC") - .nullable() - .failureAllowed() - .withType(ColumnTypeEnum.TEXT); + version.onTable("HFJ_RES_VER") + .modifyColumn("20230421.1", "RES_TEXT_VC") + .nullable() + .failureAllowed() + .withType(ColumnTypeEnum.TEXT); { // add hash_norm to res_id to speed up joins on a second string. Builder.BuilderWithTableName linkTable = version.onTable("HFJ_RES_LINK"); linkTable - .addIndex("20230424.1", "IDX_RL_TGT_v2") - .unique(false) - .online(true) - .withColumns("TARGET_RESOURCE_ID", "SRC_PATH", "SRC_RESOURCE_ID", "TARGET_RESOURCE_TYPE", "PARTITION_ID"); + .addIndex("20230424.1", "IDX_RL_TGT_v2") + .unique(false) + .online(true) + .withColumns( + "TARGET_RESOURCE_ID", + "SRC_PATH", + "SRC_RESOURCE_ID", + "TARGET_RESOURCE_TYPE", + "PARTITION_ID"); // drop and recreate FK_SPIDXSTR_RESOURCE since it will be useing the old IDX_SP_STRING_RESID linkTable.dropForeignKey("20230424.2", "FK_RESLINK_TARGET", "HFJ_RESOURCE"); linkTable.dropIndexOnline("20230424.3", "IDX_RL_TPATHRES"); linkTable.dropIndexOnline("20230424.4", "IDX_RL_DEST"); - linkTable.addForeignKey("20230424.5", "FK_RESLINK_TARGET") - .toColumn("TARGET_RESOURCE_ID").references("HFJ_RESOURCE", "RES_ID"); + linkTable + .addForeignKey("20230424.5", "FK_RESLINK_TARGET") + .toColumn("TARGET_RESOURCE_ID") + .references("HFJ_RESOURCE", "RES_ID"); } { version.onTable("MPI_LINK") - .addIndex("20230504.1", "IDX_EMPI_GR_TGT") - .unique(false) - .withColumns("GOLDEN_RESOURCE_PID", "TARGET_PID"); + .addIndex("20230504.1", "IDX_EMPI_GR_TGT") + .unique(false) + .withColumns("GOLDEN_RESOURCE_PID", "TARGET_PID"); } - } protected void init640() { Builder version = forVersion(VersionEnum.V6_3_0); // start forced_id inline migration - version - .onTable("HFJ_RESOURCE") - .addColumn("20221108.1", "FHIR_ID") - .nullable() - // FHIR ids contain a subset of ascii, limited to 64 chars. - .type(ColumnTypeEnum.STRING, 64); + version.onTable("HFJ_RESOURCE") + .addColumn("20221108.1", "FHIR_ID") + .nullable() + // FHIR ids contain a subset of ascii, limited to 64 chars. 
+ .type(ColumnTypeEnum.STRING, 64); // Add new Index to HFJ_SEARCH_INCLUDE on SEARCH_PID - version - .onTable("HFJ_SEARCH_INCLUDE") - .addIndex("20221207.1", "FK_SEARCHINC_SEARCH") - .unique(false) - .online(true) - .withColumns("SEARCH_PID") - .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + version.onTable("HFJ_SEARCH_INCLUDE") + .addIndex("20221207.1", "FK_SEARCHINC_SEARCH") + .unique(false) + .online(true) + .withColumns("SEARCH_PID") + .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); } protected void init640_after_20230126() { Builder version = forVersion(VersionEnum.V6_3_0); - { //We added this constraint when userSelected and Version were added. It is no longer necessary. + { // We added this constraint when userSelected and Version were added. It is no longer necessary. Builder.BuilderWithTableName tagDefTable = version.onTable("HFJ_TAG_DEF"); tagDefTable.dropIndex("20230503.1", "IDX_TAGDEF_TYPESYSCODEVERUS"); } } - private void init620() { Builder version = forVersion(VersionEnum.V6_2_0); // add new REPORT column to BATCH2 tables - version - .onTable("BT2_JOB_INSTANCE") - .addColumn("20220830.1", "FAST_TRACKING") - .nullable() - .type(ColumnTypeEnum.BOOLEAN); + version.onTable("BT2_JOB_INSTANCE") + .addColumn("20220830.1", "FAST_TRACKING") + .nullable() + .type(ColumnTypeEnum.BOOLEAN); - version - .onTable("HFJ_BINARY_STORAGE_BLOB") - .modifyColumn("20221017.1", "BLOB_SIZE") - .nullable() - .withType(ColumnTypeEnum.LONG); + version.onTable("HFJ_BINARY_STORAGE_BLOB") + .modifyColumn("20221017.1", "BLOB_SIZE") + .nullable() + .withType(ColumnTypeEnum.LONG); version.onTable("HFJ_SPIDX_URI") - .modifyColumn("20221103.1", "SP_URI") - .nullable() - .withType(ColumnTypeEnum.STRING, 500); + .modifyColumn("20221103.1", "SP_URI") + .nullable() + .withType(ColumnTypeEnum.STRING, 500); version.onTable("BT2_JOB_INSTANCE") - .addColumn("20230110.1", "UPDATE_TIME") - .nullable() - .type(ColumnTypeEnum.DATE_TIMESTAMP); + .addColumn("20230110.1", "UPDATE_TIME") + .nullable() + .type(ColumnTypeEnum.DATE_TIMESTAMP); version.onTable("BT2_WORK_CHUNK") - .addColumn("20230110.2", "UPDATE_TIME") - .nullable() - .type(ColumnTypeEnum.DATE_TIMESTAMP); - - + .addColumn("20230110.2", "UPDATE_TIME") + .nullable() + .type(ColumnTypeEnum.DATE_TIMESTAMP); } private void init610() { Builder version = forVersion(VersionEnum.V6_1_0); // add new REPORT column to BATCH2 tables - version - .onTable("BT2_JOB_INSTANCE") - .addColumn("20220601.1", "REPORT") - .nullable() - .type(ColumnTypeEnum.CLOB); + version.onTable("BT2_JOB_INSTANCE") + .addColumn("20220601.1", "REPORT") + .nullable() + .type(ColumnTypeEnum.CLOB); } private void init600() { @@ -637,10 +619,10 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // replace and drop IDX_SP_DATE_HASH dateTable - .addIndex("20220207.1", "IDX_SP_DATE_HASH_V2") - .unique(false) - .online(true) - .withColumns("HASH_IDENTITY", "SP_VALUE_LOW", "SP_VALUE_HIGH", "RES_ID", "PARTITION_ID"); + .addIndex("20220207.1", "IDX_SP_DATE_HASH_V2") + .unique(false) + .online(true) + .withColumns("HASH_IDENTITY", "SP_VALUE_LOW", "SP_VALUE_HIGH", "RES_ID", "PARTITION_ID"); dateTable.dropIndexOnline("20220207.2", "IDX_SP_DATE_HASH"); // drop redundant @@ -648,26 +630,31 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // replace and drop IDX_SP_DATE_HASH_HIGH dateTable - .addIndex("20220207.4", "IDX_SP_DATE_HASH_HIGH_V2") - .unique(false) - .online(true) - .withColumns("HASH_IDENTITY", "SP_VALUE_HIGH", "RES_ID", "PARTITION_ID"); + 
.addIndex("20220207.4", "IDX_SP_DATE_HASH_HIGH_V2") + .unique(false) + .online(true) + .withColumns("HASH_IDENTITY", "SP_VALUE_HIGH", "RES_ID", "PARTITION_ID"); dateTable.dropIndexOnline("20220207.5", "IDX_SP_DATE_HASH_HIGH"); // replace and drop IDX_SP_DATE_ORD_HASH dateTable - .addIndex("20220207.6", "IDX_SP_DATE_ORD_HASH_V2") - .unique(false) - .online(true) - .withColumns("HASH_IDENTITY", "SP_VALUE_LOW_DATE_ORDINAL", "SP_VALUE_HIGH_DATE_ORDINAL", "RES_ID", "PARTITION_ID"); + .addIndex("20220207.6", "IDX_SP_DATE_ORD_HASH_V2") + .unique(false) + .online(true) + .withColumns( + "HASH_IDENTITY", + "SP_VALUE_LOW_DATE_ORDINAL", + "SP_VALUE_HIGH_DATE_ORDINAL", + "RES_ID", + "PARTITION_ID"); dateTable.dropIndexOnline("20220207.7", "IDX_SP_DATE_ORD_HASH"); // replace and drop IDX_SP_DATE_ORD_HASH_HIGH dateTable - .addIndex("20220207.8", "IDX_SP_DATE_ORD_HASH_HIGH_V2") - .unique(false) - .online(true) - .withColumns("HASH_IDENTITY", "SP_VALUE_HIGH_DATE_ORDINAL", "RES_ID", "PARTITION_ID"); + .addIndex("20220207.8", "IDX_SP_DATE_ORD_HASH_HIGH_V2") + .unique(false) + .online(true) + .withColumns("HASH_IDENTITY", "SP_VALUE_HIGH_DATE_ORDINAL", "RES_ID", "PARTITION_ID"); dateTable.dropIndexOnline("20220207.9", "IDX_SP_DATE_ORD_HASH_HIGH"); // drop redundant @@ -675,10 +662,17 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // replace and drop IDX_SP_DATE_RESID dateTable - .addIndex("20220207.11", "IDX_SP_DATE_RESID_V2") - .unique(false) - .online(true) - .withColumns("RES_ID", "HASH_IDENTITY", "SP_VALUE_LOW", "SP_VALUE_HIGH", "SP_VALUE_LOW_DATE_ORDINAL", "SP_VALUE_HIGH_DATE_ORDINAL", "PARTITION_ID"); + .addIndex("20220207.11", "IDX_SP_DATE_RESID_V2") + .unique(false) + .online(true) + .withColumns( + "RES_ID", + "HASH_IDENTITY", + "SP_VALUE_LOW", + "SP_VALUE_HIGH", + "SP_VALUE_LOW_DATE_ORDINAL", + "SP_VALUE_HIGH_DATE_ORDINAL", + "PARTITION_ID"); // some engines tie the FK constraint to a particular index. // So we need to drop and recreate the constraint to drop the old RES_ID index. // Rename it while we're at it. FK17s70oa59rm9n61k9thjqrsqm was not a pretty name. 
@@ -686,8 +680,10 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { dateTable.dropIndexOnline("20220207.13", "IDX_SP_DATE_RESID"); dateTable.dropIndexOnline("20220207.14", "FK17S70OA59RM9N61K9THJQRSQM"); - dateTable.addForeignKey("20220207.15", "FK_SP_DATE_RES") - .toColumn("RES_ID").references("HFJ_RESOURCE", "RES_ID"); + dateTable + .addForeignKey("20220207.15", "FK_SP_DATE_RES") + .toColumn("RES_ID") + .references("HFJ_RESOURCE", "RES_ID"); // drop obsolete dateTable.dropIndexOnline("20220207.16", "IDX_SP_DATE_UPDATED"); @@ -703,33 +699,37 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // replace and drop IDX_SP_TOKEN_HASH for sorting tokenTable - .addIndex("20220208.1", "IDX_SP_TOKEN_HASH_V2") - .unique(false).online(true) - .withColumns("HASH_IDENTITY", "SP_SYSTEM", "SP_VALUE", "RES_ID", "PARTITION_ID"); + .addIndex("20220208.1", "IDX_SP_TOKEN_HASH_V2") + .unique(false) + .online(true) + .withColumns("HASH_IDENTITY", "SP_SYSTEM", "SP_VALUE", "RES_ID", "PARTITION_ID"); tokenTable.dropIndexOnline("20220208.2", "IDX_SP_TOKEN_HASH"); // for search by system tokenTable - .addIndex("20220208.3", "IDX_SP_TOKEN_HASH_S_V2") - .unique(false).online(true) - .withColumns("HASH_SYS", "RES_ID", "PARTITION_ID"); + .addIndex("20220208.3", "IDX_SP_TOKEN_HASH_S_V2") + .unique(false) + .online(true) + .withColumns("HASH_SYS", "RES_ID", "PARTITION_ID"); tokenTable.dropIndexOnline("20220208.4", "IDX_SP_TOKEN_HASH_S"); // for search by system+value tokenTable - .addIndex("20220208.5", "IDX_SP_TOKEN_HASH_SV_V2") - .unique(false).online(true) - .withColumns("HASH_SYS_AND_VALUE", "RES_ID", "PARTITION_ID"); + .addIndex("20220208.5", "IDX_SP_TOKEN_HASH_SV_V2") + .unique(false) + .online(true) + .withColumns("HASH_SYS_AND_VALUE", "RES_ID", "PARTITION_ID"); tokenTable.dropIndexOnline("20220208.6", "IDX_SP_TOKEN_HASH_SV"); // for search by value tokenTable - .addIndex("20220208.7", "IDX_SP_TOKEN_HASH_V_V2") - .unique(false).online(true) - .withColumns("HASH_VALUE", "RES_ID", "PARTITION_ID"); + .addIndex("20220208.7", "IDX_SP_TOKEN_HASH_V_V2") + .unique(false) + .online(true) + .withColumns("HASH_VALUE", "RES_ID", "PARTITION_ID"); tokenTable.dropIndexOnline("20220208.8", "IDX_SP_TOKEN_HASH_V"); @@ -740,9 +740,16 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { { // replace and drop IDX_SP_TOKEN_RESID, and the associated fk constraint tokenTable - .addIndex("20220208.10", "IDX_SP_TOKEN_RESID_V2") - .unique(false).online(true) - .withColumns("RES_ID", "HASH_SYS_AND_VALUE", "HASH_VALUE", "HASH_SYS", "HASH_IDENTITY", "PARTITION_ID"); + .addIndex("20220208.10", "IDX_SP_TOKEN_RESID_V2") + .unique(false) + .online(true) + .withColumns( + "RES_ID", + "HASH_SYS_AND_VALUE", + "HASH_VALUE", + "HASH_SYS", + "HASH_IDENTITY", + "PARTITION_ID"); // some engines tie the FK constraint to a particular index. // So we need to drop and recreate the constraint to drop the old RES_ID index. 
@@ -751,23 +758,27 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { tokenTable.dropIndexOnline("20220208.12", "IDX_SP_TOKEN_RESID"); tokenTable.dropIndexOnline("20220208.13", "FK7ULX3J1GG3V7MAQREJGC7YBC4"); - tokenTable.addForeignKey("20220208.14", "FK_SP_TOKEN_RES") - .toColumn("RES_ID").references("HFJ_RESOURCE", "RES_ID"); + tokenTable + .addForeignKey("20220208.14", "FK_SP_TOKEN_RES") + .toColumn("RES_ID") + .references("HFJ_RESOURCE", "RES_ID"); } } // fix for https://github.com/hapifhir/hapi-fhir/issues/3316 - // index must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index automatically + // index must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index + // automatically version.onTable("TRM_VALUESET_C_DESIGNATION") - .addIndex("20220223.1", "FK_TRM_VALUESET_CONCEPT_PID") - .unique(false) - .withColumns("VALUESET_CONCEPT_PID") - .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + .addIndex("20220223.1", "FK_TRM_VALUESET_CONCEPT_PID") + .unique(false) + .withColumns("VALUESET_CONCEPT_PID") + .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); // Batch2 Framework - Builder.BuilderAddTableByColumns batchInstance = version.addTableByColumns("20220227.1", "BT2_JOB_INSTANCE", "ID"); + Builder.BuilderAddTableByColumns batchInstance = + version.addTableByColumns("20220227.1", "BT2_JOB_INSTANCE", "ID"); batchInstance.addColumn("ID").nonNullable().type(ColumnTypeEnum.STRING, 100); batchInstance.addColumn("CREATE_TIME").nonNullable().type(ColumnTypeEnum.DATE_TIMESTAMP); batchInstance.addColumn("START_TIME").nullable().type(ColumnTypeEnum.DATE_TIMESTAMP); @@ -804,38 +815,38 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { batchChunk.addColumn("ERROR_MSG").nullable().type(ColumnTypeEnum.STRING, 500); batchChunk.addColumn("ERROR_COUNT").nonNullable().type(ColumnTypeEnum.INT); batchChunk.addIndex("20220227.4", "IDX_BT2WC_II_SEQ").unique(false).withColumns("INSTANCE_ID", "SEQ"); - batchChunk.addForeignKey("20220227.5", "FK_BT2WC_INSTANCE").toColumn("INSTANCE_ID").references("BT2_JOB_INSTANCE", "ID"); + batchChunk + .addForeignKey("20220227.5", "FK_BT2WC_INSTANCE") + .toColumn("INSTANCE_ID") + .references("BT2_JOB_INSTANCE", "ID"); replaceNumericSPIndices(version); replaceQuantitySPIndices(version); // Drop Index on HFJ_RESOURCE.INDEX_STATUS - version - .onTable("HFJ_RESOURCE") - .dropIndex("20220314.1", "IDX_INDEXSTATUS"); + version.onTable("HFJ_RESOURCE").dropIndex("20220314.1", "IDX_INDEXSTATUS"); - version - .onTable("BT2_JOB_INSTANCE") - .addColumn("20220416.1", "CUR_GATED_STEP_ID") - .nullable() - .type(ColumnTypeEnum.STRING, 100); + version.onTable("BT2_JOB_INSTANCE") + .addColumn("20220416.1", "CUR_GATED_STEP_ID") + .nullable() + .type(ColumnTypeEnum.STRING, 100); - //Make Job expiry nullable so that we can prevent job expiry by using a null value. - version - .onTable("HFJ_BLK_EXPORT_JOB").modifyColumn("20220423.1", "EXP_TIME").nullable().withType(ColumnTypeEnum.DATE_TIMESTAMP); + // Make Job expiry nullable so that we can prevent job expiry by using a null value. 
+ version.onTable("HFJ_BLK_EXPORT_JOB") + .modifyColumn("20220423.1", "EXP_TIME") + .nullable() + .withType(ColumnTypeEnum.DATE_TIMESTAMP); // New Index on HFJ_RESOURCE for $reindex Operation - hapi-fhir #3534 { version.onTable("HFJ_RESOURCE") - .addIndex("20220425.1", "IDX_RES_TYPE_DEL_UPDATED") - .unique(false) - .online(true) - .withColumns("RES_TYPE", "RES_DELETED_AT", "RES_UPDATED", "PARTITION_ID", "RES_ID"); + .addIndex("20220425.1", "IDX_RES_TYPE_DEL_UPDATED") + .unique(false) + .online(true) + .withColumns("RES_TYPE", "RES_DELETED_AT", "RES_UPDATED", "PARTITION_ID", "RES_ID"); // Drop existing Index on HFJ_RESOURCE.RES_TYPE since the new Index will meet the overall Index Demand - version - .onTable("HFJ_RESOURCE") - .dropIndexOnline("20220425.2", "IDX_RES_TYPE"); + version.onTable("HFJ_RESOURCE").dropIndexOnline("20220425.2", "IDX_RES_TYPE"); } /** @@ -848,17 +859,17 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // add res_id, and partition_id so queries are covered without row-reads. tokenTable - .addIndex("20220428.1", "IDX_SP_STRING_HASH_NRM_V2") - .unique(false) - .online(true) - .withColumns("HASH_NORM_PREFIX", "SP_VALUE_NORMALIZED", "RES_ID", "PARTITION_ID"); + .addIndex("20220428.1", "IDX_SP_STRING_HASH_NRM_V2") + .unique(false) + .online(true) + .withColumns("HASH_NORM_PREFIX", "SP_VALUE_NORMALIZED", "RES_ID", "PARTITION_ID"); tokenTable.dropIndexOnline("20220428.2", "IDX_SP_STRING_HASH_NRM"); tokenTable - .addIndex("20220428.3", "IDX_SP_STRING_HASH_EXCT_V2") - .unique(false) - .online(true) - .withColumns("HASH_EXACT", "RES_ID", "PARTITION_ID"); + .addIndex("20220428.3", "IDX_SP_STRING_HASH_EXCT_V2") + .unique(false) + .online(true) + .withColumns("HASH_EXACT", "RES_ID", "PARTITION_ID"); tokenTable.dropIndexOnline("20220428.4", "IDX_SP_STRING_HASH_EXCT"); // we will drop the updated column. Start with the index. @@ -871,62 +882,88 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // add res_id, and partition_id so queries are covered without row-reads. resTagTable - .addIndex("20220429.1", "IDX_RES_TAG_RES_TAG") - .unique(false) - .online(true) - .withColumns("RES_ID", "TAG_ID", "PARTITION_ID"); + .addIndex("20220429.1", "IDX_RES_TAG_RES_TAG") + .unique(false) + .online(true) + .withColumns("RES_ID", "TAG_ID", "PARTITION_ID"); resTagTable - .addIndex("20220429.2", "IDX_RES_TAG_TAG_RES") - .unique(false) - .online(true) - .withColumns("TAG_ID", "RES_ID", "PARTITION_ID"); + .addIndex("20220429.2", "IDX_RES_TAG_TAG_RES") + .unique(false) + .online(true) + .withColumns("TAG_ID", "RES_ID", "PARTITION_ID"); resTagTable.dropIndex("20220429.4", "IDX_RESTAG_TAGID"); // Weird that we don't have addConstraint. No time to do it today. 
Map addResTagConstraint = new HashMap<>(); - addResTagConstraint.put(DriverTypeEnum.H2_EMBEDDED, "ALTER TABLE HFJ_RES_TAG ADD CONSTRAINT IDX_RESTAG_TAGID UNIQUE (RES_ID, TAG_ID)"); - addResTagConstraint.put(DriverTypeEnum.MARIADB_10_1, "ALTER TABLE HFJ_RES_TAG ADD CONSTRAINT IDX_RESTAG_TAGID UNIQUE (RES_ID, TAG_ID)"); - addResTagConstraint.put(DriverTypeEnum.MSSQL_2012, "ALTER TABLE HFJ_RES_TAG ADD CONSTRAINT IDX_RESTAG_TAGID UNIQUE (RES_ID, TAG_ID)"); - addResTagConstraint.put(DriverTypeEnum.MYSQL_5_7, "ALTER TABLE HFJ_RES_TAG ADD CONSTRAINT IDX_RESTAG_TAGID UNIQUE (RES_ID, TAG_ID)"); - addResTagConstraint.put(DriverTypeEnum.ORACLE_12C, "ALTER TABLE HFJ_RES_TAG ADD CONSTRAINT IDX_RESTAG_TAGID UNIQUE (RES_ID, TAG_ID)"); - addResTagConstraint.put(DriverTypeEnum.POSTGRES_9_4, "ALTER TABLE HFJ_RES_TAG ADD CONSTRAINT IDX_RESTAG_TAGID UNIQUE (RES_ID, TAG_ID)"); + addResTagConstraint.put( + DriverTypeEnum.H2_EMBEDDED, + "ALTER TABLE HFJ_RES_TAG ADD CONSTRAINT IDX_RESTAG_TAGID UNIQUE (RES_ID, TAG_ID)"); + addResTagConstraint.put( + DriverTypeEnum.MARIADB_10_1, + "ALTER TABLE HFJ_RES_TAG ADD CONSTRAINT IDX_RESTAG_TAGID UNIQUE (RES_ID, TAG_ID)"); + addResTagConstraint.put( + DriverTypeEnum.MSSQL_2012, + "ALTER TABLE HFJ_RES_TAG ADD CONSTRAINT IDX_RESTAG_TAGID UNIQUE (RES_ID, TAG_ID)"); + addResTagConstraint.put( + DriverTypeEnum.MYSQL_5_7, + "ALTER TABLE HFJ_RES_TAG ADD CONSTRAINT IDX_RESTAG_TAGID UNIQUE (RES_ID, TAG_ID)"); + addResTagConstraint.put( + DriverTypeEnum.ORACLE_12C, + "ALTER TABLE HFJ_RES_TAG ADD CONSTRAINT IDX_RESTAG_TAGID UNIQUE (RES_ID, TAG_ID)"); + addResTagConstraint.put( + DriverTypeEnum.POSTGRES_9_4, + "ALTER TABLE HFJ_RES_TAG ADD CONSTRAINT IDX_RESTAG_TAGID UNIQUE (RES_ID, TAG_ID)"); version.executeRawSql("20220429.5", addResTagConstraint); Builder.BuilderWithTableName tagTable = version.onTable("HFJ_TAG_DEF"); - tagTable - .addIndex("20220429.6", "IDX_TAG_DEF_TP_CD_SYS") - .unique(false) - .online(false) - .withColumns("TAG_TYPE", "TAG_CODE", "TAG_SYSTEM", "TAG_ID"); + tagTable.addIndex("20220429.6", "IDX_TAG_DEF_TP_CD_SYS") + .unique(false) + .online(false) + .withColumns("TAG_TYPE", "TAG_CODE", "TAG_SYSTEM", "TAG_ID"); // move constraint to new index - // Ugh. Only oracle supports using IDX_TAG_DEF_TP_CD_SYS to enforce this constraint. The others will create another index. + // Ugh. Only oracle supports using IDX_TAG_DEF_TP_CD_SYS to enforce this constraint. The others will + // create another index. // For Sql Server, should change the index to be unique with include columns. 
Do this in 6.1 tagTable.dropIndex("20220429.8", "IDX_TAGDEF_TYPESYSCODE"); Map addTagDefConstraint = new HashMap<>(); - addTagDefConstraint.put(DriverTypeEnum.H2_EMBEDDED, "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM)"); - addTagDefConstraint.put(DriverTypeEnum.MARIADB_10_1, "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM)"); - addTagDefConstraint.put(DriverTypeEnum.MSSQL_2012, "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM)"); - addTagDefConstraint.put(DriverTypeEnum.MYSQL_5_7, "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM)"); - addTagDefConstraint.put(DriverTypeEnum.ORACLE_12C, "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM)"); - addTagDefConstraint.put(DriverTypeEnum.POSTGRES_9_4, "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM)"); + addTagDefConstraint.put( + DriverTypeEnum.H2_EMBEDDED, + "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM)"); + addTagDefConstraint.put( + DriverTypeEnum.MARIADB_10_1, + "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM)"); + addTagDefConstraint.put( + DriverTypeEnum.MSSQL_2012, + "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM)"); + addTagDefConstraint.put( + DriverTypeEnum.MYSQL_5_7, + "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM)"); + addTagDefConstraint.put( + DriverTypeEnum.ORACLE_12C, + "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM)"); + addTagDefConstraint.put( + DriverTypeEnum.POSTGRES_9_4, + "ALTER TABLE HFJ_TAG_DEF ADD CONSTRAINT IDX_TAGDEF_TYPESYSCODE UNIQUE (TAG_TYPE, TAG_CODE, TAG_SYSTEM)"); version.executeRawSql("20220429.9", addTagDefConstraint); - } - // Fix for https://github.com/hapifhir/hapi-fhir-jpaserver-starter/issues/328 version.onTable("NPM_PACKAGE_VER") - .modifyColumn("20220501.1", "FHIR_VERSION_ID").nonNullable().withType(ColumnTypeEnum.STRING, 20); + .modifyColumn("20220501.1", "FHIR_VERSION_ID") + .nonNullable() + .withType(ColumnTypeEnum.STRING, 20); version.onTable("NPM_PACKAGE_VER_RES") - .modifyColumn("20220501.2", "FHIR_VERSION_ID").nonNullable().withType(ColumnTypeEnum.STRING, 20); + .modifyColumn("20220501.2", "FHIR_VERSION_ID") + .nonNullable() + .withType(ColumnTypeEnum.STRING, 20); // Fix for https://gitlab.com/simpatico.ai/cdr/-/issues/3166 version.onTable("MPI_LINK") - .addIndex("20220613.1", "IDX_EMPI_MATCH_TGT_VER") - .unique(false) - .online(true) - .withColumns("MATCH_RESULT", "TARGET_PID", "VERSION"); + .addIndex("20220613.1", "IDX_EMPI_MATCH_TGT_VER") + .unique(false) + .online(true) + .withColumns("MATCH_RESULT", "TARGET_PID", "VERSION"); } /** @@ -940,19 +977,20 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // Main query index numberTable - .addIndex("20220304.1", "IDX_SP_NUMBER_HASH_VAL_V2") - .unique(false) - .online(true) - .withColumns("HASH_IDENTITY", "SP_VALUE", "RES_ID", "PARTITION_ID"); + .addIndex("20220304.1", "IDX_SP_NUMBER_HASH_VAL_V2") + .unique(false) + .online(true) + .withColumns("HASH_IDENTITY", "SP_VALUE", "RES_ID", "PARTITION_ID"); numberTable.dropIndexOnline("20220304.2", 
"IDX_SP_NUMBER_HASH_VAL"); // for joining to other queries { numberTable - .addIndex("20220304.3", "IDX_SP_NUMBER_RESID_V2") - .unique(false).online(true) - .withColumns("RES_ID", "HASH_IDENTITY", "SP_VALUE", "PARTITION_ID"); + .addIndex("20220304.3", "IDX_SP_NUMBER_RESID_V2") + .unique(false) + .online(true) + .withColumns("RES_ID", "HASH_IDENTITY", "SP_VALUE", "PARTITION_ID"); // some engines tie the FK constraint to a particular index. // So we need to drop and recreate the constraint to drop the old RES_ID index. @@ -961,8 +999,10 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { numberTable.dropIndexOnline("20220304.5", "IDX_SP_NUMBER_RESID"); numberTable.dropIndexOnline("20220304.6", "FKCLTIHNC5TGPRJ9BHPT7XI5OTB"); - numberTable.addForeignKey("20220304.7", "FK_SP_NUMBER_RES") - .toColumn("RES_ID").references("HFJ_RESOURCE", "RES_ID"); + numberTable + .addForeignKey("20220304.7", "FK_SP_NUMBER_RES") + .toColumn("RES_ID") + .references("HFJ_RESOURCE", "RES_ID"); } // obsolete numberTable.dropIndexOnline("20220304.8", "IDX_SP_NUMBER_UPDATED"); @@ -981,37 +1021,44 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // bare quantity quantityTable - .addIndex("20220304.11", "IDX_SP_QUANTITY_HASH_V2") - .unique(false) - .online(true) - .withColumns("HASH_IDENTITY", "SP_VALUE", "RES_ID", "PARTITION_ID"); + .addIndex("20220304.11", "IDX_SP_QUANTITY_HASH_V2") + .unique(false) + .online(true) + .withColumns("HASH_IDENTITY", "SP_VALUE", "RES_ID", "PARTITION_ID"); quantityTable.dropIndexOnline("20220304.12", "IDX_SP_QUANTITY_HASH"); // quantity with system+units quantityTable - .addIndex("20220304.13", "IDX_SP_QUANTITY_HASH_SYSUN_V2") - .unique(false) - .online(true) - .withColumns("HASH_IDENTITY_SYS_UNITS", "SP_VALUE", "RES_ID", "PARTITION_ID"); + .addIndex("20220304.13", "IDX_SP_QUANTITY_HASH_SYSUN_V2") + .unique(false) + .online(true) + .withColumns("HASH_IDENTITY_SYS_UNITS", "SP_VALUE", "RES_ID", "PARTITION_ID"); quantityTable.dropIndexOnline("20220304.14", "IDX_SP_QUANTITY_HASH_SYSUN"); // quantity with units quantityTable - .addIndex("20220304.15", "IDX_SP_QUANTITY_HASH_UN_V2") - .unique(false) - .online(true) - .withColumns("HASH_IDENTITY_AND_UNITS", "SP_VALUE", "RES_ID", "PARTITION_ID"); + .addIndex("20220304.15", "IDX_SP_QUANTITY_HASH_UN_V2") + .unique(false) + .online(true) + .withColumns("HASH_IDENTITY_AND_UNITS", "SP_VALUE", "RES_ID", "PARTITION_ID"); quantityTable.dropIndexOnline("20220304.16", "IDX_SP_QUANTITY_HASH_UN"); // for joining to other queries and sorts { quantityTable - .addIndex("20220304.17", "IDX_SP_QUANTITY_RESID_V2") - .unique(false).online(true) - .withColumns("RES_ID", "HASH_IDENTITY", "HASH_IDENTITY_SYS_UNITS", "HASH_IDENTITY_AND_UNITS", "SP_VALUE", "PARTITION_ID"); + .addIndex("20220304.17", "IDX_SP_QUANTITY_RESID_V2") + .unique(false) + .online(true) + .withColumns( + "RES_ID", + "HASH_IDENTITY", + "HASH_IDENTITY_SYS_UNITS", + "HASH_IDENTITY_AND_UNITS", + "SP_VALUE", + "PARTITION_ID"); // some engines tie the FK constraint to a particular index. // So we need to drop and recreate the constraint to drop the old RES_ID index. 
@@ -1020,8 +1067,10 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { quantityTable.dropIndexOnline("20220304.19", "IDX_SP_QUANTITY_RESID"); quantityTable.dropIndexOnline("20220304.20", "FKN603WJJOI1A6ASEWXBBD78BI5"); - quantityTable.addForeignKey("20220304.21", "FK_SP_QUANTITY_RES") - .toColumn("RES_ID").references("HFJ_RESOURCE", "RES_ID"); + quantityTable + .addForeignKey("20220304.21", "FK_SP_QUANTITY_RES") + .toColumn("RES_ID") + .references("HFJ_RESOURCE", "RES_ID"); } // obsolete quantityTable.dropIndexOnline("20220304.22", "IDX_SP_QUANTITY_UPDATED"); @@ -1032,37 +1081,44 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // bare quantity quantityNormTable - .addIndex("20220304.23", "IDX_SP_QNTY_NRML_HASH_V2") - .unique(false) - .online(true) - .withColumns("HASH_IDENTITY", "SP_VALUE", "RES_ID", "PARTITION_ID"); + .addIndex("20220304.23", "IDX_SP_QNTY_NRML_HASH_V2") + .unique(false) + .online(true) + .withColumns("HASH_IDENTITY", "SP_VALUE", "RES_ID", "PARTITION_ID"); quantityNormTable.dropIndexOnline("20220304.24", "IDX_SP_QNTY_NRML_HASH"); // quantity with system+units quantityNormTable - .addIndex("20220304.25", "IDX_SP_QNTY_NRML_HASH_SYSUN_V2") - .unique(false) - .online(true) - .withColumns("HASH_IDENTITY_SYS_UNITS", "SP_VALUE", "RES_ID", "PARTITION_ID"); + .addIndex("20220304.25", "IDX_SP_QNTY_NRML_HASH_SYSUN_V2") + .unique(false) + .online(true) + .withColumns("HASH_IDENTITY_SYS_UNITS", "SP_VALUE", "RES_ID", "PARTITION_ID"); quantityNormTable.dropIndexOnline("20220304.26", "IDX_SP_QNTY_NRML_HASH_SYSUN"); // quantity with units quantityNormTable - .addIndex("20220304.27", "IDX_SP_QNTY_NRML_HASH_UN_V2") - .unique(false) - .online(true) - .withColumns("HASH_IDENTITY_AND_UNITS", "SP_VALUE", "RES_ID", "PARTITION_ID"); + .addIndex("20220304.27", "IDX_SP_QNTY_NRML_HASH_UN_V2") + .unique(false) + .online(true) + .withColumns("HASH_IDENTITY_AND_UNITS", "SP_VALUE", "RES_ID", "PARTITION_ID"); quantityNormTable.dropIndexOnline("20220304.28", "IDX_SP_QNTY_NRML_HASH_UN"); // for joining to other queries and sorts { quantityNormTable - .addIndex("20220304.29", "IDX_SP_QNTY_NRML_RESID_V2") - .unique(false).online(true) - .withColumns("RES_ID", "HASH_IDENTITY", "HASH_IDENTITY_SYS_UNITS", "HASH_IDENTITY_AND_UNITS", "SP_VALUE", "PARTITION_ID"); + .addIndex("20220304.29", "IDX_SP_QNTY_NRML_RESID_V2") + .unique(false) + .online(true) + .withColumns( + "RES_ID", + "HASH_IDENTITY", + "HASH_IDENTITY_SYS_UNITS", + "HASH_IDENTITY_AND_UNITS", + "SP_VALUE", + "PARTITION_ID"); // some engines tie the FK constraint to a particular index. // So we need to drop and recreate the constraint to drop the old RES_ID index. 
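A minimal sketch of the pattern the comments in the hunks above describe: because some engines tie a foreign-key constraint to a particular index, the old RES_ID index can only be removed by dropping the constraint, dropping the index, and then re-adding the constraint once a new covering index exists. The sketch uses the same Builder API seen in the surrounding hunks, but the table, index, and constraint names and the migration IDs are placeholders rather than values from this migration, and it assumes it sits alongside the other private helpers of this class and reuses its imports.

    // Illustrative only: replace an engine-bound RES_ID index with a wider covering index.
    private void replaceResIdIndexSketch(Builder theVersion) {
        Builder.BuilderWithTableName table = theVersion.onTable("HFJ_SPIDX_EXAMPLE"); // placeholder table

        // 1. Create the new covering index first (online where the engine supports it).
        table.addIndex("29990101.1", "IDX_EXAMPLE_RESID_V2")
                .unique(false)
                .online(true)
                .withColumns("RES_ID", "HASH_IDENTITY", "SP_VALUE", "PARTITION_ID");

        // 2. Drop the FK constraint and the old index it was bound to.
        table.dropForeignKey("29990101.2", "FK_EXAMPLE_RES", "HFJ_RESOURCE");
        table.dropIndexOnline("29990101.3", "IDX_EXAMPLE_RESID");

        // 3. Recreate the FK so it binds to the surviving index.
        table.addForeignKey("29990101.4", "FK_EXAMPLE_RES")
                .toColumn("RES_ID")
                .references("HFJ_RESOURCE", "RES_ID");
    }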
@@ -1071,12 +1127,13 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { quantityNormTable.dropIndexOnline("20220304.31", "IDX_SP_QNTY_NRML_RESID"); quantityNormTable.dropIndexOnline("20220304.32", "FKRCJOVMUH5KC0O6FVBLE319PYV"); - quantityNormTable.addForeignKey("20220304.33", "FK_SP_QUANTITYNM_RES") - .toColumn("RES_ID").references("HFJ_RESOURCE", "RES_ID"); + quantityNormTable + .addForeignKey("20220304.33", "FK_SP_QUANTITYNM_RES") + .toColumn("RES_ID") + .references("HFJ_RESOURCE", "RES_ID"); } // obsolete quantityNormTable.dropIndexOnline("20220304.34", "IDX_SP_QNTY_NRML_UPDATED"); - } } @@ -1086,78 +1143,78 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { */ private void addIndexesForDeleteExpunge(Builder theVersion) { - theVersion.onTable("HFJ_HISTORY_TAG") - .addIndex("20211210.2", "IDX_RESHISTTAG_RESID") - .unique(false) - .withColumns("RES_ID"); + theVersion + .onTable("HFJ_HISTORY_TAG") + .addIndex("20211210.2", "IDX_RESHISTTAG_RESID") + .unique(false) + .withColumns("RES_ID"); - theVersion.onTable("HFJ_RES_VER_PROV") - .addIndex("20211210.3", "FK_RESVERPROV_RES_PID") - .unique(false) - .withColumns("RES_PID") - .doNothing() // This index is added below in a better form - .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + theVersion + .onTable("HFJ_RES_VER_PROV") + .addIndex("20211210.3", "FK_RESVERPROV_RES_PID") + .unique(false) + .withColumns("RES_PID") + .doNothing() // This index is added below in a better form + .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); - theVersion.onTable("HFJ_FORCED_ID") - .addIndex("20211210.4", "FK_FORCEDID_RESOURCE") - .unique(true) - .withColumns("RESOURCE_PID") - .doNothing()//This migration was added in error, as this table already has a unique constraint on RESOURCE_PID and every database creates an index on anything that is unique. - .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + theVersion + .onTable("HFJ_FORCED_ID") + .addIndex("20211210.4", "FK_FORCEDID_RESOURCE") + .unique(true) + .withColumns("RESOURCE_PID") + .doNothing() // This migration was added in error, as this table already has a unique constraint on + // RESOURCE_PID and every database creates an index on anything that is unique. 
+ .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); } private void init570() { Builder version = forVersion(VersionEnum.V5_7_0); - // both indexes must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index automatically + // both indexes must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index + // automatically version.onTable("TRM_CONCEPT_PROPERTY") - .addIndex("20211102.1", "FK_CONCEPTPROP_CONCEPT") - .unique(false) - .withColumns("CONCEPT_PID") - .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + .addIndex("20211102.1", "FK_CONCEPTPROP_CONCEPT") + .unique(false) + .withColumns("CONCEPT_PID") + .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); version.onTable("TRM_CONCEPT_DESIG") - .addIndex("20211102.2", "FK_CONCEPTDESIG_CONCEPT") - .unique(false) - .withColumns("CONCEPT_PID") - // H2, Derby, MariaDB, and MySql automatically add indexes to foreign keys - .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + .addIndex("20211102.2", "FK_CONCEPTDESIG_CONCEPT") + .unique(false) + .withColumns("CONCEPT_PID") + // H2, Derby, MariaDB, and MySql automatically add indexes to foreign keys + .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); version.onTable("TRM_CONCEPT_PC_LINK") - .addIndex("20211102.3", "FK_TERM_CONCEPTPC_CHILD") - .unique(false) - .withColumns("CHILD_PID") - // H2, Derby, MariaDB, and MySql automatically add indexes to foreign keys - .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + .addIndex("20211102.3", "FK_TERM_CONCEPTPC_CHILD") + .unique(false) + .withColumns("CHILD_PID") + // H2, Derby, MariaDB, and MySql automatically add indexes to foreign keys + .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); version.onTable("TRM_CONCEPT_PC_LINK") - .addIndex("20211102.4", "FK_TERM_CONCEPTPC_PARENT") - .unique(false) - .withColumns("PARENT_PID") - // H2, Derby, MariaDB, and MySql automatically add indexes to foreign keys - .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); + .addIndex("20211102.4", "FK_TERM_CONCEPTPC_PARENT") + .unique(false) + .withColumns("PARENT_PID") + // H2, Derby, MariaDB, and MySql automatically add indexes to foreign keys + .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS); addIndexesForDeleteExpunge(version); // Add inline resource text column version.onTable("HFJ_RES_VER") - .addColumn("20220102.1", "RES_TEXT_VC") - .nullable() - .type(ColumnTypeEnum.STRING, 4000); + .addColumn("20220102.1", "RES_TEXT_VC") + .nullable() + .type(ColumnTypeEnum.STRING, 4000); // Add partition id column for mdm Builder.BuilderWithTableName empiLink = version.onTable("MPI_LINK"); - empiLink.addColumn("20220324.1", "PARTITION_ID") - .nullable() - .type(ColumnTypeEnum.INT); - empiLink.addColumn("20220324.2", "PARTITION_DATE") - .nullable() - .type(ColumnTypeEnum.DATE_ONLY); + empiLink.addColumn("20220324.1", "PARTITION_ID").nullable().type(ColumnTypeEnum.INT); + empiLink.addColumn("20220324.2", "PARTITION_DATE").nullable().type(ColumnTypeEnum.DATE_ONLY); } - private void init560() { init560_20211027(); } @@ -1175,37 +1232,57 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // For MSSQL only - Replace ForcedId index with a version that has an INCLUDE clause Builder.BuilderWithTableName forcedId = version.onTable("HFJ_FORCED_ID"); - forcedId.dropIndex("20210516.1", "IDX_FORCEDID_TYPE_FID").onlyAppliesToPlatforms(DriverTypeEnum.MSSQL_2012).runEvenDuringSchemaInitialization(); - 
forcedId.addIndex("20210516.2", "IDX_FORCEDID_TYPE_FID").unique(true).includeColumns("RESOURCE_PID").withColumns("RESOURCE_TYPE", "FORCED_ID").onlyAppliesToPlatforms(DriverTypeEnum.MSSQL_2012).runEvenDuringSchemaInitialization(); + forcedId.dropIndex("20210516.1", "IDX_FORCEDID_TYPE_FID") + .onlyAppliesToPlatforms(DriverTypeEnum.MSSQL_2012) + .runEvenDuringSchemaInitialization(); + forcedId.addIndex("20210516.2", "IDX_FORCEDID_TYPE_FID") + .unique(true) + .includeColumns("RESOURCE_PID") + .withColumns("RESOURCE_TYPE", "FORCED_ID") + .onlyAppliesToPlatforms(DriverTypeEnum.MSSQL_2012) + .runEvenDuringSchemaInitialization(); // Add bulk import file description version.onTable("HFJ_BLK_IMPORT_JOBFILE") - .addColumn("20210528.1", "FILE_DESCRIPTION").nullable().type(ColumnTypeEnum.STRING, 500); + .addColumn("20210528.1", "FILE_DESCRIPTION") + .nullable() + .type(ColumnTypeEnum.STRING, 500); // Bump ConceptMap display lengths version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT") - .modifyColumn("20210617.1", "TARGET_DISPLAY").nullable().withType(ColumnTypeEnum.STRING, 500); + .modifyColumn("20210617.1", "TARGET_DISPLAY") + .nullable() + .withType(ColumnTypeEnum.STRING, 500); version.onTable("TRM_CONCEPT_MAP_GRP_ELEMENT") - .modifyColumn("20210617.2", "SOURCE_DISPLAY").nullable().withType(ColumnTypeEnum.STRING, 500); + .modifyColumn("20210617.2", "SOURCE_DISPLAY") + .nullable() + .withType(ColumnTypeEnum.STRING, 500); version.onTable("HFJ_BLK_EXPORT_JOB") - .modifyColumn("20210624.1", "REQUEST").nonNullable().withType(ColumnTypeEnum.STRING, 1024); + .modifyColumn("20210624.1", "REQUEST") + .nonNullable() + .withType(ColumnTypeEnum.STRING, 1024); version.onTable("HFJ_IDX_CMP_STRING_UNIQ") - .modifyColumn("20210713.1", "IDX_STRING").nonNullable().withType(ColumnTypeEnum.STRING, 500); + .modifyColumn("20210713.1", "IDX_STRING") + .nonNullable() + .withType(ColumnTypeEnum.STRING, 500); version.onTable("HFJ_RESOURCE") - .addColumn("20210720.1", "SP_CMPTOKS_PRESENT").nullable().type(ColumnTypeEnum.BOOLEAN); + .addColumn("20210720.1", "SP_CMPTOKS_PRESENT") + .nullable() + .type(ColumnTypeEnum.BOOLEAN); version.addIdGenerator("20210720.2", "SEQ_IDXCMBTOKNU_ID"); - Builder.BuilderAddTableByColumns cmpToks = version - .addTableByColumns("20210720.3", "HFJ_IDX_CMB_TOK_NU", "PID"); + Builder.BuilderAddTableByColumns cmpToks = version.addTableByColumns("20210720.3", "HFJ_IDX_CMB_TOK_NU", "PID"); cmpToks.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG); cmpToks.addColumn("RES_ID").nonNullable().type(ColumnTypeEnum.LONG); cmpToks.addColumn("HASH_COMPLETE").nonNullable().type(ColumnTypeEnum.LONG); cmpToks.addColumn("IDX_STRING").nonNullable().type(ColumnTypeEnum.STRING, 500); - cmpToks.addForeignKey("20210720.4", "FK_IDXCMBTOKNU_RES_ID").toColumn("RES_ID").references("HFJ_RESOURCE", "RES_ID"); + cmpToks.addForeignKey("20210720.4", "FK_IDXCMBTOKNU_RES_ID") + .toColumn("RES_ID") + .references("HFJ_RESOURCE", "RES_ID"); cmpToks.addIndex("20210720.5", "IDX_IDXCMBTOKNU_STR").unique(false).withColumns("IDX_STRING"); cmpToks.addIndex("20210720.6", "IDX_IDXCMBTOKNU_RES").unique(false).withColumns("RES_ID"); @@ -1217,13 +1294,12 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // Dropping index on the language column, as it's no longer in use. 
// TODO: After 2 releases from 5.5.0, drop the column too - version.onTable("HFJ_RESOURCE") - .dropIndex("20210908.1", "IDX_RES_LANG"); + version.onTable("HFJ_RESOURCE").dropIndex("20210908.1", "IDX_RES_LANG"); version.onTable("TRM_VALUESET") - .addColumn("20210915.1", "EXPANDED_AT") - .nullable() - .type(ColumnTypeEnum.DATE_TIMESTAMP); + .addColumn("20210915.1", "EXPANDED_AT") + .nullable() + .type(ColumnTypeEnum.DATE_TIMESTAMP); /* * Replace CLOB columns with BLOB columns @@ -1231,38 +1307,49 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // TRM_VALUESET_CONCEPT.SOURCE_DIRECT_PARENT_PIDS version.onTable("TRM_VALUESET_CONCEPT") - .migratePostgresTextClobToBinaryClob("20211003.1", "SOURCE_DIRECT_PARENT_PIDS"); + .migratePostgresTextClobToBinaryClob("20211003.1", "SOURCE_DIRECT_PARENT_PIDS"); // TRM_CONCEPT.PARENT_PIDS - version.onTable("TRM_CONCEPT") - .migratePostgresTextClobToBinaryClob("20211003.2", "PARENT_PIDS"); + version.onTable("TRM_CONCEPT").migratePostgresTextClobToBinaryClob("20211003.2", "PARENT_PIDS"); // HFJ_SEARCH.SEARCH_QUERY_STRING - version.onTable("HFJ_SEARCH") - .migratePostgresTextClobToBinaryClob("20211003.3", "SEARCH_QUERY_STRING"); - + version.onTable("HFJ_SEARCH").migratePostgresTextClobToBinaryClob("20211003.3", "SEARCH_QUERY_STRING"); } private void init540() { Builder version = forVersion(VersionEnum.V5_4_0); - //-- add index on HFJ_SPIDX_DATE - version.onTable("HFJ_SPIDX_DATE").addIndex("20210309.1", "IDX_SP_DATE_HASH_HIGH") - .unique(false).withColumns("HASH_IDENTITY", "SP_VALUE_HIGH") - .doNothing(); + // -- add index on HFJ_SPIDX_DATE + version.onTable("HFJ_SPIDX_DATE") + .addIndex("20210309.1", "IDX_SP_DATE_HASH_HIGH") + .unique(false) + .withColumns("HASH_IDENTITY", "SP_VALUE_HIGH") + .doNothing(); - //-- add index on HFJ_FORCED_ID - version.onTable("HFJ_FORCED_ID").addIndex("20210309.2", "IDX_FORCEID_FID") - .unique(false).withColumns("FORCED_ID"); + // -- add index on HFJ_FORCED_ID + version.onTable("HFJ_FORCED_ID") + .addIndex("20210309.2", "IDX_FORCEID_FID") + .unique(false) + .withColumns("FORCED_ID"); - //-- ValueSet Concept Fulltext Indexing - version.onTable("TRM_VALUESET_CONCEPT").addColumn("20210406.1", "INDEX_STATUS").nullable().type(ColumnTypeEnum.LONG); - version.onTable("TRM_VALUESET_CONCEPT").addColumn("20210406.2", "SOURCE_DIRECT_PARENT_PIDS").nullable().type(ColumnTypeEnum.CLOB); - version.onTable("TRM_VALUESET_CONCEPT").addColumn("20210406.3", "SOURCE_PID").nullable().type(ColumnTypeEnum.LONG); + // -- ValueSet Concept Fulltext Indexing + version.onTable("TRM_VALUESET_CONCEPT") + .addColumn("20210406.1", "INDEX_STATUS") + .nullable() + .type(ColumnTypeEnum.LONG); + version.onTable("TRM_VALUESET_CONCEPT") + .addColumn("20210406.2", "SOURCE_DIRECT_PARENT_PIDS") + .nullable() + .type(ColumnTypeEnum.CLOB); + version.onTable("TRM_VALUESET_CONCEPT") + .addColumn("20210406.3", "SOURCE_PID") + .nullable() + .type(ColumnTypeEnum.LONG); // Bulk Import Job - Builder.BuilderAddTableByColumns blkImportJobTable = version.addTableByColumns("20210410.1", "HFJ_BLK_IMPORT_JOB", "PID"); + Builder.BuilderAddTableByColumns blkImportJobTable = + version.addTableByColumns("20210410.1", "HFJ_BLK_IMPORT_JOB", "PID"); blkImportJobTable.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG); blkImportJobTable.addColumn("JOB_ID").nonNullable().type(ColumnTypeEnum.STRING, UUID_LENGTH); blkImportJobTable.addColumn("JOB_STATUS").nonNullable().type(ColumnTypeEnum.STRING, 10); @@ -1273,39 +1360,53 @@ public class HapiFhirJpaMigrationTasks 
extends BaseMigrationTasks { blkImportJobTable.addColumn("FILE_COUNT").nonNullable().type(ColumnTypeEnum.INT); blkImportJobTable.addColumn("ROW_PROCESSING_MODE").nonNullable().type(ColumnTypeEnum.STRING, 20); blkImportJobTable.addColumn("BATCH_SIZE").nonNullable().type(ColumnTypeEnum.INT); - blkImportJobTable.addIndex("20210410.2", "IDX_BLKIM_JOB_ID").unique(true).withColumns("JOB_ID"); + blkImportJobTable + .addIndex("20210410.2", "IDX_BLKIM_JOB_ID") + .unique(true) + .withColumns("JOB_ID"); version.addIdGenerator("20210410.3", "SEQ_BLKIMJOB_PID"); // Bulk Import Job File - Builder.BuilderAddTableByColumns blkImportJobFileTable = version.addTableByColumns("20210410.4", "HFJ_BLK_IMPORT_JOBFILE", "PID"); + Builder.BuilderAddTableByColumns blkImportJobFileTable = + version.addTableByColumns("20210410.4", "HFJ_BLK_IMPORT_JOBFILE", "PID"); blkImportJobFileTable.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG); blkImportJobFileTable.addColumn("JOB_PID").nonNullable().type(ColumnTypeEnum.LONG); blkImportJobFileTable.addColumn("JOB_CONTENTS").nonNullable().type(ColumnTypeEnum.BLOB); blkImportJobFileTable.addColumn("FILE_SEQ").nonNullable().type(ColumnTypeEnum.INT); blkImportJobFileTable.addColumn("TENANT_NAME").nullable().type(ColumnTypeEnum.STRING, 200); - blkImportJobFileTable.addIndex("20210410.5", "IDX_BLKIM_JOBFILE_JOBID").unique(false).withColumns("JOB_PID"); - blkImportJobFileTable.addForeignKey("20210410.6", "FK_BLKIMJOBFILE_JOB").toColumn("JOB_PID").references("HFJ_BLK_IMPORT_JOB", "PID"); + blkImportJobFileTable + .addIndex("20210410.5", "IDX_BLKIM_JOBFILE_JOBID") + .unique(false) + .withColumns("JOB_PID"); + blkImportJobFileTable + .addForeignKey("20210410.6", "FK_BLKIMJOBFILE_JOB") + .toColumn("JOB_PID") + .references("HFJ_BLK_IMPORT_JOB", "PID"); version.addIdGenerator("20210410.7", "SEQ_BLKIMJOBFILE_PID"); - //Increase ResourceLink path length - version.onTable("HFJ_RES_LINK").modifyColumn("20210505.1", "SRC_PATH").nonNullable().failureAllowed().withType(ColumnTypeEnum.STRING, 500); + // Increase ResourceLink path length + version.onTable("HFJ_RES_LINK") + .modifyColumn("20210505.1", "SRC_PATH") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.STRING, 500); } private void init530() { Builder version = forVersion(VersionEnum.V5_3_0); - //-- TRM - version - .onTable("TRM_VALUESET_CONCEPT") - .dropIndex("20210104.1", "IDX_VS_CONCEPT_CS_CODE"); + // -- TRM + version.onTable("TRM_VALUESET_CONCEPT").dropIndex("20210104.1", "IDX_VS_CONCEPT_CS_CODE"); - version - .onTable("TRM_VALUESET_CONCEPT") - .addIndex("20210104.2", "IDX_VS_CONCEPT_CSCD").unique(true).withColumns("VALUESET_PID", "SYSTEM_URL", "CODEVAL"); + version.onTable("TRM_VALUESET_CONCEPT") + .addIndex("20210104.2", "IDX_VS_CONCEPT_CSCD") + .unique(true) + .withColumns("VALUESET_PID", "SYSTEM_URL", "CODEVAL"); - //-- Add new Table, HFJ_SPIDX_QUANTITY_NRML + // -- Add new Table, HFJ_SPIDX_QUANTITY_NRML version.addIdGenerator("20210109.1", "SEQ_SPIDX_QUANTITY_NRML"); - Builder.BuilderAddTableByColumns pkg = version.addTableByColumns("20210109.2", "HFJ_SPIDX_QUANTITY_NRML", "SP_ID"); + Builder.BuilderAddTableByColumns pkg = + version.addTableByColumns("20210109.2", "HFJ_SPIDX_QUANTITY_NRML", "SP_ID"); pkg.addColumn("RES_ID").nonNullable().type(ColumnTypeEnum.LONG); pkg.addColumn("RES_TYPE").nonNullable().type(ColumnTypeEnum.STRING, 100); pkg.addColumn("SP_UPDATED").nullable().type(ColumnTypeEnum.DATE_TIMESTAMP); @@ -1319,31 +1420,43 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { 
pkg.addColumn("HASH_IDENTITY").nullable().type(ColumnTypeEnum.LONG); pkg.addColumn("SP_VALUE").nullable().type(ColumnTypeEnum.FLOAT); pkg.addIndex("20210109.3", "IDX_SP_QNTY_NRML_HASH").unique(false).withColumns("HASH_IDENTITY", "SP_VALUE"); - pkg.addIndex("20210109.4", "IDX_SP_QNTY_NRML_HASH_UN").unique(false).withColumns("HASH_IDENTITY_AND_UNITS", "SP_VALUE"); - pkg.addIndex("20210109.5", "IDX_SP_QNTY_NRML_HASH_SYSUN").unique(false).withColumns("HASH_IDENTITY_SYS_UNITS", "SP_VALUE"); + pkg.addIndex("20210109.4", "IDX_SP_QNTY_NRML_HASH_UN") + .unique(false) + .withColumns("HASH_IDENTITY_AND_UNITS", "SP_VALUE"); + pkg.addIndex("20210109.5", "IDX_SP_QNTY_NRML_HASH_SYSUN") + .unique(false) + .withColumns("HASH_IDENTITY_SYS_UNITS", "SP_VALUE"); pkg.addIndex("20210109.6", "IDX_SP_QNTY_NRML_UPDATED").unique(false).withColumns("SP_UPDATED"); pkg.addIndex("20210109.7", "IDX_SP_QNTY_NRML_RESID").unique(false).withColumns("RES_ID"); - //-- Link to the resourceTable - version.onTable("HFJ_RESOURCE").addColumn("20210109.10", "SP_QUANTITY_NRML_PRESENT").nullable().type(ColumnTypeEnum.BOOLEAN); + // -- Link to the resourceTable + version.onTable("HFJ_RESOURCE") + .addColumn("20210109.10", "SP_QUANTITY_NRML_PRESENT") + .nullable() + .type(ColumnTypeEnum.BOOLEAN); - //-- Fixed the partition and fk + // -- Fixed the partition and fk Builder.BuilderWithTableName nrmlTable = version.onTable("HFJ_SPIDX_QUANTITY_NRML"); nrmlTable.addColumn("20210111.1", "PARTITION_ID").nullable().type(ColumnTypeEnum.INT); nrmlTable.addColumn("20210111.2", "PARTITION_DATE").nullable().type(ColumnTypeEnum.DATE_ONLY); // - The fk name is generated from Hibernate, have to use this name here nrmlTable - .addForeignKey("20210111.3", "FKRCJOVMUH5KC0O6FVBLE319PYV") - .toColumn("RES_ID") - .references("HFJ_RESOURCE", "RES_ID"); + .addForeignKey("20210111.3", "FKRCJOVMUH5KC0O6FVBLE319PYV") + .toColumn("RES_ID") + .references("HFJ_RESOURCE", "RES_ID"); Builder.BuilderWithTableName quantityTable = version.onTable("HFJ_SPIDX_QUANTITY"); - quantityTable.modifyColumn("20210116.1", "SP_VALUE").nullable().failureAllowed().withType(ColumnTypeEnum.DOUBLE); + quantityTable + .modifyColumn("20210116.1", "SP_VALUE") + .nullable() + .failureAllowed() + .withType(ColumnTypeEnum.DOUBLE); // HFJ_RES_LINK version.onTable("HFJ_RES_LINK") - .addColumn("20210126.1", "TARGET_RESOURCE_VERSION").nullable().type(ColumnTypeEnum.LONG); - + .addColumn("20210126.1", "TARGET_RESOURCE_VERSION") + .nullable() + .type(ColumnTypeEnum.LONG); } protected void init520() { @@ -1352,10 +1465,9 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { Builder.BuilderWithTableName mdmLink = version.onTable("MPI_LINK"); mdmLink.addColumn("20201029.1", "GOLDEN_RESOURCE_PID").nonNullable().type(ColumnTypeEnum.LONG); mdmLink.addColumn("20201029.2", "RULE_COUNT").nullable().type(ColumnTypeEnum.LONG); - mdmLink - .addForeignKey("20201029.3", "FK_EMPI_LINK_GOLDEN_RESOURCE") - .toColumn("GOLDEN_RESOURCE_PID") - .references("HFJ_RESOURCE", "RES_ID"); + mdmLink.addForeignKey("20201029.3", "FK_EMPI_LINK_GOLDEN_RESOURCE") + .toColumn("GOLDEN_RESOURCE_PID") + .references("HFJ_RESOURCE", "RES_ID"); } protected void init510() { @@ -1386,12 +1498,17 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { pkgVer.addColumn("FHIR_VERSION").nonNullable().type(ColumnTypeEnum.STRING, 10); pkgVer.addColumn("PACKAGE_SIZE_BYTES").nonNullable().type(ColumnTypeEnum.LONG); pkgVer.addColumn("UPDATED_TIME").nonNullable().type(ColumnTypeEnum.DATE_TIMESTAMP); - 
pkgVer.addForeignKey("20200610.6", "FK_NPM_PKV_PKG").toColumn("PACKAGE_PID").references("NPM_PACKAGE", "PID"); - pkgVer.addForeignKey("20200610.7", "FK_NPM_PKV_RESID").toColumn("BINARY_RES_ID").references("HFJ_RESOURCE", "RES_ID"); + pkgVer.addForeignKey("20200610.6", "FK_NPM_PKV_PKG") + .toColumn("PACKAGE_PID") + .references("NPM_PACKAGE", "PID"); + pkgVer.addForeignKey("20200610.7", "FK_NPM_PKV_RESID") + .toColumn("BINARY_RES_ID") + .references("HFJ_RESOURCE", "RES_ID"); pkgVer.addIndex("20200610.8", "IDX_PACKVER").unique(true).withColumns("PACKAGE_ID", "VERSION_ID"); version.addIdGenerator("20200610.9", "SEQ_NPM_PACKVERRES"); - Builder.BuilderAddTableByColumns pkgVerResAdd = version.addTableByColumns("20200610.10", "NPM_PACKAGE_VER_RES", "PID"); + Builder.BuilderAddTableByColumns pkgVerResAdd = + version.addTableByColumns("20200610.10", "NPM_PACKAGE_VER_RES", "PID"); pkgVerResAdd.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG); pkgVerResAdd.addColumn("PACKVER_PID").nonNullable().type(ColumnTypeEnum.LONG); pkgVerResAdd.addColumn("BINARY_RES_ID").nonNullable().type(ColumnTypeEnum.LONG); @@ -1404,8 +1521,14 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { pkgVerResAdd.addColumn("FHIR_VERSION").nonNullable().type(ColumnTypeEnum.STRING, 10); pkgVerResAdd.addColumn("RES_SIZE_BYTES").nonNullable().type(ColumnTypeEnum.LONG); pkgVerResAdd.addColumn("UPDATED_TIME").nonNullable().type(ColumnTypeEnum.DATE_TIMESTAMP); - pkgVerResAdd.addForeignKey("20200610.11", "FK_NPM_PACKVERRES_PACKVER").toColumn("PACKVER_PID").references("NPM_PACKAGE_VER", "PID"); - pkgVerResAdd.addForeignKey("20200610.12", "FK_NPM_PKVR_RESID").toColumn("BINARY_RES_ID").references("HFJ_RESOURCE", "RES_ID"); + pkgVerResAdd + .addForeignKey("20200610.11", "FK_NPM_PACKVERRES_PACKVER") + .toColumn("PACKVER_PID") + .references("NPM_PACKAGE_VER", "PID"); + pkgVerResAdd + .addForeignKey("20200610.12", "FK_NPM_PKVR_RESID") + .toColumn("BINARY_RES_ID") + .references("HFJ_RESOURCE", "RES_ID"); pkgVerResAdd.addIndex("20200610.13", "IDX_PACKVERRES_URL").unique(false).withColumns("CANONICAL_URL"); init510_20200610(); @@ -1423,31 +1546,37 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { empiLink.addColumn("20200715.4", "VECTOR").nullable().type(ColumnTypeEnum.LONG); empiLink.addColumn("20200715.5", "SCORE").nullable().type(ColumnTypeEnum.FLOAT); - init510_20200725(); - //EMPI Target Type + // EMPI Target Type empiLink.addColumn("20200727.1", "TARGET_TYPE").nullable().type(ColumnTypeEnum.STRING, 40); - //ConceptMap add version for search + // ConceptMap add version for search Builder.BuilderWithTableName trmConceptMap = version.onTable("TRM_CONCEPT_MAP"); trmConceptMap.addColumn("20200910.1", "VER").nullable().type(ColumnTypeEnum.STRING, 200); trmConceptMap.dropIndex("20200910.2", "IDX_CONCEPT_MAP_URL").failureAllowed(); trmConceptMap.addIndex("20200910.3", "IDX_CONCEPT_MAP_URL").unique(true).withColumns("URL", "VER"); - //Term CodeSystem Version and Term ValueSet Version + // Term CodeSystem Version and Term ValueSet Version Builder.BuilderWithTableName trmCodeSystemVer = version.onTable("TRM_CODESYSTEM_VER"); - trmCodeSystemVer.addIndex("20200923.1", "IDX_CODESYSTEM_AND_VER").unique(true).withColumns("CODESYSTEM_PID", "CS_VERSION_ID"); + trmCodeSystemVer + .addIndex("20200923.1", "IDX_CODESYSTEM_AND_VER") + .unique(true) + .withColumns("CODESYSTEM_PID", "CS_VERSION_ID"); Builder.BuilderWithTableName trmValueSet = version.onTable("TRM_VALUESET"); trmValueSet.addColumn("20200923.2", 
"VER").nullable().type(ColumnTypeEnum.STRING, 200); trmValueSet.dropIndex("20200923.3", "IDX_VALUESET_URL").failureAllowed(); trmValueSet.addIndex("20200923.4", "IDX_VALUESET_URL").unique(true).withColumns("URL", "VER"); - //Term ValueSet Component add system version + // Term ValueSet Component add system version Builder.BuilderWithTableName trmValueSetComp = version.onTable("TRM_VALUESET_CONCEPT"); trmValueSetComp.addColumn("20201028.1", "SYSTEM_VER").nullable().type(ColumnTypeEnum.STRING, 200); trmValueSetComp.dropIndex("20201028.2", "IDX_VS_CONCEPT_CS_CD").failureAllowed(); - trmValueSetComp.addIndex("20201028.3", "IDX_VS_CONCEPT_CS_CODE").unique(true).withColumns("VALUESET_PID", "SYSTEM_URL", "SYSTEM_VER", "CODEVAL").doNothing(); + trmValueSetComp + .addIndex("20201028.3", "IDX_VS_CONCEPT_CS_CODE") + .unique(true) + .withColumns("VALUESET_PID", "SYSTEM_URL", "SYSTEM_VER", "CODEVAL") + .doNothing(); } protected void init510_20200725() { @@ -1462,16 +1591,25 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // nothing } - private void init501() { //20200514 - present + private void init501() { // 20200514 - present Builder version = forVersion(VersionEnum.V5_0_1); Builder.BuilderWithTableName spidxDate = version.onTable("HFJ_SPIDX_DATE"); - spidxDate.addIndex("20200514.1", "IDX_SP_DATE_HASH_LOW").unique(false).withColumns("HASH_IDENTITY", "SP_VALUE_LOW") - .doNothing(); - spidxDate.addIndex("20200514.2", "IDX_SP_DATE_ORD_HASH").unique(false).withColumns("HASH_IDENTITY", "SP_VALUE_LOW_DATE_ORDINAL", "SP_VALUE_HIGH_DATE_ORDINAL") - .doNothing(); - spidxDate.addIndex("20200514.3", "IDX_SP_DATE_ORD_HASH_LOW").unique(false).withColumns("HASH_IDENTITY", "SP_VALUE_LOW_DATE_ORDINAL") - .doNothing(); + spidxDate + .addIndex("20200514.1", "IDX_SP_DATE_HASH_LOW") + .unique(false) + .withColumns("HASH_IDENTITY", "SP_VALUE_LOW") + .doNothing(); + spidxDate + .addIndex("20200514.2", "IDX_SP_DATE_ORD_HASH") + .unique(false) + .withColumns("HASH_IDENTITY", "SP_VALUE_LOW_DATE_ORDINAL", "SP_VALUE_HIGH_DATE_ORDINAL") + .doNothing(); + spidxDate + .addIndex("20200514.3", "IDX_SP_DATE_ORD_HASH_LOW") + .unique(false) + .withColumns("HASH_IDENTITY", "SP_VALUE_LOW_DATE_ORDINAL") + .doNothing(); // MPI_LINK version.addIdGenerator("20200517.1", "SEQ_EMPI_LINK_ID"); @@ -1479,25 +1617,21 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { empiLink.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG); empiLink.addColumn("PERSON_PID").nonNullable().type(ColumnTypeEnum.LONG); - empiLink - .addForeignKey("20200517.3", "FK_EMPI_LINK_PERSON") - .toColumn("PERSON_PID") - .references("HFJ_RESOURCE", "RES_ID"); + empiLink.addForeignKey("20200517.3", "FK_EMPI_LINK_PERSON") + .toColumn("PERSON_PID") + .references("HFJ_RESOURCE", "RES_ID"); empiLink.addColumn("TARGET_PID").nonNullable().type(ColumnTypeEnum.LONG); - empiLink - .addForeignKey("20200517.4", "FK_EMPI_LINK_TARGET") - .toColumn("TARGET_PID") - .references("HFJ_RESOURCE", "RES_ID"); + empiLink.addForeignKey("20200517.4", "FK_EMPI_LINK_TARGET") + .toColumn("TARGET_PID") + .references("HFJ_RESOURCE", "RES_ID"); empiLink.addColumn("MATCH_RESULT").nonNullable().type(ColumnTypeEnum.INT); empiLink.addColumn("LINK_SOURCE").nonNullable().type(ColumnTypeEnum.INT); empiLink.addColumn("CREATED").nonNullable().type(ColumnTypeEnum.DATE_TIMESTAMP); empiLink.addColumn("UPDATED").nonNullable().type(ColumnTypeEnum.DATE_TIMESTAMP); - empiLink.addIndex("20200517.5", "IDX_EMPI_PERSON_TGT").unique(true).withColumns("PERSON_PID", "TARGET_PID"); 
- } protected void init500() { // 20200218 - 20200519 @@ -1506,8 +1640,15 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // Eliminate circular dependency. version.onTable("HFJ_RESOURCE").dropColumn("20200218.1", "FORCED_ID_PID"); version.onTable("HFJ_RES_VER").dropColumn("20200218.2", "FORCED_ID_PID"); - version.onTable("HFJ_RES_VER").addForeignKey("20200218.3", "FK_RESOURCE_HISTORY_RESOURCE").toColumn("RES_ID").references("HFJ_RESOURCE", "RES_ID"); - version.onTable("HFJ_RES_VER").modifyColumn("20200220.1", "RES_ID").nonNullable().failureAllowed().withType(ColumnTypeEnum.LONG); + version.onTable("HFJ_RES_VER") + .addForeignKey("20200218.3", "FK_RESOURCE_HISTORY_RESOURCE") + .toColumn("RES_ID") + .references("HFJ_RESOURCE", "RES_ID"); + version.onTable("HFJ_RES_VER") + .modifyColumn("20200220.1", "RES_ID") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.LONG); // // Drop unused column @@ -1515,67 +1656,210 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { version.onTable("HFJ_RESOURCE").dropColumn("20200419.2", "RES_PROFILE").failureAllowed(); // Add Partitioning - Builder.BuilderAddTableByColumns partition = version.addTableByColumns("20200420.0", "HFJ_PARTITION", "PART_ID"); + Builder.BuilderAddTableByColumns partition = + version.addTableByColumns("20200420.0", "HFJ_PARTITION", "PART_ID"); partition.addColumn("PART_ID").nonNullable().type(ColumnTypeEnum.INT); partition.addColumn("PART_NAME").nonNullable().type(ColumnTypeEnum.STRING, 200); partition.addColumn("PART_DESC").nullable().type(ColumnTypeEnum.STRING, 200); partition.addIndex("20200420.1", "IDX_PART_NAME").unique(true).withColumns("PART_NAME"); // Partition columns on individual tables - version.onTable("HFJ_RESOURCE").addColumn("20200420.2", "PARTITION_ID").nullable().type(ColumnTypeEnum.INT); - version.onTable("HFJ_RESOURCE").addColumn("20200420.3", "PARTITION_DATE").nullable().type(ColumnTypeEnum.DATE_ONLY); - version.onTable("HFJ_RES_VER").addColumn("20200420.4", "PARTITION_ID").nullable().type(ColumnTypeEnum.INT); - version.onTable("HFJ_RES_VER").addColumn("20200420.5", "PARTITION_DATE").nullable().type(ColumnTypeEnum.DATE_ONLY); - version.onTable("HFJ_IDX_CMP_STRING_UNIQ").addColumn("20200420.6", "PARTITION_ID").nullable().type(ColumnTypeEnum.INT); - version.onTable("HFJ_IDX_CMP_STRING_UNIQ").addColumn("20200420.7", "PARTITION_DATE").nullable().type(ColumnTypeEnum.DATE_ONLY); - version.onTable("HFJ_IDX_CMP_STRING_UNIQ").addColumn("20200420.8", "PARTITION_ID").nullable().type(ColumnTypeEnum.INT); - version.onTable("HFJ_IDX_CMP_STRING_UNIQ").addColumn("20200420.9", "PARTITION_DATE").nullable().type(ColumnTypeEnum.DATE_ONLY); - version.onTable("HFJ_HISTORY_TAG").addColumn("20200420.10", "PARTITION_ID").nullable().type(ColumnTypeEnum.INT); - version.onTable("HFJ_HISTORY_TAG").addColumn("20200420.11", "PARTITION_DATE").nullable().type(ColumnTypeEnum.DATE_ONLY); - version.onTable("HFJ_RES_TAG").addColumn("20200420.12", "PARTITION_ID").nullable().type(ColumnTypeEnum.INT); - version.onTable("HFJ_RES_TAG").addColumn("20200420.13", "PARTITION_DATE").nullable().type(ColumnTypeEnum.DATE_ONLY); - version.onTable("HFJ_FORCED_ID").addColumn("20200420.14", "PARTITION_ID").nullable().type(ColumnTypeEnum.INT); - version.onTable("HFJ_FORCED_ID").addColumn("20200420.15", "PARTITION_DATE").nullable().type(ColumnTypeEnum.DATE_ONLY); - version.onTable("HFJ_RES_LINK").addColumn("20200420.16", "PARTITION_ID").nullable().type(ColumnTypeEnum.INT); - 
version.onTable("HFJ_RES_LINK").addColumn("20200420.17", "PARTITION_DATE").nullable().type(ColumnTypeEnum.DATE_ONLY); - version.onTable("HFJ_SPIDX_STRING").addColumn("20200420.18", "PARTITION_ID").nullable().type(ColumnTypeEnum.INT); - version.onTable("HFJ_SPIDX_STRING").addColumn("20200420.19", "PARTITION_DATE").nullable().type(ColumnTypeEnum.DATE_ONLY); - version.onTable("HFJ_SPIDX_COORDS").addColumn("20200420.20", "PARTITION_ID").nullable().type(ColumnTypeEnum.INT); - version.onTable("HFJ_SPIDX_COORDS").addColumn("20200420.21", "PARTITION_DATE").nullable().type(ColumnTypeEnum.DATE_ONLY); - version.onTable("HFJ_SPIDX_NUMBER").addColumn("20200420.22", "PARTITION_ID").nullable().type(ColumnTypeEnum.INT); - version.onTable("HFJ_SPIDX_NUMBER").addColumn("20200420.23", "PARTITION_DATE").nullable().type(ColumnTypeEnum.DATE_ONLY); - version.onTable("HFJ_SPIDX_TOKEN").addColumn("20200420.24", "PARTITION_ID").nullable().type(ColumnTypeEnum.INT); - version.onTable("HFJ_SPIDX_TOKEN").addColumn("20200420.25", "PARTITION_DATE").nullable().type(ColumnTypeEnum.DATE_ONLY); - version.onTable("HFJ_SPIDX_DATE").addColumn("20200420.26", "PARTITION_ID").nullable().type(ColumnTypeEnum.INT); - version.onTable("HFJ_SPIDX_DATE").addColumn("20200420.27", "PARTITION_DATE").nullable().type(ColumnTypeEnum.DATE_ONLY); - version.onTable("HFJ_SPIDX_URI").addColumn("20200420.28", "PARTITION_ID").nullable().type(ColumnTypeEnum.INT); - version.onTable("HFJ_SPIDX_URI").addColumn("20200420.29", "PARTITION_DATE").nullable().type(ColumnTypeEnum.DATE_ONLY); - version.onTable("HFJ_SPIDX_QUANTITY").addColumn("20200420.30", "PARTITION_ID").nullable().type(ColumnTypeEnum.INT); - version.onTable("HFJ_SPIDX_QUANTITY").addColumn("20200420.31", "PARTITION_DATE").nullable().type(ColumnTypeEnum.DATE_ONLY); - version.onTable("HFJ_RES_VER_PROV").addColumn("20200420.32", "PARTITION_ID").nullable().type(ColumnTypeEnum.INT); - version.onTable("HFJ_RES_VER_PROV").addColumn("20200420.33", "PARTITION_DATE").nullable().type(ColumnTypeEnum.DATE_ONLY); - version.onTable("HFJ_RES_PARAM_PRESENT").addColumn("20200420.34", "PARTITION_ID").nullable().type(ColumnTypeEnum.INT); - version.onTable("HFJ_RES_PARAM_PRESENT").addColumn("20200420.35", "PARTITION_DATE").nullable().type(ColumnTypeEnum.DATE_ONLY); + version.onTable("HFJ_RESOURCE") + .addColumn("20200420.2", "PARTITION_ID") + .nullable() + .type(ColumnTypeEnum.INT); + version.onTable("HFJ_RESOURCE") + .addColumn("20200420.3", "PARTITION_DATE") + .nullable() + .type(ColumnTypeEnum.DATE_ONLY); + version.onTable("HFJ_RES_VER") + .addColumn("20200420.4", "PARTITION_ID") + .nullable() + .type(ColumnTypeEnum.INT); + version.onTable("HFJ_RES_VER") + .addColumn("20200420.5", "PARTITION_DATE") + .nullable() + .type(ColumnTypeEnum.DATE_ONLY); + version.onTable("HFJ_IDX_CMP_STRING_UNIQ") + .addColumn("20200420.6", "PARTITION_ID") + .nullable() + .type(ColumnTypeEnum.INT); + version.onTable("HFJ_IDX_CMP_STRING_UNIQ") + .addColumn("20200420.7", "PARTITION_DATE") + .nullable() + .type(ColumnTypeEnum.DATE_ONLY); + version.onTable("HFJ_IDX_CMP_STRING_UNIQ") + .addColumn("20200420.8", "PARTITION_ID") + .nullable() + .type(ColumnTypeEnum.INT); + version.onTable("HFJ_IDX_CMP_STRING_UNIQ") + .addColumn("20200420.9", "PARTITION_DATE") + .nullable() + .type(ColumnTypeEnum.DATE_ONLY); + version.onTable("HFJ_HISTORY_TAG") + .addColumn("20200420.10", "PARTITION_ID") + .nullable() + .type(ColumnTypeEnum.INT); + version.onTable("HFJ_HISTORY_TAG") + .addColumn("20200420.11", "PARTITION_DATE") + .nullable() + 
.type(ColumnTypeEnum.DATE_ONLY); + version.onTable("HFJ_RES_TAG") + .addColumn("20200420.12", "PARTITION_ID") + .nullable() + .type(ColumnTypeEnum.INT); + version.onTable("HFJ_RES_TAG") + .addColumn("20200420.13", "PARTITION_DATE") + .nullable() + .type(ColumnTypeEnum.DATE_ONLY); + version.onTable("HFJ_FORCED_ID") + .addColumn("20200420.14", "PARTITION_ID") + .nullable() + .type(ColumnTypeEnum.INT); + version.onTable("HFJ_FORCED_ID") + .addColumn("20200420.15", "PARTITION_DATE") + .nullable() + .type(ColumnTypeEnum.DATE_ONLY); + version.onTable("HFJ_RES_LINK") + .addColumn("20200420.16", "PARTITION_ID") + .nullable() + .type(ColumnTypeEnum.INT); + version.onTable("HFJ_RES_LINK") + .addColumn("20200420.17", "PARTITION_DATE") + .nullable() + .type(ColumnTypeEnum.DATE_ONLY); + version.onTable("HFJ_SPIDX_STRING") + .addColumn("20200420.18", "PARTITION_ID") + .nullable() + .type(ColumnTypeEnum.INT); + version.onTable("HFJ_SPIDX_STRING") + .addColumn("20200420.19", "PARTITION_DATE") + .nullable() + .type(ColumnTypeEnum.DATE_ONLY); + version.onTable("HFJ_SPIDX_COORDS") + .addColumn("20200420.20", "PARTITION_ID") + .nullable() + .type(ColumnTypeEnum.INT); + version.onTable("HFJ_SPIDX_COORDS") + .addColumn("20200420.21", "PARTITION_DATE") + .nullable() + .type(ColumnTypeEnum.DATE_ONLY); + version.onTable("HFJ_SPIDX_NUMBER") + .addColumn("20200420.22", "PARTITION_ID") + .nullable() + .type(ColumnTypeEnum.INT); + version.onTable("HFJ_SPIDX_NUMBER") + .addColumn("20200420.23", "PARTITION_DATE") + .nullable() + .type(ColumnTypeEnum.DATE_ONLY); + version.onTable("HFJ_SPIDX_TOKEN") + .addColumn("20200420.24", "PARTITION_ID") + .nullable() + .type(ColumnTypeEnum.INT); + version.onTable("HFJ_SPIDX_TOKEN") + .addColumn("20200420.25", "PARTITION_DATE") + .nullable() + .type(ColumnTypeEnum.DATE_ONLY); + version.onTable("HFJ_SPIDX_DATE") + .addColumn("20200420.26", "PARTITION_ID") + .nullable() + .type(ColumnTypeEnum.INT); + version.onTable("HFJ_SPIDX_DATE") + .addColumn("20200420.27", "PARTITION_DATE") + .nullable() + .type(ColumnTypeEnum.DATE_ONLY); + version.onTable("HFJ_SPIDX_URI") + .addColumn("20200420.28", "PARTITION_ID") + .nullable() + .type(ColumnTypeEnum.INT); + version.onTable("HFJ_SPIDX_URI") + .addColumn("20200420.29", "PARTITION_DATE") + .nullable() + .type(ColumnTypeEnum.DATE_ONLY); + version.onTable("HFJ_SPIDX_QUANTITY") + .addColumn("20200420.30", "PARTITION_ID") + .nullable() + .type(ColumnTypeEnum.INT); + version.onTable("HFJ_SPIDX_QUANTITY") + .addColumn("20200420.31", "PARTITION_DATE") + .nullable() + .type(ColumnTypeEnum.DATE_ONLY); + version.onTable("HFJ_RES_VER_PROV") + .addColumn("20200420.32", "PARTITION_ID") + .nullable() + .type(ColumnTypeEnum.INT); + version.onTable("HFJ_RES_VER_PROV") + .addColumn("20200420.33", "PARTITION_DATE") + .nullable() + .type(ColumnTypeEnum.DATE_ONLY); + version.onTable("HFJ_RES_PARAM_PRESENT") + .addColumn("20200420.34", "PARTITION_ID") + .nullable() + .type(ColumnTypeEnum.INT); + version.onTable("HFJ_RES_PARAM_PRESENT") + .addColumn("20200420.35", "PARTITION_DATE") + .nullable() + .type(ColumnTypeEnum.DATE_ONLY); - version.onTable("HFJ_SPIDX_STRING").modifyColumn("20200420.36", "SP_MISSING").nonNullable().failureAllowed().withType(ColumnTypeEnum.BOOLEAN); - version.onTable("HFJ_SPIDX_COORDS").modifyColumn("20200420.37", "SP_MISSING").nonNullable().failureAllowed().withType(ColumnTypeEnum.BOOLEAN); - version.onTable("HFJ_SPIDX_NUMBER").modifyColumn("20200420.38", "SP_MISSING").nonNullable().failureAllowed().withType(ColumnTypeEnum.BOOLEAN); - 
version.onTable("HFJ_SPIDX_TOKEN").modifyColumn("20200420.39", "SP_MISSING").nonNullable().failureAllowed().withType(ColumnTypeEnum.BOOLEAN); - version.onTable("HFJ_SPIDX_DATE").modifyColumn("20200420.40", "SP_MISSING").nonNullable().failureAllowed().withType(ColumnTypeEnum.BOOLEAN); - version.onTable("HFJ_SPIDX_URI").modifyColumn("20200420.41", "SP_MISSING").nonNullable().failureAllowed().withType(ColumnTypeEnum.BOOLEAN); - version.onTable("HFJ_SPIDX_QUANTITY").modifyColumn("20200420.42", "SP_MISSING").nonNullable().failureAllowed().withType(ColumnTypeEnum.BOOLEAN); + version.onTable("HFJ_SPIDX_STRING") + .modifyColumn("20200420.36", "SP_MISSING") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.BOOLEAN); + version.onTable("HFJ_SPIDX_COORDS") + .modifyColumn("20200420.37", "SP_MISSING") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.BOOLEAN); + version.onTable("HFJ_SPIDX_NUMBER") + .modifyColumn("20200420.38", "SP_MISSING") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.BOOLEAN); + version.onTable("HFJ_SPIDX_TOKEN") + .modifyColumn("20200420.39", "SP_MISSING") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.BOOLEAN); + version.onTable("HFJ_SPIDX_DATE") + .modifyColumn("20200420.40", "SP_MISSING") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.BOOLEAN); + version.onTable("HFJ_SPIDX_URI") + .modifyColumn("20200420.41", "SP_MISSING") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.BOOLEAN); + version.onTable("HFJ_SPIDX_QUANTITY") + .modifyColumn("20200420.42", "SP_MISSING") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.BOOLEAN); // Add support for integer comparisons during day-precision date search. Builder.BuilderWithTableName spidxDate = version.onTable("HFJ_SPIDX_DATE"); - spidxDate.addColumn("20200501.1", "SP_VALUE_LOW_DATE_ORDINAL").nullable().type(ColumnTypeEnum.INT); - spidxDate.addColumn("20200501.2", "SP_VALUE_HIGH_DATE_ORDINAL").nullable().type(ColumnTypeEnum.INT); - - spidxDate.addTask(new CalculateOrdinalDatesTask(VersionEnum.V5_0_0, "20200501.3") - .addCalculator("SP_VALUE_LOW_DATE_ORDINAL", t -> ResourceIndexedSearchParamDate.calculateOrdinalValue(t.getDate("SP_VALUE_LOW"))) - .addCalculator("SP_VALUE_HIGH_DATE_ORDINAL", t -> ResourceIndexedSearchParamDate.calculateOrdinalValue(t.getDate("SP_VALUE_HIGH"))) - .setColumnName("SP_VALUE_LOW_DATE_ORDINAL") //It doesn't matter which of the two we choose as they will both be null. - ); + spidxDate + .addColumn("20200501.1", "SP_VALUE_LOW_DATE_ORDINAL") + .nullable() + .type(ColumnTypeEnum.INT); + spidxDate + .addColumn("20200501.2", "SP_VALUE_HIGH_DATE_ORDINAL") + .nullable() + .type(ColumnTypeEnum.INT); + spidxDate.addTask( + new CalculateOrdinalDatesTask(VersionEnum.V5_0_0, "20200501.3") + .addCalculator( + "SP_VALUE_LOW_DATE_ORDINAL", + t -> ResourceIndexedSearchParamDate.calculateOrdinalValue(t.getDate("SP_VALUE_LOW"))) + .addCalculator( + "SP_VALUE_HIGH_DATE_ORDINAL", + t -> ResourceIndexedSearchParamDate.calculateOrdinalValue(t.getDate("SP_VALUE_HIGH"))) + .setColumnName( + "SP_VALUE_LOW_DATE_ORDINAL") // It doesn't matter which of the two we choose as they + // will both be null. 
+ ); } /** @@ -1641,7 +1925,9 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { Builder version = forVersion(VersionEnum.V4_2_0); // TermValueSetConceptDesignation - version.onTable("TRM_VALUESET_C_DESIGNATION").dropIndex("20200202.1", "IDX_VALUESET_C_DSGNTN_VAL").failureAllowed(); + version.onTable("TRM_VALUESET_C_DESIGNATION") + .dropIndex("20200202.1", "IDX_VALUESET_C_DSGNTN_VAL") + .failureAllowed(); Builder.BuilderWithTableName searchTable = version.onTable("HFJ_SEARCH"); searchTable.dropIndex("20200203.1", "IDX_SEARCH_LASTRETURNED"); searchTable.dropColumn("20200203.2", "SEARCH_LAST_RETURNED"); @@ -1656,21 +1942,56 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { * doing is setting a not-null on a column that will never be null anyway. Setting not null * fails on SQL Server because there is an index on this column... Which is dumb, but hey. */ - version.onTable("HFJ_SPIDX_NUMBER").modifyColumn("20190920.1", "RES_ID").nonNullable().failureAllowed().withType(ColumnTypeEnum.LONG); - version.onTable("HFJ_SPIDX_COORDS").modifyColumn("20190920.2", "RES_ID").nonNullable().failureAllowed().withType(ColumnTypeEnum.LONG); - version.onTable("HFJ_SPIDX_TOKEN").modifyColumn("20190920.3", "RES_ID").nonNullable().failureAllowed().withType(ColumnTypeEnum.LONG); - version.onTable("HFJ_SPIDX_STRING").modifyColumn("20190920.4", "RES_ID").nonNullable().failureAllowed().withType(ColumnTypeEnum.LONG); - version.onTable("HFJ_SPIDX_DATE").modifyColumn("20190920.5", "RES_ID").nonNullable().failureAllowed().withType(ColumnTypeEnum.LONG); - version.onTable("HFJ_SPIDX_QUANTITY").modifyColumn("20190920.6", "RES_ID").nonNullable().failureAllowed().withType(ColumnTypeEnum.LONG); - version.onTable("HFJ_SPIDX_URI").modifyColumn("20190920.7", "RES_ID").nonNullable().failureAllowed().withType(ColumnTypeEnum.LONG); + version.onTable("HFJ_SPIDX_NUMBER") + .modifyColumn("20190920.1", "RES_ID") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.LONG); + version.onTable("HFJ_SPIDX_COORDS") + .modifyColumn("20190920.2", "RES_ID") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.LONG); + version.onTable("HFJ_SPIDX_TOKEN") + .modifyColumn("20190920.3", "RES_ID") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.LONG); + version.onTable("HFJ_SPIDX_STRING") + .modifyColumn("20190920.4", "RES_ID") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.LONG); + version.onTable("HFJ_SPIDX_DATE") + .modifyColumn("20190920.5", "RES_ID") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.LONG); + version.onTable("HFJ_SPIDX_QUANTITY") + .modifyColumn("20190920.6", "RES_ID") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.LONG); + version.onTable("HFJ_SPIDX_URI") + .modifyColumn("20190920.7", "RES_ID") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.LONG); // HFJ_SEARCH - version.onTable("HFJ_SEARCH").addColumn("20190921.1", "EXPIRY_OR_NULL").nullable().type(ColumnTypeEnum.DATE_TIMESTAMP); - version.onTable("HFJ_SEARCH").addColumn("20190921.2", "NUM_BLOCKED").nullable().type(ColumnTypeEnum.INT); + version.onTable("HFJ_SEARCH") + .addColumn("20190921.1", "EXPIRY_OR_NULL") + .nullable() + .type(ColumnTypeEnum.DATE_TIMESTAMP); + version.onTable("HFJ_SEARCH") + .addColumn("20190921.2", "NUM_BLOCKED") + .nullable() + .type(ColumnTypeEnum.INT); // HFJ_BLK_EXPORT_JOB version.addIdGenerator("20190921.3", "SEQ_BLKEXJOB_PID"); - Builder.BuilderAddTableByColumns bulkExportJob = 
version.addTableByColumns("20190921.4", "HFJ_BLK_EXPORT_JOB", "PID"); + Builder.BuilderAddTableByColumns bulkExportJob = + version.addTableByColumns("20190921.4", "HFJ_BLK_EXPORT_JOB", "PID"); bulkExportJob.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG); bulkExportJob.addColumn("JOB_ID").nonNullable().type(ColumnTypeEnum.STRING, 36); bulkExportJob.addColumn("JOB_STATUS").nonNullable().type(ColumnTypeEnum.STRING, 10); @@ -1686,49 +2007,68 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // HFJ_BLK_EXPORT_COLLECTION version.addIdGenerator("20190921.7", "SEQ_BLKEXCOL_PID"); - Builder.BuilderAddTableByColumns bulkExportCollection = version.addTableByColumns("20190921.8", "HFJ_BLK_EXPORT_COLLECTION", "PID"); + Builder.BuilderAddTableByColumns bulkExportCollection = + version.addTableByColumns("20190921.8", "HFJ_BLK_EXPORT_COLLECTION", "PID"); bulkExportCollection.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG); bulkExportCollection.addColumn("JOB_PID").nonNullable().type(ColumnTypeEnum.LONG); - bulkExportCollection.addForeignKey("20190921.9", "FK_BLKEXCOL_JOB").toColumn("JOB_PID").references("HFJ_BLK_EXPORT_JOB", "PID"); + bulkExportCollection + .addForeignKey("20190921.9", "FK_BLKEXCOL_JOB") + .toColumn("JOB_PID") + .references("HFJ_BLK_EXPORT_JOB", "PID"); bulkExportCollection.addColumn("RES_TYPE").nonNullable().type(ColumnTypeEnum.STRING, 40); bulkExportCollection.addColumn("TYPE_FILTER").nullable().type(ColumnTypeEnum.STRING, 1000); bulkExportCollection.addColumn("OPTLOCK").nonNullable().type(ColumnTypeEnum.INT); // HFJ_BLK_EXPORT_COLFILE version.addIdGenerator("20190921.10", "SEQ_BLKEXCOLFILE_PID"); - Builder.BuilderAddTableByColumns bulkExportCollectionFile = version.addTableByColumns("20190921.11", "HFJ_BLK_EXPORT_COLFILE", "PID"); + Builder.BuilderAddTableByColumns bulkExportCollectionFile = + version.addTableByColumns("20190921.11", "HFJ_BLK_EXPORT_COLFILE", "PID"); bulkExportCollectionFile.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG); bulkExportCollectionFile.addColumn("COLLECTION_PID").nonNullable().type(ColumnTypeEnum.LONG); bulkExportCollectionFile.addColumn("RES_ID").nonNullable().type(ColumnTypeEnum.STRING, 100); - bulkExportCollectionFile.addForeignKey("20190921.12", "FK_BLKEXCOLFILE_COLLECT").toColumn("COLLECTION_PID").references("HFJ_BLK_EXPORT_COLLECTION", "PID"); + bulkExportCollectionFile + .addForeignKey("20190921.12", "FK_BLKEXCOLFILE_COLLECT") + .toColumn("COLLECTION_PID") + .references("HFJ_BLK_EXPORT_COLLECTION", "PID"); // HFJ_RES_VER_PROV version.startSectionWithMessage("Processing bulkExportCollectionFile: HFJ_RES_VER_PROV"); - Builder.BuilderAddTableByColumns resVerProv = version.addTableByColumns("20190921.13", "HFJ_RES_VER_PROV", "RES_VER_PID"); + Builder.BuilderAddTableByColumns resVerProv = + version.addTableByColumns("20190921.13", "HFJ_RES_VER_PROV", "RES_VER_PID"); resVerProv.addColumn("RES_VER_PID").nonNullable().type(ColumnTypeEnum.LONG); resVerProv - .addForeignKey("20190921.14", "FK_RESVERPROV_RESVER_PID") - .toColumn("RES_VER_PID") - .references("HFJ_RES_VER", "PID"); + .addForeignKey("20190921.14", "FK_RESVERPROV_RESVER_PID") + .toColumn("RES_VER_PID") + .references("HFJ_RES_VER", "PID"); resVerProv.addColumn("RES_PID").nonNullable().type(ColumnTypeEnum.LONG); resVerProv - .addForeignKey("20190921.15", "FK_RESVERPROV_RES_PID") - .toColumn("RES_PID") - .references("HFJ_RESOURCE", "RES_ID") - .doNothing(); // Added below in a better form + .addForeignKey("20190921.15", "FK_RESVERPROV_RES_PID") + 
.toColumn("RES_PID") + .references("HFJ_RESOURCE", "RES_ID") + .doNothing(); // Added below in a better form resVerProv.addColumn("SOURCE_URI").nullable().type(ColumnTypeEnum.STRING, 100); resVerProv.addColumn("REQUEST_ID").nullable().type(ColumnTypeEnum.STRING, 16); - resVerProv.addIndex("20190921.16", "IDX_RESVERPROV_SOURCEURI").unique(false).withColumns("SOURCE_URI"); - resVerProv.addIndex("20190921.17", "IDX_RESVERPROV_REQUESTID").unique(false).withColumns("REQUEST_ID"); + resVerProv + .addIndex("20190921.16", "IDX_RESVERPROV_SOURCEURI") + .unique(false) + .withColumns("SOURCE_URI"); + resVerProv + .addIndex("20190921.17", "IDX_RESVERPROV_REQUESTID") + .unique(false) + .withColumns("REQUEST_ID"); // TermValueSetConceptDesignation version.startSectionWithMessage("Processing bulkExportCollectionFile: TRM_VALUESET_C_DESIGNATION"); - Builder.BuilderWithTableName termValueSetConceptDesignationTable = version.onTable("TRM_VALUESET_C_DESIGNATION"); - termValueSetConceptDesignationTable.addColumn("20190921.18", "VALUESET_PID").nonNullable().type(ColumnTypeEnum.LONG); + Builder.BuilderWithTableName termValueSetConceptDesignationTable = + version.onTable("TRM_VALUESET_C_DESIGNATION"); termValueSetConceptDesignationTable - .addForeignKey("20190921.19", "FK_TRM_VSCD_VS_PID") - .toColumn("VALUESET_PID") - .references("TRM_VALUESET", "PID"); + .addColumn("20190921.18", "VALUESET_PID") + .nonNullable() + .type(ColumnTypeEnum.LONG); + termValueSetConceptDesignationTable + .addForeignKey("20190921.19", "FK_TRM_VSCD_VS_PID") + .toColumn("VALUESET_PID") + .references("TRM_VALUESET", "PID"); // Drop HFJ_SEARCH_RESULT foreign keys version.onTable("HFJ_SEARCH_RESULT").dropForeignKey("20190921.20", "FK_SEARCHRES_RES", "HFJ_RESOURCE"); @@ -1737,48 +2077,90 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // TermValueSet version.startSectionWithMessage("Processing bulkExportCollectionFile: TRM_VALUESET"); Builder.BuilderWithTableName termValueSetTable = version.onTable("TRM_VALUESET"); - termValueSetTable.addColumn("20190921.22", "TOTAL_CONCEPTS").nonNullable().type(ColumnTypeEnum.LONG); - termValueSetTable.addColumn("20190921.23", "TOTAL_CONCEPT_DESIGNATIONS").nonNullable().type(ColumnTypeEnum.LONG); termValueSetTable - .dropIndex("20190921.24", "IDX_VALUESET_EXP_STATUS"); + .addColumn("20190921.22", "TOTAL_CONCEPTS") + .nonNullable() + .type(ColumnTypeEnum.LONG); + termValueSetTable + .addColumn("20190921.23", "TOTAL_CONCEPT_DESIGNATIONS") + .nonNullable() + .type(ColumnTypeEnum.LONG); + termValueSetTable.dropIndex("20190921.24", "IDX_VALUESET_EXP_STATUS"); version.dropIdGenerator("20190921.25", "SEQ_SEARCHPARM_ID"); // TermValueSetConcept version.startSectionWithMessage("Processing bulkExportCollectionFile: TRM_VALUESET_CONCEPT"); Builder.BuilderWithTableName termValueSetConceptTable = version.onTable("TRM_VALUESET_CONCEPT"); - termValueSetConceptTable.addColumn("20190921.26", "VALUESET_ORDER").nonNullable().type(ColumnTypeEnum.INT); termValueSetConceptTable - .addIndex("20190921.27", "IDX_VS_CONCEPT_ORDER") - .unique(true) - .withColumns("VALUESET_PID", "VALUESET_ORDER"); + .addColumn("20190921.26", "VALUESET_ORDER") + .nonNullable() + .type(ColumnTypeEnum.INT); + termValueSetConceptTable + .addIndex("20190921.27", "IDX_VS_CONCEPT_ORDER") + .unique(true) + .withColumns("VALUESET_PID", "VALUESET_ORDER"); // Account for RESTYPE_LEN column increasing from 30 to 40 - version.onTable("HFJ_RESOURCE").modifyColumn("20191002.1", 
"RES_TYPE").nonNullable().failureAllowed().withType(ColumnTypeEnum.STRING, 40); - version.onTable("HFJ_RES_VER").modifyColumn("20191002.2", "RES_TYPE").nonNullable().failureAllowed().withType(ColumnTypeEnum.STRING, 40); - version.onTable("HFJ_HISTORY_TAG").modifyColumn("20191002.3", "RES_TYPE").nonNullable().failureAllowed().withType(ColumnTypeEnum.STRING, 40); - version.onTable("HFJ_RES_LINK").modifyColumn("20191002.4", "SOURCE_RESOURCE_TYPE").nonNullable().failureAllowed().withType(ColumnTypeEnum.STRING, 40); - version.onTable("HFJ_RES_LINK").modifyColumn("20191002.5", "TARGET_RESOURCE_TYPE").nonNullable().failureAllowed().withType(ColumnTypeEnum.STRING, 40); - version.onTable("HFJ_RES_TAG").modifyColumn("20191002.6", "RES_TYPE").nonNullable().failureAllowed().withType(ColumnTypeEnum.STRING, 40); + version.onTable("HFJ_RESOURCE") + .modifyColumn("20191002.1", "RES_TYPE") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.STRING, 40); + version.onTable("HFJ_RES_VER") + .modifyColumn("20191002.2", "RES_TYPE") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.STRING, 40); + version.onTable("HFJ_HISTORY_TAG") + .modifyColumn("20191002.3", "RES_TYPE") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.STRING, 40); + version.onTable("HFJ_RES_LINK") + .modifyColumn("20191002.4", "SOURCE_RESOURCE_TYPE") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.STRING, 40); + version.onTable("HFJ_RES_LINK") + .modifyColumn("20191002.5", "TARGET_RESOURCE_TYPE") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.STRING, 40); + version.onTable("HFJ_RES_TAG") + .modifyColumn("20191002.6", "RES_TYPE") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.STRING, 40); // TermConceptDesignation version.startSectionWithMessage("Processing table: TRM_CONCEPT_DESIG"); - version.onTable("TRM_CONCEPT_DESIG").modifyColumn("20191002.7", "VAL").nonNullable().withType(ColumnTypeEnum.STRING, 2000); + version.onTable("TRM_CONCEPT_DESIG") + .modifyColumn("20191002.7", "VAL") + .nonNullable() + .withType(ColumnTypeEnum.STRING, 2000); // TermValueSetConceptDesignation version.startSectionWithMessage("Processing table: TRM_VALUESET_C_DESIGNATION"); - version.onTable("TRM_VALUESET_C_DESIGNATION").modifyColumn("20191002.8", "VAL").nonNullable().withType(ColumnTypeEnum.STRING, 2000); + version.onTable("TRM_VALUESET_C_DESIGNATION") + .modifyColumn("20191002.8", "VAL") + .nonNullable() + .withType(ColumnTypeEnum.STRING, 2000); // TermConceptProperty version.startSectionWithMessage("Processing table: TRM_CONCEPT_PROPERTY"); - version.onTable("TRM_CONCEPT_PROPERTY").addColumn("20191002.9", "PROP_VAL_LOB").nullable().type(ColumnTypeEnum.BLOB); + version.onTable("TRM_CONCEPT_PROPERTY") + .addColumn("20191002.9", "PROP_VAL_LOB") + .nullable() + .type(ColumnTypeEnum.BLOB); } protected void init400() { // 20190401 - 20190814 Builder version = forVersion(VersionEnum.V4_0_0); // BinaryStorageEntity - Builder.BuilderAddTableByColumns binaryBlob = version.addTableByColumns("20190722.1", "HFJ_BINARY_STORAGE_BLOB", "BLOB_ID"); + Builder.BuilderAddTableByColumns binaryBlob = + version.addTableByColumns("20190722.1", "HFJ_BINARY_STORAGE_BLOB", "BLOB_ID"); binaryBlob.addColumn("BLOB_ID").nonNullable().type(ColumnTypeEnum.STRING, 200); binaryBlob.addColumn("RESOURCE_ID").nonNullable().type(ColumnTypeEnum.STRING, 100); binaryBlob.addColumn("BLOB_SIZE").nullable().type(ColumnTypeEnum.INT); @@ -1791,121 +2173,160 @@ public class HapiFhirJpaMigrationTasks extends 
BaseMigrationTasks { version.onTable("TRM_VALUESET_CODE").dropThisTable("20190722.2"); version.onTable("TRM_CONCEPT_MAP_GROUP") - .renameColumn("20190722.3", "myConceptMapUrl", "CONCEPT_MAP_URL", false, true) - .renameColumn("20190722.4", "mySourceValueSet", "SOURCE_VS", false, true) - .renameColumn("20190722.5", "myTargetValueSet", "TARGET_VS", false, true); + .renameColumn("20190722.3", "myConceptMapUrl", "CONCEPT_MAP_URL", false, true) + .renameColumn("20190722.4", "mySourceValueSet", "SOURCE_VS", false, true) + .renameColumn("20190722.5", "myTargetValueSet", "TARGET_VS", false, true); version.onTable("TRM_CONCEPT_MAP_GROUP") - .modifyColumn("20190722.6", "CONCEPT_MAP_URL").nullable().withType(ColumnTypeEnum.STRING, 200); + .modifyColumn("20190722.6", "CONCEPT_MAP_URL") + .nullable() + .withType(ColumnTypeEnum.STRING, 200); version.onTable("TRM_CONCEPT_MAP_GROUP") - .modifyColumn("20190722.7", "SOURCE_VERSION").nullable().withType(ColumnTypeEnum.STRING, 200); + .modifyColumn("20190722.7", "SOURCE_VERSION") + .nullable() + .withType(ColumnTypeEnum.STRING, 200); version.onTable("TRM_CONCEPT_MAP_GROUP") - .modifyColumn("20190722.8", "SOURCE_VS").nullable().withType(ColumnTypeEnum.STRING, 200); + .modifyColumn("20190722.8", "SOURCE_VS") + .nullable() + .withType(ColumnTypeEnum.STRING, 200); version.onTable("TRM_CONCEPT_MAP_GROUP") - .modifyColumn("20190722.9", "TARGET_VERSION").nullable().withType(ColumnTypeEnum.STRING, 200); + .modifyColumn("20190722.9", "TARGET_VERSION") + .nullable() + .withType(ColumnTypeEnum.STRING, 200); version.onTable("TRM_CONCEPT_MAP_GROUP") - .modifyColumn("20190722.10", "TARGET_VS").nullable().withType(ColumnTypeEnum.STRING, 200); + .modifyColumn("20190722.10", "TARGET_VS") + .nullable() + .withType(ColumnTypeEnum.STRING, 200); version.onTable("TRM_CONCEPT_MAP_GRP_ELEMENT") - .renameColumn("20190722.11", "myConceptMapUrl", "CONCEPT_MAP_URL", false, true) - .renameColumn("20190722.12", "mySystem", "SYSTEM_URL", false, true) - .renameColumn("20190722.13", "mySystemVersion", "SYSTEM_VERSION", false, true) - .renameColumn("20190722.14", "myValueSet", "VALUESET_URL", false, true); + .renameColumn("20190722.11", "myConceptMapUrl", "CONCEPT_MAP_URL", false, true) + .renameColumn("20190722.12", "mySystem", "SYSTEM_URL", false, true) + .renameColumn("20190722.13", "mySystemVersion", "SYSTEM_VERSION", false, true) + .renameColumn("20190722.14", "myValueSet", "VALUESET_URL", false, true); version.onTable("TRM_CONCEPT_MAP_GRP_ELEMENT") - .modifyColumn("20190722.15", "CONCEPT_MAP_URL").nullable().withType(ColumnTypeEnum.STRING, 200); + .modifyColumn("20190722.15", "CONCEPT_MAP_URL") + .nullable() + .withType(ColumnTypeEnum.STRING, 200); version.onTable("TRM_CONCEPT_MAP_GRP_ELEMENT") - .modifyColumn("20190722.16", "SOURCE_CODE").nonNullable().withType(ColumnTypeEnum.STRING, 500); + .modifyColumn("20190722.16", "SOURCE_CODE") + .nonNullable() + .withType(ColumnTypeEnum.STRING, 500); version.onTable("TRM_CONCEPT_MAP_GRP_ELEMENT") - .modifyColumn("20190722.17", "SYSTEM_URL").nullable().withType(ColumnTypeEnum.STRING, 200); + .modifyColumn("20190722.17", "SYSTEM_URL") + .nullable() + .withType(ColumnTypeEnum.STRING, 200); version.onTable("TRM_CONCEPT_MAP_GRP_ELEMENT") - .modifyColumn("20190722.18", "SYSTEM_VERSION").nullable().withType(ColumnTypeEnum.STRING, 200); + .modifyColumn("20190722.18", "SYSTEM_VERSION") + .nullable() + .withType(ColumnTypeEnum.STRING, 200); version.onTable("TRM_CONCEPT_MAP_GRP_ELEMENT") - .modifyColumn("20190722.19", 
"VALUESET_URL").nullable().withType(ColumnTypeEnum.STRING, 200); + .modifyColumn("20190722.19", "VALUESET_URL") + .nullable() + .withType(ColumnTypeEnum.STRING, 200); version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT") - .renameColumn("20190722.20", "myConceptMapUrl", "CONCEPT_MAP_URL", false, true) - .renameColumn("20190722.21", "mySystem", "SYSTEM_URL", false, true) - .renameColumn("20190722.22", "mySystemVersion", "SYSTEM_VERSION", false, true) - .renameColumn("20190722.23", "myValueSet", "VALUESET_URL", false, true); + .renameColumn("20190722.20", "myConceptMapUrl", "CONCEPT_MAP_URL", false, true) + .renameColumn("20190722.21", "mySystem", "SYSTEM_URL", false, true) + .renameColumn("20190722.22", "mySystemVersion", "SYSTEM_VERSION", false, true) + .renameColumn("20190722.23", "myValueSet", "VALUESET_URL", false, true); version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT") - .modifyColumn("20190722.24", "CONCEPT_MAP_URL").nullable().withType(ColumnTypeEnum.STRING, 200); + .modifyColumn("20190722.24", "CONCEPT_MAP_URL") + .nullable() + .withType(ColumnTypeEnum.STRING, 200); version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT") - .modifyColumn("20190722.25", "SYSTEM_URL").nullable().withType(ColumnTypeEnum.STRING, 200); + .modifyColumn("20190722.25", "SYSTEM_URL") + .nullable() + .withType(ColumnTypeEnum.STRING, 200); version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT") - .modifyColumn("20190722.26", "SYSTEM_VERSION").nullable().withType(ColumnTypeEnum.STRING, 200); + .modifyColumn("20190722.26", "SYSTEM_VERSION") + .nullable() + .withType(ColumnTypeEnum.STRING, 200); version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT") - .modifyColumn("20190722.27", "TARGET_CODE").nonNullable().withType(ColumnTypeEnum.STRING, 500); + .modifyColumn("20190722.27", "TARGET_CODE") + .nonNullable() + .withType(ColumnTypeEnum.STRING, 500); version.onTable("TRM_CONCEPT_MAP_GRP_ELM_TGT") - .modifyColumn("20190722.28", "VALUESET_URL").nullable().withType(ColumnTypeEnum.STRING, 200); - - version.onTable("TRM_CONCEPT") - .renameColumn("20190722.29", "CODE", "CODEVAL", false, true); + .modifyColumn("20190722.28", "VALUESET_URL") + .nullable() + .withType(ColumnTypeEnum.STRING, 200); + version.onTable("TRM_CONCEPT").renameColumn("20190722.29", "CODE", "CODEVAL", false, true); // TermValueSet version.startSectionWithMessage("Processing table: TRM_VALUESET"); version.addIdGenerator("20190722.30", "SEQ_VALUESET_PID"); - Builder.BuilderAddTableByColumns termValueSetTable = version.addTableByColumns("20190722.31", "TRM_VALUESET", "PID"); + Builder.BuilderAddTableByColumns termValueSetTable = + version.addTableByColumns("20190722.31", "TRM_VALUESET", "PID"); termValueSetTable.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG); termValueSetTable.addColumn("URL").nonNullable().type(ColumnTypeEnum.STRING, 200); termValueSetTable - .addIndex("20190722.32", "IDX_VALUESET_URL") - .unique(true) - .withColumns("URL"); + .addIndex("20190722.32", "IDX_VALUESET_URL") + .unique(true) + .withColumns("URL"); termValueSetTable.addColumn("RES_ID").nonNullable().type(ColumnTypeEnum.LONG); termValueSetTable - .addForeignKey("20190722.33", "FK_TRMVALUESET_RES") - .toColumn("RES_ID") - .references("HFJ_RESOURCE", "RES_ID"); + .addForeignKey("20190722.33", "FK_TRMVALUESET_RES") + .toColumn("RES_ID") + .references("HFJ_RESOURCE", "RES_ID"); termValueSetTable.addColumn("NAME").nullable().type(ColumnTypeEnum.STRING, 200); + version.onTable("TRM_VALUESET").renameColumn("20190722.34", "NAME", "VSNAME", true, true); version.onTable("TRM_VALUESET") - 
.renameColumn("20190722.34", "NAME", "VSNAME", true, true); - version.onTable("TRM_VALUESET") - .modifyColumn("20190722.35", "RES_ID").nullable().withType(ColumnTypeEnum.LONG); + .modifyColumn("20190722.35", "RES_ID") + .nullable() + .withType(ColumnTypeEnum.LONG); Builder.BuilderWithTableName termValueSetTableChange = version.onTable("TRM_VALUESET"); - termValueSetTableChange.addColumn("20190722.36", "EXPANSION_STATUS").nonNullable().type(ColumnTypeEnum.STRING, 50); termValueSetTableChange - .addIndex("20190722.37", "IDX_VALUESET_EXP_STATUS") - .unique(false) - .withColumns("EXPANSION_STATUS"); + .addColumn("20190722.36", "EXPANSION_STATUS") + .nonNullable() + .type(ColumnTypeEnum.STRING, 50); + termValueSetTableChange + .addIndex("20190722.37", "IDX_VALUESET_EXP_STATUS") + .unique(false) + .withColumns("EXPANSION_STATUS"); // TermValueSetConcept version.startSectionWithMessage("Processing table: TRM_VALUESET_CONCEPT"); version.addIdGenerator("20190722.38", "SEQ_VALUESET_CONCEPT_PID"); - Builder.BuilderAddTableByColumns termValueSetConceptTable = version.addTableByColumns("20190722.39", "TRM_VALUESET_CONCEPT", "PID"); + Builder.BuilderAddTableByColumns termValueSetConceptTable = + version.addTableByColumns("20190722.39", "TRM_VALUESET_CONCEPT", "PID"); termValueSetConceptTable.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG); termValueSetConceptTable.addColumn("VALUESET_PID").nonNullable().type(ColumnTypeEnum.LONG); termValueSetConceptTable - .addForeignKey("20190722.40", "FK_TRM_VALUESET_PID") - .toColumn("VALUESET_PID") - .references("TRM_VALUESET", "PID"); + .addForeignKey("20190722.40", "FK_TRM_VALUESET_PID") + .toColumn("VALUESET_PID") + .references("TRM_VALUESET", "PID"); termValueSetConceptTable.addColumn("SYSTEM_URL").nonNullable().type(ColumnTypeEnum.STRING, 200); termValueSetConceptTable.addColumn("CODEVAL").nonNullable().type(ColumnTypeEnum.STRING, 500); termValueSetConceptTable.addColumn("DISPLAY").nullable().type(ColumnTypeEnum.STRING, 400); version.onTable("TRM_VALUESET_CONCEPT") - .renameColumn("20190722.41", "CODE", "CODEVAL", true, true) - .renameColumn("20190722.42", "SYSTEM", "SYSTEM_URL", true, true); + .renameColumn("20190722.41", "CODE", "CODEVAL", true, true) + .renameColumn("20190722.42", "SYSTEM", "SYSTEM_URL", true, true); version.startSectionWithMessage("Processing table: TRM_VALUESET_CONCEPT, swapping index for unique constraint"); termValueSetConceptTable.dropIndex("20190801.1", "IDX_VALUESET_CONCEPT_CS_CD"); // This index has been renamed in later versions. As such, allowing failure here as some DBs disallow // multiple indexes referencing the same set of columns. 
termValueSetConceptTable - .addIndex("20190801.2", "IDX_VS_CONCEPT_CS_CD") - .unique(true) - .withColumns("VALUESET_PID", "SYSTEM_URL", "CODEVAL").failureAllowed(); + .addIndex("20190801.2", "IDX_VS_CONCEPT_CS_CD") + .unique(true) + .withColumns("VALUESET_PID", "SYSTEM_URL", "CODEVAL") + .failureAllowed(); // TermValueSetConceptDesignation version.startSectionWithMessage("Processing table: TRM_VALUESET_C_DESIGNATION"); version.addIdGenerator("20190801.3", "SEQ_VALUESET_C_DSGNTN_PID"); - Builder.BuilderAddTableByColumns termValueSetConceptDesignationTable = version.addTableByColumns("20190801.4", "TRM_VALUESET_C_DESIGNATION", "PID"); + Builder.BuilderAddTableByColumns termValueSetConceptDesignationTable = + version.addTableByColumns("20190801.4", "TRM_VALUESET_C_DESIGNATION", "PID"); termValueSetConceptDesignationTable.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG); - termValueSetConceptDesignationTable.addColumn("VALUESET_CONCEPT_PID").nonNullable().type(ColumnTypeEnum.LONG); termValueSetConceptDesignationTable - .addForeignKey("20190801.5", "FK_TRM_VALUESET_CONCEPT_PID") - .toColumn("VALUESET_CONCEPT_PID") - .references("TRM_VALUESET_CONCEPT", "PID"); + .addColumn("VALUESET_CONCEPT_PID") + .nonNullable() + .type(ColumnTypeEnum.LONG); + termValueSetConceptDesignationTable + .addForeignKey("20190801.5", "FK_TRM_VALUESET_CONCEPT_PID") + .toColumn("VALUESET_CONCEPT_PID") + .references("TRM_VALUESET_CONCEPT", "PID"); termValueSetConceptDesignationTable.addColumn("LANG").nullable().type(ColumnTypeEnum.STRING, 500); termValueSetConceptDesignationTable.addColumn("USE_SYSTEM").nullable().type(ColumnTypeEnum.STRING, 500); termValueSetConceptDesignationTable.addColumn("USE_CODE").nullable().type(ColumnTypeEnum.STRING, 500); @@ -1914,19 +2335,23 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // This index turned out not to be needed so it is disabled termValueSetConceptDesignationTable - .addIndex("20190801.6", "IDX_VALUESET_C_DSGNTN_VAL") - .unique(false) - .withColumns("VAL") - .doNothing(); + .addIndex("20190801.6", "IDX_VALUESET_C_DSGNTN_VAL") + .unique(false) + .withColumns("VAL") + .doNothing(); // TermCodeSystemVersion version.startSectionWithMessage("Processing table: TRM_CODESYSTEM_VER"); Builder.BuilderWithTableName termCodeSystemVersionTable = version.onTable("TRM_CODESYSTEM_VER"); - termCodeSystemVersionTable.addColumn("20190814.1", "CS_DISPLAY").nullable().type(ColumnTypeEnum.STRING, 200); + termCodeSystemVersionTable + .addColumn("20190814.1", "CS_DISPLAY") + .nullable() + .type(ColumnTypeEnum.STRING, 200); // ResourceReindexJobEntry version.addIdGenerator("20190814.2", "SEQ_RES_REINDEX_JOB"); - Builder.BuilderAddTableByColumns reindex = version.addTableByColumns("20190814.3", "HFJ_RES_REINDEX_JOB", "PID"); + Builder.BuilderAddTableByColumns reindex = + version.addTableByColumns("20190814.3", "HFJ_RES_REINDEX_JOB", "PID"); reindex.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG); reindex.addColumn("RES_TYPE").nullable().type(ColumnTypeEnum.STRING, 100); reindex.addColumn("UPDATE_THRESHOLD_HIGH").nonNullable().type(ColumnTypeEnum.DATE_TIMESTAMP); @@ -1937,72 +2362,153 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // Search version.onTable("HFJ_SEARCH") - .addColumn("20190814.4", "SEARCH_DELETED").nullable().type(ColumnTypeEnum.BOOLEAN); + .addColumn("20190814.4", "SEARCH_DELETED") + .nullable() + .type(ColumnTypeEnum.BOOLEAN); version.onTable("HFJ_SEARCH") - .modifyColumn("20190814.5", 
"SEARCH_LAST_RETURNED").nonNullable().withType(ColumnTypeEnum.DATE_TIMESTAMP); + .modifyColumn("20190814.5", "SEARCH_LAST_RETURNED") + .nonNullable() + .withType(ColumnTypeEnum.DATE_TIMESTAMP); version.onTable("HFJ_SEARCH") - .addColumn("20190814.6", "SEARCH_PARAM_MAP").nullable().type(ColumnTypeEnum.BLOB); + .addColumn("20190814.6", "SEARCH_PARAM_MAP") + .nullable() + .type(ColumnTypeEnum.BLOB); version.onTable("HFJ_SEARCH") - .modifyColumn("20190814.7", "SEARCH_UUID").nonNullable().withType(ColumnTypeEnum.STRING, Search.SEARCH_UUID_COLUMN_LENGTH); + .modifyColumn("20190814.7", "SEARCH_UUID") + .nonNullable() + .withType(ColumnTypeEnum.STRING, Search.SEARCH_UUID_COLUMN_LENGTH); version.onTable("HFJ_SEARCH_PARM").dropThisTable("20190814.8"); // Make some columns non-nullable that were previously nullable - These are marked as failure allowed, since // SQL Server won't let us change nullability on columns with indexes pointing to them - version.onTable("HFJ_SPIDX_COORDS").modifyColumn("20190814.9", "RES_TYPE").nonNullable().failureAllowed().withType(ColumnTypeEnum.STRING, 100); - version.onTable("HFJ_SPIDX_DATE").modifyColumn("20190814.10", "RES_TYPE").nonNullable().failureAllowed().withType(ColumnTypeEnum.STRING, 100); - version.onTable("HFJ_SPIDX_STRING").modifyColumn("20190814.11", "RES_TYPE").nonNullable().failureAllowed().withType(ColumnTypeEnum.STRING, 100); - version.onTable("HFJ_SPIDX_STRING").addColumn("20190814.12", "HASH_IDENTITY").nullable().type(ColumnTypeEnum.LONG); - version.onTable("HFJ_SPIDX_STRING").addIndex("20190814.13", "IDX_SP_STRING_HASH_IDENT").unique(false).withColumns("HASH_IDENTITY"); - version.onTable("HFJ_SPIDX_COORDS").modifyColumn("20190814.14", "RES_TYPE").nonNullable().failureAllowed().withType(ColumnTypeEnum.STRING, 100); - version.onTable("HFJ_SPIDX_QUANTITY").modifyColumn("20190814.15", "RES_TYPE").nonNullable().failureAllowed().withType(ColumnTypeEnum.STRING, 100); + version.onTable("HFJ_SPIDX_COORDS") + .modifyColumn("20190814.9", "RES_TYPE") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.STRING, 100); + version.onTable("HFJ_SPIDX_DATE") + .modifyColumn("20190814.10", "RES_TYPE") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.STRING, 100); + version.onTable("HFJ_SPIDX_STRING") + .modifyColumn("20190814.11", "RES_TYPE") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.STRING, 100); + version.onTable("HFJ_SPIDX_STRING") + .addColumn("20190814.12", "HASH_IDENTITY") + .nullable() + .type(ColumnTypeEnum.LONG); + version.onTable("HFJ_SPIDX_STRING") + .addIndex("20190814.13", "IDX_SP_STRING_HASH_IDENT") + .unique(false) + .withColumns("HASH_IDENTITY"); + version.onTable("HFJ_SPIDX_COORDS") + .modifyColumn("20190814.14", "RES_TYPE") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.STRING, 100); + version.onTable("HFJ_SPIDX_QUANTITY") + .modifyColumn("20190814.15", "RES_TYPE") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.STRING, 100); version.onTable("HFJ_SPIDX_QUANTITY").dropColumn("20190814.16", "HASH_UNITS_AND_VALPREFIX"); version.onTable("HFJ_SPIDX_QUANTITY").dropColumn("20190814.17", "HASH_VALPREFIX"); - version.onTable("HFJ_SPIDX_NUMBER").modifyColumn("20190814.18", "RES_TYPE").nonNullable().failureAllowed().withType(ColumnTypeEnum.STRING, 100); - version.onTable("HFJ_SPIDX_TOKEN").modifyColumn("20190814.19", "RES_TYPE").nonNullable().failureAllowed().withType(ColumnTypeEnum.STRING, 100); - version.onTable("HFJ_SPIDX_URI").modifyColumn("20190814.20", 
"RES_TYPE").nonNullable().failureAllowed().withType(ColumnTypeEnum.STRING, 100); - version.onTable("HFJ_SPIDX_URI").modifyColumn("20190814.21", "SP_URI").nullable().failureAllowed().withType(ColumnTypeEnum.STRING, 254); - version.onTable("TRM_CODESYSTEM").modifyColumn("20190814.22", "CODE_SYSTEM_URI").nonNullable().failureAllowed().withType(ColumnTypeEnum.STRING, 200); - version.onTable("TRM_CODESYSTEM").modifyColumn("20190814.23", "CS_NAME").nullable().failureAllowed().withType(ColumnTypeEnum.STRING, 200); - version.onTable("TRM_CODESYSTEM_VER").modifyColumn("20190814.24", "CS_VERSION_ID").nullable().failureAllowed().withType(ColumnTypeEnum.STRING, 200); + version.onTable("HFJ_SPIDX_NUMBER") + .modifyColumn("20190814.18", "RES_TYPE") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.STRING, 100); + version.onTable("HFJ_SPIDX_TOKEN") + .modifyColumn("20190814.19", "RES_TYPE") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.STRING, 100); + version.onTable("HFJ_SPIDX_URI") + .modifyColumn("20190814.20", "RES_TYPE") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.STRING, 100); + version.onTable("HFJ_SPIDX_URI") + .modifyColumn("20190814.21", "SP_URI") + .nullable() + .failureAllowed() + .withType(ColumnTypeEnum.STRING, 254); + version.onTable("TRM_CODESYSTEM") + .modifyColumn("20190814.22", "CODE_SYSTEM_URI") + .nonNullable() + .failureAllowed() + .withType(ColumnTypeEnum.STRING, 200); + version.onTable("TRM_CODESYSTEM") + .modifyColumn("20190814.23", "CS_NAME") + .nullable() + .failureAllowed() + .withType(ColumnTypeEnum.STRING, 200); + version.onTable("TRM_CODESYSTEM_VER") + .modifyColumn("20190814.24", "CS_VERSION_ID") + .nullable() + .failureAllowed() + .withType(ColumnTypeEnum.STRING, 200); } - private void init360() { // 20180918 - 20181112 Builder version = forVersion(VersionEnum.V3_6_0); // Resource Link Builder.BuilderWithTableName resourceLink = version.onTable("HFJ_RES_LINK"); version.startSectionWithMessage("Starting work on table: " + resourceLink.getTableName()); - resourceLink - .modifyColumn("20180929.1", "SRC_PATH") - .nonNullable() - .withType(ColumnTypeEnum.STRING, 200); + resourceLink.modifyColumn("20180929.1", "SRC_PATH").nonNullable().withType(ColumnTypeEnum.STRING, 200); // Search Builder.BuilderWithTableName search = version.onTable("HFJ_SEARCH"); version.startSectionWithMessage("Starting work on table: " + search.getTableName()); - search - .addColumn("20181001.1", "OPTLOCK_VERSION") - .nullable() - .type(ColumnTypeEnum.INT); + search.addColumn("20181001.1", "OPTLOCK_VERSION").nullable().type(ColumnTypeEnum.INT); version.addTableRawSql("20181104.1", "HFJ_RES_REINDEX_JOB") - .addSql(DriverTypeEnum.MSSQL_2012, "create table HFJ_RES_REINDEX_JOB (PID bigint not null, JOB_DELETED bit not null, RES_TYPE varchar(255), SUSPENDED_UNTIL datetime2, UPDATE_THRESHOLD_HIGH datetime2 not null, UPDATE_THRESHOLD_LOW datetime2, primary key (PID))") - .addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table HFJ_RES_REINDEX_JOB (PID bigint not null, JOB_DELETED boolean not null, RES_TYPE varchar(255), SUSPENDED_UNTIL timestamp, UPDATE_THRESHOLD_HIGH timestamp not null, UPDATE_THRESHOLD_LOW timestamp, primary key (PID))") - .addSql(DriverTypeEnum.MARIADB_10_1, "create table HFJ_RES_REINDEX_JOB (PID bigint not null, JOB_DELETED bit not null, RES_TYPE varchar(255), SUSPENDED_UNTIL datetime(6), UPDATE_THRESHOLD_HIGH datetime(6) not null, UPDATE_THRESHOLD_LOW datetime(6), primary key (PID))") - .addSql(DriverTypeEnum.POSTGRES_9_4, "create table 
HFJ_RES_REINDEX_JOB (PID int8 not null, JOB_DELETED boolean not null, RES_TYPE varchar(255), SUSPENDED_UNTIL timestamp, UPDATE_THRESHOLD_HIGH timestamp not null, UPDATE_THRESHOLD_LOW timestamp, primary key (PID))") - .addSql(DriverTypeEnum.MYSQL_5_7, " create table HFJ_RES_REINDEX_JOB (PID bigint not null, JOB_DELETED bit not null, RES_TYPE varchar(255), SUSPENDED_UNTIL datetime(6), UPDATE_THRESHOLD_HIGH datetime(6) not null, UPDATE_THRESHOLD_LOW datetime(6), primary key (PID))") - .addSql(DriverTypeEnum.ORACLE_12C, "create table HFJ_RES_REINDEX_JOB (PID number(19,0) not null, JOB_DELETED number(1,0) not null, RES_TYPE varchar2(255 char), SUSPENDED_UNTIL timestamp, UPDATE_THRESHOLD_HIGH timestamp not null, UPDATE_THRESHOLD_LOW timestamp, primary key (PID))"); + .addSql( + DriverTypeEnum.MSSQL_2012, + "create table HFJ_RES_REINDEX_JOB (PID bigint not null, JOB_DELETED bit not null, RES_TYPE varchar(255), SUSPENDED_UNTIL datetime2, UPDATE_THRESHOLD_HIGH datetime2 not null, UPDATE_THRESHOLD_LOW datetime2, primary key (PID))") + .addSql( + DriverTypeEnum.DERBY_EMBEDDED, + "create table HFJ_RES_REINDEX_JOB (PID bigint not null, JOB_DELETED boolean not null, RES_TYPE varchar(255), SUSPENDED_UNTIL timestamp, UPDATE_THRESHOLD_HIGH timestamp not null, UPDATE_THRESHOLD_LOW timestamp, primary key (PID))") + .addSql( + DriverTypeEnum.MARIADB_10_1, + "create table HFJ_RES_REINDEX_JOB (PID bigint not null, JOB_DELETED bit not null, RES_TYPE varchar(255), SUSPENDED_UNTIL datetime(6), UPDATE_THRESHOLD_HIGH datetime(6) not null, UPDATE_THRESHOLD_LOW datetime(6), primary key (PID))") + .addSql( + DriverTypeEnum.POSTGRES_9_4, + "create table HFJ_RES_REINDEX_JOB (PID int8 not null, JOB_DELETED boolean not null, RES_TYPE varchar(255), SUSPENDED_UNTIL timestamp, UPDATE_THRESHOLD_HIGH timestamp not null, UPDATE_THRESHOLD_LOW timestamp, primary key (PID))") + .addSql( + DriverTypeEnum.MYSQL_5_7, + " create table HFJ_RES_REINDEX_JOB (PID bigint not null, JOB_DELETED bit not null, RES_TYPE varchar(255), SUSPENDED_UNTIL datetime(6), UPDATE_THRESHOLD_HIGH datetime(6) not null, UPDATE_THRESHOLD_LOW datetime(6), primary key (PID))") + .addSql( + DriverTypeEnum.ORACLE_12C, + "create table HFJ_RES_REINDEX_JOB (PID number(19,0) not null, JOB_DELETED number(1,0) not null, RES_TYPE varchar2(255 char), SUSPENDED_UNTIL timestamp, UPDATE_THRESHOLD_HIGH timestamp not null, UPDATE_THRESHOLD_LOW timestamp, primary key (PID))"); - version.onTable("TRM_CONCEPT_DESIG").addColumn("20181104.2", "CS_VER_PID").nullable().type(ColumnTypeEnum.LONG); - version.onTable("TRM_CONCEPT_DESIG").addForeignKey("20181104.3", "FK_CONCEPTDESIG_CSV").toColumn("CS_VER_PID").references("TRM_CODESYSTEM_VER", "PID"); + version.onTable("TRM_CONCEPT_DESIG") + .addColumn("20181104.2", "CS_VER_PID") + .nullable() + .type(ColumnTypeEnum.LONG); + version.onTable("TRM_CONCEPT_DESIG") + .addForeignKey("20181104.3", "FK_CONCEPTDESIG_CSV") + .toColumn("CS_VER_PID") + .references("TRM_CODESYSTEM_VER", "PID"); - version.onTable("TRM_CONCEPT_PROPERTY").addColumn("20181104.4", "CS_VER_PID").nullable().type(ColumnTypeEnum.LONG); - version.onTable("TRM_CONCEPT_PROPERTY").addForeignKey("20181104.5", "FK_CONCEPTPROP_CSV").toColumn("CS_VER_PID").references("TRM_CODESYSTEM_VER", "PID"); - - version.onTable("TRM_CONCEPT").addColumn("20181104.6", "PARENT_PIDS").nullable().type(ColumnTypeEnum.CLOB); + version.onTable("TRM_CONCEPT_PROPERTY") + .addColumn("20181104.4", "CS_VER_PID") + .nullable() + .type(ColumnTypeEnum.LONG); + version.onTable("TRM_CONCEPT_PROPERTY") + 
.addForeignKey("20181104.5", "FK_CONCEPTPROP_CSV") + .toColumn("CS_VER_PID") + .references("TRM_CODESYSTEM_VER", "PID"); + version.onTable("TRM_CONCEPT") + .addColumn("20181104.6", "PARENT_PIDS") + .nullable() + .type(ColumnTypeEnum.CLOB); } private void init350() { // 20180601 - 20180917 @@ -2012,266 +2518,296 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { Builder.BuilderWithTableName forcedId = version.onTable("HFJ_FORCED_ID"); version.startSectionWithMessage("Starting work on table: " + forcedId.getTableName()); - forcedId - .dropIndex("20180827.1", "IDX_FORCEDID_TYPE_FORCEDID"); - forcedId - .dropIndex("20180827.2", "IDX_FORCEDID_TYPE_RESID"); + forcedId.dropIndex("20180827.1", "IDX_FORCEDID_TYPE_FORCEDID"); + forcedId.dropIndex("20180827.2", "IDX_FORCEDID_TYPE_RESID"); - forcedId - .addIndex("20180827.3", "IDX_FORCEDID_TYPE_FID") - .unique(true) - .withColumns("RESOURCE_TYPE", "FORCED_ID"); + forcedId.addIndex("20180827.3", "IDX_FORCEDID_TYPE_FID").unique(true).withColumns("RESOURCE_TYPE", "FORCED_ID"); // Indexes - Coords Builder.BuilderWithTableName spidxCoords = version.onTable("HFJ_SPIDX_COORDS"); version.startSectionWithMessage("Starting work on table: " + spidxCoords.getTableName()); - spidxCoords - .addColumn("20180903.1", "HASH_IDENTITY") - .nullable() - .type(ColumnTypeEnum.LONG); + spidxCoords.addColumn("20180903.1", "HASH_IDENTITY").nullable().type(ColumnTypeEnum.LONG); if (!myFlags.contains(FlagEnum.NO_MIGRATE_HASHES)) { + spidxCoords.dropIndex("20180903.2", "IDX_SP_COORDS"); spidxCoords - .dropIndex("20180903.2", "IDX_SP_COORDS"); - spidxCoords - .addIndex("20180903.4", "IDX_SP_COORDS_HASH") - .unique(false) - .withColumns("HASH_IDENTITY", "SP_LATITUDE", "SP_LONGITUDE"); - spidxCoords - .addTask(new CalculateHashesTask(VersionEnum.V3_5_0, "20180903.5") - .addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(new PartitionSettings(), RequestPartitionId.defaultPartition(), t.getResourceType(), t.getString("SP_NAME"))) - .setColumnName("HASH_IDENTITY") - ); + .addIndex("20180903.4", "IDX_SP_COORDS_HASH") + .unique(false) + .withColumns("HASH_IDENTITY", "SP_LATITUDE", "SP_LONGITUDE"); + spidxCoords.addTask(new CalculateHashesTask(VersionEnum.V3_5_0, "20180903.5") + .addCalculator( + "HASH_IDENTITY", + t -> BaseResourceIndexedSearchParam.calculateHashIdentity( + new PartitionSettings(), + RequestPartitionId.defaultPartition(), + t.getResourceType(), + t.getString("SP_NAME"))) + .setColumnName("HASH_IDENTITY")); } // Indexes - Date Builder.BuilderWithTableName spidxDate = version.onTable("HFJ_SPIDX_DATE"); version.startSectionWithMessage("Starting work on table: " + spidxDate.getTableName()); - spidxDate - .addColumn("20180903.6", "HASH_IDENTITY") - .nullable() - .type(ColumnTypeEnum.LONG); + spidxDate.addColumn("20180903.6", "HASH_IDENTITY").nullable().type(ColumnTypeEnum.LONG); if (!myFlags.contains(FlagEnum.NO_MIGRATE_HASHES)) { + spidxDate.dropIndex("20180903.7", "IDX_SP_TOKEN"); spidxDate - .dropIndex("20180903.7", "IDX_SP_TOKEN"); - spidxDate - .addIndex("20180903.8", "IDX_SP_DATE_HASH") - .unique(false) - .withColumns("HASH_IDENTITY", "SP_VALUE_LOW", "SP_VALUE_HIGH") - .doNothing(); - spidxDate - .dropIndex("20180903.9", "IDX_SP_DATE"); - spidxDate - .addTask(new CalculateHashesTask(VersionEnum.V3_5_0, "20180903.10") - .addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(new PartitionSettings(), RequestPartitionId.defaultPartition(), t.getResourceType(), 
t.getString("SP_NAME"))) - .setColumnName("HASH_IDENTITY") - ); + .addIndex("20180903.8", "IDX_SP_DATE_HASH") + .unique(false) + .withColumns("HASH_IDENTITY", "SP_VALUE_LOW", "SP_VALUE_HIGH") + .doNothing(); + spidxDate.dropIndex("20180903.9", "IDX_SP_DATE"); + spidxDate.addTask(new CalculateHashesTask(VersionEnum.V3_5_0, "20180903.10") + .addCalculator( + "HASH_IDENTITY", + t -> BaseResourceIndexedSearchParam.calculateHashIdentity( + new PartitionSettings(), + RequestPartitionId.defaultPartition(), + t.getResourceType(), + t.getString("SP_NAME"))) + .setColumnName("HASH_IDENTITY")); } // Indexes - Number Builder.BuilderWithTableName spidxNumber = version.onTable("HFJ_SPIDX_NUMBER"); version.startSectionWithMessage("Starting work on table: " + spidxNumber.getTableName()); - spidxNumber - .addColumn("20180903.11", "HASH_IDENTITY") - .nullable() - .type(ColumnTypeEnum.LONG); + spidxNumber.addColumn("20180903.11", "HASH_IDENTITY").nullable().type(ColumnTypeEnum.LONG); if (!myFlags.contains(FlagEnum.NO_MIGRATE_HASHES)) { + spidxNumber.dropIndex("20180903.12", "IDX_SP_NUMBER"); spidxNumber - .dropIndex("20180903.12", "IDX_SP_NUMBER"); - spidxNumber - .addIndex("20180903.13", "IDX_SP_NUMBER_HASH_VAL") - .unique(false) - .withColumns("HASH_IDENTITY", "SP_VALUE") - .doNothing(); - spidxNumber - .addTask(new CalculateHashesTask(VersionEnum.V3_5_0, "20180903.14") - .addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(new PartitionSettings(), RequestPartitionId.defaultPartition(), t.getResourceType(), t.getString("SP_NAME"))) - .setColumnName("HASH_IDENTITY") - ); + .addIndex("20180903.13", "IDX_SP_NUMBER_HASH_VAL") + .unique(false) + .withColumns("HASH_IDENTITY", "SP_VALUE") + .doNothing(); + spidxNumber.addTask(new CalculateHashesTask(VersionEnum.V3_5_0, "20180903.14") + .addCalculator( + "HASH_IDENTITY", + t -> BaseResourceIndexedSearchParam.calculateHashIdentity( + new PartitionSettings(), + RequestPartitionId.defaultPartition(), + t.getResourceType(), + t.getString("SP_NAME"))) + .setColumnName("HASH_IDENTITY")); } // Indexes - Quantity Builder.BuilderWithTableName spidxQuantity = version.onTable("HFJ_SPIDX_QUANTITY"); version.startSectionWithMessage("Starting work on table: " + spidxQuantity.getTableName()); + spidxQuantity.addColumn("20180903.15", "HASH_IDENTITY").nullable().type(ColumnTypeEnum.LONG); spidxQuantity - .addColumn("20180903.15", "HASH_IDENTITY") - .nullable() - .type(ColumnTypeEnum.LONG); + .addColumn("20180903.16", "HASH_IDENTITY_SYS_UNITS") + .nullable() + .type(ColumnTypeEnum.LONG); spidxQuantity - .addColumn("20180903.16", "HASH_IDENTITY_SYS_UNITS") - .nullable() - .type(ColumnTypeEnum.LONG); - spidxQuantity - .addColumn("20180903.17", "HASH_IDENTITY_AND_UNITS") - .nullable() - .type(ColumnTypeEnum.LONG); + .addColumn("20180903.17", "HASH_IDENTITY_AND_UNITS") + .nullable() + .type(ColumnTypeEnum.LONG); if (!myFlags.contains(FlagEnum.NO_MIGRATE_HASHES)) { + spidxQuantity.dropIndex("20180903.18", "IDX_SP_QUANTITY"); spidxQuantity - .dropIndex("20180903.18", "IDX_SP_QUANTITY"); + .addIndex("20180903.19", "IDX_SP_QUANTITY_HASH") + .unique(false) + .withColumns("HASH_IDENTITY", "SP_VALUE"); spidxQuantity - .addIndex("20180903.19", "IDX_SP_QUANTITY_HASH") - .unique(false) - .withColumns("HASH_IDENTITY", "SP_VALUE"); + .addIndex("20180903.20", "IDX_SP_QUANTITY_HASH_UN") + .unique(false) + .withColumns("HASH_IDENTITY_AND_UNITS", "SP_VALUE"); spidxQuantity - .addIndex("20180903.20", "IDX_SP_QUANTITY_HASH_UN") - .unique(false) - 
.withColumns("HASH_IDENTITY_AND_UNITS", "SP_VALUE"); - spidxQuantity - .addIndex("20180903.21", "IDX_SP_QUANTITY_HASH_SYSUN") - .unique(false) - .withColumns("HASH_IDENTITY_SYS_UNITS", "SP_VALUE"); - spidxQuantity - .addTask(new CalculateHashesTask(VersionEnum.V3_5_0, "20180903.22") - .addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(new PartitionSettings(), RequestPartitionId.defaultPartition(), t.getResourceType(), t.getString("SP_NAME"))) - .addCalculator("HASH_IDENTITY_AND_UNITS", t -> ResourceIndexedSearchParamQuantity.calculateHashUnits(new PartitionSettings(), RequestPartitionId.defaultPartition(), t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_UNITS"))) - .addCalculator("HASH_IDENTITY_SYS_UNITS", t -> ResourceIndexedSearchParamQuantity.calculateHashSystemAndUnits(new PartitionSettings(), RequestPartitionId.defaultPartition(), t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_SYSTEM"), t.getString("SP_UNITS"))) - .setColumnName("HASH_IDENTITY") - ); + .addIndex("20180903.21", "IDX_SP_QUANTITY_HASH_SYSUN") + .unique(false) + .withColumns("HASH_IDENTITY_SYS_UNITS", "SP_VALUE"); + spidxQuantity.addTask(new CalculateHashesTask(VersionEnum.V3_5_0, "20180903.22") + .addCalculator( + "HASH_IDENTITY", + t -> BaseResourceIndexedSearchParam.calculateHashIdentity( + new PartitionSettings(), + RequestPartitionId.defaultPartition(), + t.getResourceType(), + t.getString("SP_NAME"))) + .addCalculator( + "HASH_IDENTITY_AND_UNITS", + t -> ResourceIndexedSearchParamQuantity.calculateHashUnits( + new PartitionSettings(), + RequestPartitionId.defaultPartition(), + t.getResourceType(), + t.getString("SP_NAME"), + t.getString("SP_UNITS"))) + .addCalculator( + "HASH_IDENTITY_SYS_UNITS", + t -> ResourceIndexedSearchParamQuantity.calculateHashSystemAndUnits( + new PartitionSettings(), + RequestPartitionId.defaultPartition(), + t.getResourceType(), + t.getString("SP_NAME"), + t.getString("SP_SYSTEM"), + t.getString("SP_UNITS"))) + .setColumnName("HASH_IDENTITY")); } // Indexes - String Builder.BuilderWithTableName spidxString = version.onTable("HFJ_SPIDX_STRING"); version.startSectionWithMessage("Starting work on table: " + spidxString.getTableName()); - spidxString - .addColumn("20180903.23", "HASH_NORM_PREFIX") - .nullable() - .type(ColumnTypeEnum.LONG); + spidxString.addColumn("20180903.23", "HASH_NORM_PREFIX").nullable().type(ColumnTypeEnum.LONG); if (!myFlags.contains(FlagEnum.NO_MIGRATE_HASHES)) { + spidxString.dropIndex("20180903.24", "IDX_SP_STRING"); spidxString - .dropIndex("20180903.24", "IDX_SP_STRING"); + .addIndex("20180903.25", "IDX_SP_STRING_HASH_NRM") + .unique(false) + .withColumns("HASH_NORM_PREFIX", "SP_VALUE_NORMALIZED"); + spidxString.addColumn("20180903.26", "HASH_EXACT").nullable().type(ColumnTypeEnum.LONG); spidxString - .addIndex("20180903.25", "IDX_SP_STRING_HASH_NRM") - .unique(false) - .withColumns("HASH_NORM_PREFIX", "SP_VALUE_NORMALIZED"); - spidxString - .addColumn("20180903.26", "HASH_EXACT") - .nullable() - .type(ColumnTypeEnum.LONG); - spidxString - .addIndex("20180903.27", "IDX_SP_STRING_HASH_EXCT") - .unique(false) - .withColumns("HASH_EXACT"); - spidxString - .addTask(new CalculateHashesTask(VersionEnum.V3_5_0, "20180903.28") + .addIndex("20180903.27", "IDX_SP_STRING_HASH_EXCT") + .unique(false) + .withColumns("HASH_EXACT"); + spidxString.addTask(new CalculateHashesTask(VersionEnum.V3_5_0, "20180903.28") .setColumnName("HASH_NORM_PREFIX") - .addCalculator("HASH_NORM_PREFIX", t -> 
ResourceIndexedSearchParamString.calculateHashNormalized(new PartitionSettings(), RequestPartitionId.defaultPartition(), new StorageSettings(), t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_VALUE_NORMALIZED"))) - .addCalculator("HASH_EXACT", t -> ResourceIndexedSearchParamString.calculateHashExact(new PartitionSettings(), (ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId) null, t.getResourceType(), t.getParamName(), t.getString("SP_VALUE_EXACT"))) - ); + .addCalculator( + "HASH_NORM_PREFIX", + t -> ResourceIndexedSearchParamString.calculateHashNormalized( + new PartitionSettings(), + RequestPartitionId.defaultPartition(), + new StorageSettings(), + t.getResourceType(), + t.getString("SP_NAME"), + t.getString("SP_VALUE_NORMALIZED"))) + .addCalculator( + "HASH_EXACT", + t -> ResourceIndexedSearchParamString.calculateHashExact( + new PartitionSettings(), + (ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId) null, + t.getResourceType(), + t.getParamName(), + t.getString("SP_VALUE_EXACT")))); } // Indexes - Token Builder.BuilderWithTableName spidxToken = version.onTable("HFJ_SPIDX_TOKEN"); version.startSectionWithMessage("Starting work on table: " + spidxToken.getTableName()); - spidxToken - .addColumn("20180903.29", "HASH_IDENTITY") - .nullable() - .type(ColumnTypeEnum.LONG); - spidxToken - .addColumn("20180903.30", "HASH_SYS") - .nullable() - .type(ColumnTypeEnum.LONG); - spidxToken - .addColumn("20180903.31", "HASH_SYS_AND_VALUE") - .nullable() - .type(ColumnTypeEnum.LONG); - spidxToken - .addColumn("20180903.32", "HASH_VALUE") - .nullable() - .type(ColumnTypeEnum.LONG); + spidxToken.addColumn("20180903.29", "HASH_IDENTITY").nullable().type(ColumnTypeEnum.LONG); + spidxToken.addColumn("20180903.30", "HASH_SYS").nullable().type(ColumnTypeEnum.LONG); + spidxToken.addColumn("20180903.31", "HASH_SYS_AND_VALUE").nullable().type(ColumnTypeEnum.LONG); + spidxToken.addColumn("20180903.32", "HASH_VALUE").nullable().type(ColumnTypeEnum.LONG); if (!myFlags.contains(FlagEnum.NO_MIGRATE_HASHES)) { + spidxToken.dropIndex("20180903.33", "IDX_SP_TOKEN"); + spidxToken.dropIndex("20180903.34", "IDX_SP_TOKEN_UNQUAL"); spidxToken - .dropIndex("20180903.33", "IDX_SP_TOKEN"); + .addIndex("20180903.35", "IDX_SP_TOKEN_HASH") + .unique(false) + .withColumns("HASH_IDENTITY") + .doNothing(); spidxToken - .dropIndex("20180903.34", "IDX_SP_TOKEN_UNQUAL"); + .addIndex("20180903.36", "IDX_SP_TOKEN_HASH_S") + .unique(false) + .withColumns("HASH_SYS") + .doNothing(); spidxToken - .addIndex("20180903.35", "IDX_SP_TOKEN_HASH") - .unique(false) - .withColumns("HASH_IDENTITY") - .doNothing(); + .addIndex("20180903.37", "IDX_SP_TOKEN_HASH_SV") + .unique(false) + .withColumns("HASH_SYS_AND_VALUE") + .doNothing(); spidxToken - .addIndex("20180903.36", "IDX_SP_TOKEN_HASH_S") - .unique(false) - .withColumns("HASH_SYS") - .doNothing(); - spidxToken - .addIndex("20180903.37", "IDX_SP_TOKEN_HASH_SV") - .unique(false) - .withColumns("HASH_SYS_AND_VALUE") - .doNothing(); - spidxToken - .addIndex("20180903.38", "IDX_SP_TOKEN_HASH_V") - .unique(false) - .withColumns("HASH_VALUE") - .doNothing(); - spidxToken - .addTask(new CalculateHashesTask(VersionEnum.V3_5_0, "20180903.39") + .addIndex("20180903.38", "IDX_SP_TOKEN_HASH_V") + .unique(false) + .withColumns("HASH_VALUE") + .doNothing(); + spidxToken.addTask(new CalculateHashesTask(VersionEnum.V3_5_0, "20180903.39") .setColumnName("HASH_IDENTITY") - .addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(new PartitionSettings(), 
RequestPartitionId.defaultPartition(), t.getResourceType(), t.getString("SP_NAME"))) - .addCalculator("HASH_SYS", t -> ResourceIndexedSearchParamToken.calculateHashSystem(new PartitionSettings(), RequestPartitionId.defaultPartition(), t.getResourceType(), t.getParamName(), t.getString("SP_SYSTEM"))) - .addCalculator("HASH_SYS_AND_VALUE", t -> ResourceIndexedSearchParamToken.calculateHashSystemAndValue(new PartitionSettings(), RequestPartitionId.defaultPartition(), t.getResourceType(), t.getParamName(), t.getString("SP_SYSTEM"), t.getString("SP_VALUE"))) - .addCalculator("HASH_VALUE", t -> ResourceIndexedSearchParamToken.calculateHashValue(new PartitionSettings(), RequestPartitionId.defaultPartition(), t.getResourceType(), t.getParamName(), t.getString("SP_VALUE"))) - ); + .addCalculator( + "HASH_IDENTITY", + t -> BaseResourceIndexedSearchParam.calculateHashIdentity( + new PartitionSettings(), + RequestPartitionId.defaultPartition(), + t.getResourceType(), + t.getString("SP_NAME"))) + .addCalculator( + "HASH_SYS", + t -> ResourceIndexedSearchParamToken.calculateHashSystem( + new PartitionSettings(), + RequestPartitionId.defaultPartition(), + t.getResourceType(), + t.getParamName(), + t.getString("SP_SYSTEM"))) + .addCalculator( + "HASH_SYS_AND_VALUE", + t -> ResourceIndexedSearchParamToken.calculateHashSystemAndValue( + new PartitionSettings(), + RequestPartitionId.defaultPartition(), + t.getResourceType(), + t.getParamName(), + t.getString("SP_SYSTEM"), + t.getString("SP_VALUE"))) + .addCalculator( + "HASH_VALUE", + t -> ResourceIndexedSearchParamToken.calculateHashValue( + new PartitionSettings(), + RequestPartitionId.defaultPartition(), + t.getResourceType(), + t.getParamName(), + t.getString("SP_VALUE")))); } // Indexes - URI Builder.BuilderWithTableName spidxUri = version.onTable("HFJ_SPIDX_URI"); version.startSectionWithMessage("Starting work on table: " + spidxUri.getTableName()); - spidxUri - .addColumn("20180903.40", "HASH_IDENTITY") - .nullable() - .type(ColumnTypeEnum.LONG); + spidxUri.addColumn("20180903.40", "HASH_IDENTITY").nullable().type(ColumnTypeEnum.LONG); if (!myFlags.contains(FlagEnum.NO_MIGRATE_HASHES)) { - spidxUri - .addIndex("20180903.41", "IDX_SP_URI_HASH_IDENTITY") - .unique(false) - .withColumns("HASH_IDENTITY", "SP_URI"); - spidxUri - .addColumn("20180903.42", "HASH_URI") - .nullable() - .type(ColumnTypeEnum.LONG); - spidxUri - .addIndex("20180903.43", "IDX_SP_URI_HASH_URI") - .unique(false) - .withColumns("HASH_URI"); - spidxUri - .addTask(new CalculateHashesTask(VersionEnum.V3_5_0, "20180903.44") + spidxUri.addIndex("20180903.41", "IDX_SP_URI_HASH_IDENTITY") + .unique(false) + .withColumns("HASH_IDENTITY", "SP_URI"); + spidxUri.addColumn("20180903.42", "HASH_URI").nullable().type(ColumnTypeEnum.LONG); + spidxUri.addIndex("20180903.43", "IDX_SP_URI_HASH_URI") + .unique(false) + .withColumns("HASH_URI"); + spidxUri.addTask(new CalculateHashesTask(VersionEnum.V3_5_0, "20180903.44") .setColumnName("HASH_IDENTITY") - .addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(new PartitionSettings(), (RequestPartitionId) null, t.getResourceType(), t.getString("SP_NAME"))) - .addCalculator("HASH_URI", t -> ResourceIndexedSearchParamUri.calculateHashUri(new PartitionSettings(), (RequestPartitionId) null, t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_URI"))) - ); + .addCalculator( + "HASH_IDENTITY", + t -> BaseResourceIndexedSearchParam.calculateHashIdentity( + new PartitionSettings(), + (RequestPartitionId) null, + 
t.getResourceType(), + t.getString("SP_NAME"))) + .addCalculator( + "HASH_URI", + t -> ResourceIndexedSearchParamUri.calculateHashUri( + new PartitionSettings(), + (RequestPartitionId) null, + t.getResourceType(), + t.getString("SP_NAME"), + t.getString("SP_URI")))); } // Search Parameter Presence Builder.BuilderWithTableName spp = version.onTable("HFJ_RES_PARAM_PRESENT"); version.startSectionWithMessage("Starting work on table: " + spp.getTableName()); spp.dropIndex("20180903.45", "IDX_RESPARMPRESENT_SPID_RESID"); - spp - .addColumn("20180903.46", "HASH_PRESENCE") - .nullable() - .type(ColumnTypeEnum.LONG); - spp - .addIndex("20180903.47", "IDX_RESPARMPRESENT_HASHPRES") - .unique(false) - .withColumns("HASH_PRESENCE"); + spp.addColumn("20180903.46", "HASH_PRESENCE").nullable().type(ColumnTypeEnum.LONG); + spp.addIndex("20180903.47", "IDX_RESPARMPRESENT_HASHPRES").unique(false).withColumns("HASH_PRESENCE"); - ArbitrarySqlTask consolidateSearchParamPresenceIndexesTask = new ArbitrarySqlTask(VersionEnum.V3_5_0, "20180903.48", "HFJ_SEARCH_PARM", "Consolidate search parameter presence indexes"); + ArbitrarySqlTask consolidateSearchParamPresenceIndexesTask = new ArbitrarySqlTask( + VersionEnum.V3_5_0, "20180903.48", "HFJ_SEARCH_PARM", "Consolidate search parameter presence indexes"); consolidateSearchParamPresenceIndexesTask.setExecuteOnlyIfTableExists("HFJ_SEARCH_PARM"); consolidateSearchParamPresenceIndexesTask.setBatchSize(1); - String sql = "SELECT " + - "HFJ_SEARCH_PARM.RES_TYPE RES_TYPE, HFJ_SEARCH_PARM.PARAM_NAME PARAM_NAME, " + - "HFJ_RES_PARAM_PRESENT.PID PID, HFJ_RES_PARAM_PRESENT.SP_ID SP_ID, HFJ_RES_PARAM_PRESENT.SP_PRESENT SP_PRESENT, HFJ_RES_PARAM_PRESENT.HASH_PRESENCE HASH_PRESENCE " + - "from HFJ_RES_PARAM_PRESENT " + - "join HFJ_SEARCH_PARM ON (HFJ_SEARCH_PARM.PID = HFJ_RES_PARAM_PRESENT.SP_ID) " + - "where HFJ_RES_PARAM_PRESENT.HASH_PRESENCE is null"; + String sql = "SELECT " + "HFJ_SEARCH_PARM.RES_TYPE RES_TYPE, HFJ_SEARCH_PARM.PARAM_NAME PARAM_NAME, " + + "HFJ_RES_PARAM_PRESENT.PID PID, HFJ_RES_PARAM_PRESENT.SP_ID SP_ID, HFJ_RES_PARAM_PRESENT.SP_PRESENT SP_PRESENT, HFJ_RES_PARAM_PRESENT.HASH_PRESENCE HASH_PRESENCE " + + "from HFJ_RES_PARAM_PRESENT " + + "join HFJ_SEARCH_PARM ON (HFJ_SEARCH_PARM.PID = HFJ_RES_PARAM_PRESENT.SP_ID) " + + "where HFJ_RES_PARAM_PRESENT.HASH_PRESENCE is null"; consolidateSearchParamPresenceIndexesTask.addExecuteOnlyIfColumnExists("HFJ_RES_PARAM_PRESENT", "SP_ID"); - consolidateSearchParamPresenceIndexesTask.addQuery(sql, ArbitrarySqlTask.QueryModeEnum.BATCH_UNTIL_NO_MORE, t -> { - Number pid = (Number) t.get("PID"); - Boolean present = columnToBoolean(t.get("SP_PRESENT")); - String resType = (String) t.get("RES_TYPE"); - String paramName = (String) t.get("PARAM_NAME"); - Long hash = SearchParamPresentEntity.calculateHashPresence(new PartitionSettings(), (RequestPartitionId) null, resType, paramName, present); - consolidateSearchParamPresenceIndexesTask.executeSql("HFJ_RES_PARAM_PRESENT", "update HFJ_RES_PARAM_PRESENT set HASH_PRESENCE = ? 
where PID = ?", hash, pid); - }); + consolidateSearchParamPresenceIndexesTask.addQuery( + sql, ArbitrarySqlTask.QueryModeEnum.BATCH_UNTIL_NO_MORE, t -> { + Number pid = (Number) t.get("PID"); + Boolean present = columnToBoolean(t.get("SP_PRESENT")); + String resType = (String) t.get("RES_TYPE"); + String paramName = (String) t.get("PARAM_NAME"); + Long hash = SearchParamPresentEntity.calculateHashPresence( + new PartitionSettings(), (RequestPartitionId) null, resType, paramName, present); + consolidateSearchParamPresenceIndexesTask.executeSql( + "HFJ_RES_PARAM_PRESENT", + "update HFJ_RES_PARAM_PRESENT set HASH_PRESENCE = ? where PID = ?", + hash, + pid); + }); version.addTask(consolidateSearchParamPresenceIndexesTask); // SP_ID is no longer needed @@ -2280,157 +2816,353 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // Concept Builder.BuilderWithTableName trmConcept = version.onTable("TRM_CONCEPT"); version.startSectionWithMessage("Starting work on table: " + trmConcept.getTableName()); - trmConcept - .addColumn("20180903.50", "CONCEPT_UPDATED") - .nullable() - .type(ColumnTypeEnum.DATE_TIMESTAMP); - trmConcept - .addIndex("20180903.51", "IDX_CONCEPT_UPDATED") - .unique(false) - .withColumns("CONCEPT_UPDATED"); - trmConcept - .modifyColumn("20180903.52", "CODE") - .nonNullable() - .withType(ColumnTypeEnum.STRING, 500); + trmConcept.addColumn("20180903.50", "CONCEPT_UPDATED").nullable().type(ColumnTypeEnum.DATE_TIMESTAMP); + trmConcept.addIndex("20180903.51", "IDX_CONCEPT_UPDATED").unique(false).withColumns("CONCEPT_UPDATED"); + trmConcept.modifyColumn("20180903.52", "CODE").nonNullable().withType(ColumnTypeEnum.STRING, 500); // Concept Designation version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_DESIG"); - version - .addTableRawSql("20180907.1", "TRM_CONCEPT_DESIG") - .addSql(DriverTypeEnum.H2_EMBEDDED, "create table TRM_CONCEPT_DESIG (PID bigint not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))") - .addSql(DriverTypeEnum.H2_EMBEDDED, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER") - .addSql(DriverTypeEnum.H2_EMBEDDED, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT") - .addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_DESIG (PID bigint not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))") - .addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER") - .addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT") - .addSql(DriverTypeEnum.MYSQL_5_7, "create table TRM_CONCEPT_DESIG (PID bigint not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID)) ENGINE=InnoDB") - .addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER (PID)") - .addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_DESIG add 
constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT (PID)") - .addSql(DriverTypeEnum.MARIADB_10_1, "create table TRM_CONCEPT_DESIG (PID bigint not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))") - .addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER (PID)") - .addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT (PID)") - .addSql(DriverTypeEnum.ORACLE_12C, "create table TRM_CONCEPT_DESIG (PID number(19,0) not null, LANG varchar2(500 char), USE_CODE varchar2(500 char), USE_DISPLAY varchar2(500 char), USE_SYSTEM varchar2(500 char), VAL varchar2(500 char) not null, CS_VER_PID number(19,0), CONCEPT_PID number(19,0), primary key (PID))") - .addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER") - .addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT") - .addSql(DriverTypeEnum.POSTGRES_9_4, "create table TRM_CONCEPT_DESIG (PID int8 not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID int8, CONCEPT_PID int8, primary key (PID))") - .addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER") - .addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT") - .addSql(DriverTypeEnum.MSSQL_2012, "create table TRM_CONCEPT_DESIG (PID bigint not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))") - .addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER") - .addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT"); + version.addTableRawSql("20180907.1", "TRM_CONCEPT_DESIG") + .addSql( + DriverTypeEnum.H2_EMBEDDED, + "create table TRM_CONCEPT_DESIG (PID bigint not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))") + .addSql( + DriverTypeEnum.H2_EMBEDDED, + "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER") + .addSql( + DriverTypeEnum.H2_EMBEDDED, + "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT") + .addSql( + DriverTypeEnum.DERBY_EMBEDDED, + "create table TRM_CONCEPT_DESIG (PID bigint not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))") + .addSql( + DriverTypeEnum.DERBY_EMBEDDED, + "alter table TRM_CONCEPT_DESIG add constraint 
FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER") + .addSql( + DriverTypeEnum.DERBY_EMBEDDED, + "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT") + .addSql( + DriverTypeEnum.MYSQL_5_7, + "create table TRM_CONCEPT_DESIG (PID bigint not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID)) ENGINE=InnoDB") + .addSql( + DriverTypeEnum.MYSQL_5_7, + "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER (PID)") + .addSql( + DriverTypeEnum.MYSQL_5_7, + "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT (PID)") + .addSql( + DriverTypeEnum.MARIADB_10_1, + "create table TRM_CONCEPT_DESIG (PID bigint not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))") + .addSql( + DriverTypeEnum.MARIADB_10_1, + "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER (PID)") + .addSql( + DriverTypeEnum.MARIADB_10_1, + "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT (PID)") + .addSql( + DriverTypeEnum.ORACLE_12C, + "create table TRM_CONCEPT_DESIG (PID number(19,0) not null, LANG varchar2(500 char), USE_CODE varchar2(500 char), USE_DISPLAY varchar2(500 char), USE_SYSTEM varchar2(500 char), VAL varchar2(500 char) not null, CS_VER_PID number(19,0), CONCEPT_PID number(19,0), primary key (PID))") + .addSql( + DriverTypeEnum.ORACLE_12C, + "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER") + .addSql( + DriverTypeEnum.ORACLE_12C, + "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT") + .addSql( + DriverTypeEnum.POSTGRES_9_4, + "create table TRM_CONCEPT_DESIG (PID int8 not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID int8, CONCEPT_PID int8, primary key (PID))") + .addSql( + DriverTypeEnum.POSTGRES_9_4, + "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER") + .addSql( + DriverTypeEnum.POSTGRES_9_4, + "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT") + .addSql( + DriverTypeEnum.MSSQL_2012, + "create table TRM_CONCEPT_DESIG (PID bigint not null, LANG varchar(500), USE_CODE varchar(500), USE_DISPLAY varchar(500), USE_SYSTEM varchar(500), VAL varchar(500) not null, CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))") + .addSql( + DriverTypeEnum.MSSQL_2012, + "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER") + .addSql( + DriverTypeEnum.MSSQL_2012, + "alter table TRM_CONCEPT_DESIG add constraint FK_CONCEPTDESIG_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT"); // Concept Property version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_PROPERTY"); - version - .addTableRawSql("20180907.2", "TRM_CONCEPT_PROPERTY") - 
.addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE integer not null, PROP_VAL varchar(500), CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))") - .addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER") - .addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT") - .addSql(DriverTypeEnum.MARIADB_10_1, "create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE integer not null, PROP_VAL varchar(500), CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))") - .addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER (PID)") - .addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT (PID)") - .addSql(DriverTypeEnum.MYSQL_5_7, "create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE integer not null, PROP_VAL varchar(500), CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))") - .addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER (PID)") - .addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT (PID)") - .addSql(DriverTypeEnum.ORACLE_12C, "create table TRM_CONCEPT_PROPERTY (PID number(19,0) not null, PROP_CODESYSTEM varchar2(500 char), PROP_DISPLAY varchar2(500 char), PROP_KEY varchar2(500 char) not null, PROP_TYPE number(10,0) not null, PROP_VAL varchar2(500 char), CS_VER_PID number(19,0), CONCEPT_PID number(19,0), primary key (PID))") - .addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER") - .addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT") - .addSql(DriverTypeEnum.POSTGRES_9_4, "create table TRM_CONCEPT_PROPERTY (PID int8 not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE int4 not null, PROP_VAL varchar(500), CS_VER_PID int8, CONCEPT_PID int8, primary key (PID))") - .addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER") - .addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT") - .addSql(DriverTypeEnum.MSSQL_2012, "create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE int not null, PROP_VAL varchar(500), CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))") - .addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_PROPERTY add constraint 
FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER") - .addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT"); + version.addTableRawSql("20180907.2", "TRM_CONCEPT_PROPERTY") + .addSql( + DriverTypeEnum.DERBY_EMBEDDED, + "create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE integer not null, PROP_VAL varchar(500), CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))") + .addSql( + DriverTypeEnum.DERBY_EMBEDDED, + "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER") + .addSql( + DriverTypeEnum.DERBY_EMBEDDED, + "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT") + .addSql( + DriverTypeEnum.MARIADB_10_1, + "create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE integer not null, PROP_VAL varchar(500), CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))") + .addSql( + DriverTypeEnum.MARIADB_10_1, + "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER (PID)") + .addSql( + DriverTypeEnum.MARIADB_10_1, + "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT (PID)") + .addSql( + DriverTypeEnum.MYSQL_5_7, + "create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE integer not null, PROP_VAL varchar(500), CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))") + .addSql( + DriverTypeEnum.MYSQL_5_7, + "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER (PID)") + .addSql( + DriverTypeEnum.MYSQL_5_7, + "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT (PID)") + .addSql( + DriverTypeEnum.ORACLE_12C, + "create table TRM_CONCEPT_PROPERTY (PID number(19,0) not null, PROP_CODESYSTEM varchar2(500 char), PROP_DISPLAY varchar2(500 char), PROP_KEY varchar2(500 char) not null, PROP_TYPE number(10,0) not null, PROP_VAL varchar2(500 char), CS_VER_PID number(19,0), CONCEPT_PID number(19,0), primary key (PID))") + .addSql( + DriverTypeEnum.ORACLE_12C, + "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER") + .addSql( + DriverTypeEnum.ORACLE_12C, + "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT") + .addSql( + DriverTypeEnum.POSTGRES_9_4, + "create table TRM_CONCEPT_PROPERTY (PID int8 not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE int4 not null, PROP_VAL varchar(500), CS_VER_PID int8, CONCEPT_PID int8, primary key (PID))") + .addSql( + DriverTypeEnum.POSTGRES_9_4, + "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER") + .addSql( + DriverTypeEnum.POSTGRES_9_4, + "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT") + .addSql( + 
DriverTypeEnum.MSSQL_2012, + "create table TRM_CONCEPT_PROPERTY (PID bigint not null, PROP_CODESYSTEM varchar(500), PROP_DISPLAY varchar(500), PROP_KEY varchar(500) not null, PROP_TYPE int not null, PROP_VAL varchar(500), CS_VER_PID bigint, CONCEPT_PID bigint, primary key (PID))") + .addSql( + DriverTypeEnum.MSSQL_2012, + "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CSV foreign key (CS_VER_PID) references TRM_CODESYSTEM_VER") + .addSql( + DriverTypeEnum.MSSQL_2012, + "alter table TRM_CONCEPT_PROPERTY add constraint FK_CONCEPTPROP_CONCEPT foreign key (CONCEPT_PID) references TRM_CONCEPT"); // Concept Map - Map version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_MAP"); - version - .addTableRawSql("20180907.3", "TRM_CONCEPT_MAP") - .addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_MAP (PID bigint not null, RES_ID bigint, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID))") - .addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE") - .addSql(DriverTypeEnum.MYSQL_5_7, "create table TRM_CONCEPT_MAP (PID bigint not null, RES_ID bigint, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID))") - .addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_MAP add constraint IDX_CONCEPT_MAP_URL unique (URL)") - .addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE (RES_ID)") - .addSql(DriverTypeEnum.ORACLE_12C, "create table TRM_CONCEPT_MAP (PID number(19,0) not null, RES_ID number(19,0), SOURCE_URL varchar2(200 char), TARGET_URL varchar2(200 char), URL varchar2(200 char) not null, primary key (PID))") - .addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_MAP add constraint IDX_CONCEPT_MAP_URL unique (URL)") - .addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE") - .addSql(DriverTypeEnum.POSTGRES_9_4, "create table TRM_CONCEPT_MAP (PID int8 not null, RES_ID int8, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID))") - .addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE") - .addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_MAP add constraint IDX_CONCEPT_MAP_URL unique (URL)") - .addSql(DriverTypeEnum.MSSQL_2012, "create table TRM_CONCEPT_MAP (PID bigint not null, RES_ID bigint, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID))") - .addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_MAP add constraint IDX_CONCEPT_MAP_URL unique (URL)") - .addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE") - .addSql(DriverTypeEnum.MARIADB_10_1, "create table TRM_CONCEPT_MAP (PID bigint not null, RES_ID bigint, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID))") - .addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE (RES_ID)") - .addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_MAP add constraint IDX_CONCEPT_MAP_URL unique (URL)"); + 
version.addTableRawSql("20180907.3", "TRM_CONCEPT_MAP") + .addSql( + DriverTypeEnum.DERBY_EMBEDDED, + "create table TRM_CONCEPT_MAP (PID bigint not null, RES_ID bigint, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID))") + .addSql( + DriverTypeEnum.DERBY_EMBEDDED, + "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE") + .addSql( + DriverTypeEnum.MYSQL_5_7, + "create table TRM_CONCEPT_MAP (PID bigint not null, RES_ID bigint, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID))") + .addSql( + DriverTypeEnum.MYSQL_5_7, + "alter table TRM_CONCEPT_MAP add constraint IDX_CONCEPT_MAP_URL unique (URL)") + .addSql( + DriverTypeEnum.MYSQL_5_7, + "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE (RES_ID)") + .addSql( + DriverTypeEnum.ORACLE_12C, + "create table TRM_CONCEPT_MAP (PID number(19,0) not null, RES_ID number(19,0), SOURCE_URL varchar2(200 char), TARGET_URL varchar2(200 char), URL varchar2(200 char) not null, primary key (PID))") + .addSql( + DriverTypeEnum.ORACLE_12C, + "alter table TRM_CONCEPT_MAP add constraint IDX_CONCEPT_MAP_URL unique (URL)") + .addSql( + DriverTypeEnum.ORACLE_12C, + "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE") + .addSql( + DriverTypeEnum.POSTGRES_9_4, + "create table TRM_CONCEPT_MAP (PID int8 not null, RES_ID int8, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID))") + .addSql( + DriverTypeEnum.POSTGRES_9_4, + "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE") + .addSql( + DriverTypeEnum.POSTGRES_9_4, + "alter table TRM_CONCEPT_MAP add constraint IDX_CONCEPT_MAP_URL unique (URL)") + .addSql( + DriverTypeEnum.MSSQL_2012, + "create table TRM_CONCEPT_MAP (PID bigint not null, RES_ID bigint, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID))") + .addSql( + DriverTypeEnum.MSSQL_2012, + "alter table TRM_CONCEPT_MAP add constraint IDX_CONCEPT_MAP_URL unique (URL)") + .addSql( + DriverTypeEnum.MSSQL_2012, + "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE") + .addSql( + DriverTypeEnum.MARIADB_10_1, + "create table TRM_CONCEPT_MAP (PID bigint not null, RES_ID bigint, SOURCE_URL varchar(200), TARGET_URL varchar(200), URL varchar(200) not null, primary key (PID))") + .addSql( + DriverTypeEnum.MARIADB_10_1, + "alter table TRM_CONCEPT_MAP add constraint FK_TRMCONCEPTMAP_RES foreign key (RES_ID) references HFJ_RESOURCE (RES_ID)") + .addSql( + DriverTypeEnum.MARIADB_10_1, + "alter table TRM_CONCEPT_MAP add constraint IDX_CONCEPT_MAP_URL unique (URL)"); // Concept Map - Group version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_MAP_GROUP"); - version - .addTableRawSql("20180907.4", "TRM_CONCEPT_MAP_GROUP") - .addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_MAP_GROUP (PID bigint not null, myConceptMapUrl varchar(255), SOURCE_URL varchar(200) not null, mySourceValueSet varchar(255), SOURCE_VERSION varchar(100), TARGET_URL varchar(200) not null, myTargetValueSet varchar(255), TARGET_VERSION varchar(100), CONCEPT_MAP_PID bigint not null, primary key (PID))") - .addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign 
key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP") - .addSql(DriverTypeEnum.DERBY_EMBEDDED, "create unique index IDX_CONCEPT_MAP_URL on TRM_CONCEPT_MAP (URL)") - .addSql(DriverTypeEnum.ORACLE_12C, "create table TRM_CONCEPT_MAP_GROUP (PID number(19,0) not null, myConceptMapUrl varchar2(255 char), SOURCE_URL varchar2(200 char) not null, mySourceValueSet varchar2(255 char), SOURCE_VERSION varchar2(100 char), TARGET_URL varchar2(200 char) not null, myTargetValueSet varchar2(255 char), TARGET_VERSION varchar2(100 char), CONCEPT_MAP_PID number(19,0) not null, primary key (PID))") - .addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP") - .addSql(DriverTypeEnum.MARIADB_10_1, "create table TRM_CONCEPT_MAP_GROUP (PID bigint not null, myConceptMapUrl varchar(255), SOURCE_URL varchar(200) not null, mySourceValueSet varchar(255), SOURCE_VERSION varchar(100), TARGET_URL varchar(200) not null, myTargetValueSet varchar(255), TARGET_VERSION varchar(100), CONCEPT_MAP_PID bigint not null, primary key (PID))") - .addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP (PID)") - .addSql(DriverTypeEnum.MYSQL_5_7, "create table TRM_CONCEPT_MAP_GROUP (PID bigint not null, myConceptMapUrl varchar(255), SOURCE_URL varchar(200) not null, mySourceValueSet varchar(255), SOURCE_VERSION varchar(100), TARGET_URL varchar(200) not null, myTargetValueSet varchar(255), TARGET_VERSION varchar(100), CONCEPT_MAP_PID bigint not null, primary key (PID))") - .addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP (PID)") - .addSql(DriverTypeEnum.MSSQL_2012, "create table TRM_CONCEPT_MAP_GROUP (PID bigint not null, myConceptMapUrl varchar(255), SOURCE_URL varchar(200) not null, mySourceValueSet varchar(255), SOURCE_VERSION varchar(100), TARGET_URL varchar(200) not null, myTargetValueSet varchar(255), TARGET_VERSION varchar(100), CONCEPT_MAP_PID bigint not null, primary key (PID))") - .addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP") - .addSql(DriverTypeEnum.POSTGRES_9_4, "create table TRM_CONCEPT_MAP_GROUP (PID int8 not null, myConceptMapUrl varchar(255), SOURCE_URL varchar(200) not null, mySourceValueSet varchar(255), SOURCE_VERSION varchar(100), TARGET_URL varchar(200) not null, myTargetValueSet varchar(255), TARGET_VERSION varchar(100), CONCEPT_MAP_PID int8 not null, primary key (PID))") - .addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP"); + version.addTableRawSql("20180907.4", "TRM_CONCEPT_MAP_GROUP") + .addSql( + DriverTypeEnum.DERBY_EMBEDDED, + "create table TRM_CONCEPT_MAP_GROUP (PID bigint not null, myConceptMapUrl varchar(255), SOURCE_URL varchar(200) not null, mySourceValueSet varchar(255), SOURCE_VERSION varchar(100), TARGET_URL varchar(200) not null, myTargetValueSet varchar(255), TARGET_VERSION varchar(100), CONCEPT_MAP_PID bigint not null, primary key (PID))") + .addSql( + DriverTypeEnum.DERBY_EMBEDDED, + "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP") + .addSql( + 
DriverTypeEnum.DERBY_EMBEDDED, + "create unique index IDX_CONCEPT_MAP_URL on TRM_CONCEPT_MAP (URL)") + .addSql( + DriverTypeEnum.ORACLE_12C, + "create table TRM_CONCEPT_MAP_GROUP (PID number(19,0) not null, myConceptMapUrl varchar2(255 char), SOURCE_URL varchar2(200 char) not null, mySourceValueSet varchar2(255 char), SOURCE_VERSION varchar2(100 char), TARGET_URL varchar2(200 char) not null, myTargetValueSet varchar2(255 char), TARGET_VERSION varchar2(100 char), CONCEPT_MAP_PID number(19,0) not null, primary key (PID))") + .addSql( + DriverTypeEnum.ORACLE_12C, + "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP") + .addSql( + DriverTypeEnum.MARIADB_10_1, + "create table TRM_CONCEPT_MAP_GROUP (PID bigint not null, myConceptMapUrl varchar(255), SOURCE_URL varchar(200) not null, mySourceValueSet varchar(255), SOURCE_VERSION varchar(100), TARGET_URL varchar(200) not null, myTargetValueSet varchar(255), TARGET_VERSION varchar(100), CONCEPT_MAP_PID bigint not null, primary key (PID))") + .addSql( + DriverTypeEnum.MARIADB_10_1, + "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP (PID)") + .addSql( + DriverTypeEnum.MYSQL_5_7, + "create table TRM_CONCEPT_MAP_GROUP (PID bigint not null, myConceptMapUrl varchar(255), SOURCE_URL varchar(200) not null, mySourceValueSet varchar(255), SOURCE_VERSION varchar(100), TARGET_URL varchar(200) not null, myTargetValueSet varchar(255), TARGET_VERSION varchar(100), CONCEPT_MAP_PID bigint not null, primary key (PID))") + .addSql( + DriverTypeEnum.MYSQL_5_7, + "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP (PID)") + .addSql( + DriverTypeEnum.MSSQL_2012, + "create table TRM_CONCEPT_MAP_GROUP (PID bigint not null, myConceptMapUrl varchar(255), SOURCE_URL varchar(200) not null, mySourceValueSet varchar(255), SOURCE_VERSION varchar(100), TARGET_URL varchar(200) not null, myTargetValueSet varchar(255), TARGET_VERSION varchar(100), CONCEPT_MAP_PID bigint not null, primary key (PID))") + .addSql( + DriverTypeEnum.MSSQL_2012, + "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP") + .addSql( + DriverTypeEnum.POSTGRES_9_4, + "create table TRM_CONCEPT_MAP_GROUP (PID int8 not null, myConceptMapUrl varchar(255), SOURCE_URL varchar(200) not null, mySourceValueSet varchar(255), SOURCE_VERSION varchar(100), TARGET_URL varchar(200) not null, myTargetValueSet varchar(255), TARGET_VERSION varchar(100), CONCEPT_MAP_PID int8 not null, primary key (PID))") + .addSql( + DriverTypeEnum.POSTGRES_9_4, + "alter table TRM_CONCEPT_MAP_GROUP add constraint FK_TCMGROUP_CONCEPTMAP foreign key (CONCEPT_MAP_PID) references TRM_CONCEPT_MAP"); // Concept Map - Group Element version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_MAP_GRP_ELEMENT"); - version - .addTableRawSql("20180907.5", "TRM_CONCEPT_MAP_GRP_ELEMENT") - .addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varchar(500) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID bigint not null, primary key (PID))") - .addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign 
key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP") - .addSql(DriverTypeEnum.MARIADB_10_1, "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varchar(500) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID bigint not null, primary key (PID))") - .addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP (PID)") - .addSql(DriverTypeEnum.MARIADB_10_1, "create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE)") - .addSql(DriverTypeEnum.DERBY_EMBEDDED, "create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE)") - .addSql(DriverTypeEnum.MYSQL_5_7, "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varchar(500) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID bigint not null, primary key (PID))") - .addSql(DriverTypeEnum.MYSQL_5_7, "create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE)") - .addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP (PID)") - .addSql(DriverTypeEnum.POSTGRES_9_4, "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID int8 not null, SOURCE_CODE varchar(500) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID int8 not null, primary key (PID))") - .addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP") - .addSql(DriverTypeEnum.POSTGRES_9_4, "create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE)") - .addSql(DriverTypeEnum.ORACLE_12C, "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID number(19,0) not null, SOURCE_CODE varchar2(500 char) not null, myConceptMapUrl varchar2(255 char), SOURCE_DISPLAY varchar2(400 char), mySystem varchar2(255 char), mySystemVersion varchar2(255 char), myValueSet varchar2(255 char), CONCEPT_MAP_GROUP_PID number(19,0) not null, primary key (PID))") - .addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP") - .addSql(DriverTypeEnum.ORACLE_12C, "create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE)") - .addSql(DriverTypeEnum.MSSQL_2012, "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varchar(500) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID bigint not null, primary key (PID))") - .addSql(DriverTypeEnum.MSSQL_2012, "create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE)") - .addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP"); + version.addTableRawSql("20180907.5", "TRM_CONCEPT_MAP_GRP_ELEMENT") + .addSql( + DriverTypeEnum.DERBY_EMBEDDED, + 
"create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varchar(500) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID bigint not null, primary key (PID))") + .addSql( + DriverTypeEnum.DERBY_EMBEDDED, + "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP") + .addSql( + DriverTypeEnum.MARIADB_10_1, + "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varchar(500) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID bigint not null, primary key (PID))") + .addSql( + DriverTypeEnum.MARIADB_10_1, + "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP (PID)") + .addSql( + DriverTypeEnum.MARIADB_10_1, + "create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE)") + .addSql( + DriverTypeEnum.DERBY_EMBEDDED, + "create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE)") + .addSql( + DriverTypeEnum.MYSQL_5_7, + "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varchar(500) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID bigint not null, primary key (PID))") + .addSql( + DriverTypeEnum.MYSQL_5_7, + "create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE)") + .addSql( + DriverTypeEnum.MYSQL_5_7, + "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP (PID)") + .addSql( + DriverTypeEnum.POSTGRES_9_4, + "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID int8 not null, SOURCE_CODE varchar(500) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GROUP_PID int8 not null, primary key (PID))") + .addSql( + DriverTypeEnum.POSTGRES_9_4, + "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP") + .addSql( + DriverTypeEnum.POSTGRES_9_4, + "create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE)") + .addSql( + DriverTypeEnum.ORACLE_12C, + "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID number(19,0) not null, SOURCE_CODE varchar2(500 char) not null, myConceptMapUrl varchar2(255 char), SOURCE_DISPLAY varchar2(400 char), mySystem varchar2(255 char), mySystemVersion varchar2(255 char), myValueSet varchar2(255 char), CONCEPT_MAP_GROUP_PID number(19,0) not null, primary key (PID))") + .addSql( + DriverTypeEnum.ORACLE_12C, + "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP") + .addSql( + DriverTypeEnum.ORACLE_12C, + "create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE)") + .addSql( + DriverTypeEnum.MSSQL_2012, + "create table TRM_CONCEPT_MAP_GRP_ELEMENT (PID bigint not null, SOURCE_CODE varchar(500) not null, myConceptMapUrl varchar(255), SOURCE_DISPLAY varchar(400), mySystem varchar(255), mySystemVersion varchar(255), myValueSet 
varchar(255), CONCEPT_MAP_GROUP_PID bigint not null, primary key (PID))") + .addSql( + DriverTypeEnum.MSSQL_2012, + "create index IDX_CNCPT_MAP_GRP_CD on TRM_CONCEPT_MAP_GRP_ELEMENT (SOURCE_CODE)") + .addSql( + DriverTypeEnum.MSSQL_2012, + "alter table TRM_CONCEPT_MAP_GRP_ELEMENT add constraint FK_TCMGELEMENT_GROUP foreign key (CONCEPT_MAP_GROUP_PID) references TRM_CONCEPT_MAP_GROUP"); // Concept Map - Group Element Target version.startSectionWithMessage("Starting work on table: TRM_CONCEPT_MAP_GRP_ELM_TGT"); - version - .addTableRawSql("20180907.6", "TRM_CONCEPT_MAP_GRP_ELM_TGT") - .addSql(DriverTypeEnum.DERBY_EMBEDDED, "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(500) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID))") - .addSql(DriverTypeEnum.DERBY_EMBEDDED, "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT") - .addSql(DriverTypeEnum.DERBY_EMBEDDED, "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)") - .addSql(DriverTypeEnum.MARIADB_10_1, "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(500) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID))") - .addSql(DriverTypeEnum.MARIADB_10_1, "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT (PID)") - .addSql(DriverTypeEnum.MARIADB_10_1, "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)") - .addSql(DriverTypeEnum.MYSQL_5_7, "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(500) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID))") - .addSql(DriverTypeEnum.MYSQL_5_7, "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT (PID)") - .addSql(DriverTypeEnum.MYSQL_5_7, "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)") - .addSql(DriverTypeEnum.ORACLE_12C, "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID number(19,0) not null, TARGET_CODE varchar2(500 char) not null, myConceptMapUrl varchar2(255 char), TARGET_DISPLAY varchar2(400 char), TARGET_EQUIVALENCE varchar2(50 char), mySystem varchar2(255 char), mySystemVersion varchar2(255 char), myValueSet varchar2(255 char), CONCEPT_MAP_GRP_ELM_PID number(19,0) not null, primary key (PID))") - .addSql(DriverTypeEnum.ORACLE_12C, "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT") - .addSql(DriverTypeEnum.ORACLE_12C, "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)") - .addSql(DriverTypeEnum.POSTGRES_9_4, "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID int8 not null, TARGET_CODE varchar(500) not 
null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID int8 not null, primary key (PID))") - .addSql(DriverTypeEnum.POSTGRES_9_4, "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT") - .addSql(DriverTypeEnum.POSTGRES_9_4, "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)") - .addSql(DriverTypeEnum.MSSQL_2012, "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(500) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID))") - .addSql(DriverTypeEnum.MSSQL_2012, "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)") - .addSql(DriverTypeEnum.MSSQL_2012, "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT"); - - version.onTable("HFJ_IDX_CMP_STRING_UNIQ").modifyColumn("20180907.7", "IDX_STRING").nonNullable().withType(ColumnTypeEnum.STRING, 200); - + version.addTableRawSql("20180907.6", "TRM_CONCEPT_MAP_GRP_ELM_TGT") + .addSql( + DriverTypeEnum.DERBY_EMBEDDED, + "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(500) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID))") + .addSql( + DriverTypeEnum.DERBY_EMBEDDED, + "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT") + .addSql( + DriverTypeEnum.DERBY_EMBEDDED, + "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)") + .addSql( + DriverTypeEnum.MARIADB_10_1, + "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(500) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID))") + .addSql( + DriverTypeEnum.MARIADB_10_1, + "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT (PID)") + .addSql( + DriverTypeEnum.MARIADB_10_1, + "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)") + .addSql( + DriverTypeEnum.MYSQL_5_7, + "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(500) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID))") + .addSql( + DriverTypeEnum.MYSQL_5_7, + "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT (PID)") + .addSql( + DriverTypeEnum.MYSQL_5_7, + "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on 
TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)") + .addSql( + DriverTypeEnum.ORACLE_12C, + "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID number(19,0) not null, TARGET_CODE varchar2(500 char) not null, myConceptMapUrl varchar2(255 char), TARGET_DISPLAY varchar2(400 char), TARGET_EQUIVALENCE varchar2(50 char), mySystem varchar2(255 char), mySystemVersion varchar2(255 char), myValueSet varchar2(255 char), CONCEPT_MAP_GRP_ELM_PID number(19,0) not null, primary key (PID))") + .addSql( + DriverTypeEnum.ORACLE_12C, + "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT") + .addSql( + DriverTypeEnum.ORACLE_12C, + "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)") + .addSql( + DriverTypeEnum.POSTGRES_9_4, + "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID int8 not null, TARGET_CODE varchar(500) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID int8 not null, primary key (PID))") + .addSql( + DriverTypeEnum.POSTGRES_9_4, + "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT") + .addSql( + DriverTypeEnum.POSTGRES_9_4, + "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)") + .addSql( + DriverTypeEnum.MSSQL_2012, + "create table TRM_CONCEPT_MAP_GRP_ELM_TGT (PID bigint not null, TARGET_CODE varchar(500) not null, myConceptMapUrl varchar(255), TARGET_DISPLAY varchar(400), TARGET_EQUIVALENCE varchar(50), mySystem varchar(255), mySystemVersion varchar(255), myValueSet varchar(255), CONCEPT_MAP_GRP_ELM_PID bigint not null, primary key (PID))") + .addSql( + DriverTypeEnum.MSSQL_2012, + "create index IDX_CNCPT_MP_GRP_ELM_TGT_CD on TRM_CONCEPT_MAP_GRP_ELM_TGT (TARGET_CODE)") + .addSql( + DriverTypeEnum.MSSQL_2012, + "alter table TRM_CONCEPT_MAP_GRP_ELM_TGT add constraint FK_TCMGETARGET_ELEMENT foreign key (CONCEPT_MAP_GRP_ELM_PID) references TRM_CONCEPT_MAP_GRP_ELEMENT"); + version.onTable("HFJ_IDX_CMP_STRING_UNIQ") + .modifyColumn("20180907.7", "IDX_STRING") + .nonNullable() + .withType(ColumnTypeEnum.STRING, 200); } private Boolean columnToBoolean(Object theValue) { @@ -2451,38 +3183,28 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { // CodeSystem Version Builder.BuilderWithTableName resourceLink = version.onTable("TRM_CODESYSTEM_VER"); version.startSectionWithMessage("Starting work on table: " + resourceLink.getTableName()); + resourceLink.dropIndex("20180401.1", "IDX_CSV_RESOURCEPID_AND_VER"); + resourceLink.dropColumn("20180401.2", "RES_VERSION_ID"); + resourceLink.addColumn("20180401.3", "CS_VERSION_ID").nullable().type(ColumnTypeEnum.STRING, 255); + resourceLink.addColumn("20180401.4", "CODESYSTEM_PID").nullable().type(ColumnTypeEnum.LONG); resourceLink - .dropIndex("20180401.1", "IDX_CSV_RESOURCEPID_AND_VER"); - resourceLink - .dropColumn("20180401.2", "RES_VERSION_ID"); - resourceLink - .addColumn("20180401.3", "CS_VERSION_ID") - .nullable() - .type(ColumnTypeEnum.STRING, 255); - resourceLink - .addColumn("20180401.4", "CODESYSTEM_PID") - .nullable() - .type(ColumnTypeEnum.LONG); - resourceLink - .addForeignKey("20180401.5", "FK_CODESYSVER_CS_ID") - .toColumn("CODESYSTEM_PID") - .references("TRM_CODESYSTEM", "PID"); + .addForeignKey("20180401.5", 
"FK_CODESYSVER_CS_ID") + .toColumn("CODESYSTEM_PID") + .references("TRM_CODESYSTEM", "PID"); // Concept Builder.BuilderWithTableName concept = version.onTable("TRM_CONCEPT"); version.startSectionWithMessage("Starting work on table: " + concept.getTableName()); - concept - .addColumn("20180401.6", "CODE_SEQUENCE") - .nullable() - .type(ColumnTypeEnum.INT); - - + concept.addColumn("20180401.6", "CODE_SEQUENCE").nullable().type(ColumnTypeEnum.INT); } protected void init330() { // 20180114 - 20180329 Builder version = forVersion(VersionEnum.V3_3_0); - version.initializeSchema("20180115.0", new SchemaInitializationProvider("HAPI FHIR", "/ca/uhn/hapi/fhir/jpa/docs/database", "HFJ_RESOURCE", true)); + version.initializeSchema( + "20180115.0", + new SchemaInitializationProvider( + "HAPI FHIR", "/ca/uhn/hapi/fhir/jpa/docs/database", "HFJ_RESOURCE", true)); Builder.BuilderWithTableName hfjResource = version.onTable("HFJ_RESOURCE"); version.startSectionWithMessage("Starting work on table: " + hfjResource.getTableName()); @@ -2491,10 +3213,8 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { Builder.BuilderWithTableName hfjResVer = version.onTable("HFJ_RES_VER"); version.startSectionWithMessage("Starting work on table: " + hfjResVer.getTableName()); - hfjResVer.modifyColumn("20180115.3", "RES_ENCODING") - .nullable(); - hfjResVer.modifyColumn("20180115.4", "RES_TEXT") - .nullable(); + hfjResVer.modifyColumn("20180115.3", "RES_ENCODING").nullable(); + hfjResVer.modifyColumn("20180115.4", "RES_TEXT").nullable(); } public enum FlagEnum { @@ -2507,13 +3227,17 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks { } public static FlagEnum fromCommandLineValue(String theCommandLineValue) { - Optional retVal = Arrays.stream(values()).filter(t -> t.myCommandLineValue.equals(theCommandLineValue)).findFirst(); + Optional retVal = Arrays.stream(values()) + .filter(t -> t.myCommandLineValue.equals(theCommandLineValue)) + .findFirst(); return retVal.orElseThrow(() -> { - List validValues = Arrays.stream(values()).map(t -> t.myCommandLineValue).sorted().collect(Collectors.toList()); - return new IllegalArgumentException("Invalid flag \"" + theCommandLineValue + "\". Valid values: " + validValues); + List validValues = Arrays.stream(values()) + .map(t -> t.myCommandLineValue) + .sorted() + .collect(Collectors.toList()); + return new IllegalArgumentException( + "Invalid flag \"" + theCommandLineValue + "\". 
Valid values: " + validValues); }); } } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/model/cross/JpaResourceLookup.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/model/cross/JpaResourceLookup.java index 5463da85369..2982ab20296 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/model/cross/JpaResourceLookup.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/model/cross/JpaResourceLookup.java @@ -24,25 +24,25 @@ import ca.uhn.fhir.jpa.model.dao.JpaPid; import java.util.Date; public class JpaResourceLookup implements IResourceLookup { - private final String myResourceType; - private final Long myResourcePid; - private final Date myDeletedAt; + private final String myResourceType; + private final Long myResourcePid; + private final Date myDeletedAt; public JpaResourceLookup(String theResourceType, Long theResourcePid, Date theDeletedAt) { - myResourceType = theResourceType; - myResourcePid = theResourcePid; - myDeletedAt = theDeletedAt; - } + myResourceType = theResourceType; + myResourcePid = theResourcePid; + myDeletedAt = theDeletedAt; + } - @Override - public String getResourceType() { - return myResourceType; - } + @Override + public String getResourceType() { + return myResourceType; + } - @Override - public Date getDeleted() { - return myDeletedAt; - } + @Override + public Date getDeleted() { + return myDeletedAt; + } @Override public JpaPid getPersistentId() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/IHapiPackageCacheManager.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/IHapiPackageCacheManager.java index e598cdcc596..a151698f446 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/IHapiPackageCacheManager.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/IHapiPackageCacheManager.java @@ -45,7 +45,6 @@ public interface IHapiPackageCacheManager extends IPackageCacheManager { List loadPackageAssetsByType(FhirVersionEnum theFhirVersion, String theResourceType); - class PackageContents { private byte[] myBytes; @@ -96,5 +95,4 @@ public interface IHapiPackageCacheManager extends IPackageCacheManager { return this; } } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/IPackageInstallerSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/IPackageInstallerSvc.java index bb4d9cc5d0c..c7d0cd95548 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/IPackageInstallerSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/IPackageInstallerSvc.java @@ -24,5 +24,4 @@ public interface IPackageInstallerSvc { PackageInstallOutcomeJson install(PackageInstallationSpec theSpec); PackageDeleteOutcomeJson uninstall(PackageInstallationSpec theSpec); - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java index 16102cc8281..442c4e7b7c8 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java @@ -39,11 +39,11 @@ import ca.uhn.fhir.jpa.model.entity.NpmPackageVersionEntity; import ca.uhn.fhir.jpa.model.entity.NpmPackageVersionResourceEntity; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.util.JpaConstants; -import 
ca.uhn.fhir.rest.api.server.SystemRequestDetails; import ca.uhn.fhir.jpa.packages.loader.NpmPackageData; import ca.uhn.fhir.jpa.packages.loader.PackageLoaderSvc; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.EncodingEnum; +import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; @@ -70,17 +70,6 @@ import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.support.TransactionTemplate; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.TypedQuery; -import javax.persistence.criteria.CriteriaBuilder; -import javax.persistence.criteria.CriteriaQuery; -import javax.persistence.criteria.Join; -import javax.persistence.criteria.JoinType; -import javax.persistence.criteria.Predicate; -import javax.persistence.criteria.Root; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; @@ -94,6 +83,17 @@ import java.util.List; import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; +import javax.persistence.TypedQuery; +import javax.persistence.criteria.CriteriaBuilder; +import javax.persistence.criteria.CriteriaQuery; +import javax.persistence.criteria.Join; +import javax.persistence.criteria.JoinType; +import javax.persistence.criteria.Predicate; +import javax.persistence.criteria.Root; import static ca.uhn.fhir.jpa.util.QueryParameterUtils.toPredicateArray; import static ca.uhn.fhir.util.StringUtil.toUtf8String; @@ -105,27 +105,35 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac public static final String UTF8_BOM = "\uFEFF"; private static final Logger ourLog = LoggerFactory.getLogger(JpaPackageCache.class); private final Map myVersionToContext = Collections.synchronizedMap(new HashMap<>()); + @PersistenceContext protected EntityManager myEntityManager; + @Autowired private INpmPackageDao myPackageDao; + @Autowired private INpmPackageVersionDao myPackageVersionDao; + @Autowired private INpmPackageVersionResourceDao myPackageVersionResourceDao; + @Autowired private DaoRegistry myDaoRegistry; + @Autowired private FhirContext myCtx; + @Autowired private PlatformTransactionManager myTxManager; + @Autowired private PartitionSettings myPartitionSettings; @Autowired private PackageLoaderSvc myPackageLoaderSvc; - @Autowired(required = false)//It is possible that some implementers will not create such a bean. + @Autowired(required = false) // It is possible that some implementers will not create such a bean. 
private IBinaryStorageSvc myBinaryStorageSvc; @Override @@ -169,12 +177,13 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac lookupVersion = lookupVersion.substring(0, lookupVersion.length() - 2); } while (lookupVersion.endsWith(".x")); - List candidateVersionIds = myPackageVersionDao.findVersionIdsByPackageIdAndLikeVersion(theId, lookupVersion + ".%"); + List candidateVersionIds = + myPackageVersionDao.findVersionIdsByPackageIdAndLikeVersion(theId, lookupVersion + ".%"); if (candidateVersionIds.size() > 0) { candidateVersionIds.sort(PackageVersionComparator.INSTANCE); - packageVersion = loadPackageVersionEntity(theId, candidateVersionIds.get(candidateVersionIds.size() - 1)); + packageVersion = + loadPackageVersionEntity(theId, candidateVersionIds.get(candidateVersionIds.size() - 1)); } - } return packageVersion.map(t -> loadPackage(t)).orElse(null); @@ -189,7 +198,8 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac } else { Optional pkg = myPackageDao.findByPackageId(theId); if (pkg.isPresent()) { - packageVersion = myPackageVersionDao.findByPackageIdAndVersion(theId, pkg.get().getCurrentVersionId()); + packageVersion = myPackageVersionDao.findByPackageIdAndVersion( + theId, pkg.get().getCurrentVersionId()); } } return packageVersion; @@ -207,17 +217,19 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac private IHapiPackageCacheManager.PackageContents loadPackageContents(NpmPackageVersionEntity thePackageVersion) { IFhirResourceDao binaryDao = getBinaryDao(); - IBaseBinary binary = binaryDao.readByPid(JpaPid.fromId(thePackageVersion.getPackageBinary().getId())); + IBaseBinary binary = binaryDao.readByPid( + JpaPid.fromId(thePackageVersion.getPackageBinary().getId())); try { byte[] content = fetchBlobFromBinary(binary); PackageContents retVal = new PackageContents() - .setBytes(content) - .setPackageId(thePackageVersion.getPackageId()) - .setVersion(thePackageVersion.getVersionId()) - .setLastModified(thePackageVersion.getUpdatedTime()); + .setBytes(content) + .setPackageId(thePackageVersion.getPackageId()) + .setVersion(thePackageVersion.getVersionId()) + .setLastModified(thePackageVersion.getUpdatedTime()); return retVal; } catch (IOException e) { - throw new InternalErrorException(Msg.code(1295) + "Failed to load package. There was a problem reading binaries", e); + throw new InternalErrorException( + Msg.code(1295) + "Failed to load package. 
There was a problem reading binaries", e); } } @@ -235,7 +247,8 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac } else { byte[] value = BinaryUtil.getOrCreateData(myCtx, theBinary).getValue(); if (value == null) { - throw new InternalErrorException(Msg.code(1296) + "Failed to fetch blob from Binary/" + theBinary.getIdElement()); + throw new InternalErrorException( + Msg.code(1296) + "Failed to fetch blob from Binary/" + theBinary.getIdElement()); } return value; } @@ -246,19 +259,19 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac return myDaoRegistry.getResourceDao("Binary"); } - private NpmPackage addPackageToCacheInternal( - NpmPackageData thePackageData - ) { + private NpmPackage addPackageToCacheInternal(NpmPackageData thePackageData) { NpmPackage npmPackage = thePackageData.getPackage(); String packageId = thePackageData.getPackageId(); String initialPackageVersionId = thePackageData.getPackageVersionId(); byte[] bytes = thePackageData.getBytes(); if (!npmPackage.id().equalsIgnoreCase(packageId)) { - throw new InvalidRequestException(Msg.code(1297) + "Package ID " + npmPackage.id() + " doesn't match expected: " + packageId); + throw new InvalidRequestException( + Msg.code(1297) + "Package ID " + npmPackage.id() + " doesn't match expected: " + packageId); } if (!PackageVersionComparator.isEquivalent(initialPackageVersionId, npmPackage.version())) { - throw new InvalidRequestException(Msg.code(1298) + "Package ID " + npmPackage.version() + " doesn't match expected: " + initialPackageVersionId); + throw new InvalidRequestException(Msg.code(1298) + "Package ID " + npmPackage.version() + + " doesn't match expected: " + initialPackageVersionId); } String packageVersionId = npmPackage.version(); @@ -273,31 +286,39 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac return newTxTemplate().execute(tx -> { ResourceTable persistedPackage = createResourceBinary(binary); NpmPackageEntity pkg = myPackageDao.findByPackageId(packageId).orElseGet(() -> createPackage(npmPackage)); - NpmPackageVersionEntity packageVersion = myPackageVersionDao.findByPackageIdAndVersion(packageId, packageVersionId).orElse(null); + NpmPackageVersionEntity packageVersion = myPackageVersionDao + .findByPackageIdAndVersion(packageId, packageVersionId) + .orElse(null); if (packageVersion != null) { - NpmPackage existingPackage = loadPackageFromCacheOnly(packageVersion.getPackageId(), packageVersion.getVersionId()); - String msg = "Package version already exists in local storage, no action taken: " + packageId + "#" + packageVersionId; + NpmPackage existingPackage = + loadPackageFromCacheOnly(packageVersion.getPackageId(), packageVersion.getVersionId()); + String msg = "Package version already exists in local storage, no action taken: " + packageId + "#" + + packageVersionId; getProcessingMessages(existingPackage).add(msg); ourLog.info(msg); return existingPackage; } - boolean currentVersion = updateCurrentVersionFlagForAllPackagesBasedOnNewIncomingVersion(packageId, packageVersionId); + boolean currentVersion = + updateCurrentVersionFlagForAllPackagesBasedOnNewIncomingVersion(packageId, packageVersionId); String packageDesc = null; if (npmPackage.description() != null) { if (npmPackage.description().length() > NpmPackageVersionEntity.PACKAGE_DESC_LENGTH) { - packageDesc = npmPackage.description().substring(0, NpmPackageVersionEntity.PACKAGE_DESC_LENGTH - 4) + "..."; + packageDesc = npmPackage.description().substring(0, 
NpmPackageVersionEntity.PACKAGE_DESC_LENGTH - 4) + + "..."; } else { packageDesc = npmPackage.description(); } } if (currentVersion) { - getProcessingMessages(npmPackage).add("Marking package " + packageId + "#" + initialPackageVersionId + " as current version"); + getProcessingMessages(npmPackage) + .add("Marking package " + packageId + "#" + initialPackageVersionId + " as current version"); pkg.setCurrentVersionId(packageVersionId); pkg.setDescription(packageDesc); myPackageDao.save(pkg); } else { - getProcessingMessages(npmPackage).add("Package " + packageId + "#" + initialPackageVersionId + " is not the newest version"); + getProcessingMessages(npmPackage) + .add("Package " + packageId + "#" + initialPackageVersionId + " is not the newest version"); } packageVersion = new NpmPackageVersionEntity(); @@ -315,7 +336,8 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac String dirName = "package"; NpmPackage.NpmPackageFolder packageFolder = npmPackage.getFolders().get(dirName); - for (Map.Entry> nextTypeToFiles : packageFolder.getTypes().entrySet()) { + for (Map.Entry> nextTypeToFiles : + packageFolder.getTypes().entrySet()) { String nextType = nextTypeToFiles.getKey(); for (String nextFile : nextTypeToFiles.getValue()) { @@ -345,7 +367,10 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac */ String contentType = Constants.CT_FHIR_JSON_NEW; ResourceUtil.removeNarrative(packageContext, resource); - byte[] minimizedContents = packageContext.newJsonParser().encodeResourceToString(resource).getBytes(StandardCharsets.UTF_8); + byte[] minimizedContents = packageContext + .newJsonParser() + .encodeResourceToString(resource) + .getBytes(StandardCharsets.UTF_8); IBaseBinary resourceBinary = createPackageResourceBinary(nextFile, minimizedContents, contentType); ResourceTable persistedResource = createResourceBinary(resourceBinary); @@ -359,34 +384,48 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac resourceEntity.setFilename(nextFile); resourceEntity.setResourceType(nextType); resourceEntity.setResSizeBytes(contents.length); - BaseRuntimeChildDefinition urlChild = packageContext.getResourceDefinition(nextType).getChildByName("url"); - BaseRuntimeChildDefinition versionChild = packageContext.getResourceDefinition(nextType).getChildByName("version"); + BaseRuntimeChildDefinition urlChild = + packageContext.getResourceDefinition(nextType).getChildByName("url"); + BaseRuntimeChildDefinition versionChild = + packageContext.getResourceDefinition(nextType).getChildByName("version"); String url = null; String version = null; if (urlChild != null) { - url = urlChild.getAccessor().getFirstValueOrNull(resource).map(t -> ((IPrimitiveType) t).getValueAsString()).orElse(null); + url = urlChild.getAccessor() + .getFirstValueOrNull(resource) + .map(t -> ((IPrimitiveType) t).getValueAsString()) + .orElse(null); resourceEntity.setCanonicalUrl(url); - version = versionChild.getAccessor().getFirstValueOrNull(resource).map(t -> ((IPrimitiveType) t).getValueAsString()).orElse(null); + version = versionChild + .getAccessor() + .getFirstValueOrNull(resource) + .map(t -> ((IPrimitiveType) t).getValueAsString()) + .orElse(null); resourceEntity.setCanonicalVersion(version); } myPackageVersionResourceDao.save(resourceEntity); String resType = packageContext.getResourceType(resource); - String msg = "Indexing " + resType + " Resource[" + dirName + '/' + nextFile + "] with URL: " + defaultString(url) + "|" + 
defaultString(version); + String msg = "Indexing " + resType + " Resource[" + dirName + '/' + nextFile + "] with URL: " + + defaultString(url) + "|" + defaultString(version); getProcessingMessages(npmPackage).add(msg); ourLog.info("Package[{}#{}] " + msg, packageId, packageVersionId); } } - getProcessingMessages(npmPackage).add("Successfully added package " + npmPackage.id() + "#" + npmPackage.version() + " to registry"); + getProcessingMessages(npmPackage) + .add("Successfully added package " + npmPackage.id() + "#" + npmPackage.version() + " to registry"); return npmPackage; }); } @Override - public NpmPackage addPackageToCache(String thePackageId, String thePackageVersionId, InputStream thePackageTgzInputStream, String theSourceDesc) throws IOException { - NpmPackageData npmData = myPackageLoaderSvc.createNpmPackageDataFromData(thePackageId, thePackageVersionId, theSourceDesc, thePackageTgzInputStream); + public NpmPackage addPackageToCache( + String thePackageId, String thePackageVersionId, InputStream thePackageTgzInputStream, String theSourceDesc) + throws IOException { + NpmPackageData npmData = myPackageLoaderSvc.createNpmPackageDataFromData( + thePackageId, thePackageVersionId, theSourceDesc, thePackageTgzInputStream); return addPackageToCacheInternal(npmData); } @@ -396,17 +435,20 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac if (myPartitionSettings.isPartitioningEnabled()) { SystemRequestDetails requestDetails = new SystemRequestDetails(); if (myPartitionSettings.isUnnamedPartitionMode() && myPartitionSettings.getDefaultPartitionId() != null) { - requestDetails.setRequestPartitionId(RequestPartitionId.fromPartitionId(myPartitionSettings.getDefaultPartitionId())); + requestDetails.setRequestPartitionId( + RequestPartitionId.fromPartitionId(myPartitionSettings.getDefaultPartitionId())); } else { requestDetails.setTenantId(JpaConstants.DEFAULT_PARTITION_NAME); } - return (ResourceTable) getBinaryDao().create(theResourceBinary, requestDetails).getEntity(); + return (ResourceTable) + getBinaryDao().create(theResourceBinary, requestDetails).getEntity(); } else { return (ResourceTable) getBinaryDao().create(theResourceBinary).getEntity(); } } - private boolean updateCurrentVersionFlagForAllPackagesBasedOnNewIncomingVersion(String thePackageId, String thePackageVersion) { + private boolean updateCurrentVersionFlagForAllPackagesBasedOnNewIncomingVersion( + String thePackageId, String thePackageVersion) { Collection existingVersions = myPackageVersionDao.findByPackageId(thePackageId); boolean retVal = true; @@ -466,7 +508,10 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac // and add it to the cache NpmPackage retVal = addPackageToCacheInternal(pkgData); getProcessingMessages(retVal) - .add(0, "Package fetched from server at: " + pkgData.getPackage().url()); + .add( + 0, + "Package fetched from server at: " + + pkgData.getPackage().url()); return retVal; } finally { pkgData.getInputStream().close(); @@ -496,7 +541,11 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac } if (theInstallationSpec.getPackageContents() != null) { - return addPackageToCache(theInstallationSpec.getName(), theInstallationSpec.getVersion(), new ByteArrayInputStream(theInstallationSpec.getPackageContents()), sourceDescription); + return addPackageToCache( + theInstallationSpec.getName(), + theInstallationSpec.getVersion(), + new ByteArrayInputStream(theInstallationSpec.getPackageContents()), + 
sourceDescription); } return newTxTemplate().execute(tx -> { @@ -519,9 +568,11 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac if (versionSeparator != -1) { String canonicalVersion = canonicalUrl.substring(versionSeparator + 1); canonicalUrl = canonicalUrl.substring(0, versionSeparator); - slice = myPackageVersionResourceDao.findCurrentVersionByCanonicalUrlAndVersion(PageRequest.of(0, 1), theFhirVersion, canonicalUrl, canonicalVersion); + slice = myPackageVersionResourceDao.findCurrentVersionByCanonicalUrlAndVersion( + PageRequest.of(0, 1), theFhirVersion, canonicalUrl, canonicalVersion); } else { - slice = myPackageVersionResourceDao.findCurrentVersionByCanonicalUrl(PageRequest.of(0, 1), theFhirVersion, canonicalUrl); + slice = myPackageVersionResourceDao.findCurrentVersionByCanonicalUrl( + PageRequest.of(0, 1), theFhirVersion, canonicalUrl); } if (slice.isEmpty()) { @@ -536,10 +587,12 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac try { JpaPid binaryPid = JpaPid.fromId(contents.getResourceBinary().getId()); IBaseBinary binary = getBinaryDao().readByPid(binaryPid); - byte[] resourceContentsBytes= fetchBlobFromBinary(binary); + byte[] resourceContentsBytes = fetchBlobFromBinary(binary); String resourceContents = new String(resourceContentsBytes, StandardCharsets.UTF_8); FhirContext packageContext = getFhirContext(contents.getFhirVersion()); - return EncodingEnum.detectEncoding(resourceContents).newParser(packageContext).parseResource(resourceContents); + return EncodingEnum.detectEncoding(resourceContents) + .newParser(packageContext) + .parseResource(resourceContents); } catch (Exception e) { throw new RuntimeException(Msg.code(1305) + "Failed to load package resource " + contents, e); } @@ -555,8 +608,10 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac throw new ResourceNotFoundException(Msg.code(1306) + "Unknown package ID: " + thePackageId); } - List packageVersions = new ArrayList<>(myPackageVersionDao.findByPackageId(thePackageId)); - packageVersions.sort(new ReverseComparator<>((o1, o2) -> PackageVersionComparator.INSTANCE.compare(o1.getVersionId(), o2.getVersionId()))); + List packageVersions = + new ArrayList<>(myPackageVersionDao.findByPackageId(thePackageId)); + packageVersions.sort(new ReverseComparator<>( + (o1, o2) -> PackageVersionComparator.INSTANCE.compare(o1.getVersionId(), o2.getVersionId()))); for (NpmPackageVersionEntity next : packageVersions) { if (next.isCurrentVersion()) { @@ -570,7 +625,6 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac version.setVersion(next.getVersionId()); version.setBytes(next.getPackageSizeBytes()); retVal.addVersion(version); - } return retVal; @@ -620,14 +674,13 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac for (NpmPackageVersionEntity next : resultList) { if (!retVal.hasPackageWithId(next.getPackageId())) { - retVal - .addObject() - .getPackage() - .setName(next.getPackageId()) - .setDescription(next.getPackage().getDescription()) - .setVersion(next.getVersionId()) - .addFhirVersion(next.getFhirVersionId()) - .setBytes(next.getPackageSizeBytes()); + retVal.addObject() + .getPackage() + .setName(next.getPackageId()) + .setDescription(next.getPackage().getDescription()) + .setVersion(next.getVersionId()) + .addFhirVersion(next.getFhirVersionId()) + .setBytes(next.getPackageSizeBytes()); } else { NpmPackageSearchResultJson.Package retPackage = 
retVal.getPackageWithId(next.getPackageId()); retPackage.addFhirVersion(next.getFhirVersionId()); @@ -636,13 +689,10 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac if (cmp > 0) { retPackage.setVersion(next.getVersionId()); } - } } - } - return retVal; } @@ -651,7 +701,8 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac public PackageDeleteOutcomeJson uninstallPackage(String thePackageId, String theVersion) { PackageDeleteOutcomeJson retVal = new PackageDeleteOutcomeJson(); - Optional packageVersion = myPackageVersionDao.findByPackageIdAndVersion(thePackageId, theVersion); + Optional packageVersion = + myPackageVersionDao.findByPackageIdAndVersion(thePackageId, theVersion); if (packageVersion.isPresent()) { String msg = "Deleting package " + thePackageId + "#" + theVersion; @@ -667,14 +718,16 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac ExpungeOptions options = new ExpungeOptions(); options.setExpungeDeletedResources(true).setExpungeOldVersions(true); - deleteAndExpungeResourceBinary(next.getResourceBinary().getIdDt().toVersionless(), options); + deleteAndExpungeResourceBinary( + next.getResourceBinary().getIdDt().toVersionless(), options); } myPackageVersionDao.delete(packageVersion.get()); ExpungeOptions options = new ExpungeOptions(); options.setExpungeDeletedResources(true).setExpungeOldVersions(true); - deleteAndExpungeResourceBinary(packageVersion.get().getPackageBinary().getIdDt().toVersionless(), options); + deleteAndExpungeResourceBinary( + packageVersion.get().getPackageBinary().getIdDt().toVersionless(), options); Collection remainingVersions = myPackageVersionDao.findByPackageId(thePackageId); if (remainingVersions.size() == 0) { @@ -685,10 +738,10 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac myPackageDao.delete(pkgEntity.get()); } else { - List versions = remainingVersions - .stream() - .sorted((o1, o2) -> PackageVersionComparator.INSTANCE.compare(o1.getVersionId(), o2.getVersionId())) - .collect(Collectors.toList()); + List versions = remainingVersions.stream() + .sorted((o1, o2) -> + PackageVersionComparator.INSTANCE.compare(o1.getVersionId(), o2.getVersionId())) + .collect(Collectors.toList()); for (int i = 0; i < versions.size(); i++) { boolean isCurrent = i == versions.size() - 1; if (isCurrent != versions.get(i).isCurrentVersion()) { @@ -696,14 +749,12 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac myPackageVersionDao.save(versions.get(i)); } } - } } else { String msg = "No package found with the given ID"; retVal.getMessage().add(msg); - } return retVal; @@ -712,8 +763,9 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac @Override @Transactional public List loadPackageAssetsByType(FhirVersionEnum theFhirVersion, String theResourceType) { -// List outcome = myPackageVersionResourceDao.findAll(); - Slice outcome = myPackageVersionResourceDao.findCurrentVersionByResourceType(PageRequest.of(0, 1000), theFhirVersion, theResourceType); + // List outcome = myPackageVersionResourceDao.findAll(); + Slice outcome = myPackageVersionResourceDao.findCurrentVersionByResourceType( + PageRequest.of(0, 1000), theFhirVersion, theResourceType); return outcome.stream().map(t -> loadPackageEntity(t)).collect(Collectors.toList()); } @@ -722,15 +774,17 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac 
getBinaryDao().forceExpungeInExistingTransaction(theResourceBinaryId, theOptions, new SystemRequestDetails()); } - @Nonnull - public List createSearchPredicates(PackageSearchSpec thePackageSearchSpec, CriteriaBuilder theCb, Root theRoot) { + public List createSearchPredicates( + PackageSearchSpec thePackageSearchSpec, CriteriaBuilder theCb, Root theRoot) { List predicates = new ArrayList<>(); if (isNotBlank(thePackageSearchSpec.getResourceUrl())) { - Join resources = theRoot.join("myResources", JoinType.LEFT); + Join resources = + theRoot.join("myResources", JoinType.LEFT); - predicates.add(theCb.equal(resources.get("myCanonicalUrl").as(String.class), thePackageSearchSpec.getResourceUrl())); + predicates.add(theCb.equal( + resources.get("myCanonicalUrl").as(String.class), thePackageSearchSpec.getResourceUrl())); } if (isNotBlank(thePackageSearchSpec.getDescription())) { @@ -746,7 +800,8 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac predicates.add(theCb.equal(theRoot.get("myFhirVersion").as(FhirVersionEnum.class), versionEnum)); } } else { - predicates.add(theCb.like(theRoot.get("myFhirVersionId").as(String.class), thePackageSearchSpec.getFhirVersion() + "%")); + predicates.add(theCb.like( + theRoot.get("myFhirVersionId").as(String.class), thePackageSearchSpec.getFhirVersion() + "%")); } } @@ -755,8 +810,7 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac @SuppressWarnings("unchecked") public static List getProcessingMessages(NpmPackage thePackage) { - return (List) thePackage.getUserData().computeIfAbsent("JpPackageCache_ProcessingMessages", t -> new ArrayList<>()); + return (List) + thePackage.getUserData().computeIfAbsent("JpPackageCache_ProcessingMessages", t -> new ArrayList<>()); } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/NpmJpaValidationSupport.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/NpmJpaValidationSupport.java index 9a29e24d41b..f60e6fbcd85 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/NpmJpaValidationSupport.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/NpmJpaValidationSupport.java @@ -25,8 +25,8 @@ import ca.uhn.fhir.context.support.IValidationSupport; import org.hl7.fhir.instance.model.api.IBaseResource; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nullable; import java.util.List; +import javax.annotation.Nullable; public class NpmJpaValidationSupport implements IValidationSupport { @@ -61,7 +61,8 @@ public class NpmJpaValidationSupport implements IValidationSupport { FhirVersionEnum fhirVersion = myFhirContext.getVersion().getVersion(); IBaseResource asset = myHapiPackageCacheManager.loadPackageAssetByUrl(fhirVersion, theUri); if (asset != null) { - Class type = myFhirContext.getResourceDefinition(theResourceType).getImplementingClass(); + Class type = + myFhirContext.getResourceDefinition(theResourceType).getImplementingClass(); if (type.isAssignableFrom(asset.getClass())) { return asset; } @@ -69,7 +70,6 @@ public class NpmJpaValidationSupport implements IValidationSupport { return null; } - @SuppressWarnings("unchecked") @Nullable @Override @@ -77,5 +77,4 @@ public class NpmJpaValidationSupport implements IValidationSupport { FhirVersionEnum fhirVersion = myFhirContext.getVersion().getVersion(); return (List) myHapiPackageCacheManager.loadPackageAssetsByType(fhirVersion, "StructureDefinition"); } - } diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/NpmPackageMetadataJson.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/NpmPackageMetadataJson.java index 7631aa96451..2ba08c9ae98 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/NpmPackageMetadataJson.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/NpmPackageMetadataJson.java @@ -28,24 +28,32 @@ import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonSerialize; import io.swagger.v3.oas.annotations.media.Schema; -import javax.annotation.Nonnull; import java.util.Date; import java.util.LinkedHashMap; import java.util.Map; +import javax.annotation.Nonnull; @Schema(description = "Represents an NPM package metadata response") @JsonInclude(JsonInclude.Include.NON_NULL) -@JsonAutoDetect(creatorVisibility = JsonAutoDetect.Visibility.NONE, fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE) +@JsonAutoDetect( + creatorVisibility = JsonAutoDetect.Visibility.NONE, + fieldVisibility = JsonAutoDetect.Visibility.NONE, + getterVisibility = JsonAutoDetect.Visibility.NONE, + isGetterVisibility = JsonAutoDetect.Visibility.NONE, + setterVisibility = JsonAutoDetect.Visibility.NONE) public class NpmPackageMetadataJson { @JsonProperty("dist-tags") private DistTags myDistTags; + @JsonProperty("modified") @JsonSerialize(using = JsonDateSerializer.class) @JsonDeserialize(using = JsonDateDeserializer.class) private Date myModified; + @JsonProperty("name") private String myName; + @JsonProperty("versions") private Map myVersionIdToVersion; @@ -77,7 +85,6 @@ public class NpmPackageMetadataJson { myName = theName; } - public static class DistTags { @JsonProperty("latest") @@ -93,19 +100,27 @@ public class NpmPackageMetadataJson { } } - @JsonInclude(JsonInclude.Include.NON_NULL) - @JsonAutoDetect(creatorVisibility = JsonAutoDetect.Visibility.NONE, fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE) + @JsonAutoDetect( + creatorVisibility = JsonAutoDetect.Visibility.NONE, + fieldVisibility = JsonAutoDetect.Visibility.NONE, + getterVisibility = JsonAutoDetect.Visibility.NONE, + isGetterVisibility = JsonAutoDetect.Visibility.NONE, + setterVisibility = JsonAutoDetect.Visibility.NONE) public static class Version { @JsonProperty("name") private String myName; + @JsonProperty("version") private String myVersion; + @JsonProperty("description") private String myDescription; + @JsonProperty("fhirVersion") private String myFhirVersion; + @Schema(description = "The size of this package in bytes", example = "1000") @JsonProperty("_bytes") private long myBytes; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/NpmPackageSearchResultJson.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/NpmPackageSearchResultJson.java index fc528e2037b..8ebbf39384b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/NpmPackageSearchResultJson.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/NpmPackageSearchResultJson.java @@ -29,11 +29,17 @@ import java.util.List; @Schema(description = "Represents an NPM package search response") 
@JsonInclude(JsonInclude.Include.NON_NULL) -@JsonAutoDetect(creatorVisibility = JsonAutoDetect.Visibility.NONE, fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE) +@JsonAutoDetect( + creatorVisibility = JsonAutoDetect.Visibility.NONE, + fieldVisibility = JsonAutoDetect.Visibility.NONE, + getterVisibility = JsonAutoDetect.Visibility.NONE, + isGetterVisibility = JsonAutoDetect.Visibility.NONE, + setterVisibility = JsonAutoDetect.Visibility.NONE) public class NpmPackageSearchResultJson { @JsonProperty("objects") private List myObjects; + @JsonProperty("total") private int myTotal; @@ -63,11 +69,20 @@ public class NpmPackageSearchResultJson { } public Package getPackageWithId(String thePackageId) { - return getObjects().stream().map(t -> t.getPackage()).filter(t -> t.getName().equals(thePackageId)).findFirst().orElseThrow(() -> new IllegalArgumentException()); + return getObjects().stream() + .map(t -> t.getPackage()) + .filter(t -> t.getName().equals(thePackageId)) + .findFirst() + .orElseThrow(() -> new IllegalArgumentException()); } @JsonInclude(JsonInclude.Include.NON_NULL) - @JsonAutoDetect(creatorVisibility = JsonAutoDetect.Visibility.NONE, fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE) + @JsonAutoDetect( + creatorVisibility = JsonAutoDetect.Visibility.NONE, + fieldVisibility = JsonAutoDetect.Visibility.NONE, + getterVisibility = JsonAutoDetect.Visibility.NONE, + isGetterVisibility = JsonAutoDetect.Visibility.NONE, + setterVisibility = JsonAutoDetect.Visibility.NONE) public static class ObjectElement { @JsonProperty("package") @@ -82,17 +97,26 @@ public class NpmPackageSearchResultJson { } @JsonInclude(JsonInclude.Include.NON_NULL) - @JsonAutoDetect(creatorVisibility = JsonAutoDetect.Visibility.NONE, fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE) + @JsonAutoDetect( + creatorVisibility = JsonAutoDetect.Visibility.NONE, + fieldVisibility = JsonAutoDetect.Visibility.NONE, + getterVisibility = JsonAutoDetect.Visibility.NONE, + isGetterVisibility = JsonAutoDetect.Visibility.NONE, + setterVisibility = JsonAutoDetect.Visibility.NONE) public static class Package { @JsonProperty("name") private String myName; + @JsonProperty("version") private String myVersion; + @JsonProperty("description") private String myDescription; + @JsonProperty("fhirVersion") private List myFhirVersion; + @Schema(description = "The size of this package in bytes", example = "1000") @JsonProperty("_bytes") private long myBytes; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageDeleteOutcomeJson.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageDeleteOutcomeJson.java index a2783f2fbf0..b7ea0f4bbff 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageDeleteOutcomeJson.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageDeleteOutcomeJson.java @@ -29,7 +29,12 @@ import java.util.List; @Schema(description = "Represents an NPM package deletion response") @JsonInclude(JsonInclude.Include.NON_NULL) -@JsonAutoDetect(creatorVisibility = 
JsonAutoDetect.Visibility.NONE, fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE) +@JsonAutoDetect( + creatorVisibility = JsonAutoDetect.Visibility.NONE, + fieldVisibility = JsonAutoDetect.Visibility.NONE, + getterVisibility = JsonAutoDetect.Visibility.NONE, + isGetterVisibility = JsonAutoDetect.Visibility.NONE, + setterVisibility = JsonAutoDetect.Visibility.NONE) public class PackageDeleteOutcomeJson { @JsonProperty("messages") @@ -41,5 +46,4 @@ public class PackageDeleteOutcomeJson { } return myMessage; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallOutcomeJson.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallOutcomeJson.java index 15254a2b6d1..857112a0319 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallOutcomeJson.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallOutcomeJson.java @@ -33,7 +33,12 @@ import java.util.Map; @Schema(description = "Represents an NPM package installation response") @JsonInclude(JsonInclude.Include.NON_NULL) -@JsonAutoDetect(creatorVisibility = JsonAutoDetect.Visibility.NONE, fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE) +@JsonAutoDetect( + creatorVisibility = JsonAutoDetect.Visibility.NONE, + fieldVisibility = JsonAutoDetect.Visibility.NONE, + getterVisibility = JsonAutoDetect.Visibility.NONE, + isGetterVisibility = JsonAutoDetect.Visibility.NONE, + setterVisibility = JsonAutoDetect.Visibility.NONE) public class PackageInstallOutcomeJson { @JsonProperty("messages") @@ -42,8 +47,6 @@ public class PackageInstallOutcomeJson { @JsonProperty("resourcesInstalled") private Map myResourcesInstalled; - - public List getMessage() { if (myMessage == null) { myMessage = new ArrayList<>(); @@ -70,8 +73,8 @@ public class PackageInstallOutcomeJson { @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("message", myMessage) - .append("resourcesInstalled", myResourcesInstalled) - .toString(); + .append("message", myMessage) + .append("resourcesInstalled", myResourcesInstalled) + .toString(); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallationSpec.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallationSpec.java index 13ac10728c2..3034192236a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallationSpec.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallationSpec.java @@ -19,7 +19,6 @@ */ package ca.uhn.fhir.jpa.packages; - import ca.uhn.fhir.model.api.annotation.ExampleSupplier; import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.JsonIgnore; @@ -32,40 +31,65 @@ import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; -@Schema( - name = "PackageInstallationSpec", description = "Defines a set of instructions for package installation" - ) +@Schema(name = "PackageInstallationSpec", description = "Defines a set of instructions for package installation") @JsonPropertyOrder({ - "name", "version", "packageUrl", 
"installMode", "installResourceTypes", "validationMode", "reloadExisting" + "name", + "version", + "packageUrl", + "installMode", + "installResourceTypes", + "validationMode", + "reloadExisting" }) @ExampleSupplier({PackageInstallationSpec.ExampleSupplier.class, PackageInstallationSpec.ExampleSupplier2.class}) @JsonInclude(JsonInclude.Include.NON_NULL) -@JsonAutoDetect(creatorVisibility = JsonAutoDetect.Visibility.NONE, fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE) +@JsonAutoDetect( + creatorVisibility = JsonAutoDetect.Visibility.NONE, + fieldVisibility = JsonAutoDetect.Visibility.NONE, + getterVisibility = JsonAutoDetect.Visibility.NONE, + isGetterVisibility = JsonAutoDetect.Visibility.NONE, + setterVisibility = JsonAutoDetect.Visibility.NONE) public class PackageInstallationSpec { @Schema(description = "The direct package URL") @JsonProperty("packageUrl") private String myPackageUrl; + @Schema(description = "The NPM package Name") @JsonProperty("name") private String myName; + @Schema(description = "The direct package version") @JsonProperty("version") private String myVersion; - @Schema(description = "Should resources from this package be extracted from the package and installed into the repository individually") + + @Schema( + description = + "Should resources from this package be extracted from the package and installed into the repository individually") @JsonProperty("installMode") private InstallModeEnum myInstallMode; - @Schema(description = "If resources are being installed individually, this is list provides the resource types to install. By default, all conformance resources will be installed.") + + @Schema( + description = + "If resources are being installed individually, this is list provides the resource types to install. By default, all conformance resources will be installed.") @JsonProperty("installResourceTypes") private List myInstallResourceTypes; + @Schema(description = "Should dependencies be automatically resolved, fetched and installed with the same settings") @JsonProperty("fetchDependencies") private boolean myFetchDependencies; - @Schema(description = "Should existing resources be reloaded. Defaults to true, but can be set to false to avoid re-index operations for existing search parameters") + + @Schema( + description = + "Should existing resources be reloaded. Defaults to true, but can be set to false to avoid re-index operations for existing search parameters") @JsonProperty("reloadExisting") private boolean myReloadExisting = true; - @Schema(description = "Any values provided here will be interpreted as a regex. Dependencies with an ID matching any regex will be skipped.") + + @Schema( + description = + "Any values provided here will be interpreted as a regex. 
Dependencies with an ID matching any regex will be skipped.") private List myDependencyExcludes; + @JsonIgnore private byte[] myPackageContents; @@ -180,10 +204,10 @@ public class PackageInstallationSpec { @Override public PackageInstallationSpec get() { return new PackageInstallationSpec() - .setName("hl7.fhir.us.core") - .setVersion("3.1.0") - .setInstallMode(InstallModeEnum.STORE_ONLY) - .setFetchDependencies(true); + .setName("hl7.fhir.us.core") + .setVersion("3.1.0") + .setInstallMode(InstallModeEnum.STORE_ONLY) + .setFetchDependencies(true); } } @@ -192,12 +216,11 @@ public class PackageInstallationSpec { @Override public PackageInstallationSpec get() { return new PackageInstallationSpec() - .setName("com.example.my-resources") - .setVersion("1.0") - .setPackageUrl("classpath:/my-resources.tgz") - .setInstallMode(InstallModeEnum.STORE_AND_INSTALL) - .addInstallResourceTypes("Organization", "Medication", "PlanDefinition", "SearchParameter"); + .setName("com.example.my-resources") + .setVersion("1.0") + .setPackageUrl("classpath:/my-resources.tgz") + .setInstallMode(InstallModeEnum.STORE_AND_INSTALL) + .addInstallResourceTypes("Organization", "Medication", "PlanDefinition", "SearchParameter"); } } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java index 1aba8fa12dd..78e2707a23c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java @@ -60,12 +60,12 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nonnull; -import javax.annotation.PostConstruct; import java.io.IOException; import java.util.Collection; import java.util.List; import java.util.Optional; +import javax.annotation.Nonnull; +import javax.annotation.PostConstruct; import static ca.uhn.fhir.jpa.packages.util.PackageUtils.DEFAULT_INSTALL_TYPES; import static org.apache.commons.lang3.StringUtils.defaultString; @@ -78,26 +78,35 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { private static final Logger ourLog = LoggerFactory.getLogger(PackageInstallerSvcImpl.class); - boolean enabled = true; + @Autowired private FhirContext myFhirContext; + @Autowired private DaoRegistry myDaoRegistry; + @Autowired private IValidationSupport validationSupport; + @Autowired private IHapiPackageCacheManager myPackageCacheManager; + @Autowired private IHapiTransactionService myTxService; + @Autowired private INpmPackageVersionDao myPackageVersionDao; + @Autowired private ISearchParamRegistryController mySearchParamRegistryController; + @Autowired private PartitionSettings myPartitionSettings; + @Autowired private SearchParameterHelper mySearchParameterHelper; + @Autowired private PackageResourceParsingSvc myPackageResourceParsingSvc; @@ -121,7 +130,9 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { case DSTU2_HL7ORG: case DSTU2_1: default: { - ourLog.info("IG installation not supported for version: {}", myFhirContext.getVersion().getVersion()); + ourLog.info( + "IG installation not supported for version: {}", + myFhirContext.getVersion().getVersion()); enabled = false; } } @@ -129,7 +140,8 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { @Override public PackageDeleteOutcomeJson 
uninstall(PackageInstallationSpec theInstallationSpec) { - PackageDeleteOutcomeJson outcome = myPackageCacheManager.uninstallPackage(theInstallationSpec.getName(), theInstallationSpec.getVersion()); + PackageDeleteOutcomeJson outcome = + myPackageCacheManager.uninstallPackage(theInstallationSpec.getName(), theInstallationSpec.getVersion()); validationSupport.invalidateCaches(); return outcome; } @@ -149,17 +161,25 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { */ @SuppressWarnings("ConstantConditions") @Override - public PackageInstallOutcomeJson install(PackageInstallationSpec theInstallationSpec) throws ImplementationGuideInstallationException { + public PackageInstallOutcomeJson install(PackageInstallationSpec theInstallationSpec) + throws ImplementationGuideInstallationException { PackageInstallOutcomeJson retVal = new PackageInstallOutcomeJson(); if (enabled) { try { - boolean exists = myTxService.withSystemRequest().withRequestPartitionId(RequestPartitionId.defaultPartition()).execute(() -> { - Optional existing = myPackageVersionDao.findByPackageIdAndVersion(theInstallationSpec.getName(), theInstallationSpec.getVersion()); - return existing.isPresent(); - }); + boolean exists = myTxService + .withSystemRequest() + .withRequestPartitionId(RequestPartitionId.defaultPartition()) + .execute(() -> { + Optional existing = myPackageVersionDao.findByPackageIdAndVersion( + theInstallationSpec.getName(), theInstallationSpec.getVersion()); + return existing.isPresent(); + }); if (exists) { - ourLog.info("Package {}#{} is already installed", theInstallationSpec.getName(), theInstallationSpec.getVersion()); + ourLog.info( + "Package {}#{} is already installed", + theInstallationSpec.getName(), + theInstallationSpec.getVersion()); } NpmPackage npmPackage = myPackageCacheManager.installPackage(theInstallationSpec); @@ -183,7 +203,10 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { validationSupport.invalidateCaches(); } catch (IOException e) { - throw new ImplementationGuideInstallationException(Msg.code(1285) + "Could not load NPM package " + theInstallationSpec.getName() + "#" + theInstallationSpec.getVersion(), e); + throw new ImplementationGuideInstallationException( + Msg.code(1285) + "Could not load NPM package " + theInstallationSpec.getName() + "#" + + theInstallationSpec.getVersion(), + e); } } @@ -197,7 +220,9 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { * * @throws ImplementationGuideInstallationException if installation fails */ - private void install(NpmPackage npmPackage, PackageInstallationSpec theInstallationSpec, PackageInstallOutcomeJson theOutcome) throws ImplementationGuideInstallationException { + private void install( + NpmPackage npmPackage, PackageInstallationSpec theInstallationSpec, PackageInstallOutcomeJson theOutcome) + throws ImplementationGuideInstallationException { String name = npmPackage.getNpm().get("name").asJsonString().getValue(); String version = npmPackage.getNpm().get("version").asJsonString().getValue(); @@ -226,12 +251,15 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { next = isStructureDefinitionWithoutSnapshot(next) ? 
generateSnapshot(next) : next; create(next, theInstallationSpec, theOutcome); } catch (Exception e) { - ourLog.warn("Failed to upload resource of type {} with ID {} - Error: {}", myFhirContext.getResourceType(next), next.getIdElement().getValue(), e.toString()); - throw new ImplementationGuideInstallationException(Msg.code(1286) + String.format("Error installing IG %s#%s: %s", name, version, e), e); + ourLog.warn( + "Failed to upload resource of type {} with ID {} - Error: {}", + myFhirContext.getResourceType(next), + next.getIdElement().getValue(), + e.toString()); + throw new ImplementationGuideInstallationException( + Msg.code(1286) + String.format("Error installing IG %s#%s: %s", name, version, e), e); } - } - } ourLog.info(String.format("Finished installation of package %s#%s:", name, version)); @@ -240,18 +268,27 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { } } - private void fetchAndInstallDependencies(NpmPackage npmPackage, PackageInstallationSpec theInstallationSpec, PackageInstallOutcomeJson theOutcome) throws ImplementationGuideInstallationException { + private void fetchAndInstallDependencies( + NpmPackage npmPackage, PackageInstallationSpec theInstallationSpec, PackageInstallOutcomeJson theOutcome) + throws ImplementationGuideInstallationException { if (npmPackage.getNpm().has("dependencies")) { - JsonObject dependenciesElement = npmPackage.getNpm().get("dependencies").asJsonObject(); + JsonObject dependenciesElement = + npmPackage.getNpm().get("dependencies").asJsonObject(); for (String id : dependenciesElement.getNames()) { String ver = dependenciesElement.getJsonString(id).asString(); try { - theOutcome.getMessage().add("Package " + npmPackage.id() + "#" + npmPackage.version() + " depends on package " + id + "#" + ver); + theOutcome + .getMessage() + .add("Package " + npmPackage.id() + "#" + npmPackage.version() + " depends on package " + id + + "#" + ver); boolean skip = false; for (String next : theInstallationSpec.getDependencyExcludes()) { if (id.matches(next)) { - theOutcome.getMessage().add("Not installing dependency " + id + " because it matches exclude criteria: " + next); + theOutcome + .getMessage() + .add("Not installing dependency " + id + " because it matches exclude criteria: " + + next); skip = true; break; } @@ -266,12 +303,14 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { // installing the package fetchAndInstallDependencies(dependency, theInstallationSpec, theOutcome); - if (theInstallationSpec.getInstallMode() == PackageInstallationSpec.InstallModeEnum.STORE_AND_INSTALL) { + if (theInstallationSpec.getInstallMode() + == PackageInstallationSpec.InstallModeEnum.STORE_AND_INSTALL) { install(dependency, theInstallationSpec, theOutcome); } } catch (IOException e) { - throw new ImplementationGuideInstallationException(Msg.code(1287) + String.format("Cannot resolve dependency %s#%s", id, ver), e); + throw new ImplementationGuideInstallationException( + Msg.code(1287) + String.format("Cannot resolve dependency %s#%s", id, ver), e); } } } @@ -281,7 +320,8 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { * Asserts if package FHIR version is compatible with current FHIR version * by using semantic versioning rules. 
*/ - protected void assertFhirVersionsAreCompatible(String fhirVersion, String currentFhirVersion) throws ImplementationGuideInstallationException { + protected void assertFhirVersionsAreCompatible(String fhirVersion, String currentFhirVersion) + throws ImplementationGuideInstallationException { FhirVersionEnum fhirVersionEnum = FhirVersionEnum.forVersionString(fhirVersion); FhirVersionEnum currentFhirVersionEnum = FhirVersionEnum.forVersionString(currentFhirVersion); @@ -292,16 +332,20 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { compatible = true; } if (!compatible) { - throw new ImplementationGuideInstallationException(Msg.code(1288) + String.format("Cannot install implementation guide: FHIR versions mismatch (expected <=%s, package uses %s)", currentFhirVersion, fhirVersion)); + throw new ImplementationGuideInstallationException(Msg.code(1288) + + String.format( + "Cannot install implementation guide: FHIR versions mismatch (expected <=%s, package uses %s)", + currentFhirVersion, fhirVersion)); } } /** * ============================= Utility methods =============================== */ - - - private void create(IBaseResource theResource, PackageInstallationSpec theInstallationSpec, PackageInstallOutcomeJson theOutcome) { + private void create( + IBaseResource theResource, + PackageInstallationSpec theInstallationSpec, + PackageInstallOutcomeJson theOutcome) { IFhirResourceDao dao = myDaoRegistry.getResourceDao(theResource.getClass()); SearchParameterMap map = createSearchParameterMapFor(theResource); IBundleProvider searchResult = searchResource(dao, map); @@ -327,17 +371,26 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { } else { if (theInstallationSpec.isReloadExisting()) { ourLog.info("Updating existing resource matching {}", map.toNormalizedQueryString(myFhirContext)); - theResource.setId(searchResult.getResources(0, 1).get(0).getIdElement().toUnqualifiedVersionless()); + theResource.setId(searchResult + .getResources(0, 1) + .get(0) + .getIdElement() + .toUnqualifiedVersionless()); DaoMethodOutcome outcome = updateResource(dao, theResource); if (!outcome.isNop()) { theOutcome.incrementResourcesInstalled(myFhirContext.getResourceType(theResource)); } } else { - ourLog.info("Skipping update of existing resource matching {}", map.toNormalizedQueryString(myFhirContext)); + ourLog.info( + "Skipping update of existing resource matching {}", + map.toNormalizedQueryString(myFhirContext)); } } } else { - ourLog.warn("Failed to upload resource of type {} with ID {} - Error: Resource failed validation", theResource.fhirType(), theResource.getIdElement().getValue()); + ourLog.warn( + "Failed to upload resource of type {} with ID {} - Error: Resource failed validation", + theResource.fhirType(), + theResource.getIdElement().getValue()); } } @@ -374,25 +427,36 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { String code = SearchParameterUtil.getCode(myFhirContext, theResource); if (defaultString(code).startsWith("_")) { - ourLog.warn("Failed to validate resource of type {} with url {} - Error: Resource code starts with \"_\"", theResource.fhirType(), SearchParameterUtil.getURL(myFhirContext, theResource)); + ourLog.warn( + "Failed to validate resource of type {} with url {} - Error: Resource code starts with \"_\"", + theResource.fhirType(), + SearchParameterUtil.getURL(myFhirContext, theResource)); return false; } String expression = SearchParameterUtil.getExpression(myFhirContext, theResource); if 
(isBlank(expression)) { - ourLog.warn("Failed to validate resource of type {} with url {} - Error: Resource expression is blank", theResource.fhirType(), SearchParameterUtil.getURL(myFhirContext, theResource)); + ourLog.warn( + "Failed to validate resource of type {} with url {} - Error: Resource expression is blank", + theResource.fhirType(), + SearchParameterUtil.getURL(myFhirContext, theResource)); return false; } if (SearchParameterUtil.getBaseAsStrings(myFhirContext, theResource).isEmpty()) { - ourLog.warn("Failed to validate resource of type {} with url {} - Error: Resource base is empty", theResource.fhirType(), SearchParameterUtil.getURL(myFhirContext, theResource)); + ourLog.warn( + "Failed to validate resource of type {} with url {} - Error: Resource base is empty", + theResource.fhirType(), + SearchParameterUtil.getURL(myFhirContext, theResource)); return false; } - } if (!isValidResourceStatusForPackageUpload(theResource)) { - ourLog.warn("Failed to validate resource of type {} with ID {} - Error: Resource status not accepted value.", theResource.fhirType(), theResource.getIdElement().getValue()); + ourLog.warn( + "Failed to validate resource of type {} with ID {} - Error: Resource status not accepted value.", + theResource.fhirType(), + theResource.getIdElement().getValue()); return false; } @@ -416,7 +480,8 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { * @return {@link Boolean#TRUE} if the status value of this resource is acceptable for package upload. */ private boolean isValidResourceStatusForPackageUpload(IBaseResource theResource) { - List statusTypes = myFhirContext.newFhirPath().evaluate(theResource, "status", IPrimitiveType.class); + List statusTypes = + myFhirContext.newFhirPath().evaluate(theResource, "status", IPrimitiveType.class); // Resource does not have a status field if (statusTypes.isEmpty()) return true; // Resource has a null status field @@ -447,9 +512,14 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { private IBaseResource generateSnapshot(IBaseResource sd) { try { - return validationSupport.generateSnapshot(new ValidationSupportContext(validationSupport), sd, null, null, null); + return validationSupport.generateSnapshot( + new ValidationSupportContext(validationSupport), sd, null, null, null); } catch (Exception e) { - throw new ImplementationGuideInstallationException(Msg.code(1290) + String.format("Failure when generating snapshot of StructureDefinition: %s", sd.getIdElement()), e); + throw new ImplementationGuideInstallationException( + Msg.code(1290) + + String.format( + "Failure when generating snapshot of StructureDefinition: %s", sd.getIdElement()), + e); } } @@ -471,7 +541,6 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { } } - /** * Strategy is to build a SearchParameterMap same way the SearchParamValidatingInterceptor does, to make sure that * the loader search detects existing resources and routes process to 'update' path, to avoid treating it as a new @@ -480,7 +549,8 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { * we cascade to building the map from 'url' or 'identifier'. 
*/ private SearchParameterMap buildSearchParameterMapForSearchParameter(IBaseResource theResource) { - Optional spmFromCanonicalized = mySearchParameterHelper.buildSearchParameterMapFromCanonical(theResource); + Optional spmFromCanonicalized = + mySearchParameterHelper.buildSearchParameterMapFromCanonical(theResource); if (spmFromCanonicalized.isPresent()) { return spmFromCanonicalized.get(); } @@ -494,12 +564,12 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { } } - private String extractUniqeIdFromNamingSystem(IBaseResource resource) { FhirTerser terser = myFhirContext.newTerser(); IBase uniqueIdComponent = (IBase) terser.getSingleValueOrNull(resource, "uniqueId"); if (uniqueIdComponent == null) { - throw new ImplementationGuideInstallationException(Msg.code(1291) + "NamingSystem does not have uniqueId component."); + throw new ImplementationGuideInstallationException( + Msg.code(1291) + "NamingSystem does not have uniqueId component."); } IPrimitiveType asPrimitiveType = (IPrimitiveType) terser.getSingleValueOrNull(uniqueIdComponent, "value"); return (String) asPrimitiveType.getValue(); @@ -523,14 +593,16 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { if (identifier != null) { return new TokenParam(identifier.getSystem(), identifier.getValue()); } else { - throw new UnsupportedOperationException(Msg.code(1292) + "Resources in a package must have a url or identifier to be loaded by the package installer."); + throw new UnsupportedOperationException(Msg.code(1292) + + "Resources in a package must have a url or identifier to be loaded by the package installer."); } } private boolean resourceHasUrlElement(IBaseResource resource) { BaseRuntimeElementDefinition def = myFhirContext.getElementDefinition(resource.getClass()); if (!(def instanceof BaseRuntimeElementCompositeDefinition)) { - throw new IllegalArgumentException(Msg.code(1293) + "Resource is not a composite type: " + resource.getClass().getName()); + throw new IllegalArgumentException(Msg.code(1293) + "Resource is not a composite type: " + + resource.getClass().getName()); } BaseRuntimeElementCompositeDefinition currentDef = (BaseRuntimeElementCompositeDefinition) def; BaseRuntimeChildDefinition nextDef = currentDef.getChildByName("url"); @@ -541,5 +613,4 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc { void setFhirContextForUnitTest(FhirContext theCtx) { myFhirContext = theCtx; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageVersionComparator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageVersionComparator.java index 0b6fe0cfe96..df8d28bc601 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageVersionComparator.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageVersionComparator.java @@ -59,7 +59,7 @@ public class PackageVersionComparator implements Comparator { String[] o1parts = theSpec.split("\\."); String[] o2parts = thePackageVersion.split("\\."); - for (int i = 0; i < o1parts.length; i++ ) { + for (int i = 0; i < o1parts.length; i++) { if (!o1parts[i].equals("x")) { if (!o1parts[i].equals(o2parts[i])) { return false; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/loader/NpmPackageData.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/loader/NpmPackageData.java index ef19cd3ed05..ffbb76ebd6a 100644 --- 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/loader/NpmPackageData.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/loader/NpmPackageData.java @@ -56,13 +56,12 @@ public class NpmPackageData { private final InputStream myInputStream; public NpmPackageData( - String thePackageId, - String thePackageVersionId, - String theSourceDesc, - byte[] theBytes, - NpmPackage thePackage, - InputStream theStream - ) { + String thePackageId, + String thePackageVersionId, + String theSourceDesc, + byte[] theBytes, + NpmPackage thePackage, + InputStream theStream) { myPackageId = thePackageId; myPackageVersionId = thePackageVersionId; mySourceDesc = theSourceDesc; @@ -80,7 +79,7 @@ public class NpmPackageData { } public InputStream getInputStream() { - return myInputStream; + return myInputStream; } public String getPackageId() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/loader/PackageLoaderSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/loader/PackageLoaderSvc.java index 16add14c842..1d02fae2c6d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/loader/PackageLoaderSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/loader/PackageLoaderSvc.java @@ -37,7 +37,6 @@ import org.hl7.fhir.utilities.npm.NpmPackage; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nullable; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; @@ -45,6 +44,7 @@ import java.net.URI; import java.net.URISyntaxException; import java.nio.file.Files; import java.nio.file.Paths; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -56,11 +56,10 @@ public class PackageLoaderSvc extends BasePackageCacheManager { if (isNotBlank(theSpec.getPackageUrl())) { byte[] contents = loadPackageUrlContents(theSpec.getPackageUrl()); return createNpmPackageDataFromData( - theSpec.getName(), - theSpec.getVersion(), - theSpec.getPackageUrl(), - new ByteArrayInputStream(contents) - ); + theSpec.getName(), + theSpec.getVersion(), + theSpec.getPackageUrl(), + new ByteArrayInputStream(contents)); } return fetchPackageFromServerInternal(theSpec.getName(), theSpec.getVersion()); @@ -74,29 +73,22 @@ public class PackageLoaderSvc extends BasePackageCacheManager { * as fetched from the server * @throws IOException */ - public NpmPackageData fetchPackageFromPackageSpec( - String thePackageId, - String thePackageVersion - ) throws FHIRException, IOException { + public NpmPackageData fetchPackageFromPackageSpec(String thePackageId, String thePackageVersion) + throws FHIRException, IOException { return fetchPackageFromServerInternal(thePackageId, thePackageVersion); } - private NpmPackageData fetchPackageFromServerInternal( - String thePackageId, - String thePackageVersion - ) throws IOException { + private NpmPackageData fetchPackageFromServerInternal(String thePackageId, String thePackageVersion) + throws IOException { BasePackageCacheManager.InputStreamWithSrc pkg = this.loadFromPackageServer(thePackageId, thePackageVersion); if (pkg == null) { - throw new ResourceNotFoundException(Msg.code(1301) + "Unable to locate package " + thePackageId + "#" + thePackageVersion); + throw new ResourceNotFoundException( + Msg.code(1301) + "Unable to locate package " + thePackageId + "#" + thePackageVersion); } NpmPackageData npmPackage = createNpmPackageDataFromData( - thePackageId, - thePackageVersion 
== null ? pkg.version : thePackageVersion, - pkg.url, - pkg.stream - ); + thePackageId, thePackageVersion == null ? pkg.version : thePackageVersion, pkg.url, pkg.stream); return npmPackage; } @@ -113,11 +105,8 @@ public class PackageLoaderSvc extends BasePackageCacheManager { * @throws IOException */ public NpmPackageData createNpmPackageDataFromData( - String thePackageId, - String thePackageVersionId, - String theSourceDesc, - InputStream thePackageTgzInputStream - ) throws IOException { + String thePackageId, String thePackageVersionId, String theSourceDesc, InputStream thePackageTgzInputStream) + throws IOException { Validate.notBlank(thePackageId, "thePackageId must not be null"); Validate.notBlank(thePackageVersionId, "thePackageVersionId must not be null"); Validate.notNull(thePackageTgzInputStream, "thePackageTgzInputStream must not be null"); @@ -129,31 +118,22 @@ public class PackageLoaderSvc extends BasePackageCacheManager { NpmPackage npmPackage = NpmPackage.fromPackage(new ByteArrayInputStream(bytes)); return new NpmPackageData( - thePackageId, - thePackageVersionId, - theSourceDesc, - bytes, - npmPackage, - thePackageTgzInputStream - ); + thePackageId, thePackageVersionId, theSourceDesc, bytes, npmPackage, thePackageTgzInputStream); } @Override public NpmPackage loadPackageFromCacheOnly(String theS, @Nullable String theS1) { - throw new UnsupportedOperationException( - Msg.code(2215) - + "Cannot load from cache. " - + "Caching not supported in PackageLoaderSvc. Use JpaPackageCache instead." - ); + throw new UnsupportedOperationException(Msg.code(2215) + + "Cannot load from cache. " + + "Caching not supported in PackageLoaderSvc. Use JpaPackageCache instead."); } @Override - public NpmPackage addPackageToCache(String theS, String theS1, InputStream theInputStream, String theS2) throws IOException { - throw new UnsupportedOperationException( - Msg.code(2216) - + "Cannot add to cache. " - + "Caching not supported in PackageLoaderSvc. Use JpaPackageCache instead." - ); + public NpmPackage addPackageToCache(String theS, String theS1, InputStream theInputStream, String theS2) + throws IOException { + throw new UnsupportedOperationException(Msg.code(2216) + + "Cannot add to cache. " + + "Caching not supported in PackageLoaderSvc. Use JpaPackageCache instead."); } @Override @@ -164,38 +144,38 @@ public class PackageLoaderSvc extends BasePackageCacheManager { * on their own provides no value if nothing is cached/loaded onto hard disk somewhere * */ - throw new UnsupportedOperationException( - Msg.code(2217) - + "No packages are cached; " - + " this service only loads from the server directly. " - + "Call fetchPackageFromServer to fetch the npm package from the server. " - + "Or use JpaPackageCache for a cache implementation." - ); + throw new UnsupportedOperationException(Msg.code(2217) + + "No packages are cached; " + + " this service only loads from the server directly. " + + "Call fetchPackageFromServer to fetch the npm package from the server. 
" + + "Or use JpaPackageCache for a cache implementation."); } public byte[] loadPackageUrlContents(String thePackageUrl) { if (thePackageUrl.startsWith("classpath:")) { - return ClasspathUtil.loadResourceAsByteArray(thePackageUrl.substring("classpath:" .length())); + return ClasspathUtil.loadResourceAsByteArray(thePackageUrl.substring("classpath:".length())); } else if (thePackageUrl.startsWith("file:")) { try { byte[] bytes = Files.readAllBytes(Paths.get(new URI(thePackageUrl))); return bytes; } catch (IOException | URISyntaxException e) { - throw new InternalErrorException(Msg.code(2031) + "Error loading \"" + thePackageUrl + "\": " + e.getMessage()); + throw new InternalErrorException( + Msg.code(2031) + "Error loading \"" + thePackageUrl + "\": " + e.getMessage()); } } else { HttpClientConnectionManager connManager = new BasicHttpClientConnectionManager(); - try (CloseableHttpResponse request = HttpClientBuilder - .create() - .setConnectionManager(connManager) - .build() - .execute(new HttpGet(thePackageUrl))) { + try (CloseableHttpResponse request = HttpClientBuilder.create() + .setConnectionManager(connManager) + .build() + .execute(new HttpGet(thePackageUrl))) { if (request.getStatusLine().getStatusCode() != 200) { - throw new ResourceNotFoundException(Msg.code(1303) + "Received HTTP " + request.getStatusLine().getStatusCode() + " from URL: " + thePackageUrl); + throw new ResourceNotFoundException(Msg.code(1303) + "Received HTTP " + + request.getStatusLine().getStatusCode() + " from URL: " + thePackageUrl); } return IOUtils.toByteArray(request.getEntity().getContent()); } catch (IOException e) { - throw new InternalErrorException(Msg.code(1304) + "Error loading \"" + thePackageUrl + "\": " + e.getMessage()); + throw new InternalErrorException( + Msg.code(1304) + "Error loading \"" + thePackageUrl + "\": " + e.getMessage()); } } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/loader/PackageResourceParsingSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/loader/PackageResourceParsingSvc.java index 25daffd0d74..7dc2d8b708f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/loader/PackageResourceParsingSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/loader/PackageResourceParsingSvc.java @@ -49,14 +49,18 @@ public class PackageResourceParsingSvc { return Collections.emptyList(); } ArrayList resources = new ArrayList<>(); - List filesForType = thePkg.getFolders().get("package").getTypes().get(theType); + List filesForType = + thePkg.getFolders().get("package").getTypes().get(theType); if (filesForType != null) { for (String file : filesForType) { try { byte[] content = thePkg.getFolders().get("package").fetchFile(file); resources.add(myFhirContext.newJsonParser().parseResource(new String(content))); } catch (IOException e) { - throw new InternalErrorException(Msg.code(1289) + "Cannot install resource of type " + theType + ": Could not fetch file " + file, e); + throw new InternalErrorException( + Msg.code(1289) + "Cannot install resource of type " + theType + ": Could not fetch file " + + file, + e); } } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/util/PackageUtils.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/util/PackageUtils.java index acaaf1ab26b..830528ac718 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/util/PackageUtils.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/util/PackageUtils.java @@ -32,12 +32,11 @@ public class PackageUtils { * Default install types */ public static List DEFAULT_INSTALL_TYPES = Collections.unmodifiableList(Lists.newArrayList( - "NamingSystem", - "CodeSystem", - "ValueSet", - "StructureDefinition", - "ConceptMap", - "SearchParameter", - "Subscription" - )); + "NamingSystem", + "CodeSystem", + "ValueSet", + "StructureDefinition", + "ConceptMap", + "SearchParameter", + "Subscription")); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/IPartitionLookupSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/IPartitionLookupSvc.java index 585e5bc4a2a..db0648ef9de 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/IPartitionLookupSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/IPartitionLookupSvc.java @@ -23,8 +23,8 @@ import ca.uhn.fhir.jpa.entity.PartitionEntity; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; -import javax.annotation.Nullable; import java.util.List; +import javax.annotation.Nullable; public interface IPartitionLookupSvc { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/PartitionLookupSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/PartitionLookupSvcImpl.java index 4c1b1792e55..473cf6145f7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/PartitionLookupSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/PartitionLookupSvcImpl.java @@ -48,14 +48,13 @@ import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.support.TransactionTemplate; -import javax.annotation.Nonnull; -import javax.annotation.PostConstruct; import java.util.List; -import java.util.ListIterator; import java.util.Optional; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.TimeUnit; import java.util.regex.Pattern; +import javax.annotation.Nonnull; +import javax.annotation.PostConstruct; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -66,17 +65,22 @@ public class PartitionLookupSvcImpl implements IPartitionLookupSvc { @Autowired private PartitionSettings myPartitionSettings; + @Autowired private IInterceptorService myInterceptorService; + @Autowired private IPartitionDao myPartitionDao; private LoadingCache myNameToPartitionCache; private LoadingCache myIdToPartitionCache; + @Autowired private FhirContext myFhirCtx; + @Autowired private PlatformTransactionManager myTxManager; + private TransactionTemplate myTxTemplate; /** @@ -110,7 +114,8 @@ public class PartitionLookupSvcImpl implements IPartitionLookupSvc { if (myPartitionSettings.isUnnamedPartitionMode()) { return new PartitionEntity().setId(thePartitionId); } - if (myPartitionSettings.getDefaultPartitionId() != null && myPartitionSettings.getDefaultPartitionId().equals(thePartitionId)) { + if (myPartitionSettings.getDefaultPartitionId() != null + && myPartitionSettings.getDefaultPartitionId().equals(thePartitionId)) { return new PartitionEntity().setId(thePartitionId).setName(JpaConstants.DEFAULT_PARTITION_NAME); } return myIdToPartitionCache.get(thePartitionId); @@ -135,6 +140,7 @@ public class PartitionLookupSvcImpl implements IPartitionLookupSvc { } return candidate; } + 
@Override @Transactional public PartitionEntity createPartition(PartitionEntity thePartition, RequestDetails theRequestDetails) { @@ -150,9 +156,9 @@ public class PartitionLookupSvcImpl implements IPartitionLookupSvc { // Interceptor call: STORAGE_PARTITION_CREATED if (myInterceptorService.hasHooks(Pointcut.STORAGE_PARTITION_CREATED)) { HookParams params = new HookParams() - .add(RequestPartitionId.class, thePartition.toRequestPartitionId()) - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails); + .add(RequestPartitionId.class, thePartition.toRequestPartitionId()) + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails); myInterceptorService.callHooks(Pointcut.STORAGE_PARTITION_CREATED, params); } @@ -167,7 +173,9 @@ public class PartitionLookupSvcImpl implements IPartitionLookupSvc { Optional existingPartitionOpt = myPartitionDao.findById(thePartition.getId()); if (existingPartitionOpt.isPresent() == false) { - String msg = myFhirCtx.getLocalizer().getMessageSanitized(PartitionLookupSvcImpl.class, "unknownPartitionId", thePartition.getId()); + String msg = myFhirCtx + .getLocalizer() + .getMessageSanitized(PartitionLookupSvcImpl.class, "unknownPartitionId", thePartition.getId()); throw new InvalidRequestException(Msg.code(1307) + msg); } @@ -191,7 +199,9 @@ public class PartitionLookupSvcImpl implements IPartitionLookupSvc { Optional partition = myPartitionDao.findById(thePartitionId); if (!partition.isPresent()) { - String msg = myFhirCtx.getLocalizer().getMessageSanitized(PartitionLookupSvcImpl.class, "unknownPartitionId", thePartitionId); + String msg = myFhirCtx + .getLocalizer() + .getMessageSanitized(PartitionLookupSvcImpl.class, "unknownPartitionId", thePartitionId); throw new IllegalArgumentException(Msg.code(1308) + msg); } @@ -208,14 +218,17 @@ public class PartitionLookupSvcImpl implements IPartitionLookupSvc { private void validatePartitionNameDoesntAlreadyExist(String theName) { if (myPartitionDao.findForName(theName).isPresent()) { - String msg = myFhirCtx.getLocalizer().getMessageSanitized(PartitionLookupSvcImpl.class, "cantCreateDuplicatePartitionName", theName); + String msg = myFhirCtx + .getLocalizer() + .getMessageSanitized(PartitionLookupSvcImpl.class, "cantCreateDuplicatePartitionName", theName); throw new InvalidRequestException(Msg.code(1309) + msg); } } - private void validIdUponCreation(PartitionEntity thePartition){ + private void validIdUponCreation(PartitionEntity thePartition) { if (myPartitionDao.findById(thePartition.getId()).isPresent()) { - String msg = myFhirCtx.getLocalizer().getMessageSanitized(PartitionLookupSvcImpl.class, "duplicatePartitionId"); + String msg = + myFhirCtx.getLocalizer().getMessageSanitized(PartitionLookupSvcImpl.class, "duplicatePartitionId"); throw new InvalidRequestException(Msg.code(2366) + msg); } } @@ -227,36 +240,42 @@ public class PartitionLookupSvcImpl implements IPartitionLookupSvc { } if (thePartition.getName().equals(JpaConstants.DEFAULT_PARTITION_NAME)) { - String msg = myFhirCtx.getLocalizer().getMessageSanitized(PartitionLookupSvcImpl.class, "cantCreateDefaultPartition"); + String msg = myFhirCtx + .getLocalizer() + .getMessageSanitized(PartitionLookupSvcImpl.class, "cantCreateDefaultPartition"); throw new InvalidRequestException(Msg.code(1311) + msg); } if (!PARTITION_NAME_VALID_PATTERN.matcher(thePartition.getName()).matches()) { - String msg = 
myFhirCtx.getLocalizer().getMessageSanitized(PartitionLookupSvcImpl.class, "invalidName", thePartition.getName()); + String msg = myFhirCtx + .getLocalizer() + .getMessageSanitized(PartitionLookupSvcImpl.class, "invalidName", thePartition.getName()); throw new InvalidRequestException(Msg.code(1312) + msg); } } private void validateNotInUnnamedPartitionMode() { if (myPartitionSettings.isUnnamedPartitionMode()) { - throw new MethodNotAllowedException(Msg.code(1313) + "Can not invoke this operation in unnamed partition mode"); + throw new MethodNotAllowedException( + Msg.code(1313) + "Can not invoke this operation in unnamed partition mode"); } } private PartitionEntity lookupPartitionByName(@Nonnull String theName) { - return executeInTransaction(() -> myPartitionDao.findForName(theName)) - .orElseThrow(() -> { - String msg = myFhirCtx.getLocalizer().getMessageSanitized(PartitionLookupSvcImpl.class, "invalidName", theName); - return new ResourceNotFoundException(msg); - }); + return executeInTransaction(() -> myPartitionDao.findForName(theName)).orElseThrow(() -> { + String msg = + myFhirCtx.getLocalizer().getMessageSanitized(PartitionLookupSvcImpl.class, "invalidName", theName); + return new ResourceNotFoundException(msg); + }); } private PartitionEntity lookupPartitionById(@Nonnull Integer theId) { - return executeInTransaction(() -> myPartitionDao.findById(theId)) - .orElseThrow(() -> { - String msg = myFhirCtx.getLocalizer().getMessageSanitized(PartitionLookupSvcImpl.class, "unknownPartitionId", theId); - return new ResourceNotFoundException(msg); - }); + return executeInTransaction(() -> myPartitionDao.findById(theId)).orElseThrow(() -> { + String msg = myFhirCtx + .getLocalizer() + .getMessageSanitized(PartitionLookupSvcImpl.class, "unknownPartitionId", theId); + return new ResourceNotFoundException(msg); + }); } protected T executeInTransaction(ICallable theCallable) { @@ -281,7 +300,8 @@ public class PartitionLookupSvcImpl implements IPartitionLookupSvc { public static void validatePartitionIdSupplied(FhirContext theFhirContext, Integer thePartitionId) { if (thePartitionId == null) { - String msg = theFhirContext.getLocalizer().getMessageSanitized(PartitionLookupSvcImpl.class, "noIdSupplied"); + String msg = + theFhirContext.getLocalizer().getMessageSanitized(PartitionLookupSvcImpl.class, "noIdSupplied"); throw new InvalidRequestException(Msg.code(1314) + msg); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/PartitionManagementProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/PartitionManagementProvider.java index 714636f9c88..31edc63401d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/PartitionManagementProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/PartitionManagementProvider.java @@ -35,9 +35,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nonnull; - import java.util.List; +import javax.annotation.Nonnull; import static ca.uhn.fhir.jpa.partition.PartitionLookupSvcImpl.validatePartitionIdSupplied; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -58,6 +57,7 @@ public class PartitionManagementProvider { @Autowired private FhirContext myCtx; + @Autowired private IPartitionLookupSvc myPartitionLookupSvc; @@ -69,14 +69,28 @@ public class PartitionManagementProvider { */ @Operation(name = 
ProviderConstants.PARTITION_MANAGEMENT_CREATE_PARTITION) public IBaseParameters addPartition( - @ResourceParam IBaseParameters theRequest, - @OperationParam(name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_ID, min = 1, max = 1, typeName = "integer") IPrimitiveType thePartitionId, - @OperationParam(name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_NAME, min = 1, max = 1, typeName = "code") IPrimitiveType thePartitionName, - @OperationParam(name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_DESC, min = 0, max = 1, typeName = "string") IPrimitiveType thePartitionDescription, - RequestDetails theRequestDetails - ) { + @ResourceParam IBaseParameters theRequest, + @OperationParam( + name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_ID, + min = 1, + max = 1, + typeName = "integer") + IPrimitiveType thePartitionId, + @OperationParam( + name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_NAME, + min = 1, + max = 1, + typeName = "code") + IPrimitiveType thePartitionName, + @OperationParam( + name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_DESC, + min = 0, + max = 1, + typeName = "string") + IPrimitiveType thePartitionDescription, + RequestDetails theRequestDetails) { - if (thePartitionId == null && thePartitionName!= null && thePartitionName.hasValue()) { + if (thePartitionId == null && thePartitionName != null && thePartitionName.hasValue()) { thePartitionId = requestRandomPartitionId(thePartitionName); } @@ -95,7 +109,10 @@ public class PartitionManagementProvider { @Nonnull private IPrimitiveType requestRandomPartitionId(IPrimitiveType thePartitionName) { int unusedPartitionId = myPartitionLookupSvc.generateRandomUnusedPartitionId(); - ourLog.info("Request to create partition came in without a partition ID. Auto-assigning an available ID.[partition_id={}, partition_name={}]", unusedPartitionId, thePartitionName); + ourLog.info( + "Request to create partition came in without a partition ID. 
Auto-assigning an available ID.[partition_id={}, partition_name={}]", + unusedPartitionId, + thePartitionName); return new IntegerDt(unusedPartitionId); } @@ -107,9 +124,13 @@ public class PartitionManagementProvider { */ @Operation(name = ProviderConstants.PARTITION_MANAGEMENT_READ_PARTITION, idempotent = true) public IBaseParameters addPartition( - @ResourceParam IBaseParameters theRequest, - @OperationParam(name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_ID, min = 1, max = 1, typeName = "integer") IPrimitiveType thePartitionId - ) { + @ResourceParam IBaseParameters theRequest, + @OperationParam( + name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_ID, + min = 1, + max = 1, + typeName = "integer") + IPrimitiveType thePartitionId) { validatePartitionIdSupplied(myCtx, toValueOrNull(thePartitionId)); // Note: Input validation happens inside IPartitionLookupSvc @@ -126,11 +147,25 @@ public class PartitionManagementProvider { */ @Operation(name = ProviderConstants.PARTITION_MANAGEMENT_UPDATE_PARTITION) public IBaseParameters updatePartition( - @ResourceParam IBaseParameters theRequest, - @OperationParam(name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_ID, min = 1, max = 1, typeName = "integer") IPrimitiveType thePartitionId, - @OperationParam(name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_NAME, min = 1, max = 1, typeName = "code") IPrimitiveType thePartitionName, - @OperationParam(name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_DESC, min = 0, max = 1, typeName = "string") IPrimitiveType thePartitionDescription - ) { + @ResourceParam IBaseParameters theRequest, + @OperationParam( + name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_ID, + min = 1, + max = 1, + typeName = "integer") + IPrimitiveType thePartitionId, + @OperationParam( + name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_NAME, + min = 1, + max = 1, + typeName = "code") + IPrimitiveType thePartitionName, + @OperationParam( + name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_DESC, + min = 0, + max = 1, + typeName = "string") + IPrimitiveType thePartitionDescription) { validatePartitionIdSupplied(myCtx, toValueOrNull(thePartitionId)); PartitionEntity input = parseInput(thePartitionId, thePartitionName, thePartitionDescription); @@ -151,11 +186,15 @@ public class PartitionManagementProvider { */ @Operation(name = ProviderConstants.PARTITION_MANAGEMENT_DELETE_PARTITION) public IBaseParameters updatePartition( - @ResourceParam IBaseParameters theRequest, - @OperationParam(name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_ID, min = 1, max = 1, typeName = "integer") IPrimitiveType thePartitionId - ) { + @ResourceParam IBaseParameters theRequest, + @OperationParam( + name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_ID, + min = 1, + max = 1, + typeName = "integer") + IPrimitiveType thePartitionId) { validatePartitionIdSupplied(myCtx, toValueOrNull(thePartitionId)); - + myPartitionLookupSvc.deletePartition(thePartitionId.getValue()); IBaseParameters retVal = ParametersUtil.newInstance(myCtx); @@ -171,19 +210,20 @@ public class PartitionManagementProvider { * */ @Operation(name = ProviderConstants.PARTITION_MANAGEMENT_LIST_PARTITIONS, idempotent = true) - public IBaseParameters addPartitions( - @ResourceParam IBaseParameters theRequest - ) { + public IBaseParameters addPartitions(@ResourceParam IBaseParameters theRequest) { List output = myPartitionLookupSvc.listPartitions(); return prepareOutputList(output); } private IBaseParameters prepareOutput(PartitionEntity theOutput) { 
IBaseParameters retVal = ParametersUtil.newInstance(myCtx); - ParametersUtil.addParameterToParametersInteger(myCtx, retVal, ProviderConstants.PARTITION_MANAGEMENT_PARTITION_ID, theOutput.getId()); - ParametersUtil.addParameterToParametersCode(myCtx, retVal, ProviderConstants.PARTITION_MANAGEMENT_PARTITION_NAME, theOutput.getName()); + ParametersUtil.addParameterToParametersInteger( + myCtx, retVal, ProviderConstants.PARTITION_MANAGEMENT_PARTITION_ID, theOutput.getId()); + ParametersUtil.addParameterToParametersCode( + myCtx, retVal, ProviderConstants.PARTITION_MANAGEMENT_PARTITION_NAME, theOutput.getName()); if (isNotBlank(theOutput.getDescription())) { - ParametersUtil.addParameterToParametersString(myCtx, retVal, ProviderConstants.PARTITION_MANAGEMENT_PARTITION_DESC, theOutput.getDescription()); + ParametersUtil.addParameterToParametersString( + myCtx, retVal, ProviderConstants.PARTITION_MANAGEMENT_PARTITION_DESC, theOutput.getDescription()); } return retVal; } @@ -192,17 +232,44 @@ public class PartitionManagementProvider { IBaseParameters retVal = ParametersUtil.newInstance(myCtx); for (PartitionEntity partitionEntity : theOutput) { IBase resultPart = ParametersUtil.addParameterToParameters(myCtx, retVal, "partition"); - ParametersUtil.addPartInteger(myCtx, resultPart, ProviderConstants.PARTITION_MANAGEMENT_PARTITION_ID, partitionEntity.getId()); - ParametersUtil.addPartCode(myCtx, resultPart, ProviderConstants.PARTITION_MANAGEMENT_PARTITION_NAME, partitionEntity.getName()); + ParametersUtil.addPartInteger( + myCtx, resultPart, ProviderConstants.PARTITION_MANAGEMENT_PARTITION_ID, partitionEntity.getId()); + ParametersUtil.addPartCode( + myCtx, + resultPart, + ProviderConstants.PARTITION_MANAGEMENT_PARTITION_NAME, + partitionEntity.getName()); if (isNotBlank(partitionEntity.getDescription())) { - ParametersUtil.addPartString(myCtx, resultPart, ProviderConstants.PARTITION_MANAGEMENT_PARTITION_DESC, partitionEntity.getDescription()); + ParametersUtil.addPartString( + myCtx, + resultPart, + ProviderConstants.PARTITION_MANAGEMENT_PARTITION_DESC, + partitionEntity.getDescription()); } } return retVal; } @Nonnull - private PartitionEntity parseInput(@OperationParam(name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_ID, min = 1, max = 1, typeName = "integer") IPrimitiveType thePartitionId, @OperationParam(name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_NAME, min = 1, max = 1, typeName = "code") IPrimitiveType thePartitionName, @OperationParam(name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_DESC, min = 0, max = 1, typeName = "string") IPrimitiveType thePartitionDescription) { + private PartitionEntity parseInput( + @OperationParam( + name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_ID, + min = 1, + max = 1, + typeName = "integer") + IPrimitiveType thePartitionId, + @OperationParam( + name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_NAME, + min = 1, + max = 1, + typeName = "code") + IPrimitiveType thePartitionName, + @OperationParam( + name = ProviderConstants.PARTITION_MANAGEMENT_PARTITION_DESC, + min = 0, + max = 1, + typeName = "string") + IPrimitiveType thePartitionDescription) { PartitionEntity input = new PartitionEntity(); if (thePartitionId != null) { input.setId(thePartitionId.getValue()); @@ -215,5 +282,4 @@ public class PartitionManagementProvider { } return input; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java index a4526706684..542e04882c6 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/RequestPartitionHelperSvc.java @@ -49,16 +49,29 @@ public class RequestPartitionHelperSvc extends BaseRequestPartitionHelperSvc { try { partition = myPartitionConfigSvc.getPartitionById(id); } catch (IllegalArgumentException e) { - String msg = myFhirContext.getLocalizer().getMessage(BaseRequestPartitionHelperSvc.class, "unknownPartitionId", theRequestPartitionId.getPartitionIds().get(i)); + String msg = myFhirContext + .getLocalizer() + .getMessage( + BaseRequestPartitionHelperSvc.class, + "unknownPartitionId", + theRequestPartitionId.getPartitionIds().get(i)); throw new ResourceNotFoundException(Msg.code(1316) + msg); } } if (theRequestPartitionId.getPartitionNames() != null) { if (partition == null) { - Validate.isTrue(theRequestPartitionId.getPartitionIds().get(i) == null, "Partition %s must not have an ID", JpaConstants.DEFAULT_PARTITION_NAME); + Validate.isTrue( + theRequestPartitionId.getPartitionIds().get(i) == null, + "Partition %s must not have an ID", + JpaConstants.DEFAULT_PARTITION_NAME); } else { - Validate.isTrue(Objects.equals(theRequestPartitionId.getPartitionIds().get(i), partition.getId()), "Partition name %s does not match ID %n", theRequestPartitionId.getPartitionNames().get(i), theRequestPartitionId.getPartitionIds().get(i)); + Validate.isTrue( + Objects.equals( + theRequestPartitionId.getPartitionIds().get(i), partition.getId()), + "Partition name %s does not match ID %n", + theRequestPartitionId.getPartitionNames().get(i), + theRequestPartitionId.getPartitionIds().get(i)); } } else { if (names == null) { @@ -70,34 +83,48 @@ public class RequestPartitionHelperSvc extends BaseRequestPartitionHelperSvc { names.add(null); } } - } if (names != null) { - return RequestPartitionId.forPartitionIdsAndNames(names, theRequestPartitionId.getPartitionIds(), theRequestPartitionId.getPartitionDate()); + return RequestPartitionId.forPartitionIdsAndNames( + names, theRequestPartitionId.getPartitionIds(), theRequestPartitionId.getPartitionDate()); } return theRequestPartitionId; } @Override - protected RequestPartitionId validateAndNormalizePartitionNames(RequestPartitionId theRequestPartitionId) { + protected RequestPartitionId validateAndNormalizePartitionNames(RequestPartitionId theRequestPartitionId) { List ids = null; for (int i = 0; i < theRequestPartitionId.getPartitionNames().size(); i++) { PartitionEntity partition; try { - partition = myPartitionConfigSvc.getPartitionByName(theRequestPartitionId.getPartitionNames().get(i)); + partition = myPartitionConfigSvc.getPartitionByName( + theRequestPartitionId.getPartitionNames().get(i)); } catch (IllegalArgumentException e) { - String msg = myFhirContext.getLocalizer().getMessage(BaseRequestPartitionHelperSvc.class, "unknownPartitionName", theRequestPartitionId.getPartitionNames().get(i)); + String msg = myFhirContext + .getLocalizer() + .getMessage( + BaseRequestPartitionHelperSvc.class, + "unknownPartitionName", + theRequestPartitionId.getPartitionNames().get(i)); throw new ResourceNotFoundException(Msg.code(1317) + msg); } if (theRequestPartitionId.hasPartitionIds()) { if (partition == null) { - Validate.isTrue(theRequestPartitionId.getPartitionIds().get(i) == null, "Partition %s must not have an ID", 
JpaConstants.DEFAULT_PARTITION_NAME); + Validate.isTrue( + theRequestPartitionId.getPartitionIds().get(i) == null, + "Partition %s must not have an ID", + JpaConstants.DEFAULT_PARTITION_NAME); } else { - Validate.isTrue(Objects.equals(theRequestPartitionId.getPartitionIds().get(i), partition.getId()), "Partition name %s does not match ID %n", theRequestPartitionId.getPartitionNames().get(i), theRequestPartitionId.getPartitionIds().get(i)); + Validate.isTrue( + Objects.equals( + theRequestPartitionId.getPartitionIds().get(i), partition.getId()), + "Partition name %s does not match ID %n", + theRequestPartitionId.getPartitionNames().get(i), + theRequestPartitionId.getPartitionIds().get(i)); } } else { if (ids == null) { @@ -109,15 +136,13 @@ public class RequestPartitionHelperSvc extends BaseRequestPartitionHelperSvc { ids.add(null); } } - } if (ids != null) { - return RequestPartitionId.forPartitionIdsAndNames(theRequestPartitionId.getPartitionNames(), ids, theRequestPartitionId.getPartitionDate()); + return RequestPartitionId.forPartitionIdsAndNames( + theRequestPartitionId.getPartitionNames(), ids, theRequestPartitionId.getPartitionDate()); } return theRequestPartitionId; } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderCodeSystem.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderCodeSystem.java index 98b7f3d6b92..1929635de94 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderCodeSystem.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderCodeSystem.java @@ -40,10 +40,10 @@ import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.springframework.beans.factory.annotation.Autowired; -import javax.servlet.http.HttpServletRequest; import java.util.List; import java.util.Optional; import java.util.function.Supplier; +import javax.servlet.http.HttpServletRequest; import static ca.uhn.fhir.jpa.provider.ValueSetOperationProvider.toValidateCodeResult; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -57,22 +57,26 @@ public abstract class BaseJpaResourceProviderCodeSystem * $lookup operation */ @SuppressWarnings("unchecked") - @Operation(name = JpaConstants.OPERATION_LOOKUP, idempotent = true, returnParameters = { - @OperationParam(name = "name", typeName = "string", min = 1), - @OperationParam(name = "version", typeName = "string", min = 0), - @OperationParam(name = "display", typeName = "string", min = 1), - @OperationParam(name = "abstract", typeName = "boolean", min = 1), - }) + @Operation( + name = JpaConstants.OPERATION_LOOKUP, + idempotent = true, + returnParameters = { + @OperationParam(name = "name", typeName = "string", min = 1), + @OperationParam(name = "version", typeName = "string", min = 0), + @OperationParam(name = "display", typeName = "string", min = 1), + @OperationParam(name = "abstract", typeName = "boolean", min = 1), + }) public IBaseParameters lookup( - HttpServletRequest theServletRequest, - @OperationParam(name = "code", min = 0, max = 1, typeName = "code") IPrimitiveType theCode, - @OperationParam(name = "system", min = 0, max = 1, typeName = "uri") IPrimitiveType theSystem, - @OperationParam(name = "coding", min = 0, max = 1, typeName = "Coding") IBaseCoding theCoding, - @OperationParam(name = "version", min = 0, max = 1, typeName = "string") IPrimitiveType theVersion, - @OperationParam(name = 
"displayLanguage", min = 0, max = 1, typeName = "code") IPrimitiveType theDisplayLanguage, - @OperationParam(name = "property", min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "code") List> theProperties, - RequestDetails theRequestDetails - ) { + HttpServletRequest theServletRequest, + @OperationParam(name = "code", min = 0, max = 1, typeName = "code") IPrimitiveType theCode, + @OperationParam(name = "system", min = 0, max = 1, typeName = "uri") IPrimitiveType theSystem, + @OperationParam(name = "coding", min = 0, max = 1, typeName = "Coding") IBaseCoding theCoding, + @OperationParam(name = "version", min = 0, max = 1, typeName = "string") IPrimitiveType theVersion, + @OperationParam(name = "displayLanguage", min = 0, max = 1, typeName = "code") + IPrimitiveType theDisplayLanguage, + @OperationParam(name = "property", min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "code") + List> theProperties, + RequestDetails theRequestDetails) { startRequest(theServletRequest); try { @@ -87,23 +91,24 @@ public abstract class BaseJpaResourceProviderCodeSystem } } - /** * $subsumes operation */ - @Operation(name = JpaConstants.OPERATION_SUBSUMES, idempotent = true, returnParameters = { - @OperationParam(name = "outcome", typeName = "code", min = 1), - }) + @Operation( + name = JpaConstants.OPERATION_SUBSUMES, + idempotent = true, + returnParameters = { + @OperationParam(name = "outcome", typeName = "code", min = 1), + }) public IBaseParameters subsumes( - HttpServletRequest theServletRequest, - @OperationParam(name = "codeA", min = 0, max = 1, typeName = "code") IPrimitiveType theCodeA, - @OperationParam(name = "codeB", min = 0, max = 1, typeName = "code") IPrimitiveType theCodeB, - @OperationParam(name = "system", min = 0, max = 1, typeName = "uri") IPrimitiveType theSystem, - @OperationParam(name = "codingA", min = 0, max = 1, typeName = "Coding") IBaseCoding theCodingA, - @OperationParam(name = "codingB", min = 0, max = 1, typeName = "Coding") IBaseCoding theCodingB, - @OperationParam(name = "version", min = 0, max = 1, typeName = "string") IPrimitiveType theVersion, - RequestDetails theRequestDetails - ) { + HttpServletRequest theServletRequest, + @OperationParam(name = "codeA", min = 0, max = 1, typeName = "code") IPrimitiveType theCodeA, + @OperationParam(name = "codeB", min = 0, max = 1, typeName = "code") IPrimitiveType theCodeB, + @OperationParam(name = "system", min = 0, max = 1, typeName = "uri") IPrimitiveType theSystem, + @OperationParam(name = "codingA", min = 0, max = 1, typeName = "Coding") IBaseCoding theCodingA, + @OperationParam(name = "codingB", min = 0, max = 1, typeName = "Coding") IBaseCoding theCodingB, + @OperationParam(name = "version", min = 0, max = 1, typeName = "string") IPrimitiveType theVersion, + RequestDetails theRequestDetails) { startRequest(theServletRequest); try { @@ -127,63 +132,85 @@ public abstract class BaseJpaResourceProviderCodeSystem * $validate-code operation */ @SuppressWarnings("unchecked") - @Operation(name = JpaConstants.OPERATION_VALIDATE_CODE, idempotent = true, returnParameters = { - @OperationParam(name = "result", typeName = "boolean", min = 1), - @OperationParam(name = "message", typeName = "string"), - @OperationParam(name = "display", typeName = "string") - }) + @Operation( + name = JpaConstants.OPERATION_VALIDATE_CODE, + idempotent = true, + returnParameters = { + @OperationParam(name = "result", typeName = "boolean", min = 1), + @OperationParam(name = "message", typeName = "string"), + @OperationParam(name = "display", typeName 
= "string") + }) public IBaseParameters validateCode( - HttpServletRequest theServletRequest, - @IdParam(optional = true) IIdType theId, - @OperationParam(name = "url", min = 0, max = 1, typeName = "uri") IPrimitiveType theCodeSystemUrl, - @OperationParam(name = "version", min = 0, max = 1, typeName = "string") IPrimitiveType theVersion, - @OperationParam(name = "code", min = 0, max = 1, typeName = "code") IPrimitiveType theCode, - @OperationParam(name = "display", min = 0, max = 1, typeName = "string") IPrimitiveType theDisplay, - @OperationParam(name = "coding", min = 0, max = 1, typeName = "Coding") IBaseCoding theCoding, - @OperationParam(name = "codeableConcept", min = 0, max = 1, typeName = "CodeableConcept") IBaseDatatype theCodeableConcept, - RequestDetails theRequestDetails - ) { + HttpServletRequest theServletRequest, + @IdParam(optional = true) IIdType theId, + @OperationParam(name = "url", min = 0, max = 1, typeName = "uri") IPrimitiveType theCodeSystemUrl, + @OperationParam(name = "version", min = 0, max = 1, typeName = "string") IPrimitiveType theVersion, + @OperationParam(name = "code", min = 0, max = 1, typeName = "code") IPrimitiveType theCode, + @OperationParam(name = "display", min = 0, max = 1, typeName = "string") IPrimitiveType theDisplay, + @OperationParam(name = "coding", min = 0, max = 1, typeName = "Coding") IBaseCoding theCoding, + @OperationParam(name = "codeableConcept", min = 0, max = 1, typeName = "CodeableConcept") + IBaseDatatype theCodeableConcept, + RequestDetails theRequestDetails) { CodeValidationResult result = null; startRequest(theServletRequest); try { - // TODO: JA why not just always just the chain here? and we can then get rid of the corresponding DAO method entirely + // TODO: JA why not just always just the chain here? and we can then get rid of the corresponding DAO method + // entirely // If a Remote Terminology Server has been configured, use it if (myValidationSupportChain.isRemoteTerminologyServiceConfigured()) { - String codeSystemUrl = (theCodeSystemUrl != null && theCodeSystemUrl.hasValue()) ? - theCodeSystemUrl.getValueAsString() : null; + String codeSystemUrl = (theCodeSystemUrl != null && theCodeSystemUrl.hasValue()) + ? theCodeSystemUrl.getValueAsString() + : null; if (theCoding != null) { if (isNotBlank(theCoding.getSystem())) { if (codeSystemUrl != null && !codeSystemUrl.equalsIgnoreCase(theCoding.getSystem())) { - throw new InvalidRequestException(Msg.code(1160) + "Coding.system '" + theCoding.getSystem() + "' does not equal param url '" + theCodeSystemUrl + "'. Unable to validate-code."); + throw new InvalidRequestException(Msg.code(1160) + "Coding.system '" + theCoding.getSystem() + + "' does not equal param url '" + theCodeSystemUrl + + "'. 
Unable to validate-code."); } codeSystemUrl = theCoding.getSystem(); String code = theCoding.getCode(); String display = theCoding.getDisplay(); result = validateCodeWithTerminologyService(codeSystemUrl, code, display) - .orElseGet(supplyUnableToValidateResult(codeSystemUrl, code)); + .orElseGet(supplyUnableToValidateResult(codeSystemUrl, code)); } } } else { // Otherwise, use the local DAO layer to validate the code IFhirResourceDaoCodeSystem dao = (IFhirResourceDaoCodeSystem) getDao(); - result = dao.validateCode(theId, theCodeSystemUrl, theVersion, theCode, theDisplay, theCoding, theCodeableConcept, theRequestDetails); + result = dao.validateCode( + theId, + theCodeSystemUrl, + theVersion, + theCode, + theDisplay, + theCoding, + theCodeableConcept, + theRequestDetails); } return toValidateCodeResult(getContext(), result); } finally { endRequest(theServletRequest); } - } - private Optional validateCodeWithTerminologyService(String theCodeSystemUrl, String theCode, String theDisplay) { - return Optional.ofNullable(myValidationSupportChain.validateCode(new ValidationSupportContext(myValidationSupportChain), - new ConceptValidationOptions(), theCodeSystemUrl, theCode, theDisplay, null)); + private Optional validateCodeWithTerminologyService( + String theCodeSystemUrl, String theCode, String theDisplay) { + return Optional.ofNullable(myValidationSupportChain.validateCode( + new ValidationSupportContext(myValidationSupportChain), + new ConceptValidationOptions(), + theCodeSystemUrl, + theCode, + theDisplay, + null)); } private Supplier supplyUnableToValidateResult(String theCodeSystemUrl, String theCode) { - return () -> new CodeValidationResult().setMessage("Terminology service was unable to provide validation for " + theCodeSystemUrl + "#" + theCode); + return () -> new CodeValidationResult() + .setMessage( + "Terminology service was unable to provide validation for " + theCodeSystemUrl + "#" + theCode); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderComposition.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderComposition.java index 314a22e911b..42eb7b187ce 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderComposition.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderComposition.java @@ -37,7 +37,6 @@ import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.hl7.fhir.r5.model.Composition; - public abstract class BaseJpaResourceProviderComposition extends BaseJpaResourceProvider { /** @@ -45,33 +44,37 @@ public abstract class BaseJpaResourceProviderComposition theCount, - - @Description(formalDefinition = "Results from this method are returned across multiple pages. This parameter controls the offset when fetching a page.") - @OperationParam(name = Constants.PARAM_OFFSET, typeName = "unsignedInt") - IPrimitiveType theOffset, - - @Description(shortDefinition = "Only return resources which were last updated as specified by the given range") - @OperationParam(name = Constants.PARAM_LASTUPDATED, min = 0, max = 1) - DateRangeParam theLastUpdated, - - @Sort - SortSpec theSortSpec, - - RequestDetails theRequestDetails - ) { + javax.servlet.http.HttpServletRequest theServletRequest, + @IdParam IIdType theId, + @Description( + formalDefinition = + "Results from this method are returned across multiple pages. 
This parameter controls the size of those pages.") + @OperationParam(name = Constants.PARAM_COUNT, typeName = "unsignedInt") + IPrimitiveType theCount, + @Description( + formalDefinition = + "Results from this method are returned across multiple pages. This parameter controls the offset when fetching a page.") + @OperationParam(name = Constants.PARAM_OFFSET, typeName = "unsignedInt") + IPrimitiveType theOffset, + @Description( + shortDefinition = + "Only return resources which were last updated as specified by the given range") + @OperationParam(name = Constants.PARAM_LASTUPDATED, min = 0, max = 1) + DateRangeParam theLastUpdated, + @Sort SortSpec theSortSpec, + RequestDetails theRequestDetails) { startRequest(theServletRequest); try { - IBundleProvider bundleProvider = ((IFhirResourceDaoComposition) getDao()).getDocumentForComposition(theServletRequest, theId, theCount, theOffset, theLastUpdated, theSortSpec, theRequestDetails); + IBundleProvider bundleProvider = ((IFhirResourceDaoComposition) getDao()) + .getDocumentForComposition( + theServletRequest, + theId, + theCount, + theOffset, + theLastUpdated, + theSortSpec, + theRequestDetails); return bundleProvider; } finally { endRequest(theServletRequest); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderConceptMap.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderConceptMap.java index 9d1e1c4e3a2..304f00ba48f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderConceptMap.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderConceptMap.java @@ -41,7 +41,6 @@ import org.hl7.fhir.r4.model.CodeableConcept; import org.hl7.fhir.r4.model.Coding; import org.hl7.fhir.r4.model.ConceptMap; import org.hl7.fhir.r4.model.Parameters; -import org.hl7.fhir.r4.model.StringType; import org.springframework.beans.factory.annotation.Autowired; import javax.servlet.http.HttpServletRequest; @@ -55,47 +54,57 @@ public abstract class BaseJpaResourceProviderConceptMap @Autowired private VersionCanonicalizer myVersionCanonicalizer; - @Operation(name = JpaConstants.OPERATION_TRANSLATE, idempotent = true, returnParameters = { - @OperationParam(name = "result", typeName = "boolean", min = 1, max = 1), - @OperationParam(name = "message", typeName = "string", min = 0, max = 1), - }) + @Operation( + name = JpaConstants.OPERATION_TRANSLATE, + idempotent = true, + returnParameters = { + @OperationParam(name = "result", typeName = "boolean", min = 1, max = 1), + @OperationParam(name = "message", typeName = "string", min = 0, max = 1), + }) public IBaseParameters translate( - HttpServletRequest theServletRequest, - @IdParam(optional = true) IIdType theId, - @OperationParam(name = "url", min = 0, max = 1, typeName = "uri") IPrimitiveType theUrl, - @OperationParam(name = "conceptMapVersion", min = 0, max = 1, typeName = "string") IPrimitiveType theConceptMapVersion, - @OperationParam(name = "code", min = 0, max = 1, typeName = "code") IPrimitiveType theSourceCode, - @OperationParam(name = "system", min = 0, max = 1, typeName = "uri") IPrimitiveType theSourceCodeSystem, - @OperationParam(name = "version", min = 0, max = 1, typeName = "string") IPrimitiveType theSourceCodeSystemVersion, - @OperationParam(name = "source", min = 0, max = 1, typeName = "uri") IPrimitiveType theSourceValueSet, - @OperationParam(name = "coding", min = 0, max = 1, typeName = "Coding") IBaseCoding theSourceCoding, - 
@OperationParam(name = "codeableConcept", min = 0, max = 1, typeName = "CodeableConcept") IBaseDatatype theSourceCodeableConcept, - @OperationParam(name = "target", min = 0, max = 1, typeName = "uri") IPrimitiveType theTargetValueSet, - @OperationParam(name = "targetsystem", min = 0, max = 1, typeName = "uri") IPrimitiveType theTargetCodeSystem, - @OperationParam(name = "reverse", min = 0, max = 1, typeName = "boolean") IPrimitiveType theReverse, - RequestDetails theRequestDetails - ) { + HttpServletRequest theServletRequest, + @IdParam(optional = true) IIdType theId, + @OperationParam(name = "url", min = 0, max = 1, typeName = "uri") IPrimitiveType theUrl, + @OperationParam(name = "conceptMapVersion", min = 0, max = 1, typeName = "string") + IPrimitiveType theConceptMapVersion, + @OperationParam(name = "code", min = 0, max = 1, typeName = "code") IPrimitiveType theSourceCode, + @OperationParam(name = "system", min = 0, max = 1, typeName = "uri") + IPrimitiveType theSourceCodeSystem, + @OperationParam(name = "version", min = 0, max = 1, typeName = "string") + IPrimitiveType theSourceCodeSystemVersion, + @OperationParam(name = "source", min = 0, max = 1, typeName = "uri") + IPrimitiveType theSourceValueSet, + @OperationParam(name = "coding", min = 0, max = 1, typeName = "Coding") IBaseCoding theSourceCoding, + @OperationParam(name = "codeableConcept", min = 0, max = 1, typeName = "CodeableConcept") + IBaseDatatype theSourceCodeableConcept, + @OperationParam(name = "target", min = 0, max = 1, typeName = "uri") + IPrimitiveType theTargetValueSet, + @OperationParam(name = "targetsystem", min = 0, max = 1, typeName = "uri") + IPrimitiveType theTargetCodeSystem, + @OperationParam(name = "reverse", min = 0, max = 1, typeName = "boolean") + IPrimitiveType theReverse, + RequestDetails theRequestDetails) { Coding sourceCoding = myVersionCanonicalizer.codingToCanonical(theSourceCoding); - CodeableConcept sourceCodeableConcept = myVersionCanonicalizer.codeableConceptToCanonical(theSourceCodeableConcept); + CodeableConcept sourceCodeableConcept = + myVersionCanonicalizer.codeableConceptToCanonical(theSourceCodeableConcept); - boolean haveSourceCode = theSourceCode != null - && isNotBlank(theSourceCode.getValue()); - boolean haveSourceCodeSystem = theSourceCodeSystem != null - && theSourceCodeSystem.hasValue(); - boolean haveSourceCodeSystemVersion = theSourceCodeSystemVersion != null - && theSourceCodeSystemVersion.hasValue(); - boolean haveSourceCoding = sourceCoding != null - && sourceCoding.hasCode(); + boolean haveSourceCode = theSourceCode != null && isNotBlank(theSourceCode.getValue()); + boolean haveSourceCodeSystem = theSourceCodeSystem != null && theSourceCodeSystem.hasValue(); + boolean haveSourceCodeSystemVersion = + theSourceCodeSystemVersion != null && theSourceCodeSystemVersion.hasValue(); + boolean haveSourceCoding = sourceCoding != null && sourceCoding.hasCode(); boolean haveSourceCodeableConcept = sourceCodeableConcept != null - && sourceCodeableConcept.hasCoding() - && sourceCodeableConcept.getCodingFirstRep().hasCode(); + && sourceCodeableConcept.hasCoding() + && sourceCodeableConcept.getCodingFirstRep().hasCode(); boolean haveReverse = theReverse != null; boolean haveId = theId != null && theId.hasIdPart(); // if ((!haveSourceCode && !haveSourceCoding && !haveSourceCodeableConcept) - || moreThanOneTrue(haveSourceCode, haveSourceCoding, haveSourceCodeableConcept)) { - throw new InvalidRequestException(Msg.code(1154) + "One (and only one) of the in parameters (code, coding, 
codeableConcept) must be provided, to identify the code that is to be translated."); + || moreThanOneTrue(haveSourceCode, haveSourceCoding, haveSourceCodeableConcept)) { + throw new InvalidRequestException( + Msg.code(1154) + + "One (and only one) of the in parameters (code, coding, codeableConcept) must be provided, to identify the code that is to be translated."); } TranslationRequest translationRequest = new TranslationRequest(); @@ -106,11 +115,17 @@ public abstract class BaseJpaResourceProviderConceptMap translationRequest.getCodeableConcept().addCoding().setCode(toStringValue(theSourceCode)); if (haveSourceCodeSystem) { - translationRequest.getCodeableConcept().getCodingFirstRep().setSystem(toStringValue(theSourceCodeSystem)); + translationRequest + .getCodeableConcept() + .getCodingFirstRep() + .setSystem(toStringValue(theSourceCodeSystem)); } if (haveSourceCodeSystemVersion) { - translationRequest.getCodeableConcept().getCodingFirstRep().setVersion(toStringValue(theSourceCodeSystemVersion)); + translationRequest + .getCodeableConcept() + .getCodingFirstRep() + .setVersion(toStringValue(theSourceCodeSystemVersion)); } } else if (haveSourceCoding) { translationRequest.getCodeableConcept().addCoding(sourceCoding); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderEncounter.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderEncounter.java index c10e5717e93..4a834d1f8c4 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderEncounter.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderEncounter.java @@ -42,31 +42,30 @@ public abstract class BaseJpaResourceProviderEncounter */ @Operation(name = JpaConstants.OPERATION_EVERYTHING, idempotent = true, bundleType = BundleTypeEnum.SEARCHSET) public IBundleProvider EncounterInstanceEverything( - - javax.servlet.http.HttpServletRequest theServletRequest, - - @IdParam - IIdType theId, - - @Description(formalDefinition = "Results from this method are returned across multiple pages. This parameter controls the size of those pages.") - @OperationParam(name = Constants.PARAM_COUNT, typeName = "unsignedInt") - IPrimitiveType theCount, - - @Description(formalDefinition = "Results from this method are returned across multiple pages. This parameter controls the offset when fetching a page.") - @OperationParam(name = Constants.PARAM_OFFSET, typeName = "unsignedInt") - IPrimitiveType theOffset, - - @Description(shortDefinition = "Only return resources which were last updated as specified by the given range") - @OperationParam(name = Constants.PARAM_LASTUPDATED, min = 0, max = 1) - DateRangeParam theLastUpdated, - - @Sort - SortSpec theSortSpec - ) { + javax.servlet.http.HttpServletRequest theServletRequest, + @IdParam IIdType theId, + @Description( + formalDefinition = + "Results from this method are returned across multiple pages. This parameter controls the size of those pages.") + @OperationParam(name = Constants.PARAM_COUNT, typeName = "unsignedInt") + IPrimitiveType theCount, + @Description( + formalDefinition = + "Results from this method are returned across multiple pages. 
This parameter controls the offset when fetching a page.") + @OperationParam(name = Constants.PARAM_OFFSET, typeName = "unsignedInt") + IPrimitiveType theOffset, + @Description( + shortDefinition = + "Only return resources which were last updated as specified by the given range") + @OperationParam(name = Constants.PARAM_LASTUPDATED, min = 0, max = 1) + DateRangeParam theLastUpdated, + @Sort SortSpec theSortSpec) { startRequest(theServletRequest); try { - return ((IFhirResourceDaoEncounter) getDao()).encounterInstanceEverything(theServletRequest, theId, theCount, theOffset, theLastUpdated, theSortSpec); + return ((IFhirResourceDaoEncounter) getDao()) + .encounterInstanceEverything( + theServletRequest, theId, theCount, theOffset, theLastUpdated, theSortSpec); } finally { endRequest(theServletRequest); } @@ -77,32 +76,30 @@ public abstract class BaseJpaResourceProviderEncounter */ @Operation(name = JpaConstants.OPERATION_EVERYTHING, idempotent = true, bundleType = BundleTypeEnum.SEARCHSET) public IBundleProvider EncounterTypeEverything( - - javax.servlet.http.HttpServletRequest theServletRequest, - - @Description(formalDefinition = "Results from this method are returned across multiple pages. This parameter controls the size of those pages.") - @OperationParam(name = Constants.PARAM_COUNT, typeName = "unsignedInt") - IPrimitiveType theCount, - - @Description(formalDefinition = "Results from this method are returned across multiple pages. This parameter controls the offset when fetching a page.") - @OperationParam(name = Constants.PARAM_OFFSET, typeName = "unsignedInt") - IPrimitiveType theOffset, - - @Description(shortDefinition = "Only return resources which were last updated as specified by the given range") - @OperationParam(name = Constants.PARAM_LASTUPDATED, min = 0, max = 1) - DateRangeParam theLastUpdated, - - @Sort - SortSpec theSortSpec - ) { + javax.servlet.http.HttpServletRequest theServletRequest, + @Description( + formalDefinition = + "Results from this method are returned across multiple pages. This parameter controls the size of those pages.") + @OperationParam(name = Constants.PARAM_COUNT, typeName = "unsignedInt") + IPrimitiveType theCount, + @Description( + formalDefinition = + "Results from this method are returned across multiple pages. 
This parameter controls the offset when fetching a page.") + @OperationParam(name = Constants.PARAM_OFFSET, typeName = "unsignedInt") + IPrimitiveType theOffset, + @Description( + shortDefinition = + "Only return resources which were last updated as specified by the given range") + @OperationParam(name = Constants.PARAM_LASTUPDATED, min = 0, max = 1) + DateRangeParam theLastUpdated, + @Sort SortSpec theSortSpec) { startRequest(theServletRequest); try { - return ((IFhirResourceDaoEncounter) getDao()).encounterTypeEverything(theServletRequest, theCount, theOffset, theLastUpdated, theSortSpec); + return ((IFhirResourceDaoEncounter) getDao()) + .encounterTypeEverything(theServletRequest, theCount, theOffset, theLastUpdated, theSortSpec); } finally { endRequest(theServletRequest); } - } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderEncounterDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderEncounterDstu2.java index f8cb14fadee..ef78d56c9e3 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderEncounterDstu2.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderEncounterDstu2.java @@ -38,68 +38,66 @@ public abstract class BaseJpaResourceProviderEncounterDstu2 extends BaseJpaResou /** * Encounter/123/$everything */ - @Operation(name = JpaConstants.OPERATION_EVERYTHING, idempotent = true, bundleType=BundleTypeEnum.SEARCHSET) + @Operation(name = JpaConstants.OPERATION_EVERYTHING, idempotent = true, bundleType = BundleTypeEnum.SEARCHSET) public IBundleProvider EncounterInstanceEverything( - javax.servlet.http.HttpServletRequest theServletRequest, - - @IdParam - ca.uhn.fhir.model.primitive.IdDt theId, - - @Description(formalDefinition="Results from this method are returned across multiple pages. This parameter controls the size of those pages.") - @OperationParam(name = Constants.PARAM_COUNT) - ca.uhn.fhir.model.primitive.UnsignedIntDt theCount, - - @Description(formalDefinition="Results from this method are returned across multiple pages. This parameter controls the offset when fetching a page.") - @OperationParam(name = Constants.PARAM_OFFSET) - ca.uhn.fhir.model.primitive.UnsignedIntDt theOffset, - - @Description(shortDefinition="Only return resources which were last updated as specified by the given range") - @OperationParam(name = Constants.PARAM_LASTUPDATED, min=0, max=1) - DateRangeParam theLastUpdated, - - @Sort - SortSpec theSortSpec - ) { + @IdParam ca.uhn.fhir.model.primitive.IdDt theId, + @Description( + formalDefinition = + "Results from this method are returned across multiple pages. This parameter controls the size of those pages.") + @OperationParam(name = Constants.PARAM_COUNT) + ca.uhn.fhir.model.primitive.UnsignedIntDt theCount, + @Description( + formalDefinition = + "Results from this method are returned across multiple pages. 
This parameter controls the offset when fetching a page.") + @OperationParam(name = Constants.PARAM_OFFSET) + ca.uhn.fhir.model.primitive.UnsignedIntDt theOffset, + @Description( + shortDefinition = + "Only return resources which were last updated as specified by the given range") + @OperationParam(name = Constants.PARAM_LASTUPDATED, min = 0, max = 1) + DateRangeParam theLastUpdated, + @Sort SortSpec theSortSpec) { startRequest(theServletRequest); try { - return ((IFhirResourceDaoEncounter)getDao()).encounterInstanceEverything(theServletRequest, theId, theCount, theOffset, theLastUpdated, theSortSpec); + return ((IFhirResourceDaoEncounter) getDao()) + .encounterInstanceEverything( + theServletRequest, theId, theCount, theOffset, theLastUpdated, theSortSpec); } finally { endRequest(theServletRequest); - }} - - /** - * /Encounter/$everything - */ - @Operation(name = JpaConstants.OPERATION_EVERYTHING, idempotent = true, bundleType=BundleTypeEnum.SEARCHSET) - public IBundleProvider EncounterTypeEverything( - - javax.servlet.http.HttpServletRequest theServletRequest, - - @Description(formalDefinition="Results from this method are returned across multiple pages. This parameter controls the size of those pages.") - @OperationParam(name = Constants.PARAM_COUNT) - ca.uhn.fhir.model.primitive.UnsignedIntDt theCount, - - @Description(formalDefinition="Results from this method are returned across multiple pages. This parameter controls the offset when fetching a page.") - @OperationParam(name = Constants.PARAM_OFFSET) - ca.uhn.fhir.model.primitive.UnsignedIntDt theOffset, - - @Description(shortDefinition="Only return resources which were last updated as specified by the given range") - @OperationParam(name = Constants.PARAM_LASTUPDATED, min=0, max=1) - DateRangeParam theLastUpdated, - - @Sort - SortSpec theSortSpec - ) { - - startRequest(theServletRequest); - try { - return ((IFhirResourceDaoEncounter)getDao()).encounterTypeEverything(theServletRequest, theCount, theOffset,theLastUpdated, theSortSpec); - } finally { - endRequest(theServletRequest); - } - + } } + /** + * /Encounter/$everything + */ + @Operation(name = JpaConstants.OPERATION_EVERYTHING, idempotent = true, bundleType = BundleTypeEnum.SEARCHSET) + public IBundleProvider EncounterTypeEverything( + javax.servlet.http.HttpServletRequest theServletRequest, + @Description( + formalDefinition = + "Results from this method are returned across multiple pages. This parameter controls the size of those pages.") + @OperationParam(name = Constants.PARAM_COUNT) + ca.uhn.fhir.model.primitive.UnsignedIntDt theCount, + @Description( + formalDefinition = + "Results from this method are returned across multiple pages. 
This parameter controls the offset when fetching a page.") + @OperationParam(name = Constants.PARAM_OFFSET) + ca.uhn.fhir.model.primitive.UnsignedIntDt theOffset, + @Description( + shortDefinition = + "Only return resources which were last updated as specified by the given range") + @OperationParam(name = Constants.PARAM_LASTUPDATED, min = 0, max = 1) + DateRangeParam theLastUpdated, + @Sort SortSpec theSortSpec) { + + startRequest(theServletRequest); + try { + return ((IFhirResourceDaoEncounter) getDao()) + .encounterTypeEverything(theServletRequest, theCount, theOffset, theLastUpdated, theSortSpec); + } finally { + endRequest(theServletRequest); + } + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderObservation.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderObservation.java index 55837d9b8a3..da112acefe2 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderObservation.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderObservation.java @@ -45,43 +45,31 @@ public abstract class BaseJpaResourceProviderObservation theCount, - - @Description(shortDefinition="The classification of the type of observation") - @OperationParam(name="category") - TokenAndListParam theCategory, - - @Description(shortDefinition="The code of the observation type") - @OperationParam(name="code") - TokenAndListParam theCode, - - @Description(shortDefinition="The effective date of the observation") - @OperationParam(name="date") - DateAndListParam theDate, - - @Description(shortDefinition="The subject that the observation is about (if patient)") - @OperationParam(name="patient") - ReferenceAndListParam thePatient, - - @Description(shortDefinition="The subject that the observation is about") - @OperationParam(name="subject" ) - ReferenceAndListParam theSubject, - - @Description(shortDefinition="The maximum number of observations to return for each observation code") - @OperationParam(name = "max", typeName = "integer", min = 0, max = 1) - IPrimitiveType theMax, - - @RawParam - Map> theAdditionalRawParams - ) { + javax.servlet.http.HttpServletRequest theServletRequest, + javax.servlet.http.HttpServletResponse theServletResponse, + ca.uhn.fhir.rest.api.server.RequestDetails theRequestDetails, + @Description( + formalDefinition = + "Results from this method are returned across multiple pages. 
This parameter controls the size of those pages.") + @OperationParam(name = Constants.PARAM_COUNT, typeName = "unsignedInt") + IPrimitiveType theCount, + @Description(shortDefinition = "The classification of the type of observation") + @OperationParam(name = "category") + TokenAndListParam theCategory, + @Description(shortDefinition = "The code of the observation type") @OperationParam(name = "code") + TokenAndListParam theCode, + @Description(shortDefinition = "The effective date of the observation") @OperationParam(name = "date") + DateAndListParam theDate, + @Description(shortDefinition = "The subject that the observation is about (if patient)") + @OperationParam(name = "patient") + ReferenceAndListParam thePatient, + @Description(shortDefinition = "The subject that the observation is about") + @OperationParam(name = "subject") + ReferenceAndListParam theSubject, + @Description(shortDefinition = "The maximum number of observations to return for each observation code") + @OperationParam(name = "max", typeName = "integer", min = 0, max = 1) + IPrimitiveType theMax, + @RawParam Map> theAdditionalRawParams) { startRequest(theServletRequest); try { SearchParameterMap paramMap = new SearchParameterMap(); @@ -103,10 +91,10 @@ public abstract class BaseJpaResourceProviderObservation) getDao()).observationsLastN(paramMap, theRequestDetails, theServletResponse); + return ((IFhirResourceDaoObservation) getDao()) + .observationsLastN(paramMap, theRequestDetails, theServletResponse); } finally { endRequest(theServletRequest); } } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderPatient.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderPatient.java index 243adcba978..491923f3d3e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderPatient.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderPatient.java @@ -53,47 +53,67 @@ public abstract class BaseJpaResourceProviderPatient ex /** * Patient/123/$everything */ - @Operation(name = JpaConstants.OPERATION_EVERYTHING, canonicalUrl = "http://hl7.org/fhir/OperationDefinition/Patient-everything", idempotent = true, bundleType = BundleTypeEnum.SEARCHSET) + @Operation( + name = JpaConstants.OPERATION_EVERYTHING, + canonicalUrl = "http://hl7.org/fhir/OperationDefinition/Patient-everything", + idempotent = true, + bundleType = BundleTypeEnum.SEARCHSET) public IBundleProvider patientInstanceEverything( - - javax.servlet.http.HttpServletRequest theServletRequest, - - @IdParam - IIdType theId, - - @Description(shortDefinition = "Results from this method are returned across multiple pages. This parameter controls the size of those pages.") - @OperationParam(name = Constants.PARAM_COUNT, typeName = "unsignedInt") - IPrimitiveType theCount, - - @Description(shortDefinition = "Results from this method are returned across multiple pages. 
This parameter controls the offset when fetching a page.") - @OperationParam(name = Constants.PARAM_OFFSET, typeName = "unsignedInt") - IPrimitiveType theOffset, - - @Description(shortDefinition = "Only return resources which were last updated as specified by the given range") - @OperationParam(name = Constants.PARAM_LASTUPDATED, min = 0, max = 1) - DateRangeParam theLastUpdated, - - @Description(shortDefinition = "Filter the resources to return only resources matching the given _content filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources linked to by the matched resources)") - @OperationParam(name = Constants.PARAM_CONTENT, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") - List> theContent, - - @Description(shortDefinition = "Filter the resources to return only resources matching the given _text filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources linked to by the matched resources)") - @OperationParam(name = Constants.PARAM_TEXT, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") - List> theNarrative, - - @Description(shortDefinition = "Filter the resources to return only resources matching the given _filter filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources linked to by the matched resources)") - @OperationParam(name = Constants.PARAM_FILTER, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") - List> theFilter, - - @Description(shortDefinition = "Filter the resources to return only resources matching the given _type filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources linked to by the matched resources)") - @OperationParam(name = Constants.PARAM_TYPE, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") - List> theTypes, - - @Sort - SortSpec theSortSpec, - - RequestDetails theRequestDetails - ) { + javax.servlet.http.HttpServletRequest theServletRequest, + @IdParam IIdType theId, + @Description( + shortDefinition = + "Results from this method are returned across multiple pages. This parameter controls the size of those pages.") + @OperationParam(name = Constants.PARAM_COUNT, typeName = "unsignedInt") + IPrimitiveType theCount, + @Description( + shortDefinition = + "Results from this method are returned across multiple pages. 
This parameter controls the offset when fetching a page.") + @OperationParam(name = Constants.PARAM_OFFSET, typeName = "unsignedInt") + IPrimitiveType theOffset, + @Description( + shortDefinition = + "Only return resources which were last updated as specified by the given range") + @OperationParam(name = Constants.PARAM_LASTUPDATED, min = 0, max = 1) + DateRangeParam theLastUpdated, + @Description( + shortDefinition = + "Filter the resources to return only resources matching the given _content filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources linked to by the matched resources)") + @OperationParam( + name = Constants.PARAM_CONTENT, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "string") + List> theContent, + @Description( + shortDefinition = + "Filter the resources to return only resources matching the given _text filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources linked to by the matched resources)") + @OperationParam( + name = Constants.PARAM_TEXT, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "string") + List> theNarrative, + @Description( + shortDefinition = + "Filter the resources to return only resources matching the given _filter filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources linked to by the matched resources)") + @OperationParam( + name = Constants.PARAM_FILTER, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "string") + List> theFilter, + @Description( + shortDefinition = + "Filter the resources to return only resources matching the given _type filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources linked to by the matched resources)") + @OperationParam( + name = Constants.PARAM_TYPE, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "string") + List> theTypes, + @Sort SortSpec theSortSpec, + RequestDetails theRequestDetails) { startRequest(theServletRequest); try { @@ -107,7 +127,8 @@ public abstract class BaseJpaResourceProviderPatient ex everythingParams.setFilter(toStringAndList(theFilter)); everythingParams.setTypes(toStringAndList(theTypes)); - return ((IFhirResourceDaoPatient) getDao()).patientInstanceEverything(theServletRequest, theRequestDetails, everythingParams, theId); + return ((IFhirResourceDaoPatient) getDao()) + .patientInstanceEverything(theServletRequest, theRequestDetails, everythingParams, theId); } finally { endRequest(theServletRequest); } @@ -116,49 +137,73 @@ public abstract class BaseJpaResourceProviderPatient ex /** * /Patient/$everything */ - @Operation(name = JpaConstants.OPERATION_EVERYTHING, canonicalUrl = "http://hl7.org/fhir/OperationDefinition/Patient-everything", idempotent = true, bundleType = BundleTypeEnum.SEARCHSET) + @Operation( + name = JpaConstants.OPERATION_EVERYTHING, + canonicalUrl = "http://hl7.org/fhir/OperationDefinition/Patient-everything", + idempotent = true, + bundleType = BundleTypeEnum.SEARCHSET) public IBundleProvider patientTypeEverything( - - javax.servlet.http.HttpServletRequest theServletRequest, - - @Description(shortDefinition = "Results from this method are returned across multiple pages. 
This parameter controls the size of those pages.") - @OperationParam(name = Constants.PARAM_COUNT, typeName = "unsignedInt") - IPrimitiveType theCount, - - @Description(shortDefinition = "Results from this method are returned across multiple pages. This parameter controls the offset when fetching a page.") - @OperationParam(name = Constants.PARAM_OFFSET, typeName = "unsignedInt") - IPrimitiveType theOffset, - - @Description(shortDefinition = "Only return resources which were last updated as specified by the given range") - @OperationParam(name = Constants.PARAM_LASTUPDATED, min = 0, max = 1) - DateRangeParam theLastUpdated, - - @Description(shortDefinition = "Filter the resources to return only resources matching the given _content filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources linked to by the matched resources)") - @OperationParam(name = Constants.PARAM_CONTENT, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") - List> theContent, - - @Description(shortDefinition = "Filter the resources to return only resources matching the given _text filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources linked to by the matched resources)") - @OperationParam(name = Constants.PARAM_TEXT, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") - List> theNarrative, - - @Description(shortDefinition = "Filter the resources to return only resources matching the given _filter filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources linked to by the matched resources)") - @OperationParam(name = Constants.PARAM_FILTER, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") - List> theFilter, - - @Description(shortDefinition = "Filter the resources to return only resources matching the given _type filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources linked to by the matched resources)") - @OperationParam(name = Constants.PARAM_TYPE, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") - List> theTypes, - - - @Description(shortDefinition = "Filter the resources to return based on the patient ids provided.") - @OperationParam(name = Constants.PARAM_ID, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "id") - List theId, - - @Sort - SortSpec theSortSpec, - - RequestDetails theRequestDetails - ) { + javax.servlet.http.HttpServletRequest theServletRequest, + @Description( + shortDefinition = + "Results from this method are returned across multiple pages. This parameter controls the size of those pages.") + @OperationParam(name = Constants.PARAM_COUNT, typeName = "unsignedInt") + IPrimitiveType theCount, + @Description( + shortDefinition = + "Results from this method are returned across multiple pages. 
This parameter controls the offset when fetching a page.") + @OperationParam(name = Constants.PARAM_OFFSET, typeName = "unsignedInt") + IPrimitiveType theOffset, + @Description( + shortDefinition = + "Only return resources which were last updated as specified by the given range") + @OperationParam(name = Constants.PARAM_LASTUPDATED, min = 0, max = 1) + DateRangeParam theLastUpdated, + @Description( + shortDefinition = + "Filter the resources to return only resources matching the given _content filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources linked to by the matched resources)") + @OperationParam( + name = Constants.PARAM_CONTENT, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "string") + List> theContent, + @Description( + shortDefinition = + "Filter the resources to return only resources matching the given _text filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources linked to by the matched resources)") + @OperationParam( + name = Constants.PARAM_TEXT, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "string") + List> theNarrative, + @Description( + shortDefinition = + "Filter the resources to return only resources matching the given _filter filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources linked to by the matched resources)") + @OperationParam( + name = Constants.PARAM_FILTER, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "string") + List> theFilter, + @Description( + shortDefinition = + "Filter the resources to return only resources matching the given _type filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources linked to by the matched resources)") + @OperationParam( + name = Constants.PARAM_TYPE, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "string") + List> theTypes, + @Description(shortDefinition = "Filter the resources to return based on the patient ids provided.") + @OperationParam( + name = Constants.PARAM_ID, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "id") + List theId, + @Sort SortSpec theSortSpec, + RequestDetails theRequestDetails) { startRequest(theServletRequest); try { @@ -172,11 +217,15 @@ public abstract class BaseJpaResourceProviderPatient ex everythingParams.setFilter(toStringAndList(theFilter)); everythingParams.setTypes(toStringAndList(theTypes)); - return ((IFhirResourceDaoPatient) getDao()).patientTypeEverything(theServletRequest, theRequestDetails, everythingParams, toFlattenedPatientIdTokenParamList(theId)); + return ((IFhirResourceDaoPatient) getDao()) + .patientTypeEverything( + theServletRequest, + theRequestDetails, + everythingParams, + toFlattenedPatientIdTokenParamList(theId)); } finally { endRequest(theServletRequest); } - } /** @@ -195,7 +244,7 @@ public abstract class BaseJpaResourceProviderPatient ex } } - return retVal.getValuesAsQueryTokens().isEmpty() ? null: retVal; + return retVal.getValuesAsQueryTokens().isEmpty() ? 
null : retVal; } private StringAndListParam toStringAndList(List> theNarrative) { @@ -212,5 +261,4 @@ public abstract class BaseJpaResourceProviderPatient ex } return retVal; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderStructureDefinition.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderStructureDefinition.java index c2878796faa..dd89feb1e9f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderStructureDefinition.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProviderStructureDefinition.java @@ -35,22 +35,22 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; -public abstract class BaseJpaResourceProviderStructureDefinition extends BaseJpaResourceProvider { +public abstract class BaseJpaResourceProviderStructureDefinition + extends BaseJpaResourceProvider { /** * $snapshot operation */ - @Operation(name=JpaConstants.OPERATION_SNAPSHOT, idempotent = true) + @Operation(name = JpaConstants.OPERATION_SNAPSHOT, idempotent = true) public IBaseResource snapshot( - @IdParam(optional = true) IIdType theId, - @OperationParam(name = "definition", typeName = "StructureDefinition") IBaseResource theStructureDefinition, - @OperationParam(name = "url", typeName = "string") IPrimitiveType theUrl, - RequestDetails theRequestDetails) { + @IdParam(optional = true) IIdType theId, + @OperationParam(name = "definition", typeName = "StructureDefinition") IBaseResource theStructureDefinition, + @OperationParam(name = "url", typeName = "string") IPrimitiveType theUrl, + RequestDetails theRequestDetails) { ValidateUtil.exactlyOneNotNullOrThrowInvalidRequestException( - new Object[]{ theId, theStructureDefinition, theUrl }, - "Must supply either an ID or a StructureDefinition or a URL (but not more than one of these things)" - ); + new Object[] {theId, theStructureDefinition, theUrl}, + "Must supply either an ID or a StructureDefinition or a URL (but not more than one of these things)"); IBaseResource sd; IFhirResourceDaoStructureDefinition dao = getDao(); @@ -66,7 +66,8 @@ public abstract class BaseJpaResourceProviderStructureDefinition getDao() { return (IFhirResourceDaoStructureDefinition) super.getDao(); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaSystemProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaSystemProvider.java index 73b9202915f..4e5b15d8eab 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaSystemProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaSystemProvider.java @@ -41,10 +41,11 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import javax.servlet.http.HttpServletRequest; import java.util.Date; +import javax.servlet.http.HttpServletRequest; -public abstract class BaseJpaSystemProvider extends BaseStorageSystemProvider implements IJpaSystemProvider { +public abstract class BaseJpaSystemProvider extends BaseStorageSystemProvider + implements IJpaSystemProvider { private static final Logger ourLog = LoggerFactory.getLogger(BaseJpaSystemProvider.class); public static final String RESP_PARAM_SUCCESS = "success"; @@ -68,7 +69,6 @@ public abstract class BaseJpaSystemProvider 
extends BaseStorageSystemProv @Autowired private ITermReadSvc myTermReadSvc; - public BaseJpaSystemProvider() { // nothing } @@ -79,21 +79,21 @@ public abstract class BaseJpaSystemProvider extends BaseStorageSystemProv @History public IBundleProvider historyServer( - HttpServletRequest theRequest, - @Offset Integer theOffset, - @Since Date theDate, - @At DateRangeParam theAt, - RequestDetails theRequestDetails) { + HttpServletRequest theRequest, + @Offset Integer theOffset, + @Since Date theDate, + @At DateRangeParam theAt, + RequestDetails theRequestDetails) { startRequest(theRequest); try { DateRangeParam range = super.processSinceOrAt(theDate, theAt); - return myDao.history(range.getLowerBoundAsInstant(), range.getUpperBoundAsInstant(), theOffset, theRequestDetails); + return myDao.history( + range.getLowerBoundAsInstant(), range.getUpperBoundAsInstant(), theOffset, theRequestDetails); } finally { endRequest(theRequest); } } - @Operation(name = ProviderConstants.OPERATION_REINDEX_TERMINOLOGY, idempotent = false) public IBaseParameters reindexTerminology(RequestDetails theRequestDetails) { @@ -103,17 +103,18 @@ public abstract class BaseJpaSystemProvider extends BaseStorageSystemProv result = myTermReadSvc.reindexTerminology(); } catch (Exception theE) { - throw new InternalErrorException(Msg.code(2072) + - "Re-creating terminology freetext indexes failed with exception: " + theE.getMessage() + - NL + "With trace:" + NL + ExceptionUtils.getStackTrace(theE)); + throw new InternalErrorException( + Msg.code(2072) + "Re-creating terminology freetext indexes failed with exception: " + + theE.getMessage() + NL + + "With trace:" + NL + ExceptionUtils.getStackTrace(theE)); } IBaseParameters retVal = ParametersUtil.newInstance(getContext()); - if ( ! result.equals(ReindexTerminologyResult.SUCCESS) ) { + if (!result.equals(ReindexTerminologyResult.SUCCESS)) { ParametersUtil.addParameterToParametersBoolean(getContext(), retVal, RESP_PARAM_SUCCESS, false); String msg = result.equals(ReindexTerminologyResult.SEARCH_SVC_DISABLED) - ? "Freetext service is not configured. Operation didn't run." - : "Operation was cancelled because other terminology background tasks are currently running. Try again in a few minutes."; + ? "Freetext service is not configured. Operation didn't run." + : "Operation was cancelled because other terminology background tasks are currently running. 
Try again in a few minutes."; ParametersUtil.addParameterToParametersString(getContext(), retVal, "message", msg); return retVal; } @@ -123,7 +124,5 @@ public abstract class BaseJpaSystemProvider extends BaseStorageSystemProv return retVal; } - public static final String NL = System.getProperty("line.separator"); - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/DiffProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/DiffProvider.java index b426e2a720a..b20caebfd72 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/DiffProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/DiffProvider.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.jpa.provider; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.patch.FhirPatch; @@ -45,26 +45,37 @@ import javax.annotation.Nonnull; public class DiffProvider { private static final Logger ourLog = LoggerFactory.getLogger(DiffProvider.class); + @Autowired private FhirContext myContext; + @Autowired private DaoRegistry myDaoRegistry; @Description( - value="This operation examines two resource versions (can be two versions of the same resource, or two different resources) and generates a FHIR Patch document showing the differences.", - shortDefinition = "Comparte two resources or two versions of a single resource") + value = + "This operation examines two resource versions (can be two versions of the same resource, or two different resources) and generates a FHIR Patch document showing the differences.", + shortDefinition = "Comparte two resources or two versions of a single resource") @Operation(name = ProviderConstants.DIFF_OPERATION_NAME, global = true, idempotent = true) public IBaseParameters diff( - @IdParam IIdType theResourceId, - - @Description(value = "The resource ID and version to diff from", example = "Patient/example/version/1") - @OperationParam(name = ProviderConstants.DIFF_FROM_VERSION_PARAMETER, typeName = "string", min = 0, max = 1) - IPrimitiveType theFromVersion, - - @Description(value = "Should differences in the Resource.meta element be included in the diff", example = "false") - @OperationParam(name = ProviderConstants.DIFF_INCLUDE_META_PARAMETER, typeName = "boolean", min = 0, max = 1) - IPrimitiveType theIncludeMeta, - RequestDetails theRequestDetails) { + @IdParam IIdType theResourceId, + @Description(value = "The resource ID and version to diff from", example = "Patient/example/version/1") + @OperationParam( + name = ProviderConstants.DIFF_FROM_VERSION_PARAMETER, + typeName = "string", + min = 0, + max = 1) + IPrimitiveType theFromVersion, + @Description( + value = "Should differences in the Resource.meta element be included in the diff", + example = "false") + @OperationParam( + name = ProviderConstants.DIFF_INCLUDE_META_PARAMETER, + typeName = "boolean", + min = 0, + max = 1) + IPrimitiveType theIncludeMeta, + RequestDetails theRequestDetails) { IFhirResourceDao dao = myDaoRegistry.getResourceDao(theResourceId.getResourceType()); IBaseResource targetResource = dao.read(theResourceId, theRequestDetails); @@ -88,28 +99,32 @@ public class DiffProvider { long fromVersion = Long.parseLong(theFromVersion.getValueAsString()); sourceResource = dao.read(theResourceId.withVersion(Long.toString(fromVersion)), theRequestDetails); - } FhirPatch fhirPatch = 
newPatch(theIncludeMeta); return fhirPatch.diff(sourceResource, targetResource); } - @Description("This operation examines two resource versions (can be two versions of the same resource, or two different resources) and generates a FHIR Patch document showing the differences.") + @Description( + "This operation examines two resource versions (can be two versions of the same resource, or two different resources) and generates a FHIR Patch document showing the differences.") @Operation(name = ProviderConstants.DIFF_OPERATION_NAME, idempotent = true) public IBaseParameters diff( - @Description(value = "The resource ID and version to diff from", example = "Patient/example/version/1") - @OperationParam(name = ProviderConstants.DIFF_FROM_PARAMETER, typeName = "id", min = 1, max = 1) - IIdType theFromVersion, - - @Description(value = "The resource ID and version to diff to", example = "Patient/example/version/2") - @OperationParam(name = ProviderConstants.DIFF_TO_PARAMETER, typeName = "id", min = 1, max = 1) - IIdType theToVersion, - - @Description(value = "Should differences in the Resource.meta element be included in the diff", example = "false") - @OperationParam(name = ProviderConstants.DIFF_INCLUDE_META_PARAMETER, typeName = "boolean", min = 0, max = 1) - IPrimitiveType theIncludeMeta, - RequestDetails theRequestDetails) { + @Description(value = "The resource ID and version to diff from", example = "Patient/example/version/1") + @OperationParam(name = ProviderConstants.DIFF_FROM_PARAMETER, typeName = "id", min = 1, max = 1) + IIdType theFromVersion, + @Description(value = "The resource ID and version to diff to", example = "Patient/example/version/2") + @OperationParam(name = ProviderConstants.DIFF_TO_PARAMETER, typeName = "id", min = 1, max = 1) + IIdType theToVersion, + @Description( + value = "Should differences in the Resource.meta element be included in the diff", + example = "false") + @OperationParam( + name = ProviderConstants.DIFF_INCLUDE_META_PARAMETER, + typeName = "boolean", + min = 0, + max = 1) + IPrimitiveType theIncludeMeta, + RequestDetails theRequestDetails) { if (!Objects.equal(theFromVersion.getResourceType(), theToVersion.getResourceType())) { String msg = myContext.getLocalizer().getMessage(DiffProvider.class, "cantDiffDifferentTypes"); @@ -137,5 +152,4 @@ public class DiffProvider { return fhirPatch; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/InstanceReindexProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/InstanceReindexProvider.java index d68ea0e5e44..646f1523b18 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/InstanceReindexProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/InstanceReindexProvider.java @@ -30,10 +30,10 @@ import org.hl7.fhir.instance.model.api.IBaseParameters; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nonnull; import java.util.List; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class InstanceReindexProvider { @@ -49,27 +49,20 @@ public class InstanceReindexProvider { @Operation(name = ProviderConstants.OPERATION_REINDEX_DRYRUN, idempotent = true, global = true) public IBaseParameters reindexInstanceDryRun( - @IdParam IIdType theId, - @OperationParam(name="code", typeName = "code", min = 0, max = OperationParam.MAX_UNLIMITED) List> theCodes, - RequestDetails theRequestDetails - ) { + 
@IdParam IIdType theId, + @OperationParam(name = "code", typeName = "code", min = 0, max = OperationParam.MAX_UNLIMITED) + List> theCodes, + RequestDetails theRequestDetails) { Set codes = null; if (theCodes != null && theCodes.size() > 0) { - codes = theCodes - .stream() - .map(IPrimitiveType::getValueAsString) - .collect(Collectors.toSet()); + codes = theCodes.stream().map(IPrimitiveType::getValueAsString).collect(Collectors.toSet()); } return myInstanceReindexService.reindexDryRun(theRequestDetails, theId, codes); } @Operation(name = ProviderConstants.OPERATION_REINDEX, idempotent = false, global = true) - public IBaseParameters reindexInstance( - @IdParam IIdType theId, - RequestDetails theRequestDetails - ) { + public IBaseParameters reindexInstance(@IdParam IIdType theId, RequestDetails theRequestDetails) { return myInstanceReindexService.reindex(theRequestDetails, theId); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaCapabilityStatementProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaCapabilityStatementProvider.java index ba8167343ff..2983384f549 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaCapabilityStatementProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaCapabilityStatementProvider.java @@ -39,8 +39,8 @@ import org.hl7.fhir.r4.model.CapabilityStatement.ConditionalDeleteStatus; import org.hl7.fhir.r4.model.CapabilityStatement.ResourceVersionPolicy; import org.hl7.fhir.r4.model.Meta; -import javax.annotation.Nonnull; import java.util.Map; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -58,7 +58,12 @@ public class JpaCapabilityStatementProvider extends ServerCapabilityStatementPro /** * Constructor */ - public JpaCapabilityStatementProvider(@Nonnull RestfulServer theRestfulServer, @Nonnull IFhirSystemDao theSystemDao, @Nonnull JpaStorageSettings theStorageSettings, @Nonnull ISearchParamRegistry theSearchParamRegistry, IValidationSupport theValidationSupport) { + public JpaCapabilityStatementProvider( + @Nonnull RestfulServer theRestfulServer, + @Nonnull IFhirSystemDao theSystemDao, + @Nonnull JpaStorageSettings theStorageSettings, + @Nonnull ISearchParamRegistry theSearchParamRegistry, + IValidationSupport theValidationSupport) { super(theRestfulServer, theSearchParamRegistry, theValidationSupport); Validate.notNull(theRestfulServer); @@ -90,12 +95,18 @@ public class JpaCapabilityStatementProvider extends ServerCapabilityStatementPro protected void postProcessRest(FhirTerser theTerser, IBase theRest) { super.postProcessRest(theTerser, theRest); - if (myStorageSettings.getSupportedSubscriptionTypes().contains(org.hl7.fhir.dstu2.model.Subscription.SubscriptionChannelType.WEBSOCKET)) { + if (myStorageSettings + .getSupportedSubscriptionTypes() + .contains(org.hl7.fhir.dstu2.model.Subscription.SubscriptionChannelType.WEBSOCKET)) { if (isNotBlank(myStorageSettings.getWebsocketContextPath())) { - ExtensionUtil.setExtension(myContext, theRest, Constants.CAPABILITYSTATEMENT_WEBSOCKET_URL, "uri", myStorageSettings.getWebsocketContextPath()); + ExtensionUtil.setExtension( + myContext, + theRest, + Constants.CAPABILITYSTATEMENT_WEBSOCKET_URL, + "uri", + myStorageSettings.getWebsocketContextPath()); } } - } @Override @@ -116,11 +127,15 @@ public class JpaCapabilityStatementProvider extends ServerCapabilityStatementPro if (counts != null) { Long count = counts.get(theResourceName); if (count != 
null) { - ExtensionUtil.setExtension(myContext, theResource, ExtensionConstants.CONF_RESOURCE_COUNT, "decimal", Long.toString(count)); + ExtensionUtil.setExtension( + myContext, + theResource, + ExtensionConstants.CONF_RESOURCE_COUNT, + "decimal", + Long.toString(count)); } } } - } public boolean isIncludeResourceCounts() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaConformanceProviderDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaConformanceProviderDstu2.java index 9eb06d490c2..dd06075d423 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaConformanceProviderDstu2.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaConformanceProviderDstu2.java @@ -46,10 +46,10 @@ import ca.uhn.fhir.util.CoverageIgnore; import ca.uhn.fhir.util.ExtensionConstants; import org.hl7.fhir.dstu2.model.Subscription; -import javax.servlet.http.HttpServletRequest; import java.util.Collections; import java.util.List; import java.util.Map; +import javax.servlet.http.HttpServletRequest; import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -77,7 +77,10 @@ public class JpaConformanceProviderDstu2 extends ServerConformanceProvider { /** * Constructor */ - public JpaConformanceProviderDstu2(RestfulServer theRestfulServer, IFhirSystemDao theSystemDao, JpaStorageSettings theStorageSettings) { + public JpaConformanceProviderDstu2( + RestfulServer theRestfulServer, + IFhirSystemDao theSystemDao, + JpaStorageSettings theStorageSettings) { super(theRestfulServer); myRestfulServer = theRestfulServer; mySystemDao = theSystemDao; @@ -103,15 +106,18 @@ public class JpaConformanceProviderDstu2 extends ServerConformanceProvider { for (RestResource nextResource : nextRest.getResource()) { - ConditionalDeleteStatusEnum conditionalDelete = nextResource.getConditionalDeleteElement().getValueAsEnum(); - if (conditionalDelete == ConditionalDeleteStatusEnum.MULTIPLE_DELETES_SUPPORTED && myStorageSettings.isAllowMultipleDelete() == false) { + ConditionalDeleteStatusEnum conditionalDelete = + nextResource.getConditionalDeleteElement().getValueAsEnum(); + if (conditionalDelete == ConditionalDeleteStatusEnum.MULTIPLE_DELETES_SUPPORTED + && myStorageSettings.isAllowMultipleDelete() == false) { nextResource.setConditionalDelete(ConditionalDeleteStatusEnum.SINGLE_DELETES_SUPPORTED); } // Add resource counts Long count = counts.get(nextResource.getTypeElement().getValueAsString()); if (count != null) { - nextResource.addUndeclaredExtension(false, ExtensionConstants.CONF_RESOURCE_COUNT, new DecimalDt(count)); + nextResource.addUndeclaredExtension( + false, ExtensionConstants.CONF_RESOURCE_COUNT, new DecimalDt(count)); } // Add chained params @@ -126,11 +132,12 @@ public class JpaConformanceProviderDstu2 extends ServerConformanceProvider { } } } - } } - if (myStorageSettings.getSupportedSubscriptionTypes().contains(Subscription.SubscriptionChannelType.WEBSOCKET)) { + if (myStorageSettings + .getSupportedSubscriptionTypes() + .contains(Subscription.SubscriptionChannelType.WEBSOCKET)) { if (isNotBlank(myStorageSettings.getWebsocketContextPath())) { ExtensionDt websocketExtension = new ExtensionDt(); websocketExtension.setUrl(Constants.CAPABILITYSTATEMENT_WEBSOCKET_URL); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaSystemProvider.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaSystemProvider.java index ce24a11facc..4214d62804c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaSystemProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaSystemProvider.java @@ -19,7 +19,6 @@ */ package ca.uhn.fhir.jpa.provider; -import ca.uhn.fhir.batch2.jobs.reindex.ReindexProvider; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.model.api.annotation.Description; @@ -37,7 +36,6 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; import java.util.Collections; -import java.util.List; import java.util.Map; import java.util.TreeMap; @@ -46,19 +44,19 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; public final class JpaSystemProvider extends BaseJpaSystemProvider { - - @Description("Marks all currently existing resources of a given type, or all resources of all types, for reindexing.") - @Operation(name = MARK_ALL_RESOURCES_FOR_REINDEXING, idempotent = false, returnParameters = { - @OperationParam(name = "status") - }) + @Description( + "Marks all currently existing resources of a given type, or all resources of all types, for reindexing.") + @Operation( + name = MARK_ALL_RESOURCES_FOR_REINDEXING, + idempotent = false, + returnParameters = {@OperationParam(name = "status")}) /** * @deprecated * @see ReindexProvider#Reindex(List, IPrimitiveType, RequestDetails) */ @Deprecated public IBaseResource markAllResourcesForReindexing( - @OperationParam(name = "type", min = 0, max = 1, typeName = "code") IPrimitiveType theType - ) { + @OperationParam(name = "type", min = 0, max = 1, typeName = "code") IPrimitiveType theType) { if (theType != null && isNotBlank(theType.getValueAsString())) { getResourceReindexingSvc().markAllResourcesForReindexing(theType.getValueAsString()); @@ -75,9 +73,10 @@ public final class JpaSystemProvider extends BaseJpaSystemProvider } @Description("Forces a single pass of the resource reindexing processor") - @Operation(name = PERFORM_REINDEXING_PASS, idempotent = false, returnParameters = { - @OperationParam(name = "status") - }) + @Operation( + name = PERFORM_REINDEXING_PASS, + idempotent = false, + returnParameters = {@OperationParam(name = "status")}) /** * @deprecated * @see ReindexProvider#Reindex(List, IPrimitiveType, RequestDetails) @@ -100,7 +99,9 @@ public final class JpaSystemProvider extends BaseJpaSystemProvider } @Operation(name = JpaConstants.OPERATION_GET_RESOURCE_COUNTS, idempotent = true) - @Description(shortDefinition = "Provides the number of resources currently stored on the server, broken down by resource type") + @Description( + shortDefinition = + "Provides the number of resources currently stored on the server, broken down by resource type") public IBaseParameters getResourceCounts() { IBaseParameters retVal = ParametersUtil.newInstance(getContext()); @@ -108,18 +109,24 @@ public final class JpaSystemProvider extends BaseJpaSystemProvider counts = defaultIfNull(counts, Collections.emptyMap()); counts = new TreeMap<>(counts); for (Map.Entry nextEntry : counts.entrySet()) { - ParametersUtil.addParameterToParametersInteger(getContext(), retVal, nextEntry.getKey(), nextEntry.getValue().intValue()); + ParametersUtil.addParameterToParametersInteger( + getContext(), + retVal, + nextEntry.getKey(), + nextEntry.getValue().intValue()); } return retVal; } - @Operation(name = 
ProviderConstants.OPERATION_META, idempotent = true, returnParameters = { - @OperationParam(name = "return", typeName = "Meta") - }) + @Operation( + name = ProviderConstants.OPERATION_META, + idempotent = true, + returnParameters = {@OperationParam(name = "return", typeName = "Meta")}) public IBaseParameters meta(RequestDetails theRequestDetails) { IBaseParameters retVal = ParametersUtil.newInstance(getContext()); - ParametersUtil.addParameterToParameters(getContext(), retVal, "return", getDao().metaGetOperation(theRequestDetails)); + ParametersUtil.addParameterToParameters( + getContext(), retVal, "return", getDao().metaGetOperation(theRequestDetails)); return retVal; } @@ -134,6 +141,4 @@ public final class JpaSystemProvider extends BaseJpaSystemProvider endRequest(((ServletRequestDetails) theRequestDetails).getServletRequest()); } } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/ProcessMessageProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/ProcessMessageProvider.java index b597f0d98c6..803254ea8bb 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/ProcessMessageProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/ProcessMessageProvider.java @@ -44,13 +44,13 @@ public class ProcessMessageProvider { @Description("Accept a FHIR Message Bundle for processing") @Operation(name = JpaConstants.OPERATION_PROCESS_MESSAGE, idempotent = false) public IBaseBundle processMessage( - HttpServletRequest theServletRequest, - RequestDetails theRequestDetails, - - @OperationParam(name = "content", min = 1, max = 1, typeName = "Bundle") - @Description(shortDefinition = "The message to process (or, if using asynchronous messaging, it may be a response message to accept)") - IBaseBundle theMessageToProcess - ) { + HttpServletRequest theServletRequest, + RequestDetails theRequestDetails, + @OperationParam(name = "content", min = 1, max = 1, typeName = "Bundle") + @Description( + shortDefinition = + "The message to process (or, if using asynchronous messaging, it may be a response message to accept)") + IBaseBundle theMessageToProcess) { startRequest(theServletRequest); try { @@ -58,7 +58,5 @@ public class ProcessMessageProvider { } finally { endRequest(theServletRequest); } - } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/TerminologyUploaderProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/TerminologyUploaderProvider.java index ae5b3225d7c..85928273a71 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/TerminologyUploaderProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/TerminologyUploaderProvider.java @@ -19,9 +19,9 @@ */ package ca.uhn.fhir.jpa.provider; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.RuntimeResourceDefinition; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.term.TermLoaderSvcImpl; import ca.uhn.fhir.jpa.term.UploadStatistics; @@ -51,8 +51,6 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.hl7.fhir.r4.model.CodeSystem; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nonnull; -import javax.servlet.http.HttpServletRequest; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; @@ -61,6 +59,8 @@ import java.util.ArrayList; import 
java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import javax.annotation.Nonnull; +import javax.servlet.http.HttpServletRequest; import static org.apache.commons.lang3.StringUtils.*; @@ -97,15 +97,19 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { * $upload-external-codesystem * */ - @Operation(typeName = "CodeSystem", name = JpaConstants.OPERATION_UPLOAD_EXTERNAL_CODE_SYSTEM, idempotent = false, returnParameters = { -// @OperationParam(name = "conceptCount", type = IntegerType.class, min = 1) - }) + @Operation( + typeName = "CodeSystem", + name = JpaConstants.OPERATION_UPLOAD_EXTERNAL_CODE_SYSTEM, + idempotent = false, + returnParameters = { + // @OperationParam(name = "conceptCount", type = IntegerType.class, min = 1) + }) public IBaseParameters uploadSnapshot( - HttpServletRequest theServletRequest, - @OperationParam(name = PARAM_SYSTEM, min = 1, typeName = "uri") IPrimitiveType theCodeSystemUrl, - @OperationParam(name = PARAM_FILE, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "attachment") List theFiles, - RequestDetails theRequestDetails - ) { + HttpServletRequest theServletRequest, + @OperationParam(name = PARAM_SYSTEM, min = 1, typeName = "uri") IPrimitiveType theCodeSystemUrl, + @OperationParam(name = PARAM_FILE, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "attachment") + List theFiles, + RequestDetails theRequestDetails) { startRequest(theServletRequest); @@ -114,10 +118,13 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { } if (theFiles == null || theFiles.size() == 0) { - throw new InvalidRequestException(Msg.code(1138) + "No '" + PARAM_FILE + "' parameter, or package had no data"); + throw new InvalidRequestException( + Msg.code(1138) + "No '" + PARAM_FILE + "' parameter, or package had no data"); } for (ICompositeType next : theFiles) { - ValidateUtil.isTrueOrThrowInvalidRequest(getContext().getElementDefinition(next.getClass()).getName().equals("Attachment"), "Package must be of type Attachment"); + ValidateUtil.isTrueOrThrowInvalidRequest( + getContext().getElementDefinition(next.getClass()).getName().equals("Attachment"), + "Package must be of type Attachment"); } try { @@ -150,8 +157,10 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { IBaseParameters retVal = ParametersUtil.newInstance(getContext()); ParametersUtil.addParameterToParametersBoolean(getContext(), retVal, RESP_PARAM_SUCCESS, true); - ParametersUtil.addParameterToParametersInteger(getContext(), retVal, RESP_PARAM_CONCEPT_COUNT, stats.getUpdatedConceptCount()); - ParametersUtil.addParameterToParametersReference(getContext(), retVal, RESP_PARAM_TARGET, stats.getTarget().getValue()); + ParametersUtil.addParameterToParametersInteger( + getContext(), retVal, RESP_PARAM_CONCEPT_COUNT, stats.getUpdatedConceptCount()); + ParametersUtil.addParameterToParametersReference( + getContext(), retVal, RESP_PARAM_TARGET, stats.getTarget().getValue()); return retVal; } finally { @@ -164,15 +173,23 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { * $apply-codesystem-delta-add * */ - @Operation(typeName = "CodeSystem", name = JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD, idempotent = false, returnParameters = { - }) + @Operation( + typeName = "CodeSystem", + name = JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD, + idempotent = false, + returnParameters = {}) public IBaseParameters uploadDeltaAdd( - HttpServletRequest theServletRequest, - @OperationParam(name = PARAM_SYSTEM, min = 1, max = 1, 
typeName = "uri") IPrimitiveType theSystem, - @OperationParam(name = PARAM_FILE, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "attachment") List theFiles, - @OperationParam(name = PARAM_CODESYSTEM, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "CodeSystem") List theCodeSystems, - RequestDetails theRequestDetails - ) { + HttpServletRequest theServletRequest, + @OperationParam(name = PARAM_SYSTEM, min = 1, max = 1, typeName = "uri") IPrimitiveType theSystem, + @OperationParam(name = PARAM_FILE, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "attachment") + List theFiles, + @OperationParam( + name = PARAM_CODESYSTEM, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "CodeSystem") + List theCodeSystems, + RequestDetails theRequestDetails) { startRequest(theServletRequest); try { @@ -181,28 +198,36 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { List files = convertAttachmentsToFileDescriptors(theFiles); convertCodeSystemsToFileDescriptors(files, theCodeSystems); - UploadStatistics outcome = myTerminologyLoaderSvc.loadDeltaAdd(theSystem.getValue(), files, theRequestDetails); + UploadStatistics outcome = + myTerminologyLoaderSvc.loadDeltaAdd(theSystem.getValue(), files, theRequestDetails); return toDeltaResponse(outcome); } finally { endRequest(theServletRequest); } } - /** * * $apply-codesystem-delta-remove * */ - @Operation(typeName = "CodeSystem", name = JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_REMOVE, idempotent = false, returnParameters = { - }) + @Operation( + typeName = "CodeSystem", + name = JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_REMOVE, + idempotent = false, + returnParameters = {}) public IBaseParameters uploadDeltaRemove( - HttpServletRequest theServletRequest, - @OperationParam(name = PARAM_SYSTEM, min = 1, max = 1, typeName = "uri") IPrimitiveType theSystem, - @OperationParam(name = PARAM_FILE, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "attachment") List theFiles, - @OperationParam(name = PARAM_CODESYSTEM, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "CodeSystem") List theCodeSystems, - RequestDetails theRequestDetails - ) { + HttpServletRequest theServletRequest, + @OperationParam(name = PARAM_SYSTEM, min = 1, max = 1, typeName = "uri") IPrimitiveType theSystem, + @OperationParam(name = PARAM_FILE, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "attachment") + List theFiles, + @OperationParam( + name = PARAM_CODESYSTEM, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "CodeSystem") + List theCodeSystems, + RequestDetails theRequestDetails) { startRequest(theServletRequest); try { @@ -211,15 +236,16 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { List files = convertAttachmentsToFileDescriptors(theFiles); convertCodeSystemsToFileDescriptors(files, theCodeSystems); - UploadStatistics outcome = myTerminologyLoaderSvc.loadDeltaRemove(theSystem.getValue(), files, theRequestDetails); + UploadStatistics outcome = + myTerminologyLoaderSvc.loadDeltaRemove(theSystem.getValue(), files, theRequestDetails); return toDeltaResponse(outcome); } finally { endRequest(theServletRequest); } - } - private void convertCodeSystemsToFileDescriptors(List theFiles, List theCodeSystems) { + private void convertCodeSystemsToFileDescriptors( + List theFiles, List theCodeSystems) { Map codes = new LinkedHashMap<>(); Map> codeToProperties = new LinkedHashMap<>(); @@ -228,7 +254,8 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { if (theCodeSystems != 
null) { for (IBaseResource nextCodeSystemUncast : theCodeSystems) { CodeSystem nextCodeSystem = canonicalizeCodeSystem(nextCodeSystemUncast); - convertCodeSystemCodesToCsv(nextCodeSystem.getConcept(), codes, codeToProperties, null, codeToParentCodes); + convertCodeSystemCodesToCsv( + nextCodeSystem.getConcept(), codes, codeToProperties, null, codeToParentCodes); } } @@ -247,7 +274,8 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { } byte[] bytes = b.toString().getBytes(Charsets.UTF_8); String fileName = TermLoaderSvcImpl.CUSTOM_CONCEPTS_FILE; - ITermLoaderSvc.ByteArrayFileDescriptor fileDescriptor = new ITermLoaderSvc.ByteArrayFileDescriptor(fileName, bytes); + ITermLoaderSvc.ByteArrayFileDescriptor fileDescriptor = + new ITermLoaderSvc.ByteArrayFileDescriptor(fileName, bytes); theFiles.add(fileDescriptor); } @@ -266,7 +294,8 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { } byte[] bytes = b.toString().getBytes(Charsets.UTF_8); String fileName = TermLoaderSvcImpl.CUSTOM_HIERARCHY_FILE; - ITermLoaderSvc.ByteArrayFileDescriptor fileDescriptor = new ITermLoaderSvc.ByteArrayFileDescriptor(fileName, bytes); + ITermLoaderSvc.ByteArrayFileDescriptor fileDescriptor = + new ITermLoaderSvc.ByteArrayFileDescriptor(fileName, bytes); theFiles.add(fileDescriptor); } // Create codeToProperties file @@ -287,7 +316,8 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { b.append(","); b.append(csvEscape(propertyComponent.getCode())); b.append(","); - //TODO: check this for different types, other types should be added once TermConceptPropertyTypeEnum contain different types + // TODO: check this for different types, other types should be added once + // TermConceptPropertyTypeEnum contain different types b.append(csvEscape(propertyComponent.getValueStringType().getValue())); b.append(","); b.append(csvEscape(propertyComponent.getValue().primitiveValue())); @@ -296,25 +326,28 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { } byte[] bytes = b.toString().getBytes(Charsets.UTF_8); String fileName = TermLoaderSvcImpl.CUSTOM_PROPERTIES_FILE; - ITermLoaderSvc.ByteArrayFileDescriptor fileDescriptor = new ITermLoaderSvc.ByteArrayFileDescriptor(fileName, bytes); + ITermLoaderSvc.ByteArrayFileDescriptor fileDescriptor = + new ITermLoaderSvc.ByteArrayFileDescriptor(fileName, bytes); theFiles.add(fileDescriptor); } - } @SuppressWarnings("EnumSwitchStatementWhichMissesCases") @Nonnull CodeSystem canonicalizeCodeSystem(@Nonnull IBaseResource theCodeSystem) { RuntimeResourceDefinition resourceDef = getContext().getResourceDefinition(theCodeSystem); - ValidateUtil.isTrueOrThrowInvalidRequest(resourceDef.getName().equals("CodeSystem"), "Resource '%s' is not a CodeSystem", resourceDef.getName()); + ValidateUtil.isTrueOrThrowInvalidRequest( + resourceDef.getName().equals("CodeSystem"), "Resource '%s' is not a CodeSystem", resourceDef.getName()); CodeSystem nextCodeSystem; switch (getContext().getVersion().getVersion()) { case DSTU3: - nextCodeSystem = (CodeSystem) VersionConvertorFactory_30_40.convertResource((org.hl7.fhir.dstu3.model.CodeSystem) theCodeSystem, new BaseAdvisor_30_40(false)); + nextCodeSystem = (CodeSystem) VersionConvertorFactory_30_40.convertResource( + (org.hl7.fhir.dstu3.model.CodeSystem) theCodeSystem, new BaseAdvisor_30_40(false)); break; case R5: - nextCodeSystem = (CodeSystem) VersionConvertorFactory_40_50.convertResource((org.hl7.fhir.r5.model.CodeSystem) theCodeSystem, new BaseAdvisor_40_50(false)); + nextCodeSystem = 
(CodeSystem) VersionConvertorFactory_40_50.convertResource( + (org.hl7.fhir.r5.model.CodeSystem) theCodeSystem, new BaseAdvisor_40_50(false)); break; default: nextCodeSystem = (CodeSystem) theCodeSystem; @@ -322,7 +355,12 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { return nextCodeSystem; } - private void convertCodeSystemCodesToCsv(List theConcept, Map theCodes, Map> theProperties, String theParentCode, Multimap theCodeToParentCodes) { + private void convertCodeSystemCodesToCsv( + List theConcept, + Map theCodes, + Map> theProperties, + String theParentCode, + Multimap theCodeToParentCodes) { for (CodeSystem.ConceptDefinitionComponent nextConcept : theConcept) { if (isNotBlank(nextConcept.getCode())) { theCodes.put(nextConcept.getCode(), nextConcept.getDisplay()); @@ -332,7 +370,8 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { if (nextConcept.getProperty() != null) { theProperties.put(nextConcept.getCode(), nextConcept.getProperty()); } - convertCodeSystemCodesToCsv(nextConcept.getConcept(), theCodes, theProperties, nextConcept.getCode(), theCodeToParentCodes); + convertCodeSystemCodesToCsv( + nextConcept.getConcept(), theCodes, theProperties, nextConcept.getCode(), theCodeToParentCodes); } } } @@ -362,19 +401,21 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { } @Nonnull - private List convertAttachmentsToFileDescriptors(@OperationParam(name = PARAM_FILE, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "attachment") List theFiles) { + private List convertAttachmentsToFileDescriptors( + @OperationParam(name = PARAM_FILE, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "attachment") + List theFiles) { List files = new ArrayList<>(); if (theFiles != null) { for (ICompositeType next : theFiles) { - String nextUrl = AttachmentUtil.getOrCreateUrl(getContext(), next).getValue(); + String nextUrl = + AttachmentUtil.getOrCreateUrl(getContext(), next).getValue(); ValidateUtil.isNotBlankOrThrowUnprocessableEntity(nextUrl, "Missing Attachment.url value"); byte[] nextData; if (nextUrl.startsWith("localfile:")) { String nextLocalFile = nextUrl.substring("localfile:".length()); - if (isNotBlank(nextLocalFile)) { ourLog.info("Reading in local file: {}", nextLocalFile); File nextFile = new File(nextLocalFile); @@ -385,8 +426,10 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { } } else { - nextData = AttachmentUtil.getOrCreateData(getContext(), next).getValue(); - ValidateUtil.isTrueOrThrowInvalidRequest(nextData != null && nextData.length > 0, "Missing Attachment.data value"); + nextData = + AttachmentUtil.getOrCreateData(getContext(), next).getValue(); + ValidateUtil.isTrueOrThrowInvalidRequest( + nextData != null && nextData.length > 0, "Missing Attachment.data value"); files.add(new ITermLoaderSvc.ByteArrayFileDescriptor(nextUrl, nextData)); } } @@ -396,8 +439,10 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { private IBaseParameters toDeltaResponse(UploadStatistics theOutcome) { IBaseParameters retVal = ParametersUtil.newInstance(getContext()); - ParametersUtil.addParameterToParametersInteger(getContext(), retVal, RESP_PARAM_CONCEPT_COUNT, theOutcome.getUpdatedConceptCount()); - ParametersUtil.addParameterToParametersReference(getContext(), retVal, RESP_PARAM_TARGET, theOutcome.getTarget().getValue()); + ParametersUtil.addParameterToParametersInteger( + getContext(), retVal, RESP_PARAM_CONCEPT_COUNT, theOutcome.getUpdatedConceptCount()); + 
ParametersUtil.addParameterToParametersReference( + getContext(), retVal, RESP_PARAM_TARGET, theOutcome.getTarget().getValue()); return retVal; } @@ -424,11 +469,6 @@ public class TerminologyUploaderProvider extends BaseJpaProvider { } private static String csvEscape(String theValue) { - return '"' + - theValue - .replace("\"", "\"\"") - .replace("\n", "\\n") - .replace("\r", "") + - '"'; + return '"' + theValue.replace("\"", "\"\"").replace("\n", "\\n").replace("\r", "") + '"'; } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/ValueSetOperationProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/ValueSetOperationProvider.java index a606e24a1b2..816334e17ac 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/ValueSetOperationProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/ValueSetOperationProvider.java @@ -52,22 +52,25 @@ import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; -import javax.servlet.http.HttpServletRequest; - import java.util.Optional; import java.util.function.Supplier; +import javax.servlet.http.HttpServletRequest; import static org.apache.commons.lang3.StringUtils.isNotBlank; public class ValueSetOperationProvider extends BaseJpaProvider { private static final Logger ourLog = LoggerFactory.getLogger(ValueSetOperationProvider.class); + @Autowired protected IValidationSupport myValidationSupport; + @Autowired private DaoRegistry myDaoRegistry; + @Autowired private ITermReadSvc myTermReadSvc; + @Autowired @Qualifier(JpaConfig.JPA_VALIDATION_SUPPORT_CHAIN) private ValidationSupportChain myValidationSupportChain; @@ -83,24 +86,48 @@ public class ValueSetOperationProvider extends BaseJpaProvider { @Operation(name = JpaConstants.OPERATION_EXPAND, idempotent = true, typeName = "ValueSet") public IBaseResource expand( - HttpServletRequest theServletRequest, - @IdParam(optional = true) IIdType theId, - @OperationParam(name = "valueSet", min = 0, max = 1) IBaseResource theValueSet, - @OperationParam(name = "url", min = 0, max = 1, typeName = "uri") IPrimitiveType theUrl, - @OperationParam(name = "valueSetVersion", min = 0, max = 1, typeName = "string") IPrimitiveType theValueSetVersion, - @OperationParam(name = "filter", min = 0, max = 1, typeName = "string") IPrimitiveType theFilter, - @OperationParam(name = "context", min = 0, max = 1, typeName = "string") IPrimitiveType theContext, - @OperationParam(name = "contextDirection", min = 0, max = 1, typeName = "string") IPrimitiveType theContextDirection, - @OperationParam(name = "offset", min = 0, max = 1, typeName = "integer") IPrimitiveType theOffset, - @OperationParam(name = "count", min = 0, max = 1, typeName = "integer") IPrimitiveType theCount, - @OperationParam(name = JpaConstants.OPERATION_EXPAND_PARAM_DISPLAY_LANGUAGE, min = 0, max = 1, typeName = "code") IPrimitiveType theDisplayLanguage, - @OperationParam(name = JpaConstants.OPERATION_EXPAND_PARAM_INCLUDE_HIERARCHY, min = 0, max = 1, typeName = "boolean") IPrimitiveType theIncludeHierarchy, - RequestDetails theRequestDetails) { + HttpServletRequest theServletRequest, + @IdParam(optional = true) IIdType theId, + @OperationParam(name = "valueSet", min = 0, max = 1) IBaseResource theValueSet, + @OperationParam(name = "url", min = 0, max = 1, typeName = "uri") IPrimitiveType theUrl, + @OperationParam(name = "valueSetVersion", min = 0, max = 1, typeName = 
"string") + IPrimitiveType theValueSetVersion, + @OperationParam(name = "filter", min = 0, max = 1, typeName = "string") IPrimitiveType theFilter, + @OperationParam(name = "context", min = 0, max = 1, typeName = "string") IPrimitiveType theContext, + @OperationParam(name = "contextDirection", min = 0, max = 1, typeName = "string") + IPrimitiveType theContextDirection, + @OperationParam(name = "offset", min = 0, max = 1, typeName = "integer") IPrimitiveType theOffset, + @OperationParam(name = "count", min = 0, max = 1, typeName = "integer") IPrimitiveType theCount, + @OperationParam( + name = JpaConstants.OPERATION_EXPAND_PARAM_DISPLAY_LANGUAGE, + min = 0, + max = 1, + typeName = "code") + IPrimitiveType theDisplayLanguage, + @OperationParam( + name = JpaConstants.OPERATION_EXPAND_PARAM_INCLUDE_HIERARCHY, + min = 0, + max = 1, + typeName = "boolean") + IPrimitiveType theIncludeHierarchy, + RequestDetails theRequestDetails) { startRequest(theServletRequest); try { - return getDao().expand(theId, theValueSet, theUrl, theValueSetVersion, theFilter, theContext, theContextDirection, theOffset, theCount, theDisplayLanguage, theIncludeHierarchy, theRequestDetails); + return getDao().expand( + theId, + theValueSet, + theUrl, + theValueSetVersion, + theFilter, + theContext, + theContextDirection, + theOffset, + theCount, + theDisplayLanguage, + theIncludeHierarchy, + theRequestDetails); } finally { endRequest(theServletRequest); @@ -113,40 +140,50 @@ public class ValueSetOperationProvider extends BaseJpaProvider { } @SuppressWarnings("unchecked") - @Operation(name = JpaConstants.OPERATION_VALIDATE_CODE, idempotent = true, typeName = "ValueSet", returnParameters = { - @OperationParam(name = "result", typeName = "boolean", min = 1), - @OperationParam(name = "message", typeName = "string"), - @OperationParam(name = "display", typeName = "string") - }) + @Operation( + name = JpaConstants.OPERATION_VALIDATE_CODE, + idempotent = true, + typeName = "ValueSet", + returnParameters = { + @OperationParam(name = "result", typeName = "boolean", min = 1), + @OperationParam(name = "message", typeName = "string"), + @OperationParam(name = "display", typeName = "string") + }) public IBaseParameters validateCode( - HttpServletRequest theServletRequest, - @IdParam(optional = true) IIdType theId, - @OperationParam(name = "url", min = 0, max = 1, typeName = "uri") IPrimitiveType theValueSetUrl, - @OperationParam(name = "valueSetVersion", min = 0, max = 1, typeName = "string") IPrimitiveType theValueSetVersion, - @OperationParam(name = "code", min = 0, max = 1, typeName = "code") IPrimitiveType theCode, - @OperationParam(name = "system", min = 0, max = 1, typeName = "uri") IPrimitiveType theSystem, - @OperationParam(name = "systemVersion", min = 0, max = 1, typeName = "string") IPrimitiveType theSystemVersion, - @OperationParam(name = "display", min = 0, max = 1, typeName = "string") IPrimitiveType theDisplay, - @OperationParam(name = "coding", min = 0, max = 1, typeName = "Coding") IBaseCoding theCoding, - @OperationParam(name = "codeableConcept", min = 0, max = 1, typeName = "CodeableConcept") ICompositeType theCodeableConcept, - RequestDetails theRequestDetails - ) { + HttpServletRequest theServletRequest, + @IdParam(optional = true) IIdType theId, + @OperationParam(name = "url", min = 0, max = 1, typeName = "uri") IPrimitiveType theValueSetUrl, + @OperationParam(name = "valueSetVersion", min = 0, max = 1, typeName = "string") + IPrimitiveType theValueSetVersion, + @OperationParam(name = "code", min = 0, max = 1, 
typeName = "code") IPrimitiveType theCode, + @OperationParam(name = "system", min = 0, max = 1, typeName = "uri") IPrimitiveType theSystem, + @OperationParam(name = "systemVersion", min = 0, max = 1, typeName = "string") + IPrimitiveType theSystemVersion, + @OperationParam(name = "display", min = 0, max = 1, typeName = "string") IPrimitiveType theDisplay, + @OperationParam(name = "coding", min = 0, max = 1, typeName = "Coding") IBaseCoding theCoding, + @OperationParam(name = "codeableConcept", min = 0, max = 1, typeName = "CodeableConcept") + ICompositeType theCodeableConcept, + RequestDetails theRequestDetails) { CodeValidationResult result; startRequest(theServletRequest); try { // If a Remote Terminology Server has been configured, use it if (myValidationSupportChain != null && myValidationSupportChain.isRemoteTerminologyServiceConfigured()) { - String theSystemString = (theSystem != null && theSystem.hasValue()) ? theSystem.getValueAsString() : null; + String theSystemString = + (theSystem != null && theSystem.hasValue()) ? theSystem.getValueAsString() : null; String theCodeString = (theCode != null && theCode.hasValue()) ? theCode.getValueAsString() : null; - String theDisplayString = (theDisplay != null && theDisplay.hasValue()) ? theDisplay.getValueAsString() : null; - String theValueSetUrlString = (theValueSetUrl != null && theValueSetUrl.hasValue()) ? - theValueSetUrl.getValueAsString() : null; + String theDisplayString = + (theDisplay != null && theDisplay.hasValue()) ? theDisplay.getValueAsString() : null; + String theValueSetUrlString = (theValueSetUrl != null && theValueSetUrl.hasValue()) + ? theValueSetUrl.getValueAsString() + : null; if (theCoding != null) { if (isNotBlank(theCoding.getSystem())) { if (theSystemString != null && !theSystemString.equalsIgnoreCase(theCoding.getSystem())) { - throw new InvalidRequestException(Msg.code(2352) + "Coding.system '" + theCoding.getSystem() + - "' does not equal param system '" + theSystemString + "'. Unable to validate-code."); + throw new InvalidRequestException(Msg.code(2352) + "Coding.system '" + theCoding.getSystem() + + "' does not equal param system '" + theSystemString + + "'. 
Unable to validate-code."); } theSystemString = theCoding.getSystem(); theCodeString = theCoding.getCode(); @@ -154,26 +191,37 @@ public class ValueSetOperationProvider extends BaseJpaProvider { } } - result = validateCodeWithTerminologyService(theSystemString, theCodeString, theDisplayString, theValueSetUrlString) - .orElseGet(supplyUnableToValidateResult(theSystemString, theCodeString, theValueSetUrlString)); + result = validateCodeWithTerminologyService( + theSystemString, theCodeString, theDisplayString, theValueSetUrlString) + .orElseGet(supplyUnableToValidateResult(theSystemString, theCodeString, theValueSetUrlString)); } else { // Otherwise, use the local DAO layer to validate the code IFhirResourceDaoValueSet dao = getDao(); IPrimitiveType valueSetIdentifier; if (theValueSetUrl != null && theValueSetVersion != null) { - valueSetIdentifier = (IPrimitiveType) getContext().getElementDefinition("uri").newInstance(); + valueSetIdentifier = (IPrimitiveType) + getContext().getElementDefinition("uri").newInstance(); valueSetIdentifier.setValue(theValueSetUrl.getValue() + "|" + theValueSetVersion); } else { valueSetIdentifier = theValueSetUrl; } IPrimitiveType codeSystemIdentifier; if (theSystem != null && theSystemVersion != null) { - codeSystemIdentifier = (IPrimitiveType) getContext().getElementDefinition("uri").newInstance(); + codeSystemIdentifier = (IPrimitiveType) + getContext().getElementDefinition("uri").newInstance(); codeSystemIdentifier.setValue(theSystem.getValue() + "|" + theSystemVersion); } else { codeSystemIdentifier = theSystem; } - result = dao.validateCode(valueSetIdentifier, theId, theCode, codeSystemIdentifier, theDisplay, theCoding, theCodeableConcept, theRequestDetails); + result = dao.validateCode( + valueSetIdentifier, + theId, + theCode, + codeSystemIdentifier, + theDisplay, + theCoding, + theCodeableConcept, + theRequestDetails); } return toValidateCodeResult(getContext(), result); } finally { @@ -181,24 +229,31 @@ public class ValueSetOperationProvider extends BaseJpaProvider { } } - private Optional validateCodeWithTerminologyService(String theSystem, String theCode, - String theDisplay, String theValueSetUrl) { - return Optional.ofNullable(myValidationSupportChain.validateCode(new ValidationSupportContext(myValidationSupportChain), - new ConceptValidationOptions(), theSystem, theCode, theDisplay, theValueSetUrl)); + private Optional validateCodeWithTerminologyService( + String theSystem, String theCode, String theDisplay, String theValueSetUrl) { + return Optional.ofNullable(myValidationSupportChain.validateCode( + new ValidationSupportContext(myValidationSupportChain), + new ConceptValidationOptions(), + theSystem, + theCode, + theDisplay, + theValueSetUrl)); } - private Supplier supplyUnableToValidateResult(String theSystem, String theCode, String theValueSetUrl) { - return () -> new CodeValidationResult().setMessage("Validator is unable to provide validation for " + - theCode + "#" + theSystem + " - Unknown or unusable ValueSet[" + theValueSetUrl + "]"); + private Supplier supplyUnableToValidateResult( + String theSystem, String theCode, String theValueSetUrl) { + return () -> new CodeValidationResult() + .setMessage("Validator is unable to provide validation for " + theCode + "#" + theSystem + + " - Unknown or unusable ValueSet[" + theValueSetUrl + "]"); } - @Operation(name = ProviderConstants.OPERATION_INVALIDATE_EXPANSION, idempotent = false, typeName = "ValueSet", returnParameters = { - @OperationParam(name = "message", typeName = "string", min = 1, max 
= 1) - }) + @Operation( + name = ProviderConstants.OPERATION_INVALIDATE_EXPANSION, + idempotent = false, + typeName = "ValueSet", + returnParameters = {@OperationParam(name = "message", typeName = "string", min = 1, max = 1)}) public IBaseParameters invalidateValueSetExpansion( - @IdParam IIdType theValueSetId, - RequestDetails theRequestDetails, - HttpServletRequest theServletRequest) { + @IdParam IIdType theValueSetId, RequestDetails theRequestDetails, HttpServletRequest theServletRequest) { startRequest(theServletRequest); try { @@ -213,14 +268,21 @@ public class ValueSetOperationProvider extends BaseJpaProvider { } } - - public static ValueSetExpansionOptions createValueSetExpansionOptions(JpaStorageSettings theStorageSettings, IPrimitiveType theOffset, IPrimitiveType theCount, IPrimitiveType theIncludeHierarchy, IPrimitiveType theFilter, IPrimitiveType theDisplayLanguage) { + public static ValueSetExpansionOptions createValueSetExpansionOptions( + JpaStorageSettings theStorageSettings, + IPrimitiveType theOffset, + IPrimitiveType theCount, + IPrimitiveType theIncludeHierarchy, + IPrimitiveType theFilter, + IPrimitiveType theDisplayLanguage) { int offset = theStorageSettings.getPreExpandValueSetsDefaultOffset(); if (theOffset != null && theOffset.hasValue()) { if (theOffset.getValue() >= 0) { offset = theOffset.getValue(); } else { - throw new InvalidRequestException(Msg.code(1135) + "offset parameter for $expand operation must be >= 0 when specified. offset: " + theOffset.getValue()); + throw new InvalidRequestException( + Msg.code(1135) + "offset parameter for $expand operation must be >= 0 when specified. offset: " + + theOffset.getValue()); } } @@ -229,12 +291,17 @@ public class ValueSetOperationProvider extends BaseJpaProvider { if (theCount.getValue() >= 0) { count = theCount.getValue(); } else { - throw new InvalidRequestException(Msg.code(1136) + "count parameter for $expand operation must be >= 0 when specified. count: " + theCount.getValue()); + throw new InvalidRequestException( + Msg.code(1136) + "count parameter for $expand operation must be >= 0 when specified. 
count: " + + theCount.getValue()); } } int countMax = theStorageSettings.getPreExpandValueSetsMaxCount(); if (count > countMax) { - ourLog.warn("count parameter for $expand operation of {} exceeds maximum value of {}; using maximum value.", count, countMax); + ourLog.warn( + "count parameter for $expand operation of {} exceeds maximum value of {}; using maximum value.", + count, + countMax); count = countMax; } @@ -268,6 +335,4 @@ public class ValueSetOperationProvider extends BaseJpaProvider { return retVal; } - } - diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/ValueSetOperationProviderDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/ValueSetOperationProviderDstu2.java index 27e2c777ace..372b030629b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/ValueSetOperationProviderDstu2.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/ValueSetOperationProviderDstu2.java @@ -34,8 +34,8 @@ import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.springframework.beans.factory.annotation.Autowired; -import javax.servlet.http.HttpServletRequest; import java.util.List; +import javax.servlet.http.HttpServletRequest; import static ca.uhn.fhir.jpa.provider.BaseJpaResourceProviderCodeSystem.applyVersionToSystem; @@ -49,20 +49,33 @@ public class ValueSetOperationProviderDstu2 extends ValueSetOperationProvider { */ @Operation(name = JpaConstants.OPERATION_EXPAND, idempotent = true, typeName = "ValueSet") public IBaseResource expand( - HttpServletRequest theServletRequest, - @IdParam(optional = true) IIdType theId, - @OperationParam(name = "valueSet", min = 0, max = 1) IBaseResource theValueSet, - @OperationParam(name = "url", min = 0, max = 1, typeName = "uri") IPrimitiveType theUrl, - @OperationParam(name = "identifier", min = 0, max = 1, typeName = "uri") IPrimitiveType theIdentifier, - @OperationParam(name = "valueSetVersion", min = 0, max = 1, typeName = "string") IPrimitiveType theValueSetVersion, - @OperationParam(name = "filter", min = 0, max = 1, typeName = "string") IPrimitiveType theFilter, - @OperationParam(name = "context", min = 0, max = 1, typeName = "string") IPrimitiveType theContext, - @OperationParam(name = "contextDirection", min = 0, max = 1, typeName = "string") IPrimitiveType theContextDirection, - @OperationParam(name = "offset", min = 0, max = 1, typeName = "integer") IPrimitiveType theOffset, - @OperationParam(name = "count", min = 0, max = 1, typeName = "integer") IPrimitiveType theCount, - @OperationParam(name = JpaConstants.OPERATION_EXPAND_PARAM_DISPLAY_LANGUAGE, min = 0, max = 1, typeName = "code") IPrimitiveType theDisplayLanguage, - @OperationParam(name = JpaConstants.OPERATION_EXPAND_PARAM_INCLUDE_HIERARCHY, min = 0, max = 1, typeName = "boolean") IPrimitiveType theIncludeHierarchy, - RequestDetails theRequestDetails) { + HttpServletRequest theServletRequest, + @IdParam(optional = true) IIdType theId, + @OperationParam(name = "valueSet", min = 0, max = 1) IBaseResource theValueSet, + @OperationParam(name = "url", min = 0, max = 1, typeName = "uri") IPrimitiveType theUrl, + @OperationParam(name = "identifier", min = 0, max = 1, typeName = "uri") + IPrimitiveType theIdentifier, + @OperationParam(name = "valueSetVersion", min = 0, max = 1, typeName = "string") + IPrimitiveType theValueSetVersion, + @OperationParam(name = "filter", min = 0, max = 1, typeName = "string") IPrimitiveType theFilter, + 
@OperationParam(name = "context", min = 0, max = 1, typeName = "string") IPrimitiveType theContext, + @OperationParam(name = "contextDirection", min = 0, max = 1, typeName = "string") + IPrimitiveType theContextDirection, + @OperationParam(name = "offset", min = 0, max = 1, typeName = "integer") IPrimitiveType theOffset, + @OperationParam(name = "count", min = 0, max = 1, typeName = "integer") IPrimitiveType theCount, + @OperationParam( + name = JpaConstants.OPERATION_EXPAND_PARAM_DISPLAY_LANGUAGE, + min = 0, + max = 1, + typeName = "code") + IPrimitiveType theDisplayLanguage, + @OperationParam( + name = JpaConstants.OPERATION_EXPAND_PARAM_INCLUDE_HIERARCHY, + min = 0, + max = 1, + typeName = "boolean") + IPrimitiveType theIncludeHierarchy, + RequestDetails theRequestDetails) { IPrimitiveType url = theUrl; if (theIdentifier != null) { @@ -72,34 +85,50 @@ public class ValueSetOperationProviderDstu2 extends ValueSetOperationProvider { startRequest(theServletRequest); try { - return getDao().expand(theId, theValueSet, url, theValueSetVersion, theFilter, theContext, theContextDirection, theOffset, theCount, theDisplayLanguage, theIncludeHierarchy, theRequestDetails); + return getDao().expand( + theId, + theValueSet, + url, + theValueSetVersion, + theFilter, + theContext, + theContextDirection, + theOffset, + theCount, + theDisplayLanguage, + theIncludeHierarchy, + theRequestDetails); } finally { endRequest(theServletRequest); } } - /** * $lookup operation - This is on CodeSystem after DSTU2 but on ValueSet in DSTU2 */ @SuppressWarnings("unchecked") - @Operation(name = JpaConstants.OPERATION_LOOKUP, idempotent = true, typeName = "ValueSet", returnParameters = { - @OperationParam(name = "name", typeName = "string", min = 1), - @OperationParam(name = "version", typeName = "string", min = 0), - @OperationParam(name = "display", typeName = "string", min = 1), - @OperationParam(name = "abstract", typeName = "boolean", min = 1), - }) + @Operation( + name = JpaConstants.OPERATION_LOOKUP, + idempotent = true, + typeName = "ValueSet", + returnParameters = { + @OperationParam(name = "name", typeName = "string", min = 1), + @OperationParam(name = "version", typeName = "string", min = 0), + @OperationParam(name = "display", typeName = "string", min = 1), + @OperationParam(name = "abstract", typeName = "boolean", min = 1), + }) public IBaseParameters lookup( - HttpServletRequest theServletRequest, - @OperationParam(name = "code", min = 0, max = 1, typeName = "code") IPrimitiveType theCode, - @OperationParam(name = "system", min = 0, max = 1, typeName = "uri") IPrimitiveType theSystem, - @OperationParam(name = "coding", min = 0, max = 1, typeName = "Coding") IBaseCoding theCoding, - @OperationParam(name = "version", min = 0, max = 1, typeName = "string") IPrimitiveType theVersion, - @OperationParam(name = "displayLanguage", min = 0, max = 1, typeName = "code") IPrimitiveType theDisplayLanguage, - @OperationParam(name = "property", min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "code") List> theProperties, - RequestDetails theRequestDetails - ) { + HttpServletRequest theServletRequest, + @OperationParam(name = "code", min = 0, max = 1, typeName = "code") IPrimitiveType theCode, + @OperationParam(name = "system", min = 0, max = 1, typeName = "uri") IPrimitiveType theSystem, + @OperationParam(name = "coding", min = 0, max = 1, typeName = "Coding") IBaseCoding theCoding, + @OperationParam(name = "version", min = 0, max = 1, typeName = "string") IPrimitiveType theVersion, + @OperationParam(name = 
"displayLanguage", min = 0, max = 1, typeName = "code") + IPrimitiveType theDisplayLanguage, + @OperationParam(name = "property", min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "code") + List> theProperties, + RequestDetails theRequestDetails) { startRequest(theServletRequest); try { @@ -107,13 +136,12 @@ public class ValueSetOperationProviderDstu2 extends ValueSetOperationProvider { applyVersionToSystem(theSystem, theVersion); FhirTerser terser = getContext().newTerser(); - result = JpaResourceDaoCodeSystem.doLookupCode(getContext(), terser, myValidationSupport, theCode, theSystem, theCoding, theDisplayLanguage); + result = JpaResourceDaoCodeSystem.doLookupCode( + getContext(), terser, myValidationSupport, theCode, theSystem, theCoding, theDisplayLanguage); result.throwNotFoundIfAppropriate(); return result.toParameters(theRequestDetails.getFhirContext(), theProperties); } finally { endRequest(theServletRequest); } } - } - diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/BaseJpaResourceProviderConceptMapDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/BaseJpaResourceProviderConceptMapDstu3.java deleted file mode 100644 index 6d4426c36c2..00000000000 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/BaseJpaResourceProviderConceptMapDstu3.java +++ /dev/null @@ -1,19 +0,0 @@ -/*- - * #%L - * HAPI FHIR JPA Server - * %% - * Copyright (C) 2014 - 2023 Smile CDR, Inc. - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L%
- */
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/JpaConformanceProviderDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/JpaConformanceProviderDstu3.java
index f4c85ec3919..2d971de810c 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/JpaConformanceProviderDstu3.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/JpaConformanceProviderDstu3.java
@@ -44,12 +44,12 @@ import org.hl7.fhir.dstu3.model.Extension;
 import org.hl7.fhir.dstu3.model.Meta;
 import org.hl7.fhir.dstu3.model.UriType;
-import javax.servlet.http.HttpServletRequest;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.stream.Collectors;
+import javax.servlet.http.HttpServletRequest;
 import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
 import static org.apache.commons.lang3.StringUtils.isBlank;
@@ -80,7 +80,11 @@ public class JpaConformanceProviderDstu3 extends org.hl7.fhir.dstu3.hapi.rest.se
 	/**
 	 * Constructor
 	 */
-	public JpaConformanceProviderDstu3(RestfulServer theRestfulServer, IFhirSystemDao theSystemDao, JpaStorageSettings theStorageSettings, ISearchParamRegistry theSearchParamRegistry) {
+	public JpaConformanceProviderDstu3(
+			RestfulServer theRestfulServer,
+			IFhirSystemDao theSystemDao,
+			JpaStorageSettings theStorageSettings,
+			ISearchParamRegistry theSearchParamRegistry) {
 		super(theRestfulServer);
 		myRestfulServer = theRestfulServer;
 		mySystemDao = theSystemDao;
@@ -113,14 +117,16 @@ public class JpaConformanceProviderDstu3 extends org.hl7.fhir.dstu3.hapi.rest.se
 			nextResource.setVersioning(ResourceVersionPolicy.VERSIONEDUPDATE);
 			ConditionalDeleteStatus conditionalDelete = nextResource.getConditionalDelete();
-			if (conditionalDelete == ConditionalDeleteStatus.MULTIPLE && myStorageSettings.isAllowMultipleDelete() == false) {
+			if (conditionalDelete == ConditionalDeleteStatus.MULTIPLE
+					&& myStorageSettings.isAllowMultipleDelete() == false) {
 				nextResource.setConditionalDelete(ConditionalDeleteStatus.SINGLE);
 			}
 			// Add resource counts
 			Long count = counts.get(nextResource.getTypeElement().getValueAsString());
 			if (count != null) {
-				nextResource.addExtension(new Extension(ExtensionConstants.CONF_RESOURCE_COUNT, new DecimalType(count)));
+				nextResource.addExtension(
+						new Extension(ExtensionConstants.CONF_RESOURCE_COUNT, new DecimalType(count)));
 			}
 			nextResource.getSearchParam().clear();
@@ -161,7 +167,6 @@ public class JpaConformanceProviderDstu3 extends org.hl7.fhir.dstu3.hapi.rest.se
 						// Shouldn't happen
 						break;
 				}
-
 			}
 			updateIncludesList(nextResource, searchParams);
@@ -171,7 +176,9 @@ public class JpaConformanceProviderDstu3 extends org.hl7.fhir.dstu3.hapi.rest.se
 		massage(retVal);
-		if (myStorageSettings.getSupportedSubscriptionTypes().contains(org.hl7.fhir.dstu2.model.Subscription.SubscriptionChannelType.WEBSOCKET)) {
+		if (myStorageSettings
+				.getSupportedSubscriptionTypes()
+				.contains(org.hl7.fhir.dstu2.model.Subscription.SubscriptionChannelType.WEBSOCKET)) {
 			if (isNotBlank(myStorageSettings.getWebsocketContextPath())) {
 				Extension websocketExtension = new Extension();
 				websocketExtension.setUrl(Constants.CAPABILITYSTATEMENT_WEBSOCKET_URL);
@@ -186,8 +193,8 @@ public class JpaConformanceProviderDstu3 extends org.hl7.fhir.dstu3.hapi.rest.se
 	}
 	private ResourceSearchParams constructCompleteSearchParamList(String theResourceName) {
-		// Borrowed from
hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ServerCapabilityStatementProvider.java - + // Borrowed from + // hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ServerCapabilityStatementProvider.java /* * If we have an explicit registry (which will be the case in the JPA server) we use it as priority, @@ -195,16 +202,18 @@ public class JpaConformanceProviderDstu3 extends org.hl7.fhir.dstu3.hapi.rest.se * global params like _lastUpdated */ ResourceSearchParams searchParams; - ResourceSearchParams serverConfigurationActiveSearchParams = myServerConfiguration.getActiveSearchParams(theResourceName); + ResourceSearchParams serverConfigurationActiveSearchParams = + myServerConfiguration.getActiveSearchParams(theResourceName); if (mySearchParamRegistry != null) { - searchParams = mySearchParamRegistry.getActiveSearchParams(theResourceName).makeCopy(); + searchParams = + mySearchParamRegistry.getActiveSearchParams(theResourceName).makeCopy(); if (searchParams == null) { return ResourceSearchParams.empty(theResourceName); } for (String nextBuiltInSpName : serverConfigurationActiveSearchParams.getSearchParamNames()) { - if (nextBuiltInSpName.startsWith("_") && - !searchParams.containsParamName(nextBuiltInSpName) && - searchParamEnabled(nextBuiltInSpName)) { + if (nextBuiltInSpName.startsWith("_") + && !searchParams.containsParamName(nextBuiltInSpName) + && searchParamEnabled(nextBuiltInSpName)) { searchParams.put(nextBuiltInSpName, serverConfigurationActiveSearchParams.get(nextBuiltInSpName)); } } @@ -216,42 +225,43 @@ public class JpaConformanceProviderDstu3 extends org.hl7.fhir.dstu3.hapi.rest.se } protected boolean searchParamEnabled(String theSearchParam) { - // Borrowed from hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ServerCapabilityStatementProvider.java + // Borrowed from + // hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ServerCapabilityStatementProvider.java return !Constants.PARAM_FILTER.equals(theSearchParam) || myStorageSettings.isFilterParameterEnabled(); } - - private void updateRevIncludesList(CapabilityStatementRestResourceComponent theNextResource, ResourceSearchParams theSearchParams) { + private void updateRevIncludesList( + CapabilityStatementRestResourceComponent theNextResource, ResourceSearchParams theSearchParams) { // Add RevInclude to CapabilityStatement.rest.resource if (theNextResource.getSearchRevInclude().isEmpty()) { String resourcename = theNextResource.getType(); - Set allResourceTypes = myServerConfiguration.collectMethodBindings().keySet(); + Set allResourceTypes = + myServerConfiguration.collectMethodBindings().keySet(); for (String otherResourceType : allResourceTypes) { if (isBlank(otherResourceType)) { continue; } - ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams(otherResourceType); - activeSearchParams.values() - .stream() - .filter(t -> isNotBlank(t.getName())) - .filter(t -> t.getTargets().contains(resourcename)) - .forEach(t -> theNextResource.addSearchRevInclude(otherResourceType + ":" + t.getName())); + ResourceSearchParams activeSearchParams = + mySearchParamRegistry.getActiveSearchParams(otherResourceType); + activeSearchParams.values().stream() + .filter(t -> isNotBlank(t.getName())) + .filter(t -> t.getTargets().contains(resourcename)) + .forEach(t -> theNextResource.addSearchRevInclude(otherResourceType + ":" + t.getName())); } } - - } - private void updateIncludesList(CapabilityStatementRestResourceComponent theResource, ResourceSearchParams 
theSearchParams) { - // Borrowed from hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ServerCapabilityStatementProvider.java + private void updateIncludesList( + CapabilityStatementRestResourceComponent theResource, ResourceSearchParams theSearchParams) { + // Borrowed from + // hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ServerCapabilityStatementProvider.java String resourceName = theResource.getType(); if (theResource.getSearchInclude().isEmpty()) { - List includes = theSearchParams - .values() - .stream() - .filter(t -> t.getParamType() == RestSearchParameterTypeEnum.REFERENCE) - .map(t -> resourceName + ":" + t.getName()) - .sorted().collect(Collectors.toList()); + List includes = theSearchParams.values().stream() + .filter(t -> t.getParamType() == RestSearchParameterTypeEnum.REFERENCE) + .map(t -> resourceName + ":" + t.getName()) + .sorted() + .collect(Collectors.toList()); theResource.addSearchInclude("*"); for (String nextInclude : includes) { theResource.addSearchInclude(nextInclude); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/IConsentExtensionProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/IConsentExtensionProvider.java index 6f32028e1e5..ae63a090f16 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/IConsentExtensionProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/IConsentExtensionProvider.java @@ -48,5 +48,4 @@ public interface IConsentExtensionProvider extends IMemberMatchConsentHook { } ourLog.trace("{} extension(s) added to Consent", extensions.size()); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/IMemberMatchConsentHook.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/IMemberMatchConsentHook.java index 6cd82f1a180..8c2f59f4023 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/IMemberMatchConsentHook.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/IMemberMatchConsentHook.java @@ -26,5 +26,4 @@ import java.util.function.Consumer; /** * Pre-save hook for Consent saved during $member-match. */ -public interface IMemberMatchConsentHook extends Consumer { -} +public interface IMemberMatchConsentHook extends Consumer {} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/MemberMatchR4ResourceProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/MemberMatchR4ResourceProvider.java index d013e9197cb..5234782144a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/MemberMatchR4ResourceProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/MemberMatchR4ResourceProvider.java @@ -33,8 +33,8 @@ import org.hl7.fhir.r4.model.Coverage; import org.hl7.fhir.r4.model.Parameters; import org.hl7.fhir.r4.model.Patient; -import javax.annotation.Nullable; import java.util.Optional; +import javax.annotation.Nullable; public class MemberMatchR4ResourceProvider { @@ -45,76 +45,85 @@ public class MemberMatchR4ResourceProvider { myFhirContext = theFhirContext; myMemberMatcherR4Helper = theMemberMatcherR4Helper; } - + /** * /Patient/$member-match operation * Basic implementation matching by coverage id or by coverage identifier. 
Matching by * Beneficiary (Patient) demographics on family name and birthdate in this version */ - @Operation(name = ProviderConstants.OPERATION_MEMBER_MATCH, typeName = "Patient", canonicalUrl = "http://hl7.org/fhir/us/davinci-hrex/OperationDefinition/member-match", idempotent = false, returnParameters = { - @OperationParam(name = "MemberIdentifier", typeName = "string") - }) + @Operation( + name = ProviderConstants.OPERATION_MEMBER_MATCH, + typeName = "Patient", + canonicalUrl = "http://hl7.org/fhir/us/davinci-hrex/OperationDefinition/member-match", + idempotent = false, + returnParameters = {@OperationParam(name = "MemberIdentifier", typeName = "string")}) public Parameters patientMemberMatch( - javax.servlet.http.HttpServletRequest theServletRequest, - - @Description(shortDefinition = "The target of the operation. Will be returned with Identifier for matched coverage added.") - @OperationParam(name = Constants.PARAM_MEMBER_PATIENT, min = 1, max = 1) - Patient theMemberPatient, - - @Description(shortDefinition = "Old coverage information as extracted from beneficiary's card.") - @OperationParam(name = Constants.PARAM_OLD_COVERAGE, min = 1, max = 1) - Coverage oldCoverage, - - @Description(shortDefinition = "New Coverage information. Provided as a reference. Optionally returned unmodified.") - @OperationParam(name = Constants.PARAM_NEW_COVERAGE, min = 1, max = 1) - Coverage newCoverage, - - @Description(shortDefinition = "Consent information. Consent held by the system seeking the match that grants permission to access the patient information.") - @OperationParam(name = Constants.PARAM_CONSENT, min = 1, max = 1) - Consent theConsent, - - RequestDetails theRequestDetails - ) { + javax.servlet.http.HttpServletRequest theServletRequest, + @Description( + shortDefinition = + "The target of the operation. Will be returned with Identifier for matched coverage added.") + @OperationParam(name = Constants.PARAM_MEMBER_PATIENT, min = 1, max = 1) + Patient theMemberPatient, + @Description(shortDefinition = "Old coverage information as extracted from beneficiary's card.") + @OperationParam(name = Constants.PARAM_OLD_COVERAGE, min = 1, max = 1) + Coverage oldCoverage, + @Description( + shortDefinition = + "New Coverage information. Provided as a reference. Optionally returned unmodified.") + @OperationParam(name = Constants.PARAM_NEW_COVERAGE, min = 1, max = 1) + Coverage newCoverage, + @Description( + shortDefinition = + "Consent information. 
Consent held by the system seeking the match that grants permission to access the patient information.") + @OperationParam(name = Constants.PARAM_CONSENT, min = 1, max = 1) + Consent theConsent, + RequestDetails theRequestDetails) { return doMemberMatchOperation(theMemberPatient, oldCoverage, newCoverage, theConsent, theRequestDetails); } - - private Parameters doMemberMatchOperation(Patient theMemberPatient, - Coverage theCoverageToMatch, Coverage theCoverageToLink, Consent theConsent, RequestDetails theRequestDetails) { + private Parameters doMemberMatchOperation( + Patient theMemberPatient, + Coverage theCoverageToMatch, + Coverage theCoverageToLink, + Consent theConsent, + RequestDetails theRequestDetails) { validateParams(theMemberPatient, theCoverageToMatch, theCoverageToLink, theConsent); - Optional coverageOpt = myMemberMatcherR4Helper.findMatchingCoverage(theCoverageToMatch, theRequestDetails); + Optional coverageOpt = + myMemberMatcherR4Helper.findMatchingCoverage(theCoverageToMatch, theRequestDetails); if (coverageOpt.isEmpty()) { - String i18nMessage = myFhirContext.getLocalizer().getMessage( - "operation.member.match.error.coverage.not.found"); + String i18nMessage = + myFhirContext.getLocalizer().getMessage("operation.member.match.error.coverage.not.found"); throw new UnprocessableEntityException(Msg.code(1155) + i18nMessage); } Coverage coverage = coverageOpt.get(); Optional patientOpt = myMemberMatcherR4Helper.getBeneficiaryPatient(coverage, theRequestDetails); if (patientOpt.isEmpty()) { - String i18nMessage = myFhirContext.getLocalizer().getMessage( - "operation.member.match.error.beneficiary.not.found"); + String i18nMessage = + myFhirContext.getLocalizer().getMessage("operation.member.match.error.beneficiary.not.found"); throw new UnprocessableEntityException(Msg.code(1156) + i18nMessage); } Patient patient = patientOpt.get(); if (!myMemberMatcherR4Helper.validPatientMember(patient, theMemberPatient, theRequestDetails)) { - String i18nMessage = myFhirContext.getLocalizer().getMessage( - "operation.member.match.error.patient.not.found"); + String i18nMessage = + myFhirContext.getLocalizer().getMessage("operation.member.match.error.patient.not.found"); throw new UnprocessableEntityException(Msg.code(2146) + i18nMessage); } if (patient.getIdentifier().isEmpty()) { - String i18nMessage = myFhirContext.getLocalizer().getMessage( - "operation.member.match.error.beneficiary.without.identifier"); + String i18nMessage = myFhirContext + .getLocalizer() + .getMessage("operation.member.match.error.beneficiary.without.identifier"); throw new UnprocessableEntityException(Msg.code(1157) + i18nMessage); } if (!myMemberMatcherR4Helper.validConsentDataAccess(theConsent)) { - String i18nMessage = myFhirContext.getLocalizer().getMessage( - "operation.member.match.error.consent.release.data.mismatch"); + String i18nMessage = myFhirContext + .getLocalizer() + .getMessage("operation.member.match.error.consent.release.data.mismatch"); throw new UnprocessableEntityException(Msg.code(2147) + i18nMessage); } @@ -123,7 +132,8 @@ public class MemberMatchR4ResourceProvider { return myMemberMatcherR4Helper.buildSuccessReturnParameters(theMemberPatient, theCoverageToLink, theConsent); } - private void validateParams(Patient theMemberPatient, Coverage theOldCoverage, Coverage theNewCoverage, Consent theConsent) { + private void validateParams( + Patient theMemberPatient, Coverage theOldCoverage, Coverage theNewCoverage, Consent theConsent) { validateParam(theMemberPatient, 
Constants.PARAM_MEMBER_PATIENT); validateParam(theOldCoverage, Constants.PARAM_OLD_COVERAGE); validateParam(theNewCoverage, Constants.PARAM_NEW_COVERAGE); @@ -134,8 +144,9 @@ public class MemberMatchR4ResourceProvider { private void validateParam(@Nullable Object theParam, String theParamName) { if (theParam == null) { - String i18nMessage = myFhirContext.getLocalizer().getMessage( - "operation.member.match.error.missing.parameter", theParamName); + String i18nMessage = myFhirContext + .getLocalizer() + .getMessage("operation.member.match.error.missing.parameter", theParamName); throw new UnprocessableEntityException(Msg.code(1158) + i18nMessage); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/MemberMatcherR4Helper.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/MemberMatcherR4Helper.java index 7a82c122055..d9a271feac9 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/MemberMatcherR4Helper.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/MemberMatcherR4Helper.java @@ -46,10 +46,10 @@ import org.hl7.fhir.r4.model.Parameters; import org.hl7.fhir.r4.model.Patient; import org.hl7.fhir.r4.model.Reference; -import javax.annotation.Nullable; import java.util.List; import java.util.Optional; import java.util.function.Consumer; +import javax.annotation.Nullable; import static ca.uhn.fhir.rest.api.Constants.PARAM_CONSENT; import static ca.uhn.fhir.rest.api.Constants.PARAM_MEMBER_IDENTIFIER; @@ -65,7 +65,8 @@ public class MemberMatcherR4Helper { private static final String COVERAGE_TYPE = "Coverage"; private static final String CONSENT_POLICY_REGULAR_TYPE = "regular"; private static final String CONSENT_POLICY_SENSITIVE_TYPE = "sensitive"; - public static final String CONSENT_IDENTIFIER_CODE_SYSTEM = "https://smilecdr.com/fhir/ns/member-match-source-client"; + public static final String CONSENT_IDENTIFIER_CODE_SYSTEM = + "https://smilecdr.com/fhir/ns/member-match-source-client"; private final FhirContext myFhirContext; private final IFhirResourceDao myCoverageDao; @@ -77,12 +78,11 @@ public class MemberMatcherR4Helper { private boolean myRegularFilterSupported = false; public MemberMatcherR4Helper( - FhirContext theContext, - IFhirResourceDao theCoverageDao, - IFhirResourceDao thePatientDao, - IFhirResourceDao theConsentDao, - @Nullable IMemberMatchConsentHook theConsentModifier - ) { + FhirContext theContext, + IFhirResourceDao theCoverageDao, + IFhirResourceDao thePatientDao, + IFhirResourceDao theConsentDao, + @Nullable IMemberMatchConsentHook theConsentModifier) { myFhirContext = theContext; myConsentDao = theConsentDao; myPatientDao = thePatientDao; @@ -109,35 +109,33 @@ public class MemberMatcherR4Helper { return Optional.empty(); } - - private List findCoverageByCoverageIdentifier(Coverage theCoverageToMatch, RequestDetails theRequestDetails) { + private List findCoverageByCoverageIdentifier( + Coverage theCoverageToMatch, RequestDetails theRequestDetails) { TokenOrListParam identifierParam = new TokenOrListParam(); for (Identifier identifier : theCoverageToMatch.getIdentifier()) { identifierParam.add(identifier.getSystem(), identifier.getValue()); } - SearchParameterMap paramMap = new SearchParameterMap() - .add("identifier", identifierParam); + SearchParameterMap paramMap = new SearchParameterMap().add("identifier", identifierParam); ca.uhn.fhir.rest.api.server.IBundleProvider retVal = myCoverageDao.search(paramMap, theRequestDetails); return retVal.getAllResources(); } - 
private boolean isCoverage(IBaseResource theIBaseResource) { return theIBaseResource.fhirType().equals(COVERAGE_TYPE); } - - private List findCoverageByCoverageId(Coverage theCoverageToMatch, RequestDetails theRequestDetails) { - SearchParameterMap paramMap = new SearchParameterMap() - .add("_id", new StringParam(theCoverageToMatch.getId())); + private List findCoverageByCoverageId( + Coverage theCoverageToMatch, RequestDetails theRequestDetails) { + SearchParameterMap paramMap = new SearchParameterMap().add("_id", new StringParam(theCoverageToMatch.getId())); ca.uhn.fhir.rest.api.server.IBundleProvider retVal = myCoverageDao.search(paramMap, theRequestDetails); return retVal.getAllResources(); } - public void updateConsentForMemberMatch(Consent theConsent, Patient thePatient, Patient theMemberPatient, RequestDetails theRequestDetails) { + public void updateConsentForMemberMatch( + Consent theConsent, Patient thePatient, Patient theMemberPatient, RequestDetails theRequestDetails) { addIdentifierToConsent(theConsent, theMemberPatient); updateConsentPatientAndPerformer(theConsent, thePatient); myConsentModifier.accept(theConsent); @@ -151,46 +149,44 @@ public class MemberMatcherR4Helper { ParametersUtil.addParameterToParameters(myFhirContext, parameters, PARAM_MEMBER_PATIENT, theMemberPatient); ParametersUtil.addParameterToParameters(myFhirContext, parameters, PARAM_NEW_COVERAGE, theCoverage); ParametersUtil.addParameterToParameters(myFhirContext, parameters, PARAM_CONSENT, theConsent); - ParametersUtil.addParameterToParameters(myFhirContext, parameters, PARAM_MEMBER_IDENTIFIER, getIdentifier(theMemberPatient)); + ParametersUtil.addParameterToParameters( + myFhirContext, parameters, PARAM_MEMBER_IDENTIFIER, getIdentifier(theMemberPatient)); return (Parameters) parameters; } private Identifier getIdentifier(Patient theMemberPatient) { - return theMemberPatient.getIdentifier() - .stream() - .filter(this::isTypeMB) - .findFirst() - .orElseThrow(()->{ - String i18nMessage = myFhirContext.getLocalizer().getMessage( - "operation.member.match.error.beneficiary.without.identifier"); - return new UnprocessableEntityException(Msg.code(2219) + i18nMessage); - }); + return theMemberPatient.getIdentifier().stream() + .filter(this::isTypeMB) + .findFirst() + .orElseThrow(() -> { + String i18nMessage = myFhirContext + .getLocalizer() + .getMessage("operation.member.match.error.beneficiary.without.identifier"); + return new UnprocessableEntityException(Msg.code(2219) + i18nMessage); + }); } private boolean isTypeMB(Identifier theMemberIdentifier) { - return theMemberIdentifier.getType() != null && - theMemberIdentifier.getType().getCoding() - .stream() - .anyMatch(typeCoding->typeCoding.getCode().equals("MB")); + return theMemberIdentifier.getType() != null + && theMemberIdentifier.getType().getCoding().stream() + .anyMatch(typeCoding -> typeCoding.getCode().equals("MB")); } - public void addMemberIdentifierToMemberPatient(Patient theMemberPatient, Identifier theNewIdentifier) { Coding coding = new Coding() - .setSystem(OUT_COVERAGE_IDENTIFIER_CODE_SYSTEM) - .setCode(OUT_COVERAGE_IDENTIFIER_CODE) - .setDisplay(OUT_COVERAGE_IDENTIFIER_TEXT) - .setUserSelected(false); + .setSystem(OUT_COVERAGE_IDENTIFIER_CODE_SYSTEM) + .setCode(OUT_COVERAGE_IDENTIFIER_CODE) + .setDisplay(OUT_COVERAGE_IDENTIFIER_TEXT) + .setUserSelected(false); - CodeableConcept concept = new CodeableConcept() - .setCoding(Lists.newArrayList(coding)) - .setText(OUT_COVERAGE_IDENTIFIER_TEXT); + CodeableConcept concept = + new 
CodeableConcept().setCoding(Lists.newArrayList(coding)).setText(OUT_COVERAGE_IDENTIFIER_TEXT); Identifier newIdentifier = new Identifier() - .setUse(Identifier.IdentifierUse.USUAL) - .setType(concept) - .setSystem(theNewIdentifier.getSystem()) - .setValue(theNewIdentifier.getValue()); + .setUse(Identifier.IdentifierUse.USUAL) + .setType(concept) + .setSystem(theNewIdentifier.getSystem()) + .setValue(theNewIdentifier.getValue()); theMemberPatient.addIdentifier(newIdentifier); } @@ -201,7 +197,7 @@ public class MemberMatcherR4Helper { } if (theCoverage.getBeneficiaryTarget() != null - && !theCoverage.getBeneficiaryTarget().getIdentifier().isEmpty()) { + && !theCoverage.getBeneficiaryTarget().getIdentifier().isEmpty()) { return Optional.of(theCoverage.getBeneficiaryTarget()); } @@ -225,8 +221,11 @@ public class MemberMatcherR4Helper { /** * Matching by member patient demographics - family name and birthdate only */ - public boolean validPatientMember(Patient thePatientFromContract, Patient thePatientToMatch, RequestDetails theRequestDetails) { - if (thePatientFromContract == null || thePatientFromContract.getIdElement() == null || thePatientToMatch == null) { + public boolean validPatientMember( + Patient thePatientFromContract, Patient thePatientToMatch, RequestDetails theRequestDetails) { + if (thePatientFromContract == null + || thePatientFromContract.getIdElement() == null + || thePatientToMatch == null) { return false; } StringOrListParam familyName = new StringOrListParam(); @@ -234,12 +233,19 @@ public class MemberMatcherR4Helper { familyName.addOr(new StringParam(name.getFamily())); } SearchParameterMap map = new SearchParameterMap() - .add("family", familyName) - .add("birthdate", new DateParam(thePatientToMatch.getBirthDateElement().getValueAsString())); + .add("family", familyName) + .add( + "birthdate", + new DateParam(thePatientToMatch.getBirthDateElement().getValueAsString())); ca.uhn.fhir.rest.api.server.IBundleProvider bundle = myPatientDao.search(map, theRequestDetails); for (IBaseResource patientResource : bundle.getAllResources()) { IIdType patientId = patientResource.getIdElement().toUnqualifiedVersionless(); - if (patientId.getValue().equals(thePatientFromContract.getIdElement().toUnqualifiedVersionless().getValue())) { + if (patientId + .getValue() + .equals(thePatientFromContract + .getIdElement() + .toUnqualifiedVersionless() + .getValue())) { return true; } } @@ -273,7 +279,8 @@ public class MemberMatcherR4Helper { private void addIdentifierToConsent(Consent theConsent, Patient thePatient) { String consentId = getIdentifier(thePatient).getValue(); - Identifier consentIdentifier = new Identifier().setSystem(CONSENT_IDENTIFIER_CODE_SYSTEM).setValue(consentId); + Identifier consentIdentifier = + new Identifier().setSystem(CONSENT_IDENTIFIER_CODE_SYSTEM).setValue(consentId); theConsent.addIdentifier(consentIdentifier); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/reindex/Batch2DaoSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/reindex/Batch2DaoSvcImpl.java index f802f8235a5..f195fc25475 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/reindex/Batch2DaoSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/reindex/Batch2DaoSvcImpl.java @@ -45,11 +45,11 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Slice; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import 
java.util.Date; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; @@ -76,27 +76,35 @@ public class Batch2DaoSvcImpl implements IBatch2DaoSvc { } @Override - public IResourcePidList fetchResourceIdsPage(Date theStart, Date theEnd, @Nonnull Integer thePageSize, @Nullable RequestPartitionId theRequestPartitionId, @Nullable String theUrl) { + public IResourcePidList fetchResourceIdsPage( + Date theStart, + Date theEnd, + @Nonnull Integer thePageSize, + @Nullable RequestPartitionId theRequestPartitionId, + @Nullable String theUrl) { return myTransactionService - .withSystemRequest() - .withRequestPartitionId(theRequestPartitionId) - .execute(() -> { - if (theUrl == null) { - return fetchResourceIdsPageNoUrl(theStart, theEnd, thePageSize, theRequestPartitionId); - } else { - return fetchResourceIdsPageWithUrl(theStart, theEnd, thePageSize, theUrl, theRequestPartitionId); - } - }); + .withSystemRequest() + .withRequestPartitionId(theRequestPartitionId) + .execute(() -> { + if (theUrl == null) { + return fetchResourceIdsPageNoUrl(theStart, theEnd, thePageSize, theRequestPartitionId); + } else { + return fetchResourceIdsPageWithUrl( + theStart, theEnd, thePageSize, theUrl, theRequestPartitionId); + } + }); } - private IResourcePidList fetchResourceIdsPageWithUrl(Date theStart, Date theEnd, int thePageSize, String theUrl, RequestPartitionId theRequestPartitionId) { + private IResourcePidList fetchResourceIdsPageWithUrl( + Date theStart, Date theEnd, int thePageSize, String theUrl, RequestPartitionId theRequestPartitionId) { String resourceType = theUrl.substring(0, theUrl.indexOf('?')); RuntimeResourceDefinition def = myFhirContext.getResourceDefinition(resourceType); SearchParameterMap searchParamMap = myMatchUrlService.translateMatchUrl(theUrl, def); searchParamMap.setSort(new SortSpec(Constants.PARAM_LASTUPDATED, SortOrderEnum.ASC)); - DateRangeParam chunkDateRange = DateRangeUtil.narrowDateRange(searchParamMap.getLastUpdated(), theStart, theEnd); + DateRangeParam chunkDateRange = + DateRangeUtil.narrowDateRange(searchParamMap.getLastUpdated(), theStart, theEnd); searchParamMap.setLastUpdated(chunkDateRange); searchParamMap.setCount(thePageSize); @@ -115,15 +123,23 @@ public class Batch2DaoSvcImpl implements IBatch2DaoSvc { } @Nonnull - private IResourcePidList fetchResourceIdsPageNoUrl(Date theStart, Date theEnd, int thePagesize, RequestPartitionId theRequestPartitionId) { + private IResourcePidList fetchResourceIdsPageNoUrl( + Date theStart, Date theEnd, int thePagesize, RequestPartitionId theRequestPartitionId) { Pageable page = Pageable.ofSize(thePagesize); Slice slice; if (theRequestPartitionId == null || theRequestPartitionId.isAllPartitions()) { - slice = myResourceTableDao.findIdsTypesAndUpdateTimesOfResourcesWithinUpdatedRangeOrderedFromOldest(page, theStart, theEnd); + slice = myResourceTableDao.findIdsTypesAndUpdateTimesOfResourcesWithinUpdatedRangeOrderedFromOldest( + page, theStart, theEnd); } else if (theRequestPartitionId.isDefaultPartition()) { - slice = myResourceTableDao.findIdsTypesAndUpdateTimesOfResourcesWithinUpdatedRangeOrderedFromOldestForDefaultPartition(page, theStart, theEnd); + slice = + myResourceTableDao + .findIdsTypesAndUpdateTimesOfResourcesWithinUpdatedRangeOrderedFromOldestForDefaultPartition( + page, theStart, theEnd); } else { - slice = 
myResourceTableDao.findIdsTypesAndUpdateTimesOfResourcesWithinUpdatedRangeOrderedFromOldestForPartitionIds(page, theStart, theEnd, theRequestPartitionId.getPartitionIds()); + slice = + myResourceTableDao + .findIdsTypesAndUpdateTimesOfResourcesWithinUpdatedRangeOrderedFromOldestForPartitionIds( + page, theStart, theEnd, theRequestPartitionId.getPartitionIds()); } List content = slice.getContent(); @@ -131,15 +147,10 @@ public class Batch2DaoSvcImpl implements IBatch2DaoSvc { return new EmptyResourcePidList(); } - List ids = content - .stream() - .map(t -> JpaPid.fromId((Long) t[0])) - .collect(Collectors.toList()); + List ids = + content.stream().map(t -> JpaPid.fromId((Long) t[0])).collect(Collectors.toList()); - List types = content - .stream() - .map(t -> (String) t[1]) - .collect(Collectors.toList()); + List types = content.stream().map(t -> (String) t[1]).collect(Collectors.toList()); Date lastDate = (Date) content.get(content.size() - 1)[2]; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/DatabaseBackedPagingProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/DatabaseBackedPagingProvider.java index 6af19809913..289744fca73 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/DatabaseBackedPagingProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/DatabaseBackedPagingProvider.java @@ -36,10 +36,13 @@ public class DatabaseBackedPagingProvider extends BasePagingProvider { @Autowired private DaoRegistry myDaoRegistry; + @Autowired private SearchBuilderFactory mySearchBuilderFactory; + @Autowired private PersistedJpaBundleProviderFactory myPersistedJpaBundleProviderFactory; + @Autowired private IRequestPartitionHelperSvc myRequestPartitionHelperSvc; @@ -82,5 +85,4 @@ public class DatabaseBackedPagingProvider extends BasePagingProvider { String uuid = theList.getUuid(); return uuid; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/DeferConceptIndexingRoutingBinder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/DeferConceptIndexingRoutingBinder.java index 4789f58655e..a4ee0205dac 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/DeferConceptIndexingRoutingBinder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/DeferConceptIndexingRoutingBinder.java @@ -36,7 +36,11 @@ public class DeferConceptIndexingRoutingBinder implements RoutingBinder { private class TermConceptBridge implements RoutingBridge { @Override - public void route(DocumentRoutes theDocumentRoutes, Object theO, TermConcept theTermConcept, RoutingBridgeRouteContext theRoutingBridgeRouteContext) { + public void route( + DocumentRoutes theDocumentRoutes, + Object theO, + TermConcept theTermConcept, + RoutingBridgeRouteContext theRoutingBridgeRouteContext) { if (theTermConcept.getIndexStatus() == null) { theDocumentRoutes.notIndexed(); } else { @@ -45,7 +49,11 @@ public class DeferConceptIndexingRoutingBinder implements RoutingBinder { } @Override - public void previousRoutes(DocumentRoutes theDocumentRoutes, Object theO, TermConcept theTermConcept, RoutingBridgeRouteContext theRoutingBridgeRouteContext) { + public void previousRoutes( + DocumentRoutes theDocumentRoutes, + Object theO, + TermConcept theTermConcept, + RoutingBridgeRouteContext theRoutingBridgeRouteContext) { theDocumentRoutes.addRoute(); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ExceptionService.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ExceptionService.java
index 2866373dfe8..5802b090fe6 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ExceptionService.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ExceptionService.java
@@ -30,14 +30,14 @@ public class ExceptionService {
 	private final FhirContext myContext;
-    public ExceptionService(FhirContext theContext) {
-        myContext = theContext;
-    }
+	public ExceptionService(FhirContext theContext) {
+		myContext = theContext;
+	}
-    @Nonnull
-    public ResourceGoneException newUnknownSearchException(String theUuid) {
-        ourLog.trace("Client requested unknown paging ID[{}]", theUuid);
-        String msg = myContext.getLocalizer().getMessage(PageMethodBinding.class, "unknownSearchId", theUuid);
-        return new ResourceGoneException(msg);
-    }
+	@Nonnull
+	public ResourceGoneException newUnknownSearchException(String theUuid) {
+		ourLog.trace("Client requested unknown paging ID[{}]", theUuid);
+		String msg = myContext.getLocalizer().getMessage(PageMethodBinding.class, "unknownSearchId", theUuid);
+		return new ResourceGoneException(msg);
+	}
 }
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/HapiHSearchAnalysisConfigurers.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/HapiHSearchAnalysisConfigurers.java
index 00232f60074..a280d7e8293 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/HapiHSearchAnalysisConfigurers.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/HapiHSearchAnalysisConfigurers.java
@@ -57,127 +57,149 @@ public class HapiHSearchAnalysisConfigurers {
 		@Override
 		public void configure(LuceneAnalysisConfigurationContext theLuceneCtx) {
-			theLuceneCtx.analyzer("autocompleteEdgeAnalyzer").custom()
-				.tokenizer(PatternTokenizerFactory.class).param("pattern", "(.*)").param("group", "1")
-				.tokenFilter(LowerCaseFilterFactory.class)
-				.tokenFilter(StopFilterFactory.class)
-				.tokenFilter(EdgeNGramFilterFactory.class)
-				.param("minGramSize", "3")
-				.param("maxGramSize", "50");
+			theLuceneCtx
+					.analyzer("autocompleteEdgeAnalyzer")
+					.custom()
+					.tokenizer(PatternTokenizerFactory.class)
+					.param("pattern", "(.*)")
+					.param("group", "1")
+					.tokenFilter(LowerCaseFilterFactory.class)
+					.tokenFilter(StopFilterFactory.class)
+					.tokenFilter(EdgeNGramFilterFactory.class)
+					.param("minGramSize", "3")
+					.param("maxGramSize", "50");
-			theLuceneCtx.analyzer("autocompletePhoneticAnalyzer").custom()
-				.tokenizer(StandardTokenizerFactory.class)
-				.tokenFilter(StopFilterFactory.class)
-				.tokenFilter(PhoneticFilterFactory.class).param("encoder", "DoubleMetaphone")
-				.tokenFilter(SnowballPorterFilterFactory.class).param("language", "English");
+			theLuceneCtx
+					.analyzer("autocompletePhoneticAnalyzer")
+					.custom()
+					.tokenizer(StandardTokenizerFactory.class)
+					.tokenFilter(StopFilterFactory.class)
+					.tokenFilter(PhoneticFilterFactory.class)
+					.param("encoder", "DoubleMetaphone")
+					.tokenFilter(SnowballPorterFilterFactory.class)
+					.param("language", "English");
-			theLuceneCtx.analyzer("autocompleteNGramAnalyzer").custom()
-				.tokenizer(StandardTokenizerFactory.class)
-				.tokenFilter(WordDelimiterGraphFilterFactory.class)
-				.tokenFilter(LowerCaseFilterFactory.class)
-				.tokenFilter(NGramFilterFactory.class)
-				.param("minGramSize", "3")
-				.param("maxGramSize", "20");
+			theLuceneCtx
+					.analyzer("autocompleteNGramAnalyzer")
+					.custom()
+					.tokenizer(StandardTokenizerFactory.class)
+
.tokenFilter(WordDelimiterGraphFilterFactory.class) + .tokenFilter(LowerCaseFilterFactory.class) + .tokenFilter(NGramFilterFactory.class) + .param("minGramSize", "3") + .param("maxGramSize", "20"); - theLuceneCtx.analyzer("autocompleteWordEdgeAnalyzer").custom() - .tokenizer(StandardTokenizerFactory.class) - .tokenFilter(LowerCaseFilterFactory.class) - .tokenFilter(StopFilterFactory.class) - .tokenFilter(EdgeNGramFilterFactory.class) - .param("minGramSize", "3") - .param("maxGramSize", "20"); + theLuceneCtx + .analyzer("autocompleteWordEdgeAnalyzer") + .custom() + .tokenizer(StandardTokenizerFactory.class) + .tokenFilter(LowerCaseFilterFactory.class) + .tokenFilter(StopFilterFactory.class) + .tokenFilter(EdgeNGramFilterFactory.class) + .param("minGramSize", "3") + .param("maxGramSize", "20"); - theLuceneCtx.analyzer(STANDARD_ANALYZER).custom() - .tokenizer(StandardTokenizerFactory.class) - .tokenFilter(LowerCaseFilterFactory.class) - .tokenFilter(ASCIIFoldingFilterFactory.class); + theLuceneCtx + .analyzer(STANDARD_ANALYZER) + .custom() + .tokenizer(StandardTokenizerFactory.class) + .tokenFilter(LowerCaseFilterFactory.class) + .tokenFilter(ASCIIFoldingFilterFactory.class); - theLuceneCtx.analyzer(NORM_STRING_ANALYZER).custom() - .tokenizer(KeywordTokenizerFactory.class) - .tokenFilter(LowerCaseFilterFactory.class) - .tokenFilter(ASCIIFoldingFilterFactory.class); + theLuceneCtx + .analyzer(NORM_STRING_ANALYZER) + .custom() + .tokenizer(KeywordTokenizerFactory.class) + .tokenFilter(LowerCaseFilterFactory.class) + .tokenFilter(ASCIIFoldingFilterFactory.class); - theLuceneCtx.analyzer(EXACT_ANALYZER).custom() - .tokenizer(KeywordTokenizerFactory.class); + theLuceneCtx.analyzer(EXACT_ANALYZER).custom().tokenizer(KeywordTokenizerFactory.class); - theLuceneCtx.analyzer("conceptParentPidsAnalyzer").custom() - .tokenizer(WhitespaceTokenizerFactory.class); - - theLuceneCtx.normalizer(LOWERCASE_ASCIIFOLDING_NORMALIZER).custom() - .tokenFilter(LowerCaseFilterFactory.class) - .tokenFilter(ASCIIFoldingFilterFactory.class); + theLuceneCtx.analyzer("conceptParentPidsAnalyzer").custom().tokenizer(WhitespaceTokenizerFactory.class); + theLuceneCtx + .normalizer(LOWERCASE_ASCIIFOLDING_NORMALIZER) + .custom() + .tokenFilter(LowerCaseFilterFactory.class) + .tokenFilter(ASCIIFoldingFilterFactory.class); } } - public static class HapiElasticsearchAnalysisConfigurer implements ElasticsearchAnalysisConfigurer { @Override public void configure(ElasticsearchAnalysisConfigurationContext theConfigCtx) { - theConfigCtx.analyzer("autocompleteEdgeAnalyzer").custom() - .tokenizer("pattern_all") - .tokenFilters("lowercase", "stop", "edgengram_3_50"); + theConfigCtx + .analyzer("autocompleteEdgeAnalyzer") + .custom() + .tokenizer("pattern_all") + .tokenFilters("lowercase", "stop", "edgengram_3_50"); - theConfigCtx.tokenizer("pattern_all") - .type("pattern") - .param("pattern", "(.*)") - .param("group", "1"); + theConfigCtx + .tokenizer("pattern_all") + .type("pattern") + .param("pattern", "(.*)") + .param("group", "1"); - theConfigCtx.tokenFilter("edgengram_3_50") - .type("edge_ngram") - .param("min_gram", "3") - .param("max_gram", "50"); + theConfigCtx + .tokenFilter("edgengram_3_50") + .type("edge_ngram") + .param("min_gram", "3") + .param("max_gram", "50"); + theConfigCtx + .analyzer("autocompleteWordEdgeAnalyzer") + .custom() + .tokenizer("standard") + .tokenFilters("lowercase", "stop", "wordedgengram_3_50"); - theConfigCtx.analyzer("autocompleteWordEdgeAnalyzer").custom() - .tokenizer("standard") - 
.tokenFilters("lowercase", "stop", "wordedgengram_3_50"); + theConfigCtx + .tokenFilter("wordedgengram_3_50") + .type("edge_ngram") + .param("min_gram", "3") + .param("max_gram", "20"); - theConfigCtx.tokenFilter("wordedgengram_3_50") - .type("edge_ngram") - .param("min_gram", "3") - .param("max_gram", "20"); + theConfigCtx + .analyzer("autocompletePhoneticAnalyzer") + .custom() + .tokenizer("standard") + .tokenFilters("stop", "snowball_english"); - theConfigCtx.analyzer("autocompletePhoneticAnalyzer").custom() - .tokenizer("standard") - .tokenFilters("stop", "snowball_english"); + theConfigCtx.tokenFilter("snowball_english").type("snowball").param("language", "English"); - theConfigCtx.tokenFilter("snowball_english") - .type("snowball") - .param("language", "English"); + theConfigCtx + .analyzer("autocompleteNGramAnalyzer") + .custom() + .tokenizer("standard") + .tokenFilters("word_delimiter", "lowercase", "ngram_3_20"); - theConfigCtx.analyzer("autocompleteNGramAnalyzer").custom() - .tokenizer("standard") - .tokenFilters("word_delimiter", "lowercase", "ngram_3_20"); + theConfigCtx + .tokenFilter("ngram_3_20") + .type("ngram") + .param("min_gram", "3") + .param("max_gram", "20"); - theConfigCtx.tokenFilter("ngram_3_20") - .type("ngram") - .param("min_gram", "3") - .param("max_gram", "20"); + theConfigCtx + .analyzer(HapiLuceneAnalysisConfigurer.STANDARD_ANALYZER) + .custom() + .tokenizer("standard") + .tokenFilters("lowercase", "asciifolding"); + theConfigCtx + .analyzer(HapiLuceneAnalysisConfigurer.NORM_STRING_ANALYZER) + .custom() + .tokenizer("keyword") // We need the whole string to match, including whitespace. + .tokenFilters("lowercase", "asciifolding"); - theConfigCtx.analyzer(HapiLuceneAnalysisConfigurer.STANDARD_ANALYZER).custom() - .tokenizer("standard") - .tokenFilters("lowercase", "asciifolding"); + theConfigCtx.analyzer("exactAnalyzer").custom().tokenizer("keyword").tokenFilters("unique"); - theConfigCtx.analyzer(HapiLuceneAnalysisConfigurer.NORM_STRING_ANALYZER).custom() - .tokenizer("keyword") // We need the whole string to match, including whitespace. 
- .tokenFilters("lowercase", "asciifolding"); - - theConfigCtx.analyzer("exactAnalyzer") - .custom() - .tokenizer("keyword") - .tokenFilters("unique"); - - theConfigCtx.analyzer("conceptParentPidsAnalyzer").custom() - .tokenizer("whitespace"); - - theConfigCtx.normalizer( LOWERCASE_ASCIIFOLDING_NORMALIZER ).custom() - .tokenFilters( "lowercase", "asciifolding" ); + theConfigCtx.analyzer("conceptParentPidsAnalyzer").custom().tokenizer("whitespace"); + theConfigCtx + .normalizer(LOWERCASE_ASCIIFOLDING_NORMALIZER) + .custom() + .tokenFilters("lowercase", "asciifolding"); } } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/IStaleSearchDeletingSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/IStaleSearchDeletingSvc.java index bbd353c16bc..024ce045407 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/IStaleSearchDeletingSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/IStaleSearchDeletingSvc.java @@ -24,5 +24,4 @@ public interface IStaleSearchDeletingSvc { void pollForStaleSearchesAndDeleteThem(); void schedulePollForStaleSearches(); - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ISynchronousSearchSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ISynchronousSearchSvc.java index d3aa5ef78fe..ef8d2ee9a0a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ISynchronousSearchSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ISynchronousSearchSvc.java @@ -28,6 +28,11 @@ import ca.uhn.fhir.rest.api.server.RequestDetails; public interface ISynchronousSearchSvc extends ISearchSvc { - IBundleProvider executeQuery(SearchParameterMap theParams, RequestDetails theRequestDetails, String theSearchUuid, ISearchBuilder theSb, Integer theLoadSynchronousUpTo, RequestPartitionId theRequestPartitionId); - + IBundleProvider executeQuery( + SearchParameterMap theParams, + RequestDetails theRequestDetails, + String theSearchUuid, + ISearchBuilder theSb, + Integer theLoadSynchronousUpTo, + RequestPartitionId theRequestPartitionId); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProvider.java index 43a5caac636..7e442bda264 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProvider.java @@ -63,15 +63,15 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nonnull; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.function.Function; +import javax.annotation.Nonnull; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; public class PersistedJpaBundleProvider implements IBundleProvider { @@ -81,30 +81,43 @@ public class PersistedJpaBundleProvider implements IBundleProvider { * Autowired fields */ protected final RequestDetails myRequest; + @Autowired protected HapiTransactionService myTxService; + @PersistenceContext private EntityManager myEntityManager; + @Autowired private IInterceptorBroadcaster myInterceptorBroadcaster; 
+ @Autowired private SearchBuilderFactory mySearchBuilderFactory; + @Autowired private HistoryBuilderFactory myHistoryBuilderFactory; + @Autowired private DaoRegistry myDaoRegistry; + @Autowired private FhirContext myContext; + @Autowired private ISearchCoordinatorSvc mySearchCoordinatorSvc; + @Autowired private ISearchCacheSvc mySearchCacheSvc; + @Autowired private IRequestPartitionHelperSvc myRequestPartitionHelperSvc; + @Autowired private JpaStorageSettings myStorageSettings; + @Autowired private MemoryCacheService myMemoryCacheService; + @Autowired private IJpaStorageResourceParser myJpaStorageResourceParser; /* @@ -152,12 +165,15 @@ public class PersistedJpaBundleProvider implements IBundleProvider { */ private List doHistoryInTransaction(Integer theOffset, int theFromIndex, int theToIndex) { - HistoryBuilder historyBuilder = myHistoryBuilderFactory.newHistoryBuilder(mySearchEntity.getResourceType(), - mySearchEntity.getResourceId(), mySearchEntity.getLastUpdatedLow(), mySearchEntity.getLastUpdatedHigh()); + HistoryBuilder historyBuilder = myHistoryBuilderFactory.newHistoryBuilder( + mySearchEntity.getResourceType(), + mySearchEntity.getResourceId(), + mySearchEntity.getLastUpdatedLow(), + mySearchEntity.getLastUpdatedHigh()); RequestPartitionId partitionId = getRequestPartitionId(); - List results = historyBuilder.fetchEntities(partitionId, theOffset, theFromIndex, - theToIndex, mySearchEntity.getHistorySearchStyle()); + List results = historyBuilder.fetchEntities( + partitionId, theOffset, theFromIndex, theToIndex, mySearchEntity.getHistorySearchStyle()); List retVal = new ArrayList<>(); for (ResourceHistoryTable next : results) { @@ -168,15 +184,15 @@ public class PersistedJpaBundleProvider implements IBundleProvider { retVal.add(myJpaStorageResourceParser.toResource(resource, true)); } - // Interceptor call: STORAGE_PREACCESS_RESOURCES { SimplePreResourceAccessDetails accessDetails = new SimplePreResourceAccessDetails(retVal); HookParams params = new HookParams() - .add(IPreResourceAccessDetails.class, accessDetails) - .add(RequestDetails.class, myRequest) - .addIfMatchesType(ServletRequestDetails.class, myRequest); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, myRequest, Pointcut.STORAGE_PREACCESS_RESOURCES, params); + .add(IPreResourceAccessDetails.class, accessDetails) + .add(RequestDetails.class, myRequest) + .addIfMatchesType(ServletRequestDetails.class, myRequest); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, myRequest, Pointcut.STORAGE_PREACCESS_RESOURCES, params); for (int i = retVal.size() - 1; i >= 0; i--) { if (accessDetails.isDontReturnResourceAtIndex(i)) { @@ -189,14 +205,14 @@ public class PersistedJpaBundleProvider implements IBundleProvider { { SimplePreResourceShowDetails showDetails = new SimplePreResourceShowDetails(retVal); HookParams params = new HookParams() - .add(IPreResourceShowDetails.class, showDetails) - .add(RequestDetails.class, myRequest) - .addIfMatchesType(ServletRequestDetails.class, myRequest); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, myRequest, Pointcut.STORAGE_PRESHOW_RESOURCES, params); + .add(IPreResourceShowDetails.class, showDetails) + .add(RequestDetails.class, myRequest) + .addIfMatchesType(ServletRequestDetails.class, myRequest); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, myRequest, Pointcut.STORAGE_PRESHOW_RESOURCES, params); retVal = showDetails.toList(); } - return retVal; } @@ -209,7 +225,8 @@ public class 
PersistedJpaBundleProvider implements IBundleProvider { } else if (mySearchEntity.getSearchType() == SearchTypeEnum.HISTORY) { details = ReadPartitionIdRequestDetails.forHistory(mySearchEntity.getResourceType(), null); } else { - SearchParameterMap params = mySearchEntity.getSearchParameterMap().orElse(null); + SearchParameterMap params = + mySearchEntity.getSearchParameterMap().orElse(null); details = ReadPartitionIdRequestDetails.forSearchType(mySearchEntity.getResourceType(), params, null); } myRequestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(myRequest, details); @@ -227,19 +244,21 @@ public class PersistedJpaBundleProvider implements IBundleProvider { return Collections.emptyList(); } String resourceName = mySearchEntity.getResourceType(); - Class resourceType = myContext.getResourceDefinition(resourceName).getImplementingClass(); + Class resourceType = + myContext.getResourceDefinition(resourceName).getImplementingClass(); IFhirResourceDao dao = myDaoRegistry.getResourceDao(resourceName); final ISearchBuilder sb = mySearchBuilderFactory.newSearchBuilder(dao, resourceName, resourceType); RequestPartitionId requestPartitionId = getRequestPartitionId(); - final List pidsSubList = mySearchCoordinatorSvc.getResources(myUuid, theFromIndex, theToIndex, myRequest, requestPartitionId); + final List pidsSubList = + mySearchCoordinatorSvc.getResources(myUuid, theFromIndex, theToIndex, myRequest, requestPartitionId); return myTxService - .withRequest(myRequest) - .withRequestPartitionId(requestPartitionId) - .execute(() -> { - return toResourceList(sb, pidsSubList); - }); + .withRequest(myRequest) + .withRequestPartitionId(requestPartitionId) + .execute(() -> { + return toResourceList(sb, pidsSubList); + }); } /** @@ -248,16 +267,19 @@ public class PersistedJpaBundleProvider implements IBundleProvider { public boolean ensureSearchEntityLoaded() { if (mySearchEntity == null) { Optional searchOpt = myTxService - .withRequest(myRequest) - .withRequestPartitionId(myRequestPartitionId) - .execute(() -> mySearchCacheSvc.fetchByUuid(myUuid, myRequestPartitionId)); + .withRequest(myRequest) + .withRequestPartitionId(myRequestPartitionId) + .execute(() -> mySearchCacheSvc.fetchByUuid(myUuid, myRequestPartitionId)); if (!searchOpt.isPresent()) { return false; } setSearchEntity(searchOpt.get()); - ourLog.trace("Retrieved search with version {} and total {}", mySearchEntity.getVersion(), mySearchEntity.getTotalCount()); + ourLog.trace( + "Retrieved search with version {} and total {}", + mySearchEntity.getVersion(), + mySearchEntity.getTotalCount()); return true; } @@ -288,13 +310,17 @@ public class PersistedJpaBundleProvider implements IBundleProvider { } Function supplier = k -> myTxService - .withRequest(myRequest) - .withRequestPartitionId(getRequestPartitionId()) - .execute(() -> { - HistoryBuilder historyBuilder = myHistoryBuilderFactory.newHistoryBuilder(mySearchEntity.getResourceType(), mySearchEntity.getResourceId(), mySearchEntity.getLastUpdatedLow(), mySearchEntity.getLastUpdatedHigh()); - Long count = historyBuilder.fetchCount(getRequestPartitionId()); - return count.intValue(); - }); + .withRequest(myRequest) + .withRequestPartitionId(getRequestPartitionId()) + .execute(() -> { + HistoryBuilder historyBuilder = myHistoryBuilderFactory.newHistoryBuilder( + mySearchEntity.getResourceType(), + mySearchEntity.getResourceId(), + mySearchEntity.getLastUpdatedLow(), + mySearchEntity.getLastUpdatedHigh()); + Long count = 
historyBuilder.fetchCount(getRequestPartitionId()); + return count.intValue(); + }); boolean haveOffset = mySearchEntity.getLastUpdatedLow() != null || mySearchEntity.getLastUpdatedHigh() != null; @@ -315,7 +341,6 @@ public class PersistedJpaBundleProvider implements IBundleProvider { break; } } - } @Override @@ -335,9 +360,9 @@ public class PersistedJpaBundleProvider implements IBundleProvider { switch (mySearchEntity.getSearchType()) { case HISTORY: return myTxService - .withRequest(myRequest) - .withRequestPartitionId(getRequestPartitionId()) - .execute(() -> doHistoryInTransaction(mySearchEntity.getOffset(), theFromIndex, theToIndex)); + .withRequest(myRequest) + .withRequestPartitionId(getRequestPartitionId()) + .execute(() -> doHistoryInTransaction(mySearchEntity.getOffset(), theFromIndex, theToIndex)); case SEARCH: case EVERYTHING: default: @@ -405,9 +430,10 @@ public class PersistedJpaBundleProvider implements IBundleProvider { if (mySearchEntity.getSearchType() == SearchTypeEnum.HISTORY) { return null; } else { - return mySearchCoordinatorSvc.getSearchTotal(myUuid, myRequest, myRequestPartitionId).orElse(null); + return mySearchCoordinatorSvc + .getSearchTotal(myUuid, myRequest, myRequestPartitionId) + .orElse(null); } - } protected boolean hasIncludes() { @@ -428,7 +454,16 @@ public class PersistedJpaBundleProvider implements IBundleProvider { if (performIncludesBeforeRevincludes) { // Load _includes - Set includedPids = theSearchBuilder.loadIncludes(myContext, myEntityManager, thePids, mySearchEntity.toIncludesList(), false, mySearchEntity.getLastUpdated(), myUuid, myRequest, maxIncludes); + Set includedPids = theSearchBuilder.loadIncludes( + myContext, + myEntityManager, + thePids, + mySearchEntity.toIncludesList(), + false, + mySearchEntity.getLastUpdated(), + myUuid, + myRequest, + maxIncludes); if (maxIncludes != null) { maxIncludes -= includedPids.size(); } @@ -436,12 +471,30 @@ public class PersistedJpaBundleProvider implements IBundleProvider { includedPidList.addAll(includedPids); // Load _revincludes - Set revIncludedPids = theSearchBuilder.loadIncludes(myContext, myEntityManager, thePids, mySearchEntity.toRevIncludesList(), true, mySearchEntity.getLastUpdated(), myUuid, myRequest, maxIncludes); + Set revIncludedPids = theSearchBuilder.loadIncludes( + myContext, + myEntityManager, + thePids, + mySearchEntity.toRevIncludesList(), + true, + mySearchEntity.getLastUpdated(), + myUuid, + myRequest, + maxIncludes); thePids.addAll(revIncludedPids); includedPidList.addAll(revIncludedPids); } else { // Load _revincludes - Set revIncludedPids = theSearchBuilder.loadIncludes(myContext, myEntityManager, thePids, mySearchEntity.toRevIncludesList(), true, mySearchEntity.getLastUpdated(), myUuid, myRequest, maxIncludes); + Set revIncludedPids = theSearchBuilder.loadIncludes( + myContext, + myEntityManager, + thePids, + mySearchEntity.toRevIncludesList(), + true, + mySearchEntity.getLastUpdated(), + myUuid, + myRequest, + maxIncludes); if (maxIncludes != null) { maxIncludes -= revIncludedPids.size(); } @@ -449,12 +502,19 @@ public class PersistedJpaBundleProvider implements IBundleProvider { includedPidList.addAll(revIncludedPids); // Load _includes - Set includedPids = theSearchBuilder.loadIncludes(myContext, myEntityManager, thePids, mySearchEntity.toIncludesList(), false, mySearchEntity.getLastUpdated(), myUuid, myRequest, maxIncludes); + Set includedPids = theSearchBuilder.loadIncludes( + myContext, + myEntityManager, + thePids, + mySearchEntity.toIncludesList(), + false, + 
mySearchEntity.getLastUpdated(), + myUuid, + myRequest, + maxIncludes); thePids.addAll(includedPids); includedPidList.addAll(includedPids); } - - } // Execute the query and make sure we return distinct results @@ -467,7 +527,8 @@ public class PersistedJpaBundleProvider implements IBundleProvider { } private boolean shouldPerformIncludesBeforeRevincudes() { - // When revincludes contain a :iterate, we should perform them last so they can iterate through the includes found so far + // When revincludes contain a :iterate, we should perform them last so they can iterate through the includes + // found so far boolean retval = false; for (Include nextInclude : mySearchEntity.toRevIncludesList()) { @@ -477,7 +538,6 @@ public class PersistedJpaBundleProvider implements IBundleProvider { } } return retval; - } public void setInterceptorBroadcaster(IInterceptorBroadcaster theInterceptorBroadcaster) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java index 5be99eb2276..5e89f521a09 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java @@ -48,20 +48,54 @@ public class PersistedJpaBundleProviderFactory { } public PersistedJpaBundleProvider newInstance(RequestDetails theRequest, Search theSearch) { - Object retVal = myApplicationContext.getBean(JpaConfig.PERSISTED_JPA_BUNDLE_PROVIDER_BY_SEARCH, theRequest, theSearch); + Object retVal = + myApplicationContext.getBean(JpaConfig.PERSISTED_JPA_BUNDLE_PROVIDER_BY_SEARCH, theRequest, theSearch); return (PersistedJpaBundleProvider) retVal; } - public PersistedJpaSearchFirstPageBundleProvider newInstanceFirstPage(RequestDetails theRequestDetails, Search theSearch, SearchTask theTask, ISearchBuilder theSearchBuilder, RequestPartitionId theRequestPartitionId) { - return (PersistedJpaSearchFirstPageBundleProvider) myApplicationContext.getBean(JpaConfig.PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER, theRequestDetails, theSearch, theTask, theSearchBuilder, theRequestPartitionId); + public PersistedJpaSearchFirstPageBundleProvider newInstanceFirstPage( + RequestDetails theRequestDetails, + Search theSearch, + SearchTask theTask, + ISearchBuilder theSearchBuilder, + RequestPartitionId theRequestPartitionId) { + return (PersistedJpaSearchFirstPageBundleProvider) myApplicationContext.getBean( + JpaConfig.PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER, + theRequestDetails, + theSearch, + theTask, + theSearchBuilder, + theRequestPartitionId); } - - public IBundleProvider history(RequestDetails theRequest, String theResourceType, Long theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset, RequestPartitionId theRequestPartitionId) { - return history(theRequest, theResourceType, theResourcePid, theRangeStartInclusive, theRangeEndInclusive, theOffset, null, theRequestPartitionId); + public IBundleProvider history( + RequestDetails theRequest, + String theResourceType, + Long theResourcePid, + Date theRangeStartInclusive, + Date theRangeEndInclusive, + Integer theOffset, + RequestPartitionId theRequestPartitionId) { + return history( + theRequest, + theResourceType, + theResourcePid, + theRangeStartInclusive, + theRangeEndInclusive, + theOffset, + null, + theRequestPartitionId); } - public IBundleProvider 
history(RequestDetails theRequest, String theResourceType, Long theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset, HistorySearchStyleEnum searchParameterType, RequestPartitionId theRequestPartitionId) { + public IBundleProvider history( + RequestDetails theRequest, + String theResourceType, + Long theResourcePid, + Date theRangeStartInclusive, + Date theRangeEndInclusive, + Integer theOffset, + HistorySearchStyleEnum searchParameterType, + RequestPartitionId theRequestPartitionId) { String resourceName = defaultIfBlank(theResourceType, null); Search search = new Search(); @@ -81,5 +115,4 @@ public class PersistedJpaBundleProviderFactory { return provider; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaSearchFirstPageBundleProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaSearchFirstPageBundleProvider.java index be0c352b101..28dbe622acb 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaSearchFirstPageBundleProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaSearchFirstPageBundleProvider.java @@ -34,10 +34,10 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; import java.util.List; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class PersistedJpaSearchFirstPageBundleProvider extends PersistedJpaBundleProvider { private static final Logger ourLog = LoggerFactory.getLogger(PersistedJpaSearchFirstPageBundleProvider.class); @@ -47,7 +47,12 @@ public class PersistedJpaSearchFirstPageBundleProvider extends PersistedJpaBundl /** * Constructor */ - public PersistedJpaSearchFirstPageBundleProvider(Search theSearch, SearchTask theSearchTask, ISearchBuilder theSearchBuilder, RequestDetails theRequest, RequestPartitionId theRequestPartitionId) { + public PersistedJpaSearchFirstPageBundleProvider( + Search theSearch, + SearchTask theSearchTask, + ISearchBuilder theSearchBuilder, + RequestDetails theRequest, + RequestPartitionId theRequestPartitionId) { super(theRequest, theSearch.getUuid()); assert theSearch.getSearchType() != SearchTypeEnum.HISTORY; @@ -73,29 +78,26 @@ public class PersistedJpaSearchFirstPageBundleProvider extends PersistedJpaBundl RequestPartitionId requestPartitionId = getRequestPartitionId(); List retVal = myTxService - .withRequest(myRequest) - .withRequestPartitionId(requestPartitionId) - .execute(() -> toResourceList(mySearchBuilder, pids)); + .withRequest(myRequest) + .withRequestPartitionId(requestPartitionId) + .execute(() -> toResourceList(mySearchBuilder, pids)); long totalCountWanted = theToIndex - theFromIndex; - long totalCountMatch = (int) retVal - .stream() - .filter(t -> !isInclude(t)) - .count(); + long totalCountMatch = (int) retVal.stream().filter(t -> !isInclude(t)).count(); if (totalCountMatch < totalCountWanted) { if (getSearchEntity().getStatus() == SearchStatusEnum.PASSCMPLET - || ((getSearchEntity().getStatus() == SearchStatusEnum.FINISHED && getSearchEntity().getNumFound() >= theToIndex))) { + || ((getSearchEntity().getStatus() == SearchStatusEnum.FINISHED + && getSearchEntity().getNumFound() >= theToIndex))) { /* * This is a bit of complexity to account for the possibility that * the consent service has filtered some results. 
*/ - Set existingIds = retVal - .stream() - .map(t -> t.getIdElement().getValue()) - .filter(t -> t != null) - .collect(Collectors.toSet()); + Set existingIds = retVal.stream() + .map(t -> t.getIdElement().getValue()) + .filter(t -> t != null) + .collect(Collectors.toSet()); long remainingWanted = totalCountWanted - totalCountMatch; long fromIndex = theToIndex - remainingWanted; @@ -130,5 +132,4 @@ public class PersistedJpaSearchFirstPageBundleProvider extends PersistedJpaBundl } return super.size(); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ResourceSearchUrlSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ResourceSearchUrlSvc.java index 62470304d6e..01967bb9a9d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ResourceSearchUrlSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/ResourceSearchUrlSvc.java @@ -31,8 +31,8 @@ import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; -import javax.persistence.EntityManager; import java.util.Date; +import javax.persistence.EntityManager; /** * This service ensures uniqueness of resources during create or create-on-update by storing the resource searchUrl. @@ -49,7 +49,11 @@ public class ResourceSearchUrlSvc { private final FhirContext myFhirContext; - public ResourceSearchUrlSvc(EntityManager theEntityManager, IResourceSearchUrlDao theResourceSearchUrlDao, MatchUrlService theMatchUrlService, FhirContext theFhirContext) { + public ResourceSearchUrlSvc( + EntityManager theEntityManager, + IResourceSearchUrlDao theResourceSearchUrlDao, + MatchUrlService theMatchUrlService, + FhirContext theFhirContext) { myEntityManager = theEntityManager; myResourceSearchUrlDao = theResourceSearchUrlDao; myMatchUrlService = theMatchUrlService; @@ -65,32 +69,33 @@ public class ResourceSearchUrlSvc { ourLog.info("Deleted {} SearchUrls", deletedCount); } - /** * Once a resource is updated or deleted, we can trust that future match checks will find the committed resource in the db. * The use of the constraint table is done, and we can delete it to keep the table small. */ - public void deleteByResId(long theResId){ + public void deleteByResId(long theResId) { myResourceSearchUrlDao.deleteByResId(theResId); } /** * We store a record of match urls with res_id so a db constraint can catch simultaneous creates that slip through. */ - public void enforceMatchUrlResourceUniqueness(String theResourceName, String theMatchUrl, JpaPid theResourcePersistentId) { + public void enforceMatchUrlResourceUniqueness( + String theResourceName, String theMatchUrl, JpaPid theResourcePersistentId) { String canonicalizedUrlForStorage = createCanonicalizedUrlForStorage(theResourceName, theMatchUrl); - ResourceSearchUrlEntity searchUrlEntity = ResourceSearchUrlEntity.from(canonicalizedUrlForStorage, theResourcePersistentId.getId()); + ResourceSearchUrlEntity searchUrlEntity = + ResourceSearchUrlEntity.from(canonicalizedUrlForStorage, theResourcePersistentId.getId()); // calling dao.save performs a merge operation which implies a trip to - // the database to see if the resource exists. Since we don't need the check, we avoid the trip by calling em.persist. + // the database to see if the resource exists. Since we don't need the check, we avoid the trip by calling + // em.persist. myEntityManager.persist(searchUrlEntity); - } /** * Provides a sanitized matchUrl to circumvent ordering matters. 
*/ - private String createCanonicalizedUrlForStorage(String theResourceName, String theMatchUrl){ + private String createCanonicalizedUrlForStorage(String theResourceName, String theMatchUrl) { RuntimeResourceDefinition resourceDef = myFhirContext.getResourceDefinition(theResourceName); SearchParameterMap matchUrlSearchParameterMap = myMatchUrlService.translateMatchUrl(theMatchUrl, resourceDef); @@ -99,6 +104,4 @@ public class ResourceSearchUrlSvc { return theResourceName + canonicalizedMatchUrl; } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java index b4211055369..1ef70a8b469 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java @@ -73,8 +73,6 @@ import org.springframework.data.domain.Sort; import org.springframework.stereotype.Component; import org.springframework.transaction.support.TransactionSynchronizationManager; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.time.Instant; import java.time.temporal.ChronoUnit; import java.util.List; @@ -86,6 +84,8 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static ca.uhn.fhir.jpa.util.QueryParameterUtils.DEFAULT_SYNC_SIZE; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -124,21 +124,20 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { * Constructor */ public SearchCoordinatorSvcImpl( - FhirContext theContext, - JpaStorageSettings theStorageSettings, - IInterceptorBroadcaster theInterceptorBroadcaster, - HapiTransactionService theTxService, - ISearchCacheSvc theSearchCacheSvc, - ISearchResultCacheSvc theSearchResultCacheSvc, - DaoRegistry theDaoRegistry, - SearchBuilderFactory theSearchBuilderFactory, - ISynchronousSearchSvc theSynchronousSearchSvc, - PersistedJpaBundleProviderFactory thePersistedJpaBundleProviderFactory, - ISearchParamRegistry theSearchParamRegistry, - SearchStrategyFactory theSearchStrategyFactory, - ExceptionService theExceptionSvc, - BeanFactory theBeanFactory - ) { + FhirContext theContext, + JpaStorageSettings theStorageSettings, + IInterceptorBroadcaster theInterceptorBroadcaster, + HapiTransactionService theTxService, + ISearchCacheSvc theSearchCacheSvc, + ISearchResultCacheSvc theSearchResultCacheSvc, + DaoRegistry theDaoRegistry, + SearchBuilderFactory theSearchBuilderFactory, + ISynchronousSearchSvc theSynchronousSearchSvc, + PersistedJpaBundleProviderFactory thePersistedJpaBundleProviderFactory, + ISearchParamRegistry theSearchParamRegistry, + SearchStrategyFactory theSearchStrategyFactory, + ExceptionService theExceptionSvc, + BeanFactory theBeanFactory) { super(); myContext = theContext; myStorageSettings = theStorageSettings; @@ -181,7 +180,8 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { @Override public void cancelAllActiveSearches() { for (SearchTask next : myIdToSearchTask.values()) { - ourLog.info("Requesting immediate abort of search: {}", next.getSearch().getUuid()); + ourLog.info( + "Requesting immediate abort of search: {}", next.getSearch().getUuid()); next.requestImmediateAbort(); 
AsyncUtil.awaitLatchAndIgnoreInterrupt(next.getCompletionLatch(), 30, TimeUnit.SECONDS); } @@ -208,7 +208,12 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { * just needs to wait until the first one actually fetches more results. */ @Override - public List getResources(final String theUuid, int theFrom, int theTo, @Nullable RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) { + public List getResources( + final String theUuid, + int theFrom, + int theTo, + @Nullable RequestDetails theRequestDetails, + RequestPartitionId theRequestPartitionId) { assert !TransactionSynchronizationManager.isActualTransactionActive(); // If we're actively searching right now, don't try to do anything until at least one batch has been @@ -228,7 +233,12 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { if (searchTask != null) { ourLog.trace("Local search found"); List resourcePids = searchTask.getResourcePids(theFrom, theTo); - ourLog.trace("Local search returned {} pids, wanted {}-{} - Search: {}", resourcePids.size(), theFrom, theTo, searchTask.getSearch()); + ourLog.trace( + "Local search returned {} pids, wanted {}-{} - Search: {}", + resourcePids.size(), + theFrom, + theTo, + searchTask.getSearch()); /* * Generally, if a search task is open, the fastest possible thing is to just return its results. This @@ -236,19 +246,22 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { * results beyond that threashold. In that case, we'll keep going below, since that will trigger another * task. */ - if ((searchTask.getSearch().getNumFound() - searchTask.getSearch().getNumBlocked()) >= theTo || resourcePids.size() == (theTo - theFrom)) { + if ((searchTask.getSearch().getNumFound() + - searchTask.getSearch().getNumBlocked()) + >= theTo + || resourcePids.size() == (theTo - theFrom)) { return resourcePids; } } } Callable searchCallback = () -> mySearchCacheSvc - .fetchByUuid(theUuid, theRequestPartitionId) - .orElseThrow(() -> myExceptionSvc.newUnknownSearchException(theUuid)); + .fetchByUuid(theUuid, theRequestPartitionId) + .orElseThrow(() -> myExceptionSvc.newUnknownSearchException(theUuid)); search = myTxService - .withRequest(theRequestDetails) - .withRequestPartitionId(theRequestPartitionId) - .execute(searchCallback); + .withRequest(theRequestDetails) + .withRequestPartitionId(theRequestPartitionId) + .execute(searchCallback); QueryParameterUtils.verifySearchHasntFailedOrThrowInternalErrorException(search); if (search.getStatus() == SearchStatusEnum.FINISHED) { @@ -261,7 +274,13 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { } if (sw.getMillis() > myMaxMillisToWaitForRemoteResults) { - ourLog.error("Search {} of type {} for {}{} timed out after {}ms", search.getId(), search.getSearchType(), search.getResourceType(), search.getSearchQueryString(), sw.getMillis()); + ourLog.error( + "Search {} of type {} for {}{} timed out after {}ms", + search.getId(), + search.getSearchType(), + search.getResourceType(), + search.getSearchQueryString(), + sw.getMillis()); throw new InternalErrorException(Msg.code(1163) + "Request timed out after " + sw.getMillis() + "ms"); } @@ -269,27 +288,28 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { // start a new pass if (search.getStatus() == SearchStatusEnum.PASSCMPLET) { ourLog.trace("Going to try to start next search"); - Optional newSearch = mySearchCacheSvc.tryToMarkSearchAsInProgress(search, theRequestPartitionId); + Optional 
newSearch = + mySearchCacheSvc.tryToMarkSearchAsInProgress(search, theRequestPartitionId); if (newSearch.isPresent()) { ourLog.trace("Launching new search"); search = newSearch.get(); String resourceType = search.getResourceType(); - SearchParameterMap params = search.getSearchParameterMap().orElseThrow(() -> new IllegalStateException("No map in PASSCOMPLET search")); + SearchParameterMap params = search.getSearchParameterMap() + .orElseThrow(() -> new IllegalStateException("No map in PASSCOMPLET search")); IFhirResourceDao resourceDao = myDaoRegistry.getResourceDao(resourceType); SearchTaskParameters parameters = new SearchTaskParameters( - search, - resourceDao, - params, - resourceType, - theRequestDetails, - theRequestPartitionId, - myOnRemoveSearchTask, - mySyncSize - ); + search, + resourceDao, + params, + resourceType, + theRequestDetails, + theRequestPartitionId, + myOnRemoveSearchTask, + mySyncSize); parameters.setLoadingThrottleForUnitTests(myLoadingThrottleForUnitTests); - SearchContinuationTask task = (SearchContinuationTask) myBeanFactory.getBean(SearchConfig.CONTINUE_TASK, - parameters); + SearchContinuationTask task = + (SearchContinuationTask) myBeanFactory.getBean(SearchConfig.CONTINUE_TASK, parameters); myIdToSearchTask.put(search.getUuid(), task); task.call(); } @@ -310,8 +330,15 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { } @Nonnull - private List fetchResultPids(String theUuid, int theFrom, int theTo, @Nullable RequestDetails theRequestDetails, Search theSearch, RequestPartitionId theRequestPartitionId) { - List pids = mySearchResultCacheSvc.fetchResultPids(theSearch, theFrom, theTo, theRequestDetails, theRequestPartitionId); + private List fetchResultPids( + String theUuid, + int theFrom, + int theTo, + @Nullable RequestDetails theRequestDetails, + Search theSearch, + RequestPartitionId theRequestPartitionId) { + List pids = mySearchResultCacheSvc.fetchResultPids( + theSearch, theFrom, theTo, theRequestDetails, theRequestPartitionId); if (pids == null) { throw myExceptionSvc.newUnknownSearchException(theUuid); } @@ -319,46 +346,60 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { } @Override - public IBundleProvider registerSearch(final IFhirResourceDao theCallingDao, final SearchParameterMap theParams, String theResourceType, CacheControlDirective theCacheControlDirective, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) { + public IBundleProvider registerSearch( + final IFhirResourceDao theCallingDao, + final SearchParameterMap theParams, + String theResourceType, + CacheControlDirective theCacheControlDirective, + RequestDetails theRequestDetails, + RequestPartitionId theRequestPartitionId) { final String searchUuid = UUID.randomUUID().toString(); final String queryString = theParams.toNormalizedQueryString(myContext); ourLog.debug("Registering new search {}", searchUuid); Search search = new Search(); - QueryParameterUtils.populateSearchEntity(theParams, theResourceType, searchUuid, queryString, search, theRequestPartitionId); + QueryParameterUtils.populateSearchEntity( + theParams, theResourceType, searchUuid, queryString, search, theRequestPartitionId); - myStorageInterceptorHooks.callStoragePresearchRegistered(theRequestDetails, theParams, search, theRequestPartitionId); + myStorageInterceptorHooks.callStoragePresearchRegistered( + theRequestDetails, theParams, search, theRequestPartitionId); validateSearch(theParams); - Class resourceTypeClass = 
myContext.getResourceDefinition(theResourceType).getImplementingClass(); - final ISearchBuilder sb = mySearchBuilderFactory.newSearchBuilder(theCallingDao, theResourceType, resourceTypeClass); + Class resourceTypeClass = + myContext.getResourceDefinition(theResourceType).getImplementingClass(); + final ISearchBuilder sb = + mySearchBuilderFactory.newSearchBuilder(theCallingDao, theResourceType, resourceTypeClass); sb.setFetchSize(mySyncSize); final Integer loadSynchronousUpTo = getLoadSynchronousUpToOrNull(theCacheControlDirective); boolean isOffsetQuery = theParams.isOffsetQuery(); // todo someday - not today. -// SearchStrategyFactory.ISearchStrategy searchStrategy = mySearchStrategyFactory.pickStrategy(theResourceType, theParams, theRequestDetails); -// return searchStrategy.get(); + // SearchStrategyFactory.ISearchStrategy searchStrategy = mySearchStrategyFactory.pickStrategy(theResourceType, + // theParams, theRequestDetails); + // return searchStrategy.get(); if (theParams.isLoadSynchronous() || loadSynchronousUpTo != null || isOffsetQuery) { if (mySearchStrategyFactory.isSupportsHSearchDirect(theResourceType, theParams, theRequestDetails)) { ourLog.info("Search {} is using direct load strategy", searchUuid); - SearchStrategyFactory.ISearchStrategy direct = mySearchStrategyFactory.makeDirectStrategy(searchUuid, theResourceType, theParams, theRequestDetails); + SearchStrategyFactory.ISearchStrategy direct = mySearchStrategyFactory.makeDirectStrategy( + searchUuid, theResourceType, theParams, theRequestDetails); try { return direct.get(); } catch (ResourceNotFoundInIndexException theE) { // some resources were not found in index, so we will inform this and resort to JPA search - ourLog.warn("Some resources were not found in index. Make sure all resources were indexed. Resorting to database search."); + ourLog.warn( + "Some resources were not found in index. Make sure all resources were indexed. 
Resorting to database search."); } } ourLog.debug("Search {} is loading in synchronous mode", searchUuid); - return mySynchronousSearchSvc.executeQuery(theParams, theRequestDetails, searchUuid, sb, loadSynchronousUpTo, theRequestPartitionId); + return mySynchronousSearchSvc.executeQuery( + theParams, theRequestDetails, searchUuid, sb, loadSynchronousUpTo, theRequestPartitionId); } /* @@ -373,7 +414,8 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { if (cacheStatus != SearchCacheStatusEnum.NOT_TRIED) { if (theParams.getEverythingMode() == null) { if (myStorageSettings.getReuseCachedSearchResultsForMillis() != null) { - PersistedJpaBundleProvider foundSearchProvider = findCachedQuery(theParams, theResourceType, theRequestDetails, queryString, theRequestPartitionId); + PersistedJpaBundleProvider foundSearchProvider = findCachedQuery( + theParams, theResourceType, theRequestDetails, queryString, theRequestPartitionId); if (foundSearchProvider != null) { foundSearchProvider.setCacheStatus(SearchCacheStatusEnum.HIT); return foundSearchProvider; @@ -382,7 +424,8 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { } } - PersistedJpaSearchFirstPageBundleProvider retVal = submitSearch(theCallingDao, theParams, theResourceType, theRequestDetails, sb, theRequestPartitionId, search); + PersistedJpaSearchFirstPageBundleProvider retVal = submitSearch( + theCallingDao, theParams, theResourceType, theRequestDetails, sb, theRequestPartitionId, search); retVal.setCacheStatus(cacheStatus); return retVal; } @@ -404,41 +447,73 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { String paramTargetType = next.getParamTargetType(); if (isBlank(paramType) || isBlank(paramName)) { - String msg = myContext.getLocalizer().getMessageSanitized(SearchCoordinatorSvcImpl.class, "invalidInclude", name, value, ""); + String msg = myContext + .getLocalizer() + .getMessageSanitized(SearchCoordinatorSvcImpl.class, "invalidInclude", name, value, ""); throw new InvalidRequestException(Msg.code(2018) + msg); } if (!myDaoRegistry.isResourceTypeSupported(paramType)) { - String resourceTypeMsg = myContext.getLocalizer().getMessageSanitized(SearchCoordinatorSvcImpl.class, "invalidResourceType", paramType); - String msg = myContext.getLocalizer().getMessage(SearchCoordinatorSvcImpl.class, "invalidInclude", UrlUtil.sanitizeUrlPart(name), UrlUtil.sanitizeUrlPart(value), resourceTypeMsg); // last param is pre-sanitized + String resourceTypeMsg = myContext + .getLocalizer() + .getMessageSanitized(SearchCoordinatorSvcImpl.class, "invalidResourceType", paramType); + String msg = myContext + .getLocalizer() + .getMessage( + SearchCoordinatorSvcImpl.class, + "invalidInclude", + UrlUtil.sanitizeUrlPart(name), + UrlUtil.sanitizeUrlPart(value), + resourceTypeMsg); // last param is pre-sanitized throw new InvalidRequestException(Msg.code(2017) + msg); } if (isNotBlank(paramTargetType) && !myDaoRegistry.isResourceTypeSupported(paramTargetType)) { - String resourceTypeMsg = myContext.getLocalizer().getMessageSanitized(SearchCoordinatorSvcImpl.class, "invalidResourceType", paramTargetType); - String msg = myContext.getLocalizer().getMessage(SearchCoordinatorSvcImpl.class, "invalidInclude", UrlUtil.sanitizeUrlPart(name), UrlUtil.sanitizeUrlPart(value), resourceTypeMsg); // last param is pre-sanitized + String resourceTypeMsg = myContext + .getLocalizer() + .getMessageSanitized(SearchCoordinatorSvcImpl.class, "invalidResourceType", paramTargetType); + String msg = myContext + 
.getLocalizer() + .getMessage( + SearchCoordinatorSvcImpl.class, + "invalidInclude", + UrlUtil.sanitizeUrlPart(name), + UrlUtil.sanitizeUrlPart(value), + resourceTypeMsg); // last param is pre-sanitized throw new InvalidRequestException(Msg.code(2016) + msg); } - if (!Constants.INCLUDE_STAR.equals(paramName) && mySearchParamRegistry.getActiveSearchParam(paramType, paramName) == null) { - List validNames = mySearchParamRegistry - .getActiveSearchParams(paramType) - .values() - .stream() - .filter(t -> t.getParamType() == RestSearchParameterTypeEnum.REFERENCE) - .map(t -> UrlUtil.sanitizeUrlPart(t.getName())) - .sorted() - .collect(Collectors.toList()); - String searchParamMessage = myContext.getLocalizer().getMessage(BaseStorageDao.class, "invalidSearchParameter", UrlUtil.sanitizeUrlPart(paramName), UrlUtil.sanitizeUrlPart(paramType), validNames); - String msg = myContext.getLocalizer().getMessage(SearchCoordinatorSvcImpl.class, "invalidInclude", UrlUtil.sanitizeUrlPart(name), UrlUtil.sanitizeUrlPart(value), searchParamMessage); // last param is pre-sanitized + if (!Constants.INCLUDE_STAR.equals(paramName) + && mySearchParamRegistry.getActiveSearchParam(paramType, paramName) == null) { + List validNames = mySearchParamRegistry.getActiveSearchParams(paramType).values().stream() + .filter(t -> t.getParamType() == RestSearchParameterTypeEnum.REFERENCE) + .map(t -> UrlUtil.sanitizeUrlPart(t.getName())) + .sorted() + .collect(Collectors.toList()); + String searchParamMessage = myContext + .getLocalizer() + .getMessage( + BaseStorageDao.class, + "invalidSearchParameter", + UrlUtil.sanitizeUrlPart(paramName), + UrlUtil.sanitizeUrlPart(paramType), + validNames); + String msg = myContext + .getLocalizer() + .getMessage( + SearchCoordinatorSvcImpl.class, + "invalidInclude", + UrlUtil.sanitizeUrlPart(name), + UrlUtil.sanitizeUrlPart(value), + searchParamMessage); // last param is pre-sanitized throw new InvalidRequestException(Msg.code(2015) + msg); } - } } @Override - public Optional getSearchTotal(String theUuid, @Nullable RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) { + public Optional getSearchTotal( + String theUuid, @Nullable RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) { SearchTask task = myIdToSearchTask.get(theUuid); if (task != null) { return Optional.ofNullable(task.awaitInitialSync()); @@ -448,10 +523,13 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { * In case there is no running search, if the total is listed as accurate we know one is coming * so let's wait a bit for it to show up */ - Optional search = myTxService.withRequest(theRequestDetails).execute(() -> mySearchCacheSvc.fetchByUuid(theUuid, theRequestPartitionId)); + Optional search = myTxService + .withRequest(theRequestDetails) + .execute(() -> mySearchCacheSvc.fetchByUuid(theUuid, theRequestPartitionId)); if (search.isPresent()) { Optional searchParameterMap = search.get().getSearchParameterMap(); - if (searchParameterMap.isPresent() && searchParameterMap.get().getSearchTotalMode() == SearchTotalModeEnum.ACCURATE) { + if (searchParameterMap.isPresent() + && searchParameterMap.get().getSearchTotalMode() == SearchTotalModeEnum.ACCURATE) { for (int i = 0; i < 10; i++) { if (search.isPresent()) { QueryParameterUtils.verifySearchHasntFailedOrThrowInternalErrorException(search.get()); @@ -468,72 +546,95 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { } @Nonnull - private PersistedJpaSearchFirstPageBundleProvider 
submitSearch(IDao theCallingDao, SearchParameterMap theParams, String theResourceType, RequestDetails theRequestDetails, ISearchBuilder theSb, RequestPartitionId theRequestPartitionId, Search theSearch) { + private PersistedJpaSearchFirstPageBundleProvider submitSearch( + IDao theCallingDao, + SearchParameterMap theParams, + String theResourceType, + RequestDetails theRequestDetails, + ISearchBuilder theSb, + RequestPartitionId theRequestPartitionId, + Search theSearch) { StopWatch w = new StopWatch(); SearchTaskParameters stp = new SearchTaskParameters( - theSearch, - theCallingDao, - theParams, - theResourceType, - theRequestDetails, - theRequestPartitionId, - myOnRemoveSearchTask, - mySyncSize - ); + theSearch, + theCallingDao, + theParams, + theResourceType, + theRequestDetails, + theRequestPartitionId, + myOnRemoveSearchTask, + mySyncSize); stp.setLoadingThrottleForUnitTests(myLoadingThrottleForUnitTests); SearchTask task = (SearchTask) myBeanFactory.getBean(SearchConfig.SEARCH_TASK, stp); myIdToSearchTask.put(theSearch.getUuid(), task); task.call(); - PersistedJpaSearchFirstPageBundleProvider retVal = myPersistedJpaBundleProviderFactory.newInstanceFirstPage(theRequestDetails, theSearch, task, theSb, theRequestPartitionId); + PersistedJpaSearchFirstPageBundleProvider retVal = myPersistedJpaBundleProviderFactory.newInstanceFirstPage( + theRequestDetails, theSearch, task, theSb, theRequestPartitionId); ourLog.debug("Search initial phase completed in {}ms", w.getMillis()); return retVal; } @Nullable - private PersistedJpaBundleProvider findCachedQuery(SearchParameterMap theParams, String theResourceType, RequestDetails theRequestDetails, String theQueryString, RequestPartitionId theRequestPartitionId) { + private PersistedJpaBundleProvider findCachedQuery( + SearchParameterMap theParams, + String theResourceType, + RequestDetails theRequestDetails, + String theQueryString, + RequestPartitionId theRequestPartitionId) { // May be null return myTxService - .withRequest(theRequestDetails) - .withRequestPartitionId(theRequestPartitionId) - .execute(() -> { + .withRequest(theRequestDetails) + .withRequestPartitionId(theRequestPartitionId) + .execute(() -> { - // Interceptor call: STORAGE_PRECHECK_FOR_CACHED_SEARCH - HookParams params = new HookParams() - .add(SearchParameterMap.class, theParams) - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails); - Object outcome = CompositeInterceptorBroadcaster.doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PRECHECK_FOR_CACHED_SEARCH, params); - if (Boolean.FALSE.equals(outcome)) { - return null; - } + // Interceptor call: STORAGE_PRECHECK_FOR_CACHED_SEARCH + HookParams params = new HookParams() + .add(SearchParameterMap.class, theParams) + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails); + Object outcome = CompositeInterceptorBroadcaster.doCallHooksAndReturnObject( + myInterceptorBroadcaster, + theRequestDetails, + Pointcut.STORAGE_PRECHECK_FOR_CACHED_SEARCH, + params); + if (Boolean.FALSE.equals(outcome)) { + return null; + } - // Check for a search matching the given hash - Search searchToUse = findSearchToUseOrNull(theQueryString, theResourceType, theRequestPartitionId); - if (searchToUse == null) { - return null; - } + // Check for a search matching the given hash + Search searchToUse = findSearchToUseOrNull(theQueryString, theResourceType, theRequestPartitionId); + if (searchToUse 
== null) { + return null; + } - ourLog.debug("Reusing search {} from cache", searchToUse.getUuid()); - // Interceptor call: JPA_PERFTRACE_SEARCH_REUSING_CACHED - params = new HookParams() - .add(SearchParameterMap.class, theParams) - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_SEARCH_REUSING_CACHED, params); + ourLog.debug("Reusing search {} from cache", searchToUse.getUuid()); + // Interceptor call: JPA_PERFTRACE_SEARCH_REUSING_CACHED + params = new HookParams() + .add(SearchParameterMap.class, theParams) + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, + theRequestDetails, + Pointcut.JPA_PERFTRACE_SEARCH_REUSING_CACHED, + params); - return myPersistedJpaBundleProviderFactory.newInstance(theRequestDetails, searchToUse.getUuid()); - }); + return myPersistedJpaBundleProviderFactory.newInstance(theRequestDetails, searchToUse.getUuid()); + }); } @Nullable - private Search findSearchToUseOrNull(String theQueryString, String theResourceType, RequestPartitionId theRequestPartitionId) { + private Search findSearchToUseOrNull( + String theQueryString, String theResourceType, RequestPartitionId theRequestPartitionId) { // createdCutoff is in recent past - final Instant createdCutoff = Instant.now().minus(myStorageSettings.getReuseCachedSearchResultsForMillis(), ChronoUnit.MILLIS); + final Instant createdCutoff = + Instant.now().minus(myStorageSettings.getReuseCachedSearchResultsForMillis(), ChronoUnit.MILLIS); - Optional candidate = mySearchCacheSvc.findCandidatesForReuse(theResourceType, theQueryString, createdCutoff, theRequestPartitionId); + Optional candidate = mySearchCacheSvc.findCandidatesForReuse( + theResourceType, theQueryString, createdCutoff, theRequestPartitionId); return candidate.orElse(null); } @@ -544,7 +645,9 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { if (theCacheControlDirective.getMaxResults() != null) { loadSynchronousUpTo = theCacheControlDirective.getMaxResults(); if (loadSynchronousUpTo > myStorageSettings.getCacheControlNoStoreMaxResultsUpperLimit()) { - throw new InvalidRequestException(Msg.code(1165) + Constants.HEADER_CACHE_CONTROL + " header " + Constants.CACHE_CONTROL_MAX_RESULTS + " value must not exceed " + myStorageSettings.getCacheControlNoStoreMaxResultsUpperLimit()); + throw new InvalidRequestException(Msg.code(1165) + Constants.HEADER_CACHE_CONTROL + " header " + + Constants.CACHE_CONTROL_MAX_RESULTS + " value must not exceed " + + myStorageSettings.getCacheControlNoStoreMaxResultsUpperLimit()); } } else { loadSynchronousUpTo = 100; @@ -555,7 +658,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { return loadSynchronousUpTo; } - /** * Creates a {@link Pageable} using a start and end index */ @@ -576,8 +678,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc { public long getOffset() { return theFromIndex; } - }; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchStrategyFactory.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchStrategyFactory.java index aff9afaeaf4..e6de39ec5d6 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchStrategyFactory.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchStrategyFactory.java @@ -27,56 +27,60 @@ import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.SimpleBundleProvider; import org.hl7.fhir.instance.model.api.IBaseResource; -import javax.annotation.Nullable; import java.util.Collections; import java.util.List; import java.util.function.Supplier; +import javax.annotation.Nullable; /** * Figure out how we're going to run the query up front, and build a branchless strategy object. */ public class SearchStrategyFactory { private final JpaStorageSettings myStorageSettings; + @Nullable private final IFulltextSearchSvc myFulltextSearchSvc; - public interface ISearchStrategy extends Supplier { - - } + public interface ISearchStrategy extends Supplier {} // someday -// public class DirectHSearch implements ISearchStrategy {}; -// public class JPAOffsetSearch implements ISearchStrategy {}; -// public class JPASavedSearch implements ISearchStrategy {}; -// public class JPAHybridHSearchSavedSearch implements ISearchStrategy {}; -// public class SavedSearchAdaptorStrategy implements ISearchStrategy {}; + // public class DirectHSearch implements ISearchStrategy {}; + // public class JPAOffsetSearch implements ISearchStrategy {}; + // public class JPASavedSearch implements ISearchStrategy {}; + // public class JPAHybridHSearchSavedSearch implements ISearchStrategy {}; + // public class SavedSearchAdaptorStrategy implements ISearchStrategy {}; - public SearchStrategyFactory(JpaStorageSettings theStorageSettings, @Nullable IFulltextSearchSvc theFulltextSearchSvc) { + public SearchStrategyFactory( + JpaStorageSettings theStorageSettings, @Nullable IFulltextSearchSvc theFulltextSearchSvc) { myStorageSettings = theStorageSettings; myFulltextSearchSvc = theFulltextSearchSvc; } - public boolean isSupportsHSearchDirect(String theResourceType, SearchParameterMap theParams, RequestDetails theRequestDetails) { - return - myFulltextSearchSvc != null && - myStorageSettings.isStoreResourceInHSearchIndex() && - myStorageSettings.isAdvancedHSearchIndexing() && - myFulltextSearchSvc.supportsAllOf(theParams) && - theParams.getSummaryMode() == null && - theParams.getSearchTotalMode() == null; + public boolean isSupportsHSearchDirect( + String theResourceType, SearchParameterMap theParams, RequestDetails theRequestDetails) { + return myFulltextSearchSvc != null + && myStorageSettings.isStoreResourceInHSearchIndex() + && myStorageSettings.isAdvancedHSearchIndexing() + && myFulltextSearchSvc.supportsAllOf(theParams) + && theParams.getSummaryMode() == null + && theParams.getSearchTotalMode() == null; } - public ISearchStrategy makeDirectStrategy(String theSearchUUID, String theResourceType, SearchParameterMap theParams, RequestDetails theRequestDetails) { + public ISearchStrategy makeDirectStrategy( + String theSearchUUID, + String theResourceType, + SearchParameterMap theParams, + RequestDetails theRequestDetails) { return () -> { if (myFulltextSearchSvc == null) { return new SimpleBundleProvider(Collections.emptyList(), theSearchUUID); } - List resources = myFulltextSearchSvc.searchForResources(theResourceType, theParams, theRequestDetails); + List resources = + myFulltextSearchSvc.searchForResources(theResourceType, theParams, theRequestDetails); SimpleBundleProvider result = new SimpleBundleProvider(resources, theSearchUUID); result.setSize(resources.size()); return result; }; } - } diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchUrlJobMaintenanceSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchUrlJobMaintenanceSvcImpl.java index 0ac33f6b8fe..283a9dfe4dd 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchUrlJobMaintenanceSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchUrlJobMaintenanceSvcImpl.java @@ -68,8 +68,7 @@ public class SearchUrlJobMaintenanceSvcImpl implements ISearchUrlJobMaintenanceS return new Date(System.currentTimeMillis() - OUR_CUTOFF_IN_MILLISECONDS); } - public static class SearchUrlMaintenanceJob implements HapiJob{ - + public static class SearchUrlMaintenanceJob implements HapiJob { @Autowired private ISearchUrlJobMaintenanceSvc mySearchUrlJobMaintenanceSvc; @@ -78,6 +77,5 @@ public class SearchUrlJobMaintenanceSvcImpl implements ISearchUrlJobMaintenanceS public void execute(JobExecutionContext theJobExecutionContext) throws JobExecutionException { mySearchUrlJobMaintenanceSvc.removeStaleEntries(); } - } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/StaleSearchDeletingSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/StaleSearchDeletingSvcImpl.java index 9d75460050a..700dfa9484e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/StaleSearchDeletingSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/StaleSearchDeletingSvcImpl.java @@ -43,8 +43,10 @@ import static ca.uhn.fhir.jpa.search.cache.DatabaseSearchCacheSvcImpl.SEARCH_CLE // public class StaleSearchDeletingSvcImpl implements IStaleSearchDeletingSvc, IHasScheduledJobs { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(StaleSearchDeletingSvcImpl.class); + @Autowired private JpaStorageSettings myStorageSettings; + @Autowired private ISearchCacheSvc mySearchCacheSvc; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SynchronousSearchSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SynchronousSearchSvcImpl.java index eae46731eb5..3627b72a5d4 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SynchronousSearchSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SynchronousSearchSvcImpl.java @@ -50,12 +50,12 @@ import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster; import org.hl7.fhir.instance.model.api.IBaseResource; import org.springframework.beans.factory.annotation.Autowired; -import javax.persistence.EntityManager; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Set; import java.util.UUID; +import javax.persistence.EntityManager; import static ca.uhn.fhir.jpa.util.SearchParameterMapCalculator.isWantCount; import static ca.uhn.fhir.jpa.util.SearchParameterMapCalculator.isWantOnlyCount; @@ -91,142 +91,187 @@ public class SynchronousSearchSvcImpl implements ISynchronousSearchSvc { private int mySyncSize = 250; @Override - public IBundleProvider executeQuery(SearchParameterMap theParams, RequestDetails theRequestDetails, String theSearchUuid, ISearchBuilder theSb, Integer theLoadSynchronousUpTo, RequestPartitionId theRequestPartitionId) { + public IBundleProvider executeQuery( + SearchParameterMap theParams, + RequestDetails theRequestDetails, + String theSearchUuid, + ISearchBuilder theSb, + Integer theLoadSynchronousUpTo, + RequestPartitionId theRequestPartitionId) { 
SearchRuntimeDetails searchRuntimeDetails = new SearchRuntimeDetails(theRequestDetails, theSearchUuid); searchRuntimeDetails.setLoadSynchronous(true); boolean theParamWantOnlyCount = isWantOnlyCount(theParams); - boolean theParamOrConfigWantCount = nonNull(theParams.getSearchTotalMode()) ? isWantCount(theParams) : isWantCount(myStorageSettings.getDefaultTotalMode()); + boolean theParamOrConfigWantCount = nonNull(theParams.getSearchTotalMode()) + ? isWantCount(theParams) + : isWantCount(myStorageSettings.getDefaultTotalMode()); boolean wantCount = theParamWantOnlyCount || theParamOrConfigWantCount; // Execute the query and make sure we return distinct results return myTxService - .withRequest(theRequestDetails) - .withRequestPartitionId(theRequestPartitionId) - .readOnly() - .execute(() -> { + .withRequest(theRequestDetails) + .withRequestPartitionId(theRequestPartitionId) + .readOnly() + .execute(() -> { - // Load the results synchronously - final List pids = new ArrayList<>(); + // Load the results synchronously + final List pids = new ArrayList<>(); - Long count = 0L; - if (wantCount) { + Long count = 0L; + if (wantCount) { - ourLog.trace("Performing count"); - // TODO FulltextSearchSvcImpl will remove necessary parameters from the "theParams", this will cause actual query after count to - // return wrong response. This is some dirty fix to avoid that issue. Params should not be mutated? - // Maybe instead of removing them we could skip them in db query builder if full text search was used? - List> contentAndTerms = theParams.get(Constants.PARAM_CONTENT); - List> textAndTerms = theParams.get(Constants.PARAM_TEXT); + ourLog.trace("Performing count"); + // TODO FulltextSearchSvcImpl will remove necessary parameters from the "theParams", this will + // cause actual query after count to + // return wrong response. This is some dirty fix to avoid that issue. Params should not be + // mutated? + // Maybe instead of removing them we could skip them in db query builder if full text search + // was used? 
+ List> contentAndTerms = theParams.get(Constants.PARAM_CONTENT); + List> textAndTerms = theParams.get(Constants.PARAM_TEXT); - count = theSb.createCountQuery(theParams, theSearchUuid, theRequestDetails, theRequestPartitionId); + count = theSb.createCountQuery( + theParams, theSearchUuid, theRequestDetails, theRequestPartitionId); - if (contentAndTerms != null) theParams.put(Constants.PARAM_CONTENT, contentAndTerms); - if (textAndTerms != null) theParams.put(Constants.PARAM_TEXT, textAndTerms); + if (contentAndTerms != null) theParams.put(Constants.PARAM_CONTENT, contentAndTerms); + if (textAndTerms != null) theParams.put(Constants.PARAM_TEXT, textAndTerms); - ourLog.trace("Got count {}", count); - } - - if (theParamWantOnlyCount) { - SimpleBundleProvider bundleProvider = new SimpleBundleProvider(); - bundleProvider.setSize(count.intValue()); - return bundleProvider; - } - - try (IResultIterator resultIter = theSb.createQuery(theParams, searchRuntimeDetails, theRequestDetails, theRequestPartitionId)) { - while (resultIter.hasNext()) { - pids.add(resultIter.next()); - if (theLoadSynchronousUpTo != null && pids.size() >= theLoadSynchronousUpTo) { - break; + ourLog.trace("Got count {}", count); } - if (theParams.getLoadSynchronousUpTo() != null && pids.size() >= theParams.getLoadSynchronousUpTo()) { - break; + + if (theParamWantOnlyCount) { + SimpleBundleProvider bundleProvider = new SimpleBundleProvider(); + bundleProvider.setSize(count.intValue()); + return bundleProvider; } - } - } catch (IOException e) { - ourLog.error("IO failure during database access", e); - throw new InternalErrorException(Msg.code(1164) + e); - } - JpaPreResourceAccessDetails accessDetails = new JpaPreResourceAccessDetails(pids, () -> theSb); - HookParams params = new HookParams() - .add(IPreResourceAccessDetails.class, accessDetails) - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PREACCESS_RESOURCES, params); + try (IResultIterator resultIter = theSb.createQuery( + theParams, searchRuntimeDetails, theRequestDetails, theRequestPartitionId)) { + while (resultIter.hasNext()) { + pids.add(resultIter.next()); + if (theLoadSynchronousUpTo != null && pids.size() >= theLoadSynchronousUpTo) { + break; + } + if (theParams.getLoadSynchronousUpTo() != null + && pids.size() >= theParams.getLoadSynchronousUpTo()) { + break; + } + } + } catch (IOException e) { + ourLog.error("IO failure during database access", e); + throw new InternalErrorException(Msg.code(1164) + e); + } - for (int i = pids.size() - 1; i >= 0; i--) { - if (accessDetails.isDontReturnResourceAtIndex(i)) { - pids.remove(i); - } - } + JpaPreResourceAccessDetails accessDetails = new JpaPreResourceAccessDetails(pids, () -> theSb); + HookParams params = new HookParams() + .add(IPreResourceAccessDetails.class, accessDetails) + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PREACCESS_RESOURCES, params); - /* - * For synchronous queries, we load all the includes right away - * since we're returning a static bundle with all the results - * pre-loaded. 
This is ok because synchronous requests are not - * expected to be paged - * - * On the other hand for async queries we load includes/revincludes - * individually for pages as we return them to clients - */ + for (int i = pids.size() - 1; i >= 0; i--) { + if (accessDetails.isDontReturnResourceAtIndex(i)) { + pids.remove(i); + } + } - // _includes - Integer maxIncludes = myStorageSettings.getMaximumIncludesToLoadPerPage(); - final Set includedPids = theSb.loadIncludes(myContext, myEntityManager, pids, theParams.getRevIncludes(), true, theParams.getLastUpdated(), "(synchronous)", theRequestDetails, maxIncludes); - if (maxIncludes != null) { - maxIncludes -= includedPids.size(); - } - pids.addAll(includedPids); - List includedPidsList = new ArrayList<>(includedPids); + /* + * For synchronous queries, we load all the includes right away + * since we're returning a static bundle with all the results + * pre-loaded. This is ok because synchronous requests are not + * expected to be paged + * + * On the other hand for async queries we load includes/revincludes + * individually for pages as we return them to clients + */ - // _revincludes - if (theParams.getEverythingMode() == null && (maxIncludes == null || maxIncludes > 0)) { - Set revIncludedPids = theSb.loadIncludes(myContext, myEntityManager, pids, theParams.getIncludes(), false, theParams.getLastUpdated(), "(synchronous)", theRequestDetails, maxIncludes); - includedPids.addAll(revIncludedPids); - pids.addAll(revIncludedPids); - includedPidsList.addAll(revIncludedPids); - } + // _includes + Integer maxIncludes = myStorageSettings.getMaximumIncludesToLoadPerPage(); + final Set includedPids = theSb.loadIncludes( + myContext, + myEntityManager, + pids, + theParams.getRevIncludes(), + true, + theParams.getLastUpdated(), + "(synchronous)", + theRequestDetails, + maxIncludes); + if (maxIncludes != null) { + maxIncludes -= includedPids.size(); + } + pids.addAll(includedPids); + List includedPidsList = new ArrayList<>(includedPids); - List resources = new ArrayList<>(); - theSb.loadResourcesByPid(pids, includedPidsList, resources, false, theRequestDetails); - // Hook: STORAGE_PRESHOW_RESOURCES - resources = ServerInterceptorUtil.fireStoragePreshowResource(resources, theRequestDetails, myInterceptorBroadcaster); + // _revincludes + if (theParams.getEverythingMode() == null && (maxIncludes == null || maxIncludes > 0)) { + Set revIncludedPids = theSb.loadIncludes( + myContext, + myEntityManager, + pids, + theParams.getIncludes(), + false, + theParams.getLastUpdated(), + "(synchronous)", + theRequestDetails, + maxIncludes); + includedPids.addAll(revIncludedPids); + pids.addAll(revIncludedPids); + includedPidsList.addAll(revIncludedPids); + } - SimpleBundleProvider bundleProvider = new SimpleBundleProvider(resources); - if (theParams.isOffsetQuery()) { - bundleProvider.setCurrentPageOffset(theParams.getOffset()); - bundleProvider.setCurrentPageSize(theParams.getCount()); - } + List resources = new ArrayList<>(); + theSb.loadResourcesByPid(pids, includedPidsList, resources, false, theRequestDetails); + // Hook: STORAGE_PRESHOW_RESOURCES + resources = ServerInterceptorUtil.fireStoragePreshowResource( + resources, theRequestDetails, myInterceptorBroadcaster); - if (wantCount) { - bundleProvider.setSize(count.intValue()); - } else { - Integer queryCount = getQueryCount(theLoadSynchronousUpTo, theParams); - if (queryCount == null || queryCount > resources.size()) { - // No limit, last page or everything was fetched within the limit - 
bundleProvider.setSize(getTotalCount(queryCount, theParams.getOffset(), resources.size())); - } else { - bundleProvider.setSize(null); - } - } + SimpleBundleProvider bundleProvider = new SimpleBundleProvider(resources); + if (theParams.isOffsetQuery()) { + bundleProvider.setCurrentPageOffset(theParams.getOffset()); + bundleProvider.setCurrentPageSize(theParams.getCount()); + } - bundleProvider.setPreferredPageSize(theParams.getCount()); + if (wantCount) { + bundleProvider.setSize(count.intValue()); + } else { + Integer queryCount = getQueryCount(theLoadSynchronousUpTo, theParams); + if (queryCount == null || queryCount > resources.size()) { + // No limit, last page or everything was fetched within the limit + bundleProvider.setSize(getTotalCount(queryCount, theParams.getOffset(), resources.size())); + } else { + bundleProvider.setSize(null); + } + } - return bundleProvider; - }); + bundleProvider.setPreferredPageSize(theParams.getCount()); + + return bundleProvider; + }); } @Override - public IBundleProvider executeQuery(String theResourceType, SearchParameterMap theSearchParameterMap, RequestPartitionId theRequestPartitionId) { + public IBundleProvider executeQuery( + String theResourceType, + SearchParameterMap theSearchParameterMap, + RequestPartitionId theRequestPartitionId) { final String searchUuid = UUID.randomUUID().toString(); IFhirResourceDao callingDao = myDaoRegistry.getResourceDao(theResourceType); - Class resourceTypeClass = myContext.getResourceDefinition(theResourceType).getImplementingClass(); - final ISearchBuilder sb = mySearchBuilderFactory.newSearchBuilder(callingDao, theResourceType, resourceTypeClass); + Class resourceTypeClass = + myContext.getResourceDefinition(theResourceType).getImplementingClass(); + final ISearchBuilder sb = + mySearchBuilderFactory.newSearchBuilder(callingDao, theResourceType, resourceTypeClass); sb.setFetchSize(mySyncSize); - return executeQuery(theSearchParameterMap, null, searchUuid, sb, theSearchParameterMap.getLoadSynchronousUpTo(), theRequestPartitionId); + return executeQuery( + theSearchParameterMap, + null, + searchUuid, + sb, + theSearchParameterMap.getLoadSynchronousUpTo(), + theRequestPartitionId); } @Autowired diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/WarmSearchDefinition.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/WarmSearchDefinition.java index e427d1773fd..bbc81ef51f7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/WarmSearchDefinition.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/WarmSearchDefinition.java @@ -23,5 +23,4 @@ public class WarmSearchDefinition { private String mySearchUrl; private long myRefreshPeriodMillis; - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/autocomplete/TokenAutocompleteAggregation.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/autocomplete/TokenAutocompleteAggregation.java index 942dd34ca80..574055d839d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/autocomplete/TokenAutocompleteAggregation.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/autocomplete/TokenAutocompleteAggregation.java @@ -31,10 +31,10 @@ import com.jayway.jsonpath.spi.mapper.GsonMappingProvider; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Validate; -import javax.annotation.Nonnull; import java.util.List; import java.util.stream.Collectors; import java.util.stream.StreamSupport; +import 
javax.annotation.Nonnull; import static ca.uhn.fhir.jpa.model.search.HSearchIndexWriter.IDX_STRING_TEXT; import static ca.uhn.fhir.jpa.model.search.HSearchIndexWriter.NESTED_SEARCH_PARAM_ROOT; @@ -48,63 +48,66 @@ class TokenAutocompleteAggregation { * * https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations.html */ - static final JsonObject AGGREGATION_TEMPLATE = - new Gson().fromJson("" + - " {" + - " \"nested\": { \"path\": \"nsp.PLACEHOLDER\" }," + - " \"aggs\": {" + - " \"search\": {" + - " \"filter\": {" + - " \"bool\": {" + - " \"must\": [" + - " { \"match_bool_prefix\":" + - " { \"nsp.PLACEHOLDER.string.text\": {" + - " \"query\": \"Mors\"}" + - " }" + - " }" + - " ]" + - " }" + - " }," + - " \"aggs\": {" + - " \"group_by_token\": {" + - " \"terms\": {" + - " \"field\": \"nsp.PLACEHOLDER.token.code-system\"," + - " \"size\": 30," + - " \"min_doc_count\": 1," + - " \"shard_min_doc_count\": 0," + - " \"show_term_doc_count_error\": false" + - " }," + - " \"aggs\": {" + - " \"top_tags_hits\": {" + - " \"top_hits\": {" + - " \"_source\": {" + - " \"includes\": [ \"nsp.PLACEHOLDER\" ]" + - " }," + - " \"size\": 1" + - " }" + - " }" + - " }" + - " }" + - " }" + - " }" + - " }" + - " }", JsonObject.class); + static final JsonObject AGGREGATION_TEMPLATE = new Gson() + .fromJson( + "" + " {" + + " \"nested\": { \"path\": \"nsp.PLACEHOLDER\" }," + + " \"aggs\": {" + + " \"search\": {" + + " \"filter\": {" + + " \"bool\": {" + + " \"must\": [" + + " { \"match_bool_prefix\":" + + " { \"nsp.PLACEHOLDER.string.text\": {" + + " \"query\": \"Mors\"}" + + " }" + + " }" + + " ]" + + " }" + + " }," + + " \"aggs\": {" + + " \"group_by_token\": {" + + " \"terms\": {" + + " \"field\": \"nsp.PLACEHOLDER.token.code-system\"," + + " \"size\": 30," + + " \"min_doc_count\": 1," + + " \"shard_min_doc_count\": 0," + + " \"show_term_doc_count_error\": false" + + " }," + + " \"aggs\": {" + + " \"top_tags_hits\": {" + + " \"top_hits\": {" + + " \"_source\": {" + + " \"includes\": [ \"nsp.PLACEHOLDER\" ]" + + " }," + + " \"size\": 1" + + " }" + + " }" + + " }" + + " }" + + " }" + + " }" + + " }" + + " }", + JsonObject.class); - static final Configuration configuration = Configuration - .builder() - .mappingProvider(new GsonMappingProvider()) - .jsonProvider(new GsonJsonProvider()) - .build(); + static final Configuration configuration = Configuration.builder() + .mappingProvider(new GsonMappingProvider()) + .jsonProvider(new GsonJsonProvider()) + .build(); static final ParseContext parseContext = JsonPath.using(configuration); private final String mySpName; private final int myCount; private final JsonObject mySearch; - public TokenAutocompleteAggregation(String theSpName, int theCount, String theSearchText, String theSearchModifier) { + public TokenAutocompleteAggregation( + String theSpName, int theCount, String theSearchText, String theSearchModifier) { Validate.notEmpty(theSpName); - Validate.isTrue(theCount>0, "count must be positive"); - Validate.isTrue("text".equalsIgnoreCase(theSearchModifier) || "".equals(theSearchModifier) || theSearchModifier == null, "Unsupported search modifier " + theSearchModifier); + Validate.isTrue(theCount > 0, "count must be positive"); + Validate.isTrue( + "text".equalsIgnoreCase(theSearchModifier) || "".equals(theSearchModifier) || theSearchModifier == null, + "Unsupported search modifier " + theSearchModifier); mySpName = theSpName; myCount = theCount; mySearch = makeSearch(theSearchText, theSearchModifier); @@ -117,9 +120,11 @@ class 
TokenAutocompleteAggregation { if (StringUtils.isEmpty(theSearchText)) { return RawElasticJsonBuilder.makeMatchAllPredicate(); } else if ("text".equalsIgnoreCase(theSearchModifier)) { - return RawElasticJsonBuilder.makeMatchBoolPrefixPredicate(NESTED_SEARCH_PARAM_ROOT + "." + mySpName + ".string." + IDX_STRING_TEXT, theSearchText); + return RawElasticJsonBuilder.makeMatchBoolPrefixPredicate( + NESTED_SEARCH_PARAM_ROOT + "." + mySpName + ".string." + IDX_STRING_TEXT, theSearchText); } else { - return RawElasticJsonBuilder.makeWildcardPredicate(NESTED_SEARCH_PARAM_ROOT + "." + mySpName + ".token.code", theSearchText + "*"); + return RawElasticJsonBuilder.makeWildcardPredicate( + NESTED_SEARCH_PARAM_ROOT + "." + mySpName + ".token.code", theSearchText + "*"); } } @@ -135,9 +140,13 @@ class TokenAutocompleteAggregation { String nestedSearchParamPath = NESTED_SEARCH_PARAM_ROOT + "." + mySpName; documentContext.set("nested.path", nestedSearchParamPath); documentContext.set("aggs.search.filter.bool.must[0]", mySearch); - documentContext.set("aggs.search.aggs.group_by_token.terms.field", NESTED_SEARCH_PARAM_ROOT + "." + mySpName + ".token" + ".code-system"); + documentContext.set( + "aggs.search.aggs.group_by_token.terms.field", + NESTED_SEARCH_PARAM_ROOT + "." + mySpName + ".token" + ".code-system"); documentContext.set("aggs.search.aggs.group_by_token.terms.size", myCount); - documentContext.set("aggs.search.aggs.group_by_token.aggs.top_tags_hits.top_hits._source.includes[0]", nestedSearchParamPath); + documentContext.set( + "aggs.search.aggs.group_by_token.aggs.top_tags_hits.top_hits._source.includes[0]", + nestedSearchParamPath); return result; } @@ -152,13 +161,13 @@ class TokenAutocompleteAggregation { Validate.notNull(theAggregationResult); JsonArray buckets = theAggregationResult - .getAsJsonObject("search") - .getAsJsonObject("group_by_token") - .getAsJsonArray("buckets"); + .getAsJsonObject("search") + .getAsJsonObject("group_by_token") + .getAsJsonArray("buckets"); List result = StreamSupport.stream(buckets.spliterator(), false) - .map(b-> bucketToEntry((JsonObject) b)) - .collect(Collectors.toList()); + .map(b -> bucketToEntry((JsonObject) b)) + .collect(Collectors.toList()); return result; } @@ -172,13 +181,12 @@ class TokenAutocompleteAggregation { // wrap the JsonObject for JSONPath. DocumentContext documentContext = parseContext.parse(theBucketJson); - // The outer bucket is keyed by the token value (i.e. "system|code"). + // The outer bucket is keyed by the token value (i.e. "system|code"). String bucketKey = documentContext.read("key", String.class); // The inner bucket has a hits array, and we only need the first. 
String displayText = documentContext.read("top_tags_hits.hits.hits[0]._source.string.text", String.class); - return new TokenAutocompleteHit(bucketKey,displayText); + return new TokenAutocompleteHit(bucketKey, displayText); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/autocomplete/TokenAutocompleteHit.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/autocomplete/TokenAutocompleteHit.java index cd79686e401..2780964ea3a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/autocomplete/TokenAutocompleteHit.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/autocomplete/TokenAutocompleteHit.java @@ -30,6 +30,7 @@ import javax.annotation.Nonnull; class TokenAutocompleteHit { @Nonnull final String mySystemCode; + final String myDisplayText; TokenAutocompleteHit(@Nonnull String theSystemCode, String theDisplayText) { @@ -46,8 +47,8 @@ class TokenAutocompleteHit { @Override public String toString() { return new ToStringBuilder(this) - .append("mySystemCode", mySystemCode) - .append("myDisplayText", myDisplayText) - .toString(); + .append("mySystemCode", mySystemCode) + .append("myDisplayText", myDisplayText) + .toString(); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/autocomplete/TokenAutocompleteSearch.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/autocomplete/TokenAutocompleteSearch.java index 86f5330e82f..dc14f226676 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/autocomplete/TokenAutocompleteSearch.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/autocomplete/TokenAutocompleteSearch.java @@ -21,8 +21,8 @@ package ca.uhn.fhir.jpa.search.autocomplete; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.dao.search.ExtendedHSearchClauseBuilder; -import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.jpa.model.entity.ResourceTable; +import ca.uhn.fhir.jpa.model.entity.StorageSettings; import com.google.gson.JsonObject; import org.hibernate.search.backend.elasticsearch.ElasticsearchExtension; import org.hibernate.search.engine.search.aggregation.AggregationKey; @@ -34,8 +34,8 @@ import org.hibernate.search.mapper.orm.session.SearchSession; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; import java.util.List; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -50,13 +50,13 @@ class TokenAutocompleteSearch { private final StorageSettings myStorageSettings; private final SearchSession mySession; - public TokenAutocompleteSearch(FhirContext theFhirContext, StorageSettings theStorageSettings, SearchSession theSession) { + public TokenAutocompleteSearch( + FhirContext theFhirContext, StorageSettings theStorageSettings, SearchSession theSession) { myFhirContext = theFhirContext; myStorageSettings = theStorageSettings; mySession = theSession; } - /** * Search for tokens indexed by theSPName on theResourceName matching theSearchText. * @param theResourceName The resource type (e.g. 
Observation) @@ -65,22 +65,25 @@ class TokenAutocompleteSearch { * @return A collection of Coding elements */ @Nonnull - public List search(String theResourceName, String theSPName, String theSearchText, String theSearchModifier, int theCount) { + public List search( + String theResourceName, String theSPName, String theSearchText, String theSearchModifier, int theCount) { TokenAutocompleteAggregation tokenAutocompleteAggregation = - new TokenAutocompleteAggregation(theSPName, theCount, theSearchText, theSearchModifier); + new TokenAutocompleteAggregation(theSPName, theCount, theSearchText, theSearchModifier); // compose the query json - SearchQueryOptionsStep query = mySession.search(ResourceTable.class) - .where(predFactory -> predFactory.bool(boolBuilder -> { - ExtendedHSearchClauseBuilder clauseBuilder = new ExtendedHSearchClauseBuilder(myFhirContext, myStorageSettings, boolBuilder, predFactory); + SearchQueryOptionsStep query = mySession + .search(ResourceTable.class) + .where(predFactory -> predFactory.bool(boolBuilder -> { + ExtendedHSearchClauseBuilder clauseBuilder = new ExtendedHSearchClauseBuilder( + myFhirContext, myStorageSettings, boolBuilder, predFactory); - // we apply resource-level predicates here, at the top level - if (isNotBlank(theResourceName)) { - clauseBuilder.addResourceTypeClause(theResourceName); - } - })) - .aggregation(AGGREGATION_KEY, buildAggregation(tokenAutocompleteAggregation)); + // we apply resource-level predicates here, at the top level + if (isNotBlank(theResourceName)) { + clauseBuilder.addResourceTypeClause(theResourceName); + } + })) + .aggregation(AGGREGATION_KEY, buildAggregation(tokenAutocompleteAggregation)); // run the query, but with 0 results. We only care about the aggregations. SearchResult result = query.fetch(0); @@ -99,11 +102,11 @@ class TokenAutocompleteSearch { JsonObject jsonAggregation = tokenAutocompleteAggregation.toJsonAggregation(); SearchAggregation aggregation = mySession - .scope( ResourceTable.class ) - .aggregation() - .extension(ElasticsearchExtension.get()) - .fromJson(jsonAggregation) - .toAggregation(); + .scope(ResourceTable.class) + .aggregation() + .extension(ElasticsearchExtension.get()) + .fromJson(jsonAggregation) + .toAggregation(); return aggregation; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/autocomplete/ValueSetAutocompleteOptions.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/autocomplete/ValueSetAutocompleteOptions.java index 7a0aa904210..1b9b40be8e5 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/autocomplete/ValueSetAutocompleteOptions.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/autocomplete/ValueSetAutocompleteOptions.java @@ -67,37 +67,42 @@ public class ValueSetAutocompleteOptions { } public static ValueSetAutocompleteOptions validateAndParseOptions( - JpaStorageSettings theStorageSettings, - IPrimitiveType theContext, - IPrimitiveType theFilter, - IPrimitiveType theCount, - IIdType theId, - IPrimitiveType theUrl, - IBaseResource theValueSet) - { + JpaStorageSettings theStorageSettings, + IPrimitiveType theContext, + IPrimitiveType theFilter, + IPrimitiveType theCount, + IIdType theId, + IPrimitiveType theUrl, + IBaseResource theValueSet) { boolean haveId = theId != null && theId.hasIdPart(); boolean haveIdentifier = theUrl != null && isNotBlank(theUrl.getValue()); boolean haveValueSet = theValueSet != null && !theValueSet.isEmpty(); if (haveId || haveIdentifier || haveValueSet) { - throw 
new InvalidRequestException(Msg.code(2020) + "$expand with contexDirection='existing' is only supported at the type leve. It is not supported at instance level, with a url specified, or with a ValueSet ."); + throw new InvalidRequestException( + Msg.code(2020) + + "$expand with contexDirection='existing' is only supported at the type leve. It is not supported at instance level, with a url specified, or with a ValueSet ."); } if (!theStorageSettings.isAdvancedHSearchIndexing()) { - throw new InvalidRequestException(Msg.code(2022) + "$expand with contexDirection='existing' requires Extended Lucene Indexing."); + throw new InvalidRequestException( + Msg.code(2022) + "$expand with contexDirection='existing' requires Extended Lucene Indexing."); } if (theContext == null || theContext.isEmpty()) { - throw new InvalidRequestException(Msg.code(2021) + "$expand with contexDirection='existing' requires a context"); + throw new InvalidRequestException( + Msg.code(2021) + "$expand with contexDirection='existing' requires a context"); } String filter = theFilter == null ? null : theFilter.getValue(); - ValueSetAutocompleteOptions result = new ValueSetAutocompleteOptions(theContext.getValue(), filter, IPrimitiveType.toValueOrNull(theCount)); + ValueSetAutocompleteOptions result = + new ValueSetAutocompleteOptions(theContext.getValue(), filter, IPrimitiveType.toValueOrNull(theCount)); if (!ourSupportedModifiers.contains(defaultString(result.getSearchParamModifier()))) { - throw new InvalidRequestException(Msg.code(2069) + "$expand with contexDirection='existing' only supports plain token search, or the :text modifier. Received " + result.getSearchParamModifier()); + throw new InvalidRequestException(Msg.code(2069) + + "$expand with contexDirection='existing' only supports plain token search, or the :text modifier. 
Received " + + result.getSearchParamModifier()); } return result; } - public String getResourceType() { return myResourceType; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/autocomplete/ValueSetAutocompleteSearch.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/autocomplete/ValueSetAutocompleteSearch.java index 0c660b1acd3..92a5fc6ff3c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/autocomplete/ValueSetAutocompleteSearch.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/autocomplete/ValueSetAutocompleteSearch.java @@ -38,22 +38,26 @@ public class ValueSetAutocompleteSearch { private final TokenAutocompleteSearch myAutocompleteSearch; static final int DEFAULT_SIZE = 30; - public ValueSetAutocompleteSearch(FhirContext theFhirContext, StorageSettings theStorageSettings, SearchSession theSession) { + public ValueSetAutocompleteSearch( + FhirContext theFhirContext, StorageSettings theStorageSettings, SearchSession theSession) { myFhirContext = theFhirContext; myStorageSettings = theStorageSettings; myAutocompleteSearch = new TokenAutocompleteSearch(myFhirContext, myStorageSettings, theSession); } public IBaseResource search(ValueSetAutocompleteOptions theOptions) { - List aggEntries = myAutocompleteSearch.search(theOptions.getResourceType(), theOptions.getSearchParamCode(), theOptions.getFilter(), theOptions.getSearchParamModifier(), (int) theOptions.getCount().orElse(DEFAULT_SIZE)); + List aggEntries = myAutocompleteSearch.search( + theOptions.getResourceType(), + theOptions.getSearchParamCode(), + theOptions.getFilter(), + theOptions.getSearchParamModifier(), + (int) theOptions.getCount().orElse(DEFAULT_SIZE)); ValueSet result = new ValueSet(); ValueSet.ValueSetExpansionComponent expansion = new ValueSet.ValueSetExpansionComponent(); result.setExpansion(expansion); result.setStatus(Enumerations.PublicationStatus.ACTIVE); - aggEntries.stream() - .map(this::makeCoding) - .forEach(expansion::addContains); + aggEntries.stream().map(this::makeCoding).forEach(expansion::addContains); return result; } @@ -63,7 +67,7 @@ public class ValueSetAutocompleteSearch { tokenParam.setValueAsQueryToken(myFhirContext, null, null, theSearchHit.mySystemCode); // R4 only for now. 
-// IBaseCoding coding = TerserUtil.newElement(myFhirContext, "Coding"); + // IBaseCoding coding = TerserUtil.newElement(myFhirContext, "Coding"); ValueSet.ValueSetExpansionContainsComponent coding = new ValueSet.ValueSetExpansionContainsComponent(); coding.setCode(tokenParam.getValue()); coding.setSystem(tokenParam.getSystem()); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/autocomplete/package-info.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/autocomplete/package-info.java index d9e346dd41a..88a3e16b337 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/autocomplete/package-info.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/autocomplete/package-info.java @@ -29,4 +29,3 @@ * */ package ca.uhn.fhir.jpa.search.autocomplete; - diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java index e2fdb35e0a4..c701d1392c3 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java @@ -108,7 +108,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.util.CollectionUtils; -import javax.annotation.Nullable; import java.math.BigDecimal; import java.util.ArrayList; import java.util.Collection; @@ -121,6 +120,7 @@ import java.util.Objects; import java.util.Set; import java.util.function.Supplier; import java.util.stream.Collectors; +import javax.annotation.Nullable; import static ca.uhn.fhir.jpa.util.QueryParameterUtils.fromOperation; import static ca.uhn.fhir.jpa.util.QueryParameterUtils.getChainedPart; @@ -155,14 +155,34 @@ public class QueryStack { /** * Constructor */ - public QueryStack(SearchParameterMap theSearchParameters, JpaStorageSettings theStorageSettings, FhirContext theFhirContext, SearchQueryBuilder theSqlBuilder, ISearchParamRegistry theSearchParamRegistry, PartitionSettings thePartitionSettings) { - this(theSearchParameters, theStorageSettings, theFhirContext, theSqlBuilder, theSearchParamRegistry, thePartitionSettings, EnumSet.of(PredicateBuilderTypeEnum.DATE)); + public QueryStack( + SearchParameterMap theSearchParameters, + JpaStorageSettings theStorageSettings, + FhirContext theFhirContext, + SearchQueryBuilder theSqlBuilder, + ISearchParamRegistry theSearchParamRegistry, + PartitionSettings thePartitionSettings) { + this( + theSearchParameters, + theStorageSettings, + theFhirContext, + theSqlBuilder, + theSearchParamRegistry, + thePartitionSettings, + EnumSet.of(PredicateBuilderTypeEnum.DATE)); } /** * Constructor */ - private QueryStack(SearchParameterMap theSearchParameters, JpaStorageSettings theStorageSettings, FhirContext theFhirContext, SearchQueryBuilder theSqlBuilder, ISearchParamRegistry theSearchParamRegistry, PartitionSettings thePartitionSettings, EnumSet theReusePredicateBuilderTypes) { + private QueryStack( + SearchParameterMap theSearchParameters, + JpaStorageSettings theStorageSettings, + FhirContext theFhirContext, + SearchQueryBuilder theSqlBuilder, + ISearchParamRegistry theSearchParamRegistry, + PartitionSettings thePartitionSettings, + EnumSet theReusePredicateBuilderTypes) { myPartitionSettings = thePartitionSettings; assert theSearchParameters != null; assert theStorageSettings != null; @@ -193,12 +213,13 @@ public class QueryStack { 
mySqlBuilder.addSortCoordsNear(coordsBuilder, latitudeValue, longitudeValue, theAscending); handled = true; } - } } if (!handled) { - String msg = myFhirContext.getLocalizer().getMessageSanitized(QueryStack.class, "cantSortOnCoordParamWithoutValues", theParamName); + String msg = myFhirContext + .getLocalizer() + .getMessageSanitized(QueryStack.class, "cantSortOnCoordParamWithoutValues", theParamName); throw new InvalidRequestException(Msg.code(2307) + msg); } } @@ -207,7 +228,8 @@ public class QueryStack { BaseJoiningPredicateBuilder firstPredicateBuilder = mySqlBuilder.getOrCreateFirstPredicateBuilder(); DatePredicateBuilder datePredicateBuilder = mySqlBuilder.createDatePredicateBuilder(); - Condition hashIdentityPredicate = datePredicateBuilder.createHashIdentityPredicate(theResourceName, theParamName); + Condition hashIdentityPredicate = + datePredicateBuilder.createHashIdentityPredicate(theResourceName, theParamName); addSortCustomJoin(firstPredicateBuilder, datePredicateBuilder, hashIdentityPredicate); @@ -220,7 +242,8 @@ public class QueryStack { if (firstPredicateBuilder instanceof ResourceTablePredicateBuilder) { resourceTablePredicateBuilder = (ResourceTablePredicateBuilder) firstPredicateBuilder; } else { - resourceTablePredicateBuilder = mySqlBuilder.addResourceTablePredicateBuilder(firstPredicateBuilder.getResourceIdColumn()); + resourceTablePredicateBuilder = + mySqlBuilder.addResourceTablePredicateBuilder(firstPredicateBuilder.getResourceIdColumn()); } mySqlBuilder.addSortDate(resourceTablePredicateBuilder.getColumnLastUpdated(), theAscending, myUseAggregate); } @@ -229,7 +252,8 @@ public class QueryStack { BaseJoiningPredicateBuilder firstPredicateBuilder = mySqlBuilder.getOrCreateFirstPredicateBuilder(); NumberPredicateBuilder numberPredicateBuilder = mySqlBuilder.createNumberPredicateBuilder(); - Condition hashIdentityPredicate = numberPredicateBuilder.createHashIdentityPredicate(theResourceName, theParamName); + Condition hashIdentityPredicate = + numberPredicateBuilder.createHashIdentityPredicate(theResourceName, theParamName); addSortCustomJoin(firstPredicateBuilder, numberPredicateBuilder, hashIdentityPredicate); @@ -241,7 +265,8 @@ public class QueryStack { BaseQuantityPredicateBuilder quantityPredicateBuilder = mySqlBuilder.createQuantityPredicateBuilder(); - Condition hashIdentityPredicate = quantityPredicateBuilder.createHashIdentityPredicate(theResourceName, theParamName); + Condition hashIdentityPredicate = + quantityPredicateBuilder.createHashIdentityPredicate(theResourceName, theParamName); addSortCustomJoin(firstPredicateBuilder, quantityPredicateBuilder, hashIdentityPredicate); @@ -250,26 +275,34 @@ public class QueryStack { public void addSortOnResourceId(boolean theAscending) { BaseJoiningPredicateBuilder firstPredicateBuilder = mySqlBuilder.getOrCreateFirstPredicateBuilder(); - ForcedIdPredicateBuilder sortPredicateBuilder = mySqlBuilder.addForcedIdPredicateBuilder(firstPredicateBuilder.getResourceIdColumn()); + ForcedIdPredicateBuilder sortPredicateBuilder = + mySqlBuilder.addForcedIdPredicateBuilder(firstPredicateBuilder.getResourceIdColumn()); if (!theAscending) { - mySqlBuilder.addSortString(sortPredicateBuilder.getColumnForcedId(), false, OrderObject.NullOrder.FIRST, myUseAggregate); + mySqlBuilder.addSortString( + sortPredicateBuilder.getColumnForcedId(), false, OrderObject.NullOrder.FIRST, myUseAggregate); } else { mySqlBuilder.addSortString(sortPredicateBuilder.getColumnForcedId(), true, myUseAggregate); } 
mySqlBuilder.addSortNumeric(firstPredicateBuilder.getResourceIdColumn(), theAscending, myUseAggregate); - } - public void addSortOnResourceLink(String theResourceName, String theReferenceTargetType, String theParamName, String theChain, boolean theAscending) { + public void addSortOnResourceLink( + String theResourceName, + String theReferenceTargetType, + String theParamName, + String theChain, + boolean theAscending) { BaseJoiningPredicateBuilder firstPredicateBuilder = mySqlBuilder.getOrCreateFirstPredicateBuilder(); ResourceLinkPredicateBuilder resourceLinkPredicateBuilder = mySqlBuilder.createReferencePredicateBuilder(this); - Condition pathPredicate = resourceLinkPredicateBuilder.createPredicateSourcePaths(theResourceName, theParamName); + Condition pathPredicate = + resourceLinkPredicateBuilder.createPredicateSourcePaths(theResourceName, theParamName); addSortCustomJoin(firstPredicateBuilder, resourceLinkPredicateBuilder, pathPredicate); if (isBlank(theChain)) { - mySqlBuilder.addSortNumeric(resourceLinkPredicateBuilder.getColumnTargetResourceId(), theAscending, myUseAggregate); + mySqlBuilder.addSortNumeric( + resourceLinkPredicateBuilder.getColumnTargetResourceId(), theAscending, myUseAggregate); return; } @@ -278,30 +311,37 @@ public class QueryStack { if (theReferenceTargetType != null) { targetType = theReferenceTargetType; } else if (param.getTargets().size() > 1) { - throw new InvalidRequestException(Msg.code(2287) + "Unable to sort on a chained parameter from '" + theParamName + "' as this parameter has multiple target types. Please specify the target type."); + throw new InvalidRequestException(Msg.code(2287) + "Unable to sort on a chained parameter from '" + + theParamName + "' as this parameter has multiple target types. Please specify the target type."); } else if (param.getTargets().size() == 1) { targetType = param.getTargets().iterator().next(); } if (isBlank(targetType)) { - throw new InvalidRequestException(Msg.code(2288) + "Unable to sort on a chained parameter from '" + theParamName + "' as this parameter as this parameter does not define a target type. Please specify the target type."); + throw new InvalidRequestException( + Msg.code(2288) + "Unable to sort on a chained parameter from '" + theParamName + + "' as this parameter as this parameter does not define a target type. 
Please specify the target type."); } RuntimeSearchParam targetSearchParameter = mySearchParamRegistry.getActiveSearchParam(targetType, theChain); if (targetSearchParameter == null) { - Collection validSearchParameterNames = mySearchParamRegistry - .getActiveSearchParams(targetType) - .values() - .stream() - .filter(t -> - t.getParamType() == RestSearchParameterTypeEnum.STRING || - t.getParamType() == RestSearchParameterTypeEnum.TOKEN || - t.getParamType() == RestSearchParameterTypeEnum.DATE) - .map(RuntimeSearchParam::getName) - .sorted() - .distinct() - .collect(Collectors.toList()); - String msg = myFhirContext.getLocalizer().getMessageSanitized(BaseStorageDao.class, "invalidSortParameter", theChain, targetType, validSearchParameterNames); + Collection validSearchParameterNames = + mySearchParamRegistry.getActiveSearchParams(targetType).values().stream() + .filter(t -> t.getParamType() == RestSearchParameterTypeEnum.STRING + || t.getParamType() == RestSearchParameterTypeEnum.TOKEN + || t.getParamType() == RestSearchParameterTypeEnum.DATE) + .map(RuntimeSearchParam::getName) + .sorted() + .distinct() + .collect(Collectors.toList()); + String msg = myFhirContext + .getLocalizer() + .getMessageSanitized( + BaseStorageDao.class, + "invalidSortParameter", + theChain, + targetType, + validSearchParameterNames); throw new InvalidRequestException(Msg.code(2289) + msg); } @@ -310,34 +350,29 @@ public class QueryStack { switch (targetSearchParameter.getParamType()) { case STRING: StringPredicateBuilder stringPredicateBuilder = mySqlBuilder.createStringPredicateBuilder(); - sortColumn = new DbColumn[]{ - stringPredicateBuilder.getColumnValueNormalized() - }; + sortColumn = new DbColumn[] {stringPredicateBuilder.getColumnValueNormalized()}; chainedPredicateBuilder = stringPredicateBuilder; break; case TOKEN: TokenPredicateBuilder tokenPredicateBuilder = mySqlBuilder.createTokenPredicateBuilder(); - sortColumn = new DbColumn[]{ - tokenPredicateBuilder.getColumnSystem(), - tokenPredicateBuilder.getColumnValue() - }; + sortColumn = + new DbColumn[] {tokenPredicateBuilder.getColumnSystem(), tokenPredicateBuilder.getColumnValue() + }; chainedPredicateBuilder = tokenPredicateBuilder; break; case DATE: DatePredicateBuilder datePredicateBuilder = mySqlBuilder.createDatePredicateBuilder(); - sortColumn = new DbColumn[]{ - datePredicateBuilder.getColumnValueLow() - }; + sortColumn = new DbColumn[] {datePredicateBuilder.getColumnValueLow()}; chainedPredicateBuilder = datePredicateBuilder; break; - /* - * Note that many of the options below aren't implemented because they - * don't seem useful to me, but they could theoretically be implemented - * if someone ever needed them. I'm not sure why you'd want to do a chained - * sort on a target that was a reference or a quantity, but if someone needed - * that we could implement it here. - */ + /* + * Note that many of the options below aren't implemented because they + * don't seem useful to me, but they could theoretically be implemented + * if someone ever needed them. I'm not sure why you'd want to do a chained + * sort on a target that was a reference or a quantity, but if someone needed + * that we could implement it here. + */ case NUMBER: case REFERENCE: case COMPOSITE: @@ -346,7 +381,9 @@ public class QueryStack { case HAS: case SPECIAL: default: - throw new InvalidRequestException(Msg.code(2290) + "Unable to sort on a chained parameter " + theParamName + "." + theChain + " as this parameter. 
Can not sort on chains of target type: " + targetSearchParameter.getParamType().name()); + throw new InvalidRequestException(Msg.code(2290) + "Unable to sort on a chained parameter " + + theParamName + "." + theChain + " as this parameter. Can not sort on chains of target type: " + + targetSearchParameter.getParamType().name()); } addSortCustomJoin(resourceLinkPredicateBuilder.getColumnTargetResourceId(), chainedPredicateBuilder, null); @@ -356,14 +393,14 @@ public class QueryStack { for (DbColumn next : sortColumn) { mySqlBuilder.addSortNumeric(next, theAscending, myUseAggregate); } - } public void addSortOnString(String theResourceName, String theParamName, boolean theAscending) { BaseJoiningPredicateBuilder firstPredicateBuilder = mySqlBuilder.getOrCreateFirstPredicateBuilder(); StringPredicateBuilder stringPredicateBuilder = mySqlBuilder.createStringPredicateBuilder(); - Condition hashIdentityPredicate = stringPredicateBuilder.createHashIdentityPredicate(theResourceName, theParamName); + Condition hashIdentityPredicate = + stringPredicateBuilder.createHashIdentityPredicate(theResourceName, theParamName); addSortCustomJoin(firstPredicateBuilder, stringPredicateBuilder, hashIdentityPredicate); @@ -374,45 +411,51 @@ public class QueryStack { BaseJoiningPredicateBuilder firstPredicateBuilder = mySqlBuilder.getOrCreateFirstPredicateBuilder(); TokenPredicateBuilder tokenPredicateBuilder = mySqlBuilder.createTokenPredicateBuilder(); - Condition hashIdentityPredicate = tokenPredicateBuilder.createHashIdentityPredicate(theResourceName, theParamName); + Condition hashIdentityPredicate = + tokenPredicateBuilder.createHashIdentityPredicate(theResourceName, theParamName); addSortCustomJoin(firstPredicateBuilder, tokenPredicateBuilder, hashIdentityPredicate); mySqlBuilder.addSortString(tokenPredicateBuilder.getColumnSystem(), theAscending, myUseAggregate); mySqlBuilder.addSortString(tokenPredicateBuilder.getColumnValue(), theAscending, myUseAggregate); - } public void addSortOnUri(String theResourceName, String theParamName, boolean theAscending) { BaseJoiningPredicateBuilder firstPredicateBuilder = mySqlBuilder.getOrCreateFirstPredicateBuilder(); UriPredicateBuilder uriPredicateBuilder = mySqlBuilder.createUriPredicateBuilder(); - Condition hashIdentityPredicate = uriPredicateBuilder.createHashIdentityPredicate(theResourceName, theParamName); + Condition hashIdentityPredicate = + uriPredicateBuilder.createHashIdentityPredicate(theResourceName, theParamName); addSortCustomJoin(firstPredicateBuilder, uriPredicateBuilder, hashIdentityPredicate); mySqlBuilder.addSortString(uriPredicateBuilder.getColumnValue(), theAscending, myUseAggregate); } - private void addSortCustomJoin(BaseJoiningPredicateBuilder theFromJoiningPredicateBuilder, BaseJoiningPredicateBuilder theToJoiningPredicateBuilder, Condition theCondition) { - addSortCustomJoin(theFromJoiningPredicateBuilder.getResourceIdColumn(), theToJoiningPredicateBuilder, theCondition); + private void addSortCustomJoin( + BaseJoiningPredicateBuilder theFromJoiningPredicateBuilder, + BaseJoiningPredicateBuilder theToJoiningPredicateBuilder, + Condition theCondition) { + addSortCustomJoin( + theFromJoiningPredicateBuilder.getResourceIdColumn(), theToJoiningPredicateBuilder, theCondition); } - private void addSortCustomJoin(DbColumn theFromDbColumn, BaseJoiningPredicateBuilder theToJoiningPredicateBuilder, Condition theCondition) { - ComboCondition onCondition = mySqlBuilder.createOnCondition( - theFromDbColumn, - 
theToJoiningPredicateBuilder.getResourceIdColumn() - ); + private void addSortCustomJoin( + DbColumn theFromDbColumn, + BaseJoiningPredicateBuilder theToJoiningPredicateBuilder, + Condition theCondition) { + ComboCondition onCondition = + mySqlBuilder.createOnCondition(theFromDbColumn, theToJoiningPredicateBuilder.getResourceIdColumn()); if (theCondition != null) { onCondition.addCondition(theCondition); } mySqlBuilder.addCustomJoin( - SelectQuery.JoinType.LEFT_OUTER, - theFromDbColumn.getTable(), - theToJoiningPredicateBuilder.getTable(), - onCondition); + SelectQuery.JoinType.LEFT_OUTER, + theFromDbColumn.getTable(), + theToJoiningPredicateBuilder.getTable(), + onCondition); } public void setUseAggregate(boolean theUseAggregate) { @@ -420,7 +463,11 @@ public class QueryStack { } @SuppressWarnings("unchecked") - private PredicateBuilderCacheLookupResult createOrReusePredicateBuilder(PredicateBuilderTypeEnum theType, DbColumn theSourceJoinColumn, String theParamName, Supplier theFactoryMethod) { + private PredicateBuilderCacheLookupResult createOrReusePredicateBuilder( + PredicateBuilderTypeEnum theType, + DbColumn theSourceJoinColumn, + String theParamName, + Supplier theFactoryMethod) { boolean cacheHit = false; BaseJoiningPredicateBuilder retVal; if (myReusePredicateBuilderTypes.contains(theType)) { @@ -449,28 +496,64 @@ public class QueryStack { return new PredicateBuilderCacheLookupResult<>(cacheHit, (T) retVal); } - private Condition createPredicateComposite(@Nullable DbColumn theSourceJoinColumn, String theResourceName, String theSpnamePrefix, RuntimeSearchParam theParamDef, List theNextAnd, RequestPartitionId theRequestPartitionId) { - return createPredicateComposite(theSourceJoinColumn, theResourceName, theSpnamePrefix, theParamDef, theNextAnd, theRequestPartitionId, mySqlBuilder); + private Condition createPredicateComposite( + @Nullable DbColumn theSourceJoinColumn, + String theResourceName, + String theSpnamePrefix, + RuntimeSearchParam theParamDef, + List theNextAnd, + RequestPartitionId theRequestPartitionId) { + return createPredicateComposite( + theSourceJoinColumn, + theResourceName, + theSpnamePrefix, + theParamDef, + theNextAnd, + theRequestPartitionId, + mySqlBuilder); } - private Condition createPredicateComposite(@Nullable DbColumn theSourceJoinColumn, String theResourceName, String theSpnamePrefix, RuntimeSearchParam theParamDef, List theNextAnd, RequestPartitionId theRequestPartitionId, SearchQueryBuilder theSqlBuilder) { + private Condition createPredicateComposite( + @Nullable DbColumn theSourceJoinColumn, + String theResourceName, + String theSpnamePrefix, + RuntimeSearchParam theParamDef, + List theNextAnd, + RequestPartitionId theRequestPartitionId, + SearchQueryBuilder theSqlBuilder) { Condition orCondidtion = null; for (IQueryParameterType next : theNextAnd) { if (!(next instanceof CompositeParam)) { - throw new InvalidRequestException(Msg.code(1203) + "Invalid type for composite param (must be " + CompositeParam.class.getSimpleName() + ": " + next.getClass()); + throw new InvalidRequestException(Msg.code(1203) + "Invalid type for composite param (must be " + + CompositeParam.class.getSimpleName() + ": " + next.getClass()); } CompositeParam cp = (CompositeParam) next; - List componentParams = JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, theParamDef); + List componentParams = + JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, theParamDef); RuntimeSearchParam left = componentParams.get(0); IQueryParameterType leftValue = 
cp.getLeftValue(); - Condition leftPredicate = createPredicateCompositePart(theSourceJoinColumn, theResourceName, theSpnamePrefix, left, leftValue, theRequestPartitionId, theSqlBuilder); + Condition leftPredicate = createPredicateCompositePart( + theSourceJoinColumn, + theResourceName, + theSpnamePrefix, + left, + leftValue, + theRequestPartitionId, + theSqlBuilder); RuntimeSearchParam right = componentParams.get(1); IQueryParameterType rightValue = cp.getRightValue(); - Condition rightPredicate = createPredicateCompositePart(theSourceJoinColumn, theResourceName, theSpnamePrefix, right, rightValue, theRequestPartitionId, theSqlBuilder); + Condition rightPredicate = createPredicateCompositePart( + theSourceJoinColumn, + theResourceName, + theSpnamePrefix, + right, + rightValue, + theRequestPartitionId, + theSqlBuilder); Condition andCondition = toAndPredicate(leftPredicate, rightPredicate); @@ -484,20 +567,59 @@ public class QueryStack { return orCondidtion; } - private Condition createPredicateCompositePart(@Nullable DbColumn theSourceJoinColumn, String theResourceName, String theSpnamePrefix, RuntimeSearchParam theParam, IQueryParameterType theParamValue, RequestPartitionId theRequestPartitionId, SearchQueryBuilder theSqlBuilder) { + private Condition createPredicateCompositePart( + @Nullable DbColumn theSourceJoinColumn, + String theResourceName, + String theSpnamePrefix, + RuntimeSearchParam theParam, + IQueryParameterType theParamValue, + RequestPartitionId theRequestPartitionId, + SearchQueryBuilder theSqlBuilder) { switch (theParam.getParamType()) { case STRING: { - return createPredicateString(theSourceJoinColumn, theResourceName, theSpnamePrefix, theParam, Collections.singletonList(theParamValue), null, theRequestPartitionId, theSqlBuilder); + return createPredicateString( + theSourceJoinColumn, + theResourceName, + theSpnamePrefix, + theParam, + Collections.singletonList(theParamValue), + null, + theRequestPartitionId, + theSqlBuilder); } case TOKEN: { - return createPredicateToken(theSourceJoinColumn, theResourceName, theSpnamePrefix, theParam, Collections.singletonList(theParamValue), null, theRequestPartitionId, theSqlBuilder); + return createPredicateToken( + theSourceJoinColumn, + theResourceName, + theSpnamePrefix, + theParam, + Collections.singletonList(theParamValue), + null, + theRequestPartitionId, + theSqlBuilder); } case DATE: { - return createPredicateDate(theSourceJoinColumn, theResourceName, theSpnamePrefix, theParam, Collections.singletonList(theParamValue), toOperation(((DateParam) theParamValue).getPrefix()), theRequestPartitionId, theSqlBuilder); + return createPredicateDate( + theSourceJoinColumn, + theResourceName, + theSpnamePrefix, + theParam, + Collections.singletonList(theParamValue), + toOperation(((DateParam) theParamValue).getPrefix()), + theRequestPartitionId, + theSqlBuilder); } case QUANTITY: { - return createPredicateQuantity(theSourceJoinColumn, theResourceName, theSpnamePrefix, theParam, Collections.singletonList(theParamValue), null, theRequestPartitionId, theSqlBuilder); + return createPredicateQuantity( + theSourceJoinColumn, + theResourceName, + theSpnamePrefix, + theParam, + Collections.singletonList(theParamValue), + null, + theRequestPartitionId, + theSqlBuilder); } case NUMBER: case REFERENCE: @@ -506,19 +628,19 @@ public class QueryStack { case HAS: case SPECIAL: default: - throw new InvalidRequestException(Msg.code(1204) + "Don't know how to handle composite parameter with type of " + theParam.getParamType()); + throw new 
InvalidRequestException(Msg.code(1204) + + "Don't know how to handle composite parameter with type of " + theParam.getParamType()); } - } - private Condition createMissingParameterQuery( - MissingParameterQueryParams theParams - ) { + private Condition createMissingParameterQuery(MissingParameterQueryParams theParams) { if (theParams.getParamType() == RestSearchParameterTypeEnum.COMPOSITE) { ourLog.error("Cannot create missing parameter query for a composite parameter."); return null; } else if (theParams.getParamType() == RestSearchParameterTypeEnum.REFERENCE) { - if (isEligibleForEmbeddedChainedResourceSearch(theParams.getResourceType(), theParams.getParamName(), theParams.getQueryParameterTypes()).supportsUplifted()) { + if (isEligibleForEmbeddedChainedResourceSearch( + theParams.getResourceType(), theParams.getParamName(), theParams.getQueryParameterTypes()) + .supportsUplifted()) { ourLog.error("Cannot construct missing query parameter search for ContainedResource REFERENCE search."); return null; } @@ -564,7 +686,8 @@ public class QueryStack { * Old way of searching. * Missing values must be indexed! */ - private Condition createMissingPredicateForIndexedMissingFields(MissingParameterQueryParams theParams, SearchQueryBuilder sqlBuilder) { + private Condition createMissingPredicateForIndexedMissingFields( + MissingParameterQueryParams theParams, SearchQueryBuilder sqlBuilder) { PredicateBuilderTypeEnum predicateType = null; Supplier supplier = null; switch (theParams.getParamType()) { @@ -605,35 +728,30 @@ public class QueryStack { if (supplier != null) { BaseSearchParamPredicateBuilder join = (BaseSearchParamPredicateBuilder) createOrReusePredicateBuilder( - predicateType, - theParams.getSourceJoinColumn(), - theParams.getParamName(), - supplier - ).getResult(); + predicateType, theParams.getSourceJoinColumn(), theParams.getParamName(), supplier) + .getResult(); return join.createPredicateParamMissingForNonReference( - theParams.getResourceType(), - theParams.getParamName(), - theParams.isMissing(), - theParams.getRequestPartitionId() - ); - } else { - if (theParams.getParamType() == RestSearchParameterTypeEnum.REFERENCE) { - SearchParamPresentPredicateBuilder join = sqlBuilder.addSearchParamPresentPredicateBuilder(theParams.getSourceJoinColumn()); - return join.createPredicateParamMissingForReference( theParams.getResourceType(), theParams.getParamName(), theParams.isMissing(), - theParams.getRequestPartitionId() - ); + theParams.getRequestPartitionId()); + } else { + if (theParams.getParamType() == RestSearchParameterTypeEnum.REFERENCE) { + SearchParamPresentPredicateBuilder join = + sqlBuilder.addSearchParamPresentPredicateBuilder(theParams.getSourceJoinColumn()); + return join.createPredicateParamMissingForReference( + theParams.getResourceType(), + theParams.getParamName(), + theParams.isMissing(), + theParams.getRequestPartitionId()); } else if (theParams.getParamType() == RestSearchParameterTypeEnum.URI) { UriPredicateBuilder join = sqlBuilder.addUriPredicateBuilder(theParams.getSourceJoinColumn()); return join.createPredicateParamMissingForNonReference( - theParams.getResourceType(), - theParams.getParamName(), - theParams.isMissing(), - theParams.getRequestPartitionId() - ); + theParams.getResourceType(), + theParams.getParamName(), + theParams.isMissing(), + theParams.getRequestPartitionId()); } else { // we don't expect to see this ourLog.error("Invalid param type " + theParams.getParamType().name()); @@ -646,86 +764,109 @@ public class QueryStack { * New way of searching 
for missing fields. * Missing values must not indexed! */ - private Condition createMissingPredicateForUnindexedMissingFields(MissingParameterQueryParams theParams, SearchQueryBuilder sqlBuilder) { + private Condition createMissingPredicateForUnindexedMissingFields( + MissingParameterQueryParams theParams, SearchQueryBuilder sqlBuilder) { ResourceTablePredicateBuilder table = sqlBuilder.getOrCreateResourceTablePredicateBuilder(); ICanMakeMissingParamPredicate innerQuery = PredicateBuilderFactory.createPredicateBuilderForParamType( - theParams.getParamType(), - theParams.getSqlBuilder(), - this - ); + theParams.getParamType(), theParams.getSqlBuilder(), this); - return innerQuery.createPredicateParamMissingValue( - new MissingQueryParameterPredicateParams( - table, - theParams.isMissing(), - theParams.getParamName(), - theParams.getRequestPartitionId() - ) - ); + return innerQuery.createPredicateParamMissingValue(new MissingQueryParameterPredicateParams( + table, theParams.isMissing(), theParams.getParamName(), theParams.getRequestPartitionId())); } - public Condition createPredicateCoords(@Nullable DbColumn theSourceJoinColumn, - String theResourceName, - String theSpnamePrefix, - RuntimeSearchParam theSearchParam, - List theList, - RequestPartitionId theRequestPartitionId, - SearchQueryBuilder theSqlBuilder) { + public Condition createPredicateCoords( + @Nullable DbColumn theSourceJoinColumn, + String theResourceName, + String theSpnamePrefix, + RuntimeSearchParam theSearchParam, + List theList, + RequestPartitionId theRequestPartitionId, + SearchQueryBuilder theSqlBuilder) { Boolean isMissing = theList.get(0).getMissing(); if (isMissing != null) { String paramName = getParamNameWithPrefix(theSpnamePrefix, theSearchParam.getName()); - return createMissingParameterQuery( - new MissingParameterQueryParams( + return createMissingParameterQuery(new MissingParameterQueryParams( theSqlBuilder, theSearchParam.getParamType(), theList, paramName, theResourceName, theSourceJoinColumn, - theRequestPartitionId - ) - ); + theRequestPartitionId)); } else { - CoordsPredicateBuilder predicateBuilder = createOrReusePredicateBuilder(PredicateBuilderTypeEnum.COORDS, theSourceJoinColumn, theSearchParam.getName(), () -> mySqlBuilder.addCoordsPredicateBuilder(theSourceJoinColumn)).getResult(); + CoordsPredicateBuilder predicateBuilder = createOrReusePredicateBuilder( + PredicateBuilderTypeEnum.COORDS, + theSourceJoinColumn, + theSearchParam.getName(), + () -> mySqlBuilder.addCoordsPredicateBuilder(theSourceJoinColumn)) + .getResult(); List codePredicates = new ArrayList<>(); for (IQueryParameterType nextOr : theList) { - Condition singleCode = predicateBuilder.createPredicateCoords(mySearchParameters, nextOr, theResourceName, theSearchParam, predicateBuilder, theRequestPartitionId); + Condition singleCode = predicateBuilder.createPredicateCoords( + mySearchParameters, + nextOr, + theResourceName, + theSearchParam, + predicateBuilder, + theRequestPartitionId); codePredicates.add(singleCode); } - return predicateBuilder.combineWithRequestPartitionIdPredicate(theRequestPartitionId, ComboCondition.or(codePredicates.toArray(new Condition[0]))); + return predicateBuilder.combineWithRequestPartitionIdPredicate( + theRequestPartitionId, ComboCondition.or(codePredicates.toArray(new Condition[0]))); } } - public Condition createPredicateDate(@Nullable DbColumn theSourceJoinColumn, String theResourceName, - String theSpnamePrefix, RuntimeSearchParam theSearchParam, List theList, - SearchFilterParser.CompareOperation 
theOperation, RequestPartitionId theRequestPartitionId) { - return createPredicateDate(theSourceJoinColumn, theResourceName, theSpnamePrefix, theSearchParam, theList, theOperation, theRequestPartitionId, mySqlBuilder); + public Condition createPredicateDate( + @Nullable DbColumn theSourceJoinColumn, + String theResourceName, + String theSpnamePrefix, + RuntimeSearchParam theSearchParam, + List theList, + SearchFilterParser.CompareOperation theOperation, + RequestPartitionId theRequestPartitionId) { + return createPredicateDate( + theSourceJoinColumn, + theResourceName, + theSpnamePrefix, + theSearchParam, + theList, + theOperation, + theRequestPartitionId, + mySqlBuilder); } - public Condition createPredicateDate(@Nullable DbColumn theSourceJoinColumn, String theResourceName, - String theSpnamePrefix, RuntimeSearchParam theSearchParam, List theList, - SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId, SearchQueryBuilder theSqlBuilder) { + public Condition createPredicateDate( + @Nullable DbColumn theSourceJoinColumn, + String theResourceName, + String theSpnamePrefix, + RuntimeSearchParam theSearchParam, + List theList, + SearchFilterParser.CompareOperation theOperation, + RequestPartitionId theRequestPartitionId, + SearchQueryBuilder theSqlBuilder) { String paramName = getParamNameWithPrefix(theSpnamePrefix, theSearchParam.getName()); Boolean isMissing = theList.get(0).getMissing(); if (isMissing != null) { - return createMissingParameterQuery( - new MissingParameterQueryParams( + return createMissingParameterQuery(new MissingParameterQueryParams( theSqlBuilder, theSearchParam.getParamType(), theList, paramName, theResourceName, theSourceJoinColumn, - theRequestPartitionId - ) - ); + theRequestPartitionId)); } else { - PredicateBuilderCacheLookupResult predicateBuilderLookupResult = createOrReusePredicateBuilder(PredicateBuilderTypeEnum.DATE, theSourceJoinColumn, paramName, () -> theSqlBuilder.addDatePredicateBuilder(theSourceJoinColumn)); + PredicateBuilderCacheLookupResult predicateBuilderLookupResult = + createOrReusePredicateBuilder( + PredicateBuilderTypeEnum.DATE, + theSourceJoinColumn, + paramName, + () -> theSqlBuilder.addDatePredicateBuilder(theSourceJoinColumn)); DatePredicateBuilder predicateBuilder = predicateBuilderLookupResult.getResult(); boolean cacheHit = predicateBuilderLookupResult.isCacheHit(); @@ -747,31 +888,64 @@ public class QueryStack { } } - private Condition createPredicateFilter(QueryStack theQueryStack3, SearchFilterParser.BaseFilter theFilter, String theResourceName, RequestDetails theRequest, RequestPartitionId theRequestPartitionId) { + private Condition createPredicateFilter( + QueryStack theQueryStack3, + SearchFilterParser.BaseFilter theFilter, + String theResourceName, + RequestDetails theRequest, + RequestPartitionId theRequestPartitionId) { if (theFilter instanceof SearchFilterParser.FilterParameter) { - return createPredicateFilter(theQueryStack3, (SearchFilterParser.FilterParameter) theFilter, theResourceName, theRequest, theRequestPartitionId); + return createPredicateFilter( + theQueryStack3, + (SearchFilterParser.FilterParameter) theFilter, + theResourceName, + theRequest, + theRequestPartitionId); } else if (theFilter instanceof SearchFilterParser.FilterLogical) { // Left side - Condition xPredicate = createPredicateFilter(theQueryStack3, ((SearchFilterParser.FilterLogical) theFilter).getFilter1(), theResourceName, theRequest, theRequestPartitionId); + Condition xPredicate = createPredicateFilter( + 
theQueryStack3, + ((SearchFilterParser.FilterLogical) theFilter).getFilter1(), + theResourceName, + theRequest, + theRequestPartitionId); // Right side - Condition yPredicate = createPredicateFilter(theQueryStack3, ((SearchFilterParser.FilterLogical) theFilter).getFilter2(), theResourceName, theRequest, theRequestPartitionId); + Condition yPredicate = createPredicateFilter( + theQueryStack3, + ((SearchFilterParser.FilterLogical) theFilter).getFilter2(), + theResourceName, + theRequest, + theRequestPartitionId); - if (((SearchFilterParser.FilterLogical) theFilter).getOperation() == SearchFilterParser.FilterLogicalOperation.and) { + if (((SearchFilterParser.FilterLogical) theFilter).getOperation() + == SearchFilterParser.FilterLogicalOperation.and) { return ComboCondition.and(xPredicate, yPredicate); - } else if (((SearchFilterParser.FilterLogical) theFilter).getOperation() == SearchFilterParser.FilterLogicalOperation.or) { + } else if (((SearchFilterParser.FilterLogical) theFilter).getOperation() + == SearchFilterParser.FilterLogicalOperation.or) { return ComboCondition.or(xPredicate, yPredicate); } else { // Shouldn't happen - throw new InvalidRequestException(Msg.code(1205) + "Don't know how to handle operation " + ((SearchFilterParser.FilterLogical) theFilter).getOperation()); + throw new InvalidRequestException(Msg.code(1205) + "Don't know how to handle operation " + + ((SearchFilterParser.FilterLogical) theFilter).getOperation()); } } else { - return createPredicateFilter(theQueryStack3, ((SearchFilterParser.FilterParameterGroup) theFilter).getContained(), theResourceName, theRequest, theRequestPartitionId); + return createPredicateFilter( + theQueryStack3, + ((SearchFilterParser.FilterParameterGroup) theFilter).getContained(), + theResourceName, + theRequest, + theRequestPartitionId); } } - private Condition createPredicateFilter(QueryStack theQueryStack3, SearchFilterParser.FilterParameter theFilter, String theResourceName, RequestDetails theRequest, RequestPartitionId theRequestPartitionId) { + private Condition createPredicateFilter( + QueryStack theQueryStack3, + SearchFilterParser.FilterParameter theFilter, + String theResourceName, + RequestDetails theRequest, + RequestPartitionId theRequestPartitionId) { String paramName = theFilter.getParamPath().getName(); @@ -779,7 +953,12 @@ public class QueryStack { case IAnyResource.SP_RES_ID: { TokenParam param = new TokenParam(); param.setValueAsQueryToken(null, null, null, theFilter.getValue()); - return theQueryStack3.createPredicateResourceId(null, Collections.singletonList(Collections.singletonList(param)), theResourceName, theFilter.getOperation(), theRequestPartitionId); + return theQueryStack3.createPredicateResourceId( + null, + Collections.singletonList(Collections.singletonList(param)), + theResourceName, + theFilter.getOperation(), + theRequestPartitionId); } case Constants.PARAM_SOURCE: { TokenParam param = new TokenParam(); @@ -789,44 +968,111 @@ public class QueryStack { default: RuntimeSearchParam searchParam = mySearchParamRegistry.getActiveSearchParam(theResourceName, paramName); if (searchParam == null) { - Collection validNames = mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(theResourceName); - String msg = myFhirContext.getLocalizer().getMessageSanitized(BaseStorageDao.class, "invalidSearchParameter", paramName, theResourceName, validNames); + Collection validNames = + mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(theResourceName); + String msg = myFhirContext + .getLocalizer() + 
.getMessageSanitized( + BaseStorageDao.class, + "invalidSearchParameter", + paramName, + theResourceName, + validNames); throw new InvalidRequestException(Msg.code(1206) + msg); } RestSearchParameterTypeEnum typeEnum = searchParam.getParamType(); if (typeEnum == RestSearchParameterTypeEnum.URI) { - return theQueryStack3.createPredicateUri(null, theResourceName, null, searchParam, Collections.singletonList(new UriParam(theFilter.getValue())), theFilter.getOperation(), theRequest, theRequestPartitionId); + return theQueryStack3.createPredicateUri( + null, + theResourceName, + null, + searchParam, + Collections.singletonList(new UriParam(theFilter.getValue())), + theFilter.getOperation(), + theRequest, + theRequestPartitionId); } else if (typeEnum == RestSearchParameterTypeEnum.STRING) { - return theQueryStack3.createPredicateString(null, theResourceName, null, searchParam, Collections.singletonList(new StringParam(theFilter.getValue())), theFilter.getOperation(), theRequestPartitionId); + return theQueryStack3.createPredicateString( + null, + theResourceName, + null, + searchParam, + Collections.singletonList(new StringParam(theFilter.getValue())), + theFilter.getOperation(), + theRequestPartitionId); } else if (typeEnum == RestSearchParameterTypeEnum.DATE) { - return theQueryStack3.createPredicateDate(null, theResourceName, null, searchParam, Collections.singletonList(new DateParam(fromOperation(theFilter.getOperation()), theFilter.getValue())), theFilter.getOperation(), theRequestPartitionId); + return theQueryStack3.createPredicateDate( + null, + theResourceName, + null, + searchParam, + Collections.singletonList( + new DateParam(fromOperation(theFilter.getOperation()), theFilter.getValue())), + theFilter.getOperation(), + theRequestPartitionId); } else if (typeEnum == RestSearchParameterTypeEnum.NUMBER) { - return theQueryStack3.createPredicateNumber(null, theResourceName, null, searchParam, Collections.singletonList(new NumberParam(theFilter.getValue())), theFilter.getOperation(), theRequestPartitionId); + return theQueryStack3.createPredicateNumber( + null, + theResourceName, + null, + searchParam, + Collections.singletonList(new NumberParam(theFilter.getValue())), + theFilter.getOperation(), + theRequestPartitionId); } else if (typeEnum == RestSearchParameterTypeEnum.REFERENCE) { SearchFilterParser.CompareOperation operation = theFilter.getOperation(); - String resourceType = null; // The value can either have (Patient/123) or not have (123) a resource type, either way it's not needed here - String chain = (theFilter.getParamPath().getNext() != null) ? theFilter.getParamPath().getNext().toString() : null; + String resourceType = + null; // The value can either have (Patient/123) or not have (123) a resource type, either + // way it's not needed here + String chain = (theFilter.getParamPath().getNext() != null) + ? 
theFilter.getParamPath().getNext().toString() + : null; String value = theFilter.getValue(); ReferenceParam referenceParam = new ReferenceParam(resourceType, chain, value); - return theQueryStack3.createPredicateReference(null, theResourceName, paramName, new ArrayList<>(), Collections.singletonList(referenceParam), operation, theRequest, theRequestPartitionId); + return theQueryStack3.createPredicateReference( + null, + theResourceName, + paramName, + new ArrayList<>(), + Collections.singletonList(referenceParam), + operation, + theRequest, + theRequestPartitionId); } else if (typeEnum == RestSearchParameterTypeEnum.QUANTITY) { - return theQueryStack3.createPredicateQuantity(null, theResourceName, null, searchParam, Collections.singletonList(new QuantityParam(theFilter.getValue())), theFilter.getOperation(), theRequestPartitionId); + return theQueryStack3.createPredicateQuantity( + null, + theResourceName, + null, + searchParam, + Collections.singletonList(new QuantityParam(theFilter.getValue())), + theFilter.getOperation(), + theRequestPartitionId); } else if (typeEnum == RestSearchParameterTypeEnum.COMPOSITE) { - throw new InvalidRequestException(Msg.code(1207) + "Composite search parameters not currently supported with _filter clauses"); + throw new InvalidRequestException(Msg.code(1207) + + "Composite search parameters not currently supported with _filter clauses"); } else if (typeEnum == RestSearchParameterTypeEnum.TOKEN) { TokenParam param = new TokenParam(); - param.setValueAsQueryToken(null, - null, - null, - theFilter.getValue()); - return theQueryStack3.createPredicateToken(null, theResourceName, null, searchParam, Collections.singletonList(param), theFilter.getOperation(), theRequestPartitionId); + param.setValueAsQueryToken(null, null, null, theFilter.getValue()); + return theQueryStack3.createPredicateToken( + null, + theResourceName, + null, + searchParam, + Collections.singletonList(param), + theFilter.getOperation(), + theRequestPartitionId); } break; } return null; } - private Condition createPredicateHas(@Nullable DbColumn theSourceJoinColumn, String theResourceType, List> theHasParameters, RequestDetails theRequest, RequestPartitionId theRequestPartitionId) { + private Condition createPredicateHas( + @Nullable DbColumn theSourceJoinColumn, + String theResourceType, + List> theHasParameters, + RequestDetails theRequest, + RequestPartitionId theRequestPartitionId) { List andPredicates = new ArrayList<>(); for (List nextOrList : theHasParameters) { @@ -865,7 +1111,8 @@ public class QueryStack { String qualifier = paramName.substring(4); for (IQueryParameterType next : nextOrList) { HasParam nextHasParam = new HasParam(); - nextHasParam.setValueAsQueryToken(myFhirContext, PARAM_HAS, qualifier, next.getValueAsQueryToken(myFhirContext)); + nextHasParam.setValueAsQueryToken( + myFhirContext, PARAM_HAS, qualifier, next.getValueAsQueryToken(myFhirContext)); orValues.add(nextHasParam); } @@ -877,31 +1124,32 @@ public class QueryStack { } else { - //Ensure that the name of the search param + // Ensure that the name of the search param // (e.g. the `code` in Patient?_has:Observation:subject:code=sys|val) // exists on the target resource type. - RuntimeSearchParam owningParameterDef = mySearchParamRegistry.getRuntimeSearchParam(targetResourceType, paramName); + RuntimeSearchParam owningParameterDef = + mySearchParamRegistry.getRuntimeSearchParam(targetResourceType, paramName); - //Ensure that the name of the back-referenced search param on the target (e.g. 
the `subject` in Patient?_has:Observation:subject:code=sys|val) - //exists on the target resource, or in the top-level Resource resource. + // Ensure that the name of the back-referenced search param on the target (e.g. the `subject` in + // Patient?_has:Observation:subject:code=sys|val) + // exists on the target resource, or in the top-level Resource resource. mySearchParamRegistry.getRuntimeSearchParam(targetResourceType, paramReference); - - IQueryParameterAnd parsedParam = JpaParamUtil.parseQueryParams(mySearchParamRegistry, myFhirContext, owningParameterDef, paramName, parameters); + IQueryParameterAnd parsedParam = JpaParamUtil.parseQueryParams( + mySearchParamRegistry, myFhirContext, owningParameterDef, paramName, parameters); for (IQueryParameterOr next : parsedParam.getValuesAsQueryTokens()) { orValues.addAll(next.getValuesAsQueryTokens()); } - } - //Handle internal chain inside the has. + // Handle internal chain inside the has. if (parameterName.contains(".")) { String chainedPartOfParameter = getChainedPart(parameterName); orValues.stream() - .filter(qp -> qp instanceof ReferenceParam) - .map(qp -> (ReferenceParam) qp) - .forEach(rp -> rp.setChain(getChainedPart(chainedPartOfParameter))); + .filter(qp -> qp instanceof ReferenceParam) + .map(qp -> (ReferenceParam) qp) + .forEach(rp -> rp.setChain(getChainedPart(chainedPartOfParameter))); parameterName = parameterName.substring(0, parameterName.indexOf('.')); } @@ -911,49 +1159,80 @@ public class QueryStack { parameterName = parameterName.substring(0, colonIndex); } - ResourceLinkPredicateBuilder join = mySqlBuilder.addReferencePredicateBuilderReversed(this, theSourceJoinColumn); + ResourceLinkPredicateBuilder join = + mySqlBuilder.addReferencePredicateBuilderReversed(this, theSourceJoinColumn); Condition partitionPredicate = join.createPartitionIdPredicate(theRequestPartitionId); List paths = join.createResourceLinkPaths(targetResourceType, paramReference, new ArrayList<>()); - if (CollectionUtils.isEmpty(paths)) { - throw new InvalidRequestException(Msg.code(2305) + "Reference field does not exist: " + paramReference); - } - Condition typePredicate = BinaryCondition.equalTo(join.getColumnTargetResourceType(), mySqlBuilder.generatePlaceholder(theResourceType)); - Condition pathPredicate = toEqualToOrInPredicate(join.getColumnSourcePath(), mySqlBuilder.generatePlaceholders(paths)); - Condition linkedPredicate = searchForIdsWithAndOr(join.getColumnSrcResourceId(), targetResourceType, parameterName, Collections.singletonList(orValues), theRequest, theRequestPartitionId, SearchContainedModeEnum.FALSE); + if (CollectionUtils.isEmpty(paths)) { + throw new InvalidRequestException(Msg.code(2305) + "Reference field does not exist: " + paramReference); + } + Condition typePredicate = BinaryCondition.equalTo( + join.getColumnTargetResourceType(), mySqlBuilder.generatePlaceholder(theResourceType)); + Condition pathPredicate = + toEqualToOrInPredicate(join.getColumnSourcePath(), mySqlBuilder.generatePlaceholders(paths)); + Condition linkedPredicate = searchForIdsWithAndOr( + join.getColumnSrcResourceId(), + targetResourceType, + parameterName, + Collections.singletonList(orValues), + theRequest, + theRequestPartitionId, + SearchContainedModeEnum.FALSE); andPredicates.add(toAndPredicate(partitionPredicate, pathPredicate, typePredicate, linkedPredicate)); } return toAndPredicate(andPredicates); } - public Condition createPredicateNumber(@Nullable DbColumn theSourceJoinColumn, String theResourceName, - String theSpnamePrefix, RuntimeSearchParam 
theSearchParam, List theList, - SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId) { - return createPredicateNumber(theSourceJoinColumn, theResourceName, theSpnamePrefix, theSearchParam, theList, theOperation, theRequestPartitionId, mySqlBuilder); + public Condition createPredicateNumber( + @Nullable DbColumn theSourceJoinColumn, + String theResourceName, + String theSpnamePrefix, + RuntimeSearchParam theSearchParam, + List theList, + SearchFilterParser.CompareOperation theOperation, + RequestPartitionId theRequestPartitionId) { + return createPredicateNumber( + theSourceJoinColumn, + theResourceName, + theSpnamePrefix, + theSearchParam, + theList, + theOperation, + theRequestPartitionId, + mySqlBuilder); } - public Condition createPredicateNumber(@Nullable DbColumn theSourceJoinColumn, String theResourceName, - String theSpnamePrefix, RuntimeSearchParam theSearchParam, List theList, - SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId, SearchQueryBuilder theSqlBuilder) { + public Condition createPredicateNumber( + @Nullable DbColumn theSourceJoinColumn, + String theResourceName, + String theSpnamePrefix, + RuntimeSearchParam theSearchParam, + List theList, + SearchFilterParser.CompareOperation theOperation, + RequestPartitionId theRequestPartitionId, + SearchQueryBuilder theSqlBuilder) { String paramName = getParamNameWithPrefix(theSpnamePrefix, theSearchParam.getName()); Boolean isMissing = theList.get(0).getMissing(); if (isMissing != null) { - return createMissingParameterQuery( - new MissingParameterQueryParams( + return createMissingParameterQuery(new MissingParameterQueryParams( theSqlBuilder, theSearchParam.getParamType(), theList, paramName, theResourceName, theSourceJoinColumn, - theRequestPartitionId - ) - ); + theRequestPartitionId)); } else { - NumberPredicateBuilder join = createOrReusePredicateBuilder(PredicateBuilderTypeEnum.NUMBER, theSourceJoinColumn, paramName, () -> theSqlBuilder.addNumberPredicateBuilder(theSourceJoinColumn)).getResult(); + NumberPredicateBuilder join = createOrReusePredicateBuilder( + PredicateBuilderTypeEnum.NUMBER, + theSourceJoinColumn, + paramName, + () -> theSqlBuilder.addNumberPredicateBuilder(theSourceJoinColumn)) + .getResult(); List codePredicates = new ArrayList<>(); for (IQueryParameterType nextOr : theList) { @@ -971,122 +1250,172 @@ public class QueryStack { operation = toOperation(param.getPrefix()); } - - Condition predicate = join.createPredicateNumeric(theResourceName, paramName, operation, value, theRequestPartitionId, nextOr); + Condition predicate = join.createPredicateNumeric( + theResourceName, paramName, operation, value, theRequestPartitionId, nextOr); codePredicates.add(predicate); } else { throw new IllegalArgumentException(Msg.code(1211) + "Invalid token type: " + nextOr.getClass()); } - } - return join.combineWithRequestPartitionIdPredicate(theRequestPartitionId, ComboCondition.or(codePredicates.toArray(new Condition[0]))); + return join.combineWithRequestPartitionIdPredicate( + theRequestPartitionId, ComboCondition.or(codePredicates.toArray(new Condition[0]))); } } - public Condition createPredicateQuantity(@Nullable DbColumn theSourceJoinColumn, String theResourceName, - String theSpnamePrefix, RuntimeSearchParam theSearchParam, List theList, - SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId) { - return createPredicateQuantity(theSourceJoinColumn, theResourceName, theSpnamePrefix, theSearchParam, 
theList, theOperation, theRequestPartitionId, mySqlBuilder); + public Condition createPredicateQuantity( + @Nullable DbColumn theSourceJoinColumn, + String theResourceName, + String theSpnamePrefix, + RuntimeSearchParam theSearchParam, + List theList, + SearchFilterParser.CompareOperation theOperation, + RequestPartitionId theRequestPartitionId) { + return createPredicateQuantity( + theSourceJoinColumn, + theResourceName, + theSpnamePrefix, + theSearchParam, + theList, + theOperation, + theRequestPartitionId, + mySqlBuilder); } - public Condition createPredicateQuantity(@Nullable DbColumn theSourceJoinColumn, String theResourceName, - String theSpnamePrefix, RuntimeSearchParam theSearchParam, List theList, - SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId, SearchQueryBuilder theSqlBuilder) { + public Condition createPredicateQuantity( + @Nullable DbColumn theSourceJoinColumn, + String theResourceName, + String theSpnamePrefix, + RuntimeSearchParam theSearchParam, + List theList, + SearchFilterParser.CompareOperation theOperation, + RequestPartitionId theRequestPartitionId, + SearchQueryBuilder theSqlBuilder) { String paramName = getParamNameWithPrefix(theSpnamePrefix, theSearchParam.getName()); Boolean isMissing = theList.get(0).getMissing(); if (isMissing != null) { - return createMissingParameterQuery( - new MissingParameterQueryParams( + return createMissingParameterQuery(new MissingParameterQueryParams( theSqlBuilder, theSearchParam.getParamType(), theList, paramName, theResourceName, theSourceJoinColumn, - theRequestPartitionId - ) - ); + theRequestPartitionId)); } else { - List quantityParams = theList - .stream() - .map(t -> QuantityParam.toQuantityParam(t)) - .collect(Collectors.toList()); + List quantityParams = + theList.stream().map(t -> QuantityParam.toQuantityParam(t)).collect(Collectors.toList()); BaseQuantityPredicateBuilder join = null; - boolean normalizedSearchEnabled = myStorageSettings.getNormalizedQuantitySearchLevel().equals(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_SUPPORTED); + boolean normalizedSearchEnabled = myStorageSettings + .getNormalizedQuantitySearchLevel() + .equals(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_SUPPORTED); if (normalizedSearchEnabled) { - List normalizedQuantityParams = quantityParams - .stream() - .map(t -> UcumServiceUtil.toCanonicalQuantityOrNull(t)) - .filter(t -> t != null) - .collect(Collectors.toList()); + List normalizedQuantityParams = quantityParams.stream() + .map(t -> UcumServiceUtil.toCanonicalQuantityOrNull(t)) + .filter(t -> t != null) + .collect(Collectors.toList()); if (normalizedQuantityParams.size() == quantityParams.size()) { - join = createOrReusePredicateBuilder(PredicateBuilderTypeEnum.QUANTITY, theSourceJoinColumn, paramName, () -> theSqlBuilder.addQuantityNormalizedPredicateBuilder(theSourceJoinColumn)).getResult(); + join = createOrReusePredicateBuilder( + PredicateBuilderTypeEnum.QUANTITY, + theSourceJoinColumn, + paramName, + () -> theSqlBuilder.addQuantityNormalizedPredicateBuilder(theSourceJoinColumn)) + .getResult(); quantityParams = normalizedQuantityParams; } } if (join == null) { - join = createOrReusePredicateBuilder(PredicateBuilderTypeEnum.QUANTITY, theSourceJoinColumn, paramName, () -> theSqlBuilder.addQuantityPredicateBuilder(theSourceJoinColumn)).getResult(); + join = createOrReusePredicateBuilder( + PredicateBuilderTypeEnum.QUANTITY, + theSourceJoinColumn, + paramName, + () -> 
theSqlBuilder.addQuantityPredicateBuilder(theSourceJoinColumn)) + .getResult(); } List codePredicates = new ArrayList<>(); for (QuantityParam nextOr : quantityParams) { - Condition singleCode = join.createPredicateQuantity(nextOr, theResourceName, paramName, null, join, theOperation, theRequestPartitionId); + Condition singleCode = join.createPredicateQuantity( + nextOr, theResourceName, paramName, null, join, theOperation, theRequestPartitionId); codePredicates.add(singleCode); } - return join.combineWithRequestPartitionIdPredicate(theRequestPartitionId, ComboCondition.or(codePredicates.toArray(new Condition[0]))); + return join.combineWithRequestPartitionIdPredicate( + theRequestPartitionId, ComboCondition.or(codePredicates.toArray(new Condition[0]))); } } - public Condition createPredicateReference(@Nullable DbColumn theSourceJoinColumn, - String theResourceName, - String theParamName, - List theQualifiers, - List theList, - SearchFilterParser.CompareOperation theOperation, - RequestDetails theRequest, - RequestPartitionId theRequestPartitionId) { - return createPredicateReference(theSourceJoinColumn, theResourceName, theParamName, theQualifiers, theList, theOperation, theRequest, theRequestPartitionId, mySqlBuilder); + public Condition createPredicateReference( + @Nullable DbColumn theSourceJoinColumn, + String theResourceName, + String theParamName, + List theQualifiers, + List theList, + SearchFilterParser.CompareOperation theOperation, + RequestDetails theRequest, + RequestPartitionId theRequestPartitionId) { + return createPredicateReference( + theSourceJoinColumn, + theResourceName, + theParamName, + theQualifiers, + theList, + theOperation, + theRequest, + theRequestPartitionId, + mySqlBuilder); } - public Condition createPredicateReference(@Nullable DbColumn theSourceJoinColumn, - String theResourceName, - String theParamName, - List theQualifiers, - List theList, - SearchFilterParser.CompareOperation theOperation, - RequestDetails theRequest, - RequestPartitionId theRequestPartitionId, SearchQueryBuilder theSqlBuilder) { + public Condition createPredicateReference( + @Nullable DbColumn theSourceJoinColumn, + String theResourceName, + String theParamName, + List theQualifiers, + List theList, + SearchFilterParser.CompareOperation theOperation, + RequestDetails theRequest, + RequestPartitionId theRequestPartitionId, + SearchQueryBuilder theSqlBuilder) { - if ((theOperation != null) && - (theOperation != SearchFilterParser.CompareOperation.eq) && - (theOperation != SearchFilterParser.CompareOperation.ne)) { - throw new InvalidRequestException(Msg.code(1212) + "Invalid operator specified for reference predicate. Supported operators for reference predicate are \"eq\" and \"ne\"."); + if ((theOperation != null) + && (theOperation != SearchFilterParser.CompareOperation.eq) + && (theOperation != SearchFilterParser.CompareOperation.ne)) { + throw new InvalidRequestException( + Msg.code(1212) + + "Invalid operator specified for reference predicate. 
Supported operators for reference predicate are \"eq\" and \"ne\"."); } Boolean isMissing = theList.get(0).getMissing(); if (isMissing != null) { - return createMissingParameterQuery( - new MissingParameterQueryParams( + return createMissingParameterQuery(new MissingParameterQueryParams( theSqlBuilder, RestSearchParameterTypeEnum.REFERENCE, theList, theParamName, theResourceName, theSourceJoinColumn, - theRequestPartitionId - ) - ); + theRequestPartitionId)); } else { - ResourceLinkPredicateBuilder predicateBuilder = createOrReusePredicateBuilder(PredicateBuilderTypeEnum.REFERENCE, theSourceJoinColumn, theParamName, () -> theSqlBuilder.addReferencePredicateBuilder(this, theSourceJoinColumn)).getResult(); - return predicateBuilder.createPredicate(theRequest, theResourceName, theParamName, theQualifiers, theList, theOperation, theRequestPartitionId); + ResourceLinkPredicateBuilder predicateBuilder = createOrReusePredicateBuilder( + PredicateBuilderTypeEnum.REFERENCE, + theSourceJoinColumn, + theParamName, + () -> theSqlBuilder.addReferencePredicateBuilder(this, theSourceJoinColumn)) + .getResult(); + return predicateBuilder.createPredicate( + theRequest, + theResourceName, + theParamName, + theQualifiers, + theList, + theOperation, + theRequestPartitionId); } } @@ -1095,16 +1424,23 @@ public class QueryStack { mySqlBuilder.getSelect().addGroupings(firstPredicateBuilder.getResourceIdColumn()); } - public Condition createPredicateReferenceForEmbeddedChainedSearchResource(@Nullable DbColumn theSourceJoinColumn, - String theResourceName, RuntimeSearchParam theSearchParam, - List theList, SearchFilterParser.CompareOperation theOperation, - RequestDetails theRequest, RequestPartitionId theRequestPartitionId, - EmbeddedChainedSearchModeEnum theEmbeddedChainedSearchModeEnum) { + public Condition createPredicateReferenceForEmbeddedChainedSearchResource( + @Nullable DbColumn theSourceJoinColumn, + String theResourceName, + RuntimeSearchParam theSearchParam, + List theList, + SearchFilterParser.CompareOperation theOperation, + RequestDetails theRequest, + RequestPartitionId theRequestPartitionId, + EmbeddedChainedSearchModeEnum theEmbeddedChainedSearchModeEnum) { - boolean wantChainedAndNormal = theEmbeddedChainedSearchModeEnum == EmbeddedChainedSearchModeEnum.UPLIFTED_AND_REF_JOIN; + boolean wantChainedAndNormal = + theEmbeddedChainedSearchModeEnum == EmbeddedChainedSearchModeEnum.UPLIFTED_AND_REF_JOIN; - // A bit of a hack, but we need to turn off cache reuse while in this method so that we don't try to reuse builders across different subselects - EnumSet cachedReusePredicateBuilderTypes = EnumSet.copyOf(myReusePredicateBuilderTypes); + // A bit of a hack, but we need to turn off cache reuse while in this method so that we don't try to reuse + // builders across different subselects + EnumSet cachedReusePredicateBuilderTypes = + EnumSet.copyOf(myReusePredicateBuilderTypes); if (wantChainedAndNormal) { myReusePredicateBuilderTypes.clear(); } @@ -1142,25 +1478,28 @@ public class QueryStack { // Create a reference link predicates to the subselect for every link but the last one for (String nextLink : nextReferenceLink) { - // We don't want to call createPredicateReference() here, because the whole point is to avoid the recursion. + // We don't want to call createPredicateReference() here, because the whole point is to avoid the + // recursion. // TODO: Are we missing any important business logic from that method? All tests are passing. 
- ResourceLinkPredicateBuilder resourceLinkPredicateBuilder = builder.addReferencePredicateBuilder(this, previousJoinColumn); - builder.addPredicate(resourceLinkPredicateBuilder.createPredicateSourcePaths(Lists.newArrayList(nextLink))); + ResourceLinkPredicateBuilder resourceLinkPredicateBuilder = + builder.addReferencePredicateBuilder(this, previousJoinColumn); + builder.addPredicate( + resourceLinkPredicateBuilder.createPredicateSourcePaths(Lists.newArrayList(nextLink))); previousJoinColumn = resourceLinkPredicateBuilder.getColumnTargetResourceId(); } Condition containedCondition = createIndexPredicate( - previousJoinColumn, - leafNodeDefinition.getLeafTarget(), - leafNodeDefinition.getLeafPathPrefix(), - leafNodeDefinition.getLeafParamName(), - leafNodeDefinition.getParamDefinition(), - leafNodeDefinition.getOrValues(), - theOperation, - leafNodeDefinition.getQualifiers(), - theRequest, - theRequestPartitionId, - builder); + previousJoinColumn, + leafNodeDefinition.getLeafTarget(), + leafNodeDefinition.getLeafPathPrefix(), + leafNodeDefinition.getLeafParamName(), + leafNodeDefinition.getParamDefinition(), + leafNodeDefinition.getOrValues(), + theOperation, + leafNodeDefinition.getQualifiers(), + theRequest, + theRequestPartitionId, + builder); if (wantChainedAndNormal) { builder.addPredicate(containedCondition); @@ -1175,16 +1514,16 @@ public class QueryStack { if (wantChainedAndNormal) { if (theSourceJoinColumn == null) { - retVal = new InCondition(mySqlBuilder.getOrCreateFirstPredicateBuilder(false).getResourceIdColumn(), union); + retVal = new InCondition( + mySqlBuilder.getOrCreateFirstPredicateBuilder(false).getResourceIdColumn(), union); } else { - //-- for the resource link, need join with target_resource_id + // -- for the resource link, need join with target_resource_id retVal = new InCondition(theSourceJoinColumn, union); } } else { retVal = toOrPredicate(predicates); - } // restore the state of this collection to turn caching back on before we exit @@ -1192,93 +1531,164 @@ public class QueryStack { return retVal; } - private void collateChainedSearchOptions(Map, Set> referenceLinks, List nextChain, Set leafNodes, EmbeddedChainedSearchModeEnum theEmbeddedChainedSearchModeEnum) { + private void collateChainedSearchOptions( + Map, Set> referenceLinks, + List nextChain, + Set leafNodes, + EmbeddedChainedSearchModeEnum theEmbeddedChainedSearchModeEnum) { // Manually collapse the chain using all possible variants of contained resource patterns. - // This is a bit excruciating to extend beyond three references. Do we want to find a way to automate this someday? - // Note: the first element in each chain is assumed to be discrete. This may need to change when we add proper support for `_contained` + // This is a bit excruciating to extend beyond three references. Do we want to find a way to automate this + // someday? + // Note: the first element in each chain is assumed to be discrete. 
This may need to change when we add proper + // support for `_contained` if (nextChain.size() == 1) { // discrete -> discrete if (theEmbeddedChainedSearchModeEnum == EmbeddedChainedSearchModeEnum.UPLIFTED_AND_REF_JOIN) { // If !theWantChainedAndNormal that means we're only processing refchains // so the discrete -> contained case is the only one that applies - updateMapOfReferenceLinks(referenceLinks, Lists.newArrayList(nextChain.get(0).getPath()), leafNodes); + updateMapOfReferenceLinks( + referenceLinks, Lists.newArrayList(nextChain.get(0).getPath()), leafNodes); } // discrete -> contained - RuntimeSearchParam firstParamDefinition = leafNodes.iterator().next().getParamDefinition(); - updateMapOfReferenceLinks(referenceLinks, Lists.newArrayList(), - leafNodes - .stream() - .map(t -> t.withPathPrefix(nextChain.get(0).getResourceType(), nextChain.get(0).getSearchParameterName())) - // When we're handling discrete->contained the differences between search - // parameters don't matter. E.g. if we're processing "subject.name=foo" - // the name could be Patient:name or Group:name but it doesn't actually - // matter that these are different since in this case both of these end - // up being an identical search in the string table for "subject.name". - .map(t -> t.withParam(firstParamDefinition)) - .collect(Collectors.toSet())); + RuntimeSearchParam firstParamDefinition = + leafNodes.iterator().next().getParamDefinition(); + updateMapOfReferenceLinks( + referenceLinks, + Lists.newArrayList(), + leafNodes.stream() + .map(t -> t.withPathPrefix( + nextChain.get(0).getResourceType(), + nextChain.get(0).getSearchParameterName())) + // When we're handling discrete->contained the differences between search + // parameters don't matter. E.g. if we're processing "subject.name=foo" + // the name could be Patient:name or Group:name but it doesn't actually + // matter that these are different since in this case both of these end + // up being an identical search in the string table for "subject.name". 
+ .map(t -> t.withParam(firstParamDefinition)) + .collect(Collectors.toSet())); } else if (nextChain.size() == 2) { // discrete -> discrete -> discrete - updateMapOfReferenceLinks(referenceLinks, Lists.newArrayList(nextChain.get(0).getPath(), nextChain.get(1).getPath()), leafNodes); + updateMapOfReferenceLinks( + referenceLinks, + Lists.newArrayList( + nextChain.get(0).getPath(), nextChain.get(1).getPath()), + leafNodes); // discrete -> discrete -> contained - updateMapOfReferenceLinks(referenceLinks, Lists.newArrayList(nextChain.get(0).getPath()), - leafNodes - .stream() - .map(t -> t.withPathPrefix(nextChain.get(1).getResourceType(), nextChain.get(1).getSearchParameterName())) - .collect(Collectors.toSet())); + updateMapOfReferenceLinks( + referenceLinks, + Lists.newArrayList(nextChain.get(0).getPath()), + leafNodes.stream() + .map(t -> t.withPathPrefix( + nextChain.get(1).getResourceType(), + nextChain.get(1).getSearchParameterName())) + .collect(Collectors.toSet())); // discrete -> contained -> discrete - updateMapOfReferenceLinks(referenceLinks, Lists.newArrayList(mergePaths(nextChain.get(0).getPath(), nextChain.get(1).getPath())), leafNodes); + updateMapOfReferenceLinks( + referenceLinks, + Lists.newArrayList(mergePaths( + nextChain.get(0).getPath(), nextChain.get(1).getPath())), + leafNodes); if (myStorageSettings.isIndexOnContainedResourcesRecursively()) { // discrete -> contained -> contained - updateMapOfReferenceLinks(referenceLinks, Lists.newArrayList(), - leafNodes - .stream() - .map(t -> t.withPathPrefix(nextChain.get(0).getResourceType(), nextChain.get(0).getSearchParameterName() + "." + nextChain.get(1).getSearchParameterName())) - .collect(Collectors.toSet())); + updateMapOfReferenceLinks( + referenceLinks, + Lists.newArrayList(), + leafNodes.stream() + .map(t -> t.withPathPrefix( + nextChain.get(0).getResourceType(), + nextChain.get(0).getSearchParameterName() + "." 
+ + nextChain.get(1).getSearchParameterName())) + .collect(Collectors.toSet())); } } else if (nextChain.size() == 3) { // discrete -> discrete -> discrete -> discrete - updateMapOfReferenceLinks(referenceLinks, Lists.newArrayList(nextChain.get(0).getPath(), nextChain.get(1).getPath(), nextChain.get(2).getPath()), leafNodes); + updateMapOfReferenceLinks( + referenceLinks, + Lists.newArrayList( + nextChain.get(0).getPath(), + nextChain.get(1).getPath(), + nextChain.get(2).getPath()), + leafNodes); // discrete -> discrete -> discrete -> contained - updateMapOfReferenceLinks(referenceLinks, Lists.newArrayList(nextChain.get(0).getPath(), nextChain.get(1).getPath()), - leafNodes - .stream() - .map(t -> t.withPathPrefix(nextChain.get(2).getResourceType(), nextChain.get(2).getSearchParameterName())) - .collect(Collectors.toSet())); + updateMapOfReferenceLinks( + referenceLinks, + Lists.newArrayList( + nextChain.get(0).getPath(), nextChain.get(1).getPath()), + leafNodes.stream() + .map(t -> t.withPathPrefix( + nextChain.get(2).getResourceType(), + nextChain.get(2).getSearchParameterName())) + .collect(Collectors.toSet())); // discrete -> discrete -> contained -> discrete - updateMapOfReferenceLinks(referenceLinks, Lists.newArrayList(nextChain.get(0).getPath(), mergePaths(nextChain.get(1).getPath(), nextChain.get(2).getPath())), leafNodes); + updateMapOfReferenceLinks( + referenceLinks, + Lists.newArrayList( + nextChain.get(0).getPath(), + mergePaths( + nextChain.get(1).getPath(), nextChain.get(2).getPath())), + leafNodes); // discrete -> contained -> discrete -> discrete - updateMapOfReferenceLinks(referenceLinks, Lists.newArrayList(mergePaths(nextChain.get(0).getPath(), nextChain.get(1).getPath()), nextChain.get(2).getPath()), leafNodes); + updateMapOfReferenceLinks( + referenceLinks, + Lists.newArrayList( + mergePaths( + nextChain.get(0).getPath(), nextChain.get(1).getPath()), + nextChain.get(2).getPath()), + leafNodes); // discrete -> contained -> discrete -> contained - updateMapOfReferenceLinks(referenceLinks, Lists.newArrayList(mergePaths(nextChain.get(0).getPath(), nextChain.get(1).getPath())), - leafNodes - .stream() - .map(t -> t.withPathPrefix(nextChain.get(2).getResourceType(), nextChain.get(2).getSearchParameterName())) - .collect(Collectors.toSet())); + updateMapOfReferenceLinks( + referenceLinks, + Lists.newArrayList(mergePaths( + nextChain.get(0).getPath(), nextChain.get(1).getPath())), + leafNodes.stream() + .map(t -> t.withPathPrefix( + nextChain.get(2).getResourceType(), + nextChain.get(2).getSearchParameterName())) + .collect(Collectors.toSet())); if (myStorageSettings.isIndexOnContainedResourcesRecursively()) { // discrete -> contained -> contained -> discrete - updateMapOfReferenceLinks(referenceLinks, Lists.newArrayList(mergePaths(nextChain.get(0).getPath(), nextChain.get(1).getPath(), nextChain.get(2).getPath())), leafNodes); + updateMapOfReferenceLinks( + referenceLinks, + Lists.newArrayList(mergePaths( + nextChain.get(0).getPath(), + nextChain.get(1).getPath(), + nextChain.get(2).getPath())), + leafNodes); // discrete -> discrete -> contained -> contained - updateMapOfReferenceLinks(referenceLinks, Lists.newArrayList(nextChain.get(0).getPath()), - leafNodes - .stream() - .map(t -> t.withPathPrefix(nextChain.get(1).getResourceType(), nextChain.get(1).getSearchParameterName() + "." 
+ nextChain.get(2).getSearchParameterName())) - .collect(Collectors.toSet())); + updateMapOfReferenceLinks( + referenceLinks, + Lists.newArrayList(nextChain.get(0).getPath()), + leafNodes.stream() + .map(t -> t.withPathPrefix( + nextChain.get(1).getResourceType(), + nextChain.get(1).getSearchParameterName() + "." + + nextChain.get(2).getSearchParameterName())) + .collect(Collectors.toSet())); // discrete -> contained -> contained -> contained - updateMapOfReferenceLinks(referenceLinks, Lists.newArrayList(), - leafNodes - .stream() - .map(t -> t.withPathPrefix(nextChain.get(0).getResourceType(), nextChain.get(0).getSearchParameterName() + "." + nextChain.get(1).getSearchParameterName() + "." + nextChain.get(2).getSearchParameterName())) - .collect(Collectors.toSet())); + updateMapOfReferenceLinks( + referenceLinks, + Lists.newArrayList(), + leafNodes.stream() + .map(t -> t.withPathPrefix( + nextChain.get(0).getResourceType(), + nextChain.get(0).getSearchParameterName() + "." + + nextChain.get(1).getSearchParameterName() + "." + + nextChain.get(2).getSearchParameterName())) + .collect(Collectors.toSet())); } } else { - // TODO: the chain is too long, it isn't practical to hard-code all the possible patterns. If anyone ever needs this, we should revisit the approach - throw new InvalidRequestException(Msg.code(2011) + - "The search chain is too long. Only chains of up to three references are supported."); + // TODO: the chain is too long, it isn't practical to hard-code all the possible patterns. If anyone ever + // needs this, we should revisit the approach + throw new InvalidRequestException(Msg.code(2011) + + "The search chain is too long. Only chains of up to three references are supported."); } } - private void updateMapOfReferenceLinks(Map, Set> theReferenceLinksMap, ArrayList thePath, Set theLeafNodesToAdd) { + private void updateMapOfReferenceLinks( + Map, Set> theReferenceLinksMap, + ArrayList thePath, + Set theLeafNodesToAdd) { Set leafNodes = theReferenceLinksMap.get(thePath); if (leafNodes == null) { leafNodes = Sets.newHashSet(); @@ -1300,58 +1710,133 @@ public class QueryStack { return result; } - private Condition createIndexPredicate(DbColumn theSourceJoinColumn, String theResourceName, String theSpnamePrefix, String theParamName, RuntimeSearchParam theParamDefinition, ArrayList theOrValues, SearchFilterParser.CompareOperation theOperation, List theQualifiers, RequestDetails theRequest, RequestPartitionId theRequestPartitionId, SearchQueryBuilder theSqlBuilder) { + private Condition createIndexPredicate( + DbColumn theSourceJoinColumn, + String theResourceName, + String theSpnamePrefix, + String theParamName, + RuntimeSearchParam theParamDefinition, + ArrayList theOrValues, + SearchFilterParser.CompareOperation theOperation, + List theQualifiers, + RequestDetails theRequest, + RequestPartitionId theRequestPartitionId, + SearchQueryBuilder theSqlBuilder) { Condition containedCondition; switch (theParamDefinition.getParamType()) { case DATE: - containedCondition = createPredicateDate(theSourceJoinColumn, theResourceName, theSpnamePrefix, theParamDefinition, - theOrValues, theOperation, theRequestPartitionId, theSqlBuilder); + containedCondition = createPredicateDate( + theSourceJoinColumn, + theResourceName, + theSpnamePrefix, + theParamDefinition, + theOrValues, + theOperation, + theRequestPartitionId, + theSqlBuilder); break; case NUMBER: - containedCondition = createPredicateNumber(theSourceJoinColumn, theResourceName, theSpnamePrefix, theParamDefinition, - theOrValues, 
theOperation, theRequestPartitionId, theSqlBuilder); + containedCondition = createPredicateNumber( + theSourceJoinColumn, + theResourceName, + theSpnamePrefix, + theParamDefinition, + theOrValues, + theOperation, + theRequestPartitionId, + theSqlBuilder); break; case QUANTITY: - containedCondition = createPredicateQuantity(theSourceJoinColumn, theResourceName, theSpnamePrefix, theParamDefinition, - theOrValues, theOperation, theRequestPartitionId, theSqlBuilder); + containedCondition = createPredicateQuantity( + theSourceJoinColumn, + theResourceName, + theSpnamePrefix, + theParamDefinition, + theOrValues, + theOperation, + theRequestPartitionId, + theSqlBuilder); break; case STRING: - containedCondition = createPredicateString(theSourceJoinColumn, theResourceName, theSpnamePrefix, theParamDefinition, - theOrValues, theOperation, theRequestPartitionId, theSqlBuilder); + containedCondition = createPredicateString( + theSourceJoinColumn, + theResourceName, + theSpnamePrefix, + theParamDefinition, + theOrValues, + theOperation, + theRequestPartitionId, + theSqlBuilder); break; case TOKEN: - containedCondition = createPredicateToken(theSourceJoinColumn, theResourceName, theSpnamePrefix, theParamDefinition, - theOrValues, theOperation, theRequestPartitionId, theSqlBuilder); + containedCondition = createPredicateToken( + theSourceJoinColumn, + theResourceName, + theSpnamePrefix, + theParamDefinition, + theOrValues, + theOperation, + theRequestPartitionId, + theSqlBuilder); break; case COMPOSITE: - containedCondition = createPredicateComposite(theSourceJoinColumn, theResourceName, theSpnamePrefix, theParamDefinition, - theOrValues, theRequestPartitionId, theSqlBuilder); + containedCondition = createPredicateComposite( + theSourceJoinColumn, + theResourceName, + theSpnamePrefix, + theParamDefinition, + theOrValues, + theRequestPartitionId, + theSqlBuilder); break; case URI: - containedCondition = createPredicateUri(theSourceJoinColumn, theResourceName, theSpnamePrefix, theParamDefinition, - theOrValues, theOperation, theRequest, theRequestPartitionId, theSqlBuilder); + containedCondition = createPredicateUri( + theSourceJoinColumn, + theResourceName, + theSpnamePrefix, + theParamDefinition, + theOrValues, + theOperation, + theRequest, + theRequestPartitionId, + theSqlBuilder); break; case REFERENCE: - containedCondition = createPredicateReference(theSourceJoinColumn, theResourceName, isBlank(theSpnamePrefix) ? theParamName : theSpnamePrefix + "." + theParamName, theQualifiers, - theOrValues, theOperation, theRequest, theRequestPartitionId, theSqlBuilder); + containedCondition = createPredicateReference( + theSourceJoinColumn, + theResourceName, + isBlank(theSpnamePrefix) ? theParamName : theSpnamePrefix + "." 
+ theParamName, + theQualifiers, + theOrValues, + theOperation, + theRequest, + theRequestPartitionId, + theSqlBuilder); break; case HAS: case SPECIAL: default: throw new InvalidRequestException( - Msg.code(1215) + "The search type:" + theParamDefinition.getParamType() + " is not supported."); + Msg.code(1215) + "The search type:" + theParamDefinition.getParamType() + " is not supported."); } return containedCondition; } @Nullable - public Condition createPredicateResourceId(@Nullable DbColumn theSourceJoinColumn, List> theValues, String theResourceName, SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId) { + public Condition createPredicateResourceId( + @Nullable DbColumn theSourceJoinColumn, + List> theValues, + String theResourceName, + SearchFilterParser.CompareOperation theOperation, + RequestPartitionId theRequestPartitionId) { ResourceIdPredicateBuilder builder = mySqlBuilder.newResourceIdBuilder(); - return builder.createPredicateResourceId(theSourceJoinColumn, theResourceName, theValues, theOperation, theRequestPartitionId); + return builder.createPredicateResourceId( + theSourceJoinColumn, theResourceName, theValues, theOperation, theRequestPartitionId); } - private Condition createPredicateSourceForAndList(@Nullable DbColumn theSourceJoinColumn, List> theAndOrParams) { + private Condition createPredicateSourceForAndList( + @Nullable DbColumn theSourceJoinColumn, List> theAndOrParams) { mySqlBuilder.getOrCreateFirstPredicateBuilder(); List andPredicates = new ArrayList<>(theAndOrParams.size()); @@ -1361,13 +1846,20 @@ public class QueryStack { return toAndPredicate(andPredicates); } - private Condition createPredicateSource(@Nullable DbColumn theSourceJoinColumn, List theList) { - if (myStorageSettings.getStoreMetaSourceInformation() == JpaStorageSettings.StoreMetaSourceInformationEnum.NONE) { + private Condition createPredicateSource( + @Nullable DbColumn theSourceJoinColumn, List theList) { + if (myStorageSettings.getStoreMetaSourceInformation() + == JpaStorageSettings.StoreMetaSourceInformationEnum.NONE) { String msg = myFhirContext.getLocalizer().getMessage(QueryStack.class, "sourceParamDisabled"); throw new InvalidRequestException(Msg.code(1216) + msg); } - SourcePredicateBuilder join = createOrReusePredicateBuilder(PredicateBuilderTypeEnum.SOURCE, theSourceJoinColumn, Constants.PARAM_SOURCE, () -> mySqlBuilder.addSourcePredicateBuilder(theSourceJoinColumn)).getResult(); + SourcePredicateBuilder join = createOrReusePredicateBuilder( + PredicateBuilderTypeEnum.SOURCE, + theSourceJoinColumn, + Constants.PARAM_SOURCE, + () -> mySqlBuilder.addSourcePredicateBuilder(theSourceJoinColumn)) + .getResult(); List orPredicates = new ArrayList<>(); for (IQueryParameterType nextParameter : theList) { @@ -1376,9 +1868,7 @@ public class QueryStack { String requestId = sourceParameter.getRequestId(); if (isNotBlank(sourceUri) && isNotBlank(requestId)) { orPredicates.add(toAndPredicate( - join.createPredicateSourceUri(sourceUri), - join.createPredicateRequestId(requestId) - )); + join.createPredicateSourceUri(sourceUri), join.createPredicateRequestId(requestId))); } else if (isNotBlank(sourceUri)) { orPredicates.add(join.createPredicateSourceUri(sourceUri)); } else if (isNotBlank(requestId)) { @@ -1389,45 +1879,70 @@ public class QueryStack { return toOrPredicate(orPredicates); } - public Condition createPredicateString(@Nullable DbColumn theSourceJoinColumn, String theResourceName, - String theSpnamePrefix, RuntimeSearchParam theSearchParam, List 
theList, - SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId) { - return createPredicateString(theSourceJoinColumn, theResourceName, theSpnamePrefix, theSearchParam, theList, theOperation, theRequestPartitionId, mySqlBuilder); + public Condition createPredicateString( + @Nullable DbColumn theSourceJoinColumn, + String theResourceName, + String theSpnamePrefix, + RuntimeSearchParam theSearchParam, + List theList, + SearchFilterParser.CompareOperation theOperation, + RequestPartitionId theRequestPartitionId) { + return createPredicateString( + theSourceJoinColumn, + theResourceName, + theSpnamePrefix, + theSearchParam, + theList, + theOperation, + theRequestPartitionId, + mySqlBuilder); } - public Condition createPredicateString(@Nullable DbColumn theSourceJoinColumn, String theResourceName, - String theSpnamePrefix, RuntimeSearchParam theSearchParam, List theList, - SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId, - SearchQueryBuilder theSqlBuilder) { + public Condition createPredicateString( + @Nullable DbColumn theSourceJoinColumn, + String theResourceName, + String theSpnamePrefix, + RuntimeSearchParam theSearchParam, + List theList, + SearchFilterParser.CompareOperation theOperation, + RequestPartitionId theRequestPartitionId, + SearchQueryBuilder theSqlBuilder) { Boolean isMissing = theList.get(0).getMissing(); String paramName = getParamNameWithPrefix(theSpnamePrefix, theSearchParam.getName()); if (isMissing != null) { - return createMissingParameterQuery( - new MissingParameterQueryParams( + return createMissingParameterQuery(new MissingParameterQueryParams( theSqlBuilder, theSearchParam.getParamType(), theList, paramName, theResourceName, theSourceJoinColumn, - theRequestPartitionId - ) - ); + theRequestPartitionId)); } - StringPredicateBuilder join = createOrReusePredicateBuilder(PredicateBuilderTypeEnum.STRING, theSourceJoinColumn, paramName, () -> theSqlBuilder.addStringPredicateBuilder(theSourceJoinColumn)).getResult(); + StringPredicateBuilder join = createOrReusePredicateBuilder( + PredicateBuilderTypeEnum.STRING, + theSourceJoinColumn, + paramName, + () -> theSqlBuilder.addStringPredicateBuilder(theSourceJoinColumn)) + .getResult(); List codePredicates = new ArrayList<>(); for (IQueryParameterType nextOr : theList) { - Condition singleCode = join.createPredicateString(nextOr, theResourceName, theSpnamePrefix, theSearchParam, join, theOperation); + Condition singleCode = join.createPredicateString( + nextOr, theResourceName, theSpnamePrefix, theSearchParam, join, theOperation); codePredicates.add(singleCode); } return join.combineWithRequestPartitionIdPredicate(theRequestPartitionId, toOrPredicate(codePredicates)); } - public Condition createPredicateTag(@Nullable DbColumn theSourceJoinColumn, List> theList, String theParamName, RequestPartitionId theRequestPartitionId) { + public Condition createPredicateTag( + @Nullable DbColumn theSourceJoinColumn, + List> theList, + String theParamName, + RequestPartitionId theRequestPartitionId) { TagTypeEnum tagType; if (Constants.PARAM_TAG.equals(theParamName)) { tagType = TagTypeEnum.TAG; @@ -1457,7 +1972,8 @@ public class QueryStack { SearchQueryBuilder sqlBuilder = mySqlBuilder.newChildSqlBuilder(); TagPredicateBuilder tagSelector = sqlBuilder.addTagPredicateBuilder(null); - sqlBuilder.addPredicate(tagSelector.createPredicateTag(tagType, tokens, theParamName, theRequestPartitionId)); + sqlBuilder.addPredicate( + tagSelector.createPredicateTag(tagType, 
tokens, theParamName, theRequestPartitionId)); SelectQuery sql = sqlBuilder.getSelect(); join = mySqlBuilder.getOrCreateFirstPredicateBuilder(); @@ -1465,10 +1981,16 @@ public class QueryStack { tagPredicate = new InCondition(join.getResourceIdColumn(), subSelect).setNegate(true); } else { - // Tag table can't be a query root because it will include deleted resources, and can't select by resource type + // Tag table can't be a query root because it will include deleted resources, and can't select by + // resource type mySqlBuilder.getOrCreateFirstPredicateBuilder(); - TagPredicateBuilder tagJoin = createOrReusePredicateBuilder(PredicateBuilderTypeEnum.TAG, theSourceJoinColumn, theParamName, () -> mySqlBuilder.addTagPredicateBuilder(theSourceJoinColumn)).getResult(); + TagPredicateBuilder tagJoin = createOrReusePredicateBuilder( + PredicateBuilderTypeEnum.TAG, + theSourceJoinColumn, + theParamName, + () -> mySqlBuilder.addTagPredicateBuilder(theSourceJoinColumn)) + .getResult(); tagPredicate = tagJoin.createPredicateTag(tagType, tokens, theParamName, theRequestPartitionId); join = tagJoin; } @@ -1479,7 +2001,8 @@ public class QueryStack { return toAndPredicate(andPredicates); } - private boolean populateTokens(List> theTokens, List theAndParams) { + private boolean populateTokens( + List> theTokens, List theAndParams) { boolean paramInverted = false; for (IQueryParameterType nextOrParam : theAndParams) { @@ -1513,7 +2036,8 @@ public class QueryStack { return true; } if (isNotBlank(nextParam.getSystem())) { - throw new TokenParamFormatInvalidRequestException(Msg.code(1218), theParamName, nextParam.getValueAsQueryToken(myFhirContext)); + throw new TokenParamFormatInvalidRequestException( + Msg.code(1218), theParamName, nextParam.getValueAsQueryToken(myFhirContext)); } } @@ -1526,15 +2050,34 @@ public class QueryStack { return false; } - public Condition createPredicateToken(@Nullable DbColumn theSourceJoinColumn, String theResourceName, - String theSpnamePrefix, RuntimeSearchParam theSearchParam, List theList, - SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId) { - return createPredicateToken(theSourceJoinColumn, theResourceName, theSpnamePrefix, theSearchParam, theList, theOperation, theRequestPartitionId, mySqlBuilder); + public Condition createPredicateToken( + @Nullable DbColumn theSourceJoinColumn, + String theResourceName, + String theSpnamePrefix, + RuntimeSearchParam theSearchParam, + List theList, + SearchFilterParser.CompareOperation theOperation, + RequestPartitionId theRequestPartitionId) { + return createPredicateToken( + theSourceJoinColumn, + theResourceName, + theSpnamePrefix, + theSearchParam, + theList, + theOperation, + theRequestPartitionId, + mySqlBuilder); } - public Condition createPredicateToken(@Nullable DbColumn theSourceJoinColumn, String theResourceName, - String theSpnamePrefix, RuntimeSearchParam theSearchParam, List theList, - SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId, SearchQueryBuilder theSqlBuilder) { + public Condition createPredicateToken( + @Nullable DbColumn theSourceJoinColumn, + String theResourceName, + String theSpnamePrefix, + RuntimeSearchParam theSearchParam, + List theList, + SearchFilterParser.CompareOperation theOperation, + RequestPartitionId theRequestPartitionId, + SearchQueryBuilder theSqlBuilder) { List tokens = new ArrayList<>(); @@ -1548,17 +2091,31 @@ public class QueryStack { if (id.isText()) { // Check whether the :text modifier is actually 
enabled here - boolean tokenTextIndexingEnabled = BaseSearchParamExtractor.tokenTextIndexingEnabledForSearchParam(myStorageSettings, theSearchParam); + boolean tokenTextIndexingEnabled = + BaseSearchParamExtractor.tokenTextIndexingEnabledForSearchParam( + myStorageSettings, theSearchParam); if (!tokenTextIndexingEnabled) { String msg; if (myStorageSettings.isSuppressStringIndexingInTokens()) { - msg = myFhirContext.getLocalizer().getMessage(QueryStack.class, "textModifierDisabledForServer"); + msg = myFhirContext + .getLocalizer() + .getMessage(QueryStack.class, "textModifierDisabledForServer"); } else { - msg = myFhirContext.getLocalizer().getMessage(QueryStack.class, "textModifierDisabledForSearchParam"); + msg = myFhirContext + .getLocalizer() + .getMessage(QueryStack.class, "textModifierDisabledForSearchParam"); } throw new MethodNotAllowedException(Msg.code(1219) + msg); } - return createPredicateString(theSourceJoinColumn, theResourceName, theSpnamePrefix, theSearchParam, theList, null, theRequestPartitionId, theSqlBuilder); + return createPredicateString( + theSourceJoinColumn, + theResourceName, + theSpnamePrefix, + theSearchParam, + theList, + null, + theRequestPartitionId, + theSqlBuilder); } modifier = id.getModifier(); @@ -1586,7 +2143,8 @@ public class QueryStack { if (paramInverted) { SearchQueryBuilder sqlBuilder = theSqlBuilder.newChildSqlBuilder(); TokenPredicateBuilder tokenSelector = sqlBuilder.addTokenPredicateBuilder(null); - sqlBuilder.addPredicate(tokenSelector.createPredicateToken(tokens, theResourceName, theSpnamePrefix, theSearchParam, theRequestPartitionId)); + sqlBuilder.addPredicate(tokenSelector.createPredicateToken( + tokens, theResourceName, theSpnamePrefix, theSearchParam, theRequestPartitionId)); SelectQuery sql = sqlBuilder.getSelect(); Expression subSelect = new Subquery(sql); @@ -1595,62 +2153,82 @@ public class QueryStack { if (theSourceJoinColumn == null) { predicate = new InCondition(join.getResourceIdColumn(), subSelect).setNegate(true); } else { - //-- for the resource link, need join with target_resource_id + // -- for the resource link, need join with target_resource_id predicate = new InCondition(theSourceJoinColumn, subSelect).setNegate(true); } } else { Boolean isMissing = theList.get(0).getMissing(); if (isMissing != null) { - return createMissingParameterQuery( - new MissingParameterQueryParams( + return createMissingParameterQuery(new MissingParameterQueryParams( theSqlBuilder, theSearchParam.getParamType(), theList, paramName, theResourceName, theSourceJoinColumn, - theRequestPartitionId - ) - ); + theRequestPartitionId)); } - TokenPredicateBuilder tokenJoin = createOrReusePredicateBuilder(PredicateBuilderTypeEnum.TOKEN, theSourceJoinColumn, paramName, () -> theSqlBuilder.addTokenPredicateBuilder(theSourceJoinColumn)).getResult(); + TokenPredicateBuilder tokenJoin = createOrReusePredicateBuilder( + PredicateBuilderTypeEnum.TOKEN, + theSourceJoinColumn, + paramName, + () -> theSqlBuilder.addTokenPredicateBuilder(theSourceJoinColumn)) + .getResult(); - predicate = tokenJoin.createPredicateToken(tokens, theResourceName, theSpnamePrefix, theSearchParam, theOperation, theRequestPartitionId); + predicate = tokenJoin.createPredicateToken( + tokens, theResourceName, theSpnamePrefix, theSearchParam, theOperation, theRequestPartitionId); join = tokenJoin; } return join.combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate); } - public Condition createPredicateUri(@Nullable DbColumn theSourceJoinColumn, String theResourceName, - 
String theSpnamePrefix, RuntimeSearchParam theSearchParam, List theList, - SearchFilterParser.CompareOperation theOperation, RequestDetails theRequestDetails, - RequestPartitionId theRequestPartitionId) { - return createPredicateUri(theSourceJoinColumn, theResourceName, theSpnamePrefix, theSearchParam, theList, theOperation, theRequestDetails, theRequestPartitionId, mySqlBuilder); + public Condition createPredicateUri( + @Nullable DbColumn theSourceJoinColumn, + String theResourceName, + String theSpnamePrefix, + RuntimeSearchParam theSearchParam, + List theList, + SearchFilterParser.CompareOperation theOperation, + RequestDetails theRequestDetails, + RequestPartitionId theRequestPartitionId) { + return createPredicateUri( + theSourceJoinColumn, + theResourceName, + theSpnamePrefix, + theSearchParam, + theList, + theOperation, + theRequestDetails, + theRequestPartitionId, + mySqlBuilder); } - public Condition createPredicateUri(@Nullable DbColumn theSourceJoinColumn, String theResourceName, - String theSpnamePrefix, RuntimeSearchParam theSearchParam, List theList, - SearchFilterParser.CompareOperation theOperation, RequestDetails theRequestDetails, - RequestPartitionId theRequestPartitionId, SearchQueryBuilder theSqlBuilder) { + public Condition createPredicateUri( + @Nullable DbColumn theSourceJoinColumn, + String theResourceName, + String theSpnamePrefix, + RuntimeSearchParam theSearchParam, + List theList, + SearchFilterParser.CompareOperation theOperation, + RequestDetails theRequestDetails, + RequestPartitionId theRequestPartitionId, + SearchQueryBuilder theSqlBuilder) { String paramName = getParamNameWithPrefix(theSpnamePrefix, theSearchParam.getName()); Boolean isMissing = theList.get(0).getMissing(); if (isMissing != null) { - return createMissingParameterQuery( - new MissingParameterQueryParams( + return createMissingParameterQuery(new MissingParameterQueryParams( theSqlBuilder, theSearchParam.getParamType(), theList, paramName, theResourceName, theSourceJoinColumn, - theRequestPartitionId - ) - ); + theRequestPartitionId)); } else { UriPredicateBuilder join = theSqlBuilder.addUriPredicateBuilder(theSourceJoinColumn); @@ -1660,11 +2238,25 @@ public class QueryStack { } public QueryStack newChildQueryFactoryWithFullBuilderReuse() { - return new QueryStack(mySearchParameters, myStorageSettings, myFhirContext, mySqlBuilder, mySearchParamRegistry, myPartitionSettings, EnumSet.allOf(PredicateBuilderTypeEnum.class)); + return new QueryStack( + mySearchParameters, + myStorageSettings, + myFhirContext, + mySqlBuilder, + mySearchParamRegistry, + myPartitionSettings, + EnumSet.allOf(PredicateBuilderTypeEnum.class)); } @Nullable - public Condition searchForIdsWithAndOr(@Nullable DbColumn theSourceJoinColumn, String theResourceName, String theParamName, List> theAndOrParams, RequestDetails theRequest, RequestPartitionId theRequestPartitionId, SearchContainedModeEnum theSearchContainedMode) { + public Condition searchForIdsWithAndOr( + @Nullable DbColumn theSourceJoinColumn, + String theResourceName, + String theParamName, + List> theAndOrParams, + RequestDetails theRequest, + RequestPartitionId theRequestPartitionId, + SearchContainedModeEnum theSearchContainedMode) { if (theAndOrParams.isEmpty()) { return null; @@ -1672,16 +2264,24 @@ public class QueryStack { switch (theParamName) { case IAnyResource.SP_RES_ID: - return createPredicateResourceId(theSourceJoinColumn, theAndOrParams, theResourceName, null, theRequestPartitionId); + return createPredicateResourceId( + theSourceJoinColumn, 
theAndOrParams, theResourceName, null, theRequestPartitionId); case PARAM_HAS: - return createPredicateHas(theSourceJoinColumn, theResourceName, theAndOrParams, theRequest, theRequestPartitionId); + return createPredicateHas( + theSourceJoinColumn, theResourceName, theAndOrParams, theRequest, theRequestPartitionId); case Constants.PARAM_TAG: case Constants.PARAM_PROFILE: case Constants.PARAM_SECURITY: if (myStorageSettings.getTagStorageMode() == JpaStorageSettings.TagStorageModeEnum.INLINE) { - return createPredicateSearchParameter(theSourceJoinColumn, theResourceName, theParamName, theAndOrParams, theRequest, theRequestPartitionId); + return createPredicateSearchParameter( + theSourceJoinColumn, + theResourceName, + theParamName, + theAndOrParams, + theRequest, + theRequestPartitionId); } else { return createPredicateTag(theSourceJoinColumn, theAndOrParams, theParamName, theRequestPartitionId); } @@ -1690,19 +2290,32 @@ public class QueryStack { return createPredicateSourceForAndList(theSourceJoinColumn, theAndOrParams); default: - return createPredicateSearchParameter(theSourceJoinColumn, theResourceName, theParamName, theAndOrParams, theRequest, theRequestPartitionId); + return createPredicateSearchParameter( + theSourceJoinColumn, + theResourceName, + theParamName, + theAndOrParams, + theRequest, + theRequestPartitionId); } } @Nullable - private Condition createPredicateSearchParameter(@Nullable DbColumn theSourceJoinColumn, String theResourceName, String theParamName, List> theAndOrParams, RequestDetails theRequest, RequestPartitionId theRequestPartitionId) { + private Condition createPredicateSearchParameter( + @Nullable DbColumn theSourceJoinColumn, + String theResourceName, + String theParamName, + List> theAndOrParams, + RequestDetails theRequest, + RequestPartitionId theRequestPartitionId) { List andPredicates = new ArrayList<>(); RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam(theResourceName, theParamName); if (nextParamDef != null) { if (myPartitionSettings.isPartitioningEnabled() && myPartitionSettings.isIncludePartitionInSearchHashes()) { if (theRequestPartitionId.isAllPartitions()) { - throw new PreconditionFailedException(Msg.code(1220) + "This server is not configured to support search against all partitions"); + throw new PreconditionFailedException( + Msg.code(1220) + "This server is not configured to support search against all partitions"); } } @@ -1716,7 +2329,14 @@ public class QueryStack { DateParam param = (DateParam) nextAnd.get(0); operation = toOperation(param.getPrefix()); } - andPredicates.add(createPredicateDate(theSourceJoinColumn, theResourceName, null, nextParamDef, nextAnd, operation, theRequestPartitionId)); + andPredicates.add(createPredicateDate( + theSourceJoinColumn, + theResourceName, + null, + nextParamDef, + nextAnd, + operation, + theRequestPartitionId)); } break; case QUANTITY: @@ -1726,7 +2346,14 @@ public class QueryStack { QuantityParam param = (QuantityParam) nextAnd.get(0); operation = toOperation(param.getPrefix()); } - andPredicates.add(createPredicateQuantity(theSourceJoinColumn, theResourceName, null, nextParamDef, nextAnd, operation, theRequestPartitionId)); + andPredicates.add(createPredicateQuantity( + theSourceJoinColumn, + theResourceName, + null, + nextParamDef, + nextAnd, + operation, + theRequestPartitionId)); } break; case REFERENCE: @@ -1734,52 +2361,125 @@ public class QueryStack { // Handle Search Parameters where the name is a full chain // (e.g. 
SearchParameter with name=composition.patient.identifier) - if (handleFullyChainedParameter(theSourceJoinColumn, theResourceName, theParamName, theRequest, theRequestPartitionId, andPredicates, nextAnd)) { + if (handleFullyChainedParameter( + theSourceJoinColumn, + theResourceName, + theParamName, + theRequest, + theRequestPartitionId, + andPredicates, + nextAnd)) { break; } - EmbeddedChainedSearchModeEnum embeddedChainedSearchModeEnum = isEligibleForEmbeddedChainedResourceSearch(theResourceName, theParamName, nextAnd); + EmbeddedChainedSearchModeEnum embeddedChainedSearchModeEnum = + isEligibleForEmbeddedChainedResourceSearch(theResourceName, theParamName, nextAnd); if (embeddedChainedSearchModeEnum == EmbeddedChainedSearchModeEnum.REF_JOIN_ONLY) { - andPredicates.add(createPredicateReference(theSourceJoinColumn, theResourceName, theParamName, new ArrayList<>(), nextAnd, null, theRequest, theRequestPartitionId)); + andPredicates.add(createPredicateReference( + theSourceJoinColumn, + theResourceName, + theParamName, + new ArrayList<>(), + nextAnd, + null, + theRequest, + theRequestPartitionId)); } else { - andPredicates.add(createPredicateReferenceForEmbeddedChainedSearchResource(theSourceJoinColumn, theResourceName, nextParamDef, nextAnd, null, theRequest, theRequestPartitionId, embeddedChainedSearchModeEnum)); + andPredicates.add(createPredicateReferenceForEmbeddedChainedSearchResource( + theSourceJoinColumn, + theResourceName, + nextParamDef, + nextAnd, + null, + theRequest, + theRequestPartitionId, + embeddedChainedSearchModeEnum)); } } break; case STRING: for (List nextAnd : theAndOrParams) { - andPredicates.add(createPredicateString(theSourceJoinColumn, theResourceName, null, nextParamDef, nextAnd, SearchFilterParser.CompareOperation.sw, theRequestPartitionId)); + andPredicates.add(createPredicateString( + theSourceJoinColumn, + theResourceName, + null, + nextParamDef, + nextAnd, + SearchFilterParser.CompareOperation.sw, + theRequestPartitionId)); } break; case TOKEN: for (List nextAnd : theAndOrParams) { if (LOCATION_POSITION.equals(nextParamDef.getPath())) { - andPredicates.add(createPredicateCoords(theSourceJoinColumn, theResourceName, null, nextParamDef, nextAnd, theRequestPartitionId, mySqlBuilder)); + andPredicates.add(createPredicateCoords( + theSourceJoinColumn, + theResourceName, + null, + nextParamDef, + nextAnd, + theRequestPartitionId, + mySqlBuilder)); } else { - andPredicates.add(createPredicateToken(theSourceJoinColumn, theResourceName, null, nextParamDef, nextAnd, null, theRequestPartitionId)); + andPredicates.add(createPredicateToken( + theSourceJoinColumn, + theResourceName, + null, + nextParamDef, + nextAnd, + null, + theRequestPartitionId)); } } break; case NUMBER: for (List nextAnd : theAndOrParams) { - andPredicates.add(createPredicateNumber(theSourceJoinColumn, theResourceName, null, nextParamDef, nextAnd, null, theRequestPartitionId)); + andPredicates.add(createPredicateNumber( + theSourceJoinColumn, + theResourceName, + null, + nextParamDef, + nextAnd, + null, + theRequestPartitionId)); } break; case COMPOSITE: for (List nextAnd : theAndOrParams) { - andPredicates.add(createPredicateComposite(theSourceJoinColumn, theResourceName, null, nextParamDef, nextAnd, theRequestPartitionId)); + andPredicates.add(createPredicateComposite( + theSourceJoinColumn, + theResourceName, + null, + nextParamDef, + nextAnd, + theRequestPartitionId)); } break; case URI: for (List nextAnd : theAndOrParams) { - andPredicates.add(createPredicateUri(theSourceJoinColumn, 
theResourceName, null, nextParamDef, nextAnd, SearchFilterParser.CompareOperation.eq, theRequest, theRequestPartitionId)); + andPredicates.add(createPredicateUri( + theSourceJoinColumn, + theResourceName, + null, + nextParamDef, + nextAnd, + SearchFilterParser.CompareOperation.eq, + theRequest, + theRequestPartitionId)); } break; case HAS: case SPECIAL: for (List nextAnd : theAndOrParams) { if (LOCATION_POSITION.equals(nextParamDef.getPath())) { - andPredicates.add(createPredicateCoords(theSourceJoinColumn, theResourceName, null, nextParamDef, nextAnd, theRequestPartitionId, mySqlBuilder)); + andPredicates.add(createPredicateCoords( + theSourceJoinColumn, + theResourceName, + null, + nextParamDef, + nextAnd, + theRequestPartitionId, + mySqlBuilder)); } } break; @@ -1791,20 +2491,24 @@ public class QueryStack { // Parse the predicates enumerated in the _filter separated by AND or OR... if (theAndOrParams.get(0).get(0) instanceof StringParam) { - String filterString = ((StringParam) theAndOrParams.get(0).get(0)).getValue(); + String filterString = + ((StringParam) theAndOrParams.get(0).get(0)).getValue(); SearchFilterParser.BaseFilter filter; try { filter = SearchFilterParser.parse(filterString); } catch (SearchFilterParser.FilterSyntaxException theE) { - throw new InvalidRequestException(Msg.code(1221) + "Error parsing _filter syntax: " + theE.getMessage()); + throw new InvalidRequestException( + Msg.code(1221) + "Error parsing _filter syntax: " + theE.getMessage()); } if (filter != null) { if (!myStorageSettings.isFilterParameterEnabled()) { - throw new InvalidRequestException(Msg.code(1222) + Constants.PARAM_FILTER + " parameter is disabled on this server"); + throw new InvalidRequestException(Msg.code(1222) + Constants.PARAM_FILTER + + " parameter is disabled on this server"); } - Condition predicate = createPredicateFilter(this, filter, theResourceName, theRequest, theRequestPartitionId); + Condition predicate = createPredicateFilter( + this, filter, theResourceName, theRequest, theRequestPartitionId); if (predicate != null) { mySqlBuilder.addPredicate(predicate); } @@ -1812,7 +2516,14 @@ public class QueryStack { } } else { - String msg = myFhirContext.getLocalizer().getMessageSanitized(BaseStorageDao.class, "invalidSearchParameter", theParamName, theResourceName, mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(theResourceName)); + String msg = myFhirContext + .getLocalizer() + .getMessageSanitized( + BaseStorageDao.class, + "invalidSearchParameter", + theParamName, + theResourceName, + mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(theResourceName)); throw new InvalidRequestException(Msg.code(1223) + msg); } } @@ -1832,19 +2543,27 @@ public class QueryStack { * @return Returns {@literal true} if the search parameter was handled * by this method */ - private boolean handleFullyChainedParameter(@Nullable DbColumn theSourceJoinColumn, String theResourceName, String theParamName, RequestDetails theRequest, RequestPartitionId theRequestPartitionId, List andPredicates, List nextAnd) { + private boolean handleFullyChainedParameter( + @Nullable DbColumn theSourceJoinColumn, + String theResourceName, + String theParamName, + RequestDetails theRequest, + RequestPartitionId theRequestPartitionId, + List andPredicates, + List nextAnd) { if (!nextAnd.isEmpty() && nextAnd.get(0) instanceof ReferenceParam) { ReferenceParam param = (ReferenceParam) nextAnd.get(0); if (isNotBlank(param.getChain())) { String fullName = theParamName + "." 
+ param.getChain(); - RuntimeSearchParam fullChainParam = mySearchParamRegistry.getActiveSearchParam(theResourceName, fullName); + RuntimeSearchParam fullChainParam = + mySearchParamRegistry.getActiveSearchParam(theResourceName, fullName); if (fullChainParam != null) { - List swappedParamTypes = nextAnd - .stream() - .map(t -> toParameterType(fullChainParam, null, t.getValueAsQueryToken(myFhirContext))) - .collect(Collectors.toList()); + List swappedParamTypes = nextAnd.stream() + .map(t -> toParameterType(fullChainParam, null, t.getValueAsQueryToken(myFhirContext))) + .collect(Collectors.toList()); List> params = List.of(swappedParamTypes); - Condition predicate = createPredicateSearchParameter(theSourceJoinColumn, theResourceName, fullName, params, theRequest, theRequestPartitionId); + Condition predicate = createPredicateSearchParameter( + theSourceJoinColumn, theResourceName, fullName, params, theRequest, theRequestPartitionId); andPredicates.add(predicate); return true; } @@ -1884,7 +2603,8 @@ public class QueryStack { * resources. * */ - private EmbeddedChainedSearchModeEnum isEligibleForEmbeddedChainedResourceSearch(String theResourceType, String theParameterName, List theParameter) { + private EmbeddedChainedSearchModeEnum isEligibleForEmbeddedChainedResourceSearch( + String theResourceType, String theParameterName, List theParameter) { boolean indexOnContainedResources = myStorageSettings.isIndexOnContainedResources(); boolean indexOnUpliftedRefchains = myStorageSettings.isIndexOnUpliftedRefchains(); @@ -1893,18 +2613,20 @@ public class QueryStack { } boolean haveUpliftCandidates = theParameter.stream() - .filter(t -> t instanceof ReferenceParam) - .map(t -> ((ReferenceParam) t).getChain()) - .filter(StringUtils::isNotBlank) - // Chains on _has can't be indexed for contained searches - At least not yet. It's not clear to me if we ever want to support this, it would be really hard to do. - .filter(t -> !t.startsWith(PARAM_HAS + ":")) - .anyMatch(t -> { - if (indexOnContainedResources) { - return true; - } - RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(theResourceType, theParameterName); - return param != null && param.hasUpliftRefchain(t); - }); + .filter(t -> t instanceof ReferenceParam) + .map(t -> ((ReferenceParam) t).getChain()) + .filter(StringUtils::isNotBlank) + // Chains on _has can't be indexed for contained searches - At least not yet. It's not clear to me if we + // ever want to support this, it would be really hard to do. 
+ .filter(t -> !t.startsWith(PARAM_HAS + ":")) + .anyMatch(t -> { + if (indexOnContainedResources) { + return true; + } + RuntimeSearchParam param = + mySearchParamRegistry.getActiveSearchParam(theResourceType, theParameterName); + return param != null && param.hasUpliftRefchain(t); + }); if (haveUpliftCandidates) { if (indexOnContainedResources) { @@ -1914,7 +2636,6 @@ public class QueryStack { } else { return EmbeddedChainedSearchModeEnum.REF_JOIN_ONLY; } - } public void addPredicateCompositeUnique(String theIndexString, RequestPartitionId theRequestPartitionId) { @@ -1924,19 +2645,23 @@ public class QueryStack { } public void addPredicateCompositeNonUnique(String theIndexString, RequestPartitionId theRequestPartitionId) { - ComboNonUniqueSearchParameterPredicateBuilder predicateBuilder = mySqlBuilder.addComboNonUniquePredicateBuilder(); + ComboNonUniqueSearchParameterPredicateBuilder predicateBuilder = + mySqlBuilder.addComboNonUniquePredicateBuilder(); Condition predicate = predicateBuilder.createPredicateHashComplete(theRequestPartitionId, theIndexString); mySqlBuilder.addPredicate(predicate); } // expand out the pids - public void addPredicateEverythingOperation(String theResourceName, List theTypeSourceResourceNames, Long... theTargetPids) { + public void addPredicateEverythingOperation( + String theResourceName, List theTypeSourceResourceNames, Long... theTargetPids) { ResourceLinkPredicateBuilder table = mySqlBuilder.addReferencePredicateBuilder(this, null); - Condition predicate = table.createEverythingPredicate(theResourceName, theTypeSourceResourceNames, theTargetPids); + Condition predicate = + table.createEverythingPredicate(theResourceName, theTypeSourceResourceNames, theTargetPids); mySqlBuilder.addPredicate(predicate); } - public IQueryParameterType toParameterType(RuntimeSearchParam theParam, String theQualifier, String theValueAsQueryToken) { + public IQueryParameterType toParameterType( + RuntimeSearchParam theParam, String theQualifier, String theValueAsQueryToken) { IQueryParameterType qp = toParameterType(theParam); qp.setValueAsQueryToken(myFhirContext, theParam.getName(), theQualifier, theValueAsQueryToken); @@ -1963,9 +2688,11 @@ public class QueryStack { qp = new TokenParam(); break; case COMPOSITE: - List compositeOf = JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, theParam); + List compositeOf = + JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, theParam); if (compositeOf.size() != 2) { - throw new InternalErrorException(Msg.code(1224) + "Parameter " + theParam.getName() + " has " + compositeOf.size() + " composite parts. Don't know how handlt this."); + throw new InternalErrorException(Msg.code(1224) + "Parameter " + theParam.getName() + " has " + + compositeOf.size() + " composite parts. 
Don't know how handlt this."); } IQueryParameterType leftParam = toParameterType(compositeOf.get(0)); IQueryParameterType rightParam = toParameterType(compositeOf.get(1)); @@ -1982,7 +2709,8 @@ public class QueryStack { break; case HAS: default: - throw new InvalidRequestException(Msg.code(1225) + "The search type: " + theParam.getParamType() + " is not supported."); + throw new InvalidRequestException( + Msg.code(1225) + "The search type: " + theParam.getParamType() + " is not supported."); } return qp; } @@ -1991,7 +2719,6 @@ public class QueryStack { * @see #isEligibleForEmbeddedChainedResourceSearch(String, String, List) for an explanation of the values in this enum */ enum EmbeddedChainedSearchModeEnum { - UPLIFTED_ONLY(true), UPLIFTED_AND_REF_JOIN(true), REF_JOIN_ONLY(false); @@ -2007,7 +2734,7 @@ public class QueryStack { } } - private final static class ChainElement { + private static final class ChainElement { private final String myResourceType; private final String mySearchParameterName; private final String myPath; @@ -2035,7 +2762,9 @@ public class QueryStack { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ChainElement that = (ChainElement) o; - return myResourceType.equals(that.myResourceType) && mySearchParameterName.equals(that.mySearchParameterName) && myPath.equals(that.myPath); + return myResourceType.equals(that.myResourceType) + && mySearchParameterName.equals(that.mySearchParameterName) + && myPath.equals(that.myPath); } @Override @@ -2057,17 +2786,23 @@ public class QueryStack { private List extractPaths(String theResourceType, RuntimeSearchParam theSearchParam) { List pathsForType = theSearchParam.getPathsSplit().stream() - .map(String::trim) - .filter(t -> t.startsWith(theResourceType)) - .collect(Collectors.toList()); + .map(String::trim) + .filter(t -> t.startsWith(theResourceType)) + .collect(Collectors.toList()); if (pathsForType.isEmpty()) { - ourLog.warn("Search parameter {} does not have a path for resource type {}.", theSearchParam.getName(), theResourceType); + ourLog.warn( + "Search parameter {} does not have a path for resource type {}.", + theSearchParam.getName(), + theResourceType); } return pathsForType; } - public void deriveChains(String theResourceType, RuntimeSearchParam theSearchParam, List theList) { + public void deriveChains( + String theResourceType, + RuntimeSearchParam theSearchParam, + List theList) { List paths = extractPaths(theResourceType, theSearchParam); for (String path : paths) { List searchParams = Lists.newArrayList(); @@ -2077,21 +2812,33 @@ public class QueryStack { if (nextOr instanceof ReferenceParam) { ReferenceParam referenceParam = (ReferenceParam) nextOr; if (!isReferenceParamValid(referenceParam)) { - throw new InvalidRequestException(Msg.code(2007) + - "The search chain " + theSearchParam.getName() + "." + referenceParam.getChain() + - " is too long. Only chains up to three references are supported."); + throw new InvalidRequestException(Msg.code(2007) + "The search chain " + + theSearchParam.getName() + "." + referenceParam.getChain() + + " is too long. 
Only chains up to three references are supported."); } String targetChain = referenceParam.getChain(); List qualifiers = Lists.newArrayList(referenceParam.getResourceType()); - processNextLinkInChain(searchParams, theSearchParam, targetChain, targetValue, qualifiers, referenceParam.getResourceType()); + processNextLinkInChain( + searchParams, + theSearchParam, + targetChain, + targetValue, + qualifiers, + referenceParam.getResourceType()); } } } } - private void processNextLinkInChain(List theSearchParams, RuntimeSearchParam thePreviousSearchParam, String theChain, String theTargetValue, List theQualifiers, String theResourceType) { + private void processNextLinkInChain( + List theSearchParams, + RuntimeSearchParam thePreviousSearchParam, + String theChain, + String theTargetValue, + List theQualifiers, + String theResourceType) { String nextParamName = theChain; String nextChain = null; @@ -2139,7 +2886,8 @@ public class QueryStack { leafNodes = Sets.newHashSet(); myChains.put(theSearchParams, leafNodes); } - leafNodes.add(new LeafNodeDefinition(nextSearchParam, orValues, nextTarget, nextParamName, "", qualifiersBranch)); + leafNodes.add(new LeafNodeDefinition( + nextSearchParam, orValues, nextTarget, nextParamName, "", qualifiersBranch)); } else { List nextPaths = extractPaths(nextTarget, nextSearchParam); for (String nextPath : nextPaths) { @@ -2147,13 +2895,25 @@ public class QueryStack { searchParamBranch.addAll(theSearchParams); searchParamBranch.add(new ChainElement(nextTarget, nextSearchParam.getName(), nextPath)); - processNextLinkInChain(searchParamBranch, nextSearchParam, nextChain, theTargetValue, qualifiersBranch, nextQualifier); + processNextLinkInChain( + searchParamBranch, + nextSearchParam, + nextChain, + theTargetValue, + qualifiersBranch, + nextQualifier); } } } } if (!searchParamFound) { - throw new InvalidRequestException(Msg.code(1214) + myFhirContext.getLocalizer().getMessage(BaseStorageDao.class, "invalidParameterChain", thePreviousSearchParam.getName() + '.' + theChain)); + throw new InvalidRequestException(Msg.code(1214) + + myFhirContext + .getLocalizer() + .getMessage( + BaseStorageDao.class, + "invalidParameterChain", + thePreviousSearchParam.getName() + '.' 
+ theChain)); } } } @@ -2166,7 +2926,13 @@ public class QueryStack { private final String myLeafPathPrefix; private final List myQualifiers; - public LeafNodeDefinition(RuntimeSearchParam theParamDefinition, ArrayList theOrValues, String theLeafTarget, String theLeafParamName, String theLeafPathPrefix, List theQualifiers) { + public LeafNodeDefinition( + RuntimeSearchParam theParamDefinition, + ArrayList theOrValues, + String theLeafTarget, + String theLeafParamName, + String theLeafPathPrefix, + List theQualifiers) { myParamDefinition = theParamDefinition; myOrValues = theOrValues; myLeafTarget = theLeafTarget; @@ -2200,7 +2966,8 @@ public class QueryStack { } public LeafNodeDefinition withPathPrefix(String theResourceType, String theName) { - return new LeafNodeDefinition(myParamDefinition, myOrValues, theResourceType, myLeafParamName, theName, myQualifiers); + return new LeafNodeDefinition( + myParamDefinition, myOrValues, theResourceType, myLeafParamName, theName, myQualifiers); } @Override @@ -2208,12 +2975,18 @@ public class QueryStack { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; LeafNodeDefinition that = (LeafNodeDefinition) o; - return Objects.equals(myParamDefinition, that.myParamDefinition) && Objects.equals(myOrValues, that.myOrValues) && Objects.equals(myLeafTarget, that.myLeafTarget) && Objects.equals(myLeafParamName, that.myLeafParamName) && Objects.equals(myLeafPathPrefix, that.myLeafPathPrefix) && Objects.equals(myQualifiers, that.myQualifiers); + return Objects.equals(myParamDefinition, that.myParamDefinition) + && Objects.equals(myOrValues, that.myOrValues) + && Objects.equals(myLeafTarget, that.myLeafTarget) + && Objects.equals(myLeafParamName, that.myLeafParamName) + && Objects.equals(myLeafPathPrefix, that.myLeafPathPrefix) + && Objects.equals(myQualifiers, that.myQualifiers); } @Override public int hashCode() { - return Objects.hash(myParamDefinition, myOrValues, myLeafTarget, myLeafParamName, myLeafPathPrefix, myQualifiers); + return Objects.hash( + myParamDefinition, myOrValues, myLeafTarget, myLeafParamName, myLeafPathPrefix, myQualifiers); } /** @@ -2221,8 +2994,8 @@ public class QueryStack { * but all other values unchanged. 
*/ public LeafNodeDefinition withParam(RuntimeSearchParam theParamDefinition) { - return new LeafNodeDefinition(theParamDefinition, myOrValues, myLeafTarget, myLeafParamName, myLeafPathPrefix, myQualifiers); + return new LeafNodeDefinition( + theParamDefinition, myOrValues, myLeafTarget, myLeafParamName, myLeafPathPrefix, myQualifiers); } } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java index 86fa60fb45e..5e48a02c6ad 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java @@ -107,15 +107,6 @@ import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.SingleColumnRowMapper; import org.springframework.transaction.support.TransactionSynchronizationManager; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.PersistenceContextType; -import javax.persistence.Query; -import javax.persistence.Tuple; -import javax.persistence.TypedQuery; -import javax.persistence.criteria.CriteriaBuilder; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -128,6 +119,15 @@ import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; +import javax.persistence.PersistenceContextType; +import javax.persistence.Query; +import javax.persistence.Tuple; +import javax.persistence.TypedQuery; +import javax.persistence.criteria.CriteriaBuilder; import static ca.uhn.fhir.jpa.search.builder.QueryStack.LOCATION_POSITION; import static org.apache.commons.lang3.StringUtils.defaultString; @@ -147,6 +147,7 @@ public class SearchBuilder implements ISearchBuilder { // NB: keep public @Deprecated public static final int MAXIMUM_PAGE_SIZE = SearchConstants.MAX_PAGE_SIZE; + public static final int MAXIMUM_PAGE_SIZE_FOR_TESTING = 50; public static final String RESOURCE_ID_ALIAS = "resource_id"; public static final String RESOURCE_VERSION_ALIAS = "resource_version"; @@ -173,8 +174,10 @@ public class SearchBuilder implements ISearchBuilder { private final IIdHelperService myIdHelperService; private final JpaStorageSettings myStorageSettings; private final IDao myCallingDao; + @PersistenceContext(type = PersistenceContextType.TRANSACTION) protected EntityManager myEntityManager; + private List myAlsoIncludePids; private CriteriaBuilder myCriteriaBuilder; private SearchParameterMap myParams; @@ -184,10 +187,13 @@ public class SearchBuilder implements ISearchBuilder { private Set myPidSet; private boolean myHasNextIteratorQuery = false; private RequestPartitionId myRequestPartitionId; + @Autowired(required = false) private IFulltextSearchSvc myFulltextSearchSvc; + @Autowired(required = false) private IElasticsearchSvc myIElasticsearchSvc; + @Autowired private IJpaStorageResourceParser myJpaStorageResourceParser; @@ -195,22 +201,21 @@ public class SearchBuilder implements ISearchBuilder { * Constructor */ public SearchBuilder( - IDao theDao, - String theResourceName, - JpaStorageSettings theStorageSettings, - HapiFhirLocalContainerEntityManagerFactoryBean 
theEntityManagerFactory, - SqlObjectFactory theSqlBuilderFactory, - HibernatePropertiesProvider theDialectProvider, - ISearchParamRegistry theSearchParamRegistry, - PartitionSettings thePartitionSettings, - IInterceptorBroadcaster theInterceptorBroadcaster, - IResourceTagDao theResourceTagDao, - DaoRegistry theDaoRegistry, - IResourceSearchViewDao theResourceSearchViewDao, - FhirContext theContext, - IIdHelperService theIdHelperService, - Class theResourceType - ) { + IDao theDao, + String theResourceName, + JpaStorageSettings theStorageSettings, + HapiFhirLocalContainerEntityManagerFactoryBean theEntityManagerFactory, + SqlObjectFactory theSqlBuilderFactory, + HibernatePropertiesProvider theDialectProvider, + ISearchParamRegistry theSearchParamRegistry, + PartitionSettings thePartitionSettings, + IInterceptorBroadcaster theInterceptorBroadcaster, + IResourceTagDao theResourceTagDao, + DaoRegistry theDaoRegistry, + IResourceSearchViewDao theResourceSearchViewDao, + FhirContext theContext, + IIdHelperService theIdHelperService, + Class theResourceType) { myCallingDao = theDao; myResourceName = theResourceName; myResourceType = theResourceType; @@ -234,7 +239,11 @@ public class SearchBuilder implements ISearchBuilder { myMaxResultsToFetch = theMaxResultsToFetch; } - private void searchForIdsWithAndOr(SearchQueryBuilder theSearchSqlBuilder, QueryStack theQueryStack, @Nonnull SearchParameterMap theParams, RequestDetails theRequest) { + private void searchForIdsWithAndOr( + SearchQueryBuilder theSearchSqlBuilder, + QueryStack theQueryStack, + @Nonnull SearchParameterMap theParams, + RequestDetails theRequest) { myParams = theParams; // Remove any empty parameters @@ -253,8 +262,10 @@ public class SearchBuilder implements ISearchBuilder { SearchContainedModeEnum searchContainedMode = theParams.getSearchContainedMode(); // Handle _id and _tag last, since they can typically be tacked onto a different parameter - List paramNames = myParams.keySet().stream().filter(t -> !t.equals(IAnyResource.SP_RES_ID)) - .filter(t -> !t.equals(Constants.PARAM_TAG)).collect(Collectors.toList()); + List paramNames = myParams.keySet().stream() + .filter(t -> !t.equals(IAnyResource.SP_RES_ID)) + .filter(t -> !t.equals(Constants.PARAM_TAG)) + .collect(Collectors.toList()); if (myParams.containsKey(IAnyResource.SP_RES_ID)) { paramNames.add(IAnyResource.SP_RES_ID); } @@ -265,11 +276,19 @@ public class SearchBuilder implements ISearchBuilder { // Handle each parameter for (String nextParamName : paramNames) { if (myParams.isLastN() && LastNParameterHelper.isLastNParameter(nextParamName, myContext)) { - // Skip parameters for Subject, Patient, Code and Category for LastN as these will be filtered by Elasticsearch + // Skip parameters for Subject, Patient, Code and Category for LastN as these will be filtered by + // Elasticsearch continue; } List> andOrParams = myParams.get(nextParamName); - Condition predicate = theQueryStack.searchForIdsWithAndOr(null, myResourceName, nextParamName, andOrParams, theRequest, myRequestPartitionId, searchContainedMode); + Condition predicate = theQueryStack.searchForIdsWithAndOr( + null, + myResourceName, + nextParamName, + andOrParams, + theRequest, + myRequestPartitionId, + searchContainedMode); if (predicate != null) { theSearchSqlBuilder.addPredicate(predicate); } @@ -281,15 +300,18 @@ public class SearchBuilder implements ISearchBuilder { * parameters all have no modifiers. 
*/ private boolean isCompositeUniqueSpCandidate() { - return myStorageSettings.isUniqueIndexesEnabled() && - myParams.getEverythingMode() == null && - myParams.isAllParametersHaveNoModifier(); + return myStorageSettings.isUniqueIndexesEnabled() + && myParams.getEverythingMode() == null + && myParams.isAllParametersHaveNoModifier(); } @SuppressWarnings("ConstantConditions") @Override - public Long createCountQuery(SearchParameterMap theParams, String theSearchUuid, - RequestDetails theRequest, @Nonnull RequestPartitionId theRequestPartitionId) { + public Long createCountQuery( + SearchParameterMap theParams, + String theSearchUuid, + RequestDetails theRequest, + @Nonnull RequestPartitionId theRequestPartitionId) { assert theRequestPartitionId != null; assert TransactionSynchronizationManager.isActualTransactionActive(); @@ -319,7 +341,11 @@ public class SearchBuilder implements ISearchBuilder { @SuppressWarnings("ConstantConditions") @Override - public IResultIterator createQuery(SearchParameterMap theParams, SearchRuntimeDetails theSearchRuntimeDetails, RequestDetails theRequest, @Nonnull RequestPartitionId theRequestPartitionId) { + public IResultIterator createQuery( + SearchParameterMap theParams, + SearchRuntimeDetails theSearchRuntimeDetails, + RequestDetails theRequest, + @Nonnull RequestPartitionId theRequestPartitionId) { assert theRequestPartitionId != null; assert TransactionSynchronizationManager.isActualTransactionActive(); @@ -332,7 +358,6 @@ public class SearchBuilder implements ISearchBuilder { return new QueryIterator(theSearchRuntimeDetails, theRequest); } - private void init(SearchParameterMap theParams, String theSearchUuid, RequestPartitionId theRequestPartitionId) { myCriteriaBuilder = myEntityManager.getCriteriaBuilder(); // we mutate the params. Make a private copy. 
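[Editor's note, not part of the patch: the hunks above and below keep threading a List<List<IQueryParameterType>> ("theAndOrParams") through searchForIdsWithAndOr and reducing it with toOrPredicate/toAndPredicate. The following is a minimal, self-contained sketch of that AND-of-ORs reduction. It is illustrative only: it uses plain predicate strings and a hypothetical AndOrPredicateSketch class rather than the real QueryStack and sqlbuilder Condition types, and it omits the partition-id predicate that the real code adds via combineWithRequestPartitionIdPredicate.]

import java.util.List;
import java.util.stream.Collectors;

// Illustrative sketch only: the outer list models AND clauses, each inner
// list models OR'd values for one clause, mirroring the and/or parameter
// shape passed into searchForIdsWithAndOr. The real code builds Condition
// objects; here we build strings so the example stays self-contained.
public class AndOrPredicateSketch {

    // Combine one OR group: (value1 OR value2 ...)
    static String toOrPredicate(List<String> theOrValues) {
        return "(" + String.join(" OR ", theOrValues) + ")";
    }

    // Combine all AND groups: (group1) AND (group2) ...
    static String toAndPredicate(List<String> theOrPredicates) {
        return String.join(" AND ", theOrPredicates);
    }

    public static void main(String[] args) {
        // e.g. ?name=smith,jones&name=anne -> two AND clauses, the first OR'd
        List<List<String>> andOrParams = List.of(
                List.of("name = 'smith'", "name = 'jones'"),
                List.of("name = 'anne'"));

        String predicate = toAndPredicate(andOrParams.stream()
                .map(AndOrPredicateSketch::toOrPredicate)
                .collect(Collectors.toList()));

        // Prints: (name = 'smith' OR name = 'jones') AND (name = 'anne')
        System.out.println(predicate);
    }
}

[In the actual classes the same reduction runs over Condition objects, one predicate builder per search-parameter type, and the resulting AND predicate is further combined with the request-partition-id condition before being added to the SearchQueryBuilder.]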
@@ -341,8 +366,14 @@ public class SearchBuilder implements ISearchBuilder { myRequestPartitionId = theRequestPartitionId; } - private List createQuery(SearchParameterMap theParams, SortSpec sort, Integer theOffset, Integer theMaximumResults, boolean theCountOnlyFlag, RequestDetails theRequest, - SearchRuntimeDetails theSearchRuntimeDetails) { + private List createQuery( + SearchParameterMap theParams, + SortSpec sort, + Integer theOffset, + Integer theMaximumResults, + boolean theCountOnlyFlag, + RequestDetails theRequest, + SearchRuntimeDetails theSearchRuntimeDetails) { ArrayList queries = new ArrayList<>(); @@ -360,7 +391,8 @@ public class SearchBuilder implements ISearchBuilder { fulltextMatchIds = queryHibernateSearchForEverythingPids(theRequest); resultCount = fulltextMatchIds.size(); } else { - fulltextExecutor = myFulltextSearchSvc.searchNotScrolled(myResourceName, myParams, myMaxResultsToFetch, theRequest); + fulltextExecutor = myFulltextSearchSvc.searchNotScrolled( + myResourceName, myParams, myMaxResultsToFetch, theRequest); } if (fulltextExecutor == null) { @@ -370,27 +402,34 @@ public class SearchBuilder implements ISearchBuilder { if (theSearchRuntimeDetails != null) { theSearchRuntimeDetails.setFoundIndexMatchesCount(resultCount); HookParams params = new HookParams() - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest) - .add(SearchRuntimeDetails.class, theSearchRuntimeDetails); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE, params); + .add(RequestDetails.class, theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest) + .add(SearchRuntimeDetails.class, theSearchRuntimeDetails); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, + theRequest, + Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE, + params); } // can we skip the database entirely and return the pid list from here? boolean canSkipDatabase = - // if we processed an AND clause, and it returned nothing, then nothing can match. - !fulltextExecutor.hasNext() || - // Our hibernate search query doesn't respect partitions yet - (!myPartitionSettings.isPartitioningEnabled() && - // were there AND terms left? Then we still need the db. - theParams.isEmpty() && - // not every param is a param. :-( - theParams.getNearDistanceParam() == null && - // todo MB don't we support _lastUpdated and _offset now? - theParams.getLastUpdated() == null && - theParams.getEverythingMode() == null && - theParams.getOffset() == null - ); + // if we processed an AND clause, and it returned nothing, then nothing can match. + !fulltextExecutor.hasNext() + || + // Our hibernate search query doesn't respect partitions yet + (!myPartitionSettings.isPartitioningEnabled() + && + // were there AND terms left? Then we still need the db. + theParams.isEmpty() + && + // not every param is a param. :-( + theParams.getNearDistanceParam() == null + && + // todo MB don't we support _lastUpdated and _offset now? + theParams.getLastUpdated() == null + && theParams.getEverythingMode() == null + && theParams.getOffset() == null); if (canSkipDatabase) { ourLog.trace("Query finished after HSearch. Skip db query phase"); @@ -403,11 +442,15 @@ public class SearchBuilder implements ISearchBuilder { // Finish the query in the database for the rest of the search parameters, sorting, partitioning, etc. // We break the pids into chunks that fit in the 1k limit for jdbc bind params. 
new QueryChunker() - .chunk(Streams.stream(fulltextExecutor).collect(Collectors.toList()), t -> doCreateChunkedQueries(theParams, t, theOffset, sort, theCountOnlyFlag, theRequest, queries)); + .chunk( + Streams.stream(fulltextExecutor).collect(Collectors.toList()), + t -> doCreateChunkedQueries( + theParams, t, theOffset, sort, theCountOnlyFlag, theRequest, queries)); } } else { // do everything in the database. - Optional query = createChunkedQuery(theParams, sort, theOffset, theMaximumResults, theCountOnlyFlag, theRequest, null); + Optional query = createChunkedQuery( + theParams, sort, theOffset, theMaximumResults, theCountOnlyFlag, theRequest, null); query.ifPresent(queries::add); } @@ -428,15 +471,18 @@ public class SearchBuilder implements ISearchBuilder { failIfUsed(Constants.PARAM_CONTENT); } - // someday we'll want a query planner to figure out if we _should_ or _must_ use the ft index, not just if we can. - return fulltextEnabled && myParams != null && - myParams.getSearchContainedMode() == SearchContainedModeEnum.FALSE && - myFulltextSearchSvc.supportsSomeOf(myParams); + // someday we'll want a query planner to figure out if we _should_ or _must_ use the ft index, not just if we + // can. + return fulltextEnabled + && myParams != null + && myParams.getSearchContainedMode() == SearchContainedModeEnum.FALSE + && myFulltextSearchSvc.supportsSomeOf(myParams); } private void failIfUsed(String theParamName) { if (myParams.containsKey(theParamName)) { - throw new InvalidRequestException(Msg.code(1192) + "Fulltext search is not enabled on this service, can not process parameter: " + theParamName); + throw new InvalidRequestException(Msg.code(1192) + + "Fulltext search is not enabled on this service, can not process parameter: " + theParamName); } } @@ -444,19 +490,23 @@ public class SearchBuilder implements ISearchBuilder { // Can we use our hibernate search generated index on resource to support lastN?: if (myStorageSettings.isAdvancedHSearchIndexing()) { if (myFulltextSearchSvc == null) { - throw new InvalidRequestException(Msg.code(2027) + "LastN operation is not enabled on this service, can not process this request"); + throw new InvalidRequestException(Msg.code(2027) + + "LastN operation is not enabled on this service, can not process this request"); } - return myFulltextSearchSvc.lastN(myParams, theMaximumResults) - .stream().map(lastNResourceId -> myIdHelperService.resolveResourcePersistentIds(myRequestPartitionId, myResourceName, String.valueOf(lastNResourceId))) - .collect(Collectors.toList()); + return myFulltextSearchSvc.lastN(myParams, theMaximumResults).stream() + .map(lastNResourceId -> myIdHelperService.resolveResourcePersistentIds( + myRequestPartitionId, myResourceName, String.valueOf(lastNResourceId))) + .collect(Collectors.toList()); } else { if (myIElasticsearchSvc == null) { - throw new InvalidRequestException(Msg.code(2033) + "LastN operation is not enabled on this service, can not process this request"); + throw new InvalidRequestException(Msg.code(2033) + + "LastN operation is not enabled on this service, can not process this request"); } // use the dedicated observation ES/Lucene index to support lastN query return myIElasticsearchSvc.executeLastN(myParams, myContext, theMaximumResults).stream() - .map(lastnResourceId -> myIdHelperService.resolveResourcePersistentIds(myRequestPartitionId, myResourceName, lastnResourceId)) - .collect(Collectors.toList()); + .map(lastnResourceId -> myIdHelperService.resolveResourcePersistentIds( + myRequestPartitionId, 
myResourceName, lastnResourceId)) + .collect(Collectors.toList()); } } @@ -464,7 +514,8 @@ public class SearchBuilder implements ISearchBuilder { JpaPid pid = null; if (myParams.get(IAnyResource.SP_RES_ID) != null) { String idParamValue; - IQueryParameterType idParam = myParams.get(IAnyResource.SP_RES_ID).get(0).get(0); + IQueryParameterType idParam = + myParams.get(IAnyResource.SP_RES_ID).get(0).get(0); if (idParam instanceof TokenParam) { TokenParam idParm = (TokenParam) idParam; idParamValue = idParm.getValue(); @@ -479,11 +530,19 @@ public class SearchBuilder implements ISearchBuilder { return pids; } - private void doCreateChunkedQueries(SearchParameterMap theParams, List thePids, Integer theOffset, SortSpec sort, boolean theCount, RequestDetails theRequest, ArrayList theQueries) { + private void doCreateChunkedQueries( + SearchParameterMap theParams, + List thePids, + Integer theOffset, + SortSpec sort, + boolean theCount, + RequestDetails theRequest, + ArrayList theQueries) { if (thePids.size() < getMaximumPageSize()) { normalizeIdListForLastNInClause(thePids); } - Optional query = createChunkedQuery(theParams, sort, theOffset, thePids.size(), theCount, theRequest, thePids); + Optional query = + createChunkedQuery(theParams, sort, theOffset, thePids.size(), theCount, theRequest, thePids); query.ifPresent(t -> theQueries.add(t)); } @@ -506,16 +565,16 @@ public class SearchBuilder implements ISearchBuilder { ids.add(((TokenParam) param).getValue()); } else { // we do not expect the _id parameter to be a non-string value - throw new IllegalArgumentException(Msg.code(1193) + "_id parameter must be a StringParam or TokenParam"); + throw new IllegalArgumentException( + Msg.code(1193) + "_id parameter must be a StringParam or TokenParam"); } } } // fetch our target Pids // this will throw if an id is not found - Map idToPid = myIdHelperService.resolveResourcePersistentIds(myRequestPartitionId, - myResourceName, - new ArrayList<>(ids)); + Map idToPid = myIdHelperService.resolveResourcePersistentIds( + myRequestPartitionId, myResourceName, new ArrayList<>(ids)); if (myAlsoIncludePids == null) { myAlsoIncludePids = new ArrayList<>(); } @@ -527,13 +586,33 @@ public class SearchBuilder implements ISearchBuilder { } } - private Optional createChunkedQuery(SearchParameterMap theParams, SortSpec sort, Integer theOffset, Integer theMaximumResults, boolean theCountOnlyFlag, RequestDetails theRequest, List thePidList) { + private Optional createChunkedQuery( + SearchParameterMap theParams, + SortSpec sort, + Integer theOffset, + Integer theMaximumResults, + boolean theCountOnlyFlag, + RequestDetails theRequest, + List thePidList) { String sqlBuilderResourceName = myParams.getEverythingMode() == null ? 
myResourceName : null; - SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(myContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, sqlBuilderResourceName, mySqlBuilderFactory, myDialectProvider, theCountOnlyFlag); - QueryStack queryStack3 = new QueryStack(theParams, myStorageSettings, myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings); + SearchQueryBuilder sqlBuilder = new SearchQueryBuilder( + myContext, + myStorageSettings, + myPartitionSettings, + myRequestPartitionId, + sqlBuilderResourceName, + mySqlBuilderFactory, + myDialectProvider, + theCountOnlyFlag); + QueryStack queryStack3 = new QueryStack( + theParams, myStorageSettings, myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings); - if (theParams.keySet().size() > 1 || theParams.getSort() != null || theParams.keySet().contains(Constants.PARAM_HAS) || isPotentiallyContainedReferenceParameterExistsAtRoot(theParams)) { - List activeComboParams = mySearchParamRegistry.getActiveComboSearchParams(myResourceName, theParams.keySet()); + if (theParams.keySet().size() > 1 + || theParams.getSort() != null + || theParams.keySet().contains(Constants.PARAM_HAS) + || isPotentiallyContainedReferenceParameterExistsAtRoot(theParams)) { + List activeComboParams = + mySearchParamRegistry.getActiveComboSearchParams(myResourceName, theParams.keySet()); if (activeComboParams.isEmpty()) { sqlBuilder.setNeedResourceTableRoot(true); } @@ -554,7 +633,15 @@ public class SearchBuilder implements ISearchBuilder { // is basically a reverse-include search. For type/Everything (as opposed to instance/Everything) // the one problem with this approach is that it doesn't catch Patients that have absolutely // nothing linked to them. So we do one additional query to make sure we catch those too. - SearchQueryBuilder fetchPidsSqlBuilder = new SearchQueryBuilder(myContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, myResourceName, mySqlBuilderFactory, myDialectProvider, theCountOnlyFlag); + SearchQueryBuilder fetchPidsSqlBuilder = new SearchQueryBuilder( + myContext, + myStorageSettings, + myPartitionSettings, + myRequestPartitionId, + myResourceName, + mySqlBuilderFactory, + myDialectProvider, + theCountOnlyFlag); GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate(theOffset, myMaxResultsToFetch); String sql = allTargetsSql.getSql(); Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]); @@ -563,7 +650,6 @@ public class SearchBuilder implements ISearchBuilder { myAlsoIncludePids = new ArrayList<>(output.size()); } myAlsoIncludePids.addAll(JpaPid.fromLongList(output)); - } List typeSourceResources = new ArrayList<>(); @@ -571,7 +657,8 @@ public class SearchBuilder implements ISearchBuilder { typeSourceResources.addAll(extractTypeSourceResourcesFromParams()); } - queryStack3.addPredicateEverythingOperation(myResourceName, typeSourceResources, targetPids.toArray(new Long[0])); + queryStack3.addPredicateEverythingOperation( + myResourceName, typeSourceResources, targetPids.toArray(new Long[0])); } else { /* * If we're doing a filter, always use the resource table as the root - This avoids the possibility of @@ -579,7 +666,9 @@ public class SearchBuilder implements ISearchBuilder { * status / partition IDs built into queries. 
*/ if (theParams.containsKey(Constants.PARAM_FILTER)) { - Condition partitionIdPredicate = sqlBuilder.getOrCreateResourceTablePredicateBuilder().createPartitionIdPredicate(myRequestPartitionId); + Condition partitionIdPredicate = sqlBuilder + .getOrCreateResourceTablePredicateBuilder() + .createPartitionIdPredicate(myRequestPartitionId); if (partitionIdPredicate != null) { sqlBuilder.addPredicate(partitionIdPredicate); } @@ -592,7 +681,9 @@ public class SearchBuilder implements ISearchBuilder { // If we haven't added any predicates yet, we're doing a search for all resources. Make sure we add the // partition ID predicate in that case. if (!sqlBuilder.haveAtLeastOnePredicate()) { - Condition partitionIdPredicate = sqlBuilder.getOrCreateResourceTablePredicateBuilder().createPartitionIdPredicate(myRequestPartitionId); + Condition partitionIdPredicate = sqlBuilder + .getOrCreateResourceTablePredicateBuilder() + .createPartitionIdPredicate(myRequestPartitionId); if (partitionIdPredicate != null) { sqlBuilder.addPredicate(partitionIdPredicate); } @@ -652,7 +743,6 @@ public class SearchBuilder implements ISearchBuilder { createSort(queryStack3, sort, theParams); } - /* * Now perform the search */ @@ -670,14 +760,15 @@ public class SearchBuilder implements ISearchBuilder { List> listOfList = myParams.get(Constants.PARAM_TYPE); // first off, let's flatten the list of list - List iQueryParameterTypesList = listOfList.stream().flatMap(List::stream).collect(Collectors.toList()); + List iQueryParameterTypesList = + listOfList.stream().flatMap(List::stream).collect(Collectors.toList()); // then, extract all elements of each CSV into one big list - List resourceTypes = iQueryParameterTypesList - .stream() - .map(param -> ((StringParam) param).getValue()) - .map(csvString -> List.of(csvString.split(","))) - .flatMap(List::stream).collect(Collectors.toList()); + List resourceTypes = iQueryParameterTypesList.stream() + .map(param -> ((StringParam) param).getValue()) + .map(csvString -> List.of(csvString.split(","))) + .flatMap(List::stream) + .collect(Collectors.toList()); Set knownResourceTypes = myContext.getResourceTypes(); @@ -687,7 +778,8 @@ public class SearchBuilder implements ISearchBuilder { for (String type : resourceTypes) { String trimmed = type.trim(); if (!knownResourceTypes.contains(trimmed)) { - throw new ResourceNotFoundException(Msg.code(2197) + "Unknown resource type '" + trimmed + "' in _type parameter."); + throw new ResourceNotFoundException( + Msg.code(2197) + "Unknown resource type '" + trimmed + "' in _type parameter."); } retVal.add(trimmed); } @@ -696,21 +788,22 @@ public class SearchBuilder implements ISearchBuilder { } private boolean isPotentiallyContainedReferenceParameterExistsAtRoot(SearchParameterMap theParams) { - return myStorageSettings.isIndexOnContainedResources() && theParams.values().stream() - .flatMap(Collection::stream) - .flatMap(Collection::stream) - .anyMatch(t -> t instanceof ReferenceParam); + return myStorageSettings.isIndexOnContainedResources() + && theParams.values().stream() + .flatMap(Collection::stream) + .flatMap(Collection::stream) + .anyMatch(t -> t instanceof ReferenceParam); } private List normalizeIdListForLastNInClause(List lastnResourceIds) { /* - The following is a workaround to a known issue involving Hibernate. If queries are used with "in" clauses with large and varying - numbers of parameters, this can overwhelm Hibernate's QueryPlanCache and deplete heap space. 
See the following link for more info: - https://stackoverflow.com/questions/31557076/spring-hibernate-query-plan-cache-memory-usage. + The following is a workaround to a known issue involving Hibernate. If queries are used with "in" clauses with large and varying + numbers of parameters, this can overwhelm Hibernate's QueryPlanCache and deplete heap space. See the following link for more info: + https://stackoverflow.com/questions/31557076/spring-hibernate-query-plan-cache-memory-usage. - Normalizing the number of parameters in the "in" clause stabilizes the size of the QueryPlanCache, so long as the number of - arguments never exceeds the maximum specified below. - */ + Normalizing the number of parameters in the "in" clause stabilizes the size of the QueryPlanCache, so long as the number of + arguments never exceeds the maximum specified below. + */ int listSize = lastnResourceIds.size(); if (listSize > 1 && listSize < 10) { @@ -777,7 +870,8 @@ public class SearchBuilder implements ISearchBuilder { referenceParamTargetType = referenceParam.substring(0, colonIdx); referenceParam = referenceParam.substring(colonIdx + 1); } - RuntimeSearchParam outerParam = mySearchParamRegistry.getActiveSearchParam(myResourceName, referenceParam); + RuntimeSearchParam outerParam = + mySearchParamRegistry.getActiveSearchParam(myResourceName, referenceParam); if (outerParam == null) { throwInvalidRequestExceptionForUnknownSortParameter(myResourceName, referenceParam); } @@ -787,7 +881,8 @@ public class SearchBuilder implements ISearchBuilder { if (referenceParamTargetType != null && !referenceParamTargetType.equals(nextTargetType)) { continue; } - RuntimeSearchParam innerParam = mySearchParamRegistry.getActiveSearchParam(nextTargetType, targetParam); + RuntimeSearchParam innerParam = + mySearchParamRegistry.getActiveSearchParam(nextTargetType, targetParam); if (innerParam != null) { param = innerParam; break; @@ -811,8 +906,11 @@ public class SearchBuilder implements ISearchBuilder { paramName = paramName.substring(0, dotIdx); if (chainName.contains(".")) { String msg = myContext - .getLocalizer() - .getMessageSanitized(BaseStorageDao.class, "invalidSortParameterTooManyChains", paramName + "." + chainName); + .getLocalizer() + .getMessageSanitized( + BaseStorageDao.class, + "invalidSortParameterTooManyChains", + paramName + "." 
+ chainName); throw new InvalidRequestException(Msg.code(2286) + msg); } } @@ -826,7 +924,8 @@ public class SearchBuilder implements ISearchBuilder { } if (isNotBlank(chainName) && param.getParamType() != RestSearchParameterTypeEnum.REFERENCE) { - throw new InvalidRequestException(Msg.code(2285) + "Invalid chain, " + paramName + " is not a reference SearchParameter"); + throw new InvalidRequestException( + Msg.code(2285) + "Invalid chain, " + paramName + " is not a reference SearchParameter"); } switch (param.getParamType()) { @@ -837,7 +936,8 @@ public class SearchBuilder implements ISearchBuilder { theQueryStack.addSortOnDate(myResourceName, paramName, ascending); break; case REFERENCE: - theQueryStack.addSortOnResourceLink(myResourceName, referenceTargetType, paramName, chainName, ascending); + theQueryStack.addSortOnResourceLink( + myResourceName, referenceTargetType, paramName, chainName, ascending); break; case TOKEN: theQueryStack.addSortOnToken(myResourceName, paramName, ascending); @@ -852,14 +952,16 @@ public class SearchBuilder implements ISearchBuilder { theQueryStack.addSortOnQuantity(myResourceName, paramName, ascending); break; case COMPOSITE: - List compositeList = JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, param); + List compositeList = + JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, param); if (compositeList == null) { - throw new InvalidRequestException(Msg.code(1195) + "The composite _sort parameter " + paramName + " is not defined by the resource " + myResourceName); + throw new InvalidRequestException(Msg.code(1195) + "The composite _sort parameter " + paramName + + " is not defined by the resource " + myResourceName); } if (compositeList.size() != 2) { throw new InvalidRequestException(Msg.code(1196) + "The composite _sort parameter " + paramName - + " must have 2 composite types declared in parameter annotation, found " - + compositeList.size()); + + " must have 2 composite types declared in parameter annotation, found " + + compositeList.size()); } RuntimeSearchParam left = compositeList.get(0); RuntimeSearchParam right = compositeList.get(1); @@ -873,27 +975,41 @@ public class SearchBuilder implements ISearchBuilder { theQueryStack.addSortOnCoordsNear(paramName, ascending, theParams); break; } - throw new InvalidRequestException(Msg.code(2306) + "This server does not support _sort specifications of type " + param.getParamType() + " - Can't serve _sort=" + paramName); + throw new InvalidRequestException( + Msg.code(2306) + "This server does not support _sort specifications of type " + + param.getParamType() + " - Can't serve _sort=" + paramName); case HAS: default: - throw new InvalidRequestException(Msg.code(1197) + "This server does not support _sort specifications of type " + param.getParamType() + " - Can't serve _sort=" + paramName); + throw new InvalidRequestException( + Msg.code(1197) + "This server does not support _sort specifications of type " + + param.getParamType() + " - Can't serve _sort=" + paramName); } - } // Recurse createSort(theQueryStack, theSort.getChain(), theParams); - } private void throwInvalidRequestExceptionForUnknownSortParameter(String theResourceName, String theParamName) { - Collection validSearchParameterNames = mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(theResourceName); - String msg = myContext.getLocalizer().getMessageSanitized(BaseStorageDao.class, "invalidSortParameter", theParamName, theResourceName, validSearchParameterNames); + Collection validSearchParameterNames = + 
mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(theResourceName); + String msg = myContext + .getLocalizer() + .getMessageSanitized( + BaseStorageDao.class, + "invalidSortParameter", + theParamName, + theResourceName, + validSearchParameterNames); throw new InvalidRequestException(Msg.code(1194) + msg); } - private void createCompositeSort(QueryStack theQueryStack, RestSearchParameterTypeEnum theParamType, String theParamName, boolean theAscending) { + private void createCompositeSort( + QueryStack theQueryStack, + RestSearchParameterTypeEnum theParamType, + String theParamName, + boolean theAscending) { switch (theParamType) { case STRING: @@ -915,13 +1031,18 @@ public class SearchBuilder implements ISearchBuilder { case HAS: case SPECIAL: default: - throw new InvalidRequestException(Msg.code(1198) + "Don't know how to handle composite parameter with type of " + theParamType + " on _sort=" + theParamName); + throw new InvalidRequestException( + Msg.code(1198) + "Don't know how to handle composite parameter with type of " + theParamType + + " on _sort=" + theParamName); } - } - private void doLoadPids(Collection thePids, Collection theIncludedPids, List theResourceListToPopulate, boolean theForHistoryOperation, - Map thePosition) { + private void doLoadPids( + Collection thePids, + Collection theIncludedPids, + List theResourceListToPopulate, + boolean theForHistoryOperation, + Map thePosition) { Map resourcePidToVersion = null; for (JpaPid next : thePids) { @@ -939,9 +1060,10 @@ public class SearchBuilder implements ISearchBuilder { } // -- get the resource from the searchView - Collection resourceSearchViewList = myResourceSearchViewDao.findByResourceIds(versionlessPids); + Collection resourceSearchViewList = + myResourceSearchViewDao.findByResourceIds(versionlessPids); - //-- preload all tags with tag definition if any + // -- preload all tags with tag definition if any Map> tagMap = getResourceTagMap(resourceSearchViewList); for (IBaseResourceEntity next : resourceSearchViewList) { @@ -949,7 +1071,8 @@ public class SearchBuilder implements ISearchBuilder { continue; } - Class resourceType = myContext.getResourceDefinition(next.getResourceType()).getImplementingClass(); + Class resourceType = + myContext.getResourceDefinition(next.getResourceType()).getImplementingClass(); JpaPid resourceId = JpaPid.fromId(next.getResourceId()); @@ -965,16 +1088,22 @@ public class SearchBuilder implements ISearchBuilder { resourceId.setVersion(version); if (version != null && !version.equals(next.getVersion())) { IFhirResourceDao dao = myDaoRegistry.getResourceDao(resourceType); - next = (IBaseResourceEntity) dao.readEntity(next.getIdDt().withVersion(Long.toString(version)), null); + next = (IBaseResourceEntity) + dao.readEntity(next.getIdDt().withVersion(Long.toString(version)), null); } } IBaseResource resource = null; if (next != null) { - resource = myJpaStorageResourceParser.toResource(resourceType, next, tagMap.get(next.getId()), theForHistoryOperation); + resource = myJpaStorageResourceParser.toResource( + resourceType, next, tagMap.get(next.getId()), theForHistoryOperation); } if (resource == null) { - ourLog.warn("Unable to find resource {}/{}/_history/{} in database", next.getResourceType(), next.getIdDt().getIdPart(), next.getVersion()); + ourLog.warn( + "Unable to find resource {}/{}/_history/{} in database", + next.getResourceType(), + next.getIdDt().getIdPart(), + next.getVersion()); continue; } @@ -994,14 +1123,14 @@ public class SearchBuilder implements ISearchBuilder { } 
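The normalizeIdListForLastNInClause comment above explains why the IN-clause parameter count is normalized: Hibernate caches one query plan per distinct statement text, so IN lists whose size varies freely can flood the QueryPlanCache and exhaust heap. The snippet below is a minimal, hedged sketch of that general padding technique only; the bucket sizes, the class and method names, and the strategy of repeating the last id are illustrative assumptions and not the actual HAPI FHIR implementation.

import java.util.ArrayList;
import java.util.List;

final class InClausePaddingSketch {

	// Illustrative bucket sizes; the real code may use different thresholds.
	private static final int[] BUCKETS = {1, 10, 50, 100, 500, 800};

	/**
	 * Pads the id list up to the next bucket size by repeating the last element,
	 * so the generated SQL only ever uses a small, fixed set of IN-clause arities.
	 * Repeating an id that is already present does not change the result of "id IN (...)".
	 */
	static List<Long> padToNextBucket(List<Long> theIds) {
		if (theIds.isEmpty()) {
			return theIds;
		}
		int target = BUCKETS[BUCKETS.length - 1];
		for (int bucket : BUCKETS) {
			if (theIds.size() <= bucket) {
				target = bucket;
				break;
			}
		}
		List<Long> padded = new ArrayList<>(theIds);
		Long last = theIds.get(theIds.size() - 1);
		while (padded.size() < target) {
			padded.add(last);
		}
		return padded;
	}
}

With this approach, lists of 3, 7, or 9 ids would all be padded to 10 bind parameters, so Hibernate sees one cached plan for that arity instead of a separate plan for every distinct list size.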
} - private Map> getResourceTagMap(Collection theResourceSearchViewList) { + private Map> getResourceTagMap( + Collection theResourceSearchViewList) { List idList = new ArrayList<>(theResourceSearchViewList.size()); - //-- find all resource has tags + // -- find all resource has tags for (IBaseResourceEntity resource : theResourceSearchViewList) { - if (resource.isHasTags()) - idList.add(resource.getId()); + if (resource.isHasTags()) idList.add(resource.getId()); } return getPidToTagMap(idList); @@ -1011,14 +1140,13 @@ public class SearchBuilder implements ISearchBuilder { private Map> getPidToTagMap(List thePidList) { Map> tagMap = new HashMap<>(); - //-- no tags - if (thePidList.size() == 0) - return tagMap; + // -- no tags + if (thePidList.size() == 0) return tagMap; - //-- get all tags for the idList + // -- get all tags for the idList Collection tagList = myResourceTagDao.findByResourceIds(thePidList); - //-- build the map, key = resourceId, value = list of ResourceTag + // -- build the map, key = resourceId, value = list of ResourceTag JpaPid resourceId; Collection tagCol; for (ResourceTag tag : tagList) { @@ -1038,7 +1166,12 @@ public class SearchBuilder implements ISearchBuilder { } @Override - public void loadResourcesByPid(Collection thePids, Collection theIncludedPids, List theResourceListToPopulate, boolean theForHistoryOperation, RequestDetails theDetails) { + public void loadResourcesByPid( + Collection thePids, + Collection theIncludedPids, + List theResourceListToPopulate, + boolean theForHistoryOperation, + RequestDetails theDetails) { if (thePids.isEmpty()) { ourLog.debug("The include pids are empty"); // return; @@ -1062,12 +1195,17 @@ public class SearchBuilder implements ISearchBuilder { } catch (ResourceNotFoundInIndexException theE) { // some resources were not found in index, so we will inform this and resort to JPA search - ourLog.warn("Some resources were not found in index. Make sure all resources were indexed. Resorting to database search."); + ourLog.warn( + "Some resources were not found in index. Make sure all resources were indexed. Resorting to database search."); } } // We only chunk because some jdbc drivers can't handle long param lists. - new QueryChunker().chunk(thePids, t -> doLoadPids(t, theIncludedPids, theResourceListToPopulate, theForHistoryOperation, position)); + new QueryChunker() + .chunk( + thePids, + t -> doLoadPids( + t, theIncludedPids, theResourceListToPopulate, theForHistoryOperation, position)); } /** @@ -1081,16 +1219,19 @@ public class SearchBuilder implements ISearchBuilder { */ private boolean isLoadingFromElasticSearchSupported(Collection thePids) { // is storage enabled? 
- return myStorageSettings.isStoreResourceInHSearchIndex() && - myStorageSettings.isAdvancedHSearchIndexing() && - // we don't support history - thePids.stream().noneMatch(p -> p.getVersion() != null) && - // skip the complexity for metadata in dstu2 - myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3); + return myStorageSettings.isStoreResourceInHSearchIndex() + && myStorageSettings.isAdvancedHSearchIndexing() + && + // we don't support history + thePids.stream().noneMatch(p -> p.getVersion() != null) + && + // skip the complexity for metadata in dstu2 + myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3); } private List loadResourcesFromElasticSearch(Collection thePids) { - // Do we use the fulltextsvc via hibernate-search to load resources or be backwards compatible with older ES only impl + // Do we use the fulltextsvc via hibernate-search to load resources or be backwards compatible with older ES + // only impl // to handle lastN? if (myStorageSettings.isAdvancedHSearchIndexing() && myStorageSettings.isStoreResourceInHSearchIndex()) { List pidList = thePids.stream().map(pid -> (pid).getId()).collect(Collectors.toList()); @@ -1112,16 +1253,15 @@ public class SearchBuilder implements ISearchBuilder { */ @Override public Set loadIncludes( - FhirContext theContext, - EntityManager theEntityManager, - Collection theMatches, - Collection theIncludes, - boolean theReverseMode, - DateRangeParam theLastUpdated, - String theSearchIdOrDescription, - RequestDetails theRequest, - Integer theMaxCount - ) { + FhirContext theContext, + EntityManager theEntityManager, + Collection theMatches, + Collection theIncludes, + boolean theReverseMode, + DateRangeParam theLastUpdated, + String theSearchIdOrDescription, + RequestDetails theRequest, + Integer theMaxCount) { SearchBuilderLoadIncludesParameters parameters = new SearchBuilderLoadIncludesParameters<>(); parameters.setFhirContext(theContext); parameters.setEntityManager(theEntityManager); @@ -1148,7 +1288,8 @@ public class SearchBuilder implements ISearchBuilder { String searchIdOrDescription = theParameters.getSearchIdOrDescription(); List desiredResourceTypes = theParameters.getDesiredResourceTypes(); boolean hasDesiredResourceTypes = desiredResourceTypes != null && !desiredResourceTypes.isEmpty(); - if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL, myInterceptorBroadcaster, theParameters.getRequestDetails())) { + if (CompositeInterceptorBroadcaster.hasHooks( + Pointcut.JPA_PERFTRACE_RAW_SQL, myInterceptorBroadcaster, theParameters.getRequestDetails())) { CurrentThreadCaptureQueriesListener.startCapturing(); } if (matches.size() == 0) { @@ -1225,8 +1366,7 @@ public class SearchBuilder implements ISearchBuilder { * resource type than the one you are searching on. */ if (wantResourceType != null - && (reverseMode || (myParams != null && myParams.getEverythingMode() != null)) - ) { + && (reverseMode || (myParams != null && myParams.getEverythingMode() != null))) { // because mySourceResourceType is not part of the HFJ_RES_LINK // index, this might not be the most optimal performance. 
// but it is for an $everything operation (and maybe we should update the index) @@ -1235,9 +1375,11 @@ public class SearchBuilder implements ISearchBuilder { wantResourceType = null; } - // When calling $everything on a Patient instance, we don't want to recurse into new Patient resources + // When calling $everything on a Patient instance, we don't want to recurse into new Patient + // resources // (e.g. via Provenance, List, or Group) when in an $everything operation - if (myParams != null && myParams.getEverythingMode() == SearchParameterMap.EverythingModeEnum.PATIENT_INSTANCE) { + if (myParams != null + && myParams.getEverythingMode() == SearchParameterMap.EverythingModeEnum.PATIENT_INSTANCE) { sqlBuilder.append(" AND r.myTargetResourceType != 'Patient'"); sqlBuilder.append(" AND r.mySourceResourceType != 'Provenance'"); } @@ -1275,7 +1417,8 @@ public class SearchBuilder implements ISearchBuilder { } if (resourceLink != null) { - JpaPid pid = JpaPid.fromIdAndVersionAndResourceType(resourceLink, version, resourceType); + JpaPid pid = + JpaPid.fromIdAndVersionAndResourceType(resourceLink, version, resourceType); pidsToInclude.add(pid); } } @@ -1312,31 +1455,39 @@ public class SearchBuilder implements ISearchBuilder { Set targetResourceTypes = computeTargetResourceTypes(nextInclude, param); for (String nextPath : paths) { - String findPidFieldSqlColumn = findPidFieldName.equals(MY_SOURCE_RESOURCE_PID) ? "src_resource_id" : "target_resource_id"; + String findPidFieldSqlColumn = findPidFieldName.equals(MY_SOURCE_RESOURCE_PID) + ? "src_resource_id" + : "target_resource_id"; String fieldsToLoad = "r." + findPidFieldSqlColumn + " AS " + RESOURCE_ID_ALIAS; if (findVersionFieldName != null) { fieldsToLoad += ", r.target_resource_version AS " + RESOURCE_VERSION_ALIAS; } - + // Query for includes lookup has 2 cases // Case 1: Where target_resource_id is available in hfj_res_link table for local references - // Case 2: Where target_resource_id is null in hfj_res_link table and referred by a canonical url in target_resource_url + // Case 2: Where target_resource_id is null in hfj_res_link table and referred by a canonical + // url in target_resource_url // Case 1: Map localReferenceQueryParams = new HashMap<>(); - String searchPidFieldSqlColumn = searchPidFieldName.equals(MY_TARGET_RESOURCE_PID) ? "target_resource_id" : "src_resource_id"; - StringBuilder localReferenceQuery = new StringBuilder("SELECT " + fieldsToLoad + - " FROM hfj_res_link r " + - " WHERE r.src_path = :src_path AND " + - " r.target_resource_id IS NOT NULL AND " + - " r." + searchPidFieldSqlColumn + " IN (:target_pids) "); + String searchPidFieldSqlColumn = searchPidFieldName.equals(MY_TARGET_RESOURCE_PID) + ? "target_resource_id" + : "src_resource_id"; + StringBuilder localReferenceQuery = + new StringBuilder("SELECT " + fieldsToLoad + " FROM hfj_res_link r " + + " WHERE r.src_path = :src_path AND " + + " r.target_resource_id IS NOT NULL AND " + + " r." + + searchPidFieldSqlColumn + " IN (:target_pids) "); localReferenceQueryParams.put("src_path", nextPath); // we loop over target_pids later. 
if (targetResourceTypes != null) { if (targetResourceTypes.size() == 1) { localReferenceQuery.append(" AND r.target_resource_type = :target_resource_type "); - localReferenceQueryParams.put("target_resource_type", targetResourceTypes.iterator().next()); + localReferenceQueryParams.put( + "target_resource_type", + targetResourceTypes.iterator().next()); } else { localReferenceQuery.append(" AND r.target_resource_type in (:target_resource_types) "); localReferenceQueryParams.put("target_resource_types", targetResourceTypes); @@ -1344,9 +1495,10 @@ public class SearchBuilder implements ISearchBuilder { } // Case 2: - Pair> canonicalQuery = buildCanonicalUrlQuery(findVersionFieldName, searchPidFieldSqlColumn, targetResourceTypes); + Pair> canonicalQuery = buildCanonicalUrlQuery( + findVersionFieldName, searchPidFieldSqlColumn, targetResourceTypes); - //@formatter:on + // @formatter:on String sql = localReferenceQuery + " UNION " + canonicalQuery.getLeft(); @@ -1364,10 +1516,12 @@ public class SearchBuilder implements ISearchBuilder { List results = q.getResultList(); for (Tuple result : results) { if (result != null) { - Long resourceId = NumberUtils.createLong(String.valueOf(result.get(RESOURCE_ID_ALIAS))); + Long resourceId = + NumberUtils.createLong(String.valueOf(result.get(RESOURCE_ID_ALIAS))); Long resourceVersion = null; if (findVersionFieldName != null && result.get(RESOURCE_VERSION_ALIAS) != null) { - resourceVersion = NumberUtils.createLong(String.valueOf(result.get(RESOURCE_VERSION_ALIAS))); + resourceVersion = NumberUtils.createLong( + String.valueOf(result.get(RESOURCE_VERSION_ALIAS))); } pidsToInclude.add(JpaPid.fromIdAndVersion(resourceId, resourceVersion)); } @@ -1379,7 +1533,7 @@ public class SearchBuilder implements ISearchBuilder { nextRoundMatches.clear(); for (JpaPid next : pidsToInclude) { - if ( !original.contains(next) && !allAdded.contains(next) ) { + if (!original.contains(next) && !allAdded.contains(next)) { nextRoundMatches.add(next); } } @@ -1394,11 +1548,16 @@ public class SearchBuilder implements ISearchBuilder { allAdded.removeAll(original); - ourLog.info("Loaded {} {} in {} rounds and {} ms for search {}", allAdded.size(), reverseMode ? "_revincludes" : "_includes", roundCounts, w.getMillisAndRestart(), searchIdOrDescription); + ourLog.info( + "Loaded {} {} in {} rounds and {} ms for search {}", + allAdded.size(), + reverseMode ? 
"_revincludes" : "_includes", + roundCounts, + w.getMillisAndRestart(), + searchIdOrDescription); - - - if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL, myInterceptorBroadcaster, request)) { + if (CompositeInterceptorBroadcaster.hasHooks( + Pointcut.JPA_PERFTRACE_RAW_SQL, myInterceptorBroadcaster, request)) { callRawSqlHookWithCurrentThreadQueries(request); } // Interceptor call: STORAGE_PREACCESS_RESOURCES @@ -1406,14 +1565,17 @@ public class SearchBuilder implements ISearchBuilder { // the user has a chance to know that they were in the results if (!allAdded.isEmpty()) { - if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.STORAGE_PREACCESS_RESOURCES, myInterceptorBroadcaster, request)) { + if (CompositeInterceptorBroadcaster.hasHooks( + Pointcut.STORAGE_PREACCESS_RESOURCES, myInterceptorBroadcaster, request)) { List includedPidList = new ArrayList<>(allAdded); - JpaPreResourceAccessDetails accessDetails = new JpaPreResourceAccessDetails(includedPidList, () -> this); + JpaPreResourceAccessDetails accessDetails = + new JpaPreResourceAccessDetails(includedPidList, () -> this); HookParams params = new HookParams() - .add(IPreResourceAccessDetails.class, accessDetails) - .add(RequestDetails.class, request) - .addIfMatchesType(ServletRequestDetails.class, request); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, request, Pointcut.STORAGE_PREACCESS_RESOURCES, params); + .add(IPreResourceAccessDetails.class, accessDetails) + .add(RequestDetails.class, request) + .addIfMatchesType(ServletRequestDetails.class, request); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, request, Pointcut.STORAGE_PREACCESS_RESOURCES, params); for (int i = includedPidList.size() - 1; i >= 0; i--) { if (accessDetails.isDontReturnResourceAtIndex(i)) { @@ -1436,10 +1598,11 @@ public class SearchBuilder implements ISearchBuilder { private void callRawSqlHookWithCurrentThreadQueries(RequestDetails request) { SqlQueryList capturedQueries = CurrentThreadCaptureQueriesListener.getCurrentQueueAndStopCapturing(); HookParams params = new HookParams() - .add(RequestDetails.class, request) - .addIfMatchesType(ServletRequestDetails.class, request) - .add(SqlQueryList.class, capturedQueries); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, request, Pointcut.JPA_PERFTRACE_RAW_SQL, params); + .add(RequestDetails.class, request) + .addIfMatchesType(ServletRequestDetails.class, request) + .add(SqlQueryList.class, capturedQueries); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, request, Pointcut.JPA_PERFTRACE_RAW_SQL, params); } @Nullable @@ -1459,7 +1622,8 @@ public class SearchBuilder implements ISearchBuilder { } @Nonnull - private Pair> buildCanonicalUrlQuery(String theVersionFieldName, String thePidFieldSqlColumn, Set theTargetResourceTypes) { + private Pair> buildCanonicalUrlQuery( + String theVersionFieldName, String thePidFieldSqlColumn, Set theTargetResourceTypes) { String fieldsToLoadFromSpidxUriTable = "rUri.res_id"; if (theVersionFieldName != null) { // canonical-uri references aren't versioned, but we need to match the column count for the UNION @@ -1474,27 +1638,27 @@ public class SearchBuilder implements ISearchBuilder { assert !theTargetResourceTypes.isEmpty(); Set identityHashesForTypes = theTargetResourceTypes.stream() - .map(type-> BaseResourceIndexedSearchParam.calculateHashIdentity(myPartitionSettings, myRequestPartitionId, type, "url")) - .collect(Collectors.toSet()); + .map(type -> 
BaseResourceIndexedSearchParam.calculateHashIdentity( + myPartitionSettings, myRequestPartitionId, type, "url")) + .collect(Collectors.toSet()); Map canonicalUriQueryParams = new HashMap<>(); StringBuilder canonicalUrlQuery = new StringBuilder( - "SELECT " + fieldsToLoadFromSpidxUriTable + - " FROM hfj_res_link r " + - " JOIN hfj_spidx_uri rUri ON ( "); + "SELECT " + fieldsToLoadFromSpidxUriTable + " FROM hfj_res_link r " + " JOIN hfj_spidx_uri rUri ON ( "); // join on hash_identity and sp_uri - indexed in IDX_SP_URI_HASH_IDENTITY_V2 if (theTargetResourceTypes.size() == 1) { canonicalUrlQuery.append(" rUri.hash_identity = :uri_identity_hash "); - canonicalUriQueryParams.put("uri_identity_hash", identityHashesForTypes.iterator().next()); + canonicalUriQueryParams.put( + "uri_identity_hash", identityHashesForTypes.iterator().next()); } else { canonicalUrlQuery.append(" rUri.hash_identity in (:uri_identity_hashes) "); canonicalUriQueryParams.put("uri_identity_hashes", identityHashesForTypes); } - canonicalUrlQuery.append(" AND r.target_resource_url = rUri.sp_uri )" + - " WHERE r.src_path = :src_path AND " + - " r.target_resource_id IS NULL AND " + - " r." + thePidFieldSqlColumn + " IN (:target_pids) "); + canonicalUrlQuery.append(" AND r.target_resource_url = rUri.sp_uri )" + " WHERE r.src_path = :src_path AND " + + " r.target_resource_id IS NULL AND " + + " r." + + thePidFieldSqlColumn + " IN (:target_pids) "); return Pair.of(canonicalUrlQuery.toString(), canonicalUriQueryParams); } @@ -1522,23 +1686,25 @@ public class SearchBuilder implements ISearchBuilder { } } - private void attemptComboUniqueSpProcessing(QueryStack theQueryStack3, @Nonnull SearchParameterMap theParams, RequestDetails theRequest) { + private void attemptComboUniqueSpProcessing( + QueryStack theQueryStack3, @Nonnull SearchParameterMap theParams, RequestDetails theRequest) { RuntimeSearchParam comboParam = null; List comboParamNames = null; - List exactMatchParams = mySearchParamRegistry.getActiveComboSearchParams(myResourceName, theParams.keySet()); + List exactMatchParams = + mySearchParamRegistry.getActiveComboSearchParams(myResourceName, theParams.keySet()); if (exactMatchParams.size() > 0) { comboParam = exactMatchParams.get(0); comboParamNames = new ArrayList<>(theParams.keySet()); } if (comboParam == null) { - List candidateComboParams = mySearchParamRegistry.getActiveComboSearchParams(myResourceName); + List candidateComboParams = + mySearchParamRegistry.getActiveComboSearchParams(myResourceName); for (RuntimeSearchParam nextCandidate : candidateComboParams) { - List nextCandidateParamNames = JpaParamUtil - .resolveComponentParameters(mySearchParamRegistry, nextCandidate) - .stream() - .map(t -> t.getName()) - .collect(Collectors.toList()); + List nextCandidateParamNames = + JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, nextCandidate).stream() + .map(t -> t.getName()) + .collect(Collectors.toList()); if (theParams.keySet().containsAll(nextCandidateParamNames)) { comboParam = nextCandidate; comboParamNames = nextCandidateParamNames; @@ -1563,7 +1729,11 @@ public class SearchBuilder implements ISearchBuilder { // TODO Hack to fix weird IOOB on the next stanza until James comes back and makes sense of this. if (nextValues.isEmpty()) { - ourLog.error("query parameter {} is unexpectedly empty. Encountered while considering {} index for {}", nextParamName, comboParam.getName(), theRequest.getCompleteUrl()); + ourLog.error( + "query parameter {} is unexpectedly empty. 
Encountered while considering {} index for {}", + nextParamName, + comboParam.getName(), + theRequest.getCompleteUrl()); sb = null; break; } @@ -1575,7 +1745,8 @@ public class SearchBuilder implements ISearchBuilder { // Reference params are only eligible for using a composite index if they // are qualified - RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam(myResourceName, nextParamName); + RuntimeSearchParam nextParamDef = + mySearchParamRegistry.getActiveSearchParam(myResourceName, nextParamName); if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.REFERENCE) { ReferenceParam param = (ReferenceParam) nextValues.get(0).get(0); if (isBlank(param.getResourceType())) { @@ -1604,21 +1775,23 @@ public class SearchBuilder implements ISearchBuilder { nextOrValue = UrlUtil.escapeUrlParam(nextOrValue); sb.append(nextParamName).append('=').append(nextOrValue); - } if (sb != null) { String indexString = sb.toString(); - ourLog.debug("Checking for {} combo index for query: {}", comboParam.getComboSearchParamType(), indexString); + ourLog.debug( + "Checking for {} combo index for query: {}", comboParam.getComboSearchParamType(), indexString); // Interceptor broadcast: JPA_PERFTRACE_INFO StorageProcessingMessage msg = new StorageProcessingMessage() - .setMessage("Using " + comboParam.getComboSearchParamType() + " index for query for search: " + indexString); + .setMessage("Using " + comboParam.getComboSearchParamType() + " index for query for search: " + + indexString); HookParams params = new HookParams() - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest) - .add(StorageProcessingMessage.class, msg); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.JPA_PERFTRACE_INFO, params); + .add(RequestDetails.class, theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest) + .add(StorageProcessingMessage.class, msg); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, theRequest, Pointcut.JPA_PERFTRACE_INFO, params); switch (comboParam.getComboSearchParamType()) { case UNIQUE: @@ -1703,7 +1876,16 @@ public class SearchBuilder implements ISearchBuilder { if (includes.isEmpty()) { includes.add(new Include("*", true)); } - Set newPids = loadIncludes(myContext, myEntityManager, myCurrentPids, includes, false, getParams().getLastUpdated(), mySearchUuid, myRequest, null); + Set newPids = loadIncludes( + myContext, + myEntityManager, + myCurrentPids, + includes, + false, + getParams().getLastUpdated(), + mySearchUuid, + myRequest, + null); myCurrentIterator = newPids.iterator(); } @@ -1712,7 +1894,6 @@ public class SearchBuilder implements ISearchBuilder { } else { myNext = NO_MORE; } - } } @@ -1729,7 +1910,6 @@ public class SearchBuilder implements ISearchBuilder { myNext = null; return retVal; } - } /** @@ -1763,9 +1943,10 @@ public class SearchBuilder implements ISearchBuilder { myFetchIncludesForEverythingOperation = true; } - myHavePerfTraceFoundIdHook = CompositeInterceptorBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID, myInterceptorBroadcaster, myRequest); - myHaveRawSqlHooks = CompositeInterceptorBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL, myInterceptorBroadcaster, myRequest); - + myHavePerfTraceFoundIdHook = CompositeInterceptorBroadcaster.hasHooks( + Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID, myInterceptorBroadcaster, myRequest); + myHaveRawSqlHooks = CompositeInterceptorBroadcaster.hasHooks( + 
Pointcut.JPA_PERFTRACE_RAW_SQL, myInterceptorBroadcaster, myRequest); } private void fetchNext() { @@ -1794,9 +1975,9 @@ public class SearchBuilder implements ISearchBuilder { } } - if (myNext == null) { - for (Iterator myPreResultsIterator = myAlsoIncludePids.iterator(); myPreResultsIterator.hasNext(); ) { + for (Iterator myPreResultsIterator = myAlsoIncludePids.iterator(); + myPreResultsIterator.hasNext(); ) { JpaPid next = myPreResultsIterator.next(); if (next != null) if (myPidSet.add(next)) { @@ -1815,9 +1996,13 @@ public class SearchBuilder implements ISearchBuilder { Long nextLong = myResultsIterator.next(); if (myHavePerfTraceFoundIdHook) { HookParams params = new HookParams() - .add(Integer.class, System.identityHashCode(this)) - .add(Object.class, nextLong); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID, params); + .add(Integer.class, System.identityHashCode(this)) + .add(Object.class, nextLong); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, + myRequest, + Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID, + params); } if (nextLong != null) { @@ -1832,18 +2017,26 @@ public class SearchBuilder implements ISearchBuilder { } if (!myResultsIterator.hasNext()) { - if (myMaxResultsToFetch != null && (mySkipCount + myNonSkipCount == myMaxResultsToFetch)) { + if (myMaxResultsToFetch != null + && (mySkipCount + myNonSkipCount == myMaxResultsToFetch)) { if (mySkipCount > 0 && myNonSkipCount == 0) { StorageProcessingMessage message = new StorageProcessingMessage(); - String msg = "Pass completed with no matching results seeking rows " + myPidSet.size() + "-" + mySkipCount + ". This indicates an inefficient query! Retrying with new max count of " + myMaxResultsToFetch; + String msg = "Pass completed with no matching results seeking rows " + + myPidSet.size() + "-" + mySkipCount + + ". This indicates an inefficient query! 
Retrying with new max count of " + + myMaxResultsToFetch; ourLog.warn(msg); message.setMessage(msg); HookParams params = new HookParams() - .add(RequestDetails.class, myRequest) - .addIfMatchesType(ServletRequestDetails.class, myRequest) - .add(StorageProcessingMessage.class, message); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_WARNING, params); + .add(RequestDetails.class, myRequest) + .addIfMatchesType(ServletRequestDetails.class, myRequest) + .add(StorageProcessingMessage.class, message); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, + myRequest, + Pointcut.JPA_PERFTRACE_WARNING, + params); myMaxResultsToFetch += 1000; initializeIteratorQuery(myOffset, myMaxResultsToFetch); @@ -1876,7 +2069,6 @@ public class SearchBuilder implements ISearchBuilder { myNext = NO_MORE; } } - } // if we need to fetch the next result mySearchRuntimeDetails.setFoundMatchesCount(myPidSet.size()); @@ -1890,28 +2082,30 @@ public class SearchBuilder implements ISearchBuilder { if (myFirst) { HookParams params = new HookParams() - .add(RequestDetails.class, myRequest) - .addIfMatchesType(ServletRequestDetails.class, myRequest) - .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_SEARCH_FIRST_RESULT_LOADED, params); + .add(RequestDetails.class, myRequest) + .addIfMatchesType(ServletRequestDetails.class, myRequest) + .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_SEARCH_FIRST_RESULT_LOADED, params); myFirst = false; } if (NO_MORE.equals(myNext)) { HookParams params = new HookParams() - .add(RequestDetails.class, myRequest) - .addIfMatchesType(ServletRequestDetails.class, myRequest) - .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_SEARCH_SELECT_COMPLETE, params); + .add(RequestDetails.class, myRequest) + .addIfMatchesType(ServletRequestDetails.class, myRequest) + .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_SEARCH_SELECT_COMPLETE, params); } - } private void initializeIteratorQuery(Integer theOffset, Integer theMaxResultsToFetch) { if (myQueryList.isEmpty()) { // Capture times for Lucene/Elasticsearch queries as well mySearchRuntimeDetails.setQueryStopwatch(new StopWatch()); - myQueryList = createQuery(myParams, mySort, theOffset, theMaxResultsToFetch, false, myRequest, mySearchRuntimeDetails); + myQueryList = createQuery( + myParams, mySort, theOffset, theMaxResultsToFetch, false, myRequest, mySearchRuntimeDetails); } mySearchRuntimeDetails.setQueryStopwatch(new StopWatch()); @@ -1931,7 +2125,6 @@ public class SearchBuilder implements ISearchBuilder { myResultsIterator = SearchQueryExecutor.emptyExecutor(); myHasNextIteratorQuery = false; } - } @Override @@ -1977,7 +2170,6 @@ public class SearchBuilder implements ISearchBuilder { } myResultsIterator = null; } - } public static int getMaximumPageSize() { @@ -1991,5 +2183,4 @@ public class SearchBuilder implements ISearchBuilder { public static void setMaxPageSize50ForTest(boolean theIsTest) { myUseMaxPageSize50ForTest = theIsTest; } - } diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchQueryExecutors.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchQueryExecutors.java index 513ae1f26c0..f099ac9d668 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchQueryExecutors.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchQueryExecutors.java @@ -22,9 +22,9 @@ package ca.uhn.fhir.jpa.search.builder; import ca.uhn.fhir.jpa.model.dao.JpaPid; import org.apache.commons.lang3.Validate; -import javax.annotation.Nonnull; import java.util.Iterator; import java.util.List; +import javax.annotation.Nonnull; public class SearchQueryExecutors { @@ -92,11 +92,11 @@ public class SearchQueryExecutors { } } - static public ISearchQueryExecutor from(Iterator theIterator) { + public static ISearchQueryExecutor from(Iterator theIterator) { return new JpaPidQueryAdaptor(theIterator); } - static public ISearchQueryExecutor from(Iterable theIterable) { + public static ISearchQueryExecutor from(Iterable theIterable) { return new JpaPidQueryAdaptor(theIterable.iterator()); } @@ -108,8 +108,7 @@ public class SearchQueryExecutors { } @Override - public void close() { - } + public void close() {} @Override public boolean hasNext() { @@ -121,6 +120,5 @@ public class SearchQueryExecutors { JpaPid next = myIterator.next(); return next == null ? null : next.getId(); } - } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/StorageInterceptorHooksFacade.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/StorageInterceptorHooksFacade.java index bbfbc879c40..a6073e14e6e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/StorageInterceptorHooksFacade.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/StorageInterceptorHooksFacade.java @@ -35,6 +35,7 @@ import ca.uhn.fhir.rest.server.util.ICachedSearchDetails; */ public class StorageInterceptorHooksFacade { private final IInterceptorBroadcaster myInterceptorBroadcaster; + public StorageInterceptorHooksFacade(IInterceptorBroadcaster theInterceptorBroadcaster) { myInterceptorBroadcaster = theInterceptorBroadcaster; } @@ -47,14 +48,19 @@ public class StorageInterceptorHooksFacade { * @param search * @param theRequestPartitionId */ - public void callStoragePresearchRegistered(RequestDetails theRequestDetails, SearchParameterMap theParams, Search search, RequestPartitionId theRequestPartitionId) { + public void callStoragePresearchRegistered( + RequestDetails theRequestDetails, + SearchParameterMap theParams, + Search search, + RequestPartitionId theRequestPartitionId) { HookParams params = new HookParams() - .add(ICachedSearchDetails.class, search) - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - .add(SearchParameterMap.class, theParams) - .add(RequestPartitionId.class, theRequestPartitionId); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PRESEARCH_REGISTERED, params); + .add(ICachedSearchDetails.class, search) + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(SearchParameterMap.class, theParams) + .add(RequestPartitionId.class, theRequestPartitionId); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, theRequestDetails, 
Pointcut.STORAGE_PRESEARCH_REGISTERED, params); } - //private IInterceptorBroadcaster myInterceptorBroadcaster; + // private IInterceptorBroadcaster myInterceptorBroadcaster; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/MissingParameterQueryParams.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/MissingParameterQueryParams.java index ae7cce2c1c6..606c8483f53 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/MissingParameterQueryParams.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/MissingParameterQueryParams.java @@ -71,21 +71,19 @@ public class MissingParameterQueryParams { private final RequestPartitionId myRequestPartitionId; public MissingParameterQueryParams( - SearchQueryBuilder theSqlBuilder, - RestSearchParameterTypeEnum theParamType, - List theList, - String theParamName, - String theResourceType, - DbColumn theSourceJoinColumn, - RequestPartitionId theRequestPartitionId - ) { + SearchQueryBuilder theSqlBuilder, + RestSearchParameterTypeEnum theParamType, + List theList, + String theParamName, + String theResourceType, + DbColumn theSourceJoinColumn, + RequestPartitionId theRequestPartitionId) { mySqlBuilder = theSqlBuilder; myParamType = theParamType; myQueryParameterTypes = theList; if (theList.isEmpty()) { // this will never happen - throw new InvalidParameterException(Msg.code(2140) - + " Invalid search parameter list. Cannot be empty!"); + throw new InvalidParameterException(Msg.code(2140) + " Invalid search parameter list. Cannot be empty!"); } myIsMissing = theList.get(0).getMissing(); myParamName = theParamName; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/MissingQueryParameterPredicateParams.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/MissingQueryParameterPredicateParams.java index 9c9edde05f6..56bf4537ea9 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/MissingQueryParameterPredicateParams.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/MissingQueryParameterPredicateParams.java @@ -42,10 +42,11 @@ public class MissingQueryParameterPredicateParams { */ private RequestPartitionId myRequestPartitionId; - public MissingQueryParameterPredicateParams(ca.uhn.fhir.jpa.search.builder.predicate.ResourceTablePredicateBuilder theResourceTablePredicateBuilder, - boolean theTheMissing, - String theParamName, - ca.uhn.fhir.interceptor.model.RequestPartitionId theRequestPartitionId) { + public MissingQueryParameterPredicateParams( + ca.uhn.fhir.jpa.search.builder.predicate.ResourceTablePredicateBuilder theResourceTablePredicateBuilder, + boolean theTheMissing, + String theParamName, + ca.uhn.fhir.interceptor.model.RequestPartitionId theRequestPartitionId) { myResourceTablePredicateBuilder = theResourceTablePredicateBuilder; myIsMissing = theTheMissing; myParamName = theParamName; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/PredicateBuilderCacheKey.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/PredicateBuilderCacheKey.java index 00b2ea5ddcb..2213a7d4cf7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/PredicateBuilderCacheKey.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/PredicateBuilderCacheKey.java 
@@ -33,7 +33,11 @@ public class PredicateBuilderCacheKey { myDbColumn = theDbColumn; myType = theType; myParamName = theParamName; - myHashCode = new HashCodeBuilder().append(myDbColumn).append(myType).append(myParamName).toHashCode(); + myHashCode = new HashCodeBuilder() + .append(myDbColumn) + .append(myType) + .append(myParamName) + .toHashCode(); } @Override @@ -49,10 +53,10 @@ public class PredicateBuilderCacheKey { PredicateBuilderCacheKey that = (PredicateBuilderCacheKey) theO; return new EqualsBuilder() - .append(myDbColumn, that.myDbColumn) - .append(myType, that.myType) - .append(myParamName, that.myParamName) - .isEquals(); + .append(myDbColumn, that.myDbColumn) + .append(myType, that.myType) + .append(myParamName, that.myParamName) + .isEquals(); } @Override diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/PredicateBuilderTypeEnum.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/PredicateBuilderTypeEnum.java index 901f6f5733c..fa15476ecb1 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/PredicateBuilderTypeEnum.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/models/PredicateBuilderTypeEnum.java @@ -20,5 +20,13 @@ package ca.uhn.fhir.jpa.search.builder.models; public enum PredicateBuilderTypeEnum { - DATE, COORDS, NUMBER, QUANTITY, REFERENCE, SOURCE, STRING, TOKEN, TAG + DATE, + COORDS, + NUMBER, + QUANTITY, + REFERENCE, + SOURCE, + STRING, + TOKEN, + TAG } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/BaseJoiningPredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/BaseJoiningPredicateBuilder.java index 30cd55920f5..0c34500bba3 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/BaseJoiningPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/BaseJoiningPredicateBuilder.java @@ -21,8 +21,8 @@ package ca.uhn.fhir.jpa.search.builder.predicate; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.model.config.PartitionSettings; -import ca.uhn.fhir.jpa.util.QueryParameterUtils; import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; +import ca.uhn.fhir.jpa.util.QueryParameterUtils; import com.healthmarketscience.sqlbuilder.Condition; import com.healthmarketscience.sqlbuilder.NotCondition; import com.healthmarketscience.sqlbuilder.UnaryCondition; @@ -30,13 +30,9 @@ import com.healthmarketscience.sqlbuilder.dbspec.basic.DbColumn; import com.healthmarketscience.sqlbuilder.dbspec.basic.DbTable; import org.apache.commons.lang3.Validate; -import javax.annotation.Nullable; import java.util.List; import java.util.stream.Collectors; - -import static ca.uhn.fhir.jpa.util.QueryParameterUtils.toAndPredicate; -import static ca.uhn.fhir.jpa.util.QueryParameterUtils.toEqualToOrInPredicate; -import static ca.uhn.fhir.jpa.util.QueryParameterUtils.toOrPredicate; +import javax.annotation.Nullable; public abstract class BaseJoiningPredicateBuilder extends BasePredicateBuilder { @@ -59,7 +55,8 @@ public abstract class BaseJoiningPredicateBuilder extends BasePredicateBuilder { return myColumnPartitionId; } - public Condition combineWithRequestPartitionIdPredicate(RequestPartitionId theRequestPartitionId, Condition theCondition) { + public Condition combineWithRequestPartitionIdPredicate( + RequestPartitionId theRequestPartitionId, 
Condition theCondition) { Condition partitionIdPredicate = createPartitionIdPredicate(theRequestPartitionId); if (partitionIdPredicate == null) { return theCondition; @@ -67,11 +64,9 @@ public abstract class BaseJoiningPredicateBuilder extends BasePredicateBuilder { return QueryParameterUtils.toAndPredicate(partitionIdPredicate, theCondition); } - @Nullable public Condition createPartitionIdPredicate(RequestPartitionId theRequestPartitionId) { - if (theRequestPartitionId != null && !theRequestPartitionId.isAllPartitions()) { Condition condition; @@ -81,7 +76,8 @@ public abstract class BaseJoiningPredicateBuilder extends BasePredicateBuilder { } else if (theRequestPartitionId.hasDefaultPartitionId() && defaultPartitionIsNull) { List placeholders = generatePlaceholders(theRequestPartitionId.getPartitionIdsWithoutDefault()); UnaryCondition partitionNullPredicate = UnaryCondition.isNull(getPartitionIdColumn()); - Condition partitionIdsPredicate = QueryParameterUtils.toEqualToOrInPredicate(getPartitionIdColumn(), placeholders); + Condition partitionIdsPredicate = + QueryParameterUtils.toEqualToOrInPredicate(getPartitionIdColumn(), placeholders); condition = QueryParameterUtils.toOrPredicate(partitionNullPredicate, partitionIdsPredicate); } else { List partitionIds = theRequestPartitionId.getPartitionIds(); @@ -100,24 +96,22 @@ public abstract class BaseJoiningPredicateBuilder extends BasePredicateBuilder { Validate.notNull(theResourceIds, "theResourceIds must not be null"); // Handle the _id parameter by adding it to the tail - Condition inResourceIds = QueryParameterUtils.toEqualToOrInPredicate(getResourceIdColumn(), generatePlaceholders(theResourceIds)); + Condition inResourceIds = + QueryParameterUtils.toEqualToOrInPredicate(getResourceIdColumn(), generatePlaceholders(theResourceIds)); if (theInverse) { inResourceIds = new NotCondition(inResourceIds); } return inResourceIds; - } - public static List replaceDefaultPartitionIdIfNonNull(PartitionSettings thePartitionSettings, List thePartitionIds) { + public static List replaceDefaultPartitionIdIfNonNull( + PartitionSettings thePartitionSettings, List thePartitionIds) { List partitionIds = thePartitionIds; if (thePartitionSettings.getDefaultPartitionId() != null) { - partitionIds = partitionIds - .stream() - .map(t -> t == null ? thePartitionSettings.getDefaultPartitionId() : t) - .collect(Collectors.toList()); + partitionIds = partitionIds.stream() + .map(t -> t == null ? 
thePartitionSettings.getDefaultPartitionId() : t) + .collect(Collectors.toList()); } return partitionIds; } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/BasePredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/BasePredicateBuilder.java index 2160ecf2d53..e168c357031 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/BasePredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/BasePredicateBuilder.java @@ -29,9 +29,9 @@ import com.healthmarketscience.sqlbuilder.BinaryCondition; import com.healthmarketscience.sqlbuilder.dbspec.basic.DbColumn; import com.healthmarketscience.sqlbuilder.dbspec.basic.DbTable; -import javax.annotation.Nonnull; import java.util.Collection; import java.util.List; +import javax.annotation.Nonnull; public abstract class BasePredicateBuilder { @@ -41,7 +41,6 @@ public abstract class BasePredicateBuilder { mySearchSqlBuilder = theSearchSqlBuilder; } - PartitionSettings getPartitionSettings() { return mySearchSqlBuilder.getPartitionSettings(); } @@ -76,8 +75,8 @@ public abstract class BasePredicateBuilder { mySearchSqlBuilder.setMatchNothing(); } - - protected BinaryCondition createConditionForValueWithComparator(ParamPrefixEnum theComparator, DbColumn theColumn, Object theValue) { + protected BinaryCondition createConditionForValueWithComparator( + ParamPrefixEnum theComparator, DbColumn theColumn, Object theValue) { return mySearchSqlBuilder.createConditionForValueWithComparator(theComparator, theColumn, theValue); } @@ -88,5 +87,4 @@ public abstract class BasePredicateBuilder { public void addJoin(DbTable theFromTable, DbTable theToTable, DbColumn theFromColumn, DbColumn theToColumn) { mySearchSqlBuilder.addJoin(theFromTable, theToTable, theFromColumn, theToColumn); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/BaseQuantityPredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/BaseQuantityPredicateBuilder.java index 07b5a7f7127..78b7f115ab6 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/BaseQuantityPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/BaseQuantityPredicateBuilder.java @@ -24,8 +24,8 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser; import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam; import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParamQuantity; -import ca.uhn.fhir.jpa.util.QueryParameterUtils; import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; +import ca.uhn.fhir.jpa.util.QueryParameterUtils; import ca.uhn.fhir.rest.param.ParamPrefixEnum; import ca.uhn.fhir.rest.param.QuantityParam; import com.healthmarketscience.sqlbuilder.BinaryCondition; @@ -35,13 +35,12 @@ import com.healthmarketscience.sqlbuilder.dbspec.basic.DbColumn; import com.healthmarketscience.sqlbuilder.dbspec.basic.DbTable; import org.springframework.beans.factory.annotation.Autowired; -import javax.persistence.criteria.CriteriaBuilder; import java.math.BigDecimal; +import javax.persistence.criteria.CriteriaBuilder; import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; import static org.apache.commons.lang3.StringUtils.isBlank; - public abstract class 
BaseQuantityPredicateBuilder extends BaseSearchParamPredicateBuilder { protected DbColumn myColumnHashIdentitySystemUnits; @@ -58,7 +57,14 @@ public abstract class BaseQuantityPredicateBuilder extends BaseSearchParamPredic super(theSearchSqlBuilder, theTable); } - public Condition createPredicateQuantity(QuantityParam theParam, String theResourceName, String theParamName, CriteriaBuilder theBuilder, BaseQuantityPredicateBuilder theFrom, SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId) { + public Condition createPredicateQuantity( + QuantityParam theParam, + String theResourceName, + String theParamName, + CriteriaBuilder theBuilder, + BaseQuantityPredicateBuilder theFrom, + SearchFilterParser.CompareOperation theOperation, + RequestPartitionId theRequestPartitionId) { String systemValue = theParam.getSystem(); String unitsValue = theParam.getUnits(); @@ -67,13 +73,21 @@ public abstract class BaseQuantityPredicateBuilder extends BaseSearchParamPredic Condition hashPredicate; if (!isBlank(systemValue) && !isBlank(unitsValue)) { - long hash = BaseResourceIndexedSearchParamQuantity.calculateHashSystemAndUnits(getPartitionSettings(), theRequestPartitionId, theResourceName, theParamName, systemValue, unitsValue); + long hash = BaseResourceIndexedSearchParamQuantity.calculateHashSystemAndUnits( + getPartitionSettings(), + theRequestPartitionId, + theResourceName, + theParamName, + systemValue, + unitsValue); hashPredicate = BinaryCondition.equalTo(myColumnHashIdentitySystemUnits, generatePlaceholder(hash)); } else if (!isBlank(unitsValue)) { - long hash = BaseResourceIndexedSearchParamQuantity.calculateHashUnits(getPartitionSettings(), theRequestPartitionId, theResourceName, theParamName, unitsValue); + long hash = BaseResourceIndexedSearchParamQuantity.calculateHashUnits( + getPartitionSettings(), theRequestPartitionId, theResourceName, theParamName, unitsValue); hashPredicate = BinaryCondition.equalTo(myColumnHashIdentityUnits, generatePlaceholder(hash)); } else { - long hash = BaseResourceIndexedSearchParam.calculateHashIdentity(getPartitionSettings(), theRequestPartitionId, theResourceName, theParamName); + long hash = BaseResourceIndexedSearchParam.calculateHashIdentity( + getPartitionSettings(), theRequestPartitionId, theResourceName, theParamName); hashPredicate = BinaryCondition.equalTo(getColumnHashIdentity(), generatePlaceholder(hash)); } @@ -82,7 +96,8 @@ public abstract class BaseQuantityPredicateBuilder extends BaseSearchParamPredic operation = QueryParameterUtils.toOperation(cmpValue); } operation = defaultIfNull(operation, SearchFilterParser.CompareOperation.eq); - Condition numericPredicate = NumberPredicateBuilder.createPredicateNumeric(this, operation, valueValue, myColumnValue, "invalidQuantityPrefix", myFhirContext, theParam); + Condition numericPredicate = NumberPredicateBuilder.createPredicateNumeric( + this, operation, valueValue, myColumnValue, "invalidQuantityPrefix", myFhirContext, theParam); return ComboCondition.and(hashPredicate, numericPredicate); } @@ -90,5 +105,4 @@ public abstract class BaseQuantityPredicateBuilder extends BaseSearchParamPredic public DbColumn getColumnValue() { return myColumnValue; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/BaseSearchParamPredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/BaseSearchParamPredicateBuilder.java index 1058c0051fc..74427f68a91 100644 --- 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/BaseSearchParamPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/BaseSearchParamPredicateBuilder.java @@ -21,9 +21,9 @@ package ca.uhn.fhir.jpa.search.builder.predicate; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam; -import ca.uhn.fhir.jpa.util.QueryParameterUtils; import ca.uhn.fhir.jpa.search.builder.models.MissingQueryParameterPredicateParams; import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; +import ca.uhn.fhir.jpa.util.QueryParameterUtils; import com.healthmarketscience.sqlbuilder.BinaryCondition; import com.healthmarketscience.sqlbuilder.ComboCondition; import com.healthmarketscience.sqlbuilder.Condition; @@ -33,15 +33,12 @@ import com.healthmarketscience.sqlbuilder.UnaryCondition; import com.healthmarketscience.sqlbuilder.dbspec.basic.DbColumn; import com.healthmarketscience.sqlbuilder.dbspec.basic.DbTable; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nonnull; -import static ca.uhn.fhir.jpa.util.QueryParameterUtils.toAndPredicate; - -public abstract class BaseSearchParamPredicateBuilder - extends BaseJoiningPredicateBuilder - implements ICanMakeMissingParamPredicate { +public abstract class BaseSearchParamPredicateBuilder extends BaseJoiningPredicateBuilder + implements ICanMakeMissingParamPredicate { private final DbColumn myColumnMissing; private final DbColumn myColumnResType; @@ -80,7 +77,8 @@ public abstract class BaseSearchParamPredicateBuilder return myColumnResId; } - public Condition combineWithHashIdentityPredicate(String theResourceName, String theParamName, Condition thePredicate) { + public Condition combineWithHashIdentityPredicate( + String theResourceName, String theParamName, Condition thePredicate) { List andPredicates = new ArrayList<>(); Condition hashIdentityPredicate = createHashIdentityPredicate(theResourceName, theParamName); @@ -92,21 +90,21 @@ public abstract class BaseSearchParamPredicateBuilder @Nonnull public Condition createHashIdentityPredicate(String theResourceType, String theParamName) { - long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity(getPartitionSettings(), getRequestPartitionId(), theResourceType, theParamName); + long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity( + getPartitionSettings(), getRequestPartitionId(), theResourceType, theParamName); String hashIdentityVal = generatePlaceholder(hashIdentity); return BinaryCondition.equalTo(myColumnHashIdentity, hashIdentityVal); } - public Condition createPredicateParamMissingForNonReference(String theResourceName, String theParamName, Boolean theMissing, RequestPartitionId theRequestPartitionId) { + public Condition createPredicateParamMissingForNonReference( + String theResourceName, String theParamName, Boolean theMissing, RequestPartitionId theRequestPartitionId) { ComboCondition condition = ComboCondition.and( - BinaryCondition.equalTo(getResourceTypeColumn(), generatePlaceholder(theResourceName)), - BinaryCondition.equalTo(getColumnParamName(), generatePlaceholder(theParamName)), - BinaryCondition.equalTo(getMissingColumn(), generatePlaceholder(theMissing)) - ); + BinaryCondition.equalTo(getResourceTypeColumn(), generatePlaceholder(theResourceName)), + BinaryCondition.equalTo(getColumnParamName(), generatePlaceholder(theParamName)), + 
BinaryCondition.equalTo(getMissingColumn(), generatePlaceholder(theMissing))); return combineWithRequestPartitionIdPredicate(theRequestPartitionId, condition); } - @Override public Condition createPredicateParamMissingValue(MissingQueryParameterPredicateParams theParams) { SelectQuery subquery = new SelectQuery(); @@ -114,19 +112,16 @@ public abstract class BaseSearchParamPredicateBuilder subquery.addFromTable(getTable()); long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity( - getPartitionSettings(), - theParams.getRequestPartitionId(), - theParams.getResourceTablePredicateBuilder().getResourceType(), - theParams.getParamName() - ); + getPartitionSettings(), + theParams.getRequestPartitionId(), + theParams.getResourceTablePredicateBuilder().getResourceType(), + theParams.getParamName()); Condition subQueryCondition = ComboCondition.and( - BinaryCondition.equalTo(getResourceIdColumn(), - theParams.getResourceTablePredicateBuilder().getResourceIdColumn() - ), - BinaryCondition.equalTo(getColumnHashIdentity(), - generatePlaceholder(hashIdentity)) - ); + BinaryCondition.equalTo( + getResourceIdColumn(), + theParams.getResourceTablePredicateBuilder().getResourceIdColumn()), + BinaryCondition.equalTo(getColumnHashIdentity(), generatePlaceholder(hashIdentity))); subquery.addCondition(subQueryCondition); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ComboNonUniqueSearchParameterPredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ComboNonUniqueSearchParameterPredicateBuilder.java index 42c4f050314..5babd6b0a99 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ComboNonUniqueSearchParameterPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ComboNonUniqueSearchParameterPredicateBuilder.java @@ -38,7 +38,6 @@ public class ComboNonUniqueSearchParameterPredicateBuilder extends BaseSearchPar myColumnIndexString = getTable().addColumn("IDX_STRING"); } - public Condition createPredicateHashComplete(RequestPartitionId theRequestPartitionId, String theIndexString) { BinaryCondition predicate = BinaryCondition.equalTo(myColumnIndexString, generatePlaceholder(theIndexString)); return combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ComboUniqueSearchParameterPredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ComboUniqueSearchParameterPredicateBuilder.java index 975ec4edd75..4a64b66186a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ComboUniqueSearchParameterPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ComboUniqueSearchParameterPredicateBuilder.java @@ -38,7 +38,6 @@ public class ComboUniqueSearchParameterPredicateBuilder extends BaseSearchParamP myColumnString = getTable().addColumn("IDX_STRING"); } - public Condition createPredicateIndexString(RequestPartitionId theRequestPartitionId, String theIndexString) { BinaryCondition predicate = BinaryCondition.equalTo(myColumnString, generatePlaceholder(theIndexString)); return combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/CoordsPredicateBuilder.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/CoordsPredicateBuilder.java index 5c65ac9f68f..18da6a53ed0 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/CoordsPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/CoordsPredicateBuilder.java @@ -56,12 +56,13 @@ public class CoordsPredicateBuilder extends BaseSearchParamPredicateBuilder { return myColumnLongitude; } - public Condition createPredicateCoords(SearchParameterMap theParams, - IQueryParameterType theParam, - String theResourceName, - RuntimeSearchParam theSearchParam, - CoordsPredicateBuilder theFrom, - RequestPartitionId theRequestPartitionId) { + public Condition createPredicateCoords( + SearchParameterMap theParams, + IQueryParameterType theParam, + String theResourceName, + RuntimeSearchParam theSearchParam, + CoordsPredicateBuilder theFrom, + RequestPartitionId theRequestPartitionId) { ParsedLocationParam params = ParsedLocationParam.from(theParams, theParam); double distanceKm = params.getDistanceKm(); @@ -74,9 +75,11 @@ public class CoordsPredicateBuilder extends BaseSearchParamPredicateBuilder { latitudePredicate = theFrom.createPredicateLatitudeExact(latitudeValue); longitudePredicate = theFrom.createPredicateLongitudeExact(longitudeValue); } else if (distanceKm < 0.0) { - throw new IllegalArgumentException(Msg.code(1233) + "Invalid " + Location.SP_NEAR_DISTANCE + " parameter '" + distanceKm + "' must be >= 0.0"); + throw new IllegalArgumentException(Msg.code(1233) + "Invalid " + Location.SP_NEAR_DISTANCE + " parameter '" + + distanceKm + "' must be >= 0.0"); } else if (distanceKm > CoordCalculator.MAX_SUPPORTED_DISTANCE_KM) { - throw new IllegalArgumentException(Msg.code(1234) + "Invalid " + Location.SP_NEAR_DISTANCE + " parameter '" + distanceKm + "' must be <= " + CoordCalculator.MAX_SUPPORTED_DISTANCE_KM); + throw new IllegalArgumentException(Msg.code(1234) + "Invalid " + Location.SP_NEAR_DISTANCE + " parameter '" + + distanceKm + "' must be <= " + CoordCalculator.MAX_SUPPORTED_DISTANCE_KM); } else { GeoBoundingBox box = CoordCalculator.getBox(latitudeValue, longitudeValue, distanceKm); latitudePredicate = theFrom.createLatitudePredicateFromBox(box); @@ -86,7 +89,6 @@ public class CoordsPredicateBuilder extends BaseSearchParamPredicateBuilder { return combineWithHashIdentityPredicate(theResourceName, theSearchParam.getName(), singleCode); } - public Condition createPredicateLatitudeExact(double theLatitudeValue) { return BinaryCondition.equalTo(myColumnLatitude, generatePlaceholder(theLatitudeValue)); } @@ -97,22 +99,30 @@ public class CoordsPredicateBuilder extends BaseSearchParamPredicateBuilder { public Condition createLatitudePredicateFromBox(GeoBoundingBox theBox) { return ComboCondition.and( - BinaryCondition.greaterThanOrEq(myColumnLatitude, generatePlaceholder(theBox.bottomRight().latitude())), - BinaryCondition.lessThanOrEq(myColumnLatitude, generatePlaceholder(theBox.topLeft().latitude())) - ); + BinaryCondition.greaterThanOrEq( + myColumnLatitude, + generatePlaceholder(theBox.bottomRight().latitude())), + BinaryCondition.lessThanOrEq( + myColumnLatitude, generatePlaceholder(theBox.topLeft().latitude()))); } public Condition createLongitudePredicateFromBox(GeoBoundingBox theBox) { if (theBox.bottomRight().longitude() < theBox.topLeft().longitude()) { return ComboCondition.or( - BinaryCondition.greaterThanOrEq(myColumnLongitude, 
generatePlaceholder(theBox.bottomRight().longitude())), - BinaryCondition.lessThanOrEq(myColumnLongitude, generatePlaceholder(theBox.topLeft().longitude())) - ); + BinaryCondition.greaterThanOrEq( + myColumnLongitude, + generatePlaceholder(theBox.bottomRight().longitude())), + BinaryCondition.lessThanOrEq( + myColumnLongitude, + generatePlaceholder(theBox.topLeft().longitude()))); } else { return ComboCondition.and( - BinaryCondition.greaterThanOrEq(myColumnLongitude, generatePlaceholder(theBox.topLeft().longitude())), - BinaryCondition.lessThanOrEq(myColumnLongitude, generatePlaceholder(theBox.bottomRight().longitude())) - ); + BinaryCondition.greaterThanOrEq( + myColumnLongitude, + generatePlaceholder(theBox.topLeft().longitude())), + BinaryCondition.lessThanOrEq( + myColumnLongitude, + generatePlaceholder(theBox.bottomRight().longitude()))); } } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/DatePredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/DatePredicateBuilder.java index 8687f11ca19..d3b359524d0 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/DatePredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/DatePredicateBuilder.java @@ -47,6 +47,7 @@ public class DatePredicateBuilder extends BaseSearchParamPredicateBuilder { private final DbColumn myColumnValueLow; private final DbColumn myColumnValueLowDateOrdinal; private final DbColumn myColumnValueHighDateOrdinal; + @Autowired private JpaStorageSettings myStorageSettings; @@ -67,8 +68,8 @@ public class DatePredicateBuilder extends BaseSearchParamPredicateBuilder { myStorageSettings = theStorageSettings; } - public Condition createPredicateDateWithoutIdentityPredicate(IQueryParameterType theParam, - SearchFilterParser.CompareOperation theOperation) { + public Condition createPredicateDateWithoutIdentityPredicate( + IQueryParameterType theParam, SearchFilterParser.CompareOperation theOperation) { Condition p; if (theParam instanceof DateParam) { @@ -93,9 +94,8 @@ public class DatePredicateBuilder extends BaseSearchParamPredicateBuilder { return p; } - private Condition createPredicateDateFromRange(DateRangeParam theRange, - SearchFilterParser.CompareOperation theOperation) { - + private Condition createPredicateDateFromRange( + DateRangeParam theRange, SearchFilterParser.CompareOperation theOperation) { Date lowerBoundInstant = theRange.getLowerBoundAsInstant(); Date upperBoundInstant = theRange.getUpperBoundAsInstant(); @@ -111,7 +111,9 @@ public class DatePredicateBuilder extends BaseSearchParamPredicateBuilder { * If all present search parameters are of DAY precision, and {@link ca.uhn.fhir.jpa.model.entity.StorageSettings#getUseOrdinalDatesForDayPrecisionSearches()} is true, * then we attempt to use the ordinal field for date comparisons instead of the date field. 
*/ - boolean isOrdinalComparison = isNullOrDatePrecision(lowerBound) && isNullOrDatePrecision(upperBound) && myStorageSettings.getUseOrdinalDatesForDayPrecisionSearches(); + boolean isOrdinalComparison = isNullOrDatePrecision(lowerBound) + && isNullOrDatePrecision(upperBound) + && myStorageSettings.getUseOrdinalDatesForDayPrecisionSearches(); Condition lt; Condition gt; @@ -126,7 +128,9 @@ public class DatePredicateBuilder extends BaseSearchParamPredicateBuilder { genericLowerBound = lowerBoundAsOrdinal; genericUpperBound = upperBoundAsOrdinal; if (upperBound != null && upperBound.getPrecision().ordinal() <= TemporalPrecisionEnum.MONTH.ordinal()) { - genericUpperBound = Integer.parseInt(DateUtils.getCompletedDate(upperBound.getValueAsString()).getRight().replace("-", "")); + genericUpperBound = Integer.parseInt(DateUtils.getCompletedDate(upperBound.getValueAsString()) + .getRight() + .replace("-", "")); } } else { lowValueField = DatePredicateBuilder.ColumnEnum.LOW; @@ -134,58 +138,77 @@ public class DatePredicateBuilder extends BaseSearchParamPredicateBuilder { genericLowerBound = lowerBoundInstant; genericUpperBound = upperBoundInstant; if (upperBound != null && upperBound.getPrecision().ordinal() <= TemporalPrecisionEnum.MONTH.ordinal()) { - String theCompleteDateStr = DateUtils.getCompletedDate(upperBound.getValueAsString()).getRight().replace("-", ""); + String theCompleteDateStr = DateUtils.getCompletedDate(upperBound.getValueAsString()) + .getRight() + .replace("-", ""); genericUpperBound = DateUtils.parseDate(theCompleteDateStr); } } - if (theOperation == SearchFilterParser.CompareOperation.lt || theOperation == SearchFilterParser.CompareOperation.le) { + if (theOperation == SearchFilterParser.CompareOperation.lt + || theOperation == SearchFilterParser.CompareOperation.le) { // use lower bound first if (lowerBoundInstant != null) { lb = this.createPredicate(lowValueField, ParamPrefixEnum.LESSTHAN_OR_EQUALS, genericLowerBound); if (myStorageSettings.isAccountForDateIndexNulls()) { - lb = ComboCondition.or(lb, this.createPredicate(highValueField, ParamPrefixEnum.LESSTHAN_OR_EQUALS, genericLowerBound)); + lb = ComboCondition.or( + lb, + this.createPredicate( + highValueField, ParamPrefixEnum.LESSTHAN_OR_EQUALS, genericLowerBound)); } } else if (upperBoundInstant != null) { ub = this.createPredicate(lowValueField, ParamPrefixEnum.LESSTHAN_OR_EQUALS, genericUpperBound); if (myStorageSettings.isAccountForDateIndexNulls()) { - ub = ComboCondition.or(ub, this.createPredicate(highValueField, ParamPrefixEnum.LESSTHAN_OR_EQUALS, genericUpperBound)); + ub = ComboCondition.or( + ub, + this.createPredicate( + highValueField, ParamPrefixEnum.LESSTHAN_OR_EQUALS, genericUpperBound)); } } else { - throw new InvalidRequestException(Msg.code(1252) + "lowerBound and upperBound value not correctly specified for comparing " + theOperation); + throw new InvalidRequestException(Msg.code(1252) + + "lowerBound and upperBound value not correctly specified for comparing " + theOperation); } - } else if (theOperation == SearchFilterParser.CompareOperation.gt || theOperation == SearchFilterParser.CompareOperation.ge) { + } else if (theOperation == SearchFilterParser.CompareOperation.gt + || theOperation == SearchFilterParser.CompareOperation.ge) { // use upper bound first, e.g value between 6 and 10 if (upperBoundInstant != null) { ub = this.createPredicate(highValueField, ParamPrefixEnum.GREATERTHAN_OR_EQUALS, genericUpperBound); if (myStorageSettings.isAccountForDateIndexNulls()) { - ub = ComboCondition.or(ub, 
this.createPredicate(lowValueField, ParamPrefixEnum.GREATERTHAN_OR_EQUALS, genericUpperBound)); + ub = ComboCondition.or( + ub, + this.createPredicate( + lowValueField, ParamPrefixEnum.GREATERTHAN_OR_EQUALS, genericUpperBound)); } } else if (lowerBoundInstant != null) { lb = this.createPredicate(highValueField, ParamPrefixEnum.GREATERTHAN_OR_EQUALS, genericLowerBound); if (myStorageSettings.isAccountForDateIndexNulls()) { - lb = ComboCondition.or(lb, this.createPredicate(lowValueField, ParamPrefixEnum.GREATERTHAN_OR_EQUALS, genericLowerBound)); + lb = ComboCondition.or( + lb, + this.createPredicate( + lowValueField, ParamPrefixEnum.GREATERTHAN_OR_EQUALS, genericLowerBound)); } } else { - throw new InvalidRequestException(Msg.code(1253) + "upperBound and lowerBound value not correctly specified for compare theOperation"); + throw new InvalidRequestException(Msg.code(1253) + + "upperBound and lowerBound value not correctly specified for compare theOperation"); } } else if (theOperation == SearchFilterParser.CompareOperation.ne) { - if ((lowerBoundInstant == null) || - (upperBoundInstant == null)) { - throw new InvalidRequestException(Msg.code(1254) + "lowerBound and/or upperBound value not correctly specified for compare theOperation"); + if ((lowerBoundInstant == null) || (upperBoundInstant == null)) { + throw new InvalidRequestException(Msg.code(1254) + + "lowerBound and/or upperBound value not correctly specified for compare theOperation"); } lt = this.createPredicate(lowValueField, ParamPrefixEnum.LESSTHAN, genericLowerBound); gt = this.createPredicate(highValueField, ParamPrefixEnum.GREATERTHAN, genericUpperBound); lb = ComboCondition.or(lt, gt); } else if ((theOperation == SearchFilterParser.CompareOperation.eq) - || (theOperation == SearchFilterParser.CompareOperation.sa) - || (theOperation == SearchFilterParser.CompareOperation.eb) - || (theOperation == null)) { + || (theOperation == SearchFilterParser.CompareOperation.sa) + || (theOperation == SearchFilterParser.CompareOperation.eb) + || (theOperation == null)) { if (lowerBoundInstant != null) { gt = this.createPredicate(lowValueField, ParamPrefixEnum.GREATERTHAN_OR_EQUALS, genericLowerBound); lt = this.createPredicate(highValueField, ParamPrefixEnum.GREATERTHAN_OR_EQUALS, genericLowerBound); - if (lowerBound.getPrefix() == ParamPrefixEnum.STARTS_AFTER || lowerBound.getPrefix() == ParamPrefixEnum.EQUAL) { + if (lowerBound.getPrefix() == ParamPrefixEnum.STARTS_AFTER + || lowerBound.getPrefix() == ParamPrefixEnum.EQUAL) { lb = gt; } else { lb = ComboCondition.or(gt, lt); @@ -196,16 +219,16 @@ public class DatePredicateBuilder extends BaseSearchParamPredicateBuilder { gt = this.createPredicate(lowValueField, ParamPrefixEnum.LESSTHAN_OR_EQUALS, genericUpperBound); lt = this.createPredicate(highValueField, ParamPrefixEnum.LESSTHAN_OR_EQUALS, genericUpperBound); - - if (theRange.getUpperBound().getPrefix() == ParamPrefixEnum.ENDS_BEFORE || theRange.getUpperBound().getPrefix() == ParamPrefixEnum.EQUAL) { + if (theRange.getUpperBound().getPrefix() == ParamPrefixEnum.ENDS_BEFORE + || theRange.getUpperBound().getPrefix() == ParamPrefixEnum.EQUAL) { ub = lt; } else { ub = ComboCondition.or(gt, lt); } } } else { - throw new InvalidRequestException(Msg.code(1255) + String.format("Unsupported operator specified, operator=%s", - theOperation.name())); + throw new InvalidRequestException( + Msg.code(1255) + String.format("Unsupported operator specified, operator=%s", theOperation.name())); } if (isOrdinalComparison) { ourLog.trace("Ordinal date 
range is {} - {} ", lowerBoundAsOrdinal, upperBoundAsOrdinal); @@ -251,17 +274,12 @@ public class DatePredicateBuilder extends BaseSearchParamPredicateBuilder { } return createConditionForValueWithComparator(theComparator, column, theValue); - } - public enum ColumnEnum { - LOW, LOW_DATE_ORDINAL, HIGH, HIGH_DATE_ORDINAL - } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ForcedIdPredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ForcedIdPredicateBuilder.java index 8429ed32c36..6f8d1e1b66c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ForcedIdPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ForcedIdPredicateBuilder.java @@ -45,11 +45,7 @@ public class ForcedIdPredicateBuilder extends BaseJoiningPredicateBuilder { return myColumnResourceId; } - public DbColumn getColumnForcedId() { return myColumnForcedId; } - - } - diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/NumberPredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/NumberPredicateBuilder.java index 628b91bd939..86246f98087 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/NumberPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/NumberPredicateBuilder.java @@ -45,6 +45,7 @@ public class NumberPredicateBuilder extends BaseSearchParamPredicateBuilder { private static final Logger ourLog = LoggerFactory.getLogger(NumberPredicateBuilder.class); private final DbColumn myColumnValue; + @Autowired private FhirContext myFhirContext; @@ -57,8 +58,15 @@ public class NumberPredicateBuilder extends BaseSearchParamPredicateBuilder { myColumnValue = getTable().addColumn("SP_VALUE"); } - public Condition createPredicateNumeric(String theResourceName, String theParamName, SearchFilterParser.CompareOperation theOperation, BigDecimal theValue, RequestPartitionId theRequestPartitionId, IQueryParameterType theActualParam) { - Condition numericPredicate = createPredicateNumeric(this, theOperation, theValue, myColumnValue, "invalidNumberPrefix", myFhirContext, theActualParam); + public Condition createPredicateNumeric( + String theResourceName, + String theParamName, + SearchFilterParser.CompareOperation theOperation, + BigDecimal theValue, + RequestPartitionId theRequestPartitionId, + IQueryParameterType theActualParam) { + Condition numericPredicate = createPredicateNumeric( + this, theOperation, theValue, myColumnValue, "invalidNumberPrefix", myFhirContext, theActualParam); return combineWithHashIdentityPredicate(theResourceName, theParamName, numericPredicate); } @@ -66,13 +74,21 @@ public class NumberPredicateBuilder extends BaseSearchParamPredicateBuilder { return myColumnValue; } - - static Condition createPredicateNumeric(BaseSearchParamPredicateBuilder theIndexTable, SearchFilterParser.CompareOperation theOperation, BigDecimal theValue, DbColumn theColumn, String theInvalidValueKey, FhirContext theFhirContext, IQueryParameterType theActualParam) { + static Condition createPredicateNumeric( + BaseSearchParamPredicateBuilder theIndexTable, + SearchFilterParser.CompareOperation theOperation, + BigDecimal theValue, + DbColumn theColumn, + String theInvalidValueKey, + FhirContext theFhirContext, + IQueryParameterType theActualParam) { Condition num; - // Per 
discussions with Grahame Grieve and James Agnew on 11/13/19, modified logic for EQUAL and NOT_EQUAL operators below so as to + // Per discussions with Grahame Grieve and James Agnew on 11/13/19, modified logic for EQUAL and NOT_EQUAL + // operators below so as to // use exact value matching. The "fuzz amount" matching is still used with the APPROXIMATE operator. - SearchFilterParser.CompareOperation operation = defaultIfNull(theOperation, SearchFilterParser.CompareOperation.eq); + SearchFilterParser.CompareOperation operation = + defaultIfNull(theOperation, SearchFilterParser.CompareOperation.eq); switch (operation) { case gt: num = BinaryCondition.greaterThan(theColumn, theIndexTable.generatePlaceholder(theValue)); @@ -103,7 +119,10 @@ public class NumberPredicateBuilder extends BaseSearchParamPredicateBuilder { break; default: String paramValue = theActualParam.getValueAsQueryToken(theFhirContext); - String msg = theIndexTable.getFhirContext().getLocalizer().getMessage(NumberPredicateBuilder.class, theInvalidValueKey, operation, paramValue); + String msg = theIndexTable + .getFhirContext() + .getLocalizer() + .getMessage(NumberPredicateBuilder.class, theInvalidValueKey, operation, paramValue); throw new InvalidRequestException(Msg.code(1235) + msg); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ParsedLocationParam.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ParsedLocationParam.java index 83a039bae4d..17f4795fbef 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ParsedLocationParam.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ParsedLocationParam.java @@ -48,7 +48,8 @@ public class ParsedLocationParam { try { return Double.parseDouble(defaultString(theValue)); } catch (NumberFormatException e) { - throw new InvalidRequestException(Msg.code(2308) + "Invalid lat/lon parameter value: " + UrlUtil.sanitizeUrlPart(theValue)); + throw new InvalidRequestException( + Msg.code(2308) + "Invalid lat/lon parameter value: " + UrlUtil.sanitizeUrlPart(theValue)); } } @@ -74,12 +75,14 @@ public class ParsedLocationParam { String value = param.getValue(); String[] parts = value.split(":"); if (parts.length != 2) { - throw new IllegalArgumentException(Msg.code(1228) + "Invalid position format '" + value + "'. Required format is 'latitude:longitude'"); + throw new IllegalArgumentException(Msg.code(1228) + "Invalid position format '" + value + + "'. Required format is 'latitude:longitude'"); } latitudeValue = parts[0]; longitudeValue = parts[1]; if (isBlank(latitudeValue) || isBlank(longitudeValue)) { - throw new IllegalArgumentException(Msg.code(1229) + "Invalid position format '" + value + "'. Both latitude and longitude must be provided."); + throw new IllegalArgumentException(Msg.code(1229) + "Invalid position format '" + value + + "'. Both latitude and longitude must be provided."); } QuantityParam distanceParam = theParams.getNearDistanceParam(); if (distanceParam != null) { @@ -90,12 +93,15 @@ public class ParsedLocationParam { String value = param.getValue(); String[] parts = StringUtils.split(value, '|'); if (parts.length < 2 || parts.length > 4) { - throw new IllegalArgumentException(Msg.code(1230) + "Invalid position format '" + value + "'. 
Required format is 'latitude|longitude' or 'latitude|longitude|distance' or 'latitude|longitude|distance|units'"); + throw new IllegalArgumentException( + Msg.code(1230) + "Invalid position format '" + value + + "'. Required format is 'latitude|longitude' or 'latitude|longitude|distance' or 'latitude|longitude|distance|units'"); } latitudeValue = parts[0]; longitudeValue = parts[1]; if (isBlank(latitudeValue) || isBlank(longitudeValue)) { - throw new IllegalArgumentException(Msg.code(1231) + "Invalid position format '" + value + "'. Both latitude and longitude must be provided."); + throw new IllegalArgumentException(Msg.code(1231) + "Invalid position format '" + value + + "'. Both latitude and longitude must be provided."); } if (parts.length >= 3) { String distanceString = parts[2]; @@ -107,14 +113,11 @@ public class ParsedLocationParam { String distanceUnits = parts[3]; distanceKm = UcumServiceUtil.convert(distanceKm, distanceUnits, "km"); } - } } else { throw new IllegalArgumentException(Msg.code(1232) + "Invalid position type: " + theParam.getClass()); } - - return new ParsedLocationParam(latitudeValue, longitudeValue, distanceKm); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/QuantityNormalizedPredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/QuantityNormalizedPredicateBuilder.java index 7e860caf1ed..d4bef3a1b02 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/QuantityNormalizedPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/QuantityNormalizedPredicateBuilder.java @@ -28,10 +28,9 @@ public class QuantityNormalizedPredicateBuilder extends BaseQuantityPredicateBui */ public QuantityNormalizedPredicateBuilder(SearchQueryBuilder theSearchSqlBuilder) { super(theSearchSqlBuilder, theSearchSqlBuilder.addTable("HFJ_SPIDX_QUANTITY_NRML")); - + myColumnHashIdentitySystemUnits = getTable().addColumn("HASH_IDENTITY_SYS_UNITS"); myColumnHashIdentityUnits = getTable().addColumn("HASH_IDENTITY_AND_UNITS"); myColumnValue = getTable().addColumn("SP_VALUE"); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/QuantityPredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/QuantityPredicateBuilder.java index fa1d8769640..c4669bcca0e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/QuantityPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/QuantityPredicateBuilder.java @@ -23,16 +23,14 @@ import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; public class QuantityPredicateBuilder extends BaseQuantityPredicateBuilder { - /** * Constructor */ public QuantityPredicateBuilder(SearchQueryBuilder theSearchSqlBuilder) { super(theSearchSqlBuilder, theSearchSqlBuilder.addTable("HFJ_SPIDX_QUANTITY")); - + myColumnHashIdentitySystemUnits = getTable().addColumn("HASH_IDENTITY_SYS_UNITS"); myColumnHashIdentityUnits = getTable().addColumn("HASH_IDENTITY_AND_UNITS"); myColumnValue = getTable().addColumn("SP_VALUE"); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceIdPredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceIdPredicateBuilder.java index 2d6d4df6d99..6aa5376a644 100644 --- 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceIdPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceIdPredicateBuilder.java @@ -36,10 +36,10 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nullable; import java.util.HashSet; import java.util.List; import java.util.Set; +import javax.annotation.Nullable; import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -57,9 +57,13 @@ public class ResourceIdPredicateBuilder extends BasePredicateBuilder { super(theSearchSqlBuilder); } - @Nullable - public Condition createPredicateResourceId(@Nullable DbColumn theSourceJoinColumn, String theResourceName, List> theValues, SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId) { + public Condition createPredicateResourceId( + @Nullable DbColumn theSourceJoinColumn, + String theResourceName, + List> theValues, + SearchFilterParser.CompareOperation theOperation, + RequestPartitionId theRequestPartitionId) { Set allOrPids = null; SearchFilterParser.CompareOperation defaultOperation = SearchFilterParser.CompareOperation.eq; @@ -82,7 +86,8 @@ public class ResourceIdPredicateBuilder extends BasePredicateBuilder { haveValue = true; try { boolean excludeDeleted = true; - JpaPid pid = myIdHelperService.resolveResourcePersistentIds(theRequestPartitionId, theResourceName, valueAsId.getIdPart(), excludeDeleted); + JpaPid pid = myIdHelperService.resolveResourcePersistentIds( + theRequestPartitionId, theResourceName, valueAsId.getIdPart(), excludeDeleted); orPids.add(pid); } catch (ResourceNotFoundException e) { // This is not an error in a search, it just results in no matches @@ -95,7 +100,6 @@ public class ResourceIdPredicateBuilder extends BasePredicateBuilder { defaultOperation = SearchFilterParser.CompareOperation.ne; } } - } if (haveValue) { if (allOrPids == null) { @@ -103,7 +107,6 @@ public class ResourceIdPredicateBuilder extends BasePredicateBuilder { } else { allOrPids.retainAll(orPids); } - } } @@ -114,7 +117,8 @@ public class ResourceIdPredicateBuilder extends BasePredicateBuilder { } else if (allOrPids != null) { SearchFilterParser.CompareOperation operation = defaultIfNull(theOperation, defaultOperation); - assert operation == SearchFilterParser.CompareOperation.eq || operation == SearchFilterParser.CompareOperation.ne; + assert operation == SearchFilterParser.CompareOperation.eq + || operation == SearchFilterParser.CompareOperation.ne; List resourceIds = JpaPid.toLongList(allOrPids); if (theSourceJoinColumn == null) { @@ -130,13 +134,13 @@ public class ResourceIdPredicateBuilder extends BasePredicateBuilder { return queryRootTable.combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate); } } else { - return QueryParameterUtils.toEqualToOrInPredicate(theSourceJoinColumn, generatePlaceholders(resourceIds), operation == SearchFilterParser.CompareOperation.ne); + return QueryParameterUtils.toEqualToOrInPredicate( + theSourceJoinColumn, + generatePlaceholders(resourceIds), + operation == SearchFilterParser.CompareOperation.ne); } - } return null; } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilder.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilder.java index 7d9dd624598..10c17cc2541 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceLinkPredicateBuilder.java @@ -45,7 +45,6 @@ import ca.uhn.fhir.jpa.search.builder.models.MissingQueryParameterPredicateParam import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.ResourceMetaParams; -import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil; import ca.uhn.fhir.jpa.util.QueryParameterUtils; import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.model.primitive.IdDt; @@ -54,16 +53,9 @@ import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; import ca.uhn.fhir.rest.api.SearchContainedModeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; -import ca.uhn.fhir.rest.param.CompositeParam; -import ca.uhn.fhir.rest.param.DateParam; -import ca.uhn.fhir.rest.param.NumberParam; -import ca.uhn.fhir.rest.param.QuantityParam; import ca.uhn.fhir.rest.param.ReferenceParam; -import ca.uhn.fhir.rest.param.SpecialParam; -import ca.uhn.fhir.rest.param.StringParam; import ca.uhn.fhir.rest.param.TokenParam; import ca.uhn.fhir.rest.param.TokenParamModifier; -import ca.uhn.fhir.rest.param.UriParam; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; @@ -83,8 +75,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -94,13 +84,13 @@ import java.util.List; import java.util.ListIterator; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.trim; -public class ResourceLinkPredicateBuilder - extends BaseJoiningPredicateBuilder - implements ICanMakeMissingParamPredicate { +public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder implements ICanMakeMissingParamPredicate { private static final Logger ourLog = LoggerFactory.getLogger(ResourceLinkPredicateBuilder.class); private final DbColumn myColumnSrcType; @@ -114,21 +104,27 @@ public class ResourceLinkPredicateBuilder @Autowired private JpaStorageSettings myStorageSettings; + @Autowired private IInterceptorBroadcaster myInterceptorBroadcaster; + @Autowired private ISearchParamRegistry mySearchParamRegistry; + @Autowired private IIdHelperService myIdHelperService; + @Autowired private DaoRegistry myDaoRegistry; + @Autowired private MatchUrlService myMatchUrlService; /** * Constructor */ - public ResourceLinkPredicateBuilder(QueryStack theQueryStack, SearchQueryBuilder theSearchSqlBuilder, boolean theReversed) { + public ResourceLinkPredicateBuilder( + QueryStack theQueryStack, SearchQueryBuilder theSearchSqlBuilder, boolean theReversed) { super(theSearchSqlBuilder, theSearchSqlBuilder.addTable("HFJ_RES_LINK")); myColumnSrcResourceId = getTable().addColumn("SRC_RESOURCE_ID"); 
myColumnSrcType = getTable().addColumn("SOURCE_RESOURCE_TYPE"); @@ -174,7 +170,14 @@ public class ResourceLinkPredicateBuilder } } - public Condition createPredicate(RequestDetails theRequest, String theResourceType, String theParamName, List theQualifiers, List theReferenceOrParamList, SearchFilterParser.CompareOperation theOperation, RequestPartitionId theRequestPartitionId) { + public Condition createPredicate( + RequestDetails theRequest, + String theResourceType, + String theParamName, + List theQualifiers, + List theReferenceOrParamList, + SearchFilterParser.CompareOperation theOperation, + RequestPartitionId theRequestPartitionId) { List targetIds = new ArrayList<>(); List targetQualifiedUrls = new ArrayList<>(); @@ -210,14 +213,20 @@ public class ResourceLinkPredicateBuilder * Handle chained search, e.g. Patient?organization.name=Kwik-e-mart */ - return addPredicateReferenceWithChain(theResourceType, theParamName, theQualifiers, theReferenceOrParamList, ref, theRequest, theRequestPartitionId); - + return addPredicateReferenceWithChain( + theResourceType, + theParamName, + theQualifiers, + theReferenceOrParamList, + ref, + theRequest, + theRequestPartitionId); } } else { - throw new IllegalArgumentException(Msg.code(1241) + "Invalid token type (expecting ReferenceParam): " + nextOr.getClass()); + throw new IllegalArgumentException( + Msg.code(1241) + "Invalid token type (expecting ReferenceParam): " + nextOr.getClass()); } - } for (IIdType next : targetIds) { @@ -234,7 +243,8 @@ public class ResourceLinkPredicateBuilder inverse = true; } - List targetPids = myIdHelperService.resolveResourcePersistentIdsWithCache(theRequestPartitionId, targetIds); + List targetPids = + myIdHelperService.resolveResourcePersistentIdsWithCache(theRequestPartitionId, targetIds); List targetPidList = JpaPid.toLongList(targetPids); if (targetPidList.isEmpty() && targetQualifiedUrls.isEmpty()) { @@ -244,21 +254,26 @@ public class ResourceLinkPredicateBuilder Condition retVal = createPredicateReference(inverse, pathsToMatch, targetPidList, targetQualifiedUrls); return combineWithRequestPartitionIdPredicate(getRequestPartitionId(), retVal); } - } - private Condition createPredicateReference(boolean theInverse, List thePathsToMatch, List theTargetPidList, List theTargetQualifiedUrls) { + private Condition createPredicateReference( + boolean theInverse, + List thePathsToMatch, + List theTargetPidList, + List theTargetQualifiedUrls) { Condition targetPidCondition = null; if (!theTargetPidList.isEmpty()) { List placeholders = generatePlaceholders(theTargetPidList); - targetPidCondition = QueryParameterUtils.toEqualToOrInPredicate(myColumnTargetResourceId, placeholders, theInverse); + targetPidCondition = + QueryParameterUtils.toEqualToOrInPredicate(myColumnTargetResourceId, placeholders, theInverse); } Condition targetUrlsCondition = null; if (!theTargetQualifiedUrls.isEmpty()) { List placeholders = generatePlaceholders(theTargetQualifiedUrls); - targetUrlsCondition = QueryParameterUtils.toEqualToOrInPredicate(myColumnTargetResourceUrl, placeholders, theInverse); + targetUrlsCondition = + QueryParameterUtils.toEqualToOrInPredicate(myColumnTargetResourceUrl, placeholders, theInverse); } Condition joinedCondition; @@ -286,37 +301,45 @@ public class ResourceLinkPredicateBuilder return createPredicateSourcePaths(pathsToMatch); } - - private void warnAboutPerformanceOnUnqualifiedResources(String theParamName, RequestDetails theRequest, @Nullable List theCandidateTargetTypes) { + private void 
warnAboutPerformanceOnUnqualifiedResources( + String theParamName, RequestDetails theRequest, @Nullable List theCandidateTargetTypes) { StringBuilder builder = new StringBuilder(); builder.append("This search uses an unqualified resource(a parameter in a chain without a resource type). "); builder.append("This is less efficient than using a qualified type. "); if (theCandidateTargetTypes != null) { - builder.append("[" + theParamName + "] resolves to [" + theCandidateTargetTypes.stream().collect(Collectors.joining(",")) + "]."); + builder.append("[" + theParamName + "] resolves to [" + + theCandidateTargetTypes.stream().collect(Collectors.joining(",")) + "]."); builder.append("If you know what you're looking for, try qualifying it using the form "); - builder.append(theCandidateTargetTypes.stream().map(cls -> "[" + cls + ":" + theParamName + "]").collect(Collectors.joining(" or "))); + builder.append(theCandidateTargetTypes.stream() + .map(cls -> "[" + cls + ":" + theParamName + "]") + .collect(Collectors.joining(" or "))); } else { builder.append("If you know what you're looking for, try qualifying it using the form: '"); builder.append(theParamName).append(":[resourceType]"); builder.append("'"); } - String message = builder - .toString(); - StorageProcessingMessage msg = new StorageProcessingMessage() - .setMessage(message); + String message = builder.toString(); + StorageProcessingMessage msg = new StorageProcessingMessage().setMessage(message); HookParams params = new HookParams() - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest) - .add(StorageProcessingMessage.class, msg); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.JPA_PERFTRACE_WARNING, params); + .add(RequestDetails.class, theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest) + .add(StorageProcessingMessage.class, msg); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, theRequest, Pointcut.JPA_PERFTRACE_WARNING, params); } - /** * This is for handling queries like the following: /Observation?device.identifier=urn:system|foo in which we use a chain * on the device. */ - private Condition addPredicateReferenceWithChain(String theResourceName, String theParamName, List theQualifiers, List theList, ReferenceParam theReferenceParam, RequestDetails theRequest, RequestPartitionId theRequestPartitionId) { + private Condition addPredicateReferenceWithChain( + String theResourceName, + String theParamName, + List theQualifiers, + List theList, + ReferenceParam theReferenceParam, + RequestDetails theRequest, + RequestPartitionId theRequestPartitionId) { /* * Which resource types can the given chained parameter actually link to? This might be a list @@ -326,7 +349,8 @@ public class ResourceLinkPredicateBuilder * If the user has qualified the chain, as in: Observation?subject:Patient.identifier=(...) * this is just a simple 1-entry list. 
*/ - final List resourceTypes = determineCandidateResourceTypesForChain(theResourceName, theParamName, theReferenceParam); + final List resourceTypes = + determineCandidateResourceTypesForChain(theResourceName, theParamName, theReferenceParam); /* * Handle chain on _type @@ -347,7 +371,8 @@ public class ResourceLinkPredicateBuilder throw newInvalidTargetTypeForChainException(theResourceName, theParamName, typeValue); } - Condition condition = BinaryCondition.equalTo(myColumnTargetResourceType, generatePlaceholder(theReferenceParam.getValue())); + Condition condition = BinaryCondition.equalTo( + myColumnTargetResourceType, generatePlaceholder(theReferenceParam.getValue())); return QueryParameterUtils.toAndPredicate(typeCondition, condition); } @@ -400,7 +425,8 @@ public class ResourceLinkPredicateBuilder for (IQueryParameterType next : theList) { String nextValue = next.getValueAsQueryToken(getFhirContext()); - IQueryParameterType chainValue = mapReferenceChainToRawParamType(remainingChain, param, theParamName, qualifier, nextType, chain, isMeta, nextValue); + IQueryParameterType chainValue = mapReferenceChainToRawParamType( + remainingChain, param, theParamName, qualifier, nextType, chain, isMeta, nextValue); if (chainValue == null) { continue; } @@ -416,7 +442,13 @@ public class ResourceLinkPredicateBuilder } if (!foundChainMatch) { - throw new InvalidRequestException(Msg.code(1242) + getFhirContext().getLocalizer().getMessage(BaseStorageDao.class, "invalidParameterChain", theParamName + '.' + theReferenceParam.getChain())); + throw new InvalidRequestException(Msg.code(1242) + + getFhirContext() + .getLocalizer() + .getMessage( + BaseStorageDao.class, + "invalidParameterChain", + theParamName + '.' + theReferenceParam.getChain())); } candidateTargetTypes.add(nextType); @@ -424,13 +456,26 @@ public class ResourceLinkPredicateBuilder List andPredicates = new ArrayList<>(); List> chainParamValues = Collections.singletonList(orValues); - andPredicates.add(childQueryFactory.searchForIdsWithAndOr(myColumnTargetResourceId, subResourceName, chain, chainParamValues, theRequest, theRequestPartitionId, SearchContainedModeEnum.FALSE)); + andPredicates.add(childQueryFactory.searchForIdsWithAndOr( + myColumnTargetResourceId, + subResourceName, + chain, + chainParamValues, + theRequest, + theRequestPartitionId, + SearchContainedModeEnum.FALSE)); orPredicates.add(QueryParameterUtils.toAndPredicate(andPredicates)); } if (candidateTargetTypes.isEmpty()) { - throw new InvalidRequestException(Msg.code(1243) + getFhirContext().getLocalizer().getMessage(BaseStorageDao.class, "invalidParameterChain", theParamName + '.' + theReferenceParam.getChain())); + throw new InvalidRequestException(Msg.code(1243) + + getFhirContext() + .getLocalizer() + .getMessage( + BaseStorageDao.class, + "invalidParameterChain", + theParamName + '.' 
+ theReferenceParam.getChain())); } if (candidateTargetTypes.size() > 1) { @@ -444,21 +489,23 @@ public class ResourceLinkPredicateBuilder } else { multiTypePredicate = QueryParameterUtils.toOrPredicate(orPredicates); } - + List pathsToMatch = createResourceLinkPaths(theResourceName, theParamName, theQualifiers); Condition pathPredicate = createPredicateSourcePaths(pathsToMatch); return QueryParameterUtils.toAndPredicate(pathPredicate, multiTypePredicate); } @Nonnull - private List determineCandidateResourceTypesForChain(String theResourceName, String theParamName, ReferenceParam theReferenceParam) { + private List determineCandidateResourceTypesForChain( + String theResourceName, String theParamName, ReferenceParam theReferenceParam) { final List> resourceTypes; if (!theReferenceParam.hasResourceType()) { resourceTypes = determineResourceTypes(Collections.singleton(theResourceName), theParamName); if (resourceTypes.isEmpty()) { - RuntimeSearchParam searchParamByName = mySearchParamRegistry.getActiveSearchParam(theResourceName, theParamName); + RuntimeSearchParam searchParamByName = + mySearchParamRegistry.getActiveSearchParam(theResourceName, theParamName); if (searchParamByName == null) { throw new InternalErrorException(Msg.code(1244) + "Could not find parameter " + theParamName); } @@ -475,7 +522,8 @@ public class ResourceLinkPredicateBuilder } } - Class resourceType = getFhirContext().getResourceDefinition(theResourceName).getImplementingClass(); + Class resourceType = + getFhirContext().getResourceDefinition(theResourceName).getImplementingClass(); BaseRuntimeChildDefinition def = getFhirContext().newTerser().getDefinition(resourceType, paramPath); if (def instanceof RuntimeChildChoiceDefinition) { RuntimeChildChoiceDefinition choiceDef = (RuntimeChildChoiceDefinition) def; @@ -485,11 +533,15 @@ public class ResourceLinkPredicateBuilder resourceTypes.addAll(resDef.getResourceTypes()); if (resourceTypes.size() == 1) { if (resourceTypes.get(0).isInterface()) { - throw new InvalidRequestException(Msg.code(1245) + "Unable to perform search for unqualified chain '" + theParamName + "' as this SearchParameter does not declare any target types. Add a qualifier of the form '" + theParamName + ":[ResourceType]' to perform this search."); + throw new InvalidRequestException( + Msg.code(1245) + "Unable to perform search for unqualified chain '" + theParamName + + "' as this SearchParameter does not declare any target types. 
Add a qualifier of the form '" + + theParamName + ":[ResourceType]' to perform this search."); } } } else { - throw new ConfigurationException(Msg.code(1246) + "Property " + paramPath + " of type " + getResourceType() + " is not a resource: " + def.getClass()); + throw new ConfigurationException(Msg.code(1246) + "Property " + paramPath + " of type " + + getResourceType() + " is not a resource: " + def.getClass()); } } @@ -505,22 +557,22 @@ public class ResourceLinkPredicateBuilder } else { try { - RuntimeResourceDefinition resDef = getFhirContext().getResourceDefinition(theReferenceParam.getResourceType()); + RuntimeResourceDefinition resDef = + getFhirContext().getResourceDefinition(theReferenceParam.getResourceType()); resourceTypes = new ArrayList<>(1); resourceTypes.add(resDef.getImplementingClass()); } catch (DataFormatException e) { throw newInvalidResourceTypeException(theReferenceParam.getResourceType()); } - } - return resourceTypes - .stream() - .map(t -> getFhirContext().getResourceType(t)) - .collect(Collectors.toList()); + return resourceTypes.stream() + .map(t -> getFhirContext().getResourceType(t)) + .collect(Collectors.toList()); } - private List> determineResourceTypes(Set theResourceNames, String theParamNameChain) { + private List> determineResourceTypes( + Set theResourceNames, String theParamNameChain) { int linkIndex = theParamNameChain.indexOf('.'); if (linkIndex == -1) { Set> resourceTypes = new HashSet<>(); @@ -530,14 +582,15 @@ public class ResourceLinkPredicateBuilder if (param != null && param.hasTargets()) { Set targetTypes = param.getTargets(); for (String next : targetTypes) { - resourceTypes.add(getFhirContext().getResourceDefinition(next).getImplementingClass()); + resourceTypes.add( + getFhirContext().getResourceDefinition(next).getImplementingClass()); } } } return new ArrayList<>(resourceTypes); } else { String paramNameHead = theParamNameChain.substring(0, linkIndex); - String paramNameTail = theParamNameChain.substring(linkIndex+1); + String paramNameTail = theParamNameChain.substring(linkIndex + 1); Set targetResourceTypeNames = new HashSet<>(); for (String resourceName : theResourceNames) { RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(resourceName, paramNameHead); @@ -550,13 +603,15 @@ public class ResourceLinkPredicateBuilder } } - public List createResourceLinkPaths(String theResourceName, String theParamName, List theParamQualifiers) { + public List createResourceLinkPaths( + String theResourceName, String theParamName, List theParamQualifiers) { int linkIndex = theParamName.indexOf('.'); if (linkIndex == -1) { RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(theResourceName, theParamName); if (param == null) { - // This can happen during recursion, if not all the possible target types of one link in the chain support the next link + // This can happen during recursion, if not all the possible target types of one link in the chain + // support the next link return new ArrayList<>(); } List path = param.getPathsSplit(); @@ -583,15 +638,17 @@ public class ResourceLinkPredicateBuilder RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(theResourceName, paramNameHead); if (param == null) { - // This can happen during recursion, if not all the possible target types of one link in the chain support the next link + // This can happen during recursion, if not all the possible target types of one link in the chain + // support the next link return new ArrayList<>(); } Set tailPaths = 
param.getTargets().stream() - .filter(t -> isBlank(qualifier) || qualifier.equals(t)) - .map(t -> createResourceLinkPaths(t, paramNameTail, theParamQualifiers.subList(1, theParamQualifiers.size()))) - .flatMap(Collection::stream) - .map(t -> t.substring(t.indexOf('.')+1)) - .collect(Collectors.toSet()); + .filter(t -> isBlank(qualifier) || qualifier.equals(t)) + .map(t -> createResourceLinkPaths( + t, paramNameTail, theParamQualifiers.subList(1, theParamQualifiers.size()))) + .flatMap(Collection::stream) + .map(t -> t.substring(t.indexOf('.') + 1)) + .collect(Collectors.toSet()); List path = param.getPathsSplit(); @@ -601,20 +658,29 @@ public class ResourceLinkPredicateBuilder * Then append all the tail paths to each of the applicable head paths */ return path.stream() - .map(String::trim) - .filter(t -> t.startsWith(theResourceName + ".")) - .map(head -> tailPaths.stream().map(tail -> head + "." + tail).collect(Collectors.toSet())) - .flatMap(Collection::stream) - .collect(Collectors.toList()); + .map(String::trim) + .filter(t -> t.startsWith(theResourceName + ".")) + .map(head -> + tailPaths.stream().map(tail -> head + "." + tail).collect(Collectors.toSet())) + .flatMap(Collection::stream) + .collect(Collectors.toList()); } } - - private IQueryParameterType mapReferenceChainToRawParamType(String remainingChain, RuntimeSearchParam param, String theParamName, String qualifier, String nextType, String chain, boolean isMeta, String resourceId) { + private IQueryParameterType mapReferenceChainToRawParamType( + String remainingChain, + RuntimeSearchParam param, + String theParamName, + String qualifier, + String nextType, + String chain, + boolean isMeta, + String resourceId) { IQueryParameterType chainValue; if (remainingChain != null) { if (param == null || param.getParamType() != RestSearchParameterTypeEnum.REFERENCE) { - ourLog.debug("Type {} parameter {} is not a reference, can not chain {}", nextType, chain, remainingChain); + ourLog.debug( + "Type {} parameter {} is not a reference, can not chain {}", nextType, chain, remainingChain); return null; } @@ -633,26 +699,34 @@ public class ResourceLinkPredicateBuilder } @Nonnull - private InvalidRequestException newInvalidTargetTypeForChainException(String theResourceName, String theParamName, String theTypeValue) { + private InvalidRequestException newInvalidTargetTypeForChainException( + String theResourceName, String theParamName, String theTypeValue) { String searchParamName = theResourceName + ":" + theParamName; - String msg = getFhirContext().getLocalizer().getMessage(ResourceLinkPredicateBuilder.class, "invalidTargetTypeForChain", theTypeValue, searchParamName); + String msg = getFhirContext() + .getLocalizer() + .getMessage( + ResourceLinkPredicateBuilder.class, "invalidTargetTypeForChain", theTypeValue, searchParamName); return new InvalidRequestException(msg); } @Nonnull private InvalidRequestException newInvalidResourceTypeException(String theResourceType) { - String msg = getFhirContext().getLocalizer().getMessageSanitized(SearchCoordinatorSvcImpl.class, "invalidResourceType", theResourceType); + String msg = getFhirContext() + .getLocalizer() + .getMessageSanitized(SearchCoordinatorSvcImpl.class, "invalidResourceType", theResourceType); throw new InvalidRequestException(Msg.code(1250) + msg); } @Nonnull - public Condition createEverythingPredicate(String theResourceName, List theSourceResourceNames, Long... 
theTargetPids) { + public Condition createEverythingPredicate( + String theResourceName, List theSourceResourceNames, Long... theTargetPids) { Condition condition; if (theTargetPids != null && theTargetPids.length >= 1) { // if resource ids are provided, we'll create the predicate // with ids in or equal to this value - condition = QueryParameterUtils.toEqualToOrInPredicate(myColumnTargetResourceId, generatePlaceholders(Arrays.asList(theTargetPids))); + condition = QueryParameterUtils.toEqualToOrInPredicate( + myColumnTargetResourceId, generatePlaceholders(Arrays.asList(theTargetPids))); } else { // ... otherwise we look for resource types condition = BinaryCondition.equalTo(myColumnTargetResourceType, generatePlaceholder(theResourceName)); @@ -660,14 +734,14 @@ public class ResourceLinkPredicateBuilder if (!theSourceResourceNames.isEmpty()) { // if source resources are provided, add on predicate for _type operation - Condition typeCondition = QueryParameterUtils.toEqualToOrInPredicate(myColumnSrcType, generatePlaceholders(theSourceResourceNames)); + Condition typeCondition = QueryParameterUtils.toEqualToOrInPredicate( + myColumnSrcType, generatePlaceholders(theSourceResourceNames)); condition = QueryParameterUtils.toAndPredicate(List.of(condition, typeCondition)); } return condition; } - @Override public Condition createPredicateParamMissingValue(MissingQueryParameterPredicateParams theParams) { SelectQuery subquery = new SelectQuery(); @@ -675,12 +749,13 @@ public class ResourceLinkPredicateBuilder subquery.addFromTable(getTable()); Condition subQueryCondition = ComboCondition.and( - BinaryCondition.equalTo(getResourceIdColumn(), - theParams.getResourceTablePredicateBuilder().getResourceIdColumn() - ), - BinaryCondition.equalTo(getResourceTypeColumn(), - generatePlaceholder(theParams.getResourceTablePredicateBuilder().getResourceType())) - ); + BinaryCondition.equalTo( + getResourceIdColumn(), + theParams.getResourceTablePredicateBuilder().getResourceIdColumn()), + BinaryCondition.equalTo( + getResourceTypeColumn(), + generatePlaceholder( + theParams.getResourceTablePredicateBuilder().getResourceType()))); subquery.addCondition(subQueryCondition); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceTablePredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceTablePredicateBuilder.java index 5f77daea0d6..c9fb8845dd7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceTablePredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceTablePredicateBuilder.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.jpa.search.builder.predicate; -import ca.uhn.fhir.jpa.util.QueryParameterUtils; import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; +import ca.uhn.fhir.jpa.util.QueryParameterUtils; import com.healthmarketscience.sqlbuilder.BinaryCondition; import com.healthmarketscience.sqlbuilder.Condition; import com.healthmarketscience.sqlbuilder.NotCondition; @@ -29,9 +29,6 @@ import com.healthmarketscience.sqlbuilder.dbspec.basic.DbColumn; import java.util.Set; -import static ca.uhn.fhir.jpa.util.QueryParameterUtils.toAndPredicate; -import static ca.uhn.fhir.jpa.util.QueryParameterUtils.toEqualToOrInPredicate; - public class ResourceTablePredicateBuilder extends BaseJoiningPredicateBuilder { private final DbColumn myColumnResId; private final DbColumn myColumnResDeletedAt; @@ -51,7 +48,6 @@ 
public class ResourceTablePredicateBuilder extends BaseJoiningPredicateBuilder { myColumnLanguage = getTable().addColumn("RES_LANGUAGE"); } - @Override public DbColumn getResourceIdColumn() { return myColumnResId; @@ -62,10 +58,7 @@ public class ResourceTablePredicateBuilder extends BaseJoiningPredicateBuilder { if (getResourceType() != null) { typePredicate = BinaryCondition.equalTo(myColumnResType, generatePlaceholder(getResourceType())); } - return QueryParameterUtils.toAndPredicate( - typePredicate, - UnaryCondition.isNull(myColumnResDeletedAt) - ); + return QueryParameterUtils.toAndPredicate(typePredicate, UnaryCondition.isNull(myColumnResDeletedAt)); } public DbColumn getLastUpdatedColumn() { @@ -73,7 +66,8 @@ public class ResourceTablePredicateBuilder extends BaseJoiningPredicateBuilder { } public Condition createLanguagePredicate(Set theValues, boolean theNegated) { - Condition condition = QueryParameterUtils.toEqualToOrInPredicate(myColumnLanguage, generatePlaceholders(theValues)); + Condition condition = + QueryParameterUtils.toEqualToOrInPredicate(myColumnLanguage, generatePlaceholders(theValues)); if (theNegated) { condition = new NotCondition(condition); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/SearchParamPresentPredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/SearchParamPresentPredicateBuilder.java index b41de65659c..fdf6525c249 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/SearchParamPresentPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/SearchParamPresentPredicateBuilder.java @@ -50,11 +50,11 @@ public class SearchParamPresentPredicateBuilder extends BaseJoiningPredicateBuil return myColumnResourceId; } - - public Condition createPredicateParamMissingForReference(String theResourceName, String theParamName, boolean theMissing, RequestPartitionId theRequestPartitionId) { - Long hash = SearchParamPresentEntity.calculateHashPresence(myPartitionSettings, theRequestPartitionId, theResourceName, theParamName, !theMissing); + public Condition createPredicateParamMissingForReference( + String theResourceName, String theParamName, boolean theMissing, RequestPartitionId theRequestPartitionId) { + Long hash = SearchParamPresentEntity.calculateHashPresence( + myPartitionSettings, theRequestPartitionId, theResourceName, theParamName, !theMissing); BinaryCondition predicate = BinaryCondition.equalTo(myColumnHashPresence, generatePlaceholder(hash)); return combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/SourcePredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/SourcePredicateBuilder.java index 0366a56646b..d0d572d0899 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/SourcePredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/SourcePredicateBuilder.java @@ -44,7 +44,6 @@ public class SourcePredicateBuilder extends BaseJoiningPredicateBuilder { myColumnRequestId = getTable().addColumn("REQUEST_ID"); } - @Override public DbColumn getResourceIdColumn() { return myResourceIdColumn; @@ -57,5 +56,4 @@ public class SourcePredicateBuilder extends BaseJoiningPredicateBuilder { public Condition 
createPredicateRequestId(String theRequestId) { return BinaryCondition.equalTo(myColumnRequestId, generatePlaceholder(theRequestId)); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/StringPredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/StringPredicateBuilder.java index c2a80e77509..2663e127fde 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/StringPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/StringPredicateBuilder.java @@ -19,13 +19,13 @@ */ package ca.uhn.fhir.jpa.search.builder.predicate; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.RuntimeSearchParam; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString; -import ca.uhn.fhir.jpa.util.QueryParameterUtils; import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; +import ca.uhn.fhir.jpa.util.QueryParameterUtils; import ca.uhn.fhir.model.api.IPrimitiveDatatype; import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.rest.param.StringParam; @@ -49,6 +49,7 @@ public class StringPredicateBuilder extends BaseSearchParamPredicateBuilder { private final DbColumn myColumnHashNormPrefix; private final DbColumn myColumnHashIdentity; private final DbColumn myColumnHashExact; + @Autowired private JpaStorageSettings myStorageSettings; @@ -74,19 +75,21 @@ public class StringPredicateBuilder extends BaseSearchParamPredicateBuilder { return myColumnResId; } - public Condition createPredicateString(IQueryParameterType theParameter, - String theResourceName, - String theSpnamePrefix, - RuntimeSearchParam theSearchParam, - StringPredicateBuilder theFrom, - SearchFilterParser.CompareOperation operation) { + public Condition createPredicateString( + IQueryParameterType theParameter, + String theResourceName, + String theSpnamePrefix, + RuntimeSearchParam theSearchParam, + StringPredicateBuilder theFrom, + SearchFilterParser.CompareOperation operation) { String rawSearchTerm; String paramName = QueryParameterUtils.getParamNameWithPrefix(theSpnamePrefix, theSearchParam.getName()); - + if (theParameter instanceof TokenParam) { TokenParam id = (TokenParam) theParameter; if (!id.isText()) { - throw new IllegalStateException(Msg.code(1257) + "Trying to process a text search on a non-text token parameter"); + throw new IllegalStateException( + Msg.code(1257) + "Trying to process a text search on a non-text token parameter"); } rawSearchTerm = id.getValue(); } else if (theParameter instanceof StringParam) { @@ -94,7 +97,8 @@ public class StringPredicateBuilder extends BaseSearchParamPredicateBuilder { rawSearchTerm = id.getValue(); if (id.isContains()) { if (!myStorageSettings.isAllowContainsSearches()) { - throw new MethodNotAllowedException(Msg.code(1258) + ":contains modifier is disabled on this server"); + throw new MethodNotAllowedException( + Msg.code(1258) + ":contains modifier is disabled on this server"); } } else { rawSearchTerm = theSearchParam.encode(rawSearchTerm); @@ -107,8 +111,9 @@ public class StringPredicateBuilder extends BaseSearchParamPredicateBuilder { } if (rawSearchTerm.length() > ResourceIndexedSearchParamString.MAX_LENGTH) { - throw new InvalidRequestException(Msg.code(1260) + "Parameter[" + paramName + "] has length (" + rawSearchTerm.length() + ") 
that is longer than maximum allowed (" - + ResourceIndexedSearchParamString.MAX_LENGTH + "): " + rawSearchTerm); + throw new InvalidRequestException(Msg.code(1260) + "Parameter[" + paramName + "] has length (" + + rawSearchTerm.length() + ") that is longer than maximum allowed (" + + ResourceIndexedSearchParamString.MAX_LENGTH + "): " + rawSearchTerm); } boolean exactMatch = theParameter instanceof StringParam && ((StringParam) theParameter).isExact(); @@ -119,16 +124,15 @@ public class StringPredicateBuilder extends BaseSearchParamPredicateBuilder { // Normalized Match String normalizedString = StringUtil.normalizeStringForSearchIndexing(rawSearchTerm); String likeExpression; - if ((theParameter instanceof StringParam) && - (((((StringParam) theParameter).isContains()) && - (myStorageSettings.isAllowContainsSearches())) || - (operation == SearchFilterParser.CompareOperation.co))) { + if ((theParameter instanceof StringParam) + && (((((StringParam) theParameter).isContains()) && (myStorageSettings.isAllowContainsSearches())) + || (operation == SearchFilterParser.CompareOperation.co))) { likeExpression = createLeftAndRightMatchLikeExpression(normalizedString); - } else if ((operation != SearchFilterParser.CompareOperation.ne) && - (operation != SearchFilterParser.CompareOperation.gt) && - (operation != SearchFilterParser.CompareOperation.lt) && - (operation != SearchFilterParser.CompareOperation.ge) && - (operation != SearchFilterParser.CompareOperation.le)) { + } else if ((operation != SearchFilterParser.CompareOperation.ne) + && (operation != SearchFilterParser.CompareOperation.gt) + && (operation != SearchFilterParser.CompareOperation.lt) + && (operation != SearchFilterParser.CompareOperation.ge) + && (operation != SearchFilterParser.CompareOperation.le)) { if (operation == SearchFilterParser.CompareOperation.ew) { likeExpression = createRightMatchLikeExpression(normalizedString); } else { @@ -139,11 +143,13 @@ public class StringPredicateBuilder extends BaseSearchParamPredicateBuilder { } Condition predicate; - if ((operation == null) || - (operation == SearchFilterParser.CompareOperation.sw)) { - predicate = theFrom.createPredicateNormalLike(theResourceName, paramName, normalizedString, likeExpression); - } else if ((operation == SearchFilterParser.CompareOperation.ew) || (operation == SearchFilterParser.CompareOperation.co)) { - predicate = theFrom.createPredicateLikeExpressionOnly(theResourceName, paramName, likeExpression, false); + if ((operation == null) || (operation == SearchFilterParser.CompareOperation.sw)) { + predicate = + theFrom.createPredicateNormalLike(theResourceName, paramName, normalizedString, likeExpression); + } else if ((operation == SearchFilterParser.CompareOperation.ew) + || (operation == SearchFilterParser.CompareOperation.co)) { + predicate = + theFrom.createPredicateLikeExpressionOnly(theResourceName, paramName, likeExpression, false); } else if (operation == SearchFilterParser.CompareOperation.eq) { predicate = theFrom.createPredicateNormal(theResourceName, paramName, normalizedString); } else if (operation == SearchFilterParser.CompareOperation.ne) { @@ -157,7 +163,8 @@ public class StringPredicateBuilder extends BaseSearchParamPredicateBuilder { } else if (operation == SearchFilterParser.CompareOperation.le) { predicate = theFrom.createPredicateNormalLessThanOrEqual(theResourceName, paramName, likeExpression); } else { - throw new IllegalArgumentException(Msg.code(1261) + "Don't yet know how to handle operation " + operation + " on a string"); + throw new 
IllegalArgumentException( + Msg.code(1261) + "Don't yet know how to handle operation " + operation + " on a string"); } return predicate; @@ -166,55 +173,82 @@ public class StringPredicateBuilder extends BaseSearchParamPredicateBuilder { @Nonnull public Condition createPredicateExact(String theResourceType, String theParamName, String theTheValueExact) { - long hash = ResourceIndexedSearchParamString.calculateHashExact(getPartitionSettings(), getRequestPartitionId(), theResourceType, theParamName, theTheValueExact); + long hash = ResourceIndexedSearchParamString.calculateHashExact( + getPartitionSettings(), getRequestPartitionId(), theResourceType, theParamName, theTheValueExact); String placeholderValue = generatePlaceholder(hash); return BinaryCondition.equalTo(myColumnHashExact, placeholderValue); } @Nonnull - public Condition createPredicateNormalLike(String theResourceType, String theParamName, String theNormalizedString, String theLikeExpression) { - Long hash = ResourceIndexedSearchParamString.calculateHashNormalized(getPartitionSettings(), getRequestPartitionId(), getStorageSettings(), theResourceType, theParamName, theNormalizedString); + public Condition createPredicateNormalLike( + String theResourceType, String theParamName, String theNormalizedString, String theLikeExpression) { + Long hash = ResourceIndexedSearchParamString.calculateHashNormalized( + getPartitionSettings(), + getRequestPartitionId(), + getStorageSettings(), + theResourceType, + theParamName, + theNormalizedString); Condition hashPredicate = BinaryCondition.equalTo(myColumnHashNormPrefix, generatePlaceholder(hash)); - Condition valuePredicate = BinaryCondition.like(myColumnValueNormalized, generatePlaceholder(theLikeExpression)); + Condition valuePredicate = + BinaryCondition.like(myColumnValueNormalized, generatePlaceholder(theLikeExpression)); return ComboCondition.and(hashPredicate, valuePredicate); } @Nonnull public Condition createPredicateNormal(String theResourceType, String theParamName, String theNormalizedString) { - Long hash = ResourceIndexedSearchParamString.calculateHashNormalized(getPartitionSettings(), getRequestPartitionId(), getStorageSettings(), theResourceType, theParamName, theNormalizedString); + Long hash = ResourceIndexedSearchParamString.calculateHashNormalized( + getPartitionSettings(), + getRequestPartitionId(), + getStorageSettings(), + theResourceType, + theParamName, + theNormalizedString); Condition hashPredicate = BinaryCondition.equalTo(myColumnHashNormPrefix, generatePlaceholder(hash)); - Condition valuePredicate = BinaryCondition.equalTo(myColumnValueNormalized, generatePlaceholder(theNormalizedString)); + Condition valuePredicate = + BinaryCondition.equalTo(myColumnValueNormalized, generatePlaceholder(theNormalizedString)); return ComboCondition.and(hashPredicate, valuePredicate); } - private Condition createPredicateNormalGreaterThanOrEqual(String theResourceType, String theParamName, String theNormalizedString) { + private Condition createPredicateNormalGreaterThanOrEqual( + String theResourceType, String theParamName, String theNormalizedString) { Condition hashPredicate = createHashIdentityPredicate(theResourceType, theParamName); - Condition valuePredicate = BinaryCondition.greaterThanOrEq(myColumnValueNormalized, generatePlaceholder(theNormalizedString)); + Condition valuePredicate = + BinaryCondition.greaterThanOrEq(myColumnValueNormalized, generatePlaceholder(theNormalizedString)); return ComboCondition.and(hashPredicate, valuePredicate); } - private Condition 
createPredicateNormalGreaterThan(String theResourceType, String theParamName, String theNormalizedString) { + private Condition createPredicateNormalGreaterThan( + String theResourceType, String theParamName, String theNormalizedString) { Condition hashPredicate = createHashIdentityPredicate(theResourceType, theParamName); - Condition valuePredicate = BinaryCondition.greaterThan(myColumnValueNormalized, generatePlaceholder(theNormalizedString)); + Condition valuePredicate = + BinaryCondition.greaterThan(myColumnValueNormalized, generatePlaceholder(theNormalizedString)); return ComboCondition.and(hashPredicate, valuePredicate); } - private Condition createPredicateNormalLessThanOrEqual(String theResourceType, String theParamName, String theNormalizedString) { + private Condition createPredicateNormalLessThanOrEqual( + String theResourceType, String theParamName, String theNormalizedString) { Condition hashPredicate = createHashIdentityPredicate(theResourceType, theParamName); - Condition valuePredicate = BinaryCondition.lessThanOrEq(myColumnValueNormalized, generatePlaceholder(theNormalizedString)); + Condition valuePredicate = + BinaryCondition.lessThanOrEq(myColumnValueNormalized, generatePlaceholder(theNormalizedString)); return ComboCondition.and(hashPredicate, valuePredicate); } - private Condition createPredicateNormalLessThan(String theResourceType, String theParamName, String theNormalizedString) { + private Condition createPredicateNormalLessThan( + String theResourceType, String theParamName, String theNormalizedString) { Condition hashPredicate = createHashIdentityPredicate(theResourceType, theParamName); - Condition valuePredicate = BinaryCondition.lessThan(myColumnValueNormalized, generatePlaceholder(theNormalizedString)); + Condition valuePredicate = + BinaryCondition.lessThan(myColumnValueNormalized, generatePlaceholder(theNormalizedString)); return ComboCondition.and(hashPredicate, valuePredicate); } @Nonnull - public Condition createPredicateLikeExpressionOnly(String theResourceType, String theParamName, String theLikeExpression, boolean theInverse) { - long hashIdentity = ResourceIndexedSearchParamString.calculateHashIdentity(getPartitionSettings(), getRequestPartitionId(), theResourceType, theParamName); - BinaryCondition identityPredicate = BinaryCondition.equalTo(myColumnHashIdentity, generatePlaceholder(hashIdentity)); + public Condition createPredicateLikeExpressionOnly( + String theResourceType, String theParamName, String theLikeExpression, boolean theInverse) { + long hashIdentity = ResourceIndexedSearchParamString.calculateHashIdentity( + getPartitionSettings(), getRequestPartitionId(), theResourceType, theParamName); + BinaryCondition identityPredicate = + BinaryCondition.equalTo(myColumnHashIdentity, generatePlaceholder(hashIdentity)); BinaryCondition likePredicate; if (theInverse) { likePredicate = BinaryCondition.notLike(myColumnValueNormalized, generatePlaceholder(theLikeExpression)); @@ -235,6 +269,4 @@ public class StringPredicateBuilder extends BaseSearchParamPredicateBuilder { public static String createRightMatchLikeExpression(String likeExpression) { return "%" + likeExpression.replace("%", "\\%"); } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/TagPredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/TagPredicateBuilder.java index eec4cf2076b..4cd690df040 100644 --- 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/TagPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/TagPredicateBuilder.java @@ -61,14 +61,18 @@ public class TagPredicateBuilder extends BaseJoiningPredicateBuilder { myTagDefinitionColumnTagType = myTagDefinitionTable.addColumn("TAG_TYPE"); } - - public Condition createPredicateTag(TagTypeEnum theTagType, List> theTokens, String theParamName, RequestPartitionId theRequestPartitionId) { + public Condition createPredicateTag( + TagTypeEnum theTagType, + List> theTokens, + String theParamName, + RequestPartitionId theRequestPartitionId) { addJoin(getTable(), myTagDefinitionTable, myColumnTagId, myTagDefinitionColumnTagId); return createPredicateTagList(theTagType, theTokens); } private Condition createPredicateTagList(TagTypeEnum theTagType, List> theTokens) { - Condition typePredicate = BinaryCondition.equalTo(myTagDefinitionColumnTagType, generatePlaceholder(theTagType.ordinal())); + Condition typePredicate = + BinaryCondition.equalTo(myTagDefinitionColumnTagType, generatePlaceholder(theTagType.ordinal())); List orPredicates = Lists.newArrayList(); for (Triple next : theTokens) { @@ -81,11 +85,13 @@ public class TagPredicateBuilder extends BaseJoiningPredicateBuilder { } Condition codePredicate = Objects.equals(qualifier, UriParamQualifierEnum.BELOW.getValue()) - ? BinaryCondition.like(myTagDefinitionColumnTagCode, generatePlaceholder(createLeftMatchLikeExpression(code))) - : BinaryCondition.equalTo(myTagDefinitionColumnTagCode, generatePlaceholder(code)); + ? BinaryCondition.like( + myTagDefinitionColumnTagCode, generatePlaceholder(createLeftMatchLikeExpression(code))) + : BinaryCondition.equalTo(myTagDefinitionColumnTagCode, generatePlaceholder(code)); if (isNotBlank(system)) { - Condition systemPredicate = BinaryCondition.equalTo(myTagDefinitionColumnTagSystem, generatePlaceholder(system)); + Condition systemPredicate = + BinaryCondition.equalTo(myTagDefinitionColumnTagSystem, generatePlaceholder(system)); orPredicates.add(ComboCondition.and(typePredicate, systemPredicate, codePredicate)); } else { // Note: We don't have an index for this combo, which means that this may not perform diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/TokenPredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/TokenPredicateBuilder.java index 0a8a053c13a..0bfc660dcdb 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/TokenPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/TokenPredicateBuilder.java @@ -32,7 +32,6 @@ import ca.uhn.fhir.context.support.ValueSetExpansionOptions; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; -import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao; import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser; import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken; @@ -82,10 +81,13 @@ public class TokenPredicateBuilder extends BaseSearchParamPredicateBuilder { @Autowired private IValidationSupport myValidationSupport; + @Autowired private ITermReadSvc myTerminologySvc; + @Autowired private FhirContext myContext; + @Autowired private JpaStorageSettings myStorageSettings; @@ -107,27 +109,23 
@@ public class TokenPredicateBuilder extends BaseSearchParamPredicateBuilder { return myColumnResId; } - public Condition createPredicateToken(Collection theParameters, - String theResourceName, - String theSpnamePrefix, - RuntimeSearchParam theSearchParam, - RequestPartitionId theRequestPartitionId) { + public Condition createPredicateToken( + Collection theParameters, + String theResourceName, + String theSpnamePrefix, + RuntimeSearchParam theSearchParam, + RequestPartitionId theRequestPartitionId) { return createPredicateToken( - theParameters, - theResourceName, - theSpnamePrefix, - theSearchParam, - null, - theRequestPartitionId); + theParameters, theResourceName, theSpnamePrefix, theSearchParam, null, theRequestPartitionId); } - public Condition createPredicateToken(Collection theParameters, - String theResourceName, - String theSpnamePrefix, - RuntimeSearchParam theSearchParam, - SearchFilterParser.CompareOperation theOperation, - RequestPartitionId theRequestPartitionId) { - + public Condition createPredicateToken( + Collection theParameters, + String theResourceName, + String theSpnamePrefix, + RuntimeSearchParam theSearchParam, + SearchFilterParser.CompareOperation theOperation, + RequestPartitionId theRequestPartitionId) { final List codes = new ArrayList<>(); @@ -162,11 +160,21 @@ public class TokenPredicateBuilder extends BaseSearchParamPredicateBuilder { } if (system != null && system.length() > ResourceIndexedSearchParamToken.MAX_LENGTH) { - ourLog.info("Parameter[{}] has system ({}) that is longer than maximum ({}) so will truncate: {} ", paramName, system.length(), ResourceIndexedSearchParamToken.MAX_LENGTH, system); + ourLog.info( + "Parameter[{}] has system ({}) that is longer than maximum ({}) so will truncate: {} ", + paramName, + system.length(), + ResourceIndexedSearchParamToken.MAX_LENGTH, + system); } if (code != null && code.length() > ResourceIndexedSearchParamToken.MAX_LENGTH) { - ourLog.info("Parameter[{}] has code ({}) that is longer than maximum ({}) so will truncate: {} ", paramName, code.length(), ResourceIndexedSearchParamToken.MAX_LENGTH, code); + ourLog.info( + "Parameter[{}] has code ({}) that is longer than maximum ({}) so will truncate: {} ", + paramName, + code.length(), + ResourceIndexedSearchParamToken.MAX_LENGTH, + code); } /* @@ -177,7 +185,8 @@ public class TokenPredicateBuilder extends BaseSearchParamPredicateBuilder { if (myContext.getVersion().getVersion().isNewerThan(FhirVersionEnum.DSTU2)) { ValueSetExpansionOptions valueSetExpansionOptions = new ValueSetExpansionOptions(); valueSetExpansionOptions.setCount(myStorageSettings.getMaximumExpansionSize()); - IValidationSupport.ValueSetExpansionOutcome expanded = myValidationSupport.expandValueSet(new ValidationSupportContext(myValidationSupport), valueSetExpansionOptions, code); + IValidationSupport.ValueSetExpansionOutcome expanded = myValidationSupport.expandValueSet( + new ValidationSupportContext(myValidationSupport), valueSetExpansionOptions, code); codes.addAll(extractValueSetCodes(expanded.getValueSet())); } else { @@ -196,7 +205,8 @@ public class TokenPredicateBuilder extends BaseSearchParamPredicateBuilder { codes.addAll(myTerminologySvc.findCodesBelow(system, code)); } else if (modifier == TokenParamModifier.OF_TYPE) { if (!myStorageSettings.isIndexIdentifierOfType()) { - throw new MethodNotAllowedException(Msg.code(2012) + "The :of-type modifier is not enabled on this server"); + throw new MethodNotAllowedException( + Msg.code(2012) + "The :of-type modifier is not enabled on this 
server"); } if (isBlank(system) || isBlank(code)) { throw new InvalidRequestException(Msg.code(2013) + "Invalid parameter value for :of-type query"); @@ -214,15 +224,13 @@ public class TokenPredicateBuilder extends BaseSearchParamPredicateBuilder { } codes.add(new FhirVersionIndependentConcept(system, code)); } - } - List sortedCodesList = codes - .stream() - .filter(t -> t.getCode() != null || t.getSystem() != null) - .sorted() - .distinct() - .collect(Collectors.toList()); + List sortedCodesList = codes.stream() + .filter(t -> t.getCode() != null || t.getSystem() != null) + .sorted() + .distinct() + .collect(Collectors.toList()); if (codes.isEmpty()) { // This will never match anything @@ -230,7 +238,6 @@ public class TokenPredicateBuilder extends BaseSearchParamPredicateBuilder { return null; } - Condition predicate; if (operation == SearchFilterParser.CompareOperation.ne) { @@ -239,8 +246,10 @@ public class TokenPredicateBuilder extends BaseSearchParamPredicateBuilder { * param name) but not the actual provided token value. */ - long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity(getPartitionSettings(), theRequestPartitionId, theResourceName, paramName); - Condition hashIdentityPredicate = BinaryCondition.equalTo(getColumnHashIdentity(), generatePlaceholder(hashIdentity)); + long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity( + getPartitionSettings(), theRequestPartitionId, theResourceName, paramName); + Condition hashIdentityPredicate = + BinaryCondition.equalTo(getColumnHashIdentity(), generatePlaceholder(hashIdentity)); Condition hashValuePredicate = createPredicateOrList(theResourceName, paramName, sortedCodesList, false); predicate = QueryParameterUtils.toAndPredicate(hashIdentityPredicate, hashValuePredicate); @@ -248,7 +257,6 @@ public class TokenPredicateBuilder extends BaseSearchParamPredicateBuilder { } else { predicate = createPredicateOrList(theResourceName, paramName, sortedCodesList, true); - } return predicate; @@ -262,7 +270,8 @@ public class TokenPredicateBuilder extends BaseSearchParamPredicateBuilder { Optional expansionOpt = expansionChild.getAccessor().getFirstValueOrNull(theValueSet); if (expansionOpt.isPresent()) { IBase expansion = expansionOpt.get(); - BaseRuntimeElementCompositeDefinition expansionDef = (BaseRuntimeElementCompositeDefinition) myContext.getElementDefinition(expansion.getClass()); + BaseRuntimeElementCompositeDefinition expansionDef = + (BaseRuntimeElementCompositeDefinition) myContext.getElementDefinition(expansion.getClass()); BaseRuntimeChildDefinition containsChild = expansionDef.getChildByName("contains"); List contains = containsChild.getAccessor().getValues(expansion); @@ -270,21 +279,27 @@ public class TokenPredicateBuilder extends BaseSearchParamPredicateBuilder { BaseRuntimeChildDefinition.IAccessor codeAccessor = null; for (IBase nextContains : contains) { if (systemAccessor == null) { - systemAccessor = myContext.getElementDefinition(nextContains.getClass()).getChildByName("system").getAccessor(); + systemAccessor = myContext + .getElementDefinition(nextContains.getClass()) + .getChildByName("system") + .getAccessor(); } if (codeAccessor == null) { - codeAccessor = myContext.getElementDefinition(nextContains.getClass()).getChildByName("code").getAccessor(); + codeAccessor = myContext + .getElementDefinition(nextContains.getClass()) + .getChildByName("code") + .getAccessor(); } String system = systemAccessor - .getFirstValueOrNull(nextContains) - .map(t -> (IPrimitiveType) t) - .map(t -> 
t.getValueAsString()) - .orElse(null); + .getFirstValueOrNull(nextContains) + .map(t -> (IPrimitiveType) t) + .map(t -> t.getValueAsString()) + .orElse(null); String code = codeAccessor - .getFirstValueOrNull(nextContains) - .map(t -> (IPrimitiveType) t) - .map(t -> t.getValueAsString()) - .orElse(null); + .getFirstValueOrNull(nextContains) + .map(t -> (IPrimitiveType) t) + .map(t -> t.getValueAsString()) + .orElse(null); if (isNotBlank(system) && isNotBlank(code)) { retVal.add(new FhirVersionIndependentConcept(system, code)); } @@ -300,8 +315,11 @@ public class TokenPredicateBuilder extends BaseSearchParamPredicateBuilder { if (theSearchParam != null) { Set valueSetUris = Sets.newHashSet(); for (String nextPath : theSearchParam.getPathsSplitForResourceType(getResourceType())) { - Class type = getFhirContext().getResourceDefinition(getResourceType()).getImplementingClass(); - BaseRuntimeChildDefinition def = getFhirContext().newTerser().getDefinition(type, nextPath); + Class type = getFhirContext() + .getResourceDefinition(getResourceType()) + .getImplementingClass(); + BaseRuntimeChildDefinition def = + getFhirContext().newTerser().getDefinition(type, nextPath); if (def instanceof BaseRuntimeDeclaredChildDefinition) { String valueSet = ((BaseRuntimeDeclaredChildDefinition) def).getBindingValueSet(); if (isNotBlank(valueSet)) { @@ -311,9 +329,9 @@ public class TokenPredicateBuilder extends BaseSearchParamPredicateBuilder { } if (valueSetUris.size() == 1) { String valueSet = valueSetUris.iterator().next(); - ValueSetExpansionOptions options = new ValueSetExpansionOptions() - .setFailOnMissingCodeSystem(false); - List candidateCodes = myTerminologySvc.expandValueSetIntoConceptList(options, valueSet); + ValueSetExpansionOptions options = new ValueSetExpansionOptions().setFailOnMissingCodeSystem(false); + List candidateCodes = + myTerminologySvc.expandValueSetIntoConceptList(options, valueSet); for (FhirVersionIndependentConcept nextCandidate : candidateCodes) { if (nextCandidate.getCode().equals(code)) { retVal = nextCandidate.getSystem(); @@ -338,17 +356,30 @@ public class TokenPredicateBuilder extends BaseSearchParamPredicateBuilder { String systemDesc = defaultIfBlank(theSystem, "(missing)"); String codeDesc = defaultIfBlank(theCode, "(missing)"); if (isBlank(theCode)) { - String msg = getFhirContext().getLocalizer().getMessage(TokenPredicateBuilder.class, "invalidCodeMissingSystem", theParamName, systemDesc, codeDesc); + String msg = getFhirContext() + .getLocalizer() + .getMessage( + TokenPredicateBuilder.class, + "invalidCodeMissingSystem", + theParamName, + systemDesc, + codeDesc); throw new InvalidRequestException(Msg.code(1239) + msg); } if (isBlank(theSystem)) { - String msg = getFhirContext().getLocalizer().getMessage(TokenPredicateBuilder.class, "invalidCodeMissingCode", theParamName, systemDesc, codeDesc); + String msg = getFhirContext() + .getLocalizer() + .getMessage( + TokenPredicateBuilder.class, "invalidCodeMissingCode", theParamName, systemDesc, codeDesc); throw new InvalidRequestException(Msg.code(1240) + msg); } } - - private Condition createPredicateOrList(String theResourceType, String theSearchParamName, List theCodes, boolean theWantEquals) { + private Condition createPredicateOrList( + String theResourceType, + String theSearchParamName, + List theCodes, + boolean theWantEquals) { Condition[] conditions = new Condition[theCodes.size()]; Long[] hashes = new Long[theCodes.size()]; @@ -360,13 +391,29 @@ public class TokenPredicateBuilder extends 
BaseSearchParamPredicateBuilder { long hash; DbColumn column; if (nextToken.getSystem() == null) { - hash = ResourceIndexedSearchParamToken.calculateHashValue(getPartitionSettings(), getRequestPartitionId(), theResourceType, theSearchParamName, nextToken.getCode()); + hash = ResourceIndexedSearchParamToken.calculateHashValue( + getPartitionSettings(), + getRequestPartitionId(), + theResourceType, + theSearchParamName, + nextToken.getCode()); column = myColumnHashValue; } else if (isBlank(nextToken.getCode())) { - hash = ResourceIndexedSearchParamToken.calculateHashSystem(getPartitionSettings(), getRequestPartitionId(), theResourceType, theSearchParamName, nextToken.getSystem()); + hash = ResourceIndexedSearchParamToken.calculateHashSystem( + getPartitionSettings(), + getRequestPartitionId(), + theResourceType, + theSearchParamName, + nextToken.getSystem()); column = myColumnHashSystem; } else { - hash = ResourceIndexedSearchParamToken.calculateHashSystemAndValue(getPartitionSettings(), getRequestPartitionId(), theResourceType, theSearchParamName, nextToken.getSystem(), nextToken.getCode()); + hash = ResourceIndexedSearchParamToken.calculateHashSystemAndValue( + getPartitionSettings(), + getRequestPartitionId(), + theResourceType, + theSearchParamName, + nextToken.getSystem(), + nextToken.getCode()); column = myColumnHashSystemAndValue; } hashes[i] = hash; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/UriPredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/UriPredicateBuilder.java index 6610234b503..d956199e529 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/UriPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/UriPredicateBuilder.java @@ -60,6 +60,7 @@ public class UriPredicateBuilder extends BaseSearchParamPredicateBuilder { @Autowired private IResourceIndexedSearchParamUriDao myResourceIndexedSearchParamUriDao; + @Autowired private IInterceptorBroadcaster myInterceptorBroadcaster; @@ -73,8 +74,11 @@ public class UriPredicateBuilder extends BaseSearchParamPredicateBuilder { myColumnHashUri = getTable().addColumn("HASH_URI"); } - - public Condition addPredicate(List theUriOrParameterList, String theParamName, SearchFilterParser.CompareOperation theOperation, RequestDetails theRequestDetails) { + public Condition addPredicate( + List theUriOrParameterList, + String theParamName, + SearchFilterParser.CompareOperation theOperation, + RequestDetails theRequestDetails) { List codePredicates = new ArrayList<>(); boolean predicateIsHash = false; @@ -101,20 +105,24 @@ public class UriPredicateBuilder extends BaseSearchParamPredicateBuilder { * * If we ever need to make this more efficient, lucene could certainly be used as an optimization. 
*/ - String msg = "Searching for candidate URI:above parameters for Resource["+getResourceType()+"] param["+theParamName+"]"; + String msg = "Searching for candidate URI:above parameters for Resource[" + getResourceType() + + "] param[" + theParamName + "]"; ourLog.info(msg); StorageProcessingMessage message = new StorageProcessingMessage(); ourLog.warn(msg); message.setMessage(msg); HookParams params = new HookParams() - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - .add(StorageProcessingMessage.class, message); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_WARNING, params); + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(StorageProcessingMessage.class, message); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_WARNING, params); - long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity(getPartitionSettings(), getRequestPartitionId(), getResourceType(), theParamName); - Collection candidates = myResourceIndexedSearchParamUriDao.findAllByHashIdentity(hashIdentity); + long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity( + getPartitionSettings(), getRequestPartitionId(), getResourceType(), theParamName); + Collection candidates = + myResourceIndexedSearchParamUriDao.findAllByHashIdentity(hashIdentity); List toFind = new ArrayList<>(); for (String next : candidates) { if (value.length() >= next.length()) { @@ -128,27 +136,37 @@ public class UriPredicateBuilder extends BaseSearchParamPredicateBuilder { continue; } - Condition uriPredicate = QueryParameterUtils.toEqualToOrInPredicate(myColumnUri, generatePlaceholders(toFind)); - Condition hashAndUriPredicate = combineWithHashIdentityPredicate(getResourceType(), theParamName, uriPredicate); + Condition uriPredicate = + QueryParameterUtils.toEqualToOrInPredicate(myColumnUri, generatePlaceholders(toFind)); + Condition hashAndUriPredicate = + combineWithHashIdentityPredicate(getResourceType(), theParamName, uriPredicate); codePredicates.add(hashAndUriPredicate); } else if (param.getQualifier() == UriParamQualifierEnum.BELOW) { - Condition uriPredicate = BinaryCondition.like(myColumnUri, generatePlaceholder(createLeftMatchLikeExpression(value))); - Condition hashAndUriPredicate = combineWithHashIdentityPredicate(getResourceType(), theParamName, uriPredicate); + Condition uriPredicate = BinaryCondition.like( + myColumnUri, generatePlaceholder(createLeftMatchLikeExpression(value))); + Condition hashAndUriPredicate = + combineWithHashIdentityPredicate(getResourceType(), theParamName, uriPredicate); codePredicates.add(hashAndUriPredicate); } else { Condition uriPredicate = null; if (theOperation == null || theOperation == SearchFilterParser.CompareOperation.eq) { - long hashUri = ResourceIndexedSearchParamUri.calculateHashUri(getPartitionSettings(), getRequestPartitionId(), getResourceType(), theParamName, value); + long hashUri = ResourceIndexedSearchParamUri.calculateHashUri( + getPartitionSettings(), + getRequestPartitionId(), + getResourceType(), + theParamName, + value); uriPredicate = BinaryCondition.equalTo(myColumnHashUri, generatePlaceholder(hashUri)); predicateIsHash = true; } else if (theOperation == SearchFilterParser.CompareOperation.ne) { uriPredicate = BinaryCondition.notEqualTo(myColumnUri, generatePlaceholder(value)); } else 
if (theOperation == SearchFilterParser.CompareOperation.co) { - uriPredicate = BinaryCondition.like(myColumnUri, generatePlaceholder(createLeftAndRightMatchLikeExpression(value))); + uriPredicate = BinaryCondition.like( + myColumnUri, generatePlaceholder(createLeftAndRightMatchLikeExpression(value))); } else if (theOperation == SearchFilterParser.CompareOperation.gt) { uriPredicate = BinaryCondition.greaterThan(myColumnUri, generatePlaceholder(value)); } else if (theOperation == SearchFilterParser.CompareOperation.lt) { @@ -158,12 +176,16 @@ public class UriPredicateBuilder extends BaseSearchParamPredicateBuilder { } else if (theOperation == SearchFilterParser.CompareOperation.le) { uriPredicate = BinaryCondition.lessThanOrEq(myColumnUri, generatePlaceholder(value)); } else if (theOperation == SearchFilterParser.CompareOperation.sw) { - uriPredicate = BinaryCondition.like(myColumnUri, generatePlaceholder(createLeftMatchLikeExpression(value))); + uriPredicate = BinaryCondition.like( + myColumnUri, generatePlaceholder(createLeftMatchLikeExpression(value))); } else if (theOperation == SearchFilterParser.CompareOperation.ew) { - uriPredicate = BinaryCondition.like(myColumnUri, generatePlaceholder(createRightMatchLikeExpression(value))); + uriPredicate = BinaryCondition.like( + myColumnUri, generatePlaceholder(createRightMatchLikeExpression(value))); } else { - throw new IllegalArgumentException(Msg.code(1226) + String.format("Unsupported operator specified in _filter clause, %s", - theOperation.toString())); + throw new IllegalArgumentException(Msg.code(1226) + + String.format( + "Unsupported operator specified in _filter clause, %s", + theOperation.toString())); } codePredicates.add(uriPredicate); @@ -172,7 +194,6 @@ public class UriPredicateBuilder extends BaseSearchParamPredicateBuilder { } else { throw new IllegalArgumentException(Msg.code(1227) + "Invalid URI type: " + nextOr.getClass()); } - } /* @@ -189,7 +210,6 @@ public class UriPredicateBuilder extends BaseSearchParamPredicateBuilder { } else { return combineWithHashIdentityPredicate(getResourceType(), theParamName, orPredicate); } - } public DbColumn getColumnValue() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/PredicateBuilderFactory.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/PredicateBuilderFactory.java index 570caa9d607..611d6af8f31 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/PredicateBuilderFactory.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/PredicateBuilderFactory.java @@ -38,10 +38,7 @@ public class PredicateBuilderFactory { private static final Logger ourLog = LoggerFactory.getLogger(PredicateBuilderFactory.class); public static ICanMakeMissingParamPredicate createPredicateBuilderForParamType( - RestSearchParameterTypeEnum theParamType, - SearchQueryBuilder theBuilder, - QueryStack theQueryStack - ) { + RestSearchParameterTypeEnum theParamType, SearchQueryBuilder theBuilder, QueryStack theQueryStack) { switch (theParamType) { case NUMBER: return createNumberPredicateBuilder(theBuilder); @@ -103,9 +100,10 @@ public class PredicateBuilderFactory { return up; } - private static ResourceLinkPredicateBuilder createReferencePredicateBuilder(QueryStack theQueryStack, SearchQueryBuilder theBuilder) { - ResourceLinkPredicateBuilder retVal = theBuilder.getSqlBuilderFactory().referenceIndexTable(theQueryStack, theBuilder, false); + private static 
ResourceLinkPredicateBuilder createReferencePredicateBuilder( + QueryStack theQueryStack, SearchQueryBuilder theBuilder) { + ResourceLinkPredicateBuilder retVal = + theBuilder.getSqlBuilderFactory().referenceIndexTable(theQueryStack, theBuilder, false); return retVal; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilder.java index 5cd88ba15d3..1c1fcafe47e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilder.java @@ -70,14 +70,14 @@ import org.hibernate.engine.spi.RowSelection; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static ca.uhn.fhir.rest.param.ParamPrefixEnum.GREATERTHAN; import static ca.uhn.fhir.rest.param.ParamPrefixEnum.GREATERTHAN_OR_EQUALS; @@ -114,14 +114,42 @@ public class SearchQueryBuilder { /** * Constructor */ - public SearchQueryBuilder(FhirContext theFhirContext, StorageSettings theStorageSettings, PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, String theResourceType, SqlObjectFactory theSqlBuilderFactory, HibernatePropertiesProvider theDialectProvider, boolean theCountQuery) { - this(theFhirContext, theStorageSettings, thePartitionSettings, theRequestPartitionId, theResourceType, theSqlBuilderFactory, UUID.randomUUID() + "-", theDialectProvider.getDialect(), theCountQuery, new ArrayList<>()); + public SearchQueryBuilder( + FhirContext theFhirContext, + StorageSettings theStorageSettings, + PartitionSettings thePartitionSettings, + RequestPartitionId theRequestPartitionId, + String theResourceType, + SqlObjectFactory theSqlBuilderFactory, + HibernatePropertiesProvider theDialectProvider, + boolean theCountQuery) { + this( + theFhirContext, + theStorageSettings, + thePartitionSettings, + theRequestPartitionId, + theResourceType, + theSqlBuilderFactory, + UUID.randomUUID() + "-", + theDialectProvider.getDialect(), + theCountQuery, + new ArrayList<>()); } /** * Constructor for child SQL Builders */ - private SearchQueryBuilder(FhirContext theFhirContext, StorageSettings theStorageSettings, PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, String theResourceType, SqlObjectFactory theSqlBuilderFactory, String theBindVariableSubstitutionBase, Dialect theDialect, boolean theCountQuery, ArrayList theBindVariableValues) { + private SearchQueryBuilder( + FhirContext theFhirContext, + StorageSettings theStorageSettings, + PartitionSettings thePartitionSettings, + RequestPartitionId theRequestPartitionId, + String theResourceType, + SqlObjectFactory theSqlBuilderFactory, + String theBindVariableSubstitutionBase, + Dialect theDialect, + boolean theCountQuery, + ArrayList theBindVariableValues) { myFhirContext = theFhirContext; myStorageSettings = theStorageSettings; myPartitionSettings = thePartitionSettings; @@ -137,7 +165,6 @@ public class SearchQueryBuilder { dialectIsMsSql = true; } - mySpec = new DbSpec(); mySchema = mySpec.addDefaultSchema(); mySelect = new SelectQuery(); @@ -154,7 +181,8 
@@ public class SearchQueryBuilder { * Add and return a predicate builder (or a root query if no root query exists yet) for selecting on a Composite Unique search parameter */ public ComboUniqueSearchParameterPredicateBuilder addComboUniquePredicateBuilder() { - ComboUniqueSearchParameterPredicateBuilder retVal = mySqlBuilderFactory.newComboUniqueSearchParameterPredicateBuilder(this); + ComboUniqueSearchParameterPredicateBuilder retVal = + mySqlBuilderFactory.newComboUniqueSearchParameterPredicateBuilder(this); addTable(retVal, null); return retVal; } @@ -163,7 +191,8 @@ public class SearchQueryBuilder { * Add and return a predicate builder (or a root query if no root query exists yet) for selecting on a Composite Unique search parameter */ public ComboNonUniqueSearchParameterPredicateBuilder addComboNonUniquePredicateBuilder() { - ComboNonUniqueSearchParameterPredicateBuilder retVal = mySqlBuilderFactory.newComboNonUniqueSearchParameterPredicateBuilder(this); + ComboNonUniqueSearchParameterPredicateBuilder retVal = + mySqlBuilderFactory.newComboNonUniqueSearchParameterPredicateBuilder(this); addTable(retVal, null); return retVal; } @@ -247,7 +276,8 @@ public class SearchQueryBuilder { return mySqlBuilderFactory.quantityIndexTable(this); } - public QuantityNormalizedPredicateBuilder addQuantityNormalizedPredicateBuilder(@Nullable DbColumn theSourceJoinColumn) { + public QuantityNormalizedPredicateBuilder addQuantityNormalizedPredicateBuilder( + @Nullable DbColumn theSourceJoinColumn) { QuantityNormalizedPredicateBuilder retVal = mySqlBuilderFactory.quantityNormalizedIndexTable(this); addTable(retVal, theSourceJoinColumn); @@ -267,7 +297,8 @@ public class SearchQueryBuilder { /** * Create, add and return a predicate builder (or a root query if no root query exists yet) for selecting on a REFERENCE search parameter */ - public ResourceLinkPredicateBuilder addReferencePredicateBuilder(QueryStack theQueryStack, @Nullable DbColumn theSourceJoinColumn) { + public ResourceLinkPredicateBuilder addReferencePredicateBuilder( + QueryStack theQueryStack, @Nullable DbColumn theSourceJoinColumn) { ResourceLinkPredicateBuilder retVal = createReferencePredicateBuilder(theQueryStack); addTable(retVal, theSourceJoinColumn); return retVal; @@ -284,7 +315,8 @@ public class SearchQueryBuilder { * Add and return a predicate builder (or a root query if no root query exists yet) for selecting on a resource link where the * source and target are reversed. This is used for _has queries. 
*/ - public ResourceLinkPredicateBuilder addReferencePredicateBuilderReversed(QueryStack theQueryStack, DbColumn theSourceJoinColumn) { + public ResourceLinkPredicateBuilder addReferencePredicateBuilderReversed( + QueryStack theQueryStack, DbColumn theSourceJoinColumn) { ResourceLinkPredicateBuilder retVal = mySqlBuilderFactory.referenceIndexTable(theQueryStack, this, true); addTable(retVal, theSourceJoinColumn); return retVal; @@ -331,7 +363,8 @@ public class SearchQueryBuilder { return mySqlBuilderFactory.tokenIndexTable(this); } - public void addCustomJoin(SelectQuery.JoinType theJoinType, DbTable theFromTable, DbTable theToTable, Condition theCondition) { + public void addCustomJoin( + SelectQuery.JoinType theJoinType, DbTable theFromTable, DbTable theToTable, Condition theCondition) { mySelect.addCustomJoin(theJoinType, theFromTable, theToTable, theCondition); } @@ -345,7 +378,8 @@ public class SearchQueryBuilder { /** * Add and return a predicate builder (or a root query if no root query exists yet) for selecting on a :missing search parameter */ - public SearchParamPresentPredicateBuilder addSearchParamPresentPredicateBuilder(@Nullable DbColumn theSourceJoinColumn) { + public SearchParamPresentPredicateBuilder addSearchParamPresentPredicateBuilder( + @Nullable DbColumn theSourceJoinColumn) { SearchParamPresentPredicateBuilder retVal = mySqlBuilderFactory.searchParamPresentPredicateBuilder(this); addTable(retVal, theSourceJoinColumn); return retVal; @@ -375,7 +409,6 @@ public class SearchQueryBuilder { return mySqlBuilderFactory.resourceId(this); } - /** * Add and return a predicate builder (or a root query if no root query exists yet) for an arbitrary table */ @@ -383,11 +416,15 @@ public class SearchQueryBuilder { addTable(thePredicateBuilder, theSourceJoinColumn, SelectQuery.JoinType.INNER); } - private void addTableForSorting(BaseJoiningPredicateBuilder thePredicateBuilder, @Nullable DbColumn theSourceJoinColumn) { + private void addTableForSorting( + BaseJoiningPredicateBuilder thePredicateBuilder, @Nullable DbColumn theSourceJoinColumn) { addTable(thePredicateBuilder, theSourceJoinColumn, SelectQuery.JoinType.LEFT_OUTER); } - private void addTable(BaseJoiningPredicateBuilder thePredicateBuilder, @Nullable DbColumn theSourceJoinColumn, SelectQuery.JoinType theJoinType) { + private void addTable( + BaseJoiningPredicateBuilder thePredicateBuilder, + @Nullable DbColumn theSourceJoinColumn, + SelectQuery.JoinType theJoinType) { if (theSourceJoinColumn != null) { DbTable fromTable = theSourceJoinColumn.getTable(); DbTable toTable = thePredicateBuilder.getTable(); @@ -408,7 +445,8 @@ public class SearchQueryBuilder { } if (myCountQuery) { - mySelect.addCustomColumns(FunctionCall.count().setIsDistinct(true).addColumnParams(root.getResourceIdColumn())); + mySelect.addCustomColumns( + FunctionCall.count().setIsDistinct(true).addColumnParams(root.getResourceIdColumn())); } else { mySelect.addColumns(root.getResourceIdColumn()); } @@ -428,14 +466,20 @@ public class SearchQueryBuilder { } } - - public void addJoin(DbTable theFromTable, DbTable theToTable, DbColumn theFromColumn, DbColumn theToColumn, SelectQuery.JoinType theJoinType) { - Join join = new DbJoin(mySpec, theFromTable, theToTable, new DbColumn[]{theFromColumn}, new DbColumn[]{theToColumn}); + public void addJoin( + DbTable theFromTable, + DbTable theToTable, + DbColumn theFromColumn, + DbColumn theToColumn, + SelectQuery.JoinType theJoinType) { + Join join = new DbJoin( + mySpec, theFromTable, theToTable, new DbColumn[] 
{theFromColumn}, new DbColumn[] {theToColumn}); mySelect.addJoins(theJoinType, join); } public void addJoin(DbTable theFromTable, DbTable theToTable, DbColumn theFromColumn, DbColumn theToColumn) { - Join join = new DbJoin(mySpec, theFromTable, theToTable, new DbColumn[]{theFromColumn}, new DbColumn[]{theToColumn}); + Join join = new DbJoin( + mySpec, theFromTable, theToTable, new DbColumn[] {theFromColumn}, new DbColumn[] {theToColumn}); mySelect.addJoins(SelectQuery.JoinType.INNER, join); } @@ -511,11 +555,23 @@ public class SearchQueryBuilder { } if (limitHandler.bindLimitParametersInReverseOrder()) { - startOfQueryParameterIndex = bindCountParameter(bindVariables, maxResultsToFetch, limitHandler, startOfQueryParameterIndex, bindLimitParametersFirst); - bindOffsetParameter(bindVariables, offset, limitHandler, startOfQueryParameterIndex, bindLimitParametersFirst); + startOfQueryParameterIndex = bindCountParameter( + bindVariables, + maxResultsToFetch, + limitHandler, + startOfQueryParameterIndex, + bindLimitParametersFirst); + bindOffsetParameter( + bindVariables, offset, limitHandler, startOfQueryParameterIndex, bindLimitParametersFirst); } else { - startOfQueryParameterIndex = bindOffsetParameter(bindVariables, offset, limitHandler, startOfQueryParameterIndex, bindLimitParametersFirst); - bindCountParameter(bindVariables, maxResultsToFetch, limitHandler, startOfQueryParameterIndex, bindLimitParametersFirst); + startOfQueryParameterIndex = bindOffsetParameter( + bindVariables, offset, limitHandler, startOfQueryParameterIndex, bindLimitParametersFirst); + bindCountParameter( + bindVariables, + maxResultsToFetch, + limitHandler, + startOfQueryParameterIndex, + bindLimitParametersFirst); } } } @@ -523,7 +579,12 @@ public class SearchQueryBuilder { return new GeneratedSql(myMatchNothing, sql, bindVariables); } - private int bindCountParameter(List bindVariables, Integer maxResultsToFetch, AbstractLimitHandler limitHandler, int startOfQueryParameterIndex, boolean bindLimitParametersFirst) { + private int bindCountParameter( + List bindVariables, + Integer maxResultsToFetch, + AbstractLimitHandler limitHandler, + int startOfQueryParameterIndex, + boolean bindLimitParametersFirst) { if (limitHandler.supportsLimit()) { if (bindLimitParametersFirst) { bindVariables.add(startOfQueryParameterIndex++, maxResultsToFetch); @@ -534,7 +595,12 @@ public class SearchQueryBuilder { return startOfQueryParameterIndex; } - public int bindOffsetParameter(List theBindVariables, @Nullable Integer theOffset, AbstractLimitHandler theLimitHandler, int theStartOfQueryParameterIndex, boolean theBindLimitParametersFirst) { + public int bindOffsetParameter( + List theBindVariables, + @Nullable Integer theOffset, + AbstractLimitHandler theLimitHandler, + int theStartOfQueryParameterIndex, + boolean theBindLimitParametersFirst) { if (theLimitHandler.supportsLimitOffset() && theOffset != null) { if (theBindLimitParametersFirst) { theBindVariables.add(theStartOfQueryParameterIndex++, theOffset); @@ -555,7 +621,8 @@ public class SearchQueryBuilder { /** * If at least one predicate builder already exists, return the last one added to the chain. If none has been selected, create a builder on HFJ_RESOURCE, add it and return it. 
*/ - public BaseJoiningPredicateBuilder getOrCreateFirstPredicateBuilder(boolean theIncludeResourceTypeAndNonDeletedFlag) { + public BaseJoiningPredicateBuilder getOrCreateFirstPredicateBuilder( + boolean theIncludeResourceTypeAndNonDeletedFlag) { if (myFirstPredicateBuilder == null) { getOrCreateResourceTablePredicateBuilder(theIncludeResourceTypeAndNonDeletedFlag); } @@ -566,7 +633,8 @@ public class SearchQueryBuilder { return getOrCreateResourceTablePredicateBuilder(true); } - public ResourceTablePredicateBuilder getOrCreateResourceTablePredicateBuilder(boolean theIncludeResourceTypeAndNonDeletedFlag) { + public ResourceTablePredicateBuilder getOrCreateResourceTablePredicateBuilder( + boolean theIncludeResourceTypeAndNonDeletedFlag) { if (myResourceTableRoot == null) { ResourceTablePredicateBuilder resourceTable = mySqlBuilderFactory.resourceTable(this); addTable(resourceTable, null); @@ -591,17 +659,13 @@ public class SearchQueryBuilder { } public List generatePlaceholders(Collection theValues) { - return theValues - .stream() - .map(this::generatePlaceholder) - .collect(Collectors.toList()); + return theValues.stream().map(this::generatePlaceholder).collect(Collectors.toList()); } public int countBindVariables() { return myBindVariableValues.size(); } - public void setMatchNothing() { myMatchNothing = true; } @@ -638,20 +702,28 @@ public class SearchQueryBuilder { BinaryCondition condition; if (isNotEqualsComparator(theDateRange)) { - condition = createConditionForValueWithComparator(LESSTHAN, resourceTableRoot.getLastUpdatedColumn(), theDateRange.getLowerBoundAsInstant()); + condition = createConditionForValueWithComparator( + LESSTHAN, resourceTableRoot.getLastUpdatedColumn(), theDateRange.getLowerBoundAsInstant()); conditions.add(condition); - condition = createConditionForValueWithComparator(GREATERTHAN, resourceTableRoot.getLastUpdatedColumn(), theDateRange.getUpperBoundAsInstant()); + condition = createConditionForValueWithComparator( + GREATERTHAN, resourceTableRoot.getLastUpdatedColumn(), theDateRange.getUpperBoundAsInstant()); conditions.add(condition); return ComboCondition.or(conditions.toArray(new Condition[0])); } if (theDateRange.getLowerBoundAsInstant() != null) { - condition = createConditionForValueWithComparator(GREATERTHAN_OR_EQUALS, resourceTableRoot.getLastUpdatedColumn(), theDateRange.getLowerBoundAsInstant()); + condition = createConditionForValueWithComparator( + GREATERTHAN_OR_EQUALS, + resourceTableRoot.getLastUpdatedColumn(), + theDateRange.getLowerBoundAsInstant()); conditions.add(condition); } if (theDateRange.getUpperBoundAsInstant() != null) { - condition = createConditionForValueWithComparator(LESSTHAN_OR_EQUALS, resourceTableRoot.getLastUpdatedColumn(), theDateRange.getUpperBoundAsInstant()); + condition = createConditionForValueWithComparator( + LESSTHAN_OR_EQUALS, + resourceTableRoot.getLastUpdatedColumn(), + theDateRange.getUpperBoundAsInstant()); conditions.add(condition); } @@ -663,12 +735,14 @@ public class SearchQueryBuilder { DateParam lb = theDateRange.getLowerBound(); DateParam ub = theDateRange.getUpperBound(); - return lb != null && ub != null && lb.getPrefix().equals(NOT_EQUAL) && ub.getPrefix().equals(NOT_EQUAL); + return lb != null + && ub != null + && lb.getPrefix().equals(NOT_EQUAL) + && ub.getPrefix().equals(NOT_EQUAL); } return false; } - public void addResourceIdsPredicate(List thePidList) { DbColumn resourceIdColumn = getOrCreateFirstPredicateBuilder().getResourceIdColumn(); InCondition predicate = new InCondition(resourceIdColumn, 
generatePlaceholders(thePidList)); @@ -678,11 +752,10 @@ public class SearchQueryBuilder { public void excludeResourceIdsPredicate(Set theExistingPidSetToExclude) { // Do nothing if it's empty - if (theExistingPidSetToExclude == null || theExistingPidSetToExclude.isEmpty()) - return; - + if (theExistingPidSetToExclude == null || theExistingPidSetToExclude.isEmpty()) return; + List excludePids = JpaPid.toLongList(theExistingPidSetToExclude); - + ourLog.trace("excludePids = " + excludePids); DbColumn resourceIdColumn = getOrCreateFirstPredicateBuilder().getResourceIdColumn(); @@ -691,7 +764,8 @@ public class SearchQueryBuilder { addPredicate(predicate); } - public BinaryCondition createConditionForValueWithComparator(ParamPrefixEnum theComparator, DbColumn theColumn, Object theValue) { + public BinaryCondition createConditionForValueWithComparator( + ParamPrefixEnum theComparator, DbColumn theColumn, Object theValue) { switch (theComparator) { case LESSTHAN: return BinaryCondition.lessThan(theColumn, generatePlaceholder(theValue)); @@ -713,7 +787,17 @@ public class SearchQueryBuilder { } public SearchQueryBuilder newChildSqlBuilder() { - return new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, myResourceType, mySqlBuilderFactory, myBindVariableSubstitutionBase, myDialect, false, myBindVariableValues); + return new SearchQueryBuilder( + myFhirContext, + myStorageSettings, + myPartitionSettings, + myRequestPartitionId, + myResourceType, + mySqlBuilderFactory, + myBindVariableSubstitutionBase, + myDialect, + false, + myBindVariableValues); } public SelectQuery getSelect() { @@ -724,15 +808,21 @@ public class SearchQueryBuilder { return myHaveAtLeastOnePredicate; } - public void addSortCoordsNear(CoordsPredicateBuilder theCoordsBuilder, double theLatitudeValue, double theLongitudeValue, boolean theAscending) { + public void addSortCoordsNear( + CoordsPredicateBuilder theCoordsBuilder, + double theLatitudeValue, + double theLongitudeValue, + boolean theAscending) { FunctionCall absLatitude = new FunctionCall("ABS"); String latitudePlaceholder = generatePlaceholder(theLatitudeValue); - ComboExpression absLatitudeMiddle = new ComboExpression(ComboExpression.Op.SUBTRACT, theCoordsBuilder.getColumnLatitude(), latitudePlaceholder); + ComboExpression absLatitudeMiddle = new ComboExpression( + ComboExpression.Op.SUBTRACT, theCoordsBuilder.getColumnLatitude(), latitudePlaceholder); absLatitude = absLatitude.addCustomParams(absLatitudeMiddle); FunctionCall absLongitude = new FunctionCall("ABS"); String longitudePlaceholder = generatePlaceholder(theLongitudeValue); - ComboExpression absLongitudeMiddle = new ComboExpression(ComboExpression.Op.SUBTRACT, theCoordsBuilder.getColumnLongitude(), longitudePlaceholder); + ComboExpression absLongitudeMiddle = new ComboExpression( + ComboExpression.Op.SUBTRACT, theCoordsBuilder.getColumnLongitude(), longitudePlaceholder); absLongitude = absLongitude.addCustomParams(absLongitudeMiddle); ComboExpression sum = new ComboExpression(ComboExpression.Op.ADD, absLatitude, absLongitude); @@ -775,11 +865,16 @@ public class SearchQueryBuilder { addSortDate(theColumnValueNormalized, theAscending, nullOrder, theUseAggregate); } - public void addSortString(DbColumn theTheColumnValueNormalized, boolean theTheAscending, OrderObject.NullOrder theNullOrder, boolean theUseAggregate) { + public void addSortString( + DbColumn theTheColumnValueNormalized, + boolean theTheAscending, + OrderObject.NullOrder theNullOrder, + boolean 
theUseAggregate) { if ((dialectIsMySql || dialectIsMsSql)) { // MariaDB, MySQL and MSSQL do not support "NULLS FIRST" and "NULLS LAST" syntax. String direction = theTheAscending ? " ASC" : " DESC"; - String sortColumnName = theTheColumnValueNormalized.getTable().getAlias() + "." + theTheColumnValueNormalized.getName(); + String sortColumnName = + theTheColumnValueNormalized.getTable().getAlias() + "." + theTheColumnValueNormalized.getName(); final StringBuilder sortColumnNameBuilder = new StringBuilder(); // The following block has been commented out for performance. // Uncomment if NullOrder is needed for MariaDB, MySQL or MSSQL @@ -791,7 +886,7 @@ public class SearchQueryBuilder { // 1 for null and 0 non-null so that nulls will be sorted as greater than non-nulls. sortColumnNameBuilder.append( "CASE WHEN " ).append( sortColumnName ).append( " IS NULL THEN 1 ELSE 0 END" ).append(direction).append(", "); } - */ + */ sortColumnName = formatColumnNameForAggregate(theTheAscending, theUseAggregate, sortColumnName); sortColumnNameBuilder.append(sortColumnName).append(direction); mySelect.addCustomOrderings(sortColumnNameBuilder.toString()); @@ -800,7 +895,8 @@ public class SearchQueryBuilder { } } - private static String formatColumnNameForAggregate(boolean theTheAscending, boolean theUseAggregate, String sortColumnName) { + private static String formatColumnNameForAggregate( + boolean theTheAscending, boolean theUseAggregate, String sortColumnName) { if (theUseAggregate) { String aggregateFunction; if (theTheAscending) { @@ -813,15 +909,20 @@ public class SearchQueryBuilder { return sortColumnName; } - public void addSortNumeric(DbColumn theTheColumnValueNormalized, boolean theAscending, OrderObject.NullOrder theNullOrder, boolean theUseAggregate) { + public void addSortNumeric( + DbColumn theTheColumnValueNormalized, + boolean theAscending, + OrderObject.NullOrder theNullOrder, + boolean theUseAggregate) { if ((dialectIsMySql || dialectIsMsSql)) { // MariaDB, MySQL and MSSQL do not support "NULLS FIRST" and "NULLS LAST" syntax. // Null values are always treated as less than non-null values. // As such special handling is required here. String direction; - String sortColumnName = theTheColumnValueNormalized.getTable().getAlias() + "." + theTheColumnValueNormalized.getName(); + String sortColumnName = + theTheColumnValueNormalized.getTable().getAlias() + "." + theTheColumnValueNormalized.getName(); if ((theAscending && theNullOrder == OrderObject.NullOrder.LAST) - || (!theAscending && theNullOrder == OrderObject.NullOrder.FIRST)) { + || (!theAscending && theNullOrder == OrderObject.NullOrder.FIRST)) { // Negating the numeric column value and reversing the sort order will ensure that the rows appear // in the correct order with nulls appearing first or last as needed. direction = theAscending ? " DESC" : " ASC"; @@ -836,11 +937,16 @@ public class SearchQueryBuilder { } } - public void addSortDate(DbColumn theTheColumnValueNormalized, boolean theTheAscending, OrderObject.NullOrder theNullOrder, boolean theUseAggregate) { + public void addSortDate( + DbColumn theTheColumnValueNormalized, + boolean theTheAscending, + OrderObject.NullOrder theNullOrder, + boolean theUseAggregate) { if ((dialectIsMySql || dialectIsMsSql)) { // MariaDB, MySQL and MSSQL do not support "NULLS FIRST" and "NULLS LAST" syntax. String direction = theTheAscending ? " ASC" : " DESC"; - String sortColumnName = theTheColumnValueNormalized.getTable().getAlias() + "." 
+ theTheColumnValueNormalized.getName(); + String sortColumnName = + theTheColumnValueNormalized.getTable().getAlias() + "." + theTheColumnValueNormalized.getName(); final StringBuilder sortColumnNameBuilder = new StringBuilder(); // The following block has been commented out for performance. // Uncomment if NullOrder is needed for MariaDB, MySQL or MSSQL @@ -852,7 +958,7 @@ public class SearchQueryBuilder { // 1 for null and 0 non-null so that nulls will be sorted as greater than non-nulls. sortColumnNameBuilder.append( "CASE WHEN " ).append( sortColumnName ).append( " IS NULL THEN 1 ELSE 0 END" ).append(direction).append(", "); } - */ + */ sortColumnName = formatColumnNameForAggregate(theTheAscending, theUseAggregate, sortColumnName); sortColumnNameBuilder.append(sortColumnName).append(direction); mySelect.addCustomOrderings(sortColumnNameBuilder.toString()); @@ -861,7 +967,11 @@ public class SearchQueryBuilder { } } - private void addSort(DbColumn theTheColumnValueNormalized, boolean theTheAscending, OrderObject.NullOrder theNullOrder, boolean theUseAggregate) { + private void addSort( + DbColumn theTheColumnValueNormalized, + boolean theTheAscending, + OrderObject.NullOrder theNullOrder, + boolean theUseAggregate) { OrderObject.Dir direction = theTheAscending ? OrderObject.Dir.ASCENDING : OrderObject.Dir.DESCENDING; Object columnToOrder = theTheColumnValueNormalized; if (theUseAggregate) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryExecutor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryExecutor.java index 951600b6d65..47613f4ad20 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryExecutor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryExecutor.java @@ -31,16 +31,15 @@ import org.hibernate.ScrollMode; import org.hibernate.ScrollableResults; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.transaction.support.TransactionSynchronizationManager; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.util.Arrays; import javax.persistence.EntityManager; import javax.persistence.FlushModeType; import javax.persistence.PersistenceContext; import javax.persistence.PersistenceContextType; import javax.persistence.Query; -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.util.Arrays; public class SearchQueryExecutor implements ISearchQueryExecutor { @@ -53,6 +52,7 @@ public class SearchQueryExecutor implements ISearchQueryExecutor { @PersistenceContext(type = PersistenceContextType.TRANSACTION) private EntityManager myEntityManager; + private boolean myQueryInitialized; private Connection myConnection; private PreparedStatement myStatement; @@ -131,12 +131,17 @@ public class SearchQueryExecutor implements ISearchQueryExecutor { hibernateQuery.setCacheMode(CacheMode.IGNORE); hibernateQuery.setReadOnly(true); - // This tells hibernate not to flush when we call scroll(), but rather to wait until the transaction commits and - // only flush then. We need to do this so that any exceptions that happen in the transaction happen when + // This tells hibernate not to flush when we call scroll(), but rather to wait until the transaction + // commits and + // only flush then. We need to do this so that any exceptions that happen in the transaction happen + // when // we try to commit the transaction, and not here. 
- // See the test called testTransaction_multiThreaded (in FhirResourceDaoR4ConcurrentWriteTest) which triggers + // See the test called testTransaction_multiThreaded (in FhirResourceDaoR4ConcurrentWriteTest) which + // triggers // the following exception if we don't set this flush mode: - // java.util.concurrent.ExecutionException: org.springframework.transaction.UnexpectedRollbackException: Transaction silently rolled back because it has been marked as rollback-only + // java.util.concurrent.ExecutionException: + // org.springframework.transaction.UnexpectedRollbackException: Transaction silently rolled back + // because it has been marked as rollback-only hibernateQuery.setFlushMode(FlushModeType.COMMIT); ScrollableResults scrollableResults = hibernateQuery.scroll(ScrollMode.FORWARD_ONLY); myResultSet = new ScrollableResultsIterator<>(scrollableResults); @@ -162,4 +167,3 @@ public class SearchQueryExecutor implements ISearchQueryExecutor { return NO_VALUE_EXECUTOR; } } - diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SqlObjectFactory.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SqlObjectFactory.java index 60ae1a571fc..29e04527f96 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SqlObjectFactory.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SqlObjectFactory.java @@ -26,8 +26,8 @@ import ca.uhn.fhir.jpa.search.builder.predicate.CoordsPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.DatePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ForcedIdPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.NumberPredicateBuilder; -import ca.uhn.fhir.jpa.search.builder.predicate.QuantityPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.QuantityNormalizedPredicateBuilder; +import ca.uhn.fhir.jpa.search.builder.predicate.QuantityPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceIdPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceLinkPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceTablePredicateBuilder; @@ -45,15 +45,16 @@ public class SqlObjectFactory { @Autowired private ApplicationContext myApplicationContext; - public ComboUniqueSearchParameterPredicateBuilder newComboUniqueSearchParameterPredicateBuilder(SearchQueryBuilder theSearchSqlBuilder) { + public ComboUniqueSearchParameterPredicateBuilder newComboUniqueSearchParameterPredicateBuilder( + SearchQueryBuilder theSearchSqlBuilder) { return myApplicationContext.getBean(ComboUniqueSearchParameterPredicateBuilder.class, theSearchSqlBuilder); } - public ComboNonUniqueSearchParameterPredicateBuilder newComboNonUniqueSearchParameterPredicateBuilder(SearchQueryBuilder theSearchSqlBuilder) { + public ComboNonUniqueSearchParameterPredicateBuilder newComboNonUniqueSearchParameterPredicateBuilder( + SearchQueryBuilder theSearchSqlBuilder) { return myApplicationContext.getBean(ComboNonUniqueSearchParameterPredicateBuilder.class, theSearchSqlBuilder); } - public CoordsPredicateBuilder coordsPredicateBuilder(SearchQueryBuilder theSearchSqlBuilder) { return myApplicationContext.getBean(CoordsPredicateBuilder.class, theSearchSqlBuilder); } @@ -77,9 +78,11 @@ public class SqlObjectFactory { public QuantityNormalizedPredicateBuilder quantityNormalizedIndexTable(SearchQueryBuilder theSearchSqlBuilder) { return myApplicationContext.getBean(QuantityNormalizedPredicateBuilder.class, 
theSearchSqlBuilder); } - - public ResourceLinkPredicateBuilder referenceIndexTable(QueryStack theQueryStack, SearchQueryBuilder theSearchSqlBuilder, boolean theReversed) { - return myApplicationContext.getBean(ResourceLinkPredicateBuilder.class, theQueryStack, theSearchSqlBuilder, theReversed); + + public ResourceLinkPredicateBuilder referenceIndexTable( + QueryStack theQueryStack, SearchQueryBuilder theSearchSqlBuilder, boolean theReversed) { + return myApplicationContext.getBean( + ResourceLinkPredicateBuilder.class, theQueryStack, theSearchSqlBuilder, theReversed); } public ResourceTablePredicateBuilder resourceTable(SearchQueryBuilder theSearchSqlBuilder) { @@ -90,7 +93,8 @@ public class SqlObjectFactory { return myApplicationContext.getBean(ResourceIdPredicateBuilder.class, theSearchSqlBuilder); } - public SearchParamPresentPredicateBuilder searchParamPresentPredicateBuilder(SearchQueryBuilder theSearchSqlBuilder) { + public SearchParamPresentPredicateBuilder searchParamPresentPredicateBuilder( + SearchQueryBuilder theSearchSqlBuilder) { return myApplicationContext.getBean(SearchParamPresentPredicateBuilder.class, theSearchSqlBuilder); } @@ -117,5 +121,4 @@ public class SqlObjectFactory { public SearchQueryExecutor newSearchQueryExecutor(GeneratedSql theGeneratedSql, Integer theMaxResultsToFetch) { return myApplicationContext.getBean(SearchQueryExecutor.class, theGeneratedSql, theMaxResultsToFetch); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchContinuationTask.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchContinuationTask.java index c2921ba2976..d681a5f44c7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchContinuationTask.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchContinuationTask.java @@ -24,8 +24,8 @@ import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.dao.SearchBuilderFactory; -import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; +import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.search.SearchStatusEnum; import ca.uhn.fhir.jpa.search.ExceptionService; import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc; @@ -44,28 +44,26 @@ public class SearchContinuationTask extends SearchTask { private final RequestDetails myRequestDetails; public SearchContinuationTask( - SearchTaskParameters theCreationParams, - HapiTransactionService theTxService, - FhirContext theContext, - IInterceptorBroadcaster theInterceptorBroadcaster, - SearchBuilderFactory theSearchBuilderFactory, - ISearchResultCacheSvc theSearchResultCacheSvc, - JpaStorageSettings theStorageSettings, - ISearchCacheSvc theSearchCacheSvc, - IPagingProvider thePagingProvider, - ExceptionService theExceptionSvc - ) { + SearchTaskParameters theCreationParams, + HapiTransactionService theTxService, + FhirContext theContext, + IInterceptorBroadcaster theInterceptorBroadcaster, + SearchBuilderFactory theSearchBuilderFactory, + ISearchResultCacheSvc theSearchResultCacheSvc, + JpaStorageSettings theStorageSettings, + ISearchCacheSvc theSearchCacheSvc, + IPagingProvider thePagingProvider, + ExceptionService theExceptionSvc) { super( - theCreationParams, - theTxService, - theContext, - theInterceptorBroadcaster, - theSearchBuilderFactory, - 
theSearchResultCacheSvc, - theStorageSettings, - theSearchCacheSvc, - thePagingProvider - ); + theCreationParams, + theTxService, + theContext, + theInterceptorBroadcaster, + theSearchBuilderFactory, + theSearchResultCacheSvc, + theStorageSettings, + theSearchCacheSvc, + thePagingProvider); myRequestDetails = theCreationParams.Request; myExceptionSvc = theExceptionSvc; @@ -76,17 +74,22 @@ public class SearchContinuationTask extends SearchTask { try { RequestPartitionId requestPartitionId = getRequestPartitionId(); myTxService - .withRequest(myRequestDetails) - .withRequestPartitionId(requestPartitionId) - .execute(() -> { - List previouslyAddedResourcePids = mySearchResultCacheSvc.fetchAllResultPids(getSearch(), myRequestDetails, requestPartitionId); - if (previouslyAddedResourcePids == null) { - throw myExceptionSvc.newUnknownSearchException(getSearch().getUuid()); - } + .withRequest(myRequestDetails) + .withRequestPartitionId(requestPartitionId) + .execute(() -> { + List previouslyAddedResourcePids = mySearchResultCacheSvc.fetchAllResultPids( + getSearch(), myRequestDetails, requestPartitionId); + if (previouslyAddedResourcePids == null) { + throw myExceptionSvc.newUnknownSearchException( + getSearch().getUuid()); + } - ourLog.trace("Have {} previously added IDs in search: {}", previouslyAddedResourcePids.size(), getSearch().getUuid()); - setPreviouslyAddedResourcePids(previouslyAddedResourcePids); - }); + ourLog.trace( + "Have {} previously added IDs in search: {}", + previouslyAddedResourcePids.size(), + getSearch().getUuid()); + setPreviouslyAddedResourcePids(previouslyAddedResourcePids); + }); } catch (Throwable e) { ourLog.error("Failure processing search", e); @@ -102,5 +105,4 @@ public class SearchContinuationTask extends SearchTask { return super.call(); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchTask.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchTask.java index bb9ca5838c6..64b39e376d4 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchTask.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchTask.java @@ -60,7 +60,6 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.springframework.transaction.annotation.Isolation; import org.springframework.transaction.annotation.Propagation; -import javax.annotation.Nonnull; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; @@ -69,6 +68,7 @@ import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; +import javax.annotation.Nonnull; import static ca.uhn.fhir.jpa.util.SearchParameterMapCalculator.isWantCount; import static ca.uhn.fhir.jpa.util.SearchParameterMapCalculator.isWantOnlyCount; @@ -126,16 +126,15 @@ public class SearchTask implements Callable { * Constructor */ public SearchTask( - SearchTaskParameters theCreationParams, - HapiTransactionService theManagedTxManager, - FhirContext theContext, - IInterceptorBroadcaster theInterceptorBroadcaster, - SearchBuilderFactory theSearchBuilderFactory, - ISearchResultCacheSvc theSearchResultCacheSvc, - JpaStorageSettings theStorageSettings, - ISearchCacheSvc theSearchCacheSvc, - IPagingProvider thePagingProvider - ) { + SearchTaskParameters theCreationParams, + HapiTransactionService theManagedTxManager, + FhirContext theContext, + IInterceptorBroadcaster 
theInterceptorBroadcaster, + SearchBuilderFactory theSearchBuilderFactory, + ISearchResultCacheSvc theSearchResultCacheSvc, + JpaStorageSettings theStorageSettings, + ISearchCacheSvc theSearchCacheSvc, + IPagingProvider thePagingProvider) { // beans myTxService = theManagedTxManager; myContext = theContext; @@ -176,7 +175,8 @@ public class SearchTask implements Callable { ourLog.trace("Awaiting initial sync"); do { ourLog.trace("Search {} aborted: {}", getSearch().getUuid(), !isNotAborted()); - if (AsyncUtil.awaitLatchAndThrowInternalErrorExceptionOnInterrupt(getInitialCollectionLatch(), 250L, TimeUnit.MILLISECONDS)) { + if (AsyncUtil.awaitLatchAndThrowInternalErrorExceptionOnInterrupt( + getInitialCollectionLatch(), 250L, TimeUnit.MILLISECONDS)) { break; } } while (getSearch().getStatus() == SearchStatusEnum.LOADING); @@ -199,7 +199,8 @@ public class SearchTask implements Callable { } private ISearchBuilder newSearchBuilder() { - Class resourceTypeClass = myContext.getResourceDefinition(myResourceType).getImplementingClass(); + Class resourceTypeClass = + myContext.getResourceDefinition(myResourceType).getImplementingClass(); return mySearchBuilderFactory.newSearchBuilder(myCallingDao, myResourceType, resourceTypeClass); } @@ -239,7 +240,10 @@ public class SearchTask implements Callable { } if (keepWaiting) { - ourLog.info("Waiting as we only have {} results - Search status: {}", mySyncedPids.size(), mySearch.getStatus()); + ourLog.info( + "Waiting as we only have {} results - Search status: {}", + mySyncedPids.size(), + mySearch.getStatus()); AsyncUtil.sleep(500L); } } while (keepWaiting); @@ -259,105 +263,123 @@ public class SearchTask implements Callable { } } - ourLog.trace("Done syncing results - Wanted {}-{} and returning {} of {}", theFromIndex, theToIndex, retVal.size(), mySyncedPids.size()); + ourLog.trace( + "Done syncing results - Wanted {}-{} and returning {} of {}", + theFromIndex, + theToIndex, + retVal.size(), + mySyncedPids.size()); return retVal; } public void saveSearch() { myTxService - .withRequest(myRequest) - .withRequestPartitionId(myRequestPartitionId) - .withPropagation(Propagation.REQUIRES_NEW) - .execute(() -> doSaveSearch()); + .withRequest(myRequest) + .withRequestPartitionId(myRequestPartitionId) + .withPropagation(Propagation.REQUIRES_NEW) + .execute(() -> doSaveSearch()); } private void saveUnsynced(final IResultIterator theResultIter) { myTxService - .withRequest(myRequest) - .withRequestPartitionId(myRequestPartitionId) - .execute(() -> { - if (mySearch.getId() == null) { + .withRequest(myRequest) + .withRequestPartitionId(myRequestPartitionId) + .execute(() -> { + if (mySearch.getId() == null) { + doSaveSearch(); + } + + ArrayList unsyncedPids = myUnsyncedPids; + int countBlocked = 0; + + // Interceptor call: STORAGE_PREACCESS_RESOURCES + // This can be used to remove results from the search result details before + // the user has a chance to know that they were in the results + if (mySearchRuntimeDetails.getRequestDetails() != null && unsyncedPids.isEmpty() == false) { + JpaPreResourceAccessDetails accessDetails = + new JpaPreResourceAccessDetails(unsyncedPids, () -> newSearchBuilder()); + HookParams params = new HookParams() + .add(IPreResourceAccessDetails.class, accessDetails) + .add(RequestDetails.class, mySearchRuntimeDetails.getRequestDetails()) + .addIfMatchesType( + ServletRequestDetails.class, mySearchRuntimeDetails.getRequestDetails()); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, myRequest, 
Pointcut.STORAGE_PREACCESS_RESOURCES, params); + + for (int i = unsyncedPids.size() - 1; i >= 0; i--) { + if (accessDetails.isDontReturnResourceAtIndex(i)) { + unsyncedPids.remove(i); + myCountBlockedThisPass++; + myCountSavedTotal++; + countBlocked++; + } + } + } + + // Actually store the results in the query cache storage + myCountSavedTotal += unsyncedPids.size(); + myCountSavedThisPass += unsyncedPids.size(); + mySearchResultCacheSvc.storeResults( + mySearch, mySyncedPids, unsyncedPids, myRequest, getRequestPartitionId()); + + synchronized (mySyncedPids) { + int numSyncedThisPass = unsyncedPids.size(); + ourLog.trace( + "Syncing {} search results - Have more: {}", + numSyncedThisPass, + theResultIter.hasNext()); + mySyncedPids.addAll(unsyncedPids); + unsyncedPids.clear(); + + if (theResultIter.hasNext() == false) { + int skippedCount = theResultIter.getSkippedCount(); + int nonSkippedCount = theResultIter.getNonSkippedCount(); + int totalFetched = skippedCount + myCountSavedThisPass + myCountBlockedThisPass; + ourLog.trace( + "MaxToFetch[{}] SkippedCount[{}] CountSavedThisPass[{}] CountSavedThisTotal[{}] AdditionalPrefetchRemaining[{}]", + myMaxResultsToFetch, + skippedCount, + myCountSavedThisPass, + myCountSavedTotal, + myAdditionalPrefetchThresholdsRemaining); + + if (nonSkippedCount == 0 + || (myMaxResultsToFetch != null && totalFetched < myMaxResultsToFetch)) { + ourLog.trace("Setting search status to FINISHED"); + mySearch.setStatus(SearchStatusEnum.FINISHED); + mySearch.setTotalCount(myCountSavedTotal - countBlocked); + } else if (myAdditionalPrefetchThresholdsRemaining) { + ourLog.trace("Setting search status to PASSCMPLET"); + mySearch.setStatus(SearchStatusEnum.PASSCMPLET); + mySearch.setSearchParameterMap(myParams); + } else { + ourLog.trace("Setting search status to FINISHED"); + mySearch.setStatus(SearchStatusEnum.FINISHED); + mySearch.setTotalCount(myCountSavedTotal - countBlocked); + } + } + } + + mySearch.setNumFound(myCountSavedTotal); + mySearch.setNumBlocked(mySearch.getNumBlocked() + countBlocked); + + int numSynced; + synchronized (mySyncedPids) { + numSynced = mySyncedPids.size(); + } + + if (myStorageSettings.getCountSearchResultsUpTo() == null + || myStorageSettings.getCountSearchResultsUpTo() <= 0 + || myStorageSettings.getCountSearchResultsUpTo() <= numSynced) { + myInitialCollectionLatch.countDown(); + } + doSaveSearch(); - } - ArrayList unsyncedPids = myUnsyncedPids; - int countBlocked = 0; - - // Interceptor call: STORAGE_PREACCESS_RESOURCES - // This can be used to remove results from the search result details before - // the user has a chance to know that they were in the results - if (mySearchRuntimeDetails.getRequestDetails() != null && unsyncedPids.isEmpty() == false) { - JpaPreResourceAccessDetails accessDetails = new JpaPreResourceAccessDetails(unsyncedPids, () -> newSearchBuilder()); - HookParams params = new HookParams() - .add(IPreResourceAccessDetails.class, accessDetails) - .add(RequestDetails.class, mySearchRuntimeDetails.getRequestDetails()) - .addIfMatchesType(ServletRequestDetails.class, mySearchRuntimeDetails.getRequestDetails()); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, myRequest, Pointcut.STORAGE_PREACCESS_RESOURCES, params); - - for (int i = unsyncedPids.size() - 1; i >= 0; i--) { - if (accessDetails.isDontReturnResourceAtIndex(i)) { - unsyncedPids.remove(i); - myCountBlockedThisPass++; - myCountSavedTotal++; - countBlocked++; - } - } - } - - // Actually store the results in the query cache storage - 
myCountSavedTotal += unsyncedPids.size(); - myCountSavedThisPass += unsyncedPids.size(); - mySearchResultCacheSvc.storeResults(mySearch, mySyncedPids, unsyncedPids, myRequest, getRequestPartitionId()); - - synchronized (mySyncedPids) { - int numSyncedThisPass = unsyncedPids.size(); - ourLog.trace("Syncing {} search results - Have more: {}", numSyncedThisPass, theResultIter.hasNext()); - mySyncedPids.addAll(unsyncedPids); - unsyncedPids.clear(); - - if (theResultIter.hasNext() == false) { - int skippedCount = theResultIter.getSkippedCount(); - int nonSkippedCount = theResultIter.getNonSkippedCount(); - int totalFetched = skippedCount + myCountSavedThisPass + myCountBlockedThisPass; - ourLog.trace("MaxToFetch[{}] SkippedCount[{}] CountSavedThisPass[{}] CountSavedThisTotal[{}] AdditionalPrefetchRemaining[{}]", myMaxResultsToFetch, skippedCount, myCountSavedThisPass, myCountSavedTotal, myAdditionalPrefetchThresholdsRemaining); - - if (nonSkippedCount == 0 || (myMaxResultsToFetch != null && totalFetched < myMaxResultsToFetch)) { - ourLog.trace("Setting search status to FINISHED"); - mySearch.setStatus(SearchStatusEnum.FINISHED); - mySearch.setTotalCount(myCountSavedTotal - countBlocked); - } else if (myAdditionalPrefetchThresholdsRemaining) { - ourLog.trace("Setting search status to PASSCMPLET"); - mySearch.setStatus(SearchStatusEnum.PASSCMPLET); - mySearch.setSearchParameterMap(myParams); - } else { - ourLog.trace("Setting search status to FINISHED"); - mySearch.setStatus(SearchStatusEnum.FINISHED); - mySearch.setTotalCount(myCountSavedTotal - countBlocked); - } - } - } - - mySearch.setNumFound(myCountSavedTotal); - mySearch.setNumBlocked(mySearch.getNumBlocked() + countBlocked); - - int numSynced; - synchronized (mySyncedPids) { - numSynced = mySyncedPids.size(); - } - - if (myStorageSettings.getCountSearchResultsUpTo() == null || - myStorageSettings.getCountSearchResultsUpTo() <= 0 || - myStorageSettings.getCountSearchResultsUpTo() <= numSynced) { - myInitialCollectionLatch.countDown(); - } - - doSaveSearch(); - - ourLog.trace("saveUnsynced() - pre-commit"); - }); + ourLog.trace("saveUnsynced() - pre-commit"); + }); ourLog.trace("saveUnsynced() - post-commit"); - } public boolean isNotAborted() { @@ -393,27 +415,35 @@ public class SearchTask implements Callable { saveSearch(); myTxService - .withRequest(myRequest) - .withRequestPartitionId(myRequestPartitionId) - .withIsolation(Isolation.READ_COMMITTED) - .execute(() -> doSearch()); + .withRequest(myRequest) + .withRequestPartitionId(myRequestPartitionId) + .withIsolation(Isolation.READ_COMMITTED) + .execute(() -> doSearch()); mySearchRuntimeDetails.setSearchStatus(mySearch.getStatus()); if (mySearch.getStatus() == SearchStatusEnum.FINISHED) { HookParams params = new HookParams() - .add(RequestDetails.class, myRequest) - .addIfMatchesType(ServletRequestDetails.class, myRequest) - .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_SEARCH_COMPLETE, params); + .add(RequestDetails.class, myRequest) + .addIfMatchesType(ServletRequestDetails.class, myRequest) + .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_SEARCH_COMPLETE, params); } else { HookParams params = new HookParams() - .add(RequestDetails.class, myRequest) - .addIfMatchesType(ServletRequestDetails.class, myRequest) - .add(SearchRuntimeDetails.class, 
mySearchRuntimeDetails); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_SEARCH_PASS_COMPLETE, params); + .add(RequestDetails.class, myRequest) + .addIfMatchesType(ServletRequestDetails.class, myRequest) + .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_SEARCH_PASS_COMPLETE, params); } - ourLog.trace("Have completed search for [{}{}] and found {} resources in {}ms - Status is {}", mySearch.getResourceType(), mySearch.getSearchQueryString(), mySyncedPids.size(), sw.getMillis(), mySearch.getStatus()); + ourLog.trace( + "Have completed search for [{}{}] and found {} resources in {}ms - Status is {}", + mySearch.getResourceType(), + mySearch.getSearchQueryString(), + mySyncedPids.size(), + sw.getMillis(), + mySearch.getStatus()); } catch (Throwable t) { @@ -455,10 +485,11 @@ public class SearchTask implements Callable { mySearchRuntimeDetails.setSearchStatus(mySearch.getStatus()); HookParams params = new HookParams() - .add(RequestDetails.class, myRequest) - .addIfMatchesType(ServletRequestDetails.class, myRequest) - .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_SEARCH_FAILED, params); + .add(RequestDetails.class, myRequest) + .addIfMatchesType(ServletRequestDetails.class, myRequest) + .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_SEARCH_FAILED, params); saveSearch(); span.captureException(t); @@ -468,7 +499,6 @@ public class SearchTask implements Callable { myInitialCollectionLatch.countDown(); markComplete(); span.end(); - } return null; } @@ -496,7 +526,9 @@ public class SearchTask implements Callable { * before doing anything else. */ boolean myParamWantOnlyCount = isWantOnlyCount(myParams); - boolean myParamOrDefaultWantCount = nonNull(myParams.getSearchTotalMode()) ? isWantCount(myParams) : SearchParameterMapCalculator.isWantCount(myStorageSettings.getDefaultTotalMode()); + boolean myParamOrDefaultWantCount = nonNull(myParams.getSearchTotalMode()) + ? 
isWantCount(myParams) + : SearchParameterMapCalculator.isWantCount(myStorageSettings.getDefaultTotalMode()); if (myParamWantOnlyCount || myParamOrDefaultWantCount) { ourLog.trace("Performing count"); @@ -516,15 +548,15 @@ public class SearchTask implements Callable { ourLog.trace("Got count {}", count); myTxService - .withRequest(myRequest) - .withRequestPartitionId(myRequestPartitionId) - .execute(() -> { - mySearch.setTotalCount(count.intValue()); - if (myParamWantOnlyCount) { - mySearch.setStatus(SearchStatusEnum.FINISHED); - } - doSaveSearch(); - }); + .withRequest(myRequest) + .withRequestPartitionId(myRequestPartitionId) + .execute(() -> { + mySearch.setTotalCount(count.intValue()); + if (myParamWantOnlyCount) { + mySearch.setStatus(SearchStatusEnum.FINISHED); + } + doSaveSearch(); + }); if (myParamWantOnlyCount) { return; } @@ -547,7 +579,9 @@ public class SearchTask implements Callable { minWanted += currentlyLoaded; } - for (Iterator iter = myStorageSettings.getSearchPreFetchThresholds().iterator(); iter.hasNext(); ) { + for (Iterator iter = + myStorageSettings.getSearchPreFetchThresholds().iterator(); + iter.hasNext(); ) { int next = iter.next(); if (next != -1 && next <= currentlyLoaded) { continue; @@ -597,7 +631,8 @@ public class SearchTask implements Callable { * This is an odd implementation behaviour, but the change * for this will require a lot more handling at higher levels */ - try (IResultIterator resultIterator = sb.createQuery(myParams, mySearchRuntimeDetails, myRequest, myRequestPartitionId)) { + try (IResultIterator resultIterator = + sb.createQuery(myParams, mySearchRuntimeDetails, myRequest, myRequestPartitionId)) { assert (resultIterator != null); /* @@ -611,9 +646,9 @@ public class SearchTask implements Callable { boolean shouldSync = myUnsyncedPids.size() >= syncSize; - if (myStorageSettings.getCountSearchResultsUpTo() != null && - myStorageSettings.getCountSearchResultsUpTo() > 0 && - myStorageSettings.getCountSearchResultsUpTo() < myUnsyncedPids.size()) { + if (myStorageSettings.getCountSearchResultsUpTo() != null + && myStorageSettings.getCountSearchResultsUpTo() > 0 + && myStorageSettings.getCountSearchResultsUpTo() < myUnsyncedPids.size()) { shouldSync = false; } @@ -631,7 +666,6 @@ public class SearchTask implements Callable { if (myLoadingThrottleForUnitTests != null) { AsyncUtil.sleep(myLoadingThrottleForUnitTests); } - } // If no abort was requested, bail out diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchTaskParameters.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchTaskParameters.java index 016cadd504d..348a20ca800 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchTaskParameters.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchTaskParameters.java @@ -38,15 +38,15 @@ public class SearchTaskParameters { private Integer myLoadingThrottleForUnitTests; - public SearchTaskParameters(ca.uhn.fhir.jpa.entity.Search theSearch, - IDao theCallingDao, - SearchParameterMap theParams, - String theResourceType, - RequestDetails theRequest, - ca.uhn.fhir.interceptor.model.RequestPartitionId theRequestPartitionId, - Consumer theOnRemove, - int theSyncSize - ) { + public SearchTaskParameters( + ca.uhn.fhir.jpa.entity.Search theSearch, + IDao theCallingDao, + SearchParameterMap theParams, + String theResourceType, + RequestDetails theRequest, + ca.uhn.fhir.interceptor.model.RequestPartitionId 
theRequestPartitionId, + Consumer theOnRemove, + int theSyncSize) { Search = theSearch; CallingDao = theCallingDao; Params = theParams; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchCacheSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchCacheSvcImpl.java index 43618b428f9..7bf60372f0a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchCacheSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchCacheSvcImpl.java @@ -70,14 +70,19 @@ public class DatabaseSearchCacheSvcImpl implements ISearchCacheSvc { * the result is to be deleted */ private long myCutoffSlack = SEARCH_CLEANUP_JOB_INTERVAL_MILLIS; + @Autowired private ISearchDao mySearchDao; + @Autowired private ISearchResultDao mySearchResultDao; + @Autowired private ISearchIncludeDao mySearchIncludeDao; + @Autowired private IHapiTransactionService myTransactionService; + @Autowired private JpaStorageSettings myStorageSettings; @@ -89,8 +94,8 @@ public class DatabaseSearchCacheSvcImpl implements ISearchCacheSvc { @Override public Search save(Search theSearch, RequestPartitionId theRequestPartitionId) { return myTransactionService - .withSystemRequestOnPartition(theRequestPartitionId) - .execute(() -> mySearchDao.save(theSearch)); + .withSystemRequestOnPartition(theRequestPartitionId) + .execute(() -> mySearchDao.save(theSearch)); } @Override @@ -98,8 +103,8 @@ public class DatabaseSearchCacheSvcImpl implements ISearchCacheSvc { public Optional fetchByUuid(String theUuid, RequestPartitionId theRequestPartitionId) { Validate.notBlank(theUuid); return myTransactionService - .withSystemRequestOnPartition(theRequestPartitionId) - .execute(() -> mySearchDao.findByUuidAndFetchIncludes(theUuid)); + .withSystemRequestOnPartition(theRequestPartitionId) + .execute(() -> mySearchDao.findByUuidAndFetchIncludes(theUuid)); } void setSearchDaoForUnitTest(ISearchDao theSearchDao) { @@ -112,23 +117,25 @@ public class DatabaseSearchCacheSvcImpl implements ISearchCacheSvc { @Override public Optional tryToMarkSearchAsInProgress(Search theSearch, RequestPartitionId theRequestPartitionId) { - ourLog.trace("Going to try to change search status from {} to {}", theSearch.getStatus(), SearchStatusEnum.LOADING); + ourLog.trace( + "Going to try to change search status from {} to {}", theSearch.getStatus(), SearchStatusEnum.LOADING); try { return myTransactionService - .withSystemRequest() - .withRequestPartitionId(theRequestPartitionId) - .withPropagation(Propagation.REQUIRES_NEW) - .execute(t -> { - Search search = mySearchDao.findById(theSearch.getId()).orElse(theSearch); + .withSystemRequest() + .withRequestPartitionId(theRequestPartitionId) + .withPropagation(Propagation.REQUIRES_NEW) + .execute(t -> { + Search search = mySearchDao.findById(theSearch.getId()).orElse(theSearch); - if (search.getStatus() != SearchStatusEnum.PASSCMPLET) { - throw new IllegalStateException(Msg.code(1167) + "Can't change to LOADING because state is " + search.getStatus()); - } - search.setStatus(SearchStatusEnum.LOADING); - Search newSearch = mySearchDao.save(search); - return Optional.of(newSearch); - }); + if (search.getStatus() != SearchStatusEnum.PASSCMPLET) { + throw new IllegalStateException( + Msg.code(1167) + "Can't change to LOADING because state is " + search.getStatus()); + } + search.setStatus(SearchStatusEnum.LOADING); + Search newSearch = mySearchDao.save(search); + return 
Optional.of(newSearch); + }); } catch (Exception e) { ourLog.warn("Failed to activate search: {}", e.toString()); ourLog.trace("Failed to activate search", e); @@ -137,18 +144,24 @@ public class DatabaseSearchCacheSvcImpl implements ISearchCacheSvc { } @Override - public Optional findCandidatesForReuse(String theResourceType, String theQueryString, Instant theCreatedAfter, RequestPartitionId theRequestPartitionId) { + public Optional findCandidatesForReuse( + String theResourceType, + String theQueryString, + Instant theCreatedAfter, + RequestPartitionId theRequestPartitionId) { HapiTransactionService.requireTransaction(); String queryString = Search.createSearchQueryStringForStorage(theQueryString, theRequestPartitionId); int hashCode = queryString.hashCode(); - Collection candidates = mySearchDao.findWithCutoffOrExpiry(theResourceType, hashCode, Date.from(theCreatedAfter)); + Collection candidates = + mySearchDao.findWithCutoffOrExpiry(theResourceType, hashCode, Date.from(theCreatedAfter)); for (Search nextCandidateSearch : candidates) { // We should only reuse our search if it was created within the permitted window // Date.after() is unreliable. Instant.isAfter() always works. - if (queryString.equals(nextCandidateSearch.getSearchQueryString()) && nextCandidateSearch.getCreated().toInstant().isAfter(theCreatedAfter)) { + if (queryString.equals(nextCandidateSearch.getSearchQueryString()) + && nextCandidateSearch.getCreated().toInstant().isAfter(theCreatedAfter)) { return Optional.of(nextCandidateSearch); } } @@ -171,54 +184,55 @@ public class DatabaseSearchCacheSvcImpl implements ISearchCacheSvc { final Date cutoff = new Date((now() - cutoffMillis) - myCutoffSlack); if (ourNowForUnitTests != null) { - ourLog.info("Searching for searches which are before {} - now is {}", new InstantType(cutoff), new InstantType(new Date(now()))); + ourLog.info( + "Searching for searches which are before {} - now is {}", + new InstantType(cutoff), + new InstantType(new Date(now()))); } ourLog.debug("Searching for searches which are before {}", cutoff); // Mark searches as deleted if they should be final Slice toMarkDeleted = myTransactionService - .withSystemRequestOnPartition(theRequestPartitionId) - .execute(theStatus -> - mySearchDao.findWhereCreatedBefore(cutoff, new Date(), PageRequest.of(0, ourMaximumSearchesToCheckForDeletionCandidacy)) - ); + .withSystemRequestOnPartition(theRequestPartitionId) + .execute(theStatus -> mySearchDao.findWhereCreatedBefore( + cutoff, new Date(), PageRequest.of(0, ourMaximumSearchesToCheckForDeletionCandidacy))); assert toMarkDeleted != null; for (final Long nextSearchToDelete : toMarkDeleted) { ourLog.debug("Deleting search with PID {}", nextSearchToDelete); myTransactionService - .withSystemRequest() - .withRequestPartitionId(theRequestPartitionId) - .execute(t -> { - mySearchDao.updateDeleted(nextSearchToDelete, true); - return null; - }); + .withSystemRequest() + .withRequestPartitionId(theRequestPartitionId) + .execute(t -> { + mySearchDao.updateDeleted(nextSearchToDelete, true); + return null; + }); } // Delete searches that are marked as deleted final Slice toDelete = myTransactionService - .withSystemRequestOnPartition(theRequestPartitionId) - .execute(theStatus -> - mySearchDao.findDeleted(PageRequest.of(0, ourMaximumSearchesToCheckForDeletionCandidacy)) - ); + .withSystemRequestOnPartition(theRequestPartitionId) + .execute(theStatus -> + mySearchDao.findDeleted(PageRequest.of(0, ourMaximumSearchesToCheckForDeletionCandidacy))); assert toDelete != null; for 
(final Long nextSearchToDelete : toDelete) { ourLog.debug("Deleting search with PID {}", nextSearchToDelete); myTransactionService - .withSystemRequest() - .withRequestPartitionId(theRequestPartitionId) - .execute(t -> { - deleteSearch(nextSearchToDelete); - return null; - }); + .withSystemRequest() + .withRequestPartitionId(theRequestPartitionId) + .execute(t -> { + deleteSearch(nextSearchToDelete); + return null; + }); } int count = toDelete.getContent().size(); if (count > 0) { if (ourLog.isDebugEnabled() || HapiSystemProperties.isTestModeEnabled()) { Long total = myTransactionService - .withSystemRequest() - .withRequestPartitionId(theRequestPartitionId) - .execute(t -> mySearchDao.count()); + .withSystemRequest() + .withRequestPartitionId(theRequestPartitionId) + .execute(t -> mySearchDao.count()); ourLog.debug("Deleted {} searches, {} remaining", count, total); } } @@ -239,25 +253,34 @@ public class DatabaseSearchCacheSvcImpl implements ISearchCacheSvc { int max = ourMaximumResultsToDeleteInOnePass; Slice resultPids = mySearchResultDao.findForSearch(PageRequest.of(0, max), searchToDelete.getId()); if (resultPids.hasContent()) { - List> partitions = Lists.partition(resultPids.getContent(), ourMaximumResultsToDeleteInOneStatement); + List> partitions = + Lists.partition(resultPids.getContent(), ourMaximumResultsToDeleteInOneStatement); for (List nextPartition : partitions) { mySearchResultDao.deleteByIds(nextPartition); } - } // Only delete if we don't have results left in this search if (resultPids.getNumberOfElements() < max) { - ourLog.debug("Deleting search {}/{} - Created[{}]", searchToDelete.getId(), searchToDelete.getUuid(), new InstantType(searchToDelete.getCreated())); + ourLog.debug( + "Deleting search {}/{} - Created[{}]", + searchToDelete.getId(), + searchToDelete.getUuid(), + new InstantType(searchToDelete.getCreated())); mySearchDao.deleteByPid(searchToDelete.getId()); } else { - ourLog.debug("Purged {} search results for deleted search {}/{}", resultPids.getSize(), searchToDelete.getId(), searchToDelete.getUuid()); + ourLog.debug( + "Purged {} search results for deleted search {}/{}", + resultPids.getSize(), + searchToDelete.getId(), + searchToDelete.getUuid()); } }); } @VisibleForTesting - public static void setMaximumSearchesToCheckForDeletionCandidacyForUnitTest(int theMaximumSearchesToCheckForDeletionCandidacy) { + public static void setMaximumSearchesToCheckForDeletionCandidacyForUnitTest( + int theMaximumSearchesToCheckForDeletionCandidacy) { ourMaximumSearchesToCheckForDeletionCandidacy = theMaximumSearchesToCheckForDeletionCandidacy; } @@ -285,5 +308,4 @@ public class DatabaseSearchCacheSvcImpl implements ISearchCacheSvc { } return System.currentTimeMillis(); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchResultCacheSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchResultCacheSvcImpl.java index 2f5580edc2a..60d77dce3e8 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchResultCacheSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchResultCacheSvcImpl.java @@ -47,61 +47,74 @@ public class DatabaseSearchResultCacheSvcImpl implements ISearchResultCacheSvc { private IHapiTransactionService myTransactionService; @Override - public List fetchResultPids(Search theSearch, int theFrom, int theTo, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) { + public 
List fetchResultPids( + Search theSearch, + int theFrom, + int theTo, + RequestDetails theRequestDetails, + RequestPartitionId theRequestPartitionId) { return myTransactionService - .withRequest(theRequestDetails) - .withRequestPartitionId(theRequestPartitionId) - .execute(() -> { - final Pageable page = toPage(theFrom, theTo); - if (page == null) { - return Collections.emptyList(); - } + .withRequest(theRequestDetails) + .withRequestPartitionId(theRequestPartitionId) + .execute(() -> { + final Pageable page = toPage(theFrom, theTo); + if (page == null) { + return Collections.emptyList(); + } - List retVal = mySearchResultDao - .findWithSearchPid(theSearch.getId(), page) - .getContent(); + List retVal = mySearchResultDao + .findWithSearchPid(theSearch.getId(), page) + .getContent(); - ourLog.debug("fetchResultPids for range {}-{} returned {} pids", theFrom, theTo, retVal.size()); + ourLog.debug("fetchResultPids for range {}-{} returned {} pids", theFrom, theTo, retVal.size()); - return JpaPid.fromLongList(retVal); - }); + return JpaPid.fromLongList(retVal); + }); } @Override - public List fetchAllResultPids(Search theSearch, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) { + public List fetchAllResultPids( + Search theSearch, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) { return myTransactionService - .withRequest(theRequestDetails) - .withRequestPartitionId(theRequestPartitionId) - .execute(() -> { - List retVal = mySearchResultDao.findWithSearchPidOrderIndependent(theSearch.getId()); - ourLog.trace("fetchAllResultPids returned {} pids", retVal.size()); - return JpaPid.fromLongList(retVal); - }); + .withRequest(theRequestDetails) + .withRequestPartitionId(theRequestPartitionId) + .execute(() -> { + List retVal = mySearchResultDao.findWithSearchPidOrderIndependent(theSearch.getId()); + ourLog.trace("fetchAllResultPids returned {} pids", retVal.size()); + return JpaPid.fromLongList(retVal); + }); } @Override - public void storeResults(Search theSearch, List thePreviouslyStoredResourcePids, List theNewResourcePids, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) { + public void storeResults( + Search theSearch, + List thePreviouslyStoredResourcePids, + List theNewResourcePids, + RequestDetails theRequestDetails, + RequestPartitionId theRequestPartitionId) { myTransactionService - .withRequest(theRequestDetails) - .withRequestPartitionId(theRequestPartitionId) - .execute(() -> { - List resultsToSave = Lists.newArrayList(); + .withRequest(theRequestDetails) + .withRequestPartitionId(theRequestPartitionId) + .execute(() -> { + List resultsToSave = Lists.newArrayList(); - ourLog.debug("Storing {} results with {} previous for search", theNewResourcePids.size(), thePreviouslyStoredResourcePids.size()); + ourLog.debug( + "Storing {} results with {} previous for search", + theNewResourcePids.size(), + thePreviouslyStoredResourcePids.size()); - int order = thePreviouslyStoredResourcePids.size(); - for (JpaPid nextPid : theNewResourcePids) { - SearchResult nextResult = new SearchResult(theSearch); - nextResult.setResourcePid(nextPid.getId()); - nextResult.setOrder(order); - resultsToSave.add(nextResult); - ourLog.trace("Saving ORDER[{}] Resource {}", order, nextResult.getResourcePid()); + int order = thePreviouslyStoredResourcePids.size(); + for (JpaPid nextPid : theNewResourcePids) { + SearchResult nextResult = new SearchResult(theSearch); + nextResult.setResourcePid(nextPid.getId()); + nextResult.setOrder(order); 
+ resultsToSave.add(nextResult); + ourLog.trace("Saving ORDER[{}] Resource {}", order, nextResult.getResourcePid()); - order++; - } + order++; + } - mySearchResultDao.saveAll(resultsToSave); - }); + mySearchResultDao.saveAll(resultsToSave); + }); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/ISearchCacheSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/ISearchCacheSvc.java index b78113cc84b..34c662b83f7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/ISearchCacheSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/ISearchCacheSvc.java @@ -75,7 +75,11 @@ public interface ISearchCacheSvc { * @param theRequestPartitionId Search should examine only the requested partitions. Cache MUST not return results matching the given partition IDs * @return A collection of candidate searches */ - Optional findCandidatesForReuse(String theResourceType, String theQueryString, Instant theCreatedAfter, RequestPartitionId theRequestPartitionId); + Optional findCandidatesForReuse( + String theResourceType, + String theQueryString, + Instant theCreatedAfter, + RequestPartitionId theRequestPartitionId); /** * This method will be called periodically to delete stale searches. Implementations are not required to do anything diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/ISearchResultCacheSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/ISearchResultCacheSvc.java index 4c01e36b978..68dc8745f28 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/ISearchResultCacheSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/ISearchResultCacheSvc.java @@ -24,8 +24,8 @@ import ca.uhn.fhir.jpa.entity.Search; import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.rest.api.server.RequestDetails; -import javax.annotation.Nullable; import java.util.List; +import javax.annotation.Nullable; public interface ISearchResultCacheSvc { /** @@ -33,7 +33,12 @@ public interface ISearchResultCacheSvc { * @param thePreviouslyStoredResourcePids A list of resource PIDs that have previously been saved to this search * @param theNewResourcePids A list of new resource PIDs to add to this search (these ones have not been previously saved) */ - void storeResults(Search theSearch, List thePreviouslyStoredResourcePids, List theNewResourcePids, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId); + void storeResults( + Search theSearch, + List thePreviouslyStoredResourcePids, + List theNewResourcePids, + RequestDetails theRequestDetails, + RequestPartitionId theRequestPartitionId); /** * Fetch a subset of the search result IDs from the cache @@ -45,7 +50,12 @@ public interface ISearchResultCacheSvc { * have been removed from the cache for some reason, such as expiry or manual purge) */ @Nullable - List fetchResultPids(Search theSearch, int theFrom, int theTo, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId); + List fetchResultPids( + Search theSearch, + int theFrom, + int theTo, + RequestDetails theRequestDetails, + RequestPartitionId theRequestPartitionId); /** * Fetch all result PIDs for a given search with no particular order required @@ -55,6 +65,6 @@ public interface ISearchResultCacheSvc { * have been removed from the cache for some reason, such as expiry or manual purge) */ @Nullable - List fetchAllResultPids(Search theSearch, RequestDetails 
theRequestDetails, RequestPartitionId theRequestPartitionId); - + List fetchAllResultPids( + Search theSearch, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/SearchCacheStatusEnum.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/SearchCacheStatusEnum.java index 71bc961f031..a5d24d75165 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/SearchCacheStatusEnum.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/SearchCacheStatusEnum.java @@ -20,9 +20,7 @@ package ca.uhn.fhir.jpa.search.cache; public enum SearchCacheStatusEnum { - NOT_TRIED, MISS, HIT - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/elastic/ElasticsearchHibernatePropertiesBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/elastic/ElasticsearchHibernatePropertiesBuilder.java index ab5c8c66257..3dcd02aa63f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/elastic/ElasticsearchHibernatePropertiesBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/elastic/ElasticsearchHibernatePropertiesBuilder.java @@ -47,7 +47,6 @@ import static org.slf4j.LoggerFactory.getLogger; public class ElasticsearchHibernatePropertiesBuilder { private static final Logger ourLog = getLogger(ElasticsearchHibernatePropertiesBuilder.class); - private IndexStatus myRequiredIndexStatus = IndexStatus.YELLOW; private SchemaManagementStrategyName myIndexSchemaManagementStrategy = SchemaManagementStrategyName.CREATE; @@ -59,7 +58,6 @@ public class ElasticsearchHibernatePropertiesBuilder { return myAwsRegion; } - private String myAwsRegion; private long myIndexManagementWaitTimeoutMillis = 10000L; private long myScrollTimeoutSecs = SCROLL_TIMEOUT; @@ -81,8 +79,9 @@ public class ElasticsearchHibernatePropertiesBuilder { // the below properties are used for ElasticSearch integration theProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "elasticsearch"); - theProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.ANALYSIS_CONFIGURER), - HapiHSearchAnalysisConfigurers.HapiElasticsearchAnalysisConfigurer.class.getName()); + theProperties.put( + BackendSettings.backendKey(ElasticsearchIndexSettings.ANALYSIS_CONFIGURER), + HapiHSearchAnalysisConfigurers.HapiElasticsearchAnalysisConfigurer.class.getName()); theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.HOSTS), myHosts); theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.PROTOCOL), myProtocol); @@ -92,32 +91,56 @@ public class ElasticsearchHibernatePropertiesBuilder { if (StringUtils.isNotBlank(myPassword)) { theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.PASSWORD), myPassword); } - theProperties.put(HibernateOrmMapperSettings.SCHEMA_MANAGEMENT_STRATEGY, myIndexSchemaManagementStrategy.externalRepresentation()); - theProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.SCHEMA_MANAGEMENT_MINIMAL_REQUIRED_STATUS_WAIT_TIMEOUT), Long.toString(myIndexManagementWaitTimeoutMillis)); - theProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.SCHEMA_MANAGEMENT_MINIMAL_REQUIRED_STATUS), myRequiredIndexStatus.externalRepresentation()); + theProperties.put( + HibernateOrmMapperSettings.SCHEMA_MANAGEMENT_STRATEGY, + myIndexSchemaManagementStrategy.externalRepresentation()); + theProperties.put( + 
BackendSettings.backendKey( + ElasticsearchIndexSettings.SCHEMA_MANAGEMENT_MINIMAL_REQUIRED_STATUS_WAIT_TIMEOUT), + Long.toString(myIndexManagementWaitTimeoutMillis)); + theProperties.put( + BackendSettings.backendKey(ElasticsearchIndexSettings.SCHEMA_MANAGEMENT_MINIMAL_REQUIRED_STATUS), + myRequiredIndexStatus.externalRepresentation()); // Need the mapping to be dynamic because of terminology indexes. theProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.DYNAMIC_MAPPING), "true"); // Only for unit tests theProperties.put(HibernateOrmMapperSettings.AUTOMATIC_INDEXING_SYNCHRONIZATION_STRATEGY, myDebugSyncStrategy); - theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.LOG_JSON_PRETTY_PRINTING), Boolean.toString(myDebugPrettyPrintJsonLog)); - theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.SCROLL_TIMEOUT), Long.toString(myScrollTimeoutSecs)); + theProperties.put( + BackendSettings.backendKey(ElasticsearchBackendSettings.LOG_JSON_PRETTY_PRINTING), + Boolean.toString(myDebugPrettyPrintJsonLog)); + theProperties.put( + BackendSettings.backendKey(ElasticsearchBackendSettings.SCROLL_TIMEOUT), + Long.toString(myScrollTimeoutSecs)); - //This tells elasticsearch to use our custom index naming strategy. - theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.LAYOUT_STRATEGY), IndexNamePrefixLayoutStrategy.class.getName()); + // This tells elasticsearch to use our custom index naming strategy. + theProperties.put( + BackendSettings.backendKey(ElasticsearchBackendSettings.LAYOUT_STRATEGY), + IndexNamePrefixLayoutStrategy.class.getName()); - //This tells hibernate search to use this custom file for creating index settings. We use this to add a custom max_ngram_diff - theProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.SCHEMA_MANAGEMENT_SETTINGS_FILE), "ca/uhn/fhir/jpa/elastic/index-settings.json"); + // This tells hibernate search to use this custom file for creating index settings. We use this to add a custom + // max_ngram_diff + theProperties.put( + BackendSettings.backendKey(ElasticsearchIndexSettings.SCHEMA_MANAGEMENT_SETTINGS_FILE), + "ca/uhn/fhir/jpa/elastic/index-settings.json"); if (!StringUtils.isBlank(myAwsRegion)) { theProperties.put(BackendSettings.backendKey(ElasticsearchAwsBackendSettings.REGION), myAwsRegion); theProperties.put(BackendSettings.backendKey(ElasticsearchAwsBackendSettings.SIGNING_ENABLED), true); if (!StringUtils.isBlank(myUsername) && !StringUtils.isBlank(myPassword)) { - theProperties.put(BackendSettings.backendKey(ElasticsearchAwsBackendSettings.CREDENTIALS_TYPE), ElasticsearchAwsCredentialsTypeNames.STATIC); - theProperties.put(BackendSettings.backendKey(ElasticsearchAwsBackendSettings.CREDENTIALS_ACCESS_KEY_ID), myUsername); - theProperties.put(BackendSettings.backendKey(ElasticsearchAwsBackendSettings.CREDENTIALS_SECRET_ACCESS_KEY), myPassword); + theProperties.put( + BackendSettings.backendKey(ElasticsearchAwsBackendSettings.CREDENTIALS_TYPE), + ElasticsearchAwsCredentialsTypeNames.STATIC); + theProperties.put( + BackendSettings.backendKey(ElasticsearchAwsBackendSettings.CREDENTIALS_ACCESS_KEY_ID), + myUsername); + theProperties.put( + BackendSettings.backendKey(ElasticsearchAwsBackendSettings.CREDENTIALS_SECRET_ACCESS_KEY), + myPassword); } else { - //Default to Standard IAM Auth provider if username or password is absent. 
- theProperties.put(BackendSettings.backendKey(ElasticsearchAwsBackendSettings.CREDENTIALS_TYPE), ElasticsearchAwsCredentialsTypeNames.DEFAULT); + // Default to Standard IAM Auth provider if username or password is absent. + theProperties.put( + BackendSettings.backendKey(ElasticsearchAwsBackendSettings.CREDENTIALS_TYPE), + ElasticsearchAwsCredentialsTypeNames.DEFAULT); } } } @@ -129,7 +152,9 @@ public class ElasticsearchHibernatePropertiesBuilder { public ElasticsearchHibernatePropertiesBuilder setHosts(String hosts) { if (hosts.contains("://")) { - throw new ConfigurationException(Msg.code(2139) + "Elasticsearch URLs cannot include a protocol, that is a separate property. Remove http:// or https:// from this URL."); + throw new ConfigurationException( + Msg.code(2139) + + "Elasticsearch URLs cannot include a protocol, that is a separate property. Remove http:// or https:// from this URL."); } myHosts = hosts; return this; @@ -140,12 +165,14 @@ public class ElasticsearchHibernatePropertiesBuilder { return this; } - public ElasticsearchHibernatePropertiesBuilder setIndexSchemaManagementStrategy(SchemaManagementStrategyName theIndexSchemaManagementStrategy) { + public ElasticsearchHibernatePropertiesBuilder setIndexSchemaManagementStrategy( + SchemaManagementStrategyName theIndexSchemaManagementStrategy) { myIndexSchemaManagementStrategy = theIndexSchemaManagementStrategy; return this; } - public ElasticsearchHibernatePropertiesBuilder setIndexManagementWaitTimeoutMillis(long theIndexManagementWaitTimeoutMillis) { + public ElasticsearchHibernatePropertiesBuilder setIndexManagementWaitTimeoutMillis( + long theIndexManagementWaitTimeoutMillis) { myIndexManagementWaitTimeoutMillis = theIndexManagementWaitTimeoutMillis; return this; } @@ -160,7 +187,6 @@ public class ElasticsearchHibernatePropertiesBuilder { return this; } - public ElasticsearchHibernatePropertiesBuilder setDebugPrettyPrintJsonLog(boolean theDebugPrettyPrintJsonLog) { myDebugPrettyPrintJsonLog = theDebugPrettyPrintJsonLog; return this; @@ -178,5 +204,4 @@ public class ElasticsearchHibernatePropertiesBuilder { myAwsRegion = theAwsRegion; return this; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/elastic/IndexNamePrefixLayoutStrategy.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/elastic/IndexNamePrefixLayoutStrategy.java index 41943bcfc9d..e9c00b16dac 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/elastic/IndexNamePrefixLayoutStrategy.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/elastic/IndexNamePrefixLayoutStrategy.java @@ -55,7 +55,7 @@ public class IndexNamePrefixLayoutStrategy implements IndexLayoutStrategy { @Override public String createWriteAlias(String hibernateSearchIndexName) { - return addPrefixIfNecessary(hibernateSearchIndexName +"-write"); + return addPrefixIfNecessary(hibernateSearchIndexName + "-write"); } @Override @@ -83,7 +83,7 @@ public class IndexNamePrefixLayoutStrategy implements IndexLayoutStrategy { if (!matcher.matches()) { throw log.invalidIndexPrimaryName(elasticsearchIndexName, UNIQUE_KEY_EXTRACTION_PATTERN); } else { - String candidateUniqueKey= matcher.group(1); + String candidateUniqueKey = matcher.group(1); return removePrefixIfNecessary(candidateUniqueKey); } } @@ -96,9 +96,12 @@ public class IndexNamePrefixLayoutStrategy implements IndexLayoutStrategy { return theCandidateUniqueKey; } } + private void validateStorageSettingsIsPresent() { if (myStorageSettings == null) { - throw new 
ConfigurationException(Msg.code(1168) + "While attempting to boot HAPI FHIR, the Hibernate Search bootstrapper failed to find the StorageSettings. This probably means Hibernate Search has been recently upgraded, or somebody modified HapiFhirLocalContainerEntityManagerFactoryBean."); + throw new ConfigurationException( + Msg.code(1168) + + "While attempting to boot HAPI FHIR, the Hibernate Search bootstrapper failed to find the StorageSettings. This probably means Hibernate Search has been recently upgraded, or somebody modified HapiFhirLocalContainerEntityManagerFactoryBean."); } } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/ElasticsearchRestClientFactory.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/ElasticsearchRestClientFactory.java index 74f71cade5c..a5c8481b8a5 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/ElasticsearchRestClientFactory.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/ElasticsearchRestClientFactory.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.jpa.search.lastn; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.ConfigurationException; +import ca.uhn.fhir.i18n.Msg; import org.apache.commons.lang3.StringUtils; import org.apache.http.Header; import org.apache.http.HttpHost; @@ -34,47 +34,50 @@ import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.client.RestHighLevelClient; -import javax.annotation.Nullable; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nullable; public class ElasticsearchRestClientFactory { - - static public RestHighLevelClient createElasticsearchHighLevelRestClient( - String protocol, String hosts, @Nullable String theUsername, @Nullable String thePassword) { + public static RestHighLevelClient createElasticsearchHighLevelRestClient( + String protocol, String hosts, @Nullable String theUsername, @Nullable String thePassword) { if (hosts.contains("://")) { - throw new ConfigurationException(Msg.code(1173) + "Elasticsearch URLs cannot include a protocol, that is a separate property. Remove http:// or https:// from this URL."); + throw new ConfigurationException( + Msg.code(1173) + + "Elasticsearch URLs cannot include a protocol, that is a separate property. Remove http:// or https:// from this URL."); } String[] hostArray = hosts.split(","); List clientNodes = Arrays.stream(hostArray) - .map(String::trim) - .filter(s -> s.contains(":")) - .map(h -> { - int colonIndex = h.indexOf(":"); - String host = h.substring(0, colonIndex); - int port = Integer.parseInt(h.substring(colonIndex + 1)); - return new Node(new HttpHost(host, port, protocol)); - }) - .collect(Collectors.toList()); + .map(String::trim) + .filter(s -> s.contains(":")) + .map(h -> { + int colonIndex = h.indexOf(":"); + String host = h.substring(0, colonIndex); + int port = Integer.parseInt(h.substring(colonIndex + 1)); + return new Node(new HttpHost(host, port, protocol)); + }) + .collect(Collectors.toList()); if (hostArray.length != clientNodes.size()) { - throw new ConfigurationException(Msg.code(1174) + "Elasticsearch URLs have to contain ':' as a host:port separator. Example: localhost:9200,localhost:9201,localhost:9202"); + throw new ConfigurationException( + Msg.code(1174) + + "Elasticsearch URLs have to contain ':' as a host:port separator. 
Example: localhost:9200,localhost:9201,localhost:9202"); } RestClientBuilder clientBuilder = RestClient.builder(clientNodes.toArray(new Node[0])); if (StringUtils.isNotBlank(theUsername) && StringUtils.isNotBlank(thePassword)) { final CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); - credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(theUsername, thePassword)); - clientBuilder.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder - .setDefaultCredentialsProvider(credentialsProvider)); + credentialsProvider.setCredentials( + AuthScope.ANY, new UsernamePasswordCredentials(theUsername, thePassword)); + clientBuilder.setHttpClientConfigCallback( + httpClientBuilder -> httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider)); } - Header[] defaultHeaders = new Header[]{new BasicHeader("Content-Type", "application/json")}; + Header[] defaultHeaders = new Header[] {new BasicHeader("Content-Type", "application/json")}; clientBuilder.setDefaultHeaders(defaultHeaders); return new RestHighLevelClient(clientBuilder); - } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/ElasticsearchSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/ElasticsearchSvcImpl.java index c5aad25b680..1479b017cd8 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/ElasticsearchSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/ElasticsearchSvcImpl.java @@ -23,7 +23,6 @@ import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.dao.TolerantJsonParser; import ca.uhn.fhir.jpa.model.config.PartitionSettings; -import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity; import ca.uhn.fhir.jpa.model.util.CodeSystemHash; import ca.uhn.fhir.jpa.search.lastn.json.CodeJson; import ca.uhn.fhir.jpa.search.lastn.json.ObservationJson; @@ -79,7 +78,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nullable; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; @@ -89,6 +87,7 @@ import java.util.Collection; import java.util.List; import java.util.function.Function; import java.util.stream.Collectors; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -99,8 +98,10 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { // Index Constants public static final String OBSERVATION_INDEX = "observation_index"; public static final String OBSERVATION_CODE_INDEX = "code_index"; - public static final String OBSERVATION_DOCUMENT_TYPE = "ca.uhn.fhir.jpa.model.entity.ObservationIndexedSearchParamLastNEntity"; - public static final String CODE_DOCUMENT_TYPE = "ca.uhn.fhir.jpa.model.entity.ObservationIndexedCodeCodeableConceptEntity"; + public static final String OBSERVATION_DOCUMENT_TYPE = + "ca.uhn.fhir.jpa.model.entity.ObservationIndexedSearchParamLastNEntity"; + public static final String CODE_DOCUMENT_TYPE = + "ca.uhn.fhir.jpa.model.entity.ObservationIndexedCodeCodeableConceptEntity"; public static final String OBSERVATION_INDEX_SCHEMA_FILE = "ObservationIndexSchema.json"; public static final String OBSERVATION_CODE_INDEX_SCHEMA_FILE = "ObservationCodeIndexSchema.json"; @@ -141,14 +142,21 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { @Autowired private FhirContext myContext; - //This constructor 
used to inject a dummy partitionsettings in test. - public ElasticsearchSvcImpl(PartitionSettings thePartitionSetings, String theProtocol, String theHostname, @Nullable String theUsername, @Nullable String thePassword) { + // This constructor used to inject a dummy partitionsettings in test. + public ElasticsearchSvcImpl( + PartitionSettings thePartitionSetings, + String theProtocol, + String theHostname, + @Nullable String theUsername, + @Nullable String thePassword) { this(theProtocol, theHostname, theUsername, thePassword); this.myPartitionSettings = thePartitionSetings; } - public ElasticsearchSvcImpl(String theProtocol, String theHostname, @Nullable String theUsername, @Nullable String thePassword) { - myRestHighLevelClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(theProtocol, theHostname, theUsername, thePassword); + public ElasticsearchSvcImpl( + String theProtocol, String theHostname, @Nullable String theUsername, @Nullable String thePassword) { + myRestHighLevelClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient( + theProtocol, theHostname, theUsername, thePassword); try { createObservationIndexIfMissing(); @@ -159,7 +167,8 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { } private String getIndexSchema(String theSchemaFileName) throws IOException { - InputStreamReader input = new InputStreamReader(ElasticsearchSvcImpl.class.getResourceAsStream(theSchemaFileName)); + InputStreamReader input = + new InputStreamReader(ElasticsearchSvcImpl.class.getResourceAsStream(theSchemaFileName)); BufferedReader reader = new BufferedReader(input); StringBuilder sb = new StringBuilder(); String str; @@ -188,15 +197,14 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { if (!createIndex(OBSERVATION_CODE_INDEX, observationCodeMapping)) { throw new RuntimeException(Msg.code(1177) + "Failed to create observation code index"); } - } private boolean createIndex(String theIndexName, String theMapping) throws IOException { CreateIndexRequest request = new CreateIndexRequest(theIndexName); request.source(theMapping, XContentType.JSON); - CreateIndexResponse createIndexResponse = myRestHighLevelClient.indices().create(request, RequestOptions.DEFAULT); + CreateIndexResponse createIndexResponse = + myRestHighLevelClient.indices().create(request, RequestOptions.DEFAULT); return createIndexResponse.isAcknowledged(); - } private boolean indexExists(String theIndexName) throws IOException { @@ -205,44 +213,62 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { } @Override - public List executeLastN(SearchParameterMap theSearchParameterMap, FhirContext theFhirContext, Integer theMaxResultsToFetch) { - Validate.isTrue(!myPartitionSettings.isPartitioningEnabled(), "$lastn is not currently supported on partitioned servers"); + public List executeLastN( + SearchParameterMap theSearchParameterMap, FhirContext theFhirContext, Integer theMaxResultsToFetch) { + Validate.isTrue( + !myPartitionSettings.isPartitioningEnabled(), + "$lastn is not currently supported on partitioned servers"); String[] topHitsInclude = {OBSERVATION_IDENTIFIER_FIELD_NAME}; - return buildAndExecuteSearch(theSearchParameterMap, theFhirContext, topHitsInclude, - ObservationJson::getIdentifier, theMaxResultsToFetch); + return buildAndExecuteSearch( + theSearchParameterMap, + theFhirContext, + topHitsInclude, + ObservationJson::getIdentifier, + theMaxResultsToFetch); } - private List buildAndExecuteSearch(SearchParameterMap 
theSearchParameterMap, FhirContext theFhirContext, - String[] topHitsInclude, Function setValue, Integer theMaxResultsToFetch) { + private List buildAndExecuteSearch( + SearchParameterMap theSearchParameterMap, + FhirContext theFhirContext, + String[] topHitsInclude, + Function setValue, + Integer theMaxResultsToFetch) { String patientParamName = LastNParameterHelper.getPatientParamName(theFhirContext); String subjectParamName = LastNParameterHelper.getSubjectParamName(theFhirContext); List searchResults = new ArrayList<>(); if (theSearchParameterMap.containsKey(patientParamName) - || theSearchParameterMap.containsKey(subjectParamName)) { - for (String subject : getSubjectReferenceCriteria(patientParamName, subjectParamName, theSearchParameterMap)) { + || theSearchParameterMap.containsKey(subjectParamName)) { + for (String subject : + getSubjectReferenceCriteria(patientParamName, subjectParamName, theSearchParameterMap)) { if (theMaxResultsToFetch != null && searchResults.size() >= theMaxResultsToFetch) { break; } - SearchRequest myLastNRequest = buildObservationsSearchRequest(subject, theSearchParameterMap, theFhirContext, - createObservationSubjectAggregationBuilder(getMaxParameter(theSearchParameterMap), topHitsInclude)); + SearchRequest myLastNRequest = buildObservationsSearchRequest( + subject, + theSearchParameterMap, + theFhirContext, + createObservationSubjectAggregationBuilder( + getMaxParameter(theSearchParameterMap), topHitsInclude)); ourLog.debug("ElasticSearch query: {}", myLastNRequest.source().toString()); try { SearchResponse lastnResponse = executeSearchRequest(myLastNRequest); - searchResults.addAll(buildObservationList(lastnResponse, setValue, theSearchParameterMap, theFhirContext, - theMaxResultsToFetch)); + searchResults.addAll(buildObservationList( + lastnResponse, setValue, theSearchParameterMap, theFhirContext, theMaxResultsToFetch)); } catch (IOException theE) { throw new InvalidRequestException(Msg.code(1178) + "Unable to execute LastN request", theE); } } } else { - SearchRequest myLastNRequest = buildObservationsSearchRequest(theSearchParameterMap, theFhirContext, - createObservationCodeAggregationBuilder(getMaxParameter(theSearchParameterMap), topHitsInclude)); + SearchRequest myLastNRequest = buildObservationsSearchRequest( + theSearchParameterMap, + theFhirContext, + createObservationCodeAggregationBuilder(getMaxParameter(theSearchParameterMap), topHitsInclude)); ourLog.debug("ElasticSearch query: {}", myLastNRequest.source().toString()); try { SearchResponse lastnResponse = executeSearchRequest(myLastNRequest); - searchResults.addAll(buildObservationList(lastnResponse, setValue, theSearchParameterMap, theFhirContext, - theMaxResultsToFetch)); + searchResults.addAll(buildObservationList( + lastnResponse, setValue, theSearchParameterMap, theFhirContext, theMaxResultsToFetch)); } catch (IOException theE) { throw new InvalidRequestException(Msg.code(1179) + "Unable to execute LastN request", theE); } @@ -258,7 +284,8 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { } } - private List getSubjectReferenceCriteria(String thePatientParamName, String theSubjectParamName, SearchParameterMap theSearchParameterMap) { + private List getSubjectReferenceCriteria( + String thePatientParamName, String theSubjectParamName, SearchParameterMap theSearchParameterMap) { List subjectReferenceCriteria = new ArrayList<>(); List> patientParams = new ArrayList<>(); @@ -285,32 +312,41 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { 
referenceList.add(ref.getValue()); } } else { - throw new IllegalArgumentException(Msg.code(1180) + "Invalid token type (expecting ReferenceParam): " + nextOr.getClass()); + throw new IllegalArgumentException( + Msg.code(1180) + "Invalid token type (expecting ReferenceParam): " + nextOr.getClass()); } } return referenceList; } - private CompositeAggregationBuilder createObservationSubjectAggregationBuilder(Integer theMaxNumberObservationsPerCode, String[] theTopHitsInclude) { - CompositeValuesSourceBuilder subjectValuesBuilder = new TermsValuesSourceBuilder(OBSERVATION_SUBJECT_FIELD_NAME).field(OBSERVATION_SUBJECT_FIELD_NAME); + private CompositeAggregationBuilder createObservationSubjectAggregationBuilder( + Integer theMaxNumberObservationsPerCode, String[] theTopHitsInclude) { + CompositeValuesSourceBuilder subjectValuesBuilder = + new TermsValuesSourceBuilder(OBSERVATION_SUBJECT_FIELD_NAME).field(OBSERVATION_SUBJECT_FIELD_NAME); List> compositeAggSubjectSources = new ArrayList<>(); compositeAggSubjectSources.add(subjectValuesBuilder); - CompositeAggregationBuilder compositeAggregationSubjectBuilder = new CompositeAggregationBuilder(GROUP_BY_SUBJECT, compositeAggSubjectSources); - compositeAggregationSubjectBuilder.subAggregation(createObservationCodeAggregationBuilder(theMaxNumberObservationsPerCode, theTopHitsInclude)); + CompositeAggregationBuilder compositeAggregationSubjectBuilder = + new CompositeAggregationBuilder(GROUP_BY_SUBJECT, compositeAggSubjectSources); + compositeAggregationSubjectBuilder.subAggregation( + createObservationCodeAggregationBuilder(theMaxNumberObservationsPerCode, theTopHitsInclude)); compositeAggregationSubjectBuilder.size(10000); return compositeAggregationSubjectBuilder; } - private TermsAggregationBuilder createObservationCodeAggregationBuilder(int theMaxNumberObservationsPerCode, String[] theTopHitsInclude) { - TermsAggregationBuilder observationCodeCodeAggregationBuilder = new TermsAggregationBuilder(GROUP_BY_CODE).field(OBSERVATION_CODEVALUE_FIELD_NAME); + private TermsAggregationBuilder createObservationCodeAggregationBuilder( + int theMaxNumberObservationsPerCode, String[] theTopHitsInclude) { + TermsAggregationBuilder observationCodeCodeAggregationBuilder = + new TermsAggregationBuilder(GROUP_BY_CODE).field(OBSERVATION_CODEVALUE_FIELD_NAME); observationCodeCodeAggregationBuilder.order(BucketOrder.key(true)); // Top Hits Aggregation observationCodeCodeAggregationBuilder.subAggregation(AggregationBuilders.topHits(MOST_RECENT_EFFECTIVE) - .sort(OBSERVATION_EFFECTIVEDTM_FIELD_NAME, SortOrder.DESC) - .fetchSource(theTopHitsInclude, null).size(theMaxNumberObservationsPerCode)); + .sort(OBSERVATION_EFFECTIVEDTM_FIELD_NAME, SortOrder.DESC) + .fetchSource(theTopHitsInclude, null) + .size(theMaxNumberObservationsPerCode)); observationCodeCodeAggregationBuilder.size(10000); - TermsAggregationBuilder observationCodeSystemAggregationBuilder = new TermsAggregationBuilder(GROUP_BY_SYSTEM).field(OBSERVATION_CODESYSTEM_FIELD_NAME); + TermsAggregationBuilder observationCodeSystemAggregationBuilder = + new TermsAggregationBuilder(GROUP_BY_SYSTEM).field(OBSERVATION_CODESYSTEM_FIELD_NAME); observationCodeSystemAggregationBuilder.order(BucketOrder.key(true)); observationCodeSystemAggregationBuilder.subAggregation(observationCodeCodeAggregationBuilder); return observationCodeSystemAggregationBuilder; @@ -320,12 +356,16 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { return myRestHighLevelClient.search(searchRequest, RequestOptions.DEFAULT); } - private 
List buildObservationList(SearchResponse theSearchResponse, Function setValue, - SearchParameterMap theSearchParameterMap, FhirContext theFhirContext, - Integer theMaxResultsToFetch) throws IOException { + private List buildObservationList( + SearchResponse theSearchResponse, + Function setValue, + SearchParameterMap theSearchParameterMap, + FhirContext theFhirContext, + Integer theMaxResultsToFetch) + throws IOException { List theObservationList = new ArrayList<>(); if (theSearchParameterMap.containsKey(LastNParameterHelper.getPatientParamName(theFhirContext)) - || theSearchParameterMap.containsKey(LastNParameterHelper.getSubjectParamName(theFhirContext))) { + || theSearchParameterMap.containsKey(LastNParameterHelper.getSubjectParamName(theFhirContext))) { for (ParsedComposite.ParsedBucket subjectBucket : getSubjectBuckets(theSearchResponse)) { if (theMaxResultsToFetch != null && theObservationList.size() >= theMaxResultsToFetch) { break; @@ -339,7 +379,8 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { break; } String indexedObservation = lastNMatch.getSourceAsString(); - ObservationJson observationJson = objectMapper.readValue(indexedObservation, ObservationJson.class); + ObservationJson observationJson = + objectMapper.readValue(indexedObservation, ObservationJson.class); theObservationList.add(setValue.apply(observationJson)); } } @@ -396,7 +437,10 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { return parsedTopHits.getHits().getHits(); } - private SearchRequest buildObservationsSearchRequest(SearchParameterMap theSearchParameterMap, FhirContext theFhirContext, AggregationBuilder theAggregationBuilder) { + private SearchRequest buildObservationsSearchRequest( + SearchParameterMap theSearchParameterMap, + FhirContext theFhirContext, + AggregationBuilder theAggregationBuilder) { SearchRequest searchRequest = new SearchRequest(OBSERVATION_INDEX); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); // Query @@ -418,8 +462,11 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { return searchRequest; } - private SearchRequest buildObservationsSearchRequest(String theSubjectParam, SearchParameterMap theSearchParameterMap, FhirContext theFhirContext, - AggregationBuilder theAggregationBuilder) { + private SearchRequest buildObservationsSearchRequest( + String theSubjectParam, + SearchParameterMap theSearchParameterMap, + FhirContext theFhirContext, + AggregationBuilder theAggregationBuilder) { SearchRequest searchRequest = new SearchRequest(OBSERVATION_INDEX); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); // Query @@ -438,15 +485,19 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { return searchRequest; } - private Boolean searchParamsHaveLastNCriteria(SearchParameterMap theSearchParameterMap, FhirContext theFhirContext) { - return theSearchParameterMap != null && - (theSearchParameterMap.containsKey(LastNParameterHelper.getPatientParamName(theFhirContext)) - || theSearchParameterMap.containsKey(LastNParameterHelper.getSubjectParamName(theFhirContext)) - || theSearchParameterMap.containsKey(LastNParameterHelper.getCategoryParamName(theFhirContext)) - || theSearchParameterMap.containsKey(LastNParameterHelper.getCodeParamName(theFhirContext))); + private Boolean searchParamsHaveLastNCriteria( + SearchParameterMap theSearchParameterMap, FhirContext theFhirContext) { + return theSearchParameterMap != null + && 
(theSearchParameterMap.containsKey(LastNParameterHelper.getPatientParamName(theFhirContext)) + || theSearchParameterMap.containsKey(LastNParameterHelper.getSubjectParamName(theFhirContext)) + || theSearchParameterMap.containsKey(LastNParameterHelper.getCategoryParamName(theFhirContext)) + || theSearchParameterMap.containsKey(LastNParameterHelper.getCodeParamName(theFhirContext))); } - private void addCategoriesCriteria(BoolQueryBuilder theBoolQueryBuilder, SearchParameterMap theSearchParameterMap, FhirContext theFhirContext) { + private void addCategoriesCriteria( + BoolQueryBuilder theBoolQueryBuilder, + SearchParameterMap theSearchParameterMap, + FhirContext theFhirContext) { String categoryParamName = LastNParameterHelper.getCategoryParamName(theFhirContext); if (theSearchParameterMap.containsKey(categoryParamName)) { ArrayList codeSystemHashList = new ArrayList<>(); @@ -461,24 +512,27 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { textOnlyList.addAll(getCodingTextOnlyValues(nextAnd)); } if (codeSystemHashList.size() > 0) { - theBoolQueryBuilder.must(QueryBuilders.termsQuery(OBSERVATION_CATEGORYHASH_FIELD_NAME, codeSystemHashList)); + theBoolQueryBuilder.must( + QueryBuilders.termsQuery(OBSERVATION_CATEGORYHASH_FIELD_NAME, codeSystemHashList)); } if (codeOnlyList.size() > 0) { theBoolQueryBuilder.must(QueryBuilders.termsQuery(OBSERVATION_CATEGORYVALUE_FIELD_NAME, codeOnlyList)); } if (systemOnlyList.size() > 0) { - theBoolQueryBuilder.must(QueryBuilders.termsQuery(OBSERVATION_CATEGORYSYSTEM_FIELD_NAME, systemOnlyList)); + theBoolQueryBuilder.must( + QueryBuilders.termsQuery(OBSERVATION_CATEGORYSYSTEM_FIELD_NAME, systemOnlyList)); } if (textOnlyList.size() > 0) { BoolQueryBuilder myTextBoolQueryBuilder = QueryBuilders.boolQuery(); for (String textOnlyParam : textOnlyList) { - myTextBoolQueryBuilder.should(QueryBuilders.matchPhrasePrefixQuery(OBSERVATION_CATEGORYDISPLAY_FIELD_NAME, textOnlyParam)); - myTextBoolQueryBuilder.should(QueryBuilders.matchPhrasePrefixQuery(OBSERVATION_CATEGORYTEXT_FIELD_NAME, textOnlyParam)); + myTextBoolQueryBuilder.should(QueryBuilders.matchPhrasePrefixQuery( + OBSERVATION_CATEGORYDISPLAY_FIELD_NAME, textOnlyParam)); + myTextBoolQueryBuilder.should( + QueryBuilders.matchPhrasePrefixQuery(OBSERVATION_CATEGORYTEXT_FIELD_NAME, textOnlyParam)); } theBoolQueryBuilder.must(myTextBoolQueryBuilder); } } - } private List getCodingCodeSystemValues(List codeParams) { @@ -487,10 +541,12 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { if (nextOr instanceof TokenParam) { TokenParam ref = (TokenParam) nextOr; if (ref.getSystem() != null && ref.getValue() != null) { - codeSystemHashList.add(String.valueOf(CodeSystemHash.hashCodeSystem(ref.getSystem(), ref.getValue()))); + codeSystemHashList.add( + String.valueOf(CodeSystemHash.hashCodeSystem(ref.getSystem(), ref.getValue()))); } } else { - throw new IllegalArgumentException(Msg.code(1181) + "Invalid token type (expecting TokenParam): " + nextOr.getClass()); + throw new IllegalArgumentException( + Msg.code(1181) + "Invalid token type (expecting TokenParam): " + nextOr.getClass()); } } return codeSystemHashList; @@ -506,7 +562,8 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { codeOnlyList.add(ref.getValue()); } } else { - throw new IllegalArgumentException(Msg.code(1182) + "Invalid token type (expecting TokenParam): " + nextOr.getClass()); + throw new IllegalArgumentException( + Msg.code(1182) + "Invalid token type (expecting TokenParam): " + nextOr.getClass()); } } 
return codeOnlyList; @@ -522,7 +579,8 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { systemOnlyList.add(ref.getSystem()); } } else { - throw new IllegalArgumentException(Msg.code(1183) + "Invalid token type (expecting TokenParam): " + nextOr.getClass()); + throw new IllegalArgumentException( + Msg.code(1183) + "Invalid token type (expecting TokenParam): " + nextOr.getClass()); } } return systemOnlyList; @@ -538,13 +596,17 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { textOnlyList.add(ref.getValue()); } } else { - throw new IllegalArgumentException(Msg.code(1184) + "Invalid token type (expecting TokenParam): " + nextOr.getClass()); + throw new IllegalArgumentException( + Msg.code(1184) + "Invalid token type (expecting TokenParam): " + nextOr.getClass()); } } return textOnlyList; } - private void addObservationCodeCriteria(BoolQueryBuilder theBoolQueryBuilder, SearchParameterMap theSearchParameterMap, FhirContext theFhirContext) { + private void addObservationCodeCriteria( + BoolQueryBuilder theBoolQueryBuilder, + SearchParameterMap theSearchParameterMap, + FhirContext theFhirContext) { String codeParamName = LastNParameterHelper.getCodeParamName(theFhirContext); if (theSearchParameterMap.containsKey(codeParamName)) { ArrayList codeSystemHashList = new ArrayList<>(); @@ -570,16 +632,20 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { if (textOnlyList.size() > 0) { BoolQueryBuilder myTextBoolQueryBuilder = QueryBuilders.boolQuery(); for (String textOnlyParam : textOnlyList) { - myTextBoolQueryBuilder.should(QueryBuilders.matchPhrasePrefixQuery(OBSERVATION_CODEDISPLAY_FIELD_NAME, textOnlyParam)); - myTextBoolQueryBuilder.should(QueryBuilders.matchPhrasePrefixQuery(OBSERVATION_CODE_TEXT_FIELD_NAME, textOnlyParam)); + myTextBoolQueryBuilder.should( + QueryBuilders.matchPhrasePrefixQuery(OBSERVATION_CODEDISPLAY_FIELD_NAME, textOnlyParam)); + myTextBoolQueryBuilder.should( + QueryBuilders.matchPhrasePrefixQuery(OBSERVATION_CODE_TEXT_FIELD_NAME, textOnlyParam)); } theBoolQueryBuilder.must(myTextBoolQueryBuilder); } } - } - private void addDateCriteria(BoolQueryBuilder theBoolQueryBuilder, SearchParameterMap theSearchParameterMap, FhirContext theFhirContext) { + private void addDateCriteria( + BoolQueryBuilder theBoolQueryBuilder, + SearchParameterMap theSearchParameterMap, + FhirContext theFhirContext) { String dateParamName = LastNParameterHelper.getEffectiveParamName(theFhirContext); if (theSearchParameterMap.containsKey(dateParamName)) { List> andOrParams = theSearchParameterMap.get(dateParamName); @@ -615,7 +681,8 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { } @VisibleForTesting - public List executeLastNWithAllFieldsForTest(SearchParameterMap theSearchParameterMap, FhirContext theFhirContext) { + public List executeLastNWithAllFieldsForTest( + SearchParameterMap theSearchParameterMap, FhirContext theFhirContext) { return buildAndExecuteSearch(theSearchParameterMap, theFhirContext, null, t -> t, 100); } @@ -644,13 +711,15 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { @Override public ObservationJson getObservationDocument(String theDocumentID) { if (theDocumentID == null) { - throw new InvalidRequestException(Msg.code(1185) + "Require non-null document ID for observation document query"); + throw new InvalidRequestException( + Msg.code(1185) + "Require non-null document ID for observation document query"); } SearchRequest theSearchRequest = 
buildSingleObservationSearchRequest(theDocumentID); ObservationJson observationDocumentJson = null; try { SearchResponse observationDocumentResponse = executeSearchRequest(theSearchRequest); - SearchHit[] observationDocumentHits = observationDocumentResponse.getHits().getHits(); + SearchHit[] observationDocumentHits = + observationDocumentResponse.getHits().getHits(); if (observationDocumentHits.length > 0) { // There should be no more than one hit for the identifier String observationDocument = observationDocumentHits[0].getSourceAsString(); @@ -658,7 +727,8 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { } } catch (IOException theE) { - throw new InvalidRequestException(Msg.code(1186) + "Unable to execute observation document query for ID " + theDocumentID, theE); + throw new InvalidRequestException( + Msg.code(1186) + "Unable to execute observation document query for ID " + theDocumentID, theE); } return observationDocumentJson; @@ -680,13 +750,15 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { @Override public CodeJson getObservationCodeDocument(String theCodeSystemHash, String theText) { if (theCodeSystemHash == null && theText == null) { - throw new InvalidRequestException(Msg.code(1187) + "Require a non-null code system hash value or display value for observation code document query"); + throw new InvalidRequestException(Msg.code(1187) + + "Require a non-null code system hash value or display value for observation code document query"); } SearchRequest theSearchRequest = buildSingleObservationCodeSearchRequest(theCodeSystemHash, theText); CodeJson observationCodeDocumentJson = null; try { SearchResponse observationCodeDocumentResponse = executeSearchRequest(theSearchRequest); - SearchHit[] observationCodeDocumentHits = observationCodeDocumentResponse.getHits().getHits(); + SearchHit[] observationCodeDocumentHits = + observationCodeDocumentResponse.getHits().getHits(); if (observationCodeDocumentHits.length > 0) { // There should be no more than one hit for the code lookup. 
String observationCodeDocument = observationCodeDocumentHits[0].getSourceAsString(); @@ -694,7 +766,8 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { } } catch (IOException theE) { - throw new InvalidRequestException(Msg.code(1188) + "Unable to execute observation code document query hash code or display", theE); + throw new InvalidRequestException( + Msg.code(1188) + "Unable to execute observation code document query hash code or display", theE); } return observationCodeDocumentJson; @@ -722,27 +795,39 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { public Boolean createOrUpdateObservationIndex(String theDocumentId, ObservationJson theObservationDocument) { try { String documentToIndex = objectMapper.writeValueAsString(theObservationDocument); - return performIndex(OBSERVATION_INDEX, theDocumentId, documentToIndex, ElasticsearchSvcImpl.OBSERVATION_DOCUMENT_TYPE); + return performIndex( + OBSERVATION_INDEX, theDocumentId, documentToIndex, ElasticsearchSvcImpl.OBSERVATION_DOCUMENT_TYPE); } catch (IOException theE) { - throw new InvalidRequestException(Msg.code(1189) + "Unable to persist Observation document " + theDocumentId); + throw new InvalidRequestException( + Msg.code(1189) + "Unable to persist Observation document " + theDocumentId); } } @Override - public Boolean createOrUpdateObservationCodeIndex(String theCodeableConceptID, CodeJson theObservationCodeDocument) { + public Boolean createOrUpdateObservationCodeIndex( + String theCodeableConceptID, CodeJson theObservationCodeDocument) { try { String documentToIndex = objectMapper.writeValueAsString(theObservationCodeDocument); - return performIndex(OBSERVATION_CODE_INDEX, theCodeableConceptID, documentToIndex, ElasticsearchSvcImpl.CODE_DOCUMENT_TYPE); + return performIndex( + OBSERVATION_CODE_INDEX, + theCodeableConceptID, + documentToIndex, + ElasticsearchSvcImpl.CODE_DOCUMENT_TYPE); } catch (IOException theE) { - throw new InvalidRequestException(Msg.code(1190) + "Unable to persist Observation Code document " + theCodeableConceptID); + throw new InvalidRequestException( + Msg.code(1190) + "Unable to persist Observation Code document " + theCodeableConceptID); } } - private boolean performIndex(String theIndexName, String theDocumentId, String theIndexDocument, String theDocumentType) throws IOException { - IndexResponse indexResponse = myRestHighLevelClient.index(createIndexRequest(theIndexName, theDocumentId, theIndexDocument, theDocumentType), - RequestOptions.DEFAULT); + private boolean performIndex( + String theIndexName, String theDocumentId, String theIndexDocument, String theDocumentType) + throws IOException { + IndexResponse indexResponse = myRestHighLevelClient.index( + createIndexRequest(theIndexName, theDocumentId, theIndexDocument, theDocumentType), + RequestOptions.DEFAULT); - return (indexResponse.getResult() == DocWriteResponse.Result.CREATED) || (indexResponse.getResult() == DocWriteResponse.Result.UPDATED); + return (indexResponse.getResult() == DocWriteResponse.Result.CREATED) + || (indexResponse.getResult() == DocWriteResponse.Result.UPDATED); } @Override @@ -755,19 +840,22 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { SearchRequest searchRequest = buildObservationResourceSearchRequest(thePids); try { SearchResponse observationDocumentResponse = executeSearchRequest(searchRequest); - SearchHit[] observationDocumentHits = observationDocumentResponse.getHits().getHits(); + SearchHit[] observationDocumentHits = + 
observationDocumentResponse.getHits().getHits(); IParser parser = TolerantJsonParser.createWithLenientErrorHandling(myContext, null); - Class resourceType = myContext.getResourceDefinition(OBSERVATION_RESOURCE_NAME).getImplementingClass(); + Class resourceType = + myContext.getResourceDefinition(OBSERVATION_RESOURCE_NAME).getImplementingClass(); /** * @see ca.uhn.fhir.jpa.dao.BaseHapiFhirDao#toResource(Class, IBaseResourceEntity, Collection, boolean) for * details about parsing raw json to BaseResource */ return Arrays.stream(observationDocumentHits) - .map(this::parseObservationJson) - .map(observationJson -> parser.parseResource(resourceType, observationJson.getResource())) - .collect(Collectors.toList()); + .map(this::parseObservationJson) + .map(observationJson -> parser.parseResource(resourceType, observationJson.getResource())) + .collect(Collectors.toList()); } catch (IOException theE) { - throw new InvalidRequestException(Msg.code(2003) + "Unable to execute observation document query for provided IDs " + thePids, theE); + throw new InvalidRequestException( + Msg.code(2003) + "Unable to execute observation document query for provided IDs " + thePids, theE); } } @@ -792,8 +880,8 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { return searchRequest; } - - private IndexRequest createIndexRequest(String theIndexName, String theDocumentId, String theObservationDocument, String theDocumentType) { + private IndexRequest createIndexRequest( + String theIndexName, String theDocumentId, String theObservationDocument, String theDocumentType) { IndexRequest request = new IndexRequest(theIndexName); request.id(theDocumentId); request.source(theObservationDocument, XContentType.JSON); @@ -822,5 +910,4 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { public void refreshIndex(String theIndexName) throws IOException { myRestHighLevelClient.indices().refresh(new RefreshRequest(theIndexName), RequestOptions.DEFAULT); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/IElasticsearchSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/IElasticsearchSvc.java index 3b02c38567d..c6f8209fe0f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/IElasticsearchSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/IElasticsearchSvc.java @@ -40,7 +40,8 @@ public interface IElasticsearchSvc { * @param theMaxResultsToFetch The maximum number of results to return for the purpose of paging. 
* @return */ - List executeLastN(SearchParameterMap theSearchParameterMap, FhirContext theFhirContext, Integer theMaxResultsToFetch); + List executeLastN( + SearchParameterMap theSearchParameterMap, FhirContext theFhirContext, Integer theMaxResultsToFetch); /** * Returns index document for a single Observation @@ -96,5 +97,4 @@ public interface IElasticsearchSvc { * @return Resources list or empty if nothing found */ List getObservationResources(Collection thePids); - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/json/CodeJson.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/json/CodeJson.java index 8b7fc717525..202c927cbf5 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/json/CodeJson.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/json/CodeJson.java @@ -28,29 +28,33 @@ import java.util.ArrayList; import java.util.List; @JsonInclude(JsonInclude.Include.NON_NULL) -@JsonAutoDetect(creatorVisibility = JsonAutoDetect.Visibility.NONE, fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE) +@JsonAutoDetect( + creatorVisibility = JsonAutoDetect.Visibility.NONE, + fieldVisibility = JsonAutoDetect.Visibility.NONE, + getterVisibility = JsonAutoDetect.Visibility.NONE, + isGetterVisibility = JsonAutoDetect.Visibility.NONE, + setterVisibility = JsonAutoDetect.Visibility.NONE) public class CodeJson { - @JsonProperty(value = "codeable_concept_id", required = false) - private String myCodeableConceptId; + @JsonProperty(value = "codeable_concept_id", required = false) + private String myCodeableConceptId; - @JsonProperty(value = "text", required = false) - private String myCodeableConceptText; + @JsonProperty(value = "text", required = false) + private String myCodeableConceptText; - @JsonProperty(value = "codingcode", required = false) - private List myCoding_code = new ArrayList<>(); + @JsonProperty(value = "codingcode", required = false) + private List myCoding_code = new ArrayList<>(); - @JsonProperty(value = "codingcode_system_hash", required = true) - private List myCoding_code_system_hash = new ArrayList<>(); + @JsonProperty(value = "codingcode_system_hash", required = true) + private List myCoding_code_system_hash = new ArrayList<>(); - @JsonProperty(value = "codingdisplay", required = false) - private List myCoding_display = new ArrayList<>(); + @JsonProperty(value = "codingdisplay", required = false) + private List myCoding_display = new ArrayList<>(); - @JsonProperty(value = "codingsystem", required = false) - private List myCoding_system = new ArrayList<>(); + @JsonProperty(value = "codingsystem", required = false) + private List myCoding_system = new ArrayList<>(); - public CodeJson(){ - } + public CodeJson() {} public void setCodeableConceptId(String theCodeableConceptId) { myCodeableConceptId = theCodeableConceptId; @@ -64,30 +68,30 @@ public class CodeJson { } public String getCodeableConceptId() { - return myCodeableConceptId; - } + return myCodeableConceptId; + } - public void setCodeableConceptText(String theCodeableConceptText) { + public void setCodeableConceptText(String theCodeableConceptText) { myCodeableConceptText = theCodeableConceptText; - } + } - public String getCodeableConceptText() { - return myCodeableConceptText; - } + public String getCodeableConceptText() { + return myCodeableConceptText; + } - 
public List getCoding_code() { - return myCoding_code; - } + public List getCoding_code() { + return myCoding_code; + } public List getCoding_code_system_hash() { - return myCoding_code_system_hash; - } + return myCoding_code_system_hash; + } public List getCoding_display() { - return myCoding_display; - } + return myCoding_display; + } - public List getCoding_system() { - return myCoding_system; - } + public List getCoding_system() { + return myCoding_system; + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/json/ObservationJson.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/json/ObservationJson.java index 8abbdd9bdcc..78b949a7a79 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/json/ObservationJson.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/json/ObservationJson.java @@ -19,7 +19,6 @@ */ package ca.uhn.fhir.jpa.search.lastn.json; -import ca.uhn.fhir.jpa.model.util.CodeSystemHash; import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; @@ -29,7 +28,12 @@ import java.util.Date; import java.util.List; @JsonInclude(JsonInclude.Include.NON_NULL) -@JsonAutoDetect(creatorVisibility = JsonAutoDetect.Visibility.NONE, fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE) +@JsonAutoDetect( + creatorVisibility = JsonAutoDetect.Visibility.NONE, + fieldVisibility = JsonAutoDetect.Visibility.NONE, + getterVisibility = JsonAutoDetect.Visibility.NONE, + isGetterVisibility = JsonAutoDetect.Visibility.NONE, + setterVisibility = JsonAutoDetect.Visibility.NONE) public class ObservationJson { @JsonProperty(value = "identifier", required = true) @@ -77,8 +81,7 @@ public class ObservationJson { @JsonProperty(value = "resource") private String myResource; - public ObservationJson() { - } + public ObservationJson() {} public void setIdentifier(String theIdentifier) { myIdentifier = theIdentifier; @@ -142,7 +145,6 @@ public class ObservationJson { myCode_coding_code = theCode.getCoding_code().get(0); myCode_coding_display = theCode.getCoding_display().get(0); myCode_coding_system = theCode.getCoding_system().get(0); - } public CodeJson getCode() { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/IInstanceReindexService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/IInstanceReindexService.java index 2015ceea533..c56932a5168 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/IInstanceReindexService.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/IInstanceReindexService.java @@ -23,15 +23,16 @@ import ca.uhn.fhir.rest.api.server.RequestDetails; import org.hl7.fhir.instance.model.api.IBaseParameters; import org.hl7.fhir.instance.model.api.IIdType; -import javax.annotation.Nullable; import java.util.Set; +import javax.annotation.Nullable; public interface IInstanceReindexService { /** * Simulate a reindex and return the details about what would change */ - IBaseParameters reindexDryRun(RequestDetails theRequestDetails, IIdType theResourceId, @Nullable Set theParameters); + IBaseParameters reindexDryRun( + RequestDetails theRequestDetails, IIdType theResourceId, @Nullable Set theParameters); /** * Perform a 
reindex on a single resource and return details about what changed diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/InstanceReindexServiceImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/InstanceReindexServiceImpl.java index 3ed59e6f82a..574cda69677 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/InstanceReindexServiceImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/InstanceReindexServiceImpl.java @@ -59,16 +59,15 @@ import org.hl7.fhir.r4.model.UriType; import org.hl7.fhir.r4.model.UrlType; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static ca.uhn.fhir.jpa.dao.index.DaoSearchParamSynchronizer.subtract; import static java.util.Comparator.comparing; @@ -79,23 +78,33 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; public class InstanceReindexServiceImpl implements IInstanceReindexService { private final FhirContext myContextR4 = FhirContext.forR4Cached(); + @Autowired protected IJpaStorageResourceParser myJpaStorageResourceParser; + @Autowired private SearchParamExtractorService mySearchParamExtractorService; + @Autowired private BaseRequestPartitionHelperSvc myPartitionHelperSvc; + @Autowired private IHapiTransactionService myTransactionService; + @Autowired private IInterceptorService myInterceptorService; + @Autowired private DaoRegistry myDaoRegistry; + @Autowired private VersionCanonicalizer myVersionCanonicalizer; + @Autowired private PartitionSettings myPartitionSettings; + private final CustomThymeleafNarrativeGenerator myNarrativeGenerator; + @Autowired private ISearchParamRegistry mySearchParamRegistry; @@ -103,19 +112,22 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService { * Constructor */ public InstanceReindexServiceImpl() { - myNarrativeGenerator = new CustomThymeleafNarrativeGenerator("classpath:ca/uhn/fhir/jpa/search/reindex/reindex-outcome-narrative.properties"); + myNarrativeGenerator = new CustomThymeleafNarrativeGenerator( + "classpath:ca/uhn/fhir/jpa/search/reindex/reindex-outcome-narrative.properties"); } @Override - public IBaseParameters reindexDryRun(RequestDetails theRequestDetails, IIdType theResourceId, @Nullable Set theParameters) { + public IBaseParameters reindexDryRun( + RequestDetails theRequestDetails, IIdType theResourceId, @Nullable Set theParameters) { RequestPartitionId partitionId = determinePartition(theRequestDetails, theResourceId); TransactionDetails transactionDetails = new TransactionDetails(); Parameters retValCanonical = myTransactionService - .withRequest(theRequestDetails) - .withTransactionDetails(transactionDetails) - .withRequestPartitionId(partitionId) - .execute(() -> reindexDryRunInTransaction(theRequestDetails, theResourceId, partitionId, transactionDetails, theParameters)); + .withRequest(theRequestDetails) + .withTransactionDetails(transactionDetails) + .withRequestPartitionId(partitionId) + .execute(() -> reindexDryRunInTransaction( + theRequestDetails, theResourceId, partitionId, transactionDetails, theParameters)); return 
myVersionCanonicalizer.parametersFromCanonical(retValCanonical); } @@ -126,10 +138,10 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService { TransactionDetails transactionDetails = new TransactionDetails(); Parameters retValCanonical = myTransactionService - .withRequest(theRequestDetails) - .withTransactionDetails(transactionDetails) - .withRequestPartitionId(partitionId) - .execute(() -> reindexInTransaction(theRequestDetails, theResourceId)); + .withRequest(theRequestDetails) + .withTransactionDetails(transactionDetails) + .withRequestPartitionId(partitionId) + .execute(() -> reindexInTransaction(theRequestDetails, theResourceId)); return myVersionCanonicalizer.parametersFromCanonical(retValCanonical); } @@ -144,7 +156,8 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService { // Invoke the pre-access and pre-show interceptors in case there are any security // restrictions or audit requirements around the user accessing this resource - BaseHapiFhirResourceDao.invokeStoragePreAccessResources(myInterceptorService, theRequestDetails, theResourceId, resource); + BaseHapiFhirResourceDao.invokeStoragePreAccessResources( + myInterceptorService, theRequestDetails, theResourceId, resource); BaseHapiFhirResourceDao.invokeStoragePreShowResources(myInterceptorService, theRequestDetails, resource); ResourceIndexedSearchParams existingParamsToPopulate = new ResourceIndexedSearchParams(entity); @@ -167,7 +180,12 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService { } @Nonnull - private Parameters reindexDryRunInTransaction(RequestDetails theRequestDetails, IIdType theResourceId, RequestPartitionId theRequestPartitionId, TransactionDetails theTransactionDetails, Set theParameters) { + private Parameters reindexDryRunInTransaction( + RequestDetails theRequestDetails, + IIdType theResourceId, + RequestPartitionId theRequestPartitionId, + TransactionDetails theTransactionDetails, + Set theParameters) { StopWatch sw = new StopWatch(); IFhirResourceDao dao = myDaoRegistry.getResourceDao(theResourceId.getResourceType()); @@ -176,26 +194,36 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService { // Invoke the pre-access and pre-show interceptors in case there are any security // restrictions or audit requirements around the user accessing this resource - BaseHapiFhirResourceDao.invokeStoragePreAccessResources(myInterceptorService, theRequestDetails, theResourceId, resource); + BaseHapiFhirResourceDao.invokeStoragePreAccessResources( + myInterceptorService, theRequestDetails, theResourceId, resource); BaseHapiFhirResourceDao.invokeStoragePreShowResources(myInterceptorService, theRequestDetails, resource); ISearchParamExtractor.ISearchParamFilter searchParamFilter = ISearchParamExtractor.ALL_PARAMS; if (theParameters != null) { - searchParamFilter = params -> params - .stream() - .filter(t -> theParameters.contains(t.getName())) - .collect(Collectors.toSet()); + searchParamFilter = params -> params.stream() + .filter(t -> theParameters.contains(t.getName())) + .collect(Collectors.toSet()); } ResourceIndexedSearchParams newParamsToPopulate = new ResourceIndexedSearchParams(); - mySearchParamExtractorService.extractFromResource(theRequestPartitionId, theRequestDetails, newParamsToPopulate, new ResourceIndexedSearchParams(), entity, resource, theTransactionDetails, false, searchParamFilter); + mySearchParamExtractorService.extractFromResource( + theRequestPartitionId, + theRequestDetails, + newParamsToPopulate, + new 
ResourceIndexedSearchParams(), + entity, + resource, + theTransactionDetails, + false, + searchParamFilter); ResourceIndexedSearchParams existingParamsToPopulate; boolean showAction; if (theParameters == null) { existingParamsToPopulate = new ResourceIndexedSearchParams(entity); existingParamsToPopulate.mySearchParamPresentEntities.addAll(entity.getSearchParamPresents()); - fillInParamNames(entity, existingParamsToPopulate.mySearchParamPresentEntities, theResourceId.getResourceType()); + fillInParamNames( + entity, existingParamsToPopulate.mySearchParamPresentEntities, theResourceId.getResourceType()); showAction = true; } else { existingParamsToPopulate = new ResourceIndexedSearchParams(); @@ -214,7 +242,11 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService { @Nonnull @VisibleForTesting - Parameters buildIndexResponse(ResourceIndexedSearchParams theExistingParams, ResourceIndexedSearchParams theNewParams, boolean theShowAction, List theMessages) { + Parameters buildIndexResponse( + ResourceIndexedSearchParams theExistingParams, + ResourceIndexedSearchParams theNewParams, + boolean theShowAction, + List theMessages) { Parameters parameters = new Parameters(); Parameters.ParametersParameterComponent narrativeParameter = parameters.addParameter(); @@ -225,32 +257,164 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService { } // Normal indexes - addParamsNonMissing(parameters, "CoordinateIndexes", "Coords", theExistingParams.myCoordsParams, theNewParams.myCoordsParams, new CoordsParamPopulator(), theShowAction); - addParamsNonMissing(parameters, "DateIndexes", "Date", theExistingParams.myDateParams, theNewParams.myDateParams, new DateParamPopulator(), theShowAction); - addParamsNonMissing(parameters, "NumberIndexes", "Number", theExistingParams.myNumberParams, theNewParams.myNumberParams, new NumberParamPopulator(), theShowAction); - addParamsNonMissing(parameters, "QuantityIndexes", "Quantity", theExistingParams.myQuantityParams, theNewParams.myQuantityParams, new QuantityParamPopulator(), theShowAction); - addParamsNonMissing(parameters, "QuantityIndexes", "QuantityNormalized", theExistingParams.myQuantityNormalizedParams, theNewParams.myQuantityNormalizedParams, new QuantityNormalizedParamPopulator(), theShowAction); - addParamsNonMissing(parameters, "UriIndexes", "Uri", theExistingParams.myUriParams, theNewParams.myUriParams, new UriParamPopulator(), theShowAction); - addParamsNonMissing(parameters, "StringIndexes", "String", theExistingParams.myStringParams, theNewParams.myStringParams, new StringParamPopulator(), theShowAction); - addParamsNonMissing(parameters, "TokenIndexes", "Token", theExistingParams.myTokenParams, theNewParams.myTokenParams, new TokenParamPopulator(), theShowAction); + addParamsNonMissing( + parameters, + "CoordinateIndexes", + "Coords", + theExistingParams.myCoordsParams, + theNewParams.myCoordsParams, + new CoordsParamPopulator(), + theShowAction); + addParamsNonMissing( + parameters, + "DateIndexes", + "Date", + theExistingParams.myDateParams, + theNewParams.myDateParams, + new DateParamPopulator(), + theShowAction); + addParamsNonMissing( + parameters, + "NumberIndexes", + "Number", + theExistingParams.myNumberParams, + theNewParams.myNumberParams, + new NumberParamPopulator(), + theShowAction); + addParamsNonMissing( + parameters, + "QuantityIndexes", + "Quantity", + theExistingParams.myQuantityParams, + theNewParams.myQuantityParams, + new QuantityParamPopulator(), + theShowAction); + addParamsNonMissing( + 
parameters, + "QuantityIndexes", + "QuantityNormalized", + theExistingParams.myQuantityNormalizedParams, + theNewParams.myQuantityNormalizedParams, + new QuantityNormalizedParamPopulator(), + theShowAction); + addParamsNonMissing( + parameters, + "UriIndexes", + "Uri", + theExistingParams.myUriParams, + theNewParams.myUriParams, + new UriParamPopulator(), + theShowAction); + addParamsNonMissing( + parameters, + "StringIndexes", + "String", + theExistingParams.myStringParams, + theNewParams.myStringParams, + new StringParamPopulator(), + theShowAction); + addParamsNonMissing( + parameters, + "TokenIndexes", + "Token", + theExistingParams.myTokenParams, + theNewParams.myTokenParams, + new TokenParamPopulator(), + theShowAction); // Resource links - addParams(parameters, "ResourceLinks", "Reference", normalizeLinks(theExistingParams.myLinks), normalizeLinks(theNewParams.myLinks), new ResourceLinkPopulator(), theShowAction); + addParams( + parameters, + "ResourceLinks", + "Reference", + normalizeLinks(theExistingParams.myLinks), + normalizeLinks(theNewParams.myLinks), + new ResourceLinkPopulator(), + theShowAction); // Combo search params - addParams(parameters, "UniqueIndexes", "ComboStringUnique", theExistingParams.myComboStringUniques, theNewParams.myComboStringUniques, new ComboStringUniquePopulator(), theShowAction); - addParams(parameters, "NonUniqueIndexes", "ComboTokenNonUnique", theExistingParams.myComboTokenNonUnique, theNewParams.myComboTokenNonUnique, new ComboTokenNonUniquePopulator(), theShowAction); + addParams( + parameters, + "UniqueIndexes", + "ComboStringUnique", + theExistingParams.myComboStringUniques, + theNewParams.myComboStringUniques, + new ComboStringUniquePopulator(), + theShowAction); + addParams( + parameters, + "NonUniqueIndexes", + "ComboTokenNonUnique", + theExistingParams.myComboTokenNonUnique, + theNewParams.myComboTokenNonUnique, + new ComboTokenNonUniquePopulator(), + theShowAction); // Missing (:missing) indexes - addParamsMissing(parameters, "Coords", theExistingParams.myCoordsParams, theNewParams.myCoordsParams, new MissingIndexParamPopulator<>(), theShowAction); - addParamsMissing(parameters, "Date", theExistingParams.myDateParams, theNewParams.myDateParams, new MissingIndexParamPopulator<>(), theShowAction); - addParamsMissing(parameters, "Number", theExistingParams.myNumberParams, theNewParams.myNumberParams, new MissingIndexParamPopulator<>(), theShowAction); - addParamsMissing(parameters, "Quantity", theExistingParams.myQuantityParams, theNewParams.myQuantityParams, new MissingIndexParamPopulator<>(), theShowAction); - addParamsMissing(parameters, "QuantityNormalized", theExistingParams.myQuantityNormalizedParams, theNewParams.myQuantityNormalizedParams, new MissingIndexParamPopulator<>(), theShowAction); - addParamsMissing(parameters, "Uri", theExistingParams.myUriParams, theNewParams.myUriParams, new MissingIndexParamPopulator<>(), theShowAction); - addParamsMissing(parameters, "String", theExistingParams.myStringParams, theNewParams.myStringParams, new MissingIndexParamPopulator<>(), theShowAction); - addParamsMissing(parameters, "Token", theExistingParams.myTokenParams, theNewParams.myTokenParams, new MissingIndexParamPopulator<>(), theShowAction); - addParams(parameters, "MissingIndexes", "Reference", theExistingParams.mySearchParamPresentEntities, theNewParams.mySearchParamPresentEntities, new SearchParamPresentParamPopulator(), theShowAction); + addParamsMissing( + parameters, + "Coords", + theExistingParams.myCoordsParams, + 
theNewParams.myCoordsParams, + new MissingIndexParamPopulator<>(), + theShowAction); + addParamsMissing( + parameters, + "Date", + theExistingParams.myDateParams, + theNewParams.myDateParams, + new MissingIndexParamPopulator<>(), + theShowAction); + addParamsMissing( + parameters, + "Number", + theExistingParams.myNumberParams, + theNewParams.myNumberParams, + new MissingIndexParamPopulator<>(), + theShowAction); + addParamsMissing( + parameters, + "Quantity", + theExistingParams.myQuantityParams, + theNewParams.myQuantityParams, + new MissingIndexParamPopulator<>(), + theShowAction); + addParamsMissing( + parameters, + "QuantityNormalized", + theExistingParams.myQuantityNormalizedParams, + theNewParams.myQuantityNormalizedParams, + new MissingIndexParamPopulator<>(), + theShowAction); + addParamsMissing( + parameters, + "Uri", + theExistingParams.myUriParams, + theNewParams.myUriParams, + new MissingIndexParamPopulator<>(), + theShowAction); + addParamsMissing( + parameters, + "String", + theExistingParams.myStringParams, + theNewParams.myStringParams, + new MissingIndexParamPopulator<>(), + theShowAction); + addParamsMissing( + parameters, + "Token", + theExistingParams.myTokenParams, + theNewParams.myTokenParams, + new MissingIndexParamPopulator<>(), + theShowAction); + addParams( + parameters, + "MissingIndexes", + "Reference", + theExistingParams.mySearchParamPresentEntities, + theNewParams.mySearchParamPresentEntities, + new SearchParamPresentParamPopulator(), + theShowAction); String narrativeText = myNarrativeGenerator.generateResourceNarrative(myContextR4, parameters); narrativeParameter.setValue(new StringType(narrativeText)); @@ -263,12 +427,19 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService { * in the database entity, it only stores a hash. So we brute force possible hashes here * to figure out the associated param names. 
*/ - private void fillInParamNames(ResourceTable theEntity, Collection theTarget, String theResourceName) { + private void fillInParamNames( + ResourceTable theEntity, Collection theTarget, String theResourceName) { Map hashes = new HashMap<>(); ResourceSearchParams searchParams = mySearchParamRegistry.getActiveSearchParams(theResourceName); for (RuntimeSearchParam next : searchParams.values()) { - hashes.put(SearchParamPresentEntity.calculateHashPresence(myPartitionSettings, theEntity.getPartitionId(), theResourceName, next.getName(), true), next.getName()); - hashes.put(SearchParamPresentEntity.calculateHashPresence(myPartitionSettings, theEntity.getPartitionId(), theResourceName, next.getName(), false), next.getName()); + hashes.put( + SearchParamPresentEntity.calculateHashPresence( + myPartitionSettings, theEntity.getPartitionId(), theResourceName, next.getName(), true), + next.getName()); + hashes.put( + SearchParamPresentEntity.calculateHashPresence( + myPartitionSettings, theEntity.getPartitionId(), theResourceName, next.getName(), false), + next.getName()); } for (SearchParamPresentEntity next : theTarget) { @@ -281,31 +452,24 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService { } private enum ActionEnum { - ADD, REMOVE, UNKNOWN, NO_CHANGE - } - private static abstract class BaseParamPopulator { - + private abstract static class BaseParamPopulator { @Nonnull - public Parameters.ParametersParameterComponent addIndexValue(ActionEnum theAction, Parameters.ParametersParameterComponent theParent, T theParam, String theParamTypeName) { - Parameters.ParametersParameterComponent retVal = theParent - .addPart() - .setName(toPartName(theParam)); - retVal - .addPart() - .setName("Action") - .setValue(new CodeType(theAction.name())); + public Parameters.ParametersParameterComponent addIndexValue( + ActionEnum theAction, + Parameters.ParametersParameterComponent theParent, + T theParam, + String theParamTypeName) { + Parameters.ParametersParameterComponent retVal = theParent.addPart().setName(toPartName(theParam)); + retVal.addPart().setName("Action").setValue(new CodeType(theAction.name())); if (theParamTypeName != null) { - retVal - .addPart() - .setName("Type") - .setValue(new CodeType(theParamTypeName)); + retVal.addPart().setName("Type").setValue(new CodeType(theParamTypeName)); } return retVal; } @@ -317,12 +481,12 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService { } } - public static abstract class BaseIndexParamPopulator extends BaseParamPopulator { + public abstract static class BaseIndexParamPopulator + extends BaseParamPopulator { @Override protected String toPartName(T theParam) { return theParam.getParamName(); } - } private static class ComboStringUniquePopulator extends BaseParamPopulator { @@ -330,7 +494,6 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService { protected String toPartName(ResourceIndexedComboStringUnique theParam) { return theParam.getIndexString(); } - } private static class ComboTokenNonUniquePopulator extends BaseParamPopulator { @@ -343,141 +506,130 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService { private static class CoordsParamPopulator extends BaseIndexParamPopulator { @Nonnull @Override - public Parameters.ParametersParameterComponent addIndexValue(ActionEnum theAction, Parameters.ParametersParameterComponent theParent, ResourceIndexedSearchParamCoords theParam, String theParamTypeName) { - Parameters.ParametersParameterComponent retVal = 
super.addIndexValue(theAction, theParent, theParam, theParamTypeName); - retVal - .addPart() - .setName("Latitude") - .setValue(new DecimalType(theParam.getLatitude())); - retVal - .addPart() - .setName("Longitude") - .setValue(new DecimalType(theParam.getLongitude())); + public Parameters.ParametersParameterComponent addIndexValue( + ActionEnum theAction, + Parameters.ParametersParameterComponent theParent, + ResourceIndexedSearchParamCoords theParam, + String theParamTypeName) { + Parameters.ParametersParameterComponent retVal = + super.addIndexValue(theAction, theParent, theParam, theParamTypeName); + retVal.addPart().setName("Latitude").setValue(new DecimalType(theParam.getLatitude())); + retVal.addPart().setName("Longitude").setValue(new DecimalType(theParam.getLongitude())); return retVal; } - - } private static class DateParamPopulator extends BaseIndexParamPopulator { @Nonnull @Override - public Parameters.ParametersParameterComponent addIndexValue(ActionEnum theAction, Parameters.ParametersParameterComponent theParent, ResourceIndexedSearchParamDate theParam, String theParamTypeName) { - Parameters.ParametersParameterComponent retVal = super.addIndexValue(theAction, theParent, theParam, theParamTypeName); - retVal - .addPart() - .setName("High") - .setValue(new InstantType(theParam.getValueHigh())); - retVal - .addPart() - .setName("Low") - .setValue(new InstantType(theParam.getValueLow())); + public Parameters.ParametersParameterComponent addIndexValue( + ActionEnum theAction, + Parameters.ParametersParameterComponent theParent, + ResourceIndexedSearchParamDate theParam, + String theParamTypeName) { + Parameters.ParametersParameterComponent retVal = + super.addIndexValue(theAction, theParent, theParam, theParamTypeName); + retVal.addPart().setName("High").setValue(new InstantType(theParam.getValueHigh())); + retVal.addPart().setName("Low").setValue(new InstantType(theParam.getValueLow())); return retVal; } } - private static class MissingIndexParamPopulator extends BaseIndexParamPopulator { + private static class MissingIndexParamPopulator + extends BaseIndexParamPopulator { @Nonnull @Override - public Parameters.ParametersParameterComponent addIndexValue(ActionEnum theAction, Parameters.ParametersParameterComponent theParent, T theParam, String theParamTypeName) { - Parameters.ParametersParameterComponent retVal = super.addIndexValue(theAction, theParent, theParam, theParamTypeName); - retVal - .addPart() - .setName("Missing") - .setValue(new BooleanType(theParam.isMissing())); + public Parameters.ParametersParameterComponent addIndexValue( + ActionEnum theAction, + Parameters.ParametersParameterComponent theParent, + T theParam, + String theParamTypeName) { + Parameters.ParametersParameterComponent retVal = + super.addIndexValue(theAction, theParent, theParam, theParamTypeName); + retVal.addPart().setName("Missing").setValue(new BooleanType(theParam.isMissing())); return retVal; } - - } private static class NumberParamPopulator extends BaseIndexParamPopulator { - @Nonnull @Override - public Parameters.ParametersParameterComponent addIndexValue(ActionEnum theAction, Parameters.ParametersParameterComponent theParent, ResourceIndexedSearchParamNumber theParam, String theParamTypeName) { - Parameters.ParametersParameterComponent retVal = super.addIndexValue(theAction, theParent, theParam, theParamTypeName); - retVal - .addPart() - .setName("Value") - .setValue(new DecimalType(theParam.getValue())); + public Parameters.ParametersParameterComponent addIndexValue( + ActionEnum 
theAction, + Parameters.ParametersParameterComponent theParent, + ResourceIndexedSearchParamNumber theParam, + String theParamTypeName) { + Parameters.ParametersParameterComponent retVal = + super.addIndexValue(theAction, theParent, theParam, theParamTypeName); + retVal.addPart().setName("Value").setValue(new DecimalType(theParam.getValue())); return retVal; } - } private static class QuantityParamPopulator extends BaseIndexParamPopulator { @Nonnull @Override - public Parameters.ParametersParameterComponent addIndexValue(ActionEnum theAction, Parameters.ParametersParameterComponent theParent, ResourceIndexedSearchParamQuantity theParam, String theParamTypeName) { - Parameters.ParametersParameterComponent retVal = super.addIndexValue(theAction, theParent, theParam, theParamTypeName); - retVal - .addPart() - .setName("Value") - .setValue(new DecimalType(theParam.getValue())); - retVal - .addPart() - .setName("System") - .setValue(new UriType(theParam.getSystem())); - retVal - .addPart() - .setName("Units") - .setValue(new CodeType(theParam.getUnits())); + public Parameters.ParametersParameterComponent addIndexValue( + ActionEnum theAction, + Parameters.ParametersParameterComponent theParent, + ResourceIndexedSearchParamQuantity theParam, + String theParamTypeName) { + Parameters.ParametersParameterComponent retVal = + super.addIndexValue(theAction, theParent, theParam, theParamTypeName); + retVal.addPart().setName("Value").setValue(new DecimalType(theParam.getValue())); + retVal.addPart().setName("System").setValue(new UriType(theParam.getSystem())); + retVal.addPart().setName("Units").setValue(new CodeType(theParam.getUnits())); return retVal; } - } - private static class QuantityNormalizedParamPopulator extends BaseIndexParamPopulator { + private static class QuantityNormalizedParamPopulator + extends BaseIndexParamPopulator { @Nonnull @Override - public Parameters.ParametersParameterComponent addIndexValue(ActionEnum theAction, Parameters.ParametersParameterComponent theParent, ResourceIndexedSearchParamQuantityNormalized theParam, String theParamTypeName) { - Parameters.ParametersParameterComponent retVal = super.addIndexValue(theAction, theParent, theParam, theParamTypeName); - retVal - .addPart() - .setName("Value") - .setValue(new DecimalType(theParam.getValue())); - retVal - .addPart() - .setName("System") - .setValue(new UriType(theParam.getSystem())); - retVal - .addPart() - .setName("Units") - .setValue(new CodeType(theParam.getUnits())); + public Parameters.ParametersParameterComponent addIndexValue( + ActionEnum theAction, + Parameters.ParametersParameterComponent theParent, + ResourceIndexedSearchParamQuantityNormalized theParam, + String theParamTypeName) { + Parameters.ParametersParameterComponent retVal = + super.addIndexValue(theAction, theParent, theParam, theParamTypeName); + retVal.addPart().setName("Value").setValue(new DecimalType(theParam.getValue())); + retVal.addPart().setName("System").setValue(new UriType(theParam.getSystem())); + retVal.addPart().setName("Units").setValue(new CodeType(theParam.getUnits())); return retVal; } - } private static class ResourceLinkPopulator extends BaseParamPopulator { - @Nonnull @Override - public Parameters.ParametersParameterComponent addIndexValue(ActionEnum theAction, Parameters.ParametersParameterComponent theParent, ResourceLink theParam, String theParamTypeName) { - Parameters.ParametersParameterComponent retVal = super.addIndexValue(theAction, theParent, theParam, theParamTypeName); + public 
Parameters.ParametersParameterComponent addIndexValue( + ActionEnum theAction, + Parameters.ParametersParameterComponent theParent, + ResourceLink theParam, + String theParamTypeName) { + Parameters.ParametersParameterComponent retVal = + super.addIndexValue(theAction, theParent, theParam, theParamTypeName); if (theParam.getTargetResourceId() != null) { - retVal - .addPart() - .setName("TargetId") - .setValue(new StringType(theParam.getTargetResourceType() + "/" + theParam.getTargetResourceId())); + retVal.addPart() + .setName("TargetId") + .setValue(new StringType( + theParam.getTargetResourceType() + "/" + theParam.getTargetResourceId())); } else if (theParam.getTargetResourceUrl() != null) { - retVal - .addPart() - .setName("TargetUrl") - .setValue(new UrlType(theParam.getTargetResourceUrl())); + retVal.addPart().setName("TargetUrl").setValue(new UrlType(theParam.getTargetResourceUrl())); } if (theParam.getTargetResourceVersion() != null) { - retVal - .addPart() - .setName("TargetVersion") - .setValue(new StringType(theParam.getTargetResourceVersion().toString())); + retVal.addPart() + .setName("TargetVersion") + .setValue(new StringType( + theParam.getTargetResourceVersion().toString())); } return retVal; @@ -487,18 +639,19 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService { protected String toPartName(ResourceLink theParam) { return theParam.getSourcePath(); } - } private static class SearchParamPresentParamPopulator extends BaseParamPopulator { @Nonnull @Override - public Parameters.ParametersParameterComponent addIndexValue(ActionEnum theAction, Parameters.ParametersParameterComponent theParent, SearchParamPresentEntity theParam, String theParamTypeName) { - Parameters.ParametersParameterComponent retVal = super.addIndexValue(theAction, theParent, theParam, theParamTypeName); - retVal - .addPart() - .setName("Missing") - .setValue(new BooleanType(!theParam.isPresent())); + public Parameters.ParametersParameterComponent addIndexValue( + ActionEnum theAction, + Parameters.ParametersParameterComponent theParent, + SearchParamPresentEntity theParam, + String theParamTypeName) { + Parameters.ParametersParameterComponent retVal = + super.addIndexValue(theAction, theParent, theParam, theParamTypeName); + retVal.addPart().setName("Missing").setValue(new BooleanType(!theParam.isPresent())); return retVal; } @@ -506,23 +659,21 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService { protected String toPartName(SearchParamPresentEntity theParam) { return theParam.getParamName(); } - } private static class StringParamPopulator extends BaseIndexParamPopulator { @Nonnull @Override - public Parameters.ParametersParameterComponent addIndexValue(ActionEnum theAction, Parameters.ParametersParameterComponent theParent, ResourceIndexedSearchParamString theParam, String theParamTypeName) { - Parameters.ParametersParameterComponent retVal = super.addIndexValue(theAction, theParent, theParam, theParamTypeName); - retVal - .addPart() - .setName("ValueNormalized") - .setValue(new StringType(theParam.getValueNormalized())); - retVal - .addPart() - .setName("ValueExact") - .setValue(new StringType(theParam.getValueExact())); + public Parameters.ParametersParameterComponent addIndexValue( + ActionEnum theAction, + Parameters.ParametersParameterComponent theParent, + ResourceIndexedSearchParamString theParam, + String theParamTypeName) { + Parameters.ParametersParameterComponent retVal = + super.addIndexValue(theAction, theParent, theParam, theParamTypeName); + 
retVal.addPart().setName("ValueNormalized").setValue(new StringType(theParam.getValueNormalized())); + retVal.addPart().setName("ValueExact").setValue(new StringType(theParam.getValueExact())); return retVal; } } @@ -531,19 +682,18 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService { @Nonnull @Override - public Parameters.ParametersParameterComponent addIndexValue(ActionEnum theAction, Parameters.ParametersParameterComponent theParent, ResourceIndexedSearchParamToken theParam, String theParamTypeName) { - Parameters.ParametersParameterComponent retVal = super.addIndexValue(theAction, theParent, theParam, theParamTypeName); + public Parameters.ParametersParameterComponent addIndexValue( + ActionEnum theAction, + Parameters.ParametersParameterComponent theParent, + ResourceIndexedSearchParamToken theParam, + String theParamTypeName) { + Parameters.ParametersParameterComponent retVal = + super.addIndexValue(theAction, theParent, theParam, theParamTypeName); if (isNotBlank(theParam.getSystem())) { - retVal - .addPart() - .setName("System") - .setValue(new StringType(theParam.getSystem())); + retVal.addPart().setName("System").setValue(new StringType(theParam.getSystem())); } if (isNotBlank(theParam.getValue())) { - retVal - .addPart() - .setName("Value") - .setValue(new StringType(theParam.getValue())); + retVal.addPart().setName("Value").setValue(new StringType(theParam.getValue())); } return retVal; } @@ -553,12 +703,14 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService { @Nonnull @Override - public Parameters.ParametersParameterComponent addIndexValue(ActionEnum theAction, Parameters.ParametersParameterComponent theParent, ResourceIndexedSearchParamUri theParam, String theParamTypeName) { - Parameters.ParametersParameterComponent retVal = super.addIndexValue(theAction, theParent, theParam, theParamTypeName); - retVal - .addPart() - .setName("Value") - .setValue(new UriType(theParam.getUri())); + public Parameters.ParametersParameterComponent addIndexValue( + ActionEnum theAction, + Parameters.ParametersParameterComponent theParent, + ResourceIndexedSearchParamUri theParam, + String theParamTypeName) { + Parameters.ParametersParameterComponent retVal = + super.addIndexValue(theAction, theParent, theParam, theParamTypeName); + retVal.addPart().setName("Value").setValue(new UriType(theParam.getUri())); return retVal; } } @@ -570,13 +722,17 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService { * will actually be equal. 
*/ private static List normalizeLinks(Collection theLinks) { - return theLinks - .stream() - .map(ResourceLink::cloneWithoutTargetPid) - .collect(Collectors.toList()); + return theLinks.stream().map(ResourceLink::cloneWithoutTargetPid).collect(Collectors.toList()); } - private static void addParams(Parameters theParameters, String theSectionName, String theTypeName, Collection theExistingParams, Collection theNewParams, BaseParamPopulator thePopulator, boolean theShowAction) { + private static void addParams( + Parameters theParameters, + String theSectionName, + String theTypeName, + Collection theExistingParams, + Collection theNewParams, + BaseParamPopulator thePopulator, + boolean theShowAction) { List addedParams = subtract(theNewParams, theExistingParams); thePopulator.sort(addedParams); for (T next : addedParams) { @@ -601,30 +757,43 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService { Parameters.ParametersParameterComponent parent = getOrCreateSection(theParameters, theSectionName); thePopulator.addIndexValue(ActionEnum.NO_CHANGE, parent, next, theTypeName); } - } - private static void addParamsNonMissing(Parameters theParameters, String theSectionName, String theTypeName, Collection theExistingParams, Collection theNewParams, BaseParamPopulator thePopulator, boolean theShowAction) { + private static void addParamsNonMissing( + Parameters theParameters, + String theSectionName, + String theTypeName, + Collection theExistingParams, + Collection theNewParams, + BaseParamPopulator thePopulator, + boolean theShowAction) { Collection existingParams = filterWantMissing(theExistingParams, false); Collection newParams = filterWantMissing(theNewParams, false); addParams(theParameters, theSectionName, theTypeName, existingParams, newParams, thePopulator, theShowAction); } - private static void addParamsMissing(Parameters theParameters, String theTypeName, Collection theExistingParams, Collection theNewParams, BaseParamPopulator thePopulator, boolean theShowAction) { + private static void addParamsMissing( + Parameters theParameters, + String theTypeName, + Collection theExistingParams, + Collection theNewParams, + BaseParamPopulator thePopulator, + boolean theShowAction) { Collection existingParams = filterWantMissing(theExistingParams, true); Collection newParams = filterWantMissing(theNewParams, true); addParams(theParameters, "MissingIndexes", theTypeName, existingParams, newParams, thePopulator, theShowAction); } - private static Collection filterWantMissing(Collection theNewParams, boolean theWantMissing) { - return theNewParams - .stream() - .filter(t -> t.isMissing() == theWantMissing) - .collect(Collectors.toList()); + private static Collection filterWantMissing( + Collection theNewParams, boolean theWantMissing) { + return theNewParams.stream() + .filter(t -> t.isMissing() == theWantMissing) + .collect(Collectors.toList()); } @Nonnull - private static Parameters.ParametersParameterComponent getOrCreateSection(Parameters theParameters, String theSectionName) { + private static Parameters.ParametersParameterComponent getOrCreateSection( + Parameters theParameters, String theSectionName) { Parameters.ParametersParameterComponent parent = theParameters.getParameter(theSectionName); if (parent == null) { parent = theParameters.addParameter(); @@ -632,6 +801,4 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService { } return parent; } - - } diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexer.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexer.java index 99e673a38e8..ada96b93ad7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexer.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexer.java @@ -46,14 +46,19 @@ import static org.apache.commons.lang3.StringUtils.isBlank; @Service public class ResourceReindexer { private static final Logger ourLog = LoggerFactory.getLogger(ResourceReindexer.class); + @Autowired private IResourceHistoryTableDao myResourceHistoryTableDao; + @Autowired private IForcedIdDao myForcedIdDao; + @Autowired private IResourceTableDao myResourceTableDao; + @Autowired private DaoRegistry myDaoRegistry; + @Autowired(required = false) private IFulltextSearchSvc myFulltextSearchSvc; @@ -64,7 +69,8 @@ public class ResourceReindexer { } public void readAndReindexResourceByPid(Long theResourcePid) { - ResourceTable resourceTable = myResourceTableDao.findById(theResourcePid).orElseThrow(IllegalStateException::new); + ResourceTable resourceTable = + myResourceTableDao.findById(theResourcePid).orElseThrow(IllegalStateException::new); reindexResourceEntity(resourceTable); } @@ -75,7 +81,10 @@ public class ResourceReindexer { ForcedId forcedId = theResourceTable.getForcedId(); if (forcedId != null) { if (isBlank(forcedId.getResourceType())) { - ourLog.info("Updating resource {} forcedId type to {}", forcedId.getForcedId(), theResourceTable.getResourceType()); + ourLog.info( + "Updating resource {} forcedId type to {}", + forcedId.getForcedId(), + theResourceTable.getResourceType()); forcedId.setResourceType(theResourceTable.getResourceType()); myForcedIdDao.save(forcedId); } @@ -86,12 +95,17 @@ public class ResourceReindexer { IBaseResource resource = dao.readByPid(JpaPid.fromId(theResourceTable.getId()), true); if (resource == null) { - throw new InternalErrorException(Msg.code(1171) + "Could not find resource version " + theResourceTable.getIdDt().toUnqualified().getValue() + " in database"); + throw new InternalErrorException(Msg.code(1171) + "Could not find resource version " + + theResourceTable.getIdDt().toUnqualified().getValue() + " in database"); } Long actualVersion = resource.getIdElement().getVersionIdPartAsLong(); if (actualVersion < expectedVersion) { - ourLog.warn("Resource {} version {} does not exist, renumbering version {}", resource.getIdElement().toUnqualifiedVersionless().getValue(), resource.getIdElement().getVersionIdPart(), expectedVersion); + ourLog.warn( + "Resource {} version {} does not exist, renumbering version {}", + resource.getIdElement().toUnqualifiedVersionless().getValue(), + resource.getIdElement().getVersionIdPart(), + expectedVersion); myResourceHistoryTableDao.updateVersion(theResourceTable.getId(), actualVersion, expectedVersion); } @@ -108,6 +122,5 @@ public class ResourceReindexer { // update the full-text index, if active. 
myFulltextSearchSvc.reindex(theResourceTable); } - } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java index 1e2dd9b79b7..38243681312 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java @@ -57,12 +57,6 @@ import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.support.TransactionCallback; import org.springframework.transaction.support.TransactionTemplate; -import javax.annotation.Nullable; -import javax.annotation.PostConstruct; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.PersistenceContextType; -import javax.persistence.Query; import java.util.Collection; import java.util.Date; import java.util.List; @@ -76,6 +70,12 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.ReentrantLock; import java.util.stream.Collectors; +import javax.annotation.Nullable; +import javax.annotation.PostConstruct; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; +import javax.persistence.PersistenceContextType; +import javax.persistence.Query; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -90,27 +90,39 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasSc private static final Logger ourLog = LoggerFactory.getLogger(ResourceReindexingSvcImpl.class); private static final int PASS_SIZE = 25000; private final ReentrantLock myIndexingLock = new ReentrantLock(); + @Autowired private IResourceReindexJobDao myReindexJobDao; + @Autowired private JpaStorageSettings myStorageSettings; + @Autowired private PlatformTransactionManager myTxManager; + private TransactionTemplate myTxTemplate; - private final ThreadFactory myReindexingThreadFactory = new BasicThreadFactory.Builder().namingPattern("ResourceReindex-%d").build(); + private final ThreadFactory myReindexingThreadFactory = + new BasicThreadFactory.Builder().namingPattern("ResourceReindex-%d").build(); private ThreadPoolExecutor myTaskExecutor; + @Autowired private IResourceTableDao myResourceTableDao; + @Autowired private DaoRegistry myDaoRegistry; + @Autowired private IForcedIdDao myForcedIdDao; + @Autowired private FhirContext myContext; + @PersistenceContext(type = PersistenceContextType.TRANSACTION) private EntityManager myEntityManager; + @Autowired private ISearchParamRegistry mySearchParamRegistry; + @Autowired private ResourceReindexer myResourceReindexer; @@ -134,12 +146,14 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasSc // Create the threadpool executor used for reindex jobs int reindexThreadCount = myStorageSettings.getReindexThreadCount(); RejectedExecutionHandler rejectHandler = new BlockPolicy(); - myTaskExecutor = new ThreadPoolExecutor(0, reindexThreadCount, - 0L, TimeUnit.MILLISECONDS, - new LinkedBlockingQueue<>(100), - myReindexingThreadFactory, - rejectHandler - ); + myTaskExecutor = new ThreadPoolExecutor( + 0, + reindexThreadCount, + 0L, + TimeUnit.MILLISECONDS, + new LinkedBlockingQueue<>(100), + myReindexingThreadFactory, + rejectHandler); } @Override @@ -261,7 +275,9 @@ public class 
ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasSc int count = 0; for (ResourceReindexJobEntity next : jobs) { - if (next.getThresholdLow() != null && next.getThresholdLow().getTime() >= next.getThresholdHigh().getTime()) { + if (next.getThresholdLow() != null + && next.getThresholdLow().getTime() + >= next.getThresholdHigh().getTime()) { markJobAsDeleted(next); continue; } @@ -277,7 +293,8 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasSc } private Collection getResourceReindexJobEntities() { - Collection jobs = myTxTemplate.execute(t -> myReindexJobDao.findAll(PageRequest.of(0, 10), false)); + Collection jobs = + myTxTemplate.execute(t -> myReindexJobDao.findAll(PageRequest.of(0, 10), false)); assert jobs != null; return jobs; } @@ -325,7 +342,8 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasSc Slice range = myTxTemplate.execute(t -> { PageRequest page = PageRequest.of(0, PASS_SIZE); if (isNotBlank(theJob.getResourceType())) { - return myResourceTableDao.findIdsOfResourcesWithinUpdatedRangeOrderedFromOldest(page, theJob.getResourceType(), low, high); + return myResourceTableDao.findIdsOfResourcesWithinUpdatedRangeOrderedFromOldest( + page, theJob.getResourceType(), low, high); } else { return myResourceTableDao.findIdsOfResourcesWithinUpdatedRangeOrderedFromOldest(page, low, high); } @@ -341,10 +359,9 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasSc } // Submit each resource requiring reindexing - List> futures = range - .stream() - .map(t -> myTaskExecutor.submit(new ResourceReindexingTask(t, counter))) - .collect(Collectors.toList()); + List> futures = range.stream() + .map(t -> myTaskExecutor.submit(new ResourceReindexingTask(t, counter))) + .collect(Collectors.toList()); Date latestDate = null; for (Future next : futures) { @@ -375,7 +392,10 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasSc // Just in case we end up in some sort of infinite loop. This shouldn't happen, and couldn't really // happen unless there were 10000 resources with the exact same update time down to the // millisecond. 
- ourLog.error("Final pass time for reindex JOB[{}] has same ending low value: {}", theJob.getId(), latestDate); + ourLog.error( + "Final pass time for reindex JOB[{}] has same ending low value: {}", + theJob.getId(), + latestDate); } newLow = new Date(latestDate.getTime() + 1); @@ -385,13 +405,20 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasSc myTxTemplate.execute(t -> { myReindexJobDao.setThresholdLow(theJob.getId(), newLow); - Integer existingCount = myReindexJobDao.getReindexCount(theJob.getId()).orElse(0); + Integer existingCount = + myReindexJobDao.getReindexCount(theJob.getId()).orElse(0); int newCount = existingCount + counter.get(); myReindexJobDao.setReindexCount(theJob.getId(), newCount); return null; }); - ourLog.info("Completed pass of reindex JOB[{}] - Indexed {} resources in {} ({} / sec) - Have indexed until: {}", theJob.getId(), count, sw, sw.formatThroughput(count, TimeUnit.SECONDS), new InstantType(newLow)); + ourLog.info( + "Completed pass of reindex JOB[{}] - Indexed {} resources in {} ({} / sec) - Have indexed until: {}", + theJob.getId(), + count, + sw, + sw.formatThroughput(count, TimeUnit.SECONDS), + new InstantType(newLow)); return counter.get(); } @@ -418,7 +445,8 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasSc q.setParameter("id", theId); q.executeUpdate(); - q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamCoords t WHERE t.myResourcePid = :id"); + q = myEntityManager.createQuery( + "DELETE FROM ResourceIndexedSearchParamCoords t WHERE t.myResourcePid = :id"); q.setParameter("id", theId); q.executeUpdate(); @@ -426,23 +454,28 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasSc q.setParameter("id", theId); q.executeUpdate(); - q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamNumber t WHERE t.myResourcePid = :id"); + q = myEntityManager.createQuery( + "DELETE FROM ResourceIndexedSearchParamNumber t WHERE t.myResourcePid = :id"); q.setParameter("id", theId); q.executeUpdate(); - q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamQuantity t WHERE t.myResourcePid = :id"); + q = myEntityManager.createQuery( + "DELETE FROM ResourceIndexedSearchParamQuantity t WHERE t.myResourcePid = :id"); q.setParameter("id", theId); q.executeUpdate(); - q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamQuantityNormalized t WHERE t.myResourcePid = :id"); + q = myEntityManager.createQuery( + "DELETE FROM ResourceIndexedSearchParamQuantityNormalized t WHERE t.myResourcePid = :id"); q.setParameter("id", theId); q.executeUpdate(); - q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamString t WHERE t.myResourcePid = :id"); + q = myEntityManager.createQuery( + "DELETE FROM ResourceIndexedSearchParamString t WHERE t.myResourcePid = :id"); q.setParameter("id", theId); q.executeUpdate(); - q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamToken t WHERE t.myResourcePid = :id"); + q = myEntityManager.createQuery( + "DELETE FROM ResourceIndexedSearchParamToken t WHERE t.myResourcePid = :id"); q.setParameter("id", theId); q.executeUpdate(); @@ -486,7 +519,9 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasSc * not get this error, so we'll let the other one fail and try * again later. */ - ourLog.info("Failed to reindex because of a version conflict. 
Leaving in unindexed state: {}", e.getMessage()); + ourLog.info( + "Failed to reindex because of a version conflict. Leaving in unindexed state: {}", + e.getMessage()); reindexFailure = null; } @@ -502,7 +537,8 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasSc private Throwable readResourceAndReindex() { Throwable reindexFailure; reindexFailure = myTxTemplate.execute(t -> { - ResourceTable resourceTable = myResourceTableDao.findById(myNextId).orElseThrow(IllegalStateException::new); + ResourceTable resourceTable = + myResourceTableDao.findById(myNextId).orElseThrow(IllegalStateException::new); myUpdated = resourceTable.getUpdatedDate(); try { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/warm/CacheWarmingSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/warm/CacheWarmingSvcImpl.java index ca7d6dcf198..1cbc2a5e69f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/warm/CacheWarmingSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/warm/CacheWarmingSvcImpl.java @@ -41,25 +41,30 @@ import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; -import javax.annotation.PostConstruct; import java.util.ArrayList; import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; +import javax.annotation.PostConstruct; @Component public class CacheWarmingSvcImpl implements ICacheWarmingSvc, IHasScheduledJobs { private static final Logger ourLog = LoggerFactory.getLogger(CacheWarmingSvcImpl.class); + @Autowired private JpaStorageSettings myStorageSettings; + private Map myCacheEntryToNextRefresh = new LinkedHashMap<>(); + @Autowired private FhirContext myCtx; + @Autowired private DaoRegistry myDaoRegistry; + @Autowired private MatchUrlService myMatchUrlService; @@ -78,11 +83,8 @@ public class CacheWarmingSvcImpl implements ICacheWarmingSvc, IHasScheduledJobs // Set the next time to warm this search nextRefresh = nextCacheEntry.getPeriodMillis() + System.currentTimeMillis(); myCacheEntryToNextRefresh.put(nextCacheEntry, nextRefresh); - } - } - } private void refreshNow(WarmCacheEntry theCacheEntry) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/sp/ISearchParamPresenceSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/sp/ISearchParamPresenceSvc.java index d455ffe8304..fd275d7b555 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/sp/ISearchParamPresenceSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/sp/ISearchParamPresenceSvc.java @@ -28,5 +28,4 @@ import java.util.Collection; public interface ISearchParamPresenceSvc { AddRemoveCount updatePresence(ResourceTable theResource, Collection thePresenceEntities); - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/sp/SearchParamPresenceSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/sp/SearchParamPresenceSvcImpl.java index 09326ceadc9..001444b4c6d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/sp/SearchParamPresenceSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/sp/SearchParamPresenceSvcImpl.java @@ -50,7 +50,8 @@ public class SearchParamPresenceSvcImpl implements ISearchParamPresenceSvc { } @Override - public AddRemoveCount updatePresence(ResourceTable theResource, Collection thePresenceEntities) { + public 
AddRemoveCount updatePresence( + ResourceTable theResource, Collection thePresenceEntities) { AddRemoveCount retVal = new AddRemoveCount(); if (myStorageSettings.getIndexMissingFields() == JpaStorageSettings.IndexEnabledEnum.DISABLED) { return retVal; @@ -102,5 +103,4 @@ public class SearchParamPresenceSvcImpl implements ISearchParamPresenceSvc { return retVal; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseTermVersionAdapterSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseTermVersionAdapterSvcImpl.java index 2edb04b7afe..d1840ba0a9e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseTermVersionAdapterSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseTermVersionAdapterSvcImpl.java @@ -25,9 +25,8 @@ import org.hl7.fhir.r4.model.CodeSystem; public abstract class BaseTermVersionAdapterSvcImpl implements ITermVersionAdapterSvc { - protected void validateCodeSystemForStorage(CodeSystem theCodeSystemResource) { - ValidateUtil.isNotBlankOrThrowUnprocessableEntity(theCodeSystemResource.getUrl(), "Can not store a CodeSystem without a valid URL"); + ValidateUtil.isNotBlankOrThrowUnprocessableEntity( + theCodeSystemResource.getUrl(), "Can not store a CodeSystem without a valid URL"); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ExpansionFilter.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ExpansionFilter.java index d7d3402f506..f6f11baa8c8 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ExpansionFilter.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ExpansionFilter.java @@ -24,10 +24,10 @@ import ca.uhn.fhir.util.FhirVersionIndependentConcept; import org.apache.commons.lang3.Validate; import org.hl7.fhir.r4.model.ValueSet; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.Collections; import java.util.List; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.isNoneBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -49,14 +49,21 @@ class ExpansionFilter { /** * Constructor */ - ExpansionFilter(ExpansionFilter theExpansionFilter, List theFilters, Integer theMaxCount) { + ExpansionFilter( + ExpansionFilter theExpansionFilter, + List theFilters, + Integer theMaxCount) { this(theExpansionFilter.getSystem(), theExpansionFilter.getCode(), theFilters, theMaxCount); } /** * Constructor */ - ExpansionFilter(@Nullable String theSystem, @Nullable String theCode, @Nonnull List theFilters, Integer theMaxCount) { + ExpansionFilter( + @Nullable String theSystem, + @Nullable String theCode, + @Nonnull List theFilters, + Integer theMaxCount) { Validate.isTrue(isNotBlank(theSystem) == isNotBlank(theCode)); Validate.notNull(theFilters); @@ -101,10 +108,11 @@ class ExpansionFilter { public static ExpansionFilter fromFilterString(@Nullable String theFilter) { ExpansionFilter filter; if (isNoneBlank(theFilter)) { - List filters = Collections.singletonList(new ValueSet.ConceptSetFilterComponent() - .setProperty(JpaConstants.VALUESET_FILTER_DISPLAY) - .setOp(ValueSet.FilterOperator.EQUAL) - .setValue(theFilter)); + List filters = + Collections.singletonList(new ValueSet.ConceptSetFilterComponent() + .setProperty(JpaConstants.VALUESET_FILTER_DISPLAY) + .setOp(ValueSet.FilterOperator.EQUAL) + .setValue(theFilter)); filter = new ExpansionFilter(null, null, filters, null); } else { 
filter = ExpansionFilter.NO_FILTER; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IValueSetConceptAccumulator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IValueSetConceptAccumulator.java index 906a13f087f..f72f70c33ed 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IValueSetConceptAccumulator.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IValueSetConceptAccumulator.java @@ -21,16 +21,29 @@ package ca.uhn.fhir.jpa.term; import ca.uhn.fhir.jpa.entity.TermConceptDesignation; -import javax.annotation.Nullable; import java.util.Collection; +import javax.annotation.Nullable; public interface IValueSetConceptAccumulator { void addMessage(String theMessage); - void includeConcept(String theSystem, String theCode, String theDisplay, Long theSourceConceptPid, String theSourceConceptDirectParentPids, @Nullable String theSystemVersion); + void includeConcept( + String theSystem, + String theCode, + String theDisplay, + Long theSourceConceptPid, + String theSourceConceptDirectParentPids, + @Nullable String theSystemVersion); - void includeConceptWithDesignations(String theSystem, String theCode, String theDisplay, @Nullable Collection theDesignations, Long theSourceConceptPid, String theSourceConceptDirectParentPids, @Nullable String theSystemVersion); + void includeConceptWithDesignations( + String theSystem, + String theCode, + String theDisplay, + @Nullable Collection theDesignations, + Long theSourceConceptPid, + String theSourceConceptDirectParentPids, + @Nullable String theSystemVersion); /** * @return Returns true if the code was actually present and was removed @@ -67,5 +80,4 @@ public interface IValueSetConceptAccumulator { default void incrementOrDecrementTotalConcepts(boolean theAdd, int theDelta) { // nothing } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IZipContentsHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IZipContentsHandler.java index e64af4f4f4d..af941b3daad 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IZipContentsHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IZipContentsHandler.java @@ -26,5 +26,4 @@ import java.io.Reader; public interface IZipContentsHandler { void handle(Reader theReader, String theFilename) throws IOException; - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IZipContentsHandlerCsv.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IZipContentsHandlerCsv.java index f025c719ceb..52e397a7043 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IZipContentsHandlerCsv.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/IZipContentsHandlerCsv.java @@ -22,5 +22,5 @@ package ca.uhn.fhir.jpa.term; import org.apache.commons.csv.CSVRecord; public interface IZipContentsHandlerCsv { - void accept(CSVRecord theRecord); + void accept(CSVRecord theRecord); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/LoadedFileDescriptors.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/LoadedFileDescriptors.java index 3d4d8b27df7..4a0bf394196 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/LoadedFileDescriptors.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/LoadedFileDescriptors.java @@ -85,19 +85,16 @@ public class LoadedFileDescriptors implements Closeable { } else { 
myUncompressedFileDescriptors.add(next); } - } } catch (IOException e) { throw new InternalErrorException(Msg.code(861) + e); } - } public boolean hasFile(String theFilename) { - return myUncompressedFileDescriptors - .stream() - .map(t -> t.getFilename().replaceAll(".*[\\\\/]", "")) // Strip the path from the filename - .anyMatch(t -> t.equals(theFilename)); + return myUncompressedFileDescriptors.stream() + .map(t -> t.getFilename().replaceAll(".*[\\\\/]", "")) // Strip the path from the filename + .anyMatch(t -> t.equals(theFilename)); } @Override @@ -131,7 +128,8 @@ public class LoadedFileDescriptors implements Closeable { void verifyMandatoryFilesExist(List theExpectedFilenameFragments) { List notFound = notFound(theExpectedFilenameFragments); if (!notFound.isEmpty()) { - throw new UnprocessableEntityException(Msg.code(862) + "Could not find the following mandatory files in input: " + notFound); + throw new UnprocessableEntityException( + Msg.code(862) + "Could not find the following mandatory files in input: " + notFound); } } @@ -158,13 +156,14 @@ public class LoadedFileDescriptors implements Closeable { if (!multiPartFilesFound && !singlePartFilesFound) { msg = "Could not find any of the PartLink files: " + notFoundMulti + " nor " + notFoundSingle; } else { - msg = "Only either the single PartLink file or the split PartLink files can be present. Found both the single PartLink file, " + theSinglePartLinkFile + ", and the split PartLink files: " + theMultiPartLinkFiles; + msg = + "Only either the single PartLink file or the split PartLink files can be present. Found both the single PartLink file, " + + theSinglePartLinkFile + ", and the split PartLink files: " + theMultiPartLinkFiles; } throw new UnprocessableEntityException(Msg.code(863) + msg); } } - private static class NonClosableBOMInputStream extends BOMInputStream { NonClosableBOMInputStream(InputStream theWrap) { super(theWrap); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java index a4f0d767cd3..fe35f7c8d4d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java @@ -65,10 +65,6 @@ import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.support.TransactionSynchronizationManager; -import javax.annotation.Nonnull; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.PersistenceContextType; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -85,6 +81,10 @@ import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; +import javax.persistence.PersistenceContextType; import static ca.uhn.fhir.jpa.api.dao.IDao.RESOURCE_PID_KEY; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -94,39 +94,52 @@ import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LO public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { private static final Logger ourLog = 
LoggerFactory.getLogger(TermCodeSystemStorageSvcImpl.class); private static final Object PLACEHOLDER_OBJECT = new Object(); + @PersistenceContext(type = PersistenceContextType.TRANSACTION) protected EntityManager myEntityManager; + @Autowired protected ITermCodeSystemDao myCodeSystemDao; + @Autowired protected ITermCodeSystemVersionDao myCodeSystemVersionDao; + @Autowired protected ITermConceptDao myConceptDao; + @Autowired protected ITermConceptPropertyDao myConceptPropertyDao; + @Autowired protected ITermConceptDesignationDao myConceptDesignationDao; + @Autowired protected IIdHelperService myIdHelperService; + @Autowired private ITermConceptParentChildLinkDao myConceptParentChildLinkDao; + @Autowired private ITermVersionAdapterSvc myTerminologyVersionAdapterSvc; + @Autowired private ITermDeferredStorageSvc myDeferredStorageSvc; + @Autowired private FhirContext myContext; + @Autowired private ITermReadSvc myTerminologySvc; + @Autowired private JpaStorageSettings myStorageSettings; + @Autowired private IResourceTableDao myResourceTableDao; @Autowired private TermConceptDaoSvc myTermConceptDaoSvc; - @Transactional @Override public UploadStatistics applyDeltaCodeSystemsAdd(String theSystem, CustomTerminologySet theAdditions) { @@ -152,7 +165,9 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { CodeSystem codeSystem = myTerminologySvc.fetchCanonicalCodeSystemFromCompleteContext(theSystem); if (codeSystem.getContent() != CodeSystem.CodeSystemContentMode.NOTPRESENT) { - throw new InvalidRequestException(Msg.code(844) + "CodeSystem with url[" + Constants.codeSystemWithDefaultDescription(theSystem) + "] can not apply a delta - wrong content mode: " + codeSystem.getContent()); + throw new InvalidRequestException( + Msg.code(844) + "CodeSystem with url[" + Constants.codeSystemWithDefaultDescription(theSystem) + + "] can not apply a delta - wrong content mode: " + codeSystem.getContent()); } Validate.notNull(cs); @@ -186,29 +201,27 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { AtomicInteger removeCounter = new AtomicInteger(0); - //We need to delete all termconcepts, and their children. This stream flattens the TermConcepts and their - //children into a single set of TermConcept objects retrieved from the DB. Note that we have to do this because - //deleteById() in JPA doesnt appear to actually commit or flush a transaction until way later, and we end up - //iterating multiple times over the same elements, which screws up our counter. + // We need to delete all termconcepts, and their children. This stream flattens the TermConcepts and their + // children into a single set of TermConcept objects retrieved from the DB. Note that we have to do this because + // deleteById() in JPA doesnt appear to actually commit or flush a transaction until way later, and we end up + // iterating multiple times over the same elements, which screws up our counter. 
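The re-wrapped comment above carries the reasoning: because deleteById() does not flush right away, the TermConcept tree is flattened into one collection up front so each concept is deleted (and counted) exactly once. A minimal sketch of that flatten-first idea, using a hypothetical Node type in place of the real TermConcept entity:

    import java.util.ArrayList;
    import java.util.List;

    class Node {
        final String code;
        final List<Node> children = new ArrayList<>();

        Node(String theCode) {
            code = theCode;
        }

        // Depth-first flatten: the parent is emitted before its subtree, the same
        // order flattenChildren() produces further down in this file.
        static List<Node> flatten(Node theConcept) {
            List<Node> out = new ArrayList<>();
            out.add(theConcept);
            for (Node child : theConcept.children) {
                out.addAll(flatten(child));
            }
            return out;
        }
    }

Collecting the whole tree first means the remove counter is incremented once per concept even though the underlying deletes may only be flushed later in the transaction.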
- - //Grab the actual entities + // Grab the actual entities List collect = theValue.getRootConcepts().stream() - .map(val -> myTerminologySvc.findCode(theSystem, val.getCode())) - .filter(Optional::isPresent) - .map(Optional::get) - .collect(Collectors.toList()); + .map(val -> myTerminologySvc.findCode(theSystem, val.getCode())) + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.toList()); - //Iterate over the actual entities and fill out their children - Set allFoundTermConcepts = collect - .stream() - .flatMap(concept -> flattenChildren(concept).stream()) - .map(suppliedTermConcept -> myTerminologySvc.findCode(theSystem, suppliedTermConcept.getCode())) - .filter(Optional::isPresent) - .map(Optional::get) - .collect(Collectors.toSet()); + // Iterate over the actual entities and fill out their children + Set allFoundTermConcepts = collect.stream() + .flatMap(concept -> flattenChildren(concept).stream()) + .map(suppliedTermConcept -> myTerminologySvc.findCode(theSystem, suppliedTermConcept.getCode())) + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.toSet()); - //Delete everything about these codes. + // Delete everything about these codes. for (TermConcept code : allFoundTermConcepts) { deleteEverythingRelatedToConcept(code, removeCounter); } @@ -239,7 +252,7 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { ourLog.info("Deleting concept {} - Code {}", theConcept.getId(), theConcept.getCode()); myConceptDao.deleteById(theConcept.getId()); -// myEntityManager.remove(theConcept); + // myEntityManager.remove(theConcept); theRemoveCounter.incrementAndGet(); } @@ -249,13 +262,13 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { return Arrays.asList(theTermConcept); } - //Recursively flatten children + // Recursively flatten children List childTermConcepts = theTermConcept.getChildren().stream() - .map(TermConceptParentChildLink::getChild) - .flatMap(childConcept -> flattenChildren(childConcept).stream()) - .collect(Collectors.toList()); + .map(TermConceptParentChildLink::getChild) + .flatMap(childConcept -> flattenChildren(childConcept).stream()) + .collect(Collectors.toList()); - //Add itself before its list of children + // Add itself before its list of children childTermConcepts.add(0, theTermConcept); return childTermConcepts; } @@ -270,13 +283,19 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { @Override @Transactional(propagation = Propagation.MANDATORY) - public void storeNewCodeSystemVersionIfNeeded(CodeSystem theCodeSystem, ResourceTable theResourceEntity, RequestDetails theRequestDetails) { + public void storeNewCodeSystemVersionIfNeeded( + CodeSystem theCodeSystem, ResourceTable theResourceEntity, RequestDetails theRequestDetails) { if (theCodeSystem != null && isNotBlank(theCodeSystem.getUrl())) { String codeSystemUrl = theCodeSystem.getUrl(); - if (theCodeSystem.getContent() == CodeSystem.CodeSystemContentMode.COMPLETE || theCodeSystem.getContent() == null || theCodeSystem.getContent() == CodeSystem.CodeSystemContentMode.NOTPRESENT) { - ourLog.info("CodeSystem {} has a status of {}, going to store concepts in terminology tables", theResourceEntity.getIdDt().getValue(), theCodeSystem.getContentElement().getValueAsString()); + if (theCodeSystem.getContent() == CodeSystem.CodeSystemContentMode.COMPLETE + || theCodeSystem.getContent() == null + || theCodeSystem.getContent() == CodeSystem.CodeSystemContentMode.NOTPRESENT) { + 
ourLog.info( + "CodeSystem {} has a status of {}, going to store concepts in terminology tables", + theResourceEntity.getIdDt().getValue(), + theCodeSystem.getContentElement().getValueAsString()); - Long pid = (Long)theCodeSystem.getUserData(RESOURCE_PID_KEY); + Long pid = (Long) theCodeSystem.getUserData(RESOURCE_PID_KEY); assert pid != null; JpaPid codeSystemResourcePid = JpaPid.fromId(pid); @@ -289,9 +308,15 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { if (theCodeSystem.getContent() == CodeSystem.CodeSystemContentMode.NOTPRESENT) { TermCodeSystem termCodeSystem = myCodeSystemDao.findByCodeSystemUri(theCodeSystem.getUrl()); if (termCodeSystem != null) { - TermCodeSystemVersion codeSystemVersion = getExistingTermCodeSystemVersion(termCodeSystem.getPid(), theCodeSystem.getVersion()); + TermCodeSystemVersion codeSystemVersion = + getExistingTermCodeSystemVersion(termCodeSystem.getPid(), theCodeSystem.getVersion()); if (codeSystemVersion != null) { - TermCodeSystem myCodeSystemEntity = getOrCreateDistinctTermCodeSystem(codeSystemResourcePid, theCodeSystem.getUrl(), theCodeSystem.getUrl(), theCodeSystem.getVersion(), theResourceEntity); + TermCodeSystem myCodeSystemEntity = getOrCreateDistinctTermCodeSystem( + codeSystemResourcePid, + theCodeSystem.getUrl(), + theCodeSystem.getUrl(), + theCodeSystem.getVersion(), + theResourceEntity); return; } } @@ -303,17 +328,26 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { persCs.getConcepts().addAll(TermReadSvcImpl.toPersistedConcepts(theCodeSystem.getConcept(), persCs)); ourLog.debug("Code system has {} concepts", persCs.getConcepts().size()); - storeNewCodeSystemVersion(codeSystemResourcePid, codeSystemUrl, theCodeSystem.getName(), - theCodeSystem.getVersion(), persCs, theResourceEntity, theRequestDetails); + storeNewCodeSystemVersion( + codeSystemResourcePid, + codeSystemUrl, + theCodeSystem.getName(), + theCodeSystem.getVersion(), + persCs, + theResourceEntity, + theRequestDetails); } - } } @Override @Transactional - public IIdType storeNewCodeSystemVersion(CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion, - RequestDetails theRequest, List theValueSets, List theConceptMaps) { + public IIdType storeNewCodeSystemVersion( + CodeSystem theCodeSystemResource, + TermCodeSystemVersion theCodeSystemVersion, + RequestDetails theRequest, + List theValueSets, + List theConceptMaps) { assert TransactionSynchronizationManager.isActualTransactionActive(); Validate.notBlank(theCodeSystemResource.getUrl(), "theCodeSystemResource must have a URL"); @@ -321,15 +355,22 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { // Note that this creates the TermCodeSystem and TermCodeSystemVersion entities if needed IIdType csId = myTerminologyVersionAdapterSvc.createOrUpdateCodeSystem(theCodeSystemResource, theRequest); - JpaPid codeSystemResourcePid = myIdHelperService.resolveResourcePersistentIds(RequestPartitionId.allPartitions(), csId.getResourceType(), csId.getIdPart()); + JpaPid codeSystemResourcePid = myIdHelperService.resolveResourcePersistentIds( + RequestPartitionId.allPartitions(), csId.getResourceType(), csId.getIdPart()); ResourceTable resource = myResourceTableDao.getOne(codeSystemResourcePid.getId()); ourLog.info("CodeSystem resource has ID: {}", csId.getValue()); populateCodeSystemVersionProperties(theCodeSystemVersion, theCodeSystemResource, resource); - storeNewCodeSystemVersion(codeSystemResourcePid, 
theCodeSystemResource.getUrl(), theCodeSystemResource.getName(), - theCodeSystemResource.getVersion(), theCodeSystemVersion, resource, theRequest); + storeNewCodeSystemVersion( + codeSystemResourcePid, + theCodeSystemResource.getUrl(), + theCodeSystemResource.getName(), + theCodeSystemResource.getVersion(), + theCodeSystemVersion, + resource, + theRequest); myDeferredStorageSvc.addConceptMapsToStorageQueue(theConceptMaps); myDeferredStorageSvc.addValueSetsToStorageQueue(theValueSets); @@ -339,9 +380,14 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { @Override @Transactional - public void storeNewCodeSystemVersion(IResourcePersistentId theCodeSystemResourcePid, String theSystemUri, - String theSystemName, String theCodeSystemVersionId, TermCodeSystemVersion theCodeSystemVersion, - ResourceTable theCodeSystemResourceTable, RequestDetails theRequestDetails) { + public void storeNewCodeSystemVersion( + IResourcePersistentId theCodeSystemResourcePid, + String theSystemUri, + String theSystemName, + String theCodeSystemVersionId, + TermCodeSystemVersion theCodeSystemVersion, + ResourceTable theCodeSystemResourceTable, + RequestDetails theRequestDetails) { assert TransactionSynchronizationManager.isActualTransactionActive(); ourLog.debug("Storing code system"); @@ -350,11 +396,18 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { ValidateUtil.isTrueOrThrowInvalidRequest(codeSystemToStore.getResource() != null, "No resource supplied"); ValidateUtil.isNotBlankOrThrowInvalidRequest(theSystemUri, "No system URI supplied"); - TermCodeSystem codeSystem = getOrCreateDistinctTermCodeSystem(theCodeSystemResourcePid, theSystemUri, theSystemName, theCodeSystemVersionId, theCodeSystemResourceTable); + TermCodeSystem codeSystem = getOrCreateDistinctTermCodeSystem( + theCodeSystemResourcePid, + theSystemUri, + theSystemName, + theCodeSystemVersionId, + theCodeSystemResourceTable); - List existing = myCodeSystemVersionDao.findByCodeSystemResourcePid(((JpaPid)theCodeSystemResourcePid).getId()); + List existing = + myCodeSystemVersionDao.findByCodeSystemResourcePid(((JpaPid) theCodeSystemResourcePid).getId()); for (TermCodeSystemVersion next : existing) { - if (Objects.equals(next.getCodeSystemVersionId(), theCodeSystemVersionId) && myConceptDao.countByCodeSystemVersion(next.getPid()) == 0) { + if (Objects.equals(next.getCodeSystemVersionId(), theCodeSystemVersionId) + && myConceptDao.countByCodeSystemVersion(next.getPid()) == 0) { /* * If we already have a CodeSystemVersion that matches the version we're storing, we @@ -375,7 +428,6 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { next.setCodeSystemVersionId("DELETED_" + UUID.randomUUID().toString()); myCodeSystemVersionDao.saveAndFlush(next); myDeferredStorageSvc.deleteCodeSystemVersion(next); - } } @@ -428,31 +480,40 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { } } - - private TermCodeSystemVersion getExistingTermCodeSystemVersion(Long theCodeSystemVersionPid, String theCodeSystemVersion) { + private TermCodeSystemVersion getExistingTermCodeSystemVersion( + Long theCodeSystemVersionPid, String theCodeSystemVersion) { TermCodeSystemVersion existing; if (theCodeSystemVersion == null) { existing = myCodeSystemVersionDao.findByCodeSystemPidVersionIsNull(theCodeSystemVersionPid); } else { - existing = myCodeSystemVersionDao.findByCodeSystemPidAndVersion(theCodeSystemVersionPid, theCodeSystemVersion); + existing = + 
myCodeSystemVersionDao.findByCodeSystemPidAndVersion(theCodeSystemVersionPid, theCodeSystemVersion); } return existing; } - private void validateDstu3OrNewer() { - Validate.isTrue(myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3), "Terminology operations only supported in DSTU3+ mode"); + Validate.isTrue( + myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3), + "Terminology operations only supported in DSTU3+ mode"); } - private void addConceptInHierarchy(TermCodeSystemVersion theCsv, Collection theParentCodes, TermConcept theConceptToAdd, UploadStatistics theStatisticsTracker, Map theCodeToConcept, int theSequence) { + private void addConceptInHierarchy( + TermCodeSystemVersion theCsv, + Collection theParentCodes, + TermConcept theConceptToAdd, + UploadStatistics theStatisticsTracker, + Map theCodeToConcept, + int theSequence) { TermConcept conceptToAdd = theConceptToAdd; List childrenToAdd = theConceptToAdd.getChildren(); String nextCodeToAdd = conceptToAdd.getCode(); String parentDescription = "(root concept)"; - ourLog.info("Saving concept {} with parent {}", theStatisticsTracker.getUpdatedConceptCount(), parentDescription); + ourLog.info( + "Saving concept {} with parent {}", theStatisticsTracker.getUpdatedConceptCount(), parentDescription); Optional existingCodeOpt = myConceptDao.findByCodeSystemAndCode(theCsv.getPid(), nextCodeToAdd); List existingParentLinks; @@ -470,16 +531,20 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { for (String nextParentCode : theParentCodes) { // Don't add parent links that already exist for the code - if (existingParentLinks.stream().anyMatch(t -> t.getParent().getCode().equals(nextParentCode))) { + if (existingParentLinks.stream() + .anyMatch(t -> t.getParent().getCode().equals(nextParentCode))) { continue; } TermConcept nextParentOpt = theCodeToConcept.get(nextParentCode); if (nextParentOpt == null) { - nextParentOpt = myConceptDao.findByCodeSystemAndCode(theCsv.getPid(), nextParentCode).orElse(null); + nextParentOpt = myConceptDao + .findByCodeSystemAndCode(theCsv.getPid(), nextParentCode) + .orElse(null); } if (nextParentOpt == null) { - throw new InvalidRequestException(Msg.code(846) + "Unable to add code \"" + nextCodeToAdd + "\" to unknown parent: " + nextParentCode); + throw new InvalidRequestException(Msg.code(846) + "Unable to add code \"" + nextCodeToAdd + + "\" to unknown parent: " + nextParentCode); } parentConceptsWeShouldLinkTo.add(nextParentOpt); } @@ -519,14 +584,17 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { parentLink.setRelationshipType(TermConceptParentChildLink.RelationshipTypeEnum.ISA); nextParentConcept.getChildren().add(parentLink); conceptToAdd.getParents().add(parentLink); - ourLog.info("Saving parent/child link - Parent[{}] Child[{}]", parentLink.getParent().getCode(), parentLink.getChild().getCode()); + ourLog.info( + "Saving parent/child link - Parent[{}] Child[{}]", + parentLink.getParent().getCode(), + parentLink.getChild().getCode()); - if (theStatisticsTracker.getUpdatedConceptCount() <= myStorageSettings.getDeferIndexingForCodesystemsOfSize()) { + if (theStatisticsTracker.getUpdatedConceptCount() + <= myStorageSettings.getDeferIndexingForCodesystemsOfSize()) { myConceptParentChildLinkDao.save(parentLink); } else { myDeferredStorageSvc.addConceptLinkToStorageQueue(parentLink); } - } ourLog.trace("About to save parent-child links"); @@ -539,10 +607,13 @@ public class 
TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { for (int i = 0; i < nextChild.getParents().size(); i++) { if (nextChild.getParents().get(i).getId() == null) { - String parentCode = nextChild.getParents().get(i).getParent().getCode(); + String parentCode = + nextChild.getParents().get(i).getParent().getCode(); TermConcept parentConcept = theCodeToConcept.get(parentCode); if (parentConcept == null) { - parentConcept = myConceptDao.findByCodeSystemAndCode(theCsv.getPid(), parentCode).orElse(null); + parentConcept = myConceptDao + .findByCodeSystemAndCode(theCsv.getPid(), parentCode) + .orElse(null); } if (parentConcept == null) { throw new IllegalArgumentException(Msg.code(847) + "Unknown parent code: " + parentCode); @@ -552,22 +623,29 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { } } - Collection parentCodes = nextChild.getParents().stream().map(t -> t.getParent().getCode()).collect(Collectors.toList()); + Collection parentCodes = nextChild.getParents().stream() + .map(t -> t.getParent().getCode()) + .collect(Collectors.toList()); addConceptInHierarchy(theCsv, parentCodes, nextChild, theStatisticsTracker, theCodeToConcept, childIndex); childIndex++; } - } - private void persistChildren(TermConcept theConcept, TermCodeSystemVersion theCodeSystem, IdentityHashMap theConceptsStack, int theTotalConcepts) { + private void persistChildren( + TermConcept theConcept, + TermCodeSystemVersion theCodeSystem, + IdentityHashMap theConceptsStack, + int theTotalConcepts) { if (theConceptsStack.put(theConcept, PLACEHOLDER_OBJECT) != null) { return; } if ((theConceptsStack.size() + 1) % 10000 == 0) { float pct = (float) theConceptsStack.size() / (float) theTotalConcepts; - ourLog.info("Have processed {}/{} concepts ({}%)", theConceptsStack.size(), theTotalConcepts, (int) (pct * 100.0f)); + ourLog.info( + "Have processed {}/{} concepts ({}%)", + theConceptsStack.size(), theTotalConcepts, (int) (pct * 100.0f)); } theConcept.setCodeSystemVersion(theCodeSystem); @@ -590,7 +668,6 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { myDeferredStorageSvc.addConceptLinkToStorageQueue(next); } } - } private void populateVersion(TermConcept theNext, TermCodeSystemVersion theCodeSystemVersion) { @@ -629,15 +706,21 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { } @Nonnull - private TermCodeSystem getOrCreateDistinctTermCodeSystem(IResourcePersistentId theCodeSystemResourcePid, String theSystemUri, String theSystemName, String theSystemVersionId, ResourceTable theCodeSystemResourceTable) { + private TermCodeSystem getOrCreateDistinctTermCodeSystem( + IResourcePersistentId theCodeSystemResourcePid, + String theSystemUri, + String theSystemName, + String theSystemVersionId, + ResourceTable theCodeSystemResourceTable) { TermCodeSystem codeSystem = myCodeSystemDao.findByCodeSystemUri(theSystemUri); if (codeSystem == null) { - codeSystem = myCodeSystemDao.findByResourcePid(((JpaPid)theCodeSystemResourcePid).getId()); + codeSystem = myCodeSystemDao.findByResourcePid(((JpaPid) theCodeSystemResourcePid).getId()); if (codeSystem == null) { codeSystem = new TermCodeSystem(); } } else { - checkForCodeSystemVersionDuplicate(codeSystem, theSystemUri, theSystemVersionId, theCodeSystemResourceTable); + checkForCodeSystemVersionDuplicate( + codeSystem, theSystemUri, theSystemVersionId, theCodeSystemResourceTable); } codeSystem.setResource(theCodeSystemResourceTable); @@ -647,20 +730,46 @@ public class 
TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { return codeSystem; } - private void checkForCodeSystemVersionDuplicate(TermCodeSystem theCodeSystem, String theSystemUri, String theSystemVersionId, ResourceTable theCodeSystemResourceTable) { + private void checkForCodeSystemVersionDuplicate( + TermCodeSystem theCodeSystem, + String theSystemUri, + String theSystemVersionId, + ResourceTable theCodeSystemResourceTable) { TermCodeSystemVersion codeSystemVersionEntity; String msg = null; if (theSystemVersionId == null) { // Check if a non-versioned TermCodeSystemVersion entity already exists for this TermCodeSystem. codeSystemVersionEntity = myCodeSystemVersionDao.findByCodeSystemPidVersionIsNull(theCodeSystem.getPid()); if (codeSystemVersionEntity != null) { - msg = myContext.getLocalizer().getMessage(TermReadSvcImpl.class, "cannotCreateDuplicateCodeSystemUrl", theSystemUri, codeSystemVersionEntity.getResource().getIdDt().toUnqualifiedVersionless().getValue()); + msg = myContext + .getLocalizer() + .getMessage( + TermReadSvcImpl.class, + "cannotCreateDuplicateCodeSystemUrl", + theSystemUri, + codeSystemVersionEntity + .getResource() + .getIdDt() + .toUnqualifiedVersionless() + .getValue()); } } else { // Check if a TermCodeSystemVersion entity already exists for this TermCodeSystem and version. - codeSystemVersionEntity = myCodeSystemVersionDao.findByCodeSystemPidAndVersion(theCodeSystem.getPid(), theSystemVersionId); + codeSystemVersionEntity = + myCodeSystemVersionDao.findByCodeSystemPidAndVersion(theCodeSystem.getPid(), theSystemVersionId); if (codeSystemVersionEntity != null) { - msg = myContext.getLocalizer().getMessage(TermReadSvcImpl.class, "cannotCreateDuplicateCodeSystemUrlAndVersion", theSystemUri, theSystemVersionId, codeSystemVersionEntity.getResource().getIdDt().toUnqualifiedVersionless().getValue()); + msg = myContext + .getLocalizer() + .getMessage( + TermReadSvcImpl.class, + "cannotCreateDuplicateCodeSystemUrlAndVersion", + theSystemUri, + theSystemVersionId, + codeSystemVersionEntity + .getResource() + .getIdDt() + .toUnqualifiedVersionless() + .getValue()); } } // Throw exception if the TermCodeSystemVersion is being duplicated. 
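Condensed, the duplicate check wrapped above is a two-branch lookup followed by a rejection. A simplified sketch, reusing the DAO and entity names from the patch but with a plain exception message in place of the localized cannotCreateDuplicate* texts:

    private void rejectDuplicateVersion(TermCodeSystem theCodeSystem, String theSystemUri, @Nullable String theVersionId) {
        TermCodeSystemVersion existing = (theVersionId == null)
                // no version supplied: only one non-versioned entry may exist per code system
                ? myCodeSystemVersionDao.findByCodeSystemPidVersionIsNull(theCodeSystem.getPid())
                // version supplied: that exact (system, version) pair must not exist yet
                : myCodeSystemVersionDao.findByCodeSystemPidAndVersion(theCodeSystem.getPid(), theVersionId);
        if (existing != null) {
            throw new UnprocessableEntityException("CodeSystem " + theSystemUri
                    + (theVersionId == null ? "" : " version " + theVersionId)
                    + " already exists as "
                    + existing.getResource().getIdDt().toUnqualifiedVersionless().getValue());
        }
    }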
@@ -671,21 +780,29 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { } } - private void populateCodeSystemVersionProperties(TermCodeSystemVersion theCodeSystemVersion, CodeSystem theCodeSystemResource, ResourceTable theResourceTable) { + private void populateCodeSystemVersionProperties( + TermCodeSystemVersion theCodeSystemVersion, + CodeSystem theCodeSystemResource, + ResourceTable theResourceTable) { theCodeSystemVersion.setResource(theResourceTable); theCodeSystemVersion.setCodeSystemDisplayName(theCodeSystemResource.getName()); theCodeSystemVersion.setCodeSystemVersionId(theCodeSystemResource.getVersion()); } - - private int validateConceptForStorage(TermConcept theConcept, TermCodeSystemVersion theCodeSystemVersion, ArrayList theConceptsStack, - IdentityHashMap theAllConcepts) { - ValidateUtil.isTrueOrThrowInvalidRequest(theConcept.getCodeSystemVersion() != null, "CodeSystemVersion is null"); - ValidateUtil.isNotBlankOrThrowInvalidRequest(theConcept.getCode(), "CodeSystem contains a code with no code value"); + private int validateConceptForStorage( + TermConcept theConcept, + TermCodeSystemVersion theCodeSystemVersion, + ArrayList theConceptsStack, + IdentityHashMap theAllConcepts) { + ValidateUtil.isTrueOrThrowInvalidRequest( + theConcept.getCodeSystemVersion() != null, "CodeSystemVersion is null"); + ValidateUtil.isNotBlankOrThrowInvalidRequest( + theConcept.getCode(), "CodeSystem contains a code with no code value"); theConcept.setCodeSystemVersion(theCodeSystemVersion); if (theConceptsStack.contains(theConcept.getCode())) { - throw new InvalidRequestException(Msg.code(849) + "CodeSystem contains circular reference around code " + theConcept.getCode()); + throw new InvalidRequestException( + Msg.code(849) + "CodeSystem contains circular reference around code " + theConcept.getCode()); } theConceptsStack.add(theConcept.getCode()); @@ -699,13 +816,12 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { for (TermConceptParentChildLink next : theConcept.getChildren()) { next.setCodeSystem(theCodeSystemVersion); - retVal += validateConceptForStorage(next.getChild(), theCodeSystemVersion, theConceptsStack, theAllConcepts); + retVal += + validateConceptForStorage(next.getChild(), theCodeSystemVersion, theConceptsStack, theAllConcepts); } theConceptsStack.remove(theConceptsStack.size() - 1); return retVal; } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermConceptDaoSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermConceptDaoSvc.java index ace6483f35e..09cd7591641 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermConceptDaoSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermConceptDaoSvc.java @@ -79,7 +79,6 @@ public class TermConceptDaoSvc { for (TermConceptDesignation next : theConcept.getDesignations()) { myConceptDesignationDao.save(next); } - } ourLog.trace("Saved {} and got PID {}", theConcept.getCode(), theConcept.getId()); @@ -106,4 +105,3 @@ public class TermConceptDaoSvc { return retVal; } } - diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermConceptMappingSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermConceptMappingSvcImpl.java index b35e5a7086f..d4c7928b62a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermConceptMappingSvcImpl.java +++ 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermConceptMappingSvcImpl.java @@ -66,6 +66,11 @@ import org.springframework.data.domain.Pageable; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Optional; +import java.util.Set; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import javax.persistence.PersistenceContextType; @@ -75,11 +80,6 @@ import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.Join; import javax.persistence.criteria.Predicate; import javax.persistence.criteria.Root; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Optional; -import java.util.Set; import static ca.uhn.fhir.jpa.term.TermReadSvcImpl.isPlaceholder; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -91,20 +91,28 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { private static boolean ourLastResultsFromTranslationCache; // For testing. private static boolean ourLastResultsFromTranslationWithReverseCache; // For testing. private final int myFetchSize = TermReadSvcImpl.DEFAULT_FETCH_SIZE; + @Autowired protected ITermConceptMapDao myConceptMapDao; + @Autowired protected ITermConceptMapGroupDao myConceptMapGroupDao; + @Autowired protected ITermConceptMapGroupElementDao myConceptMapGroupElementDao; + @Autowired protected ITermConceptMapGroupElementTargetDao myConceptMapGroupElementTargetDao; + @PersistenceContext(type = PersistenceContextType.TRANSACTION) protected EntityManager myEntityManager; + @Autowired private FhirContext myContext; + @Autowired private MemoryCacheService myMemoryCacheService; + @Autowired private IIdHelperService myIdHelperService; @@ -136,20 +144,29 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { ValidateUtil.isTrueOrThrowInvalidRequest(theResourceTable != null, "No resource supplied"); if (isPlaceholder(theConceptMap)) { - ourLog.info("Not storing TermConceptMap for placeholder {}", theConceptMap.getIdElement().toVersionless().getValueAsString()); + ourLog.info( + "Not storing TermConceptMap for placeholder {}", + theConceptMap.getIdElement().toVersionless().getValueAsString()); return; } - ValidateUtil.isNotBlankOrThrowUnprocessableEntity(theConceptMap.getUrl(), "ConceptMap has no value for ConceptMap.url"); - ourLog.info("Storing TermConceptMap for {}", theConceptMap.getIdElement().toVersionless().getValueAsString()); + ValidateUtil.isNotBlankOrThrowUnprocessableEntity( + theConceptMap.getUrl(), "ConceptMap has no value for ConceptMap.url"); + ourLog.info( + "Storing TermConceptMap for {}", + theConceptMap.getIdElement().toVersionless().getValueAsString()); TermConceptMap termConceptMap = new TermConceptMap(); termConceptMap.setResource(theResourceTable); termConceptMap.setUrl(theConceptMap.getUrl()); termConceptMap.setVersion(theConceptMap.getVersion()); - String source = theConceptMap.hasSourceUriType() ? theConceptMap.getSourceUriType().getValueAsString() : null; - String target = theConceptMap.hasTargetUriType() ? theConceptMap.getTargetUriType().getValueAsString() : null; + String source = theConceptMap.hasSourceUriType() + ? theConceptMap.getSourceUriType().getValueAsString() + : null; + String target = theConceptMap.hasTargetUriType() + ? 
theConceptMap.getTargetUriType().getValueAsString() + : null; /* * If this is a mapping between "resources" instead of purely between @@ -159,8 +176,8 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { * See here for a description of what that is: * http://hl7.org/fhir/conceptmap.html#bnr */ - if ("StructureDefinition".equals(new IdType(source).getResourceType()) || - "StructureDefinition".equals(new IdType(target).getResourceType())) { + if ("StructureDefinition".equals(new IdType(source).getResourceType()) + || "StructureDefinition".equals(new IdType(target).getResourceType())) { return; } @@ -185,7 +202,8 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { if (isBlank(conceptMapVersion)) { optionalExistingTermConceptMapByUrl = myConceptMapDao.findTermConceptMapByUrlAndNullVersion(conceptMapUrl); } else { - optionalExistingTermConceptMapByUrl = myConceptMapDao.findTermConceptMapByUrlAndVersion(conceptMapUrl, conceptMapVersion); + optionalExistingTermConceptMapByUrl = + myConceptMapDao.findTermConceptMapByUrlAndVersion(conceptMapUrl, conceptMapVersion); } if (!optionalExistingTermConceptMapByUrl.isPresent()) { try { @@ -209,7 +227,8 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { groupSource = source; } if (isBlank(groupSource)) { - throw new UnprocessableEntityException(Msg.code(838) + "ConceptMap[url='" + theConceptMap.getUrl() + "'] contains at least one group without a value in ConceptMap.group.source"); + throw new UnprocessableEntityException(Msg.code(838) + "ConceptMap[url='" + theConceptMap.getUrl() + + "'] contains at least one group without a value in ConceptMap.group.source"); } String groupTarget = group.getTarget(); @@ -217,7 +236,8 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { groupTarget = target; } if (isBlank(groupTarget)) { - throw new UnprocessableEntityException(Msg.code(839) + "ConceptMap[url='" + theConceptMap.getUrl() + "'] contains at least one group without a value in ConceptMap.group.target"); + throw new UnprocessableEntityException(Msg.code(839) + "ConceptMap[url='" + theConceptMap.getUrl() + + "'] contains at least one group without a value in ConceptMap.group.target"); } termConceptMapGroup = new TermConceptMapGroup(); @@ -253,7 +273,9 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { termConceptMapGroupElementTarget.setCode(elementTarget.getCode()); termConceptMapGroupElementTarget.setDisplay(elementTarget.getDisplay()); termConceptMapGroupElementTarget.setEquivalence(elementTarget.getEquivalence()); - termConceptMapGroupElement.getConceptMapGroupElementTargets().add(termConceptMapGroupElementTarget); + termConceptMapGroupElement + .getConceptMapGroupElementTargets() + .add(termConceptMapGroupElementTarget); myConceptMapGroupElementTargetDao.save(termConceptMapGroupElementTarget); if (++codesSaved % 250 == 0) { @@ -270,24 +292,40 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { TermConceptMap existingTermConceptMap = optionalExistingTermConceptMapByUrl.get(); if (isBlank(conceptMapVersion)) { - String msg = myContext.getLocalizer().getMessage( - TermReadSvcImpl.class, - "cannotCreateDuplicateConceptMapUrl", - conceptMapUrl, - existingTermConceptMap.getResource().getIdDt().toUnqualifiedVersionless().getValue()); + String msg = myContext + .getLocalizer() + .getMessage( + TermReadSvcImpl.class, + "cannotCreateDuplicateConceptMapUrl", + conceptMapUrl, + existingTermConceptMap + .getResource() 
+ .getIdDt() + .toUnqualifiedVersionless() + .getValue()); throw new UnprocessableEntityException(Msg.code(840) + msg); } else { - String msg = myContext.getLocalizer().getMessage( - TermReadSvcImpl.class, - "cannotCreateDuplicateConceptMapUrlAndVersion", - conceptMapUrl, conceptMapVersion, - existingTermConceptMap.getResource().getIdDt().toUnqualifiedVersionless().getValue()); + String msg = myContext + .getLocalizer() + .getMessage( + TermReadSvcImpl.class, + "cannotCreateDuplicateConceptMapUrlAndVersion", + conceptMapUrl, + conceptMapVersion, + existingTermConceptMap + .getResource() + .getIdDt() + .toUnqualifiedVersionless() + .getValue()); throw new UnprocessableEntityException(Msg.code(841) + msg); } } - ourLog.info("Done storing TermConceptMap[{}] for {}", termConceptMap.getId(), theConceptMap.getIdElement().toVersionless().getValueAsString()); + ourLog.info( + "Done storing TermConceptMap[{}] for {}", + termConceptMap.getId(), + theConceptMap.getIdElement().toVersionless().getValueAsString()); } @Override @@ -296,10 +334,12 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { TranslateConceptResults retVal = new TranslateConceptResults(); CriteriaBuilder criteriaBuilder = myEntityManager.getCriteriaBuilder(); - CriteriaQuery query = criteriaBuilder.createQuery(TermConceptMapGroupElementTarget.class); + CriteriaQuery query = + criteriaBuilder.createQuery(TermConceptMapGroupElementTarget.class); Root root = query.from(TermConceptMapGroupElementTarget.class); - Join elementJoin = root.join("myConceptMapGroupElement"); + Join elementJoin = + root.join("myConceptMapGroupElement"); Join groupJoin = elementJoin.join("myConceptMapGroup"); Join conceptMapJoin = groupJoin.join("myConceptMap"); @@ -308,13 +348,14 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { ArrayList predicates; Coding coding; - //-- get the latest ConceptMapVersion if theTranslationRequest has ConceptMap url but no ConceptMap version + // -- get the latest ConceptMapVersion if theTranslationRequest has ConceptMap url but no ConceptMap version String latestConceptMapVersion = null; if (theTranslationRequest.hasUrl() && !theTranslationRequest.hasConceptMapVersion()) latestConceptMapVersion = getLatestConceptMapVersion(theTranslationRequest); for (TranslationQuery translationQuery : translationQueries) { - cachedTargets = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.CONCEPT_TRANSLATION, translationQuery); + cachedTargets = myMemoryCacheService.getIfPresent( + MemoryCacheService.CacheEnum.CONCEPT_TRANSLATION, translationQuery); if (cachedTargets == null) { final List targets = new ArrayList<>(); @@ -324,7 +365,8 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { if (coding.hasCode()) { predicates.add(criteriaBuilder.equal(elementJoin.get("myCode"), coding.getCode())); } else { - throw new InvalidRequestException(Msg.code(842) + "A code must be provided for translation to occur."); + throw new InvalidRequestException( + Msg.code(842) + "A code must be provided for translation to occur."); } if (coding.hasSystem()) { @@ -336,18 +378,21 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { } if (translationQuery.hasTargetSystem()) { - predicates.add(criteriaBuilder.equal(groupJoin.get("myTarget"), translationQuery.getTargetSystem())); + predicates.add( + criteriaBuilder.equal(groupJoin.get("myTarget"), translationQuery.getTargetSystem())); } if (translationQuery.hasUrl()) { 
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myUrl"), translationQuery.getUrl())); if (translationQuery.hasConceptMapVersion()) { // both url and conceptMapVersion - predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myVersion"), translationQuery.getConceptMapVersion())); + predicates.add(criteriaBuilder.equal( + conceptMapJoin.get("myVersion"), translationQuery.getConceptMapVersion())); } else { if (StringUtils.isNotBlank(latestConceptMapVersion)) { // only url and use latestConceptMapVersion - predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myVersion"), latestConceptMapVersion)); + predicates.add( + criteriaBuilder.equal(conceptMapJoin.get("myVersion"), latestConceptMapVersion)); } else { predicates.add(criteriaBuilder.isNull(conceptMapJoin.get("myVersion"))); } @@ -364,7 +409,8 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { if (translationQuery.hasResourceId()) { IIdType resourceId = translationQuery.getResourceId(); - JpaPid resourcePid = myIdHelperService.getPidOrThrowException(RequestPartitionId.defaultPartition(), resourceId); + JpaPid resourcePid = + myIdHelperService.getPidOrThrowException(RequestPartitionId.defaultPartition(), resourceId); predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myResourcePid"), resourcePid.getId())); } @@ -372,11 +418,14 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { query.where(outerPredicate); // Use scrollable results. - final TypedQuery typedQuery = myEntityManager.createQuery(query.select(root)); - org.hibernate.query.Query hibernateQuery = (org.hibernate.query.Query) typedQuery; + final TypedQuery typedQuery = + myEntityManager.createQuery(query.select(root)); + org.hibernate.query.Query hibernateQuery = + (org.hibernate.query.Query) typedQuery; hibernateQuery.setFetchSize(myFetchSize); ScrollableResults scrollableResults = hibernateQuery.scroll(ScrollMode.FORWARD_ONLY); - try (ScrollableResultsIterator scrollableResultsIterator = new ScrollableResultsIterator<>(scrollableResults)) { + try (ScrollableResultsIterator scrollableResultsIterator = + new ScrollableResultsIterator<>(scrollableResults)) { Set matches = new HashSet<>(); while (scrollableResultsIterator.hasNext()) { @@ -385,7 +434,8 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { TranslateConceptResult translationMatch = new TranslateConceptResult(); if (next.getEquivalence() != null) { - translationMatch.setEquivalence(next.getEquivalence().toCode()); + translationMatch.setEquivalence( + next.getEquivalence().toCode()); } translationMatch.setCode(next.getCode()); @@ -399,7 +449,6 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { targets.add(translationMatch); } } - } ourLastResultsFromTranslationCache = false; // For testing. 
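Both translate paths in this class assemble their queries the same way: build a CriteriaQuery with joins and predicates, unwrap the Hibernate query to stream results with a bounded fetch size, and process rows one at a time. A reduced sketch of that pattern against a single hypothetical MyEntity (one String field named "myCode"); the real queries join element, group, and concept map and feed the matches into the translation cache:

    import javax.persistence.Entity;
    import javax.persistence.EntityManager;
    import javax.persistence.Id;
    import javax.persistence.TypedQuery;
    import javax.persistence.criteria.CriteriaBuilder;
    import javax.persistence.criteria.CriteriaQuery;
    import javax.persistence.criteria.Predicate;
    import javax.persistence.criteria.Root;
    import org.hibernate.ScrollMode;
    import org.hibernate.ScrollableResults;

    @Entity
    class MyEntity {
        @Id
        Long id;

        String myCode;
    }

    class ScrollExample {
        // Stream every MyEntity row matching theCode without loading the whole result list.
        static void forEachMatch(EntityManager theEntityManager, String theCode, int theFetchSize) {
            CriteriaBuilder cb = theEntityManager.getCriteriaBuilder();
            CriteriaQuery<MyEntity> query = cb.createQuery(MyEntity.class);
            Root<MyEntity> root = query.from(MyEntity.class);
            Predicate byCode = cb.equal(root.get("myCode"), theCode);
            query.select(root).where(byCode);

            TypedQuery<MyEntity> typedQuery = theEntityManager.createQuery(query);
            org.hibernate.query.Query<MyEntity> hibernateQuery = typedQuery.unwrap(org.hibernate.query.Query.class);
            hibernateQuery.setFetchSize(theFetchSize); // rows are fetched in batches as the cursor advances
            ScrollableResults results = hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
            try {
                while (results.next()) {
                    MyEntity next = (MyEntity) results.get(0);
                    // handle one row, then detach it so the persistence context stays small
                    theEntityManager.detach(next);
                }
            } finally {
                results.close();
            }
        }
    }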
@@ -424,7 +473,8 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { CriteriaQuery query = criteriaBuilder.createQuery(TermConceptMapGroupElement.class); Root root = query.from(TermConceptMapGroupElement.class); - Join targetJoin = root.join("myConceptMapGroupElementTargets"); + Join targetJoin = + root.join("myConceptMapGroupElementTargets"); Join groupJoin = root.join("myConceptMapGroup"); Join conceptMapJoin = groupJoin.join("myConceptMap"); @@ -433,13 +483,14 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { ArrayList predicates; Coding coding; - //-- get the latest ConceptMapVersion if theTranslationRequest has ConceptMap url but no ConceptMap version + // -- get the latest ConceptMapVersion if theTranslationRequest has ConceptMap url but no ConceptMap version String latestConceptMapVersion = null; if (theTranslationRequest.hasUrl() && !theTranslationRequest.hasConceptMapVersion()) latestConceptMapVersion = getLatestConceptMapVersion(theTranslationRequest); for (TranslationQuery translationQuery : translationQueries) { - cachedElements = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.CONCEPT_TRANSLATION_REVERSE, translationQuery); + cachedElements = myMemoryCacheService.getIfPresent( + MemoryCacheService.CacheEnum.CONCEPT_TRANSLATION_REVERSE, translationQuery); if (cachedElements == null) { final List elements = new ArrayList<>(); @@ -452,7 +503,8 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { predicates.add(criteriaBuilder.equal(targetJoin.get("myCode"), coding.getCode())); targetCode = coding.getCode(); } else { - throw new InvalidRequestException(Msg.code(843) + "A code must be provided for translation to occur."); + throw new InvalidRequestException( + Msg.code(843) + "A code must be provided for translation to occur."); } if (coding.hasSystem()) { @@ -468,11 +520,13 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myUrl"), translationQuery.getUrl())); if (translationQuery.hasConceptMapVersion()) { // both url and conceptMapVersion - predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myVersion"), translationQuery.getConceptMapVersion())); + predicates.add(criteriaBuilder.equal( + conceptMapJoin.get("myVersion"), translationQuery.getConceptMapVersion())); } else { if (StringUtils.isNotBlank(latestConceptMapVersion)) { // only url and use latestConceptMapVersion - predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myVersion"), latestConceptMapVersion)); + predicates.add( + criteriaBuilder.equal(conceptMapJoin.get("myVersion"), latestConceptMapVersion)); } else { predicates.add(criteriaBuilder.isNull(conceptMapJoin.get("myVersion"))); } @@ -480,7 +534,8 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { } if (translationQuery.hasTargetSystem()) { - predicates.add(criteriaBuilder.equal(groupJoin.get("mySource"), translationQuery.getTargetSystem())); + predicates.add( + criteriaBuilder.equal(groupJoin.get("mySource"), translationQuery.getTargetSystem())); } if (translationQuery.hasSource()) { @@ -493,7 +548,8 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { if (translationQuery.hasResourceId()) { IIdType resourceId = translationQuery.getResourceId(); - JpaPid resourcePid = myIdHelperService.getPidOrThrowException(RequestPartitionId.defaultPartition(), resourceId); + JpaPid resourcePid = + 
myIdHelperService.getPidOrThrowException(RequestPartitionId.defaultPartition(), resourceId); predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myResourcePid"), resourcePid.getId())); } @@ -501,11 +557,14 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { query.where(outerPredicate); // Use scrollable results. - final TypedQuery typedQuery = myEntityManager.createQuery(query.select(root)); - org.hibernate.query.Query hibernateQuery = (org.hibernate.query.Query) typedQuery; + final TypedQuery typedQuery = + myEntityManager.createQuery(query.select(root)); + org.hibernate.query.Query hibernateQuery = + (org.hibernate.query.Query) typedQuery; hibernateQuery.setFetchSize(myFetchSize); ScrollableResults scrollableResults = hibernateQuery.scroll(ScrollMode.FORWARD_ONLY); - try (ScrollableResultsIterator scrollableResultsIterator = new ScrollableResultsIterator<>(scrollableResults)) { + try (ScrollableResultsIterator scrollableResultsIterator = + new ScrollableResultsIterator<>(scrollableResults)) { Set matches = new HashSet<>(); while (scrollableResultsIterator.hasNext()) { @@ -515,15 +574,18 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { * but removing it causes tests in TerminologySvcImplR4Test to fail. We use the outcome * in a trace log to avoid ErrorProne flagging an unused return value. */ - int size = nextElement.getConceptMapGroupElementTargets().size(); + int size = + nextElement.getConceptMapGroupElementTargets().size(); ourLog.trace("Have {} targets", size); myEntityManager.detach(nextElement); if (isNotBlank(targetCode)) { - for (TermConceptMapGroupElementTarget next : nextElement.getConceptMapGroupElementTargets()) { + for (TermConceptMapGroupElementTarget next : + nextElement.getConceptMapGroupElementTargets()) { if (matches.add(next)) { - if (isBlank(targetCodeSystem) || StringUtils.equals(targetCodeSystem, next.getSystem())) { + if (isBlank(targetCodeSystem) + || StringUtils.equals(targetCodeSystem, next.getSystem())) { if (StringUtils.equals(targetCode, next.getCode())) { TranslateConceptResult translationMatch = new TranslateConceptResult(); translationMatch.setCode(nextElement.getCode()); @@ -534,26 +596,27 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { translationMatch.setSystemVersion(nextElement.getSystemVersion()); translationMatch.setConceptMapUrl(nextElement.getConceptMapUrl()); if (next.getEquivalence() != null) { - translationMatch.setEquivalence(next.getEquivalence().toCode()); + translationMatch.setEquivalence( + next.getEquivalence().toCode()); } - if (alreadyContainsMapping(elements, translationMatch) || alreadyContainsMapping(retVal.getResults(), translationMatch)) { + if (alreadyContainsMapping(elements, translationMatch) + || alreadyContainsMapping(retVal.getResults(), translationMatch)) { continue; } elements.add(translationMatch); } } - } } } } - } ourLastResultsFromTranslationWithReverseCache = false; // For testing. - myMemoryCacheService.put(MemoryCacheService.CacheEnum.CONCEPT_TRANSLATION_REVERSE, translationQuery, elements); + myMemoryCacheService.put( + MemoryCacheService.CacheEnum.CONCEPT_TRANSLATION_REVERSE, translationQuery, elements); retVal.getResults().addAll(elements); } else { ourLastResultsFromTranslationWithReverseCache = true; // For testing. 
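Both translation directions wrap the query above in the same cache-aside flow around MemoryCacheService: check getIfPresent first, run the criteria query only on a miss, then put the computed list back so the next identical TranslationQuery is a cache hit. A generic sketch of that flow with a plain ConcurrentHashMap standing in for the cache (hypothetical key and value types, not part of this patch):

    import java.util.List;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.function.Function;

    // Stand-in for the MemoryCacheService interaction: look up first, run the loader
    // only on a miss, then publish the result for the next identical query.
    class TranslationCache<K, V> {
        private final Map<K, List<V>> myCache = new ConcurrentHashMap<>();

        List<V> getOrCompute(K theKey, Function<K, List<V>> theLoader) {
            List<V> cached = myCache.get(theKey); // getIfPresent equivalent
            if (cached != null) {
                return cached; // cache hit: no database round trip
            }
            List<V> loaded = theLoader.apply(theKey); // cache miss: run the criteria query
            myCache.put(theKey, loaded); // put equivalent
            return loaded;
        }
    }

The ourLastResultsFromTranslationCache / ourLastResultsFromTranslationWithReverseCache flags in the patch only record which branch ran, for the benefit of tests, and are omitted from this sketch.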
@@ -565,7 +628,8 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { return retVal; } - private boolean alreadyContainsMapping(List elements, TranslateConceptResult translationMatch) { + private boolean alreadyContainsMapping( + List elements, TranslateConceptResult translationMatch) { for (TranslateConceptResult nextExistingElement : elements) { if (StringUtils.equals(nextExistingElement.getSystem(), translationMatch.getSystem())) { if (StringUtils.equals(nextExistingElement.getSystemVersion(), translationMatch.getSystemVersion())) { @@ -580,7 +644,8 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { public void deleteConceptMap(ResourceTable theResourceTable) { // Get existing entity so it can be deleted. - Optional optionalExistingTermConceptMapById = myConceptMapDao.findTermConceptMapByResourcePid(theResourceTable.getId()); + Optional optionalExistingTermConceptMapById = + myConceptMapDao.findTermConceptMapByResourcePid(theResourceTable.getId()); if (optionalExistingTermConceptMapById.isPresent()) { TermConceptMap existingTermConceptMap = optionalExistingTermConceptMapById.get(); @@ -611,8 +676,8 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { private String getLatestConceptMapVersion(TranslationRequest theTranslationRequest) { Pageable page = PageRequest.of(0, 1); - List theConceptMapList = myConceptMapDao.getTermConceptMapEntitiesByUrlOrderByMostRecentUpdate(page, - theTranslationRequest.getUrl()); + List theConceptMapList = myConceptMapDao.getTermConceptMapEntitiesByUrlOrderByMostRecentUpdate( + page, theTranslationRequest.getUrl()); if (!theConceptMapList.isEmpty()) { return theConceptMapList.get(0).getVersion(); } @@ -632,10 +697,8 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { msg = myContext.getLocalizer().getMessage(TermConceptMappingSvcImpl.class, "matchesFound"); theTranslationResult.setMessage(msg); } - } - /** * This method is present only for unit tests, do not call from client code */ @@ -678,20 +741,25 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc { } for (TranslateConceptResult translationMatch : theTranslationResult.getResults()) { - Parameters.ParametersParameterComponent matchParam = retVal.addParameter().setName("match"); + Parameters.ParametersParameterComponent matchParam = + retVal.addParameter().setName("match"); populateTranslateMatchParts(translationMatch, matchParam); } return retVal; } - private static void populateTranslateMatchParts(TranslateConceptResult theTranslationMatch, Parameters.ParametersParameterComponent theParam) { + private static void populateTranslateMatchParts( + TranslateConceptResult theTranslationMatch, Parameters.ParametersParameterComponent theParam) { if (theTranslationMatch.getEquivalence() != null) { theParam.addPart().setName("equivalence").setValue(new CodeType(theTranslationMatch.getEquivalence())); } - if (isNotBlank(theTranslationMatch.getSystem()) || isNotBlank(theTranslationMatch.getCode()) || isNotBlank(theTranslationMatch.getDisplay())) { - Coding value = new Coding(theTranslationMatch.getSystem(), theTranslationMatch.getCode(), theTranslationMatch.getDisplay()); + if (isNotBlank(theTranslationMatch.getSystem()) + || isNotBlank(theTranslationMatch.getCode()) + || isNotBlank(theTranslationMatch.getDisplay())) { + Coding value = new Coding( + theTranslationMatch.getSystem(), theTranslationMatch.getCode(), theTranslationMatch.getDisplay()); if 
(isNotBlank(theTranslationMatch.getSystemVersion())) { value.setVersion(theTranslationMatch.getSystemVersion()); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermDeferredStorageSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermDeferredStorageSvcImpl.java index 5a55bd2cec2..2f47cac377a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermDeferredStorageSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermDeferredStorageSvcImpl.java @@ -84,7 +84,8 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc, IHas private final List myDeferredConcepts = Collections.synchronizedList(new ArrayList<>()); private final List myDeferredValueSets = Collections.synchronizedList(new ArrayList<>()); private final List myDeferredConceptMaps = Collections.synchronizedList(new ArrayList<>()); - private final List myConceptLinksToSaveLater = Collections.synchronizedList(new ArrayList<>()); + private final List myConceptLinksToSaveLater = + Collections.synchronizedList(new ArrayList<>()); // TODO - why is this needed? it's cumbersome to maintain; consider removing it /** @@ -95,15 +96,21 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc, IHas @Autowired protected ITermConceptDao myConceptDao; + @Autowired protected ITermCodeSystemDao myCodeSystemDao; + @Autowired protected ITermCodeSystemVersionDao myCodeSystemVersionDao; + @Autowired protected PlatformTransactionManager myTransactionMgr; + private boolean myProcessDeferred = true; + @Autowired private ITermConceptParentChildLinkDao myConceptParentChildLinkDao; + @Autowired private ITermVersionAdapterSvc myTerminologyVersionAdapterSvc; @@ -139,9 +146,12 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc, IHas @Override public void deleteCodeSystemForResource(ResourceTable theCodeSystemToDelete) { - // there are use cases (at least in tests) where the code system is not present for the resource but versions are, - // so, as code system deletion also deletes versions, we try the system first but if not present we also try versions - TermCodeSystem termCodeSystemToDelete = myCodeSystemDao.findByResourcePid(theCodeSystemToDelete.getResourceId()); + // there are use cases (at least in tests) where the code system is not present for the resource but versions + // are, + // so, as code system deletion also deletes versions, we try the system first but if not present we also try + // versions + TermCodeSystem termCodeSystemToDelete = + myCodeSystemDao.findByResourcePid(theCodeSystemToDelete.getResourceId()); if (termCodeSystemToDelete != null) { termCodeSystemToDelete.setCodeSystemUri("urn:uuid:" + UUID.randomUUID()); myCodeSystemDao.save(termCodeSystemToDelete); @@ -149,7 +159,8 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc, IHas return; } - List codeSystemVersionsToDelete = myCodeSystemVersionDao.findByCodeSystemResourcePid(theCodeSystemToDelete.getResourceId()); + List codeSystemVersionsToDelete = + myCodeSystemVersionDao.findByCodeSystemResourcePid(theCodeSystemToDelete.getResourceId()); for (TermCodeSystemVersion codeSystemVersionToDelete : codeSystemVersionsToDelete) { if (codeSystemVersionToDelete != null) { myDeferredCodeSystemVersionsDeletions.add(codeSystemVersionToDelete); @@ -157,7 +168,6 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc, IHas } } - @Override public void setProcessDeferred(boolean 
theProcessDeferred) { myProcessDeferred = theProcessDeferred; @@ -181,22 +191,35 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc, IHas ourLog.debug("Saving {} deferred concepts...", count); while (codeCount < count && myDeferredConcepts.size() > 0) { TermConcept next = myDeferredConcepts.remove(0); - if (myCodeSystemVersionDao.findById(next.getCodeSystemVersion().getPid()).isPresent()) { + if (myCodeSystemVersionDao + .findById(next.getCodeSystemVersion().getPid()) + .isPresent()) { try { codeCount += myTermConceptDaoSvc.saveConcept(next); } catch (Exception theE) { - ourLog.error("Exception thrown when attempting to save TermConcept {} in Code System {}", - next.getCode(), next.getCodeSystemVersion().getCodeSystemDisplayName(), theE); + ourLog.error( + "Exception thrown when attempting to save TermConcept {} in Code System {}", + next.getCode(), + next.getCodeSystemVersion().getCodeSystemDisplayName(), + theE); } } else { - ourLog.warn("Unable to save deferred TermConcept {} because Code System {} version PID {} is no longer valid. Code system may have since been replaced.", - next.getCode(), next.getCodeSystemVersion().getCodeSystemDisplayName(), next.getCodeSystemVersion().getPid()); + ourLog.warn( + "Unable to save deferred TermConcept {} because Code System {} version PID {} is no longer valid. Code system may have since been replaced.", + next.getCode(), + next.getCodeSystemVersion().getCodeSystemDisplayName(), + next.getCodeSystemVersion().getPid()); } } if (codeCount > 0) { - ourLog.info("Saved {} deferred concepts ({} codes remain and {} relationships remain) in {}ms ({} codes/sec)", - codeCount, myDeferredConcepts.size(), myConceptLinksToSaveLater.size(), stopwatch.getMillis(), stopwatch.formatThroughput(codeCount, TimeUnit.SECONDS)); + ourLog.info( + "Saved {} deferred concepts ({} codes remain and {} relationships remain) in {}ms ({} codes/sec)", + codeCount, + myDeferredConcepts.size(), + myConceptLinksToSaveLater.size(), + stopwatch.getMillis(), + stopwatch.formatThroughput(codeCount, TimeUnit.SECONDS)); } if (codeCount == 0) { @@ -207,9 +230,18 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc, IHas assert next.getChild() != null; assert next.getParent() != null; - if ((next.getChild().getId() == null || !myConceptDao.findById(next.getChild().getId()).isPresent()) - || (next.getParent().getId() == null || !myConceptDao.findById(next.getParent().getId()).isPresent())) { - ourLog.warn("Not inserting link from child {} to parent {} because it appears to have been deleted", next.getParent().getCode(), next.getChild().getCode()); + if ((next.getChild().getId() == null + || !myConceptDao + .findById(next.getChild().getId()) + .isPresent()) + || (next.getParent().getId() == null + || !myConceptDao + .findById(next.getParent().getId()) + .isPresent())) { + ourLog.warn( + "Not inserting link from child {} to parent {} because it appears to have been deleted", + next.getParent().getCode(), + next.getChild().getCode()); continue; } @@ -219,8 +251,12 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc, IHas } if (relCount > 0) { - ourLog.info("Saved {} deferred relationships ({} remain) in {}ms ({} entries/sec)", - relCount, myConceptLinksToSaveLater.size(), stopwatch.getMillis(), stopwatch.formatThroughput(relCount, TimeUnit.SECONDS)); + ourLog.info( + "Saved {} deferred relationships ({} remain) in {}ms ({} entries/sec)", + relCount, + myConceptLinksToSaveLater.size(), + stopwatch.getMillis(), + 
stopwatch.formatThroughput(relCount, TimeUnit.SECONDS)); } if ((myDeferredConcepts.size() + myConceptLinksToSaveLater.size()) == 0) { @@ -275,9 +311,10 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc, IHas public void saveAllDeferred() { TimeoutManager timeoutManager = null; if (myAllowDeferredTasksTimeout) { - timeoutManager = new TimeoutManager(TermDeferredStorageSvcImpl.class.getName() + ".saveAllDeferred()", - Duration.of(SAVE_ALL_DEFERRED_WARN_MINUTES, ChronoUnit.MINUTES), - Duration.of(SAVE_ALL_DEFERRED_ERROR_MINUTES, ChronoUnit.MINUTES)); + timeoutManager = new TimeoutManager( + TermDeferredStorageSvcImpl.class.getName() + ".saveAllDeferred()", + Duration.of(SAVE_ALL_DEFERRED_WARN_MINUTES, ChronoUnit.MINUTES), + Duration.of(SAVE_ALL_DEFERRED_ERROR_MINUTES, ChronoUnit.MINUTES)); } // Don't include executing jobs here since there's no point in thrashing over and over @@ -300,11 +337,11 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc, IHas } for (int i = 0; i < 10; i++) { - if (!isDeferredConcepts() && - !isConceptLinksToSaveLater() && - !isDeferredValueSets() && - !isDeferredConceptMaps() && - !isDeferredCodeSystemDeletions()) { + if (!isDeferredConcepts() + && !isConceptLinksToSaveLater() + && !isDeferredValueSets() + && !isDeferredConceptMaps() + && !isDeferredCodeSystemDeletions()) { return; } @@ -356,7 +393,6 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc, IHas myDeferredCodeSystemsDeletions.clear(); } - private void processDeferredCodeSystemVersionDeletions() { for (TermCodeSystemVersion next : myDeferredCodeSystemVersionsDeletions) { deleteTermCodeSystemVersionOffline(next.getPid()); @@ -364,7 +400,6 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc, IHas myDeferredCodeSystemVersionsDeletions.clear(); } - private void deleteTermCodeSystemVersionOffline(Long theCodeSystemVersionPid) { JobInstanceStartRequest request = new JobInstanceStartRequest(); request.setJobDefinitionId(TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME); @@ -387,7 +422,6 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc, IHas myJobExecutions.add(response.getInstanceId()); } - @Override public boolean isStorageQueueEmpty(boolean theIncludeExecutingJobs) { boolean retVal = !isProcessDeferredPaused(); @@ -467,7 +501,6 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc, IHas myTransactionMgr = theTxManager; } - @VisibleForTesting void setTermConceptDaoSvc(TermConceptDaoSvc theTermConceptDaoSvc) { myTermConceptDaoSvc = theTermConceptDaoSvc; @@ -528,14 +561,14 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc, IHas @Override public String toString() { return new ToStringBuilder(this) - .append("myDeferredCodeSystemsDeletions", myDeferredCodeSystemsDeletions.size()) - .append("myDeferredCodeSystemVersionsDeletions", myDeferredCodeSystemVersionsDeletions.size()) - .append("myDeferredConcepts", myDeferredConcepts.size()) - .append("myDeferredValueSets", myDeferredValueSets.size()) - .append("myDeferredConceptMaps", myDeferredConceptMaps.size()) - .append("myConceptLinksToSaveLater", myConceptLinksToSaveLater.size()) - .append("myJobExecutions", myJobExecutions.size()) - .append("myProcessDeferred", myProcessDeferred) - .toString(); + .append("myDeferredCodeSystemsDeletions", myDeferredCodeSystemsDeletions.size()) + .append("myDeferredCodeSystemVersionsDeletions", myDeferredCodeSystemVersionsDeletions.size()) + 
.append("myDeferredConcepts", myDeferredConcepts.size()) + .append("myDeferredValueSets", myDeferredValueSets.size()) + .append("myDeferredConceptMaps", myDeferredConceptMaps.size()) + .append("myConceptLinksToSaveLater", myConceptLinksToSaveLater.size()) + .append("myJobExecutions", myJobExecutions.size()) + .append("myProcessDeferred", myProcessDeferred) + .toString(); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermLoaderSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermLoaderSvcImpl.java index 4004a810288..ba636bd5f83 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermLoaderSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermLoaderSvcImpl.java @@ -83,7 +83,6 @@ import org.springframework.aop.support.AopUtils; import org.springframework.beans.factory.annotation.Autowired; import org.xml.sax.SAXException; -import javax.annotation.Nonnull; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; @@ -104,6 +103,7 @@ import java.util.Optional; import java.util.Properties; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import static ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc.MAKE_LOADING_VERSION_CURRENT; import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_ANSWERLIST_FILE; @@ -179,35 +179,44 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { private final ITermCodeSystemStorageSvc myCodeSystemStorageSvc; @Autowired - public TermLoaderSvcImpl(ITermDeferredStorageSvc theDeferredStorageSvc, ITermCodeSystemStorageSvc theCodeSystemStorageSvc) { + public TermLoaderSvcImpl( + ITermDeferredStorageSvc theDeferredStorageSvc, ITermCodeSystemStorageSvc theCodeSystemStorageSvc) { this(theDeferredStorageSvc, theCodeSystemStorageSvc, true); } - private TermLoaderSvcImpl(ITermDeferredStorageSvc theDeferredStorageSvc, ITermCodeSystemStorageSvc theCodeSystemStorageSvc, boolean theProxyCheck) { + private TermLoaderSvcImpl( + ITermDeferredStorageSvc theDeferredStorageSvc, + ITermCodeSystemStorageSvc theCodeSystemStorageSvc, + boolean theProxyCheck) { if (theProxyCheck) { - // If these validations start failing, it likely means a cyclic dependency has been introduced into the Spring Application - // Context that is preventing the Spring auto-proxy bean post-processor from being able to proxy these beans. Check + // If these validations start failing, it likely means a cyclic dependency has been introduced into the + // Spring Application + // Context that is preventing the Spring auto-proxy bean post-processor from being able to proxy these + // beans. Check // for recent changes to the Spring @Configuration that may have caused this. - Validate.isTrue(AopUtils.isAopProxy(theDeferredStorageSvc), theDeferredStorageSvc.getClass().getName() + " is not a proxy. @Transactional annotations will be ignored."); - Validate.isTrue(AopUtils.isAopProxy(theCodeSystemStorageSvc), theCodeSystemStorageSvc.getClass().getName() + " is not a proxy. @Transactional annotations will be ignored."); + Validate.isTrue( + AopUtils.isAopProxy(theDeferredStorageSvc), + theDeferredStorageSvc.getClass().getName() + + " is not a proxy. @Transactional annotations will be ignored."); + Validate.isTrue( + AopUtils.isAopProxy(theCodeSystemStorageSvc), + theCodeSystemStorageSvc.getClass().getName() + + " is not a proxy. 
@Transactional annotations will be ignored."); } myDeferredStorageSvc = theDeferredStorageSvc; myCodeSystemStorageSvc = theCodeSystemStorageSvc; - } @VisibleForTesting - public static TermLoaderSvcImpl withoutProxyCheck(ITermDeferredStorageSvc theTermDeferredStorageSvc, ITermCodeSystemStorageSvc theTermCodeSystemStorageSvc) { + public static TermLoaderSvcImpl withoutProxyCheck( + ITermDeferredStorageSvc theTermDeferredStorageSvc, ITermCodeSystemStorageSvc theTermCodeSystemStorageSvc) { return new TermLoaderSvcImpl(theTermDeferredStorageSvc, theTermCodeSystemStorageSvc, false); } @Override public UploadStatistics loadImgthla(List theFiles, RequestDetails theRequestDetails) { try (LoadedFileDescriptors descriptors = getLoadedFileDescriptors(theFiles)) { - List mandatoryFilenameFragments = Arrays.asList( - IMGTHLA_HLA_NOM_TXT, - IMGTHLA_HLA_XML - ); + List mandatoryFilenameFragments = Arrays.asList(IMGTHLA_HLA_NOM_TXT, IMGTHLA_HLA_XML); descriptors.verifyMandatoryFilesExist(mandatoryFilenameFragments); ourLog.info("Beginning IMGTHLA processing"); @@ -227,48 +236,74 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { Properties uploadProperties = getProperties(descriptors, LOINC_UPLOAD_PROPERTIES_FILE.getCode()); String codeSystemVersionId = uploadProperties.getProperty(LOINC_CODESYSTEM_VERSION.getCode()); - boolean isMakeCurrentVersion = Boolean.parseBoolean( - uploadProperties.getProperty(LOINC_CODESYSTEM_MAKE_CURRENT.getCode(), "true")); + boolean isMakeCurrentVersion = + Boolean.parseBoolean(uploadProperties.getProperty(LOINC_CODESYSTEM_MAKE_CURRENT.getCode(), "true")); - if (StringUtils.isBlank(codeSystemVersionId) && ! isMakeCurrentVersion) { - throw new InvalidRequestException(Msg.code(864) + "'" + LOINC_CODESYSTEM_VERSION.getCode() + - "' property is required when '" + LOINC_CODESYSTEM_MAKE_CURRENT.getCode() + "' property is 'false'"); + if (StringUtils.isBlank(codeSystemVersionId) && !isMakeCurrentVersion) { + throw new InvalidRequestException( + Msg.code(864) + "'" + LOINC_CODESYSTEM_VERSION.getCode() + "' property is required when '" + + LOINC_CODESYSTEM_MAKE_CURRENT.getCode() + "' property is 'false'"); } List mandatoryFilenameFragments = Arrays.asList( - uploadProperties.getProperty(LOINC_ANSWERLIST_FILE.getCode(), LOINC_ANSWERLIST_FILE_DEFAULT.getCode()), - uploadProperties.getProperty(LOINC_ANSWERLIST_LINK_FILE.getCode(), LOINC_ANSWERLIST_LINK_FILE_DEFAULT.getCode()), - uploadProperties.getProperty(LOINC_DOCUMENT_ONTOLOGY_FILE.getCode(), LOINC_DOCUMENT_ONTOLOGY_FILE_DEFAULT.getCode()), - uploadProperties.getProperty(LOINC_FILE.getCode(), LOINC_FILE_DEFAULT.getCode()), - uploadProperties.getProperty(LOINC_HIERARCHY_FILE.getCode(), LOINC_HIERARCHY_FILE_DEFAULT.getCode()), - uploadProperties.getProperty(LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE.getCode(), LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE_DEFAULT.getCode()), - uploadProperties.getProperty(LOINC_IMAGING_DOCUMENT_CODES_FILE.getCode(), LOINC_IMAGING_DOCUMENT_CODES_FILE_DEFAULT.getCode()), - uploadProperties.getProperty(LOINC_PART_FILE.getCode(), LOINC_PART_FILE_DEFAULT.getCode()), - uploadProperties.getProperty(LOINC_PART_RELATED_CODE_MAPPING_FILE.getCode(), LOINC_PART_RELATED_CODE_MAPPING_FILE_DEFAULT.getCode()), - uploadProperties.getProperty(LOINC_RSNA_PLAYBOOK_FILE.getCode(), LOINC_RSNA_PLAYBOOK_FILE_DEFAULT.getCode()), - uploadProperties.getProperty(LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE.getCode(), LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE_DEFAULT.getCode()) - ); + uploadProperties.getProperty( + 
LOINC_ANSWERLIST_FILE.getCode(), LOINC_ANSWERLIST_FILE_DEFAULT.getCode()), + uploadProperties.getProperty( + LOINC_ANSWERLIST_LINK_FILE.getCode(), LOINC_ANSWERLIST_LINK_FILE_DEFAULT.getCode()), + uploadProperties.getProperty( + LOINC_DOCUMENT_ONTOLOGY_FILE.getCode(), LOINC_DOCUMENT_ONTOLOGY_FILE_DEFAULT.getCode()), + uploadProperties.getProperty(LOINC_FILE.getCode(), LOINC_FILE_DEFAULT.getCode()), + uploadProperties.getProperty( + LOINC_HIERARCHY_FILE.getCode(), LOINC_HIERARCHY_FILE_DEFAULT.getCode()), + uploadProperties.getProperty( + LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE.getCode(), + LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE_DEFAULT.getCode()), + uploadProperties.getProperty( + LOINC_IMAGING_DOCUMENT_CODES_FILE.getCode(), + LOINC_IMAGING_DOCUMENT_CODES_FILE_DEFAULT.getCode()), + uploadProperties.getProperty(LOINC_PART_FILE.getCode(), LOINC_PART_FILE_DEFAULT.getCode()), + uploadProperties.getProperty( + LOINC_PART_RELATED_CODE_MAPPING_FILE.getCode(), + LOINC_PART_RELATED_CODE_MAPPING_FILE_DEFAULT.getCode()), + uploadProperties.getProperty( + LOINC_RSNA_PLAYBOOK_FILE.getCode(), LOINC_RSNA_PLAYBOOK_FILE_DEFAULT.getCode()), + uploadProperties.getProperty( + LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE.getCode(), + LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE_DEFAULT.getCode())); descriptors.verifyMandatoryFilesExist(mandatoryFilenameFragments); List splitPartLinkFilenameFragments = Arrays.asList( - uploadProperties.getProperty(LOINC_PART_LINK_FILE_PRIMARY.getCode(), LOINC_PART_LINK_FILE_PRIMARY_DEFAULT.getCode()), - uploadProperties.getProperty(LOINC_PART_LINK_FILE_SUPPLEMENTARY.getCode(), LOINC_PART_LINK_FILE_SUPPLEMENTARY_DEFAULT.getCode()) - ); - descriptors.verifyPartLinkFilesExist(splitPartLinkFilenameFragments, uploadProperties.getProperty(LOINC_PART_LINK_FILE.getCode(), LOINC_PART_LINK_FILE_DEFAULT.getCode())); + uploadProperties.getProperty( + LOINC_PART_LINK_FILE_PRIMARY.getCode(), LOINC_PART_LINK_FILE_PRIMARY_DEFAULT.getCode()), + uploadProperties.getProperty( + LOINC_PART_LINK_FILE_SUPPLEMENTARY.getCode(), + LOINC_PART_LINK_FILE_SUPPLEMENTARY_DEFAULT.getCode())); + descriptors.verifyPartLinkFilesExist( + splitPartLinkFilenameFragments, + uploadProperties.getProperty( + LOINC_PART_LINK_FILE.getCode(), LOINC_PART_LINK_FILE_DEFAULT.getCode())); List optionalFilenameFragments = Arrays.asList( - uploadProperties.getProperty(LOINC_GROUP_FILE.getCode(), LOINC_GROUP_FILE_DEFAULT.getCode()), - uploadProperties.getProperty(LOINC_GROUP_TERMS_FILE.getCode(), LOINC_GROUP_TERMS_FILE_DEFAULT.getCode()), - uploadProperties.getProperty(LOINC_PARENT_GROUP_FILE.getCode(), LOINC_PARENT_GROUP_FILE_DEFAULT.getCode()), - uploadProperties.getProperty(LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE.getCode(), LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE_DEFAULT.getCode()), - uploadProperties.getProperty(LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE.getCode(), LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE_DEFAULT.getCode()), - uploadProperties.getProperty(LOINC_MAPTO_FILE.getCode(), LOINC_MAPTO_FILE_DEFAULT.getCode()), + uploadProperties.getProperty(LOINC_GROUP_FILE.getCode(), LOINC_GROUP_FILE_DEFAULT.getCode()), + uploadProperties.getProperty( + LOINC_GROUP_TERMS_FILE.getCode(), LOINC_GROUP_TERMS_FILE_DEFAULT.getCode()), + uploadProperties.getProperty( + LOINC_PARENT_GROUP_FILE.getCode(), LOINC_PARENT_GROUP_FILE_DEFAULT.getCode()), + uploadProperties.getProperty( + LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE.getCode(), + LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE_DEFAULT.getCode()), + uploadProperties.getProperty( + 
LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE.getCode(), + LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE_DEFAULT.getCode()), + uploadProperties.getProperty(LOINC_MAPTO_FILE.getCode(), LOINC_MAPTO_FILE_DEFAULT.getCode()), - //-- optional consumer name - uploadProperties.getProperty(LOINC_CONSUMER_NAME_FILE.getCode(), LOINC_CONSUMER_NAME_FILE_DEFAULT.getCode()), - uploadProperties.getProperty(LOINC_LINGUISTIC_VARIANTS_FILE.getCode(), LOINC_LINGUISTIC_VARIANTS_FILE_DEFAULT.getCode()) + // -- optional consumer name + uploadProperties.getProperty( + LOINC_CONSUMER_NAME_FILE.getCode(), LOINC_CONSUMER_NAME_FILE_DEFAULT.getCode()), + uploadProperties.getProperty( + LOINC_LINGUISTIC_VARIANTS_FILE.getCode(), + LOINC_LINGUISTIC_VARIANTS_FILE_DEFAULT.getCode())); - ); descriptors.verifyOptionalFilesExist(optionalFilenameFragments); ourLog.info("Beginning LOINC processing"); @@ -293,10 +328,8 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { public UploadStatistics loadSnomedCt(List theFiles, RequestDetails theRequestDetails) { try (LoadedFileDescriptors descriptors = getLoadedFileDescriptors(theFiles)) { - List expectedFilenameFragments = Arrays.asList( - SCT_FILE_DESCRIPTION, - SCT_FILE_RELATIONSHIP, - SCT_FILE_CONCEPT); + List expectedFilenameFragments = + Arrays.asList(SCT_FILE_DESCRIPTION, SCT_FILE_RELATIONSHIP, SCT_FILE_CONCEPT); descriptors.verifyMandatoryFilesExist(expectedFilenameFragments); ourLog.info("Beginning SNOMED CT processing"); @@ -321,7 +354,7 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { for (FileDescriptor nextDescriptor : compressedDescriptors.getUncompressedFileDescriptors()) { if (nextDescriptor.getFilename().toLowerCase(Locale.US).endsWith(".xml")) { try (InputStream inputStream = nextDescriptor.getInputStream(); - InputStreamReader reader = new InputStreamReader(inputStream, Charsets.UTF_8) ) { + InputStreamReader reader = new InputStreamReader(inputStream, Charsets.UTF_8)) { Icd10Loader loader = new Icd10Loader(codeSystem, codeSystemVersion); loader.load(reader); count += loader.getConceptCount(); @@ -355,7 +388,7 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { for (FileDescriptor nextDescriptor : compressedDescriptors.getUncompressedFileDescriptors()) { if (nextDescriptor.getFilename().toLowerCase(Locale.US).endsWith(".xml")) { try (InputStream inputStream = nextDescriptor.getInputStream(); - InputStreamReader reader = new InputStreamReader(inputStream, Charsets.UTF_8) ) { + InputStreamReader reader = new InputStreamReader(inputStream, Charsets.UTF_8)) { Icd10CmLoader loader = new Icd10CmLoader(codeSystemVersion); loader.load(reader); count += loader.getConceptCount(); @@ -373,17 +406,23 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { } @Override - public UploadStatistics loadCustom(String theSystem, List theFiles, RequestDetails theRequestDetails) { + public UploadStatistics loadCustom( + String theSystem, List theFiles, RequestDetails theRequestDetails) { try (LoadedFileDescriptors descriptors = getLoadedFileDescriptors(theFiles)) { Optional codeSystemContent = loadFile(descriptors, CUSTOM_CODESYSTEM_JSON, CUSTOM_CODESYSTEM_XML); CodeSystem codeSystem; if (codeSystemContent.isPresent()) { - codeSystem = EncodingEnum - .detectEncoding(codeSystemContent.get()) - .newParser(myCtx) - .parseResource(CodeSystem.class, codeSystemContent.get()); - ValidateUtil.isTrueOrThrowInvalidRequest(theSystem.equalsIgnoreCase(codeSystem.getUrl()), "CodeSystem.url does not match the supplied system: %s", theSystem); - 
ValidateUtil.isTrueOrThrowInvalidRequest(CodeSystem.CodeSystemContentMode.NOTPRESENT.equals(codeSystem.getContent()), "CodeSystem.content does not match the expected value: %s", CodeSystem.CodeSystemContentMode.NOTPRESENT.toCode()); + codeSystem = EncodingEnum.detectEncoding(codeSystemContent.get()) + .newParser(myCtx) + .parseResource(CodeSystem.class, codeSystemContent.get()); + ValidateUtil.isTrueOrThrowInvalidRequest( + theSystem.equalsIgnoreCase(codeSystem.getUrl()), + "CodeSystem.url does not match the supplied system: %s", + theSystem); + ValidateUtil.isTrueOrThrowInvalidRequest( + CodeSystem.CodeSystemContentMode.NOTPRESENT.equals(codeSystem.getContent()), + "CodeSystem.content does not match the expected value: %s", + CodeSystem.CodeSystemContentMode.NOTPRESENT.toCode()); } else { codeSystem = new CodeSystem(); codeSystem.setUrl(theSystem); @@ -398,10 +437,13 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { } } - @Override - public UploadStatistics loadDeltaAdd(String theSystem, List theFiles, RequestDetails theRequestDetails) { - ourLog.info("Processing terminology delta ADD for system[{}] with files: {}", theSystem, theFiles.stream().map(FileDescriptor::getFilename).collect(Collectors.toList())); + public UploadStatistics loadDeltaAdd( + String theSystem, List theFiles, RequestDetails theRequestDetails) { + ourLog.info( + "Processing terminology delta ADD for system[{}] with files: {}", + theSystem, + theFiles.stream().map(FileDescriptor::getFilename).collect(Collectors.toList())); try (LoadedFileDescriptors descriptors = getLoadedFileDescriptors(theFiles)) { CustomTerminologySet terminologySet = CustomTerminologySet.load(descriptors, false); return myCodeSystemStorageSvc.applyDeltaCodeSystemsAdd(theSystem, terminologySet); @@ -409,18 +451,25 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { } @Override - public UploadStatistics loadDeltaRemove(String theSystem, List theFiles, RequestDetails theRequestDetails) { - ourLog.info("Processing terminology delta REMOVE for system[{}] with files: {}", theSystem, theFiles.stream().map(FileDescriptor::getFilename).collect(Collectors.toList())); + public UploadStatistics loadDeltaRemove( + String theSystem, List theFiles, RequestDetails theRequestDetails) { + ourLog.info( + "Processing terminology delta REMOVE for system[{}] with files: {}", + theSystem, + theFiles.stream().map(FileDescriptor::getFilename).collect(Collectors.toList())); try (LoadedFileDescriptors descriptors = getLoadedFileDescriptors(theFiles)) { CustomTerminologySet terminologySet = CustomTerminologySet.load(descriptors, true); return myCodeSystemStorageSvc.applyDeltaCodeSystemsRemove(theSystem, terminologySet); } } - private void dropCircularRefs(TermConcept theConcept, ArrayList theChain, Map theCode2concept) { + private void dropCircularRefs( + TermConcept theConcept, ArrayList theChain, Map theCode2concept) { theChain.add(theConcept.getCode()); - for (Iterator childIter = theConcept.getChildren().iterator(); childIter.hasNext(); ) { + for (Iterator childIter = + theConcept.getChildren().iterator(); + childIter.hasNext(); ) { TermConceptParentChildLink next = childIter.next(); TermConcept nextChild = next.getChild(); if (theChain.contains(nextChild.getCode())) { @@ -435,7 +484,10 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { TermConcept nextCode = theCode2concept.get(nextInChain); b.append(nextCode.getCode()); b.append('['); - b.append(StringUtils.substring(nextCode.getDisplay(), 0, 20).replace("[", "").replace("]", 
"").trim()); + b.append(StringUtils.substring(nextCode.getDisplay(), 0, 20) + .replace("[", "") + .replace("]", "") + .trim()); b.append("] "); } ourLog.info(b.toString(), theConcept.getCode()); @@ -447,7 +499,6 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { } } theChain.remove(theChain.size() - 1); - } @VisibleForTesting @@ -455,7 +506,8 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { Properties getProperties(LoadedFileDescriptors theDescriptors, String thePropertiesFile) { Properties retVal = new Properties(); - try (InputStream propertyStream = ca.uhn.fhir.jpa.term.TermLoaderSvcImpl.class.getResourceAsStream("/ca/uhn/fhir/jpa/term/loinc/loincupload.properties")) { + try (InputStream propertyStream = ca.uhn.fhir.jpa.term.TermLoaderSvcImpl.class.getResourceAsStream( + "/ca/uhn/fhir/jpa/term/loinc/loincupload.properties")) { retVal.load(propertyStream); } catch (IOException e) { throw new InternalErrorException(Msg.code(866) + "Failed to process loinc.properties", e); @@ -491,14 +543,17 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { return Optional.empty(); } - private UploadStatistics processImgthlaFiles(LoadedFileDescriptors theDescriptors, RequestDetails theRequestDetails) { + private UploadStatistics processImgthlaFiles( + LoadedFileDescriptors theDescriptors, RequestDetails theRequestDetails) { final TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion(); final List valueSets = new ArrayList<>(); final List conceptMaps = new ArrayList<>(); CodeSystem imgthlaCs; try { - String imgthlaCsString = IOUtils.toString(TermReadSvcImpl.class.getResourceAsStream("/ca/uhn/fhir/jpa/term/imgthla/imgthla.xml"), Charsets.UTF_8); + String imgthlaCsString = IOUtils.toString( + TermReadSvcImpl.class.getResourceAsStream("/ca/uhn/fhir/jpa/term/imgthla/imgthla.xml"), + Charsets.UTF_8); imgthlaCs = FhirContext.forR4Cached().newXmlParser().parseResource(CodeSystem.class, imgthlaCsString); } catch (IOException e) { throw new InternalErrorException(Msg.code(869) + "Failed to load imgthla.xml", e); @@ -509,8 +564,10 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { for (FileDescriptor nextZipBytes : theDescriptors.getUncompressedFileDescriptors()) { String nextFilename = nextZipBytes.getFilename(); - if (!IMGTHLA_HLA_NOM_TXT.equals(nextFilename) && !nextFilename.endsWith("/" + IMGTHLA_HLA_NOM_TXT) - && !IMGTHLA_HLA_XML.equals(nextFilename) && !nextFilename.endsWith("/" + IMGTHLA_HLA_XML)) { + if (!IMGTHLA_HLA_NOM_TXT.equals(nextFilename) + && !nextFilename.endsWith("/" + IMGTHLA_HLA_NOM_TXT) + && !IMGTHLA_HLA_XML.equals(nextFilename) + && !nextFilename.endsWith("/" + IMGTHLA_HLA_XML)) { ourLog.info("Skipping unexpected file {}", nextFilename); continue; } @@ -519,16 +576,17 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { // process colon-delimited hla_nom.txt file ourLog.info("Processing file {}", nextFilename); -// IRecordHandler handler = new HlaNomTxtHandler(codeSystemVersion, code2concept, propertyNamesToTypes); -// AntigenSource antigenSource = new WmdaAntigenSource(hlaNomFilename, relSerSerFilename, relDnaSerFilename); + // IRecordHandler handler = new HlaNomTxtHandler(codeSystemVersion, code2concept, + // propertyNamesToTypes); + // AntigenSource antigenSource = new WmdaAntigenSource(hlaNomFilename, relSerSerFilename, + // relDnaSerFilename); Reader reader = null; try { reader = new InputStreamReader(nextZipBytes.getInputStream(), Charsets.UTF_8); LineNumberReader lnr = new LineNumberReader(reader); - while 
(lnr.readLine() != null) { - } + while (lnr.readLine() != null) {} ourLog.warn("Lines read from {}: {}", nextFilename, lnr.getLineNumber()); } catch (IOException e) { @@ -544,16 +602,15 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { // process hla.xml file ourLog.info("Processing file {}", nextFilename); -// IRecordHandler handler = new HlaXmlHandler(codeSystemVersion, code2concept, propertyNamesToTypes); -// AlleleSource alleleSource = new HlaXmlAlleleSource(hlaXmlFilename); + // IRecordHandler handler = new HlaXmlHandler(codeSystemVersion, code2concept, propertyNamesToTypes); + // AlleleSource alleleSource = new HlaXmlAlleleSource(hlaXmlFilename); Reader reader = null; try { reader = new InputStreamReader(nextZipBytes.getInputStream(), Charsets.UTF_8); LineNumberReader lnr = new LineNumberReader(reader); - while (lnr.readLine() != null) { - } + while (lnr.readLine() != null) {} ourLog.warn("Lines read from {}: {}", nextFilename, lnr.getLineNumber()); } catch (IOException e) { @@ -564,7 +621,6 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { foundHlaXml = true; } - } if (!foundHlaNom) { @@ -577,17 +633,26 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { int valueSetCount = valueSets.size(); int rootConceptCount = codeSystemVersion.getConcepts().size(); - ourLog.info("Have {} total concepts, {} root concepts, {} ValueSets", rootConceptCount, rootConceptCount, valueSetCount); + ourLog.info( + "Have {} total concepts, {} root concepts, {} ValueSets", + rootConceptCount, + rootConceptCount, + valueSetCount); // remove this when fully implemented ... - throw new InternalErrorException(Msg.code(874) + "HLA nomenclature terminology upload not yet fully implemented."); + throw new InternalErrorException( + Msg.code(874) + "HLA nomenclature terminology upload not yet fully implemented."); -// IIdType target = storeCodeSystem(theRequestDetails, codeSystemVersion, imgthlaCs, valueSets, conceptMaps); -// -// return new UploadStatistics(conceptCount, target); + // IIdType target = storeCodeSystem(theRequestDetails, codeSystemVersion, imgthlaCs, valueSets, conceptMaps); + // + // return new UploadStatistics(conceptCount, target); } - UploadStatistics processLoincFiles(LoadedFileDescriptors theDescriptors, RequestDetails theRequestDetails, Properties theUploadProperties, Boolean theCloseFiles) { + UploadStatistics processLoincFiles( + LoadedFileDescriptors theDescriptors, + RequestDetails theRequestDetails, + Properties theUploadProperties, + Boolean theCloseFiles) { final TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion(); final Map code2concept = new HashMap<>(); final List valueSets = new ArrayList<>(); @@ -604,8 +669,9 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { CodeSystem loincCs = FhirContext.forR4Cached().newXmlParser().parseResource(CodeSystem.class, loincCsString); if (isNotBlank(loincCs.getVersion())) { - throw new InvalidRequestException(Msg.code(876) + "'loinc.xml' file must not have a version defined. To define a version use '" + - LOINC_CODESYSTEM_VERSION.getCode() + "' property of 'loincupload.properties' file"); + throw new InvalidRequestException( + Msg.code(876) + "'loinc.xml' file must not have a version defined. 
To define a version use '" + + LOINC_CODESYSTEM_VERSION.getCode() + "' property of 'loincupload.properties' file"); } String codeSystemVersionId = theUploadProperties.getProperty(LOINC_CODESYSTEM_VERSION.getCode()); @@ -623,7 +689,8 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { } } - // TODO: DM 2019-09-13 - Manually add EXTERNAL_COPYRIGHT_NOTICE property until Regenstrief adds this to loinc.xml + // TODO: DM 2019-09-13 - Manually add EXTERNAL_COPYRIGHT_NOTICE property until Regenstrief adds this to + // loinc.xml if (!propertyNamesToTypes.containsKey("EXTERNAL_COPYRIGHT_NOTICE")) { String externalCopyRightNoticeCode = "EXTERNAL_COPYRIGHT_NOTICE"; CodeSystem.PropertyType externalCopyRightNoticeType = CodeSystem.PropertyType.STRING; @@ -634,101 +701,286 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { // Part handler = new LoincPartHandler(codeSystemVersion, code2concept); - iterateOverZipFileCsv(theDescriptors, theUploadProperties.getProperty(LOINC_PART_FILE.getCode(), LOINC_PART_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); - Map partTypeAndPartNameToPartNumber = ((LoincPartHandler) handler).getPartTypeAndPartNameToPartNumber(); + iterateOverZipFileCsv( + theDescriptors, + theUploadProperties.getProperty(LOINC_PART_FILE.getCode(), LOINC_PART_FILE_DEFAULT.getCode()), + handler, + ',', + QuoteMode.NON_NUMERIC, + false); + Map partTypeAndPartNameToPartNumber = + ((LoincPartHandler) handler).getPartTypeAndPartNameToPartNumber(); // LOINC string properties - handler = new LoincHandler(codeSystemVersion, code2concept, propertyNamesToTypes, partTypeAndPartNameToPartNumber); - iterateOverZipFileCsv(theDescriptors, theUploadProperties.getProperty(LOINC_FILE.getCode(), LOINC_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); + handler = new LoincHandler( + codeSystemVersion, code2concept, propertyNamesToTypes, partTypeAndPartNameToPartNumber); + iterateOverZipFileCsv( + theDescriptors, + theUploadProperties.getProperty(LOINC_FILE.getCode(), LOINC_FILE_DEFAULT.getCode()), + handler, + ',', + QuoteMode.NON_NUMERIC, + false); // LOINC hierarchy handler = new LoincHierarchyHandler(codeSystemVersion, code2concept); - iterateOverZipFileCsv(theDescriptors, theUploadProperties.getProperty(LOINC_HIERARCHY_FILE.getCode(), LOINC_HIERARCHY_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); + iterateOverZipFileCsv( + theDescriptors, + theUploadProperties.getProperty(LOINC_HIERARCHY_FILE.getCode(), LOINC_HIERARCHY_FILE_DEFAULT.getCode()), + handler, + ',', + QuoteMode.NON_NUMERIC, + false); // Answer lists (ValueSets of potential answers/values for LOINC "questions") - handler = new LoincAnswerListHandler(codeSystemVersion, code2concept, valueSets, conceptMaps, theUploadProperties, loincCs.getCopyright()); - iterateOverZipFileCsv(theDescriptors, theUploadProperties.getProperty(LOINC_ANSWERLIST_FILE.getCode(), LOINC_ANSWERLIST_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); + handler = new LoincAnswerListHandler( + codeSystemVersion, code2concept, valueSets, conceptMaps, theUploadProperties, loincCs.getCopyright()); + iterateOverZipFileCsv( + theDescriptors, + theUploadProperties.getProperty( + LOINC_ANSWERLIST_FILE.getCode(), LOINC_ANSWERLIST_FILE_DEFAULT.getCode()), + handler, + ',', + QuoteMode.NON_NUMERIC, + false); // Answer list links (connects LOINC observation codes to answer list codes) handler = new LoincAnswerListLinkHandler(code2concept); - iterateOverZipFileCsv(theDescriptors, 
theUploadProperties.getProperty(LOINC_ANSWERLIST_LINK_FILE.getCode(), LOINC_ANSWERLIST_LINK_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); + iterateOverZipFileCsv( + theDescriptors, + theUploadProperties.getProperty( + LOINC_ANSWERLIST_LINK_FILE.getCode(), LOINC_ANSWERLIST_LINK_FILE_DEFAULT.getCode()), + handler, + ',', + QuoteMode.NON_NUMERIC, + false); // RSNA playbook // Note that this should come before the "Part Related Code Mapping" // file because there are some duplicate mappings between these // two files, and the RSNA Playbook file has more metadata - handler = new LoincRsnaPlaybookHandler(code2concept, valueSets, conceptMaps, theUploadProperties, loincCs.getCopyright()); - iterateOverZipFileCsv(theDescriptors, theUploadProperties.getProperty(LOINC_RSNA_PLAYBOOK_FILE.getCode(), LOINC_RSNA_PLAYBOOK_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); + handler = new LoincRsnaPlaybookHandler( + code2concept, valueSets, conceptMaps, theUploadProperties, loincCs.getCopyright()); + iterateOverZipFileCsv( + theDescriptors, + theUploadProperties.getProperty( + LOINC_RSNA_PLAYBOOK_FILE.getCode(), LOINC_RSNA_PLAYBOOK_FILE_DEFAULT.getCode()), + handler, + ',', + QuoteMode.NON_NUMERIC, + false); // Part related code mapping - handler = new LoincPartRelatedCodeMappingHandler(code2concept, valueSets, conceptMaps, theUploadProperties, loincCs.getCopyright()); - iterateOverZipFileCsv(theDescriptors, theUploadProperties.getProperty(LOINC_PART_RELATED_CODE_MAPPING_FILE.getCode(), LOINC_PART_RELATED_CODE_MAPPING_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); + handler = new LoincPartRelatedCodeMappingHandler( + code2concept, valueSets, conceptMaps, theUploadProperties, loincCs.getCopyright()); + iterateOverZipFileCsv( + theDescriptors, + theUploadProperties.getProperty( + LOINC_PART_RELATED_CODE_MAPPING_FILE.getCode(), + LOINC_PART_RELATED_CODE_MAPPING_FILE_DEFAULT.getCode()), + handler, + ',', + QuoteMode.NON_NUMERIC, + false); // Document ontology - handler = new LoincDocumentOntologyHandler(code2concept, propertyNamesToTypes, valueSets, conceptMaps, theUploadProperties, loincCs.getCopyright()); - iterateOverZipFileCsv(theDescriptors, theUploadProperties.getProperty(LOINC_DOCUMENT_ONTOLOGY_FILE.getCode(), LOINC_DOCUMENT_ONTOLOGY_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); + handler = new LoincDocumentOntologyHandler( + code2concept, + propertyNamesToTypes, + valueSets, + conceptMaps, + theUploadProperties, + loincCs.getCopyright()); + iterateOverZipFileCsv( + theDescriptors, + theUploadProperties.getProperty( + LOINC_DOCUMENT_ONTOLOGY_FILE.getCode(), LOINC_DOCUMENT_ONTOLOGY_FILE_DEFAULT.getCode()), + handler, + ',', + QuoteMode.NON_NUMERIC, + false); // Top 2000 codes - US - handler = new LoincTop2000LabResultsUsHandler(code2concept, valueSets, conceptMaps, theUploadProperties, loincCs.getCopyright()); - iterateOverZipFileCsvOptional(theDescriptors, theUploadProperties.getProperty(LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE.getCode(), LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); + handler = new LoincTop2000LabResultsUsHandler( + code2concept, valueSets, conceptMaps, theUploadProperties, loincCs.getCopyright()); + iterateOverZipFileCsvOptional( + theDescriptors, + theUploadProperties.getProperty( + LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE.getCode(), + LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE_DEFAULT.getCode()), + handler, + ',', + 
QuoteMode.NON_NUMERIC, + false); // Top 2000 codes - SI - handler = new LoincTop2000LabResultsSiHandler(code2concept, valueSets, conceptMaps, theUploadProperties, loincCs.getCopyright()); - iterateOverZipFileCsvOptional(theDescriptors, theUploadProperties.getProperty(LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE.getCode(), LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); + handler = new LoincTop2000LabResultsSiHandler( + code2concept, valueSets, conceptMaps, theUploadProperties, loincCs.getCopyright()); + iterateOverZipFileCsvOptional( + theDescriptors, + theUploadProperties.getProperty( + LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE.getCode(), + LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE_DEFAULT.getCode()), + handler, + ',', + QuoteMode.NON_NUMERIC, + false); // Universal lab order ValueSet handler = new LoincUniversalOrderSetHandler(code2concept, valueSets, conceptMaps, theUploadProperties); - iterateOverZipFileCsv(theDescriptors, theUploadProperties.getProperty(LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE.getCode(), LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); + iterateOverZipFileCsv( + theDescriptors, + theUploadProperties.getProperty( + LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE.getCode(), + LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE_DEFAULT.getCode()), + handler, + ',', + QuoteMode.NON_NUMERIC, + false); // IEEE medical device codes - handler = new LoincIeeeMedicalDeviceCodeHandler(code2concept, valueSets, conceptMaps, theUploadProperties, loincCs.getCopyright()); - iterateOverZipFileCsv(theDescriptors, theUploadProperties.getProperty(LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE.getCode(), LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); + handler = new LoincIeeeMedicalDeviceCodeHandler( + code2concept, valueSets, conceptMaps, theUploadProperties, loincCs.getCopyright()); + iterateOverZipFileCsv( + theDescriptors, + theUploadProperties.getProperty( + LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE.getCode(), + LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE_DEFAULT.getCode()), + handler, + ',', + QuoteMode.NON_NUMERIC, + false); // Imaging document codes handler = new LoincImagingDocumentCodeHandler(code2concept, valueSets, conceptMaps, theUploadProperties); - iterateOverZipFileCsv(theDescriptors, theUploadProperties.getProperty(LOINC_IMAGING_DOCUMENT_CODES_FILE.getCode(), LOINC_IMAGING_DOCUMENT_CODES_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); + iterateOverZipFileCsv( + theDescriptors, + theUploadProperties.getProperty( + LOINC_IMAGING_DOCUMENT_CODES_FILE.getCode(), + LOINC_IMAGING_DOCUMENT_CODES_FILE_DEFAULT.getCode()), + handler, + ',', + QuoteMode.NON_NUMERIC, + false); // Group - handler = new LoincGroupFileHandler(code2concept, valueSets, conceptMaps, theUploadProperties, loincCs.getCopyright()); - iterateOverZipFileCsv(theDescriptors, theUploadProperties.getProperty(LOINC_GROUP_FILE.getCode(), LOINC_GROUP_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); + handler = new LoincGroupFileHandler( + code2concept, valueSets, conceptMaps, theUploadProperties, loincCs.getCopyright()); + iterateOverZipFileCsv( + theDescriptors, + theUploadProperties.getProperty(LOINC_GROUP_FILE.getCode(), LOINC_GROUP_FILE_DEFAULT.getCode()), + handler, + ',', + QuoteMode.NON_NUMERIC, + false); // Group terms handler = new LoincGroupTermsFileHandler(code2concept, valueSets, conceptMaps, 
theUploadProperties); - iterateOverZipFileCsv(theDescriptors, theUploadProperties.getProperty(LOINC_GROUP_TERMS_FILE.getCode(), LOINC_GROUP_TERMS_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); + iterateOverZipFileCsv( + theDescriptors, + theUploadProperties.getProperty( + LOINC_GROUP_TERMS_FILE.getCode(), LOINC_GROUP_TERMS_FILE_DEFAULT.getCode()), + handler, + ',', + QuoteMode.NON_NUMERIC, + false); // Parent group handler = new LoincParentGroupFileHandler(code2concept, valueSets, conceptMaps, theUploadProperties); - iterateOverZipFileCsv(theDescriptors, theUploadProperties.getProperty(LOINC_PARENT_GROUP_FILE.getCode(), LOINC_PARENT_GROUP_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); + iterateOverZipFileCsv( + theDescriptors, + theUploadProperties.getProperty( + LOINC_PARENT_GROUP_FILE.getCode(), LOINC_PARENT_GROUP_FILE_DEFAULT.getCode()), + handler, + ',', + QuoteMode.NON_NUMERIC, + false); // Part link handler = new LoincPartLinkHandler(codeSystemVersion, code2concept, propertyNamesToTypes); - iterateOverZipFileCsvOptional(theDescriptors, theUploadProperties.getProperty(LOINC_PART_LINK_FILE.getCode(), LOINC_PART_LINK_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); - iterateOverZipFileCsvOptional(theDescriptors, theUploadProperties.getProperty(LOINC_PART_LINK_FILE_PRIMARY.getCode(), LOINC_PART_LINK_FILE_PRIMARY_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); - iterateOverZipFileCsvOptional(theDescriptors, theUploadProperties.getProperty(LOINC_PART_LINK_FILE_SUPPLEMENTARY.getCode(), LOINC_PART_LINK_FILE_SUPPLEMENTARY_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); + iterateOverZipFileCsvOptional( + theDescriptors, + theUploadProperties.getProperty(LOINC_PART_LINK_FILE.getCode(), LOINC_PART_LINK_FILE_DEFAULT.getCode()), + handler, + ',', + QuoteMode.NON_NUMERIC, + false); + iterateOverZipFileCsvOptional( + theDescriptors, + theUploadProperties.getProperty( + LOINC_PART_LINK_FILE_PRIMARY.getCode(), LOINC_PART_LINK_FILE_PRIMARY_DEFAULT.getCode()), + handler, + ',', + QuoteMode.NON_NUMERIC, + false); + iterateOverZipFileCsvOptional( + theDescriptors, + theUploadProperties.getProperty( + LOINC_PART_LINK_FILE_SUPPLEMENTARY.getCode(), + LOINC_PART_LINK_FILE_SUPPLEMENTARY_DEFAULT.getCode()), + handler, + ',', + QuoteMode.NON_NUMERIC, + false); // Consumer Name handler = new LoincConsumerNameHandler(code2concept); - iterateOverZipFileCsvOptional(theDescriptors, theUploadProperties.getProperty(LOINC_CONSUMER_NAME_FILE.getCode(), LOINC_CONSUMER_NAME_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); + iterateOverZipFileCsvOptional( + theDescriptors, + theUploadProperties.getProperty( + LOINC_CONSUMER_NAME_FILE.getCode(), LOINC_CONSUMER_NAME_FILE_DEFAULT.getCode()), + handler, + ',', + QuoteMode.NON_NUMERIC, + false); // LOINC coding properties (must run after all TermConcepts were created) handler = new LoincCodingPropertiesHandler(code2concept, propertyNamesToTypes); - iterateOverZipFileCsv(theDescriptors, theUploadProperties.getProperty(LOINC_FILE.getCode(), LOINC_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); + iterateOverZipFileCsv( + theDescriptors, + theUploadProperties.getProperty(LOINC_FILE.getCode(), LOINC_FILE_DEFAULT.getCode()), + handler, + ',', + QuoteMode.NON_NUMERIC, + false); // Linguistic Variants handler = new LoincLinguisticVariantsHandler(linguisticVariants); - iterateOverZipFileCsvOptional(theDescriptors, 
theUploadProperties.getProperty(LOINC_LINGUISTIC_VARIANTS_FILE.getCode(), LOINC_LINGUISTIC_VARIANTS_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); + iterateOverZipFileCsvOptional( + theDescriptors, + theUploadProperties.getProperty( + LOINC_LINGUISTIC_VARIANTS_FILE.getCode(), LOINC_LINGUISTIC_VARIANTS_FILE_DEFAULT.getCode()), + handler, + ',', + QuoteMode.NON_NUMERIC, + false); String langFileName; for (LoincLinguisticVariantsHandler.LinguisticVariant linguisticVariant : linguisticVariants) { handler = new LoincLinguisticVariantHandler(code2concept, linguisticVariant.getLanguageCode()); langFileName = linguisticVariant.getLinguisticVariantFileName(); - iterateOverZipFileCsvOptional(theDescriptors, theUploadProperties.getProperty(LOINC_LINGUISTIC_VARIANTS_PATH.getCode() + langFileName, LOINC_LINGUISTIC_VARIANTS_PATH_DEFAULT.getCode() + langFileName), handler, ',', QuoteMode.NON_NUMERIC, false); + iterateOverZipFileCsvOptional( + theDescriptors, + theUploadProperties.getProperty( + LOINC_LINGUISTIC_VARIANTS_PATH.getCode() + langFileName, + LOINC_LINGUISTIC_VARIANTS_PATH_DEFAULT.getCode() + langFileName), + handler, + ',', + QuoteMode.NON_NUMERIC, + false); } - if (theDescriptors.isOptionalFilesExist(List.of(theUploadProperties.getProperty(LOINC_MAPTO_FILE.getCode(), LOINC_MAPTO_FILE_DEFAULT.getCode())))) { + if (theDescriptors.isOptionalFilesExist(List.of( + theUploadProperties.getProperty(LOINC_MAPTO_FILE.getCode(), LOINC_MAPTO_FILE_DEFAULT.getCode())))) { // LOINC MapTo codes (last to make sure that all concepts were added to code2concept map) handler = new LoincMapToHandler(code2concept); - iterateOverZipFileCsv(theDescriptors, theUploadProperties.getProperty(LOINC_MAPTO_FILE.getCode(), LOINC_MAPTO_FILE_DEFAULT.getCode()), handler, ',', QuoteMode.NON_NUMERIC, false); + iterateOverZipFileCsv( + theDescriptors, + theUploadProperties.getProperty(LOINC_MAPTO_FILE.getCode(), LOINC_MAPTO_FILE_DEFAULT.getCode()), + handler, + ',', + QuoteMode.NON_NUMERIC, + false); } if (theCloseFiles) { @@ -747,7 +999,11 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { int valueSetCount = valueSets.size(); int rootConceptCount = codeSystemVersion.getConcepts().size(); int conceptCount = code2concept.size(); - ourLog.info("Have {} total concepts, {} root concepts, {} ValueSets", conceptCount, rootConceptCount, valueSetCount); + ourLog.info( + "Have {} total concepts, {} root concepts, {} ValueSets", + conceptCount, + rootConceptCount, + valueSetCount); IIdType target = storeCodeSystem(theRequestDetails, codeSystemVersion, loincCs, valueSets, conceptMaps); @@ -759,7 +1015,6 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { return new LoincXmlFileZipContentsHandler(); } - private ValueSet getValueSetLoincAll(Properties theUploadProperties, String theCopyrightStatement) { ValueSet retVal = new ValueSet(); @@ -784,7 +1039,8 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { return retVal; } - private UploadStatistics processSnomedCtFiles(LoadedFileDescriptors theDescriptors, RequestDetails theRequestDetails) { + private UploadStatistics processSnomedCtFiles( + LoadedFileDescriptors theDescriptors, RequestDetails theRequestDetails) { final TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion(); final Map id2concept = new HashMap<>(); final Map code2concept = new HashMap<>(); @@ -808,16 +1064,22 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { ourLog.info("Looking for root codes"); rootConcepts - .entrySet() - 
.removeIf(theStringTermConceptEntry -> !theStringTermConceptEntry.getValue().getParents().isEmpty()); + .entrySet() + .removeIf(theStringTermConceptEntry -> + !theStringTermConceptEntry.getValue().getParents().isEmpty()); - ourLog.info("Done loading SNOMED CT files - {} root codes, {} total codes", rootConcepts.size(), code2concept.size()); + ourLog.info( + "Done loading SNOMED CT files - {} root codes, {} total codes", + rootConcepts.size(), + code2concept.size()); Counter circularCounter = new Counter(); for (TermConcept next : rootConcepts.values()) { long count = circularCounter.getThenAdd(); float pct = ((float) count / rootConcepts.size()) * 100.0f; - ourLog.info(" * Scanning for circular refs - have scanned {} / {} codes ({}%)", count, rootConcepts.size(), pct); + ourLog.info( + " * Scanning for circular refs - have scanned {} / {} codes ({}%)", + count, rootConcepts.size(), pct); dropCircularRefs(next, new ArrayList<>(), code2concept); } @@ -833,7 +1095,12 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { return new UploadStatistics(code2concept.size(), target); } - private IIdType storeCodeSystem(RequestDetails theRequestDetails, final TermCodeSystemVersion theCodeSystemVersion, CodeSystem theCodeSystem, List theValueSets, List theConceptMaps) { + private IIdType storeCodeSystem( + RequestDetails theRequestDetails, + final TermCodeSystemVersion theCodeSystemVersion, + CodeSystem theCodeSystem, + List theValueSets, + List theConceptMaps) { Validate.isTrue(theCodeSystem.getContent() == CodeSystem.CodeSystemContentMode.NOTPRESENT); List valueSets = ObjectUtils.defaultIfNull(theValueSets, Collections.emptyList()); @@ -841,21 +1108,43 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { IIdType retVal; myDeferredStorageSvc.setProcessDeferred(false); - retVal = myCodeSystemStorageSvc.storeNewCodeSystemVersion(theCodeSystem, theCodeSystemVersion, theRequestDetails, valueSets, conceptMaps); + retVal = myCodeSystemStorageSvc.storeNewCodeSystemVersion( + theCodeSystem, theCodeSystemVersion, theRequestDetails, valueSets, conceptMaps); myDeferredStorageSvc.setProcessDeferred(true); return retVal; } - public static void iterateOverZipFileCsv(LoadedFileDescriptors theDescriptors, String theFileNamePart, IZipContentsHandlerCsv theHandler, char theDelimiter, QuoteMode theQuoteMode, boolean theIsPartialFilename) { - iterateOverZipFileCsv(theDescriptors, theFileNamePart, theHandler, theDelimiter, theQuoteMode, theIsPartialFilename, true); + public static void iterateOverZipFileCsv( + LoadedFileDescriptors theDescriptors, + String theFileNamePart, + IZipContentsHandlerCsv theHandler, + char theDelimiter, + QuoteMode theQuoteMode, + boolean theIsPartialFilename) { + iterateOverZipFileCsv( + theDescriptors, theFileNamePart, theHandler, theDelimiter, theQuoteMode, theIsPartialFilename, true); } - public static void iterateOverZipFileCsvOptional(LoadedFileDescriptors theDescriptors, String theFileNamePart, IZipContentsHandlerCsv theHandler, char theDelimiter, QuoteMode theQuoteMode, boolean theIsPartialFilename) { - iterateOverZipFileCsv(theDescriptors, theFileNamePart, theHandler, theDelimiter, theQuoteMode, theIsPartialFilename, false); + public static void iterateOverZipFileCsvOptional( + LoadedFileDescriptors theDescriptors, + String theFileNamePart, + IZipContentsHandlerCsv theHandler, + char theDelimiter, + QuoteMode theQuoteMode, + boolean theIsPartialFilename) { + iterateOverZipFileCsv( + theDescriptors, theFileNamePart, theHandler, theDelimiter, theQuoteMode, 
theIsPartialFilename, false); } - private static void iterateOverZipFileCsv(LoadedFileDescriptors theDescriptors, String theFileNamePart, IZipContentsHandlerCsv theHandler, char theDelimiter, QuoteMode theQuoteMode, boolean theIsPartialFilename, boolean theRequireMatch) { + private static void iterateOverZipFileCsv( + LoadedFileDescriptors theDescriptors, + String theFileNamePart, + IZipContentsHandlerCsv theHandler, + char theDelimiter, + QuoteMode theQuoteMode, + boolean theIsPartialFilename, + boolean theRequireMatch) { IZipContentsHandler handler = (reader, filename) -> { CSVParser parsed = newCsvRecords(theDelimiter, theQuoteMode, reader); Iterator iter = parsed.iterator(); @@ -878,10 +1167,14 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { }; iterateOverZipFile(theDescriptors, theFileNamePart, theIsPartialFilename, theRequireMatch, handler); - } - private static void iterateOverZipFile(LoadedFileDescriptors theDescriptors, String theFileNamePart, boolean theIsPartialFilename, boolean theRequireMatch, IZipContentsHandler theHandler) { + private static void iterateOverZipFile( + LoadedFileDescriptors theDescriptors, + String theFileNamePart, + boolean theIsPartialFilename, + boolean theRequireMatch, + IZipContentsHandler theHandler) { boolean foundMatch = false; for (FileDescriptor nextZipBytes : theDescriptors.getUncompressedFileDescriptors()) { String nextFilename = nextZipBytes.getFilename(); @@ -905,7 +1198,6 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { throw new InternalErrorException(Msg.code(877) + e); } } - } if (!foundMatch && theRequireMatch) { @@ -913,14 +1205,12 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { } } - @Nonnull - private static CSVParser newCsvRecords(char theDelimiter, QuoteMode theQuoteMode, Reader theReader) throws IOException { + private static CSVParser newCsvRecords(char theDelimiter, QuoteMode theQuoteMode, Reader theReader) + throws IOException { CSVParser parsed; - CSVFormat format = CSVFormat - .newFormat(theDelimiter) - .withFirstRecordAsHeader() - .withTrim(); + CSVFormat format = + CSVFormat.newFormat(theDelimiter).withFirstRecordAsHeader().withTrim(); if (theQuoteMode != null) { format = format.withQuote('"').withQuoteMode(theQuoteMode); } @@ -948,11 +1238,13 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc { return concept; } - public static TermConceptProperty getOrCreateConceptProperty(Map> code2Properties, String code, String key) { + public static TermConceptProperty getOrCreateConceptProperty( + Map> code2Properties, String code, String key) { List termConceptProperties = code2Properties.get(code); - if (termConceptProperties == null) - return new TermConceptProperty(); - Optional termConceptProperty = termConceptProperties.stream().filter(property -> key.equals(property.getKey())).findFirst(); + if (termConceptProperties == null) return new TermConceptProperty(); + Optional termConceptProperty = termConceptProperties.stream() + .filter(property -> key.equals(property.getKey())) + .findFirst(); return termConceptProperty.orElseGet(TermConceptProperty::new); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcImpl.java index 36adf9e7b3e..6e8df461d85 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcImpl.java @@ -151,13 +151,6 @@ import 
org.springframework.transaction.support.TransactionTemplate; import org.springframework.util.CollectionUtils; import org.springframework.util.comparator.Comparators; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import javax.annotation.PostConstruct; -import javax.persistence.EntityManager; -import javax.persistence.NonUniqueResultException; -import javax.persistence.PersistenceContext; -import javax.persistence.PersistenceContextType; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -176,6 +169,13 @@ import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import javax.annotation.PostConstruct; +import javax.persistence.EntityManager; +import javax.persistence.NonUniqueResultException; +import javax.persistence.PersistenceContext; +import javax.persistence.PersistenceContextType; import static ca.uhn.fhir.jpa.entity.TermConceptPropertyBinder.CONCEPT_PROPERTY_PREFIX_NAME; import static ca.uhn.fhir.jpa.term.api.ITermLoaderSvc.LOINC_URI; @@ -210,60 +210,88 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { private static final int INDEXED_ROOTS_LOGGING_COUNT = 50_000; private static Runnable myInvokeOnNextCallForUnitTest; private static boolean ourForceDisableHibernateSearchForUnitTest; - private final Cache myCodeSystemCurrentVersionCache = CacheFactory.build(TimeUnit.MINUTES.toMillis(1)); + private final Cache myCodeSystemCurrentVersionCache = + CacheFactory.build(TimeUnit.MINUTES.toMillis(1)); + @Autowired protected DaoRegistry myDaoRegistry; + @Autowired protected ITermCodeSystemDao myCodeSystemDao; + @Autowired protected ITermConceptDao myConceptDao; + @Autowired protected ITermConceptPropertyDao myConceptPropertyDao; + @Autowired protected ITermConceptDesignationDao myConceptDesignationDao; + @Autowired protected ITermValueSetDao myTermValueSetDao; + @Autowired protected ITermValueSetConceptDao myValueSetConceptDao; + @Autowired protected ITermValueSetConceptDesignationDao myValueSetConceptDesignationDao; + @Autowired protected FhirContext myContext; + @PersistenceContext(type = PersistenceContextType.TRANSACTION) protected EntityManager myEntityManager; + private boolean myPreExpandingValueSets = false; + @Autowired private ITermCodeSystemVersionDao myCodeSystemVersionDao; + @Autowired private JpaStorageSettings myStorageSettings; + private TransactionTemplate myTxTemplate; + @Autowired private PlatformTransactionManager myTransactionManager; + @Autowired(required = false) private IFulltextSearchSvc myFulltextSearchSvc; + @Autowired private PlatformTransactionManager myTxManager; + @Autowired private ITermConceptDao myTermConceptDao; + @Autowired private ITermValueSetConceptViewDao myTermValueSetConceptViewDao; + @Autowired private ITermValueSetConceptViewOracleDao myTermValueSetConceptViewOracleDao; + @Autowired(required = false) private ITermDeferredStorageSvc myDeferredStorageSvc; + @Autowired private IIdHelperService myIdHelperService; + @Autowired private ApplicationContext myApplicationContext; + private volatile IValidationSupport myJpaValidationSupport; private volatile IValidationSupport myValidationSupport; - //We need this bean so we can tell which mode hibernate search is running in. + // We need this bean so we can tell which mode hibernate search is running in. 
@Autowired private HibernatePropertiesProvider myHibernatePropertiesProvider; + @Autowired private CachingValidationSupport myCachingValidationSupport; + @Autowired private VersionCanonicalizer myVersionCanonicalizer; + @Autowired private IJpaStorageResourceParser myJpaStorageResourceParser; @@ -278,7 +306,13 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { return fetchValueSet(theValueSetUrl) != null; } - private boolean addCodeIfNotAlreadyAdded(@Nullable ValueSetExpansionOptions theExpansionOptions, IValueSetConceptAccumulator theValueSetCodeAccumulator, Set theAddedCodes, TermConcept theConcept, boolean theAdd, String theValueSetIncludeVersion) { + private boolean addCodeIfNotAlreadyAdded( + @Nullable ValueSetExpansionOptions theExpansionOptions, + IValueSetConceptAccumulator theValueSetCodeAccumulator, + Set theAddedCodes, + TermConcept theConcept, + boolean theAdd, + String theValueSetIncludeVersion) { String codeSystem = theConcept.getCodeSystemVersion().getCodeSystem().getCodeSystemUri(); String codeSystemVersion = theConcept.getCodeSystemVersion().getCodeSystemVersionId(); String code = theConcept.getCode(); @@ -287,38 +321,80 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { String directParentPids = ""; if (theExpansionOptions != null && theExpansionOptions.isIncludeHierarchy()) { - directParentPids = theConcept - .getParents() - .stream() - .map(t -> t.getParent().getId().toString()) - .collect(joining(" ")); + directParentPids = theConcept.getParents().stream() + .map(t -> t.getParent().getId().toString()) + .collect(joining(" ")); } - Collection designations = theConcept.getDesignations(); if (StringUtils.isNotEmpty(theValueSetIncludeVersion)) { - return addCodeIfNotAlreadyAdded(theValueSetCodeAccumulator, theAddedCodes, designations, theAdd, codeSystem + OUR_PIPE_CHARACTER + theValueSetIncludeVersion, code, display, sourceConceptPid, directParentPids, codeSystemVersion); + return addCodeIfNotAlreadyAdded( + theValueSetCodeAccumulator, + theAddedCodes, + designations, + theAdd, + codeSystem + OUR_PIPE_CHARACTER + theValueSetIncludeVersion, + code, + display, + sourceConceptPid, + directParentPids, + codeSystemVersion); } else { - return addCodeIfNotAlreadyAdded(theValueSetCodeAccumulator, theAddedCodes, designations, theAdd, codeSystem, code, display, sourceConceptPid, directParentPids, codeSystemVersion); + return addCodeIfNotAlreadyAdded( + theValueSetCodeAccumulator, + theAddedCodes, + designations, + theAdd, + codeSystem, + code, + display, + sourceConceptPid, + directParentPids, + codeSystemVersion); } } - private boolean addCodeIfNotAlreadyAdded(IValueSetConceptAccumulator theValueSetCodeAccumulator, Set theAddedCodes, boolean theAdd, String theCodeSystem, String theCodeSystemVersion, String theCode, String theDisplay, Long theSourceConceptPid, String theSourceConceptDirectParentPids, Collection theDesignations) { + private boolean addCodeIfNotAlreadyAdded( + IValueSetConceptAccumulator theValueSetCodeAccumulator, + Set theAddedCodes, + boolean theAdd, + String theCodeSystem, + String theCodeSystemVersion, + String theCode, + String theDisplay, + Long theSourceConceptPid, + String theSourceConceptDirectParentPids, + Collection theDesignations) { if (StringUtils.isNotEmpty(theCodeSystemVersion)) { if (isNoneBlank(theCodeSystem, theCode)) { if (theAdd && theAddedCodes.add(theCodeSystem + OUR_PIPE_CHARACTER + theCode)) { - theValueSetCodeAccumulator.includeConceptWithDesignations(theCodeSystem + OUR_PIPE_CHARACTER + 
theCodeSystemVersion, theCode, theDisplay, theDesignations, theSourceConceptPid, theSourceConceptDirectParentPids, theCodeSystemVersion); + theValueSetCodeAccumulator.includeConceptWithDesignations( + theCodeSystem + OUR_PIPE_CHARACTER + theCodeSystemVersion, + theCode, + theDisplay, + theDesignations, + theSourceConceptPid, + theSourceConceptDirectParentPids, + theCodeSystemVersion); return true; } if (!theAdd && theAddedCodes.remove(theCodeSystem + OUR_PIPE_CHARACTER + theCode)) { - theValueSetCodeAccumulator.excludeConcept(theCodeSystem + OUR_PIPE_CHARACTER + theCodeSystemVersion, theCode); + theValueSetCodeAccumulator.excludeConcept( + theCodeSystem + OUR_PIPE_CHARACTER + theCodeSystemVersion, theCode); return true; } } } else { if (theAdd && theAddedCodes.add(theCodeSystem + OUR_PIPE_CHARACTER + theCode)) { - theValueSetCodeAccumulator.includeConceptWithDesignations(theCodeSystem, theCode, theDisplay, theDesignations, theSourceConceptPid, theSourceConceptDirectParentPids, theCodeSystemVersion); + theValueSetCodeAccumulator.includeConceptWithDesignations( + theCodeSystem, + theCode, + theDisplay, + theDesignations, + theSourceConceptPid, + theSourceConceptDirectParentPids, + theCodeSystemVersion); return true; } @@ -331,10 +407,27 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { return false; } - private boolean addCodeIfNotAlreadyAdded(IValueSetConceptAccumulator theValueSetCodeAccumulator, Set theAddedCodes, Collection theDesignations, boolean theAdd, String theCodeSystem, String theCode, String theDisplay, Long theSourceConceptPid, String theSourceConceptDirectParentPids, String theSystemVersion) { + private boolean addCodeIfNotAlreadyAdded( + IValueSetConceptAccumulator theValueSetCodeAccumulator, + Set theAddedCodes, + Collection theDesignations, + boolean theAdd, + String theCodeSystem, + String theCode, + String theDisplay, + Long theSourceConceptPid, + String theSourceConceptDirectParentPids, + String theSystemVersion) { if (isNoneBlank(theCodeSystem, theCode)) { if (theAdd && theAddedCodes.add(theCodeSystem + OUR_PIPE_CHARACTER + theCode)) { - theValueSetCodeAccumulator.includeConceptWithDesignations(theCodeSystem, theCode, theDisplay, theDesignations, theSourceConceptPid, theSourceConceptDirectParentPids, theSystemVersion); + theValueSetCodeAccumulator.includeConceptWithDesignations( + theCodeSystem, + theCode, + theDisplay, + theDesignations, + theSourceConceptPid, + theSourceConceptDirectParentPids, + theSystemVersion); return true; } @@ -351,7 +444,12 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { boolean retVal = theSetToPopulate.add(theConcept); if (retVal) { if (theSetToPopulate.size() >= myStorageSettings.getMaximumExpansionSize()) { - String msg = myContext.getLocalizer().getMessage(TermReadSvcImpl.class, "expansionTooLarge", myStorageSettings.getMaximumExpansionSize()); + String msg = myContext + .getLocalizer() + .getMessage( + TermReadSvcImpl.class, + "expansionTooLarge", + myStorageSettings.getMaximumExpansionSize()); throw new ExpansionTooCostlyException(Msg.code(885) + msg); } } @@ -368,7 +466,8 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { public void deleteValueSetForResource(ResourceTable theResourceTable) { // Get existing entity so it can be deleted. 
- Optional optionalExistingTermValueSetById = myTermValueSetDao.findByResourcePid(theResourceTable.getId()); + Optional optionalExistingTermValueSetById = + myTermValueSetDao.findByResourcePid(theResourceTable.getId()); if (optionalExistingTermValueSetById.isPresent()) { TermValueSet existingTermValueSet = optionalExistingTermValueSetById.get(); @@ -393,30 +492,41 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { @Override @Transactional - public List expandValueSetIntoConceptList(@Nullable ValueSetExpansionOptions theExpansionOptions, @Nonnull String theValueSetCanonicalUrl) { - // TODO: DM 2019-09-10 - This is problematic because an incorrect URL that matches ValueSet.id will not be found in the terminology tables but will yield a ValueSet here. Depending on the ValueSet, the expansion may time-out. + public List expandValueSetIntoConceptList( + @Nullable ValueSetExpansionOptions theExpansionOptions, @Nonnull String theValueSetCanonicalUrl) { + // TODO: DM 2019-09-10 - This is problematic because an incorrect URL that matches ValueSet.id will not be found + // in the terminology tables but will yield a ValueSet here. Depending on the ValueSet, the expansion may + // time-out. ValueSet expanded = expandValueSet(theExpansionOptions, theValueSetCanonicalUrl); ArrayList retVal = new ArrayList<>(); - for (ValueSet.ValueSetExpansionContainsComponent nextContains : expanded.getExpansion().getContains()) { - retVal.add(new FhirVersionIndependentConcept(nextContains.getSystem(), nextContains.getCode(), nextContains.getDisplay(), nextContains.getVersion())); + for (ValueSet.ValueSetExpansionContainsComponent nextContains : + expanded.getExpansion().getContains()) { + retVal.add(new FhirVersionIndependentConcept( + nextContains.getSystem(), + nextContains.getCode(), + nextContains.getDisplay(), + nextContains.getVersion())); } return retVal; } @Override - public ValueSet expandValueSet(@Nullable ValueSetExpansionOptions theExpansionOptions, @Nonnull String theValueSetCanonicalUrl) { + public ValueSet expandValueSet( + @Nullable ValueSetExpansionOptions theExpansionOptions, @Nonnull String theValueSetCanonicalUrl) { ValueSet valueSet = fetchCanonicalValueSetFromCompleteContext(theValueSetCanonicalUrl); if (valueSet == null) { - throw new ResourceNotFoundException(Msg.code(886) + "Unknown ValueSet: " + UrlUtil.escapeUrlParam(theValueSetCanonicalUrl)); + throw new ResourceNotFoundException( + Msg.code(886) + "Unknown ValueSet: " + UrlUtil.escapeUrlParam(theValueSetCanonicalUrl)); } return expandValueSet(theExpansionOptions, valueSet); } @Override - public ValueSet expandValueSet(@Nullable ValueSetExpansionOptions theExpansionOptions, @Nonnull ValueSet theValueSetToExpand) { + public ValueSet expandValueSet( + @Nullable ValueSetExpansionOptions theExpansionOptions, @Nonnull ValueSet theValueSetToExpand) { String filter = null; if (theExpansionOptions != null) { filter = theExpansionOptions.getFilter(); @@ -424,14 +534,19 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { return doExpandValueSet(theExpansionOptions, theValueSetToExpand, ExpansionFilter.fromFilterString(filter)); } - private ValueSet doExpandValueSet(@Nullable ValueSetExpansionOptions theExpansionOptions, ValueSet theValueSetToExpand, ExpansionFilter theFilter) { + private ValueSet doExpandValueSet( + @Nullable ValueSetExpansionOptions theExpansionOptions, + ValueSet theValueSetToExpand, + ExpansionFilter theFilter) { 
ValidateUtil.isNotNullOrThrowUnprocessableEntity(theValueSetToExpand, "ValueSet to expand can not be null"); ValueSetExpansionOptions expansionOptions = provideExpansionOptions(theExpansionOptions); int offset = expansionOptions.getOffset(); int count = expansionOptions.getCount(); - ValueSetExpansionComponentWithConceptAccumulator accumulator = new ValueSetExpansionComponentWithConceptAccumulator(myContext, count, expansionOptions.isIncludeHierarchy()); + ValueSetExpansionComponentWithConceptAccumulator accumulator = + new ValueSetExpansionComponentWithConceptAccumulator( + myContext, count, expansionOptions.isIncludeHierarchy()); accumulator.setHardExpansionMaximumSize(myStorageSettings.getMaximumExpansionSize()); accumulator.setSkipCountRemaining(offset); accumulator.setIdentifier(UUID.randomUUID().toString()); @@ -459,9 +574,10 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { valueSet.setExpansion(accumulator); for (String next : accumulator.getMessages()) { - valueSet.getMeta().addExtension() - .setUrl(HapiExtensions.EXT_VALUESET_EXPANSION_MESSAGE) - .setValue(new StringType(next)); + valueSet.getMeta() + .addExtension() + .setUrl(HapiExtensions.EXT_VALUESET_EXPANSION_MESSAGE) + .setValue(new StringType(next)); } if (expansionOptions.isIncludeHierarchy()) { @@ -471,11 +587,17 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { return valueSet; } - private void expandValueSetIntoAccumulator(ValueSet theValueSetToExpand, ValueSetExpansionOptions theExpansionOptions, IValueSetConceptAccumulator theAccumulator, ExpansionFilter theFilter, boolean theAdd) { + private void expandValueSetIntoAccumulator( + ValueSet theValueSetToExpand, + ValueSetExpansionOptions theExpansionOptions, + IValueSetConceptAccumulator theAccumulator, + ExpansionFilter theFilter, + boolean theAdd) { Optional optionalTermValueSet; if (theValueSetToExpand.hasUrl()) { if (theValueSetToExpand.hasVersion()) { - optionalTermValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(theValueSetToExpand.getUrl(), theValueSetToExpand.getVersion()); + optionalTermValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion( + theValueSetToExpand.getUrl(), theValueSetToExpand.getVersion()); } else { optionalTermValueSet = findCurrentTermValueSet(theValueSetToExpand.getUrl()); } @@ -487,8 +609,15 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { * ValueSet doesn't exist in pre-expansion database, so perform in-memory expansion */ if (optionalTermValueSet.isEmpty()) { - ourLog.debug("ValueSet is not present in terminology tables. Will perform in-memory expansion without parameters. {}", getValueSetInfo(theValueSetToExpand)); - String msg = myContext.getLocalizer().getMessage(TermReadSvcImpl.class, "valueSetExpandedUsingInMemoryExpansion", getValueSetInfo(theValueSetToExpand)); + ourLog.debug( + "ValueSet is not present in terminology tables. Will perform in-memory expansion without parameters. 
{}", + getValueSetInfo(theValueSetToExpand)); + String msg = myContext + .getLocalizer() + .getMessage( + TermReadSvcImpl.class, + "valueSetExpandedUsingInMemoryExpansion", + getValueSetInfo(theValueSetToExpand)); theAccumulator.addMessage(msg); doExpandValueSet(theExpansionOptions, theValueSetToExpand, theAccumulator, theFilter); return; @@ -499,7 +628,14 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { */ TermValueSet termValueSet = optionalTermValueSet.get(); if (termValueSet.getExpansionStatus() != TermValueSetPreExpansionStatusEnum.EXPANDED) { - String msg = myContext.getLocalizer().getMessage(TermReadSvcImpl.class, "valueSetNotYetExpanded", getValueSetInfo(theValueSetToExpand), termValueSet.getExpansionStatus().name(), termValueSet.getExpansionStatus().getDescription()); + String msg = myContext + .getLocalizer() + .getMessage( + TermReadSvcImpl.class, + "valueSetNotYetExpanded", + getValueSetInfo(theValueSetToExpand), + termValueSet.getExpansionStatus().name(), + termValueSet.getExpansionStatus().getDescription()); theAccumulator.addMessage(msg); doExpandValueSet(theExpansionOptions, theValueSetToExpand, theAccumulator, theFilter); return; @@ -509,7 +645,9 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { * ValueSet is pre-expanded in database so let's use that */ String expansionTimestamp = toHumanReadableExpansionTimestamp(termValueSet); - String msg = myContext.getLocalizer().getMessage(TermReadSvcImpl.class, "valueSetExpandedUsingPreExpansion", expansionTimestamp); + String msg = myContext + .getLocalizer() + .getMessage(TermReadSvcImpl.class, "valueSetExpandedUsingPreExpansion", expansionTimestamp); theAccumulator.addMessage(msg); expandConcepts(theExpansionOptions, theAccumulator, termValueSet, theFilter, theAdd, isOracleDialect()); } @@ -518,8 +656,10 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { private String toHumanReadableExpansionTimestamp(TermValueSet termValueSet) { String expansionTimestamp = "(unknown)"; if (termValueSet.getExpansionTimestamp() != null) { - String timeElapsed = StopWatch.formatMillis(System.currentTimeMillis() - termValueSet.getExpansionTimestamp().getTime()); - expansionTimestamp = new InstantType(termValueSet.getExpansionTimestamp()).getValueAsString() + " (" + timeElapsed + " ago)"; + String timeElapsed = StopWatch.formatMillis(System.currentTimeMillis() + - termValueSet.getExpansionTimestamp().getTime()); + expansionTimestamp = new InstantType(termValueSet.getExpansionTimestamp()).getValueAsString() + " (" + + timeElapsed + " ago)"; } return expansionTimestamp; } @@ -528,8 +668,15 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { return myHibernatePropertiesProvider.getDialect() instanceof org.hibernate.dialect.Oracle12cDialect; } - private void expandConcepts(ValueSetExpansionOptions theExpansionOptions, IValueSetConceptAccumulator theAccumulator, TermValueSet theTermValueSet, ExpansionFilter theFilter, boolean theAdd, boolean theOracle) { - // NOTE: if you modifiy the logic here, look to `expandConceptsOracle` and see if your new code applies to its copy pasted sibling + private void expandConcepts( + ValueSetExpansionOptions theExpansionOptions, + IValueSetConceptAccumulator theAccumulator, + TermValueSet theTermValueSet, + ExpansionFilter theFilter, + boolean theAdd, + boolean theOracle) { + // NOTE: if you modifiy the logic here, look to `expandConceptsOracle` and see if your new code applies to its + // copy pasted sibling 
Integer offset = theAccumulator.getSkipCountRemaining(); offset = ObjectUtils.defaultIfNull(offset, 0); offset = Math.min(offset, theTermValueSet.getTotalConcepts().intValue()); @@ -544,26 +691,36 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { Collection conceptViews; boolean wasFilteredResult = false; String filterDisplayValue = null; - if (!theFilter.getFilters().isEmpty() && JpaConstants.VALUESET_FILTER_DISPLAY.equals(theFilter.getFilters().get(0).getProperty()) && theFilter.getFilters().get(0).getOp() == ValueSet.FilterOperator.EQUAL) { - filterDisplayValue = lowerCase(theFilter.getFilters().get(0).getValue().replace("%", "[%]")); + if (!theFilter.getFilters().isEmpty() + && JpaConstants.VALUESET_FILTER_DISPLAY.equals( + theFilter.getFilters().get(0).getProperty()) + && theFilter.getFilters().get(0).getOp() == ValueSet.FilterOperator.EQUAL) { + filterDisplayValue = + lowerCase(theFilter.getFilters().get(0).getValue().replace("%", "[%]")); String displayValue = "%" + lowerCase(filterDisplayValue) + "%"; if (theOracle) { - conceptViews = myTermValueSetConceptViewOracleDao.findByTermValueSetId(theTermValueSet.getId(), displayValue); + conceptViews = + myTermValueSetConceptViewOracleDao.findByTermValueSetId(theTermValueSet.getId(), displayValue); } else { conceptViews = myTermValueSetConceptViewDao.findByTermValueSetId(theTermValueSet.getId(), displayValue); } wasFilteredResult = true; } else { - // TODO JA HS: I'm pretty sure we are overfetching here. test says offset 3, count 4, but we are fetching index 3 -> 10 here, grabbing 7 concepts. - //Specifically this test testExpandInline_IncludePreExpandedValueSetByUri_FilterOnDisplay_LeftMatch_SelectRange + // TODO JA HS: I'm pretty sure we are overfetching here. test says offset 3, count 4, but we are fetching + // index 3 -> 10 here, grabbing 7 concepts. 
+ // Specifically this test + // testExpandInline_IncludePreExpandedValueSetByUri_FilterOnDisplay_LeftMatch_SelectRange if (theOracle) { - conceptViews = myTermValueSetConceptViewOracleDao.findByTermValueSetId(offset, toIndex, theTermValueSet.getId()); + conceptViews = myTermValueSetConceptViewOracleDao.findByTermValueSetId( + offset, toIndex, theTermValueSet.getId()); } else { - conceptViews = myTermValueSetConceptViewDao.findByTermValueSetId(offset, toIndex, theTermValueSet.getId()); + conceptViews = + myTermValueSetConceptViewDao.findByTermValueSetId(offset, toIndex, theTermValueSet.getId()); } theAccumulator.consumeSkipCount(offset); if (theAdd) { - theAccumulator.incrementOrDecrementTotalConcepts(true, theTermValueSet.getTotalConcepts().intValue()); + theAccumulator.incrementOrDecrementTotalConcepts( + true, theTermValueSet.getTotalConcepts().intValue()); } } @@ -584,18 +741,20 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { String display = conceptView.getConceptDisplay(); String systemVersion = conceptView.getConceptSystemVersion(); - //-- this is quick solution, may need to revisit + // -- this is a quick solution, may need to revisit if (!applyFilter(display, filterDisplayValue)) { continue; } Long conceptPid = conceptView.getConceptPid(); if (!pidToConcept.containsKey(conceptPid)) { - FhirVersionIndependentConcept concept = new FhirVersionIndependentConcept(system, code, display, systemVersion); + FhirVersionIndependentConcept concept = + new FhirVersionIndependentConcept(system, code, display, systemVersion); pidToConcept.put(conceptPid, concept); } - // TODO: DM 2019-08-17 - Implement includeDesignations parameter for $expand operation to designations optional. + // TODO: DM 2019-08-17 - Implement includeDesignations parameter for $expand operation to make designations + // optional. if (conceptView.getDesignationPid() != null) { TermConceptDesignation designation = new TermConceptDesignation(); @@ -609,7 +768,8 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { } if (++designationsExpanded % 250 == 0) { - logDesignationsExpanded("Expansion of designations in progress. ", theTermValueSet, designationsExpanded); + logDesignationsExpanded( + "Expansion of designations in progress. ", theTermValueSet, designationsExpanded); } } @@ -640,7 +800,14 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { Long sourceConceptPid = pidToSourcePid.get(nextPid); String sourceConceptDirectParentPids = pidToSourceDirectParentPids.get(nextPid); - theAccumulator.includeConceptWithDesignations(system, code, display, designations, sourceConceptPid, sourceConceptDirectParentPids, systemVersion); + theAccumulator.includeConceptWithDesignations( + system, + code, + display, + designations, + sourceConceptPid, + sourceConceptDirectParentPids, + systemVersion); } else { boolean removed = theAccumulator.excludeConcept(system, code); if (removed) { @@ -657,29 +824,37 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { logConceptsExpanded("Finished expanding concepts. 
", theTermValueSet, conceptsExpanded); } - private void logConceptsExpanded(String theLogDescriptionPrefix, TermValueSet theTermValueSet, int theConceptsExpanded) { + private void logConceptsExpanded( + String theLogDescriptionPrefix, TermValueSet theTermValueSet, int theConceptsExpanded) { if (theConceptsExpanded > 0) { - ourLog.debug("{}Have expanded {} concepts in ValueSet[{}]", theLogDescriptionPrefix, theConceptsExpanded, theTermValueSet.getUrl()); + ourLog.debug( + "{}Have expanded {} concepts in ValueSet[{}]", + theLogDescriptionPrefix, + theConceptsExpanded, + theTermValueSet.getUrl()); } } - private void logDesignationsExpanded(String theLogDescriptionPrefix, TermValueSet theTermValueSet, int theDesignationsExpanded) { + private void logDesignationsExpanded( + String theLogDescriptionPrefix, TermValueSet theTermValueSet, int theDesignationsExpanded) { if (theDesignationsExpanded > 0) { - ourLog.debug("{}Have expanded {} designations in ValueSet[{}]", theLogDescriptionPrefix, theDesignationsExpanded, theTermValueSet.getUrl()); + ourLog.debug( + "{}Have expanded {} designations in ValueSet[{}]", + theLogDescriptionPrefix, + theDesignationsExpanded, + theTermValueSet.getUrl()); } } public boolean applyFilter(final String theDisplay, final String theFilterDisplay) { - //-- safety check only, no need to apply filter - if (theDisplay == null || theFilterDisplay == null) - return true; + // -- safety check only, no need to apply filter + if (theDisplay == null || theFilterDisplay == null) return true; // -- sentence case - if (startsWithIgnoreCase(theDisplay, theFilterDisplay)) - return true; + if (startsWithIgnoreCase(theDisplay, theFilterDisplay)) return true; - //-- token case + // -- token case return startsWithByWordBoundaries(theDisplay, theFilterDisplay); } @@ -689,32 +864,39 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { List tokens = new ArrayList<>(); while (tok.hasMoreTokens()) { String token = tok.nextToken(); - if (startsWithIgnoreCase(token, theFilterDisplay)) - return true; + if (startsWithIgnoreCase(token, theFilterDisplay)) return true; tokens.add(token); } - // Allow to search by the end of the phrase. E.g. "working proficiency" will match "Limited working proficiency" + // Allow to search by the end of the phrase. E.g. 
"working proficiency" will match "Limited working + // proficiency" for (int start = 0; start <= tokens.size() - 1; ++start) { for (int end = start + 1; end <= tokens.size(); ++end) { String sublist = String.join(" ", tokens.subList(start, end)); - if (startsWithIgnoreCase(sublist, theFilterDisplay)) - return true; + if (startsWithIgnoreCase(sublist, theFilterDisplay)) return true; } } return false; } @Override - public void expandValueSet(ValueSetExpansionOptions theExpansionOptions, ValueSet theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator) { - doExpandValueSet(theExpansionOptions, theValueSetToExpand, theValueSetCodeAccumulator, ExpansionFilter.NO_FILTER); + public void expandValueSet( + ValueSetExpansionOptions theExpansionOptions, + ValueSet theValueSetToExpand, + IValueSetConceptAccumulator theValueSetCodeAccumulator) { + doExpandValueSet( + theExpansionOptions, theValueSetToExpand, theValueSetCodeAccumulator, ExpansionFilter.NO_FILTER); } /** * Note: Not transactional because specific calls within this method * get executed in a transaction */ - private void doExpandValueSet(ValueSetExpansionOptions theExpansionOptions, ValueSet theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator, @Nonnull ExpansionFilter theExpansionFilter) { + private void doExpandValueSet( + ValueSetExpansionOptions theExpansionOptions, + ValueSet theValueSetToExpand, + IValueSetConceptAccumulator theValueSetCodeAccumulator, + @Nonnull ExpansionFilter theExpansionFilter) { Set addedCodes = new HashSet<>(); StopWatch sw = new StopWatch(); @@ -725,29 +907,37 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { Integer skipCountRemaining = theValueSetCodeAccumulator.getSkipCountRemaining(); if (skipCountRemaining != null && skipCountRemaining > 0) { if (theValueSetToExpand.getCompose().getExclude().size() > 0) { - String msg = myContext.getLocalizer().getMessage(TermReadSvcImpl.class, "valueSetNotYetExpanded_OffsetNotAllowed", valueSetInfo); + String msg = myContext + .getLocalizer() + .getMessage(TermReadSvcImpl.class, "valueSetNotYetExpanded_OffsetNotAllowed", valueSetInfo); throw new InvalidRequestException(Msg.code(887) + msg); } } // Handle includes ourLog.debug("Handling includes"); - for (ValueSet.ConceptSetComponent include : theValueSetToExpand.getCompose().getInclude()) { - myTxTemplate.executeWithoutResult(tx -> - expandValueSetHandleIncludeOrExclude(theExpansionOptions, theValueSetCodeAccumulator, addedCodes, - include, true, theExpansionFilter)); + for (ValueSet.ConceptSetComponent include : + theValueSetToExpand.getCompose().getInclude()) { + myTxTemplate.executeWithoutResult(tx -> expandValueSetHandleIncludeOrExclude( + theExpansionOptions, theValueSetCodeAccumulator, addedCodes, include, true, theExpansionFilter)); } // Handle excludes ourLog.debug("Handling excludes"); - for (ValueSet.ConceptSetComponent exclude : theValueSetToExpand.getCompose().getExclude()) { - myTxTemplate.executeWithoutResult(tx -> - expandValueSetHandleIncludeOrExclude(theExpansionOptions, theValueSetCodeAccumulator, addedCodes, - exclude, false, ExpansionFilter.NO_FILTER)); + for (ValueSet.ConceptSetComponent exclude : + theValueSetToExpand.getCompose().getExclude()) { + myTxTemplate.executeWithoutResult(tx -> expandValueSetHandleIncludeOrExclude( + theExpansionOptions, + theValueSetCodeAccumulator, + addedCodes, + exclude, + false, + ExpansionFilter.NO_FILTER)); } if (theValueSetCodeAccumulator instanceof ValueSetConceptAccumulator) { - 
myTxTemplate.execute(t -> ((ValueSetConceptAccumulator) theValueSetCodeAccumulator).removeGapsFromConceptOrder()); + myTxTemplate.execute( + t -> ((ValueSetConceptAccumulator) theValueSetCodeAccumulator).removeGapsFromConceptOrder()); } ourLog.debug("Done working with {} in {}ms", valueSetInfo, sw.getMillis()); @@ -758,16 +948,10 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { boolean isIdentified = false; if (theValueSet.hasUrl()) { isIdentified = true; - sb - .append("ValueSet.url[") - .append(theValueSet.getUrl()) - .append("]"); + sb.append("ValueSet.url[").append(theValueSet.getUrl()).append("]"); } else if (theValueSet.hasId()) { isIdentified = true; - sb - .append("ValueSet.id[") - .append(theValueSet.getId()) - .append("]"); + sb.append("ValueSet.id[").append(theValueSet.getId()).append("]"); } if (!isIdentified) { @@ -780,12 +964,13 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { /** * Returns true if there are potentially more results to process. */ - private void expandValueSetHandleIncludeOrExclude(@Nullable ValueSetExpansionOptions theExpansionOptions, - IValueSetConceptAccumulator theValueSetCodeAccumulator, - Set theAddedCodes, - ValueSet.ConceptSetComponent theIncludeOrExclude, - boolean theAdd, - @Nonnull ExpansionFilter theExpansionFilter) { + private void expandValueSetHandleIncludeOrExclude( + @Nullable ValueSetExpansionOptions theExpansionOptions, + IValueSetConceptAccumulator theValueSetCodeAccumulator, + Set theAddedCodes, + ValueSet.ConceptSetComponent theIncludeOrExclude, + boolean theAdd, + @Nonnull ExpansionFilter theExpansionFilter) { String system = theIncludeOrExclude.getSystem(); boolean hasSystem = isNotBlank(system); @@ -793,37 +978,63 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { if (hasSystem) { - if (theExpansionFilter.hasCode() && theExpansionFilter.getSystem() != null && !system.equals(theExpansionFilter.getSystem())) { + if (theExpansionFilter.hasCode() + && theExpansionFilter.getSystem() != null + && !system.equals(theExpansionFilter.getSystem())) { return; } ourLog.debug("Starting {} expansion around CodeSystem: {}", (theAdd ? 
"inclusion" : "exclusion"), system); - Optional termCodeSystemVersion = optionalFindTermCodeSystemVersion(theIncludeOrExclude); + Optional termCodeSystemVersion = + optionalFindTermCodeSystemVersion(theIncludeOrExclude); if (termCodeSystemVersion.isPresent()) { - expandValueSetHandleIncludeOrExcludeUsingDatabase(theExpansionOptions, theValueSetCodeAccumulator, - theAddedCodes, theIncludeOrExclude, theAdd, theExpansionFilter, system, termCodeSystemVersion.get()); + expandValueSetHandleIncludeOrExcludeUsingDatabase( + theExpansionOptions, + theValueSetCodeAccumulator, + theAddedCodes, + theIncludeOrExclude, + theAdd, + theExpansionFilter, + system, + termCodeSystemVersion.get()); } else { if (theIncludeOrExclude.getConcept().size() > 0 && theExpansionFilter.hasCode()) { if (defaultString(theIncludeOrExclude.getSystem()).equals(theExpansionFilter.getSystem())) { - if (theIncludeOrExclude.getConcept().stream().noneMatch(t -> t.getCode().equals(theExpansionFilter.getCode()))) { + if (theIncludeOrExclude.getConcept().stream() + .noneMatch(t -> t.getCode().equals(theExpansionFilter.getCode()))) { return; } } } - Consumer consumer = c -> - addOrRemoveCode(theValueSetCodeAccumulator, theAddedCodes, theAdd, system, c.getCode(), c.getDisplay(), c.getSystemVersion()); + Consumer consumer = c -> addOrRemoveCode( + theValueSetCodeAccumulator, + theAddedCodes, + theAdd, + system, + c.getCode(), + c.getDisplay(), + c.getSystemVersion()); try { - ConversionContext40_50.INSTANCE.init(new VersionConvertor_40_50(new BaseAdvisor_40_50()), "ValueSet"); - org.hl7.fhir.r5.model.ValueSet.ConceptSetComponent includeOrExclude = ValueSet40_50.convertConceptSetComponent(theIncludeOrExclude); - new InMemoryTerminologyServerValidationSupport(myContext).expandValueSetIncludeOrExclude(new ValidationSupportContext(provideValidationSupport()), consumer, includeOrExclude); + ConversionContext40_50.INSTANCE.init( + new VersionConvertor_40_50(new BaseAdvisor_40_50()), "ValueSet"); + org.hl7.fhir.r5.model.ValueSet.ConceptSetComponent includeOrExclude = + ValueSet40_50.convertConceptSetComponent(theIncludeOrExclude); + new InMemoryTerminologyServerValidationSupport(myContext) + .expandValueSetIncludeOrExclude( + new ValidationSupportContext(provideValidationSupport()), + consumer, + includeOrExclude); } catch (InMemoryTerminologyServerValidationSupport.ExpansionCouldNotBeCompletedInternallyException e) { - if (theExpansionOptions != null && !theExpansionOptions.isFailOnMissingCodeSystem() && e.getFailureType() == InMemoryTerminologyServerValidationSupport.FailureType.UNKNOWN_CODE_SYSTEM) { + if (theExpansionOptions != null + && !theExpansionOptions.isFailOnMissingCodeSystem() + && e.getFailureType() + == InMemoryTerminologyServerValidationSupport.FailureType.UNKNOWN_CODE_SYSTEM) { return; } throw new InternalErrorException(Msg.code(888) + e); @@ -836,33 +1047,42 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { for (CanonicalType nextValueSet : theIncludeOrExclude.getValueSet()) { String valueSetUrl = nextValueSet.getValueAsString(); - ourLog.debug("Starting {} expansion around ValueSet: {}", (theAdd ? "inclusion" : "exclusion"), valueSetUrl); + ourLog.debug( + "Starting {} expansion around ValueSet: {}", (theAdd ? 
"inclusion" : "exclusion"), valueSetUrl); - ExpansionFilter subExpansionFilter = new ExpansionFilter(theExpansionFilter, theIncludeOrExclude.getFilter(), theValueSetCodeAccumulator.getCapacityRemaining()); + ExpansionFilter subExpansionFilter = new ExpansionFilter( + theExpansionFilter, + theIncludeOrExclude.getFilter(), + theValueSetCodeAccumulator.getCapacityRemaining()); - // TODO: DM 2019-09-10 - This is problematic because an incorrect URL that matches ValueSet.id will not be found in the terminology tables but will yield a ValueSet here. Depending on the ValueSet, the expansion may time-out. + // TODO: DM 2019-09-10 - This is problematic because an incorrect URL that matches ValueSet.id will not + // be found in the terminology tables but will yield a ValueSet here. Depending on the ValueSet, the + // expansion may time-out. ValueSet valueSet = fetchCanonicalValueSetFromCompleteContext(valueSetUrl); if (valueSet == null) { - throw new ResourceNotFoundException(Msg.code(889) + "Unknown ValueSet: " + UrlUtil.escapeUrlParam(valueSetUrl)); + throw new ResourceNotFoundException( + Msg.code(889) + "Unknown ValueSet: " + UrlUtil.escapeUrlParam(valueSetUrl)); } - expandValueSetIntoAccumulator(valueSet, theExpansionOptions, theValueSetCodeAccumulator, subExpansionFilter, theAdd); - + expandValueSetIntoAccumulator( + valueSet, theExpansionOptions, theValueSetCodeAccumulator, subExpansionFilter, theAdd); } } else { - throw new InvalidRequestException(Msg.code(890) + "ValueSet contains " + (theAdd ? "include" : "exclude") + " criteria with no system defined"); + throw new InvalidRequestException(Msg.code(890) + "ValueSet contains " + (theAdd ? "include" : "exclude") + + " criteria with no system defined"); } - - } - private Optional optionalFindTermCodeSystemVersion(ValueSet.ConceptSetComponent theIncludeOrExclude) { + private Optional optionalFindTermCodeSystemVersion( + ValueSet.ConceptSetComponent theIncludeOrExclude) { if (isEmpty(theIncludeOrExclude.getVersion())) { - return Optional.ofNullable(myCodeSystemDao.findByCodeSystemUri(theIncludeOrExclude.getSystem())).map(TermCodeSystem::getCurrentVersion); + return Optional.ofNullable(myCodeSystemDao.findByCodeSystemUri(theIncludeOrExclude.getSystem())) + .map(TermCodeSystem::getCurrentVersion); } else { - return Optional.ofNullable(myCodeSystemVersionDao.findByCodeSystemUriAndVersion(theIncludeOrExclude.getSystem(), theIncludeOrExclude.getVersion())); + return Optional.ofNullable(myCodeSystemVersionDao.findByCodeSystemUriAndVersion( + theIncludeOrExclude.getSystem(), theIncludeOrExclude.getVersion())); } } @@ -871,14 +1091,14 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { } private void expandValueSetHandleIncludeOrExcludeUsingDatabase( - ValueSetExpansionOptions theExpansionOptions, - IValueSetConceptAccumulator theValueSetCodeAccumulator, - Set theAddedCodes, - ValueSet.ConceptSetComponent theIncludeOrExclude, - boolean theAdd, - @Nonnull ExpansionFilter theExpansionFilter, - String theSystem, - TermCodeSystemVersion theTermCodeSystemVersion) { + ValueSetExpansionOptions theExpansionOptions, + IValueSetConceptAccumulator theValueSetCodeAccumulator, + Set theAddedCodes, + ValueSet.ConceptSetComponent theIncludeOrExclude, + boolean theAdd, + @Nonnull ExpansionFilter theExpansionFilter, + String theSystem, + TermCodeSystemVersion theTermCodeSystemVersion) { StopWatch fullOperationSw = new StopWatch(); String includeOrExcludeVersion = theIncludeOrExclude.getVersion(); @@ -888,7 +1108,13 @@ public class 
TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { * since we're going to do it without the database. */ if (!isHibernateSearchEnabled()) { - expandWithoutHibernateSearch(theValueSetCodeAccumulator, theTermCodeSystemVersion, theAddedCodes, theIncludeOrExclude, theSystem, theAdd); + expandWithoutHibernateSearch( + theValueSetCodeAccumulator, + theTermCodeSystemVersion, + theAddedCodes, + theIncludeOrExclude, + theSystem, + theAdd); return; } @@ -904,46 +1130,63 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { } int chunkSize = chunkSizeOpt.get(); - SearchProperties searchProps = buildSearchScroll(theTermCodeSystemVersion, theExpansionFilter, theSystem, - theIncludeOrExclude, chunkSize, includeOrExcludeVersion); + SearchProperties searchProps = buildSearchScroll( + theTermCodeSystemVersion, + theExpansionFilter, + theSystem, + theIncludeOrExclude, + chunkSize, + includeOrExcludeVersion); int accumulatedBatchesSoFar = 0; try (SearchScroll scroll = searchProps.getSearchScroll()) { - ourLog.debug("Beginning batch expansion for {} with max results per batch: {}", (theAdd ? "inclusion" : "exclusion"), chunkSize); + ourLog.debug( + "Beginning batch expansion for {} with max results per batch: {}", + (theAdd ? "inclusion" : "exclusion"), + chunkSize); for (SearchScrollResult chunk = scroll.next(); chunk.hasHits(); chunk = scroll.next()) { int countForBatch = 0; - List pids = chunk.hits() - .stream() - .map(t -> (Long) t.id()) - .collect(Collectors.toList()); + List pids = chunk.hits().stream().map(t -> (Long) t.id()).collect(Collectors.toList()); List termConcepts = myTermConceptDao.fetchConceptsAndDesignationsByPid(pids); // If the include section had multiple codes, return the codes in the same order termConcepts = sortTermConcepts(searchProps, termConcepts); - // int firstResult = theQueryIndex * maxResultsPerBatch;// TODO GGG HS we lose the ability to check the index of the first result, so just best-guessing it here. + // int firstResult = theQueryIndex * maxResultsPerBatch;// TODO GGG HS we lose the ability to check the + // index of the first result, so just best-guessing it here. Optional expansionStepOpt = searchProps.getExpansionStepOpt(); int delta = 0; for (TermConcept concept : termConcepts) { count++; countForBatch++; if (theAdd && expansionStepOpt.isPresent()) { - ValueSet.ConceptReferenceComponent theIncludeConcept = getMatchedConceptIncludedInValueSet(theIncludeOrExclude, concept); + ValueSet.ConceptReferenceComponent theIncludeConcept = + getMatchedConceptIncludedInValueSet(theIncludeOrExclude, concept); if (theIncludeConcept != null && isNotBlank(theIncludeConcept.getDisplay())) { concept.setDisplay(theIncludeConcept.getDisplay()); } } - boolean added = addCodeIfNotAlreadyAdded(theExpansionOptions, theValueSetCodeAccumulator, theAddedCodes, concept, theAdd, includeOrExcludeVersion); + boolean added = addCodeIfNotAlreadyAdded( + theExpansionOptions, + theValueSetCodeAccumulator, + theAddedCodes, + concept, + theAdd, + includeOrExcludeVersion); if (added) { delta++; } } - ourLog.debug("Batch expansion scroll for {} with offset {} produced {} results in {}ms", - (theAdd ? "inclusion" : "exclusion"), accumulatedBatchesSoFar, chunk.hits().size(), chunk.took().toMillis()); + ourLog.debug( + "Batch expansion scroll for {} with offset {} produced {} results in {}ms", + (theAdd ? 
"inclusion" : "exclusion"), + accumulatedBatchesSoFar, + chunk.hits().size(), + chunk.took().toMillis()); theValueSetCodeAccumulator.incrementOrDecrementTotalConcepts(theAdd, delta); accumulatedBatchesSoFar += countForBatch; @@ -953,8 +1196,11 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { myEntityManager.clear(); } - ourLog.debug("Expansion for {} produced {} results in {}ms", - (theAdd ? "inclusion" : "exclusion"), count, fullOperationSw.getMillis()); + ourLog.debug( + "Expansion for {} produced {} results in {}ms", + (theAdd ? "inclusion" : "exclusion"), + count, + fullOperationSw.getMillis()); } } @@ -975,7 +1221,8 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { return termConcepts; } - private Optional getScrollChunkSize(boolean theAdd, IValueSetConceptAccumulator theValueSetCodeAccumulator) { + private Optional getScrollChunkSize( + boolean theAdd, IValueSetConceptAccumulator theValueSetCodeAccumulator) { int maxResultsPerBatch = SearchBuilder.getMaximumPageSize(); /* @@ -991,16 +1238,19 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { return maxResultsPerBatch > 0 ? Optional.of(maxResultsPerBatch) : Optional.empty(); } - private SearchProperties buildSearchScroll(TermCodeSystemVersion theTermCodeSystemVersion, - ExpansionFilter theExpansionFilter, - String theSystem, - ValueSet.ConceptSetComponent theIncludeOrExclude, - Integer theScrollChunkSize, String theIncludeOrExcludeVersion) { + private SearchProperties buildSearchScroll( + TermCodeSystemVersion theTermCodeSystemVersion, + ExpansionFilter theExpansionFilter, + String theSystem, + ValueSet.ConceptSetComponent theIncludeOrExclude, + Integer theScrollChunkSize, + String theIncludeOrExcludeVersion) { SearchSession searchSession = Search.session(myEntityManager); - //Manually building a predicate since we need to throw it around. - SearchPredicateFactory predicate = searchSession.scope(TermConcept.class).predicate(); + // Manually building a predicate since we need to throw it around. + SearchPredicateFactory predicate = + searchSession.scope(TermConcept.class).predicate(); - //Build the top-level expansion on filters. + // Build the top-level expansion on filters. PredicateFinalStep step = predicate.bool(b -> { b.must(predicate.match().field("myCodeSystemVersionPid").matching(theTermCodeSystemVersion.getPid())); @@ -1019,18 +1269,16 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { SearchProperties returnProps = new SearchProperties(); - List codes = theIncludeOrExclude - .getConcept() - .stream() - .filter(Objects::nonNull) - .map(ValueSet.ConceptReferenceComponent::getCode) - .filter(StringUtils::isNotBlank) - .collect(Collectors.toList()); + List codes = theIncludeOrExclude.getConcept().stream() + .filter(Objects::nonNull) + .map(ValueSet.ConceptReferenceComponent::getCode) + .filter(StringUtils::isNotBlank) + .collect(Collectors.toList()); returnProps.setIncludeOrExcludeCodes(codes); Optional expansionStepOpt = buildExpansionPredicate(codes, predicate); - final PredicateFinalStep finishedQuery = expansionStepOpt.isPresent() - ? predicate.bool().must(step).must(expansionStepOpt.get()) : step; + final PredicateFinalStep finishedQuery = + expansionStepOpt.isPresent() ? 
predicate.bool().must(step).must(expansionStepOpt.get()) : step; returnProps.setExpansionStepOpt(expansionStepOpt); /* @@ -1038,40 +1286,40 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { * be due to the dark arts that is memory management. Will monitor but not do anything about this right now. */ - //BooleanQuery.setMaxClauseCount(SearchBuilder.getMaximumPageSize()); - //TODO GGG HS looks like we can't set max clause count, but it can be set server side. - //BooleanQuery.setMaxClauseCount(10000); + // BooleanQuery.setMaxClauseCount(SearchBuilder.getMaximumPageSize()); + // TODO GGG HS looks like we can't set max clause count, but it can be set server side. + // BooleanQuery.setMaxClauseCount(10000); // JM 22-02-15 - Hopefully increasing maxClauseCount should be not needed anymore SearchQuery termConceptsQuery = searchSession - .search(TermConcept.class) - .selectEntityReference() - .where(f -> finishedQuery) - .toQuery(); + .search(TermConcept.class) + .selectEntityReference() + .where(f -> finishedQuery) + .toQuery(); returnProps.setSearchScroll(termConceptsQuery.scroll(theScrollChunkSize)); return returnProps; } - private ValueSet.ConceptReferenceComponent getMatchedConceptIncludedInValueSet(ValueSet.ConceptSetComponent theIncludeOrExclude, TermConcept concept) { - return theIncludeOrExclude - .getConcept() - .stream().filter(includedConcept -> includedConcept.getCode().equalsIgnoreCase(concept.getCode())) - .findFirst() - .orElse(null); + private ValueSet.ConceptReferenceComponent getMatchedConceptIncludedInValueSet( + ValueSet.ConceptSetComponent theIncludeOrExclude, TermConcept concept) { + return theIncludeOrExclude.getConcept().stream() + .filter(includedConcept -> includedConcept.getCode().equalsIgnoreCase(concept.getCode())) + .findFirst() + .orElse(null); } /** * Helper method which builds a predicate for the expansion */ - private Optional buildExpansionPredicate(List theCodes, SearchPredicateFactory thePredicate) { + private Optional buildExpansionPredicate( + List theCodes, SearchPredicateFactory thePredicate) { if (CollectionUtils.isEmpty(theCodes)) { return Optional.empty(); } if (theCodes.size() < BooleanQuery.getMaxClauseCount()) { - return Optional.of(thePredicate.simpleQueryString() - .field("myCode").matching(String.join(" | ", theCodes))); + return Optional.of(thePredicate.simpleQueryString().field("myCode").matching(String.join(" | ", theCodes))); } // Number of codes is larger than maxClauseCount, so we split the query in several clauses @@ -1099,12 +1347,19 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { return codeSystemUrlAndVersion; } - private @Nonnull - ValueSetExpansionOptions provideExpansionOptions(@Nullable ValueSetExpansionOptions theExpansionOptions) { + private @Nonnull ValueSetExpansionOptions provideExpansionOptions( + @Nullable ValueSetExpansionOptions theExpansionOptions) { return Objects.requireNonNullElse(theExpansionOptions, DEFAULT_EXPANSION_OPTIONS); } - private void addOrRemoveCode(IValueSetConceptAccumulator theValueSetCodeAccumulator, Set theAddedCodes, boolean theAdd, String theSystem, String theCode, String theDisplay, String theSystemVersion) { + private void addOrRemoveCode( + IValueSetConceptAccumulator theValueSetCodeAccumulator, + Set theAddedCodes, + boolean theAdd, + String theSystem, + String theCode, + String theDisplay, + String theSystemVersion) { if (theAdd && theAddedCodes.add(theSystem + OUR_PIPE_CHARACTER + theCode)) { 
theValueSetCodeAccumulator.includeConcept(theSystem, theCode, theDisplay, null, null, theSystemVersion); } @@ -1113,13 +1368,18 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { } } - private void handleFilter(String theCodeSystemIdentifier, SearchPredicateFactory theF, BooleanPredicateClausesStep theB, ValueSet.ConceptSetFilterComponent theFilter) { + private void handleFilter( + String theCodeSystemIdentifier, + SearchPredicateFactory theF, + BooleanPredicateClausesStep theB, + ValueSet.ConceptSetFilterComponent theFilter) { if (isBlank(theFilter.getValue()) && theFilter.getOp() == null && isBlank(theFilter.getProperty())) { return; } if (isBlank(theFilter.getValue()) || theFilter.getOp() == null || isBlank(theFilter.getProperty())) { - throw new InvalidRequestException(Msg.code(891) + "Invalid filter, must have fields populated: property op value"); + throw new InvalidRequestException( + Msg.code(891) + "Invalid filter, must have fields populated: property op value"); } switch (theFilter.getProperty()) { @@ -1158,15 +1418,20 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { } } - private void handleFilterPropertyDefault(SearchPredicateFactory theF, - BooleanPredicateClausesStep theB, ValueSet.ConceptSetFilterComponent theFilter) { + private void handleFilterPropertyDefault( + SearchPredicateFactory theF, + BooleanPredicateClausesStep theB, + ValueSet.ConceptSetFilterComponent theFilter) { String value = theFilter.getValue(); Term term = new Term(CONCEPT_PROPERTY_PREFIX_NAME + theFilter.getProperty(), value); theB.must(theF.match().field(term.field()).matching(term.text())); } - private void handleFilterRegex(SearchPredicateFactory theF, BooleanPredicateClausesStep theB, ValueSet.ConceptSetFilterComponent theFilter) { + private void handleFilterRegex( + SearchPredicateFactory theF, + BooleanPredicateClausesStep theB, + ValueSet.ConceptSetFilterComponent theFilter) { /* * We treat the regex filter as a match on the regex * anywhere in the property string. 
The spec does not @@ -1186,12 +1451,14 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { } theB.must(theF.regexp() - .field(CONCEPT_PROPERTY_PREFIX_NAME + theFilter.getProperty()) - .matching(value)); + .field(CONCEPT_PROPERTY_PREFIX_NAME + theFilter.getProperty()) + .matching(value)); } - private void handleFilterLoincCopyright(SearchPredicateFactory theF, BooleanPredicateClausesStep theB, - ValueSet.ConceptSetFilterComponent theFilter) { + private void handleFilterLoincCopyright( + SearchPredicateFactory theF, + BooleanPredicateClausesStep theB, + ValueSet.ConceptSetFilterComponent theFilter) { if (theFilter.getOp() == ValueSet.FilterOperator.EQUAL) { @@ -1223,7 +1490,11 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { } @SuppressWarnings("EnumSwitchStatementWhichMissesCases") - private void handleFilterLoincAncestor(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep b, ValueSet.ConceptSetFilterComponent theFilter) { + private void handleFilterLoincAncestor( + String theSystem, + SearchPredicateFactory f, + BooleanPredicateClausesStep b, + ValueSet.ConceptSetFilterComponent theFilter) { switch (theFilter.getOp()) { case EQUAL: addLoincFilterAncestorEqual(theSystem, f, b, theFilter); @@ -1232,31 +1503,47 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { addLoincFilterAncestorIn(theSystem, f, b, theFilter); break; default: - throw new InvalidRequestException(Msg.code(892) + "Don't know how to handle op=" + theFilter.getOp() + " on property " + theFilter.getProperty()); + throw new InvalidRequestException(Msg.code(892) + "Don't know how to handle op=" + theFilter.getOp() + + " on property " + theFilter.getProperty()); } - } - private void addLoincFilterAncestorEqual(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep b, ValueSet.ConceptSetFilterComponent theFilter) { + private void addLoincFilterAncestorEqual( + String theSystem, + SearchPredicateFactory f, + BooleanPredicateClausesStep b, + ValueSet.ConceptSetFilterComponent theFilter) { addLoincFilterAncestorEqual(theSystem, f, b, theFilter.getProperty(), theFilter.getValue()); } - private void addLoincFilterAncestorEqual(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep b, String theProperty, String theValue) { + private void addLoincFilterAncestorEqual( + String theSystem, + SearchPredicateFactory f, + BooleanPredicateClausesStep b, + String theProperty, + String theValue) { List terms = getAncestorTerms(theSystem, theProperty, theValue); - b.must(f.bool(innerB -> terms.forEach(term -> innerB.should(f.match().field(term.field()).matching(term.text()))))); + b.must(f.bool(innerB -> terms.forEach( + term -> innerB.should(f.match().field(term.field()).matching(term.text()))))); } - private void addLoincFilterAncestorIn(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep b, ValueSet.ConceptSetFilterComponent theFilter) { + private void addLoincFilterAncestorIn( + String theSystem, + SearchPredicateFactory f, + BooleanPredicateClausesStep b, + ValueSet.ConceptSetFilterComponent theFilter) { String[] values = theFilter.getValue().split(","); List terms = new ArrayList<>(); for (String value : values) { terms.addAll(getAncestorTerms(theSystem, theFilter.getProperty(), value)); } - b.must(f.bool(innerB -> terms.forEach(term -> innerB.should(f.match().field(term.field()).matching(term.text()))))); + b.must(f.bool(innerB -> terms.forEach( + term -> 
innerB.should(f.match().field(term.field()).matching(term.text()))))); } @SuppressWarnings("EnumSwitchStatementWhichMissesCases") - private void handleFilterLoincParentChild(SearchPredicateFactory f, BooleanPredicateClausesStep b, ValueSet.ConceptSetFilterComponent theFilter) { + private void handleFilterLoincParentChild( + SearchPredicateFactory f, BooleanPredicateClausesStep b, ValueSet.ConceptSetFilterComponent theFilter) { switch (theFilter.getOp()) { case EQUAL: addLoincFilterParentChildEqual(f, b, theFilter.getProperty(), theFilter.getValue()); @@ -1265,11 +1552,13 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { addLoincFilterParentChildIn(f, b, theFilter); break; default: - throw new InvalidRequestException(Msg.code(893) + "Don't know how to handle op=" + theFilter.getOp() + " on property " + theFilter.getProperty()); + throw new InvalidRequestException(Msg.code(893) + "Don't know how to handle op=" + theFilter.getOp() + + " on property " + theFilter.getProperty()); } } - private void addLoincFilterParentChildIn(SearchPredicateFactory f, BooleanPredicateClausesStep b, ValueSet.ConceptSetFilterComponent theFilter) { + private void addLoincFilterParentChildIn( + SearchPredicateFactory f, BooleanPredicateClausesStep b, ValueSet.ConceptSetFilterComponent theFilter) { String[] values = theFilter.getValue().split(","); List terms = new ArrayList<>(); for (String value : values) { @@ -1277,19 +1566,26 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { terms.add(getPropertyTerm(theFilter.getProperty(), value)); } - b.must(f.bool(innerB -> terms.forEach(term -> innerB.should(f.match().field(term.field()).matching(term.text()))))); + b.must(f.bool(innerB -> terms.forEach( + term -> innerB.should(f.match().field(term.field()).matching(term.text()))))); } - private void addLoincFilterParentChildEqual(SearchPredicateFactory f, BooleanPredicateClausesStep b, String theProperty, String theValue) { + private void addLoincFilterParentChildEqual( + SearchPredicateFactory f, BooleanPredicateClausesStep b, String theProperty, String theValue) { logFilteringValueOnProperty(theValue, theProperty); b.must(f.match().field(CONCEPT_PROPERTY_PREFIX_NAME + theProperty).matching(theValue)); } - private void handleFilterConceptAndCode(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep b, ValueSet.ConceptSetFilterComponent theFilter) { + private void handleFilterConceptAndCode( + String theSystem, + SearchPredicateFactory f, + BooleanPredicateClausesStep b, + ValueSet.ConceptSetFilterComponent theFilter) { TermConcept code = findCodeForFilterCriteria(theSystem, theFilter); if (theFilter.getOp() == ValueSet.FilterOperator.ISA) { - ourLog.debug(" * Filtering on codes with a parent of {}/{}/{}", code.getId(), code.getCode(), code.getDisplay()); + ourLog.debug( + " * Filtering on codes with a parent of {}/{}/{}", code.getId(), code.getCode(), code.getDisplay()); b.must(f.match().field("myParentPids").matching("" + code.getId())); } else { @@ -1300,17 +1596,21 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { @Nonnull private TermConcept findCodeForFilterCriteria(String theSystem, ValueSet.ConceptSetFilterComponent theFilter) { return findCode(theSystem, theFilter.getValue()) - .orElseThrow(() -> new InvalidRequestException(Msg.code(2071) + "Invalid filter criteria - code does not exist: {" + Constants.codeSystemWithDefaultDescription(theSystem) + "}" + theFilter.getValue())); + .orElseThrow(() -> + new 
InvalidRequestException(Msg.code(2071) + "Invalid filter criteria - code does not exist: {" + + Constants.codeSystemWithDefaultDescription(theSystem) + "}" + theFilter.getValue())); } private void throwInvalidFilter(ValueSet.ConceptSetFilterComponent theFilter, String theErrorSuffix) { - throw new InvalidRequestException(Msg.code(894) + "Don't know how to handle op=" + theFilter.getOp() + " on property " + theFilter.getProperty() + theErrorSuffix); + throw new InvalidRequestException(Msg.code(894) + "Don't know how to handle op=" + theFilter.getOp() + + " on property " + theFilter.getProperty() + theErrorSuffix); } private void isCodeSystemLoincOrThrowInvalidRequestException(String theSystemIdentifier, String theProperty) { String systemUrl = getUrlFromIdentifier(theSystemIdentifier); if (!isCodeSystemLoinc(systemUrl)) { - throw new InvalidRequestException(Msg.code(895) + "Invalid filter, property " + theProperty + " is LOINC-specific and cannot be used with system: " + systemUrl); + throw new InvalidRequestException(Msg.code(895) + "Invalid filter, property " + theProperty + + " is LOINC-specific and cannot be used with system: " + systemUrl); } } @@ -1318,7 +1618,8 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { return LOINC_URI.equals(theSystem); } - private void handleFilterDisplay(SearchPredicateFactory f, BooleanPredicateClausesStep b, ValueSet.ConceptSetFilterComponent theFilter) { + private void handleFilterDisplay( + SearchPredicateFactory f, BooleanPredicateClausesStep b, ValueSet.ConceptSetFilterComponent theFilter) { if (theFilter.getProperty().equals("display:exact") && theFilter.getOp() == ValueSet.FilterOperator.EQUAL) { addDisplayFilterExact(f, b, theFilter); } else if (theFilter.getProperty().equals("display") && theFilter.getOp() == ValueSet.FilterOperator.EQUAL) { @@ -1330,18 +1631,26 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { } } - private void addDisplayFilterExact(SearchPredicateFactory f, BooleanPredicateClausesStep bool, ValueSet.ConceptSetFilterComponent nextFilter) { + private void addDisplayFilterExact( + SearchPredicateFactory f, + BooleanPredicateClausesStep bool, + ValueSet.ConceptSetFilterComponent nextFilter) { bool.must(f.phrase().field("myDisplay").matching(nextFilter.getValue())); } - private void addDisplayFilterInexact(SearchPredicateFactory f, BooleanPredicateClausesStep bool, ValueSet.ConceptSetFilterComponent nextFilter) { + private void addDisplayFilterInexact( + SearchPredicateFactory f, + BooleanPredicateClausesStep bool, + ValueSet.ConceptSetFilterComponent nextFilter) { bool.must(f.phrase() - .field("myDisplay").boost(4.0f) - .field("myDisplayWordEdgeNGram").boost(1.0f) - .field("myDisplayEdgeNGram").boost(1.0f) - .matching(nextFilter.getValue().toLowerCase()) - .slop(2) - ); + .field("myDisplay") + .boost(4.0f) + .field("myDisplayWordEdgeNGram") + .boost(1.0f) + .field("myDisplayEdgeNGram") + .boost(1.0f) + .matching(nextFilter.getValue().toLowerCase()) + .slop(2)); } private Term getPropertyTerm(String theProperty, String theValue) { @@ -1352,7 +1661,8 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { List retVal = new ArrayList<>(); TermConcept code = findCode(theSystem, theValue) - .orElseThrow(() -> new InvalidRequestException("Invalid filter criteria - code does not exist: {" + Constants.codeSystemWithDefaultDescription(theSystem) + "}" + theValue)); + .orElseThrow(() -> new InvalidRequestException("Invalid filter criteria - code does not exist: {" + 
+ Constants.codeSystemWithDefaultDescription(theSystem) + "}" + theValue)); retVal.add(new Term("myParentPids", "" + code.getId())); logFilteringValueOnProperty(theValue, theProperty); @@ -1361,7 +1671,11 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { } @SuppressWarnings("EnumSwitchStatementWhichMissesCases") - private void handleFilterLoincDescendant(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep b, ValueSet.ConceptSetFilterComponent theFilter) { + private void handleFilterLoincDescendant( + String theSystem, + SearchPredicateFactory f, + BooleanPredicateClausesStep b, + ValueSet.ConceptSetFilterComponent theFilter) { switch (theFilter.getOp()) { case EQUAL: addLoincFilterDescendantEqual(theSystem, f, b, theFilter); @@ -1370,17 +1684,22 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { addLoincFilterDescendantIn(theSystem, f, b, theFilter); break; default: - throw new InvalidRequestException(Msg.code(896) + "Don't know how to handle op=" + theFilter.getOp() + " on property " + theFilter.getProperty()); + throw new InvalidRequestException(Msg.code(896) + "Don't know how to handle op=" + theFilter.getOp() + + " on property " + theFilter.getProperty()); } } - private void addLoincFilterDescendantEqual(String theSystem, SearchPredicateFactory f, - BooleanPredicateClausesStep b, ValueSet.ConceptSetFilterComponent theFilter) { + private void addLoincFilterDescendantEqual( + String theSystem, + SearchPredicateFactory f, + BooleanPredicateClausesStep b, + ValueSet.ConceptSetFilterComponent theFilter) { List parentPids = getCodeParentPids(theSystem, theFilter.getProperty(), theFilter.getValue()); if (parentPids.isEmpty()) { // Can't return empty must, because it wil match according to other predicates. 
- // Some day there will be a 'matchNone' predicate (https://discourse.hibernate.org/t/fail-fast-predicate/6062) + // Some day there will be a 'matchNone' predicate + // (https://discourse.hibernate.org/t/fail-fast-predicate/6062) b.mustNot(f.matchAll()); return; } @@ -1389,7 +1708,6 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { innerB.minimumShouldMatchNumber(1); parentPids.forEach(pid -> innerB.should(f.match().field("myId").matching(pid))); })); - } /** @@ -1397,8 +1715,11 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { * Strategy is to find codes which have their pId(s) in the list of the parentId(s) of all the TermConcept(s) * representing the codes in theFilter.getValue() */ - private void addLoincFilterDescendantIn(String theSystem, SearchPredicateFactory f, - BooleanPredicateClausesStep b, ValueSet.ConceptSetFilterComponent theFilter) { + private void addLoincFilterDescendantIn( + String theSystem, + SearchPredicateFactory f, + BooleanPredicateClausesStep b, + ValueSet.ConceptSetFilterComponent theFilter) { String[] values = theFilter.getValue().split(","); if (values.length == 0) { @@ -1408,8 +1729,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { List descendantCodePidList = getMultipleCodeParentPids(theSystem, theFilter.getProperty(), values); b.must(f.bool(innerB -> descendantCodePidList.forEach( - pId -> innerB.should(f.match().field("myId").matching(pId)) - ))); + pId -> innerB.should(f.match().field("myId").matching(pId))))); } /** @@ -1417,14 +1737,14 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { */ private List getCodeParentPids(String theSystem, String theProperty, String theValue) { TermConcept code = findCode(theSystem, theValue) - .orElseThrow(() -> new InvalidRequestException("Invalid filter criteria - code does not exist: {" + - Constants.codeSystemWithDefaultDescription(theSystem) + "}" + theValue)); + .orElseThrow(() -> new InvalidRequestException("Invalid filter criteria - code does not exist: {" + + Constants.codeSystemWithDefaultDescription(theSystem) + "}" + theValue)); String[] parentPids = code.getParentPidsAsString().split(" "); List retVal = Arrays.stream(parentPids) - .filter(pid -> !StringUtils.equals(pid, "NONE")) - .map(Long::parseLong) - .collect(Collectors.toList()); + .filter(pid -> !StringUtils.equals(pid, "NONE")) + .map(Long::parseLong) + .collect(Collectors.toList()); logFilteringValueOnProperty(theValue, theProperty); return retVal; } @@ -1437,15 +1757,15 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { List termConcepts = findCodes(theSystem, valuesList); if (valuesList.size() != termConcepts.size()) { String exMsg = getTermConceptsFetchExceptionMsg(termConcepts, valuesList); - throw new InvalidRequestException(Msg.code(2064) + "Invalid filter criteria - {" + - Constants.codeSystemWithDefaultDescription(theSystem) + "}: " + exMsg); + throw new InvalidRequestException(Msg.code(2064) + "Invalid filter criteria - {" + + Constants.codeSystemWithDefaultDescription(theSystem) + "}: " + exMsg); } List retVal = termConcepts.stream() - .flatMap(tc -> Arrays.stream(tc.getParentPidsAsString().split(" "))) - .filter(pid -> !StringUtils.equals(pid, "NONE")) - .map(Long::parseLong) - .collect(Collectors.toList()); + .flatMap(tc -> Arrays.stream(tc.getParentPidsAsString().split(" "))) + .filter(pid -> !StringUtils.equals(pid, "NONE")) + .map(Long::parseLong) + .collect(Collectors.toList()); 
logFilteringValueOnProperties(valuesList, theProperty); @@ -1458,19 +1778,24 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { private String getTermConceptsFetchExceptionMsg(List theTermConcepts, List theValues) { // case: more TermConcept(s) retrieved than codes queried if (theTermConcepts.size() > theValues.size()) { - return "Invalid filter criteria - More TermConcepts were found than indicated codes. Queried codes: [" + - join(",", theValues + "]; Obtained TermConcept IDs, codes: [" + - theTermConcepts.stream().map(tc -> tc.getId() + ", " + tc.getCode()) - .collect(joining("; ")) + "]"); + return "Invalid filter criteria - More TermConcepts were found than indicated codes. Queried codes: [" + + join( + ",", + theValues + "]; Obtained TermConcept IDs, codes: [" + + theTermConcepts.stream() + .map(tc -> tc.getId() + ", " + tc.getCode()) + .collect(joining("; ")) + + "]"); } // case: less TermConcept(s) retrieved than codes queried - Set matchedCodes = theTermConcepts.stream().map(TermConcept::getCode).collect(toSet()); - List notMatchedValues = theValues.stream() - .filter(v -> !matchedCodes.contains(v)).collect(toList()); + Set matchedCodes = + theTermConcepts.stream().map(TermConcept::getCode).collect(toSet()); + List notMatchedValues = + theValues.stream().filter(v -> !matchedCodes.contains(v)).collect(toList()); - return "Invalid filter criteria - No TermConcept(s) were found for the requested codes: [" + - join(",", notMatchedValues + "]"); + return "Invalid filter criteria - No TermConcept(s) were found for the requested codes: [" + + join(",", notMatchedValues + "]"); } private void logFilteringValueOnProperty(String theValue, String theProperty) { @@ -1482,21 +1807,35 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { } private void throwInvalidRequestForOpOnProperty(ValueSet.FilterOperator theOp, String theProperty) { - throw new InvalidRequestException(Msg.code(897) + "Don't know how to handle op=" + theOp + " on property " + theProperty); + throw new InvalidRequestException( + Msg.code(897) + "Don't know how to handle op=" + theOp + " on property " + theProperty); } private void throwInvalidRequestForValueOnProperty(String theValue, String theProperty) { - throw new InvalidRequestException(Msg.code(898) + "Don't know how to handle value=" + theValue + " on property " + theProperty); + throw new InvalidRequestException( + Msg.code(898) + "Don't know how to handle value=" + theValue + " on property " + theProperty); } - private void expandWithoutHibernateSearch(IValueSetConceptAccumulator theValueSetCodeAccumulator, TermCodeSystemVersion theVersion, Set theAddedCodes, ValueSet.ConceptSetComponent theInclude, String theSystem, boolean theAdd) { + private void expandWithoutHibernateSearch( + IValueSetConceptAccumulator theValueSetCodeAccumulator, + TermCodeSystemVersion theVersion, + Set theAddedCodes, + ValueSet.ConceptSetComponent theInclude, + String theSystem, + boolean theAdd) { ourLog.trace("Hibernate search is not enabled"); if (theValueSetCodeAccumulator instanceof ValueSetExpansionComponentWithConceptAccumulator) { - Validate.isTrue(((ValueSetExpansionComponentWithConceptAccumulator) theValueSetCodeAccumulator).getParameter().isEmpty(), "Can not expand ValueSet with parameters - Hibernate Search is not enabled on this server."); + Validate.isTrue( + ((ValueSetExpansionComponentWithConceptAccumulator) theValueSetCodeAccumulator) + .getParameter() + .isEmpty(), + "Can not expand ValueSet with parameters - Hibernate 
Search is not enabled on this server."); } - Validate.isTrue(isNotBlank(theSystem), "Can not expand ValueSet without explicit system - Hibernate Search is not enabled on this server."); + Validate.isTrue( + isNotBlank(theSystem), + "Can not expand ValueSet without explicit system - Hibernate Search is not enabled on this server."); for (ValueSet.ConceptSetFilterComponent nextFilter : theInclude.getFilter()) { boolean handled = false; @@ -1504,24 +1843,39 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { case "concept": case "code": if (nextFilter.getOp() == ValueSet.FilterOperator.ISA) { - theValueSetCodeAccumulator.addMessage("Processing IS-A filter in database - Note that Hibernate Search is not enabled on this server, so this operation can be inefficient."); + theValueSetCodeAccumulator.addMessage( + "Processing IS-A filter in database - Note that Hibernate Search is not enabled on this server, so this operation can be inefficient."); TermConcept code = findCodeForFilterCriteria(theSystem, nextFilter); - addConceptAndChildren(theValueSetCodeAccumulator, theAddedCodes, theInclude, theSystem, theAdd, code); + addConceptAndChildren( + theValueSetCodeAccumulator, theAddedCodes, theInclude, theSystem, theAdd, code); handled = true; } break; } if (!handled) { - throwInvalidFilter(nextFilter, " - Note that Hibernate Search is disabled on this server so not all ValueSet expansion funtionality is available."); + throwInvalidFilter( + nextFilter, + " - Note that Hibernate Search is disabled on this server so not all ValueSet expansion funtionality is available."); } } if (theInclude.getConcept().isEmpty()) { - Collection concepts = myConceptDao.fetchConceptsAndDesignationsByVersionPid(theVersion.getPid()); + Collection concepts = + myConceptDao.fetchConceptsAndDesignationsByVersionPid(theVersion.getPid()); for (TermConcept next : concepts) { - addCodeIfNotAlreadyAdded(theValueSetCodeAccumulator, theAddedCodes, theAdd, theSystem, theInclude.getVersion(), next.getCode(), next.getDisplay(), next.getId(), next.getParentPidsAsString(), next.getDesignations()); + addCodeIfNotAlreadyAdded( + theValueSetCodeAccumulator, + theAddedCodes, + theAdd, + theSystem, + theInclude.getVersion(), + next.getCode(), + next.getDisplay(), + next.getId(), + next.getParentPidsAsString(), + next.getDesignations()); } } @@ -1529,28 +1883,50 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { if (!theSystem.equals(theInclude.getSystem()) && isNotBlank(theSystem)) { continue; } - Collection designations = next - .getDesignation() - .stream() - .map(t -> new TermConceptDesignation() - .setValue(t.getValue()) - .setLanguage(t.getLanguage()) - .setUseCode(t.getUse().getCode()) - .setUseSystem(t.getUse().getSystem()) - .setUseDisplay(t.getUse().getDisplay()) - ) - .collect(Collectors.toList()); - addCodeIfNotAlreadyAdded(theValueSetCodeAccumulator, theAddedCodes, theAdd, theSystem, theInclude.getVersion(), next.getCode(), next.getDisplay(), null, null, designations); + Collection designations = next.getDesignation().stream() + .map(t -> new TermConceptDesignation() + .setValue(t.getValue()) + .setLanguage(t.getLanguage()) + .setUseCode(t.getUse().getCode()) + .setUseSystem(t.getUse().getSystem()) + .setUseDisplay(t.getUse().getDisplay())) + .collect(Collectors.toList()); + addCodeIfNotAlreadyAdded( + theValueSetCodeAccumulator, + theAddedCodes, + theAdd, + theSystem, + theInclude.getVersion(), + next.getCode(), + next.getDisplay(), + null, + null, + designations); } - - } - 
private void addConceptAndChildren(IValueSetConceptAccumulator theValueSetCodeAccumulator, Set theAddedCodes, ValueSet.ConceptSetComponent theInclude, String theSystem, boolean theAdd, TermConcept theConcept) { + private void addConceptAndChildren( + IValueSetConceptAccumulator theValueSetCodeAccumulator, + Set theAddedCodes, + ValueSet.ConceptSetComponent theInclude, + String theSystem, + boolean theAdd, + TermConcept theConcept) { for (TermConcept nextChild : theConcept.getChildCodes()) { - boolean added = addCodeIfNotAlreadyAdded(theValueSetCodeAccumulator, theAddedCodes, theAdd, theSystem, theInclude.getVersion(), nextChild.getCode(), nextChild.getDisplay(), nextChild.getId(), nextChild.getParentPidsAsString(), nextChild.getDesignations()); + boolean added = addCodeIfNotAlreadyAdded( + theValueSetCodeAccumulator, + theAddedCodes, + theAdd, + theSystem, + theInclude.getVersion(), + nextChild.getCode(), + nextChild.getDisplay(), + nextChild.getId(), + nextChild.getParentPidsAsString(), + nextChild.getDesignations()); if (added) { - addConceptAndChildren(theValueSetCodeAccumulator, theAddedCodes, theInclude, theSystem, theAdd, nextChild); + addConceptAndChildren( + theValueSetCodeAccumulator, theAddedCodes, theInclude, theSystem, theAdd, nextChild); } } } @@ -1562,14 +1938,23 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { ValueSet canonicalValueSet = myVersionCanonicalizer.valueSetToCanonical(valueSet); Optional optionalTermValueSet = fetchValueSetEntity(canonicalValueSet); if (optionalTermValueSet.isEmpty()) { - return myContext.getLocalizer().getMessage(TermReadSvcImpl.class, "valueSetNotFoundInTerminologyDatabase", theValueSetId); + return myContext + .getLocalizer() + .getMessage(TermReadSvcImpl.class, "valueSetNotFoundInTerminologyDatabase", theValueSetId); } - ourLog.info("Invalidating pre-calculated expansion on ValueSet {} / {}", theValueSetId, canonicalValueSet.getUrl()); + ourLog.info( + "Invalidating pre-calculated expansion on ValueSet {} / {}", theValueSetId, canonicalValueSet.getUrl()); TermValueSet termValueSet = optionalTermValueSet.get(); if (termValueSet.getExpansionStatus() == TermValueSetPreExpansionStatusEnum.NOT_EXPANDED) { - return myContext.getLocalizer().getMessage(TermReadSvcImpl.class, "valueSetCantInvalidateNotYetPrecalculated", termValueSet.getUrl(), termValueSet.getExpansionStatus()); + return myContext + .getLocalizer() + .getMessage( + TermReadSvcImpl.class, + "valueSetCantInvalidateNotYetPrecalculated", + termValueSet.getUrl(), + termValueSet.getExpansionStatus()); } Long totalConcepts = termValueSet.getTotalConcepts(); @@ -1582,7 +1967,10 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { afterValueSetExpansionStatusChange(); - return myContext.getLocalizer().getMessage(TermReadSvcImpl.class, "valueSetPreExpansionInvalidated", termValueSet.getUrl(), totalConcepts); + return myContext + .getLocalizer() + .getMessage( + TermReadSvcImpl.class, "valueSetPreExpansionInvalidated", termValueSet.getUrl(), totalConcepts); } @Override @@ -1591,15 +1979,20 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { Optional optionalTermValueSet = fetchValueSetEntity(theValueSet); if (optionalTermValueSet.isEmpty()) { - ourLog.warn("ValueSet is not present in terminology tables. Will perform in-memory code validation. {}", getValueSetInfo(theValueSet)); + ourLog.warn( + "ValueSet is not present in terminology tables. Will perform in-memory code validation. 
{}", + getValueSetInfo(theValueSet)); return false; } TermValueSet termValueSet = optionalTermValueSet.get(); if (termValueSet.getExpansionStatus() != TermValueSetPreExpansionStatusEnum.EXPANDED) { - ourLog.warn("{} is present in terminology tables but not ready for persistence-backed invocation of operation $validation-code. Will perform in-memory code validation. Current status: {} | {}", - getValueSetInfo(theValueSet), termValueSet.getExpansionStatus().name(), termValueSet.getExpansionStatus().getDescription()); + ourLog.warn( + "{} is present in terminology tables but not ready for persistence-backed invocation of operation $validation-code. Will perform in-memory code validation. Current status: {} | {}", + getValueSetInfo(theValueSet), + termValueSet.getExpansionStatus().name(), + termValueSet.getExpansionStatus().getDescription()); return false; } @@ -1612,33 +2005,43 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { } private JpaPid getValueSetResourcePersistentId(ValueSet theValueSet) { - return myIdHelperService.resolveResourcePersistentIds(RequestPartitionId.allPartitions(), theValueSet.getIdElement().getResourceType(), theValueSet.getIdElement().getIdPart()); + return myIdHelperService.resolveResourcePersistentIds( + RequestPartitionId.allPartitions(), + theValueSet.getIdElement().getResourceType(), + theValueSet.getIdElement().getIdPart()); } protected IValidationSupport.CodeValidationResult validateCodeIsInPreExpandedValueSet( - ConceptValidationOptions theValidationOptions, - ValueSet theValueSet, String theSystem, String theCode, String theDisplay, Coding theCoding, CodeableConcept theCodeableConcept) { + ConceptValidationOptions theValidationOptions, + ValueSet theValueSet, + String theSystem, + String theCode, + String theDisplay, + Coding theCoding, + CodeableConcept theCodeableConcept) { assert TransactionSynchronizationManager.isSynchronizationActive(); ValidateUtil.isNotNullOrThrowUnprocessableEntity(theValueSet.hasId(), "ValueSet.id is required"); JpaPid valueSetResourcePid = getValueSetResourcePersistentId(theValueSet); - List concepts = new ArrayList<>(); if (isNotBlank(theCode)) { if (theValidationOptions.isInferSystem()) { - concepts.addAll(myValueSetConceptDao.findByValueSetResourcePidAndCode(valueSetResourcePid.getId(), theCode)); + concepts.addAll( + myValueSetConceptDao.findByValueSetResourcePidAndCode(valueSetResourcePid.getId(), theCode)); } else if (isNotBlank(theSystem)) { concepts.addAll(findByValueSetResourcePidSystemAndCode(valueSetResourcePid, theSystem, theCode)); } } else if (theCoding != null) { if (theCoding.hasSystem() && theCoding.hasCode()) { - concepts.addAll(findByValueSetResourcePidSystemAndCode(valueSetResourcePid, theCoding.getSystem(), theCoding.getCode())); + concepts.addAll(findByValueSetResourcePidSystemAndCode( + valueSetResourcePid, theCoding.getSystem(), theCoding.getCode())); } } else if (theCodeableConcept != null) { for (Coding coding : theCodeableConcept.getCoding()) { if (coding.hasSystem() && coding.hasCode()) { - concepts.addAll(findByValueSetResourcePidSystemAndCode(valueSetResourcePid, coding.getSystem(), coding.getCode())); + concepts.addAll(findByValueSetResourcePidSystemAndCode( + valueSetResourcePid, coding.getSystem(), coding.getCode())); if (!concepts.isEmpty()) { break; } @@ -1648,9 +2051,13 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { return null; } - TermValueSet valueSetEntity = 
myTermValueSetDao.findByResourcePid(valueSetResourcePid.getId()).orElseThrow(IllegalStateException::new); + TermValueSet valueSetEntity = myTermValueSetDao + .findByResourcePid(valueSetResourcePid.getId()) + .orElseThrow(IllegalStateException::new); String timingDescription = toHumanReadableExpansionTimestamp(valueSetEntity); - String msg = myContext.getLocalizer().getMessage(TermReadSvcImpl.class, "validationPerformedAgainstPreExpansion", timingDescription); + String msg = myContext + .getLocalizer() + .getMessage(TermReadSvcImpl.class, "validationPerformedAgainstPreExpansion", timingDescription); if (theValidationOptions.isValidateDisplay() && concepts.size() > 0) { String systemVersion = null; @@ -1658,28 +2065,30 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { systemVersion = concept.getSystemVersion(); if (isBlank(theDisplay) || isBlank(concept.getDisplay()) || theDisplay.equals(concept.getDisplay())) { return new IValidationSupport.CodeValidationResult() - .setCode(concept.getCode()) - .setDisplay(concept.getDisplay()) - .setCodeSystemVersion(concept.getSystemVersion()) - .setMessage(msg); + .setCode(concept.getCode()) + .setDisplay(concept.getDisplay()) + .setCodeSystemVersion(concept.getSystemVersion()) + .setMessage(msg); } } String expectedDisplay = concepts.get(0).getDisplay(); String append = createMessageAppendForDisplayMismatch(theSystem, theDisplay, expectedDisplay) + " - " + msg; - return createFailureCodeValidationResult(theSystem, theCode, systemVersion, append).setDisplay(expectedDisplay); + return createFailureCodeValidationResult(theSystem, theCode, systemVersion, append) + .setDisplay(expectedDisplay); } if (!concepts.isEmpty()) { return new IValidationSupport.CodeValidationResult() - .setCode(concepts.get(0).getCode()) - .setDisplay(concepts.get(0).getDisplay()) - .setCodeSystemVersion(concepts.get(0).getSystemVersion()) - .setMessage(msg); + .setCode(concepts.get(0).getCode()) + .setDisplay(concepts.get(0).getDisplay()) + .setCodeSystemVersion(concepts.get(0).getSystemVersion()) + .setMessage(msg); } // Ok, we failed - List outcome = myValueSetConceptDao.findByTermValueSetIdSystemOnly(Pageable.ofSize(1), valueSetEntity.getId(), theSystem); + List outcome = myValueSetConceptDao.findByTermValueSetIdSystemOnly( + Pageable.ofSize(1), valueSetEntity.getId(), theSystem); String append; if (outcome.size() == 0) { append = " - No codes in ValueSet belong to CodeSystem with URL " + theSystem; @@ -1690,14 +2099,16 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { return createFailureCodeValidationResult(theSystem, theCode, null, append); } - private CodeValidationResult createFailureCodeValidationResult(String theSystem, String theCode, String theCodeSystemVersion, String theAppend) { + private CodeValidationResult createFailureCodeValidationResult( + String theSystem, String theCode, String theCodeSystemVersion, String theAppend) { return new CodeValidationResult() - .setSeverity(IssueSeverity.ERROR) - .setCodeSystemVersion(theCodeSystemVersion) - .setMessage("Unable to validate code " + theSystem + "#" + theCode + theAppend); + .setSeverity(IssueSeverity.ERROR) + .setCodeSystemVersion(theCodeSystemVersion) + .setMessage("Unable to validate code " + theSystem + "#" + theCode + theAppend); } - private List findByValueSetResourcePidSystemAndCode(JpaPid theResourcePid, String theSystem, String theCode) { + private List findByValueSetResourcePidSystemAndCode( + JpaPid theResourcePid, String theSystem, String theCode) { 
assert TransactionSynchronizationManager.isSynchronizationActive(); List retVal = new ArrayList<>(); @@ -1707,10 +2118,10 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { String systemUrl = theSystem.substring(0, versionIndex); String systemVersion = theSystem.substring(versionIndex + 1); optionalTermValueSetConcept = myValueSetConceptDao.findByValueSetResourcePidSystemAndCodeWithVersion( - theResourcePid.getId(), systemUrl, systemVersion, theCode); + theResourcePid.getId(), systemUrl, systemVersion, theCode); } else { optionalTermValueSetConcept = myValueSetConceptDao.findByValueSetResourcePidSystemAndCode( - theResourcePid.getId(), theSystem, theCode); + theResourcePid.getId(), theSystem, theCode); } optionalTermValueSetConcept.ifPresent(retVal::add); return retVal; @@ -1726,7 +2137,8 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { } private Optional fetchLoadedCode(Long theCodeSystemResourcePid, String theCode) { - TermCodeSystemVersion codeSystem = myCodeSystemVersionDao.findCurrentVersionForCodeSystemResourcePid(theCodeSystemResourcePid); + TermCodeSystemVersion codeSystem = + myCodeSystemVersionDao.findCurrentVersionForCodeSystemResourcePid(theCodeSystemResourcePid); return myConceptDao.findByCodeSystemAndCode(codeSystem.getPid(), theCode); } @@ -1773,22 +2185,25 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { @Nullable private TermCodeSystemVersionDetails getCurrentCodeSystemVersion(String theCodeSystemIdentifier) { String version = getVersionFromIdentifier(theCodeSystemIdentifier); - TermCodeSystemVersionDetails retVal = myCodeSystemCurrentVersionCache.get(theCodeSystemIdentifier, t -> myTxTemplate.execute(tx -> { - TermCodeSystemVersion csv = null; - TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(getUrlFromIdentifier(theCodeSystemIdentifier)); - if (cs != null) { - if (version != null) { - csv = myCodeSystemVersionDao.findByCodeSystemPidAndVersion(cs.getPid(), version); - } else if (cs.getCurrentVersion() != null) { - csv = cs.getCurrentVersion(); - } - } - if (csv != null) { - return new TermCodeSystemVersionDetails(csv.getPid(), csv.getCodeSystemVersionId()); - } else { - return NO_CURRENT_VERSION; - } - })); + TermCodeSystemVersionDetails retVal = myCodeSystemCurrentVersionCache.get( + theCodeSystemIdentifier, + t -> myTxTemplate.execute(tx -> { + TermCodeSystemVersion csv = null; + TermCodeSystem cs = + myCodeSystemDao.findByCodeSystemUri(getUrlFromIdentifier(theCodeSystemIdentifier)); + if (cs != null) { + if (version != null) { + csv = myCodeSystemVersionDao.findByCodeSystemPidAndVersion(cs.getPid(), version); + } else if (cs.getCurrentVersion() != null) { + csv = cs.getCurrentVersion(); + } + } + if (csv != null) { + return new TermCodeSystemVersionDetails(csv.getPid(), csv.getCodeSystemVersionId()); + } else { + return NO_CURRENT_VERSION; + } + })); if (retVal == NO_CURRENT_VERSION) { return null; } @@ -1819,7 +2234,8 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { @Transactional(propagation = Propagation.REQUIRED) @Override - public Set findCodesAbove(Long theCodeSystemResourcePid, Long theCodeSystemVersionPid, String theCode) { + public Set findCodesAbove( + Long theCodeSystemResourcePid, Long theCodeSystemVersionPid, String theCode) { StopWatch stopwatch = new StopWatch(); Optional concept = fetchLoadedCode(theCodeSystemResourcePid, theCode); @@ -1851,7 +2267,8 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { 
@Transactional(propagation = Propagation.REQUIRED) @Override - public Set findCodesBelow(Long theCodeSystemResourcePid, Long theCodeSystemVersionPid, String theCode) { + public Set findCodesBelow( + Long theCodeSystemResourcePid, Long theCodeSystemVersionPid, String theCode) { Stopwatch stopwatch = Stopwatch.createStarted(); Optional concept = fetchLoadedCode(theCodeSystemResourcePid, theCode); @@ -1864,7 +2281,11 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { fetchChildren(concept.get(), retVal); - ourLog.debug("Fetched {} codes below code {} in {}ms", retVal.size(), theCode, stopwatch.elapsed(TimeUnit.MILLISECONDS)); + ourLog.debug( + "Fetched {} codes below code {} in {}ms", + retVal.size(), + theCode, + stopwatch.elapsed(TimeUnit.MILLISECONDS)); return retVal; } @@ -1935,12 +2356,15 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { setPreExpandingValueSets(true); try { ValueSet valueSet = txTemplate.execute(t -> { - TermValueSet refreshedValueSetToExpand = myTermValueSetDao.findById(valueSetToExpand.getId()).orElseThrow(() -> new IllegalStateException("Unknown VS ID: " + valueSetToExpand.getId())); + TermValueSet refreshedValueSetToExpand = myTermValueSetDao + .findById(valueSetToExpand.getId()) + .orElseThrow(() -> new IllegalStateException("Unknown VS ID: " + valueSetToExpand.getId())); return getValueSetFromResourceTable(refreshedValueSetToExpand.getResource()); }); assert valueSet != null; - ValueSetConceptAccumulator accumulator = new ValueSetConceptAccumulator(valueSetToExpand, myTermValueSetDao, myValueSetConceptDao, myValueSetConceptDesignationDao); + ValueSetConceptAccumulator accumulator = new ValueSetConceptAccumulator( + valueSetToExpand, myTermValueSetDao, myValueSetConceptDao, myValueSetConceptDesignationDao); ValueSetExpansionOptions options = new ValueSetExpansionOptions(); options.setIncludeHierarchy(true); expandValueSet(options, valueSet, accumulator); @@ -1950,19 +2374,22 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { valueSetToExpand.setExpansionStatus(TermValueSetPreExpansionStatusEnum.EXPANDED); valueSetToExpand.setExpansionTimestamp(new Date()); myTermValueSetDao.saveAndFlush(valueSetToExpand); - }); afterValueSetExpansionStatusChange(); - ourLog.info("Pre-expanded ValueSet[{}] with URL[{}] - Saved {} concepts in {}", valueSet.getId(), valueSet.getUrl(), accumulator.getConceptsSaved(), sw); + ourLog.info( + "Pre-expanded ValueSet[{}] with URL[{}] - Saved {} concepts in {}", + valueSet.getId(), + valueSet.getUrl(), + accumulator.getConceptsSaved(), + sw); } catch (Exception e) { ourLog.error("Failed to pre-expand ValueSet: " + e.getMessage(), e); txTemplate.executeWithoutResult(t -> { valueSetToExpand.setExpansionStatus(TermValueSetPreExpansionStatusEnum.FAILED_TO_EXPAND); myTermValueSetDao.saveAndFlush(valueSetToExpand); - }); } finally { @@ -1996,7 +2423,8 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { private Optional getNextTermValueSetNotExpanded() { Optional retVal = Optional.empty(); - Slice page = myTermValueSetDao.findByExpansionStatus(PageRequest.of(0, 1), TermValueSetPreExpansionStatusEnum.NOT_EXPANDED); + Slice page = myTermValueSetDao.findByExpansionStatus( + PageRequest.of(0, 1), TermValueSetPreExpansionStatusEnum.NOT_EXPANDED); if (!page.getContent().isEmpty()) { retVal = Optional.of(page.getContent().get(0)); @@ -2011,12 +2439,17 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { 
ValidateUtil.isTrueOrThrowInvalidRequest(theResourceTable != null, "No resource supplied"); if (isPlaceholder(theValueSet)) { - ourLog.info("Not storing TermValueSet for placeholder {}", theValueSet.getIdElement().toVersionless().getValueAsString()); + ourLog.info( + "Not storing TermValueSet for placeholder {}", + theValueSet.getIdElement().toVersionless().getValueAsString()); return; } - ValidateUtil.isNotBlankOrThrowUnprocessableEntity(theValueSet.getUrl(), "ValueSet has no value for ValueSet.url"); - ourLog.info("Storing TermValueSet for {}", theValueSet.getIdElement().toVersionless().getValueAsString()); + ValidateUtil.isNotBlankOrThrowUnprocessableEntity( + theValueSet.getUrl(), "ValueSet has no value for ValueSet.url"); + ourLog.info( + "Storing TermValueSet for {}", + theValueSet.getIdElement().toVersionless().getValueAsString()); /* * Get CodeSystem and validate CodeSystemVersion @@ -2049,15 +2482,30 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { TermValueSet existingTermValueSet = optionalExistingTermValueSetByUrl.get(); String msg; if (version != null) { - msg = myContext.getLocalizer().getMessage( - TermReadSvcImpl.class, - "cannotCreateDuplicateValueSetUrlAndVersion", - url, version, existingTermValueSet.getResource().getIdDt().toUnqualifiedVersionless().getValue()); + msg = myContext + .getLocalizer() + .getMessage( + TermReadSvcImpl.class, + "cannotCreateDuplicateValueSetUrlAndVersion", + url, + version, + existingTermValueSet + .getResource() + .getIdDt() + .toUnqualifiedVersionless() + .getValue()); } else { - msg = myContext.getLocalizer().getMessage( - TermReadSvcImpl.class, - "cannotCreateDuplicateValueSetUrl", - url, existingTermValueSet.getResource().getIdDt().toUnqualifiedVersionless().getValue()); + msg = myContext + .getLocalizer() + .getMessage( + TermReadSvcImpl.class, + "cannotCreateDuplicateValueSetUrl", + url, + existingTermValueSet + .getResource() + .getIdDt() + .toUnqualifiedVersionless() + .getValue()); } throw new UnprocessableEntityException(Msg.code(902) + msg); } @@ -2065,17 +2513,23 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { @Override @Transactional - public IFhirResourceDaoCodeSystem.SubsumesResult subsumes(IPrimitiveType theCodeA, IPrimitiveType theCodeB, - IPrimitiveType theSystem, IBaseCoding theCodingA, IBaseCoding theCodingB) { + public IFhirResourceDaoCodeSystem.SubsumesResult subsumes( + IPrimitiveType theCodeA, + IPrimitiveType theCodeB, + IPrimitiveType theSystem, + IBaseCoding theCodingA, + IBaseCoding theCodingB) { FhirVersionIndependentConcept conceptA = toConcept(theCodeA, theSystem, theCodingA); FhirVersionIndependentConcept conceptB = toConcept(theCodeB, theSystem, theCodingB); if (!StringUtils.equals(conceptA.getSystem(), conceptB.getSystem())) { - throw new InvalidRequestException(Msg.code(903) + "Unable to test subsumption across different code systems"); + throw new InvalidRequestException( + Msg.code(903) + "Unable to test subsumption across different code systems"); } if (!StringUtils.equals(conceptA.getSystemVersion(), conceptB.getSystemVersion())) { - throw new InvalidRequestException(Msg.code(904) + "Unable to test subsumption across different code system versions"); + throw new InvalidRequestException( + Msg.code(904) + "Unable to test subsumption across different code system versions"); } String codeASystemIdentifier; @@ -2085,7 +2539,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { codeASystemIdentifier = conceptA.getSystem(); } 
TermConcept codeA = findCode(codeASystemIdentifier, conceptA.getCode()) - .orElseThrow(() -> new InvalidRequestException("Unknown code: " + conceptA)); + .orElseThrow(() -> new InvalidRequestException("Unknown code: " + conceptA)); String codeBSystemIdentifier; if (StringUtils.isNotEmpty(conceptB.getSystemVersion())) { @@ -2094,7 +2548,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { codeBSystemIdentifier = conceptB.getSystem(); } TermConcept codeB = findCode(codeBSystemIdentifier, conceptB.getCode()) - .orElseThrow(() -> new InvalidRequestException("Unknown code: " + conceptB)); + .orElseThrow(() -> new InvalidRequestException("Unknown code: " + conceptB)); SearchSession searchSession = Search.session(myEntityManager); @@ -2110,7 +2564,8 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { return new IFhirResourceDaoCodeSystem.SubsumesResult(subsumes); } - protected IValidationSupport.LookupCodeResult lookupCode(String theSystem, String theCode, String theDisplayLanguage) { + protected IValidationSupport.LookupCodeResult lookupCode( + String theSystem, String theCode, String theDisplayLanguage) { TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager); return txTemplate.execute(t -> { Optional codeOpt = findCode(theSystem, theCode); @@ -2140,10 +2595,13 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { for (TermConceptProperty next : code.getProperties()) { if (next.getType() == TermConceptPropertyTypeEnum.CODING) { - IValidationSupport.CodingConceptProperty property = new IValidationSupport.CodingConceptProperty(next.getKey(), next.getCodeSystem(), next.getValue(), next.getDisplay()); + IValidationSupport.CodingConceptProperty property = + new IValidationSupport.CodingConceptProperty( + next.getKey(), next.getCodeSystem(), next.getValue(), next.getDisplay()); result.getProperties().add(property); } else if (next.getType() == TermConceptPropertyTypeEnum.STRING) { - IValidationSupport.StringConceptProperty property = new IValidationSupport.StringConceptProperty(next.getKey(), next.getValue()); + IValidationSupport.StringConceptProperty property = + new IValidationSupport.StringConceptProperty(next.getKey(), next.getValue()); result.getProperties().add(property); } else { throw new InternalErrorException(Msg.code(905) + "Unknown type: " + next.getType()); @@ -2153,19 +2611,23 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { return result; } else { - return new LookupCodeResult() - .setFound(false); + return new LookupCodeResult().setFound(false); } }); } @Nullable - private ConceptSubsumptionOutcome testForSubsumption(SearchSession theSearchSession, TermConcept theLeft, TermConcept theRight, ConceptSubsumptionOutcome theOutput) { - List fetch = theSearchSession.search(TermConcept.class) - .where(f -> f.bool() - .must(f.match().field("myId").matching(theRight.getId())) - .must(f.match().field("myParentPids").matching(Long.toString(theLeft.getId()))) - ).fetchHits(1); + private ConceptSubsumptionOutcome testForSubsumption( + SearchSession theSearchSession, + TermConcept theLeft, + TermConcept theRight, + ConceptSubsumptionOutcome theOutput) { + List fetch = theSearchSession + .search(TermConcept.class) + .where(f -> f.bool() + .must(f.match().field("myId").matching(theRight.getId())) + .must(f.match().field("myParentPids").matching(Long.toString(theLeft.getId())))) + .fetchHits(1); if (fetch.size() > 0) { return theOutput; @@ -2174,7 +2636,8 @@ public class 
TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { } } - private ArrayList toVersionIndependentConcepts(String theSystem, Set codes) { + private ArrayList toVersionIndependentConcepts( + String theSystem, Set codes) { ArrayList retVal = new ArrayList<>(codes.size()); for (TermConcept next : codes) { retVal.add(new FhirVersionIndependentConcept(theSystem, next.getCode())); @@ -2184,12 +2647,20 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { @Override @Transactional - public CodeValidationResult validateCodeInValueSet(ValidationSupportContext theValidationSupportContext, ConceptValidationOptions theOptions, String theCodeSystem, String theCode, String theDisplay, @Nonnull IBaseResource theValueSet) { + public CodeValidationResult validateCodeInValueSet( + ValidationSupportContext theValidationSupportContext, + ConceptValidationOptions theOptions, + String theCodeSystem, + String theCode, + String theDisplay, + @Nonnull IBaseResource theValueSet) { invokeRunnableForUnitTest(); IPrimitiveType urlPrimitive; if (theValueSet instanceof org.hl7.fhir.dstu2.model.ValueSet) { - urlPrimitive = FhirContext.forDstu2Hl7OrgCached().newTerser().getSingleValueOrNull(theValueSet, "url", IPrimitiveType.class); + urlPrimitive = FhirContext.forDstu2Hl7OrgCached() + .newTerser() + .getSingleValueOrNull(theValueSet, "url", IPrimitiveType.class); } else { urlPrimitive = myContext.newTerser().getSingleValueOrNull(theValueSet, "url", IPrimitiveType.class); } @@ -2202,40 +2673,61 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { @CoverageIgnore @Override - public IValidationSupport.CodeValidationResult validateCode(@Nonnull ValidationSupportContext theValidationSupportContext, @Nonnull ConceptValidationOptions theOptions, String theCodeSystemUrl, String theCode, String theDisplay, String theValueSetUrl) { - //TODO GGG TRY TO JUST AUTO_PASS HERE AND SEE WHAT HAPPENS. + public IValidationSupport.CodeValidationResult validateCode( + @Nonnull ValidationSupportContext theValidationSupportContext, + @Nonnull ConceptValidationOptions theOptions, + String theCodeSystemUrl, + String theCode, + String theDisplay, + String theValueSetUrl) { + // TODO GGG TRY TO JUST AUTO_PASS HERE AND SEE WHAT HAPPENS. 
invokeRunnableForUnitTest(); theOptions.setValidateDisplay(isNotBlank(theDisplay)); if (isNotBlank(theValueSetUrl)) { - return validateCodeInValueSet(theValidationSupportContext, theOptions, theValueSetUrl, theCodeSystemUrl, theCode, theDisplay); + return validateCodeInValueSet( + theValidationSupportContext, theOptions, theValueSetUrl, theCodeSystemUrl, theCode, theDisplay); } TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager); txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRED); txTemplate.setReadOnly(true); - Optional codeOpt = txTemplate.execute(tx -> findCode(theCodeSystemUrl, theCode).map(c -> { - String codeSystemVersionId = getCurrentCodeSystemVersion(theCodeSystemUrl).myCodeSystemVersionId; - return new FhirVersionIndependentConcept(theCodeSystemUrl, c.getCode(), c.getDisplay(), codeSystemVersionId); - })); + Optional codeOpt = + txTemplate.execute(tx -> findCode(theCodeSystemUrl, theCode).map(c -> { + String codeSystemVersionId = getCurrentCodeSystemVersion(theCodeSystemUrl).myCodeSystemVersionId; + return new FhirVersionIndependentConcept( + theCodeSystemUrl, c.getCode(), c.getDisplay(), codeSystemVersionId); + })); if (codeOpt != null && codeOpt.isPresent()) { FhirVersionIndependentConcept code = codeOpt.get(); - if (!theOptions.isValidateDisplay() || isBlank(code.getDisplay()) || isBlank(theDisplay) || code.getDisplay().equals(theDisplay)) { - return new CodeValidationResult() - .setCode(code.getCode()) - .setDisplay(code.getDisplay()); + if (!theOptions.isValidateDisplay() + || isBlank(code.getDisplay()) + || isBlank(theDisplay) + || code.getDisplay().equals(theDisplay)) { + return new CodeValidationResult().setCode(code.getCode()).setDisplay(code.getDisplay()); } else { - String messageAppend = createMessageAppendForDisplayMismatch(theCodeSystemUrl, theDisplay, code.getDisplay()); - return createFailureCodeValidationResult(theCodeSystemUrl, theCode, code.getSystemVersion(), messageAppend).setDisplay(code.getDisplay()); + String messageAppend = + createMessageAppendForDisplayMismatch(theCodeSystemUrl, theDisplay, code.getDisplay()); + return createFailureCodeValidationResult( + theCodeSystemUrl, theCode, code.getSystemVersion(), messageAppend) + .setDisplay(code.getDisplay()); } } - return createFailureCodeValidationResult(theCodeSystemUrl, theCode, null, createMessageAppendForCodeNotFoundInCodeSystem(theCodeSystemUrl)); + return createFailureCodeValidationResult( + theCodeSystemUrl, theCode, null, createMessageAppendForCodeNotFoundInCodeSystem(theCodeSystemUrl)); } - IValidationSupport.CodeValidationResult validateCodeInValueSet(ValidationSupportContext theValidationSupportContext, ConceptValidationOptions theValidationOptions, String theValueSetUrl, String theCodeSystem, String theCode, String theDisplay) { - IBaseResource valueSet = theValidationSupportContext.getRootValidationSupport().fetchValueSet(theValueSetUrl); + IValidationSupport.CodeValidationResult validateCodeInValueSet( + ValidationSupportContext theValidationSupportContext, + ConceptValidationOptions theValidationOptions, + String theValueSetUrl, + String theCodeSystem, + String theCode, + String theDisplay) { + IBaseResource valueSet = + theValidationSupportContext.getRootValidationSupport().fetchValueSet(theValueSetUrl); CodeValidationResult retVal = null; // If we don't have a PID, this came from some source other than the JPA @@ -2246,7 +2738,8 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { TransactionTemplate txTemplate = 
new TransactionTemplate(myTxManager); retVal = txTemplate.execute(tx -> { if (isValueSetPreExpandedForCodeValidation(valueSet)) { - return validateCodeIsInPreExpandedValueSet(theValidationOptions, valueSet, theCodeSystem, theCode, theDisplay, null, null); + return validateCodeIsInPreExpandedValueSet( + theValidationOptions, valueSet, theCodeSystem, theCode, theDisplay, null, null); } else { return null; } @@ -2256,7 +2749,14 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { if (retVal == null) { if (valueSet != null) { - retVal = new InMemoryTerminologyServerValidationSupport(myContext).validateCodeInValueSet(theValidationSupportContext, theValidationOptions, theCodeSystem, theCode, theDisplay, valueSet); + retVal = new InMemoryTerminologyServerValidationSupport(myContext) + .validateCodeInValueSet( + theValidationSupportContext, + theValidationOptions, + theCodeSystem, + theCode, + theDisplay, + valueSet); } else { String append = " - Unable to locate ValueSet[" + theValueSetUrl + "]"; retVal = createFailureCodeValidationResult(theCodeSystem, theCode, null, append); @@ -2264,17 +2764,21 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { } // Check if someone is accidentally using a VS url where it should be a CS URL - if (retVal != null && retVal.getCode() == null && theCodeSystem != null && myContext.getVersion().getVersion().isNewerThan(FhirVersionEnum.DSTU2)) { + if (retVal != null + && retVal.getCode() == null + && theCodeSystem != null + && myContext.getVersion().getVersion().isNewerThan(FhirVersionEnum.DSTU2)) { if (isValueSetSupported(theValidationSupportContext, theCodeSystem)) { if (!isCodeSystemSupported(theValidationSupportContext, theCodeSystem)) { - String newMessage = "Unable to validate code " + theCodeSystem + "#" + theCode + " - Supplied system URL is a ValueSet URL and not a CodeSystem URL, check if it is correct: " + theCodeSystem; + String newMessage = "Unable to validate code " + theCodeSystem + "#" + theCode + + " - Supplied system URL is a ValueSet URL and not a CodeSystem URL, check if it is correct: " + + theCodeSystem; retVal.setMessage(newMessage); } } } return retVal; - } @Override @@ -2326,7 +2830,11 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { return myContext; } - private void findCodesAbove(CodeSystem theSystem, String theSystemString, String theCode, List theListToPopulate) { + private void findCodesAbove( + CodeSystem theSystem, + String theSystemString, + String theCode, + List theListToPopulate) { List conceptList = theSystem.getConcept(); for (CodeSystem.ConceptDefinitionComponent next : conceptList) { addTreeIfItContainsCode(theSystemString, next, theCode, theListToPopulate); @@ -2343,12 +2851,20 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { return retVal; } - private void findCodesBelow(CodeSystem theSystem, String theSystemString, String theCode, List theListToPopulate) { + private void findCodesBelow( + CodeSystem theSystem, + String theSystemString, + String theCode, + List theListToPopulate) { List conceptList = theSystem.getConcept(); findCodesBelow(theSystemString, theCode, theListToPopulate, conceptList); } - private void findCodesBelow(String theSystemString, String theCode, List theListToPopulate, List conceptList) { + private void findCodesBelow( + String theSystemString, + String theCode, + List theListToPopulate, + List conceptList) { for (CodeSystem.ConceptDefinitionComponent next : conceptList) { if 
(theCode.equals(next.getCode())) { addAllChildren(theSystemString, next, theListToPopulate); @@ -2368,7 +2884,10 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { return retVal; } - private void addAllChildren(String theSystemString, CodeSystem.ConceptDefinitionComponent theCode, List theListToPopulate) { + private void addAllChildren( + String theSystemString, + CodeSystem.ConceptDefinitionComponent theCode, + List theListToPopulate) { if (isNotBlank(theCode.getCode())) { theListToPopulate.add(new FhirVersionIndependentConcept(theSystemString, theCode.getCode())); } @@ -2377,7 +2896,11 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { } } - private boolean addTreeIfItContainsCode(String theSystemString, CodeSystem.ConceptDefinitionComponent theNext, String theCode, List theListToPopulate) { + private boolean addTreeIfItContainsCode( + String theSystemString, + CodeSystem.ConceptDefinitionComponent theNext, + String theCode, + List theListToPopulate) { boolean foundCodeInChild = false; for (CodeSystem.ConceptDefinitionComponent nextChild : theNext.getConcept()) { foundCodeInChild |= addTreeIfItContainsCode(theSystemString, nextChild, theCode, theListToPopulate); @@ -2392,10 +2915,17 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { } @Nonnull - private FhirVersionIndependentConcept toConcept(IPrimitiveType theCodeType, IPrimitiveType theCodeSystemIdentifierType, IBaseCoding theCodingType) { + private FhirVersionIndependentConcept toConcept( + IPrimitiveType theCodeType, + IPrimitiveType theCodeSystemIdentifierType, + IBaseCoding theCodingType) { String code = theCodeType != null ? theCodeType.getValueAsString() : null; - String system = theCodeSystemIdentifierType != null ? getUrlFromIdentifier(theCodeSystemIdentifierType.getValueAsString()) : null; - String systemVersion = theCodeSystemIdentifierType != null ? getVersionFromIdentifier(theCodeSystemIdentifierType.getValueAsString()) : null; + String system = theCodeSystemIdentifierType != null + ? getUrlFromIdentifier(theCodeSystemIdentifierType.getValueAsString()) + : null; + String systemVersion = theCodeSystemIdentifierType != null + ? getVersionFromIdentifier(theCodeSystemIdentifierType.getValueAsString()) + : null; if (theCodingType != null) { Coding canonicalizedCoding = myVersionCanonicalizer.codingToCanonical(theCodingType); assert canonicalizedCoding != null; // Shouldn't be null, since theCodingType isn't @@ -2433,14 +2963,15 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { @Override public Optional readCodeSystemByForcedId(String theForcedId) { @SuppressWarnings("unchecked") - List resultList = (List) myEntityManager.createQuery( - "select f.myResource from ForcedId f " + - "where f.myResourceType = 'CodeSystem' and f.myForcedId = '" + theForcedId + "'").getResultList(); + List resultList = (List) myEntityManager + .createQuery("select f.myResource from ForcedId f " + + "where f.myResourceType = 'CodeSystem' and f.myForcedId = '" + theForcedId + "'") + .getResultList(); if (resultList.isEmpty()) return Optional.empty(); if (resultList.size() > 1) - throw new NonUniqueResultException(Msg.code(911) + "More than one CodeSystem is pointed by forcedId: " + theForcedId + ". Was constraint " - + ForcedId.IDX_FORCEDID_TYPE_FID + " removed?"); + throw new NonUniqueResultException(Msg.code(911) + "More than one CodeSystem is pointed by forcedId: " + + theForcedId + ". 
Was constraint " + ForcedId.IDX_FORCEDID_TYPE_FID + " removed?"); IFhirResourceDao csDao = myDaoRegistry.getResourceDao("CodeSystem"); IBaseResource cs = myJpaStorageResourceParser.toResource(resultList.get(0), false); @@ -2467,15 +2998,15 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { try { SearchSession searchSession = getSearchSession(); searchSession - .massIndexer(TermConcept.class) - .dropAndCreateSchemaOnStart(true) - .purgeAllOnStart(false) - .batchSizeToLoadObjects(100) - .cacheMode(CacheMode.IGNORE) - .threadsToLoadObjects(6) - .transactionTimeout(60 * SECONDS_IN_MINUTE) - .monitor(new PojoMassIndexingLoggingMonitor(INDEXED_ROOTS_LOGGING_COUNT)) - .startAndWait(); + .massIndexer(TermConcept.class) + .dropAndCreateSchemaOnStart(true) + .purgeAllOnStart(false) + .batchSizeToLoadObjects(100) + .cacheMode(CacheMode.IGNORE) + .threadsToLoadObjects(6) + .transactionTimeout(60 * SECONDS_IN_MINUTE) + .monitor(new PojoMassIndexingLoggingMonitor(INDEXED_ROOTS_LOGGING_COUNT)) + .startAndWait(); } finally { myDeferredStorageSvc.setProcessDeferred(true); } @@ -2491,7 +3022,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { @VisibleForTesting int calculateObjectLoadingThreadNumber() { IConnectionPoolInfoProvider connectionPoolInfoProvider = - new ConnectionPoolInfoProvider(myHibernatePropertiesProvider.getDataSource()); + new ConnectionPoolInfoProvider(myHibernatePropertiesProvider.getDataSource()); Optional maxConnectionsOpt = connectionPoolInfoProvider.getTotalConnectionSize(); if (maxConnectionsOpt.isEmpty()) { return DEFAULT_MASS_INDEXER_OBJECT_LOADING_THREADS; @@ -2500,8 +3031,11 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { int maxConnections = maxConnectionsOpt.get(); int usableThreads = maxConnections < 6 ? 
1 : maxConnections - 5; int objectThreads = Math.min(usableThreads, MAX_MASS_INDEXER_OBJECT_LOADING_THREADS); - ourLog.debug("Data source connection pool has {} connections allocated, so reindexing will use {} object " + - "loading threads (each using a connection)", maxConnections, objectThreads); + ourLog.debug( + "Data source connection pool has {} connections allocated, so reindexing will use {} object " + + "loading threads (each using a connection)", + maxConnections, + objectThreads); return objectThreads; } @@ -2511,7 +3045,10 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { } @Override - public ValueSetExpansionOutcome expandValueSet(ValidationSupportContext theValidationSupportContext, ValueSetExpansionOptions theExpansionOptions, @Nonnull IBaseResource theValueSetToExpand) { + public ValueSetExpansionOutcome expandValueSet( + ValidationSupportContext theValidationSupportContext, + ValueSetExpansionOptions theExpansionOptions, + @Nonnull IBaseResource theValueSetToExpand) { ValueSet canonicalInput = myVersionCanonicalizer.valueSetToCanonical(theValueSetToExpand); org.hl7.fhir.r4.model.ValueSet expandedR4 = expandValueSet(theExpansionOptions, canonicalInput); return new ValueSetExpansionOutcome(myVersionCanonicalizer.valueSetFromCanonical(expandedR4)); @@ -2525,25 +3062,39 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { } @Override - public void expandValueSet(ValueSetExpansionOptions theExpansionOptions, IBaseResource theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator) { - org.hl7.fhir.r4.model.ValueSet valueSetToExpand = myVersionCanonicalizer.valueSetToCanonical(theValueSetToExpand); + public void expandValueSet( + ValueSetExpansionOptions theExpansionOptions, + IBaseResource theValueSetToExpand, + IValueSetConceptAccumulator theValueSetCodeAccumulator) { + org.hl7.fhir.r4.model.ValueSet valueSetToExpand = + myVersionCanonicalizer.valueSetToCanonical(theValueSetToExpand); expandValueSet(theExpansionOptions, valueSetToExpand, theValueSetCodeAccumulator); } private org.hl7.fhir.r4.model.ValueSet getValueSetFromResourceTable(ResourceTable theResourceTable) { - Class type = getFhirContext().getResourceDefinition("ValueSet").getImplementingClass(); + Class type = + getFhirContext().getResourceDefinition("ValueSet").getImplementingClass(); IBaseResource valueSet = myJpaStorageResourceParser.toResource(type, theResourceTable, null, false); return myVersionCanonicalizer.valueSetToCanonical(valueSet); } @Override - public CodeValidationResult validateCodeIsInPreExpandedValueSet(ConceptValidationOptions theOptions, IBaseResource theValueSet, String theSystem, String theCode, String theDisplay, IBaseDatatype theCoding, IBaseDatatype theCodeableConcept) { + public CodeValidationResult validateCodeIsInPreExpandedValueSet( + ConceptValidationOptions theOptions, + IBaseResource theValueSet, + String theSystem, + String theCode, + String theDisplay, + IBaseDatatype theCoding, + IBaseDatatype theCodeableConcept) { ValidateUtil.isNotNullOrThrowUnprocessableEntity(theValueSet, "ValueSet must not be null"); org.hl7.fhir.r4.model.ValueSet valueSetR4 = myVersionCanonicalizer.valueSetToCanonical(theValueSet); org.hl7.fhir.r4.model.Coding codingR4 = myVersionCanonicalizer.codingToCanonical((IBaseCoding) theCoding); - org.hl7.fhir.r4.model.CodeableConcept codeableConcept = myVersionCanonicalizer.codeableConceptToCanonical(theCodeableConcept); + org.hl7.fhir.r4.model.CodeableConcept codeableConcept = + 
myVersionCanonicalizer.codeableConceptToCanonical(theCodeableConcept); - return validateCodeIsInPreExpandedValueSet(theOptions, valueSetR4, theSystem, theCode, theDisplay, codingR4, codeableConcept); + return validateCodeIsInPreExpandedValueSet( + theOptions, valueSetR4, theSystem, theCode, theDisplay, codingR4, codeableConcept); } @Override @@ -2554,13 +3105,16 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { } @Override - public LookupCodeResult lookupCode(ValidationSupportContext theValidationSupportContext, String theSystem, String theCode, String theDisplayLanguage) { + public LookupCodeResult lookupCode( + ValidationSupportContext theValidationSupportContext, + String theSystem, + String theCode, + String theDisplayLanguage) { return lookupCode(theSystem, theCode, theDisplayLanguage); } private static class TermCodeSystemVersionDetails { - private final long myPid; private final String myCodeSystemVersionId; @@ -2623,12 +3177,13 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { } return theExpansionOptions.getTheDisplayLanguage().equalsIgnoreCase(theStoredLang); - } @Nonnull - private static String createMessageAppendForDisplayMismatch(String theCodeSystemUrl, String theDisplay, String theExpectedDisplay) { - return " - Concept Display \"" + theDisplay + "\" does not match expected \"" + theExpectedDisplay + "\" for CodeSystem: " + theCodeSystemUrl; + private static String createMessageAppendForDisplayMismatch( + String theCodeSystemUrl, String theDisplay, String theExpectedDisplay) { + return " - Concept Display \"" + theDisplay + "\" does not match expected \"" + theExpectedDisplay + + "\" for CodeSystem: " + theCodeSystemUrl; } @Nonnull @@ -2666,7 +3221,8 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { myInvokeOnNextCallForUnitTest = theInvokeOnNextCallForUnitTest; } - static List toPersistedConcepts(List theConcept, TermCodeSystemVersion theCodeSystemVersion) { + static List toPersistedConcepts( + List theConcept, TermCodeSystemVersion theCodeSystemVersion) { ArrayList retVal = new ArrayList<>(); for (CodeSystem.ConceptDefinitionComponent next : theConcept) { @@ -2680,21 +3236,33 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { } @Nonnull - static TermConcept toTermConcept(CodeSystem.ConceptDefinitionComponent theConceptDefinition, TermCodeSystemVersion theCodeSystemVersion) { + static TermConcept toTermConcept( + CodeSystem.ConceptDefinitionComponent theConceptDefinition, TermCodeSystemVersion theCodeSystemVersion) { TermConcept termConcept = new TermConcept(); termConcept.setCode(theConceptDefinition.getCode()); termConcept.setCodeSystemVersion(theCodeSystemVersion); termConcept.setDisplay(theConceptDefinition.getDisplay()); - termConcept.addChildren(toPersistedConcepts(theConceptDefinition.getConcept(), theCodeSystemVersion), RelationshipTypeEnum.ISA); + termConcept.addChildren( + toPersistedConcepts(theConceptDefinition.getConcept(), theCodeSystemVersion), RelationshipTypeEnum.ISA); - for (CodeSystem.ConceptDefinitionDesignationComponent designationComponent : theConceptDefinition.getDesignation()) { + for (CodeSystem.ConceptDefinitionDesignationComponent designationComponent : + theConceptDefinition.getDesignation()) { if (isNotBlank(designationComponent.getValue())) { TermConceptDesignation designation = termConcept.addDesignation(); designation.setLanguage(designationComponent.hasLanguage() ? 
designationComponent.getLanguage() : null); if (designationComponent.hasUse()) { - designation.setUseSystem(designationComponent.getUse().hasSystem() ? designationComponent.getUse().getSystem() : null); - designation.setUseCode(designationComponent.getUse().hasCode() ? designationComponent.getUse().getCode() : null); - designation.setUseDisplay(designationComponent.getUse().hasDisplay() ? designationComponent.getUse().getDisplay() : null); + designation.setUseSystem( + designationComponent.getUse().hasSystem() + ? designationComponent.getUse().getSystem() + : null); + designation.setUseCode( + designationComponent.getUse().hasCode() + ? designationComponent.getUse().getCode() + : null); + designation.setUseDisplay( + designationComponent.getUse().hasDisplay() + ? designationComponent.getUse().getDisplay() + : null); } designation.setValue(designationComponent.getValue()); } @@ -2718,7 +3286,8 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { property.setDisplay(nextCoding.getDisplay()); } else if (next.getValue() != null) { // TODO: LOINC has properties of type BOOLEAN that we should handle - ourLog.warn("Don't know how to handle properties of type: " + next.getValue().getClass()); + ourLog.warn("Don't know how to handle properties of type: " + + next.getValue().getClass()); continue; } @@ -2729,11 +3298,8 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { static boolean isDisplayLanguageMatch(String theReqLang, String theStoredLang) { // NOTE: return the designation when one of then is not specified. - if (theReqLang == null || theStoredLang == null) - return true; + if (theReqLang == null || theStoredLang == null) return true; return theReqLang.equalsIgnoreCase(theStoredLang); } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcUtil.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcUtil.java index d165b4a2b80..87daa5061f1 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcUtil.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcUtil.java @@ -35,9 +35,9 @@ public class TermReadSvcUtil { private static final Logger ourLog = LoggerFactory.getLogger(TermReadSvcUtil.class); public static Optional getValueSetId(String theUrl) { - if (! theUrl.startsWith(LOINC_GENERIC_VALUESET_URL)) return Optional.empty(); + if (!theUrl.startsWith(LOINC_GENERIC_VALUESET_URL)) return Optional.empty(); - if (! theUrl.startsWith(LOINC_GENERIC_VALUESET_URL_PLUS_SLASH)) { + if (!theUrl.startsWith(LOINC_GENERIC_VALUESET_URL_PLUS_SLASH)) { if (theUrl.equals(LOINC_GENERIC_VALUESET_URL)) { // the request is for the loinc all valueset which when loading was given the name: 'loinc-all' return Optional.of(LOINC_ALL_VALUESET_ID); @@ -51,22 +51,17 @@ public class TermReadSvcUtil { return isBlank(forcedId) ? Optional.empty() : Optional.of(forcedId); } - public static boolean isLoincUnversionedValueSet(String theUrl) { boolean isLoincCodeSystem = StringUtils.containsIgnoreCase(theUrl, LOINC_LOW); - boolean isNoVersion = ! theUrl.contains("|"); + boolean isNoVersion = !theUrl.contains("|"); return isLoincCodeSystem && isNoVersion; } - public static boolean isLoincUnversionedCodeSystem(String theUrl) { boolean isLoincCodeSystem = StringUtils.containsIgnoreCase(theUrl, LOINC_LOW); - boolean isNoVersion = ! 
theUrl.contains("|"); + boolean isNoVersion = !theUrl.contains("|"); return isLoincCodeSystem && isNoVersion; } - - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReindexingSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReindexingSvcImpl.java index af9eaf804f4..6588022f46c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReindexingSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReindexingSvcImpl.java @@ -53,15 +53,21 @@ import static org.apache.commons.lang3.StringUtils.isBlank; public class TermReindexingSvcImpl implements ITermReindexingSvc, IHasScheduledJobs { private static final Logger ourLog = LoggerFactory.getLogger(TermReindexingSvcImpl.class); private static boolean ourForceSaveDeferredAlwaysForUnitTest; + @Autowired protected ITermConceptDao myConceptDao; + private ArrayListMultimap myChildToParentPidCache; + @Autowired private PlatformTransactionManager myTransactionMgr; + @Autowired private ITermConceptParentChildLinkDao myConceptParentChildLinkDao; + @Autowired private ITermDeferredStorageSvc myDeferredStorageSvc; + @Autowired private TermConceptDaoSvc myTermConceptDaoSvc; @@ -83,14 +89,23 @@ public class TermReindexingSvcImpl implements ITermReindexingSvc, IHasScheduledJ Collection parentLinks = myConceptParentChildLinkDao.findAllWithChild(theConceptPid); if (parentLinks.isEmpty()) { myChildToParentPidCache.put(theConceptPid, -1L); - ourLog.info("Found {} parent concepts of concept {} (cache has {})", 0, theConceptPid, myChildToParentPidCache.size()); + ourLog.info( + "Found {} parent concepts of concept {} (cache has {})", + 0, + theConceptPid, + myChildToParentPidCache.size()); return; } else { for (Long next : parentLinks) { myChildToParentPidCache.put(theConceptPid, next); } - int parentCount = myChildToParentPidCache.get(theConceptPid).size(); - ourLog.info("Found {} parent concepts of concept {} (cache has {})", parentCount, theConceptPid, myChildToParentPidCache.size()); + int parentCount = + myChildToParentPidCache.get(theConceptPid).size(); + ourLog.info( + "Found {} parent concepts of concept {} (cache has {})", + parentCount, + theConceptPid, + myChildToParentPidCache.size()); } } @@ -101,14 +116,13 @@ public class TermReindexingSvcImpl implements ITermReindexingSvc, IHasScheduledJ theParentsBuilder.append(nextParent); createParentsString(theParentsBuilder, nextParent); } - } - @Override protected void doInTransactionWithoutResult(TransactionStatus theArg0) { int maxResult = 1000; - Page concepts = myConceptDao.findResourcesRequiringReindexing(PageRequest.of(0, maxResult)); + Page concepts = + myConceptDao.findResourcesRequiringReindexing(PageRequest.of(0, maxResult)); if (!concepts.hasContent()) { if (myChildToParentPidCache != null) { ourLog.info("Clearing parent concept cache"); @@ -138,10 +152,14 @@ public class TermReindexingSvcImpl implements ITermReindexingSvc, IHasScheduledJ count++; } - ourLog.info("Indexed {} / {} concepts in {}ms - Avg {}ms / resource", count, concepts.getContent().size(), stopwatch.getMillis(), stopwatch.getMillisPerOperation(count)); + ourLog.info( + "Indexed {} / {} concepts in {}ms - Avg {}ms / resource", + count, + concepts.getContent().size(), + stopwatch.getMillis(), + stopwatch.getMillisPerOperation(count)); } }); - } @Override @@ -172,6 +190,4 @@ public class TermReindexingSvcImpl implements ITermReindexingSvc, IHasScheduledJ public static void setForceSaveDeferredAlwaysForUnitTest(boolean 
theForceSaveDeferredAlwaysForUnitTest) { ourForceSaveDeferredAlwaysForUnitTest = theForceSaveDeferredAlwaysForUnitTest; } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermVersionAdapterSvcDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermVersionAdapterSvcDstu2.java index 47be4e7fd8e..882508ed9da 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermVersionAdapterSvcDstu2.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermVersionAdapterSvcDstu2.java @@ -43,5 +43,4 @@ public class TermVersionAdapterSvcDstu2 implements ITermVersionAdapterSvc { public void createOrUpdateValueSet(ValueSet theValueSet) { throw new UnsupportedOperationException(Msg.code(884)); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermVersionAdapterSvcDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermVersionAdapterSvcDstu3.java index 5e530fc24f6..4a4ae31292e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermVersionAdapterSvcDstu3.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermVersionAdapterSvcDstu3.java @@ -52,7 +52,6 @@ public class TermVersionAdapterSvcDstu3 extends BaseTermVersionAdapterSvcImpl im super(); } - /** * Initialize the beans that are used by this service. *

    @@ -70,19 +69,25 @@ public class TermVersionAdapterSvcDstu3 extends BaseTermVersionAdapterSvcImpl im } @Override - public IIdType createOrUpdateCodeSystem(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource, RequestDetails theRequestDetails) { + public IIdType createOrUpdateCodeSystem( + org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource, RequestDetails theRequestDetails) { CodeSystem resourceToStore; try { - resourceToStore = (CodeSystem) VersionConvertorFactory_30_40.convertResource(theCodeSystemResource, new BaseAdvisor_30_40(false)); + resourceToStore = (CodeSystem) + VersionConvertorFactory_30_40.convertResource(theCodeSystemResource, new BaseAdvisor_30_40(false)); } catch (FHIRException e) { throw new InternalErrorException(Msg.code(879) + e); } validateCodeSystemForStorage(theCodeSystemResource); if (isBlank(resourceToStore.getIdElement().getIdPart())) { String matchUrl = "CodeSystem?url=" + UrlUtil.escapeUrlParam(theCodeSystemResource.getUrl()); - return myCodeSystemResourceDao.update(resourceToStore, matchUrl, theRequestDetails).getId(); + return myCodeSystemResourceDao + .update(resourceToStore, matchUrl, theRequestDetails) + .getId(); } else { - return myCodeSystemResourceDao.update(resourceToStore, theRequestDetails).getId(); + return myCodeSystemResourceDao + .update(resourceToStore, theRequestDetails) + .getId(); } } @@ -90,7 +95,8 @@ public class TermVersionAdapterSvcDstu3 extends BaseTermVersionAdapterSvcImpl im public void createOrUpdateConceptMap(org.hl7.fhir.r4.model.ConceptMap theConceptMap) { ConceptMap resourceToStore; try { - resourceToStore = (ConceptMap) VersionConvertorFactory_30_40.convertResource(theConceptMap, new BaseAdvisor_30_40(false)); + resourceToStore = (ConceptMap) + VersionConvertorFactory_30_40.convertResource(theConceptMap, new BaseAdvisor_30_40(false)); } catch (FHIRException e) { throw new InternalErrorException(Msg.code(880) + e); } @@ -106,7 +112,8 @@ public class TermVersionAdapterSvcDstu3 extends BaseTermVersionAdapterSvcImpl im public void createOrUpdateValueSet(org.hl7.fhir.r4.model.ValueSet theValueSet) { ValueSet valueSetDstu3; try { - valueSetDstu3 = (ValueSet) VersionConvertorFactory_30_40.convertResource(theValueSet, new BaseAdvisor_30_40(false)); + valueSetDstu3 = + (ValueSet) VersionConvertorFactory_30_40.convertResource(theValueSet, new BaseAdvisor_30_40(false)); } catch (FHIRException e) { throw new InternalErrorException(Msg.code(881) + e); } @@ -118,5 +125,4 @@ public class TermVersionAdapterSvcDstu3 extends BaseTermVersionAdapterSvcImpl im myValueSetResourceDao.update(valueSetDstu3); } } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermVersionAdapterSvcR4.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermVersionAdapterSvcR4.java index 8be659d365f..4c813e329be 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermVersionAdapterSvcR4.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermVersionAdapterSvcR4.java @@ -63,16 +63,21 @@ public class TermVersionAdapterSvcR4 extends BaseTermVersionAdapterSvcImpl imple } @Override - public IIdType createOrUpdateCodeSystem(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource, RequestDetails theRequestDetails) { + public IIdType createOrUpdateCodeSystem( + org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource, RequestDetails theRequestDetails) { validateCodeSystemForStorage(theCodeSystemResource); if (isBlank(theCodeSystemResource.getIdElement().getIdPart())) { if 
(theCodeSystemResource.getUrl().contains(LOINC_LOW)) { throw new InvalidParameterException(Msg.code(859) + "'loinc' CodeSystem must have an 'ID' element"); } String matchUrl = "CodeSystem?url=" + UrlUtil.escapeUrlParam(theCodeSystemResource.getUrl()); - return myCodeSystemResourceDao.update(theCodeSystemResource, matchUrl, theRequestDetails).getId(); + return myCodeSystemResourceDao + .update(theCodeSystemResource, matchUrl, theRequestDetails) + .getId(); } else { - return myCodeSystemResourceDao.update(theCodeSystemResource, theRequestDetails).getId(); + return myCodeSystemResourceDao + .update(theCodeSystemResource, theRequestDetails) + .getId(); } } @@ -95,5 +100,4 @@ public class TermVersionAdapterSvcR4 extends BaseTermVersionAdapterSvcImpl imple myValueSetResourceDao.update(theValueSet); } } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermVersionAdapterSvcR4B.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermVersionAdapterSvcR4B.java index ed51a9347db..24db2d0f5e5 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermVersionAdapterSvcR4B.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermVersionAdapterSvcR4B.java @@ -63,24 +63,33 @@ public class TermVersionAdapterSvcR4B extends BaseTermVersionAdapterSvcImpl impl } @Override - public IIdType createOrUpdateCodeSystem(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource, RequestDetails theRequestDetails) { + public IIdType createOrUpdateCodeSystem( + org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource, RequestDetails theRequestDetails) { validateCodeSystemForStorage(theCodeSystemResource); - org.hl7.fhir.r5.model.CodeSystem codeSystemR5 = (org.hl7.fhir.r5.model.CodeSystem) VersionConvertorFactory_40_50.convertResource(theCodeSystemResource, new BaseAdvisor_40_50(false)); - CodeSystem codeSystemR4 = (CodeSystem) VersionConvertorFactory_43_50.convertResource(codeSystemR5, new BaseAdvisor_43_50(false)); + org.hl7.fhir.r5.model.CodeSystem codeSystemR5 = (org.hl7.fhir.r5.model.CodeSystem) + VersionConvertorFactory_40_50.convertResource(theCodeSystemResource, new BaseAdvisor_40_50(false)); + CodeSystem codeSystemR4 = + (CodeSystem) VersionConvertorFactory_43_50.convertResource(codeSystemR5, new BaseAdvisor_43_50(false)); if (isBlank(theCodeSystemResource.getIdElement().getIdPart())) { String matchUrl = "CodeSystem?url=" + UrlUtil.escapeUrlParam(theCodeSystemResource.getUrl()); - return myCodeSystemResourceDao.update(codeSystemR4, matchUrl, theRequestDetails).getId(); + return myCodeSystemResourceDao + .update(codeSystemR4, matchUrl, theRequestDetails) + .getId(); } else { - return myCodeSystemResourceDao.update(codeSystemR4, theRequestDetails).getId(); + return myCodeSystemResourceDao + .update(codeSystemR4, theRequestDetails) + .getId(); } } @Override public void createOrUpdateConceptMap(org.hl7.fhir.r4.model.ConceptMap theConceptMap) { - org.hl7.fhir.r5.model.ConceptMap conceptMapR5 = (org.hl7.fhir.r5.model.ConceptMap) VersionConvertorFactory_40_50.convertResource(theConceptMap, new BaseAdvisor_40_50(false)); - ConceptMap conceptMapR4 = (ConceptMap) VersionConvertorFactory_43_50.convertResource(conceptMapR5, new BaseAdvisor_43_50(false)); + org.hl7.fhir.r5.model.ConceptMap conceptMapR5 = (org.hl7.fhir.r5.model.ConceptMap) + VersionConvertorFactory_40_50.convertResource(theConceptMap, new BaseAdvisor_40_50(false)); + ConceptMap conceptMapR4 = + (ConceptMap) VersionConvertorFactory_43_50.convertResource(conceptMapR5, new 
BaseAdvisor_43_50(false)); if (isBlank(theConceptMap.getIdElement().getIdPart())) { String matchUrl = "ConceptMap?url=" + UrlUtil.escapeUrlParam(theConceptMap.getUrl()); @@ -93,8 +102,10 @@ public class TermVersionAdapterSvcR4B extends BaseTermVersionAdapterSvcImpl impl @Override public void createOrUpdateValueSet(org.hl7.fhir.r4.model.ValueSet theValueSet) { - org.hl7.fhir.r5.model.ValueSet valueSetR5 = (org.hl7.fhir.r5.model.ValueSet) VersionConvertorFactory_40_50.convertResource(theValueSet, new BaseAdvisor_40_50(false)); - ValueSet valueSetR4 = (ValueSet) VersionConvertorFactory_43_50.convertResource(valueSetR5, new BaseAdvisor_43_50(false)); + org.hl7.fhir.r5.model.ValueSet valueSetR5 = (org.hl7.fhir.r5.model.ValueSet) + VersionConvertorFactory_40_50.convertResource(theValueSet, new BaseAdvisor_40_50(false)); + ValueSet valueSetR4 = + (ValueSet) VersionConvertorFactory_43_50.convertResource(valueSetR5, new BaseAdvisor_43_50(false)); if (isBlank(theValueSet.getIdElement().getIdPart())) { String matchUrl = "ValueSet?url=" + UrlUtil.escapeUrlParam(theValueSet.getUrl()); @@ -103,5 +114,4 @@ public class TermVersionAdapterSvcR4B extends BaseTermVersionAdapterSvcImpl impl myValueSetResourceDao.update(valueSetR4); } } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermVersionAdapterSvcR5.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermVersionAdapterSvcR5.java index 98ef3079fbd..be6660ff64e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermVersionAdapterSvcR5.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermVersionAdapterSvcR5.java @@ -61,22 +61,29 @@ public class TermVersionAdapterSvcR5 extends BaseTermVersionAdapterSvcImpl imple } @Override - public IIdType createOrUpdateCodeSystem(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource, RequestDetails theRequestDetails) { + public IIdType createOrUpdateCodeSystem( + org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource, RequestDetails theRequestDetails) { validateCodeSystemForStorage(theCodeSystemResource); - CodeSystem codeSystemR4 = (CodeSystem) VersionConvertorFactory_40_50.convertResource(theCodeSystemResource, new BaseAdvisor_40_50(false)); + CodeSystem codeSystemR4 = (CodeSystem) + VersionConvertorFactory_40_50.convertResource(theCodeSystemResource, new BaseAdvisor_40_50(false)); if (isBlank(theCodeSystemResource.getIdElement().getIdPart())) { String matchUrl = "CodeSystem?url=" + UrlUtil.escapeUrlParam(theCodeSystemResource.getUrl()); - return myCodeSystemResourceDao.update(codeSystemR4, matchUrl, theRequestDetails).getId(); + return myCodeSystemResourceDao + .update(codeSystemR4, matchUrl, theRequestDetails) + .getId(); } else { - return myCodeSystemResourceDao.update(codeSystemR4, theRequestDetails).getId(); + return myCodeSystemResourceDao + .update(codeSystemR4, theRequestDetails) + .getId(); } } @Override public void createOrUpdateConceptMap(org.hl7.fhir.r4.model.ConceptMap theConceptMap) { - ConceptMap conceptMapR4 = (ConceptMap) VersionConvertorFactory_40_50.convertResource(theConceptMap, new BaseAdvisor_40_50(false)); + ConceptMap conceptMapR4 = + (ConceptMap) VersionConvertorFactory_40_50.convertResource(theConceptMap, new BaseAdvisor_40_50(false)); if (isBlank(theConceptMap.getIdElement().getIdPart())) { String matchUrl = "ConceptMap?url=" + UrlUtil.escapeUrlParam(theConceptMap.getUrl()); @@ -89,7 +96,8 @@ public class TermVersionAdapterSvcR5 extends BaseTermVersionAdapterSvcImpl imple @Override public void 
createOrUpdateValueSet(org.hl7.fhir.r4.model.ValueSet theValueSet) { - ValueSet valueSetR4 = (ValueSet) VersionConvertorFactory_40_50.convertResource(theValueSet, new BaseAdvisor_40_50(false)); + ValueSet valueSetR4 = + (ValueSet) VersionConvertorFactory_40_50.convertResource(theValueSet, new BaseAdvisor_40_50(false)); if (isBlank(theValueSet.getIdElement().getIdPart())) { String matchUrl = "ValueSet?url=" + UrlUtil.escapeUrlParam(theValueSet.getUrl()); @@ -98,5 +106,4 @@ public class TermVersionAdapterSvcR5 extends BaseTermVersionAdapterSvcImpl imple myValueSetResourceDao.update(valueSetR4); } } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java index b0f5de0f874..18c91c828b0 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetConceptAccumulator.java @@ -28,10 +28,10 @@ import ca.uhn.fhir.jpa.entity.TermValueSetConcept; import ca.uhn.fhir.jpa.entity.TermValueSetConceptDesignation; import ca.uhn.fhir.util.ValidateUtil; -import javax.annotation.Nonnull; import java.util.Collection; import java.util.List; import java.util.Optional; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isAnyBlank; import static org.apache.commons.lang3.StringUtils.isNoneBlank; @@ -41,14 +41,18 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ValueSetConceptAccumulator.class); private TermValueSet myTermValueSet; - final private ITermValueSetDao myValueSetDao; - final private ITermValueSetConceptDao myValueSetConceptDao; - final private ITermValueSetConceptDesignationDao myValueSetConceptDesignationDao; + private final ITermValueSetDao myValueSetDao; + private final ITermValueSetConceptDao myValueSetConceptDao; + private final ITermValueSetConceptDesignationDao myValueSetConceptDesignationDao; private int myConceptsSaved; private int myDesignationsSaved; private int myConceptsExcluded; - public ValueSetConceptAccumulator(@Nonnull TermValueSet theTermValueSet, @Nonnull ITermValueSetDao theValueSetDao, @Nonnull ITermValueSetConceptDao theValueSetConceptDao, @Nonnull ITermValueSetConceptDesignationDao theValueSetConceptDesignationDao) { + public ValueSetConceptAccumulator( + @Nonnull TermValueSet theTermValueSet, + @Nonnull ITermValueSetDao theValueSetDao, + @Nonnull ITermValueSetConceptDao theValueSetConceptDao, + @Nonnull ITermValueSetConceptDesignationDao theValueSetConceptDesignationDao) { myTermValueSet = theTermValueSet; myValueSetDao = theValueSetDao; myValueSetConceptDao = theValueSetConceptDao; @@ -65,13 +69,38 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator { } @Override - public void includeConcept(String theSystem, String theCode, String theDisplay, Long theSourceConceptPid, String theSourceConceptDirectParentPids, String theSystemVersion) { - saveConcept(theSystem, theCode, theDisplay, theSourceConceptPid, theSourceConceptDirectParentPids, theSystemVersion); + public void includeConcept( + String theSystem, + String theCode, + String theDisplay, + Long theSourceConceptPid, + String theSourceConceptDirectParentPids, + String theSystemVersion) { + saveConcept( + theSystem, + theCode, + theDisplay, + theSourceConceptPid, + 
theSourceConceptDirectParentPids, + theSystemVersion); } @Override - public void includeConceptWithDesignations(String theSystem, String theCode, String theDisplay, Collection theDesignations, Long theSourceConceptPid, String theSourceConceptDirectParentPids, String theSystemVersion) { - TermValueSetConcept concept = saveConcept(theSystem, theCode, theDisplay, theSourceConceptPid, theSourceConceptDirectParentPids, theSystemVersion); + public void includeConceptWithDesignations( + String theSystem, + String theCode, + String theDisplay, + Collection theDesignations, + Long theSourceConceptPid, + String theSourceConceptDirectParentPids, + String theSystemVersion) { + TermValueSetConcept concept = saveConcept( + theSystem, + theCode, + theDisplay, + theSourceConceptPid, + theSourceConceptDirectParentPids, + theSystemVersion); if (theDesignations != null) { for (TermConceptDesignation designation : theDesignations) { saveConceptDesignation(concept, designation); @@ -89,17 +118,23 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator { Optional optionalConcept; int versionIdx = theSystem.indexOf("|"); if (versionIdx >= 0) { - String systemUrl = theSystem.substring(0,versionIdx); - String systemVersion = theSystem.substring(versionIdx+1); - optionalConcept = myValueSetConceptDao.findByTermValueSetIdSystemAndCodeWithVersion(myTermValueSet.getId(), systemUrl, systemVersion,theCode); + String systemUrl = theSystem.substring(0, versionIdx); + String systemVersion = theSystem.substring(versionIdx + 1); + optionalConcept = myValueSetConceptDao.findByTermValueSetIdSystemAndCodeWithVersion( + myTermValueSet.getId(), systemUrl, systemVersion, theCode); } else { - optionalConcept = myValueSetConceptDao.findByTermValueSetIdSystemAndCode(myTermValueSet.getId(), theSystem, theCode); + optionalConcept = + myValueSetConceptDao.findByTermValueSetIdSystemAndCode(myTermValueSet.getId(), theSystem, theCode); } if (optionalConcept.isPresent()) { TermValueSetConcept concept = optionalConcept.get(); - ourLog.debug("Excluding [{}|{}] from ValueSet[{}]", concept.getSystem(), concept.getCode(), myTermValueSet.getUrl()); + ourLog.debug( + "Excluding [{}|{}] from ValueSet[{}]", + concept.getSystem(), + concept.getCode(), + myTermValueSet.getUrl()); for (TermValueSetConceptDesignation designation : concept.getDesignations()) { myValueSetConceptDesignationDao.deleteById(designation.getId()); myTermValueSet.decrementTotalConceptDesignations(); @@ -107,7 +142,11 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator { myValueSetConceptDao.deleteById(concept.getId()); myTermValueSet.decrementTotalConcepts(); myValueSetDao.save(myTermValueSet); - ourLog.debug("Done excluding [{}|{}] from ValueSet[{}]", concept.getSystem(), concept.getCode(), myTermValueSet.getUrl()); + ourLog.debug( + "Done excluding [{}|{}] from ValueSet[{}]", + concept.getSystem(), + concept.getCode(), + myTermValueSet.getUrl()); if (++myConceptsExcluded % 250 == 0) { ourLog.info("Have excluded {} concepts from ValueSet[{}]", myConceptsExcluded, myTermValueSet.getUrl()); @@ -116,7 +155,13 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator { return false; } - private TermValueSetConcept saveConcept(String theSystem, String theCode, String theDisplay, Long theSourceConceptPid, String theSourceConceptDirectParentPids, String theSystemVersion) { + private TermValueSetConcept saveConcept( + String theSystem, + String theCode, + String theDisplay, + Long theSourceConceptPid, + 
String theSourceConceptDirectParentPids, + String theSystemVersion) { ValidateUtil.isNotBlankOrThrowInvalidRequest(theSystem, "ValueSet contains a concept with no system value"); ValidateUtil.isNotBlankOrThrowInvalidRequest(theCode, "ValueSet contains a concept with no code value"); @@ -126,7 +171,7 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator { int versionIndex = theSystem.indexOf("|"); if (versionIndex >= 0) { concept.setSystem(theSystem.substring(0, versionIndex)); - concept.setSystemVersion(theSystem.substring(versionIndex+1)); + concept.setSystemVersion(theSystem.substring(versionIndex + 1)); } else { concept.setSystem(theSystem); } @@ -149,8 +194,10 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator { return concept; } - private TermValueSetConceptDesignation saveConceptDesignation(TermValueSetConcept theConcept, TermConceptDesignation theDesignation) { - ValidateUtil.isNotBlankOrThrowInvalidRequest(theDesignation.getValue(), "ValueSet contains a concept designation with no value"); + private TermValueSetConceptDesignation saveConceptDesignation( + TermValueSetConcept theConcept, TermConceptDesignation theDesignation) { + ValidateUtil.isNotBlankOrThrowInvalidRequest( + theDesignation.getValue(), "ValueSet contains a concept designation with no value"); TermValueSetConceptDesignation designation = new TermValueSetConceptDesignation(); designation.setConcept(theConcept); @@ -168,7 +215,12 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator { myValueSetDao.save(myTermValueSet.incrementTotalConceptDesignations()); if (++myDesignationsSaved % 250 == 0) { - ourLog.debug("Have pre-expanded {} designations for Concept[{}|{}] in ValueSet[{}]", myDesignationsSaved, theConcept.getSystem(), theConcept.getCode(), myTermValueSet.getUrl()); + ourLog.debug( + "Have pre-expanded {} designations for Concept[{}|{}] in ValueSet[{}]", + myDesignationsSaved, + theConcept.getSystem(), + theConcept.getCode(), + myTermValueSet.getUrl()); } return designation; @@ -185,16 +237,20 @@ public class ValueSetConceptAccumulator implements IValueSetConceptAccumulator { for (Long conceptId : conceptIds) { myValueSetConceptDao.updateOrderById(conceptId, order++); } - ourLog.info("Have removed gaps from concept order for {} concepts in ValueSet[{}]", conceptIds.size(), myTermValueSet.getUrl()); + ourLog.info( + "Have removed gaps from concept order for {} concepts in ValueSet[{}]", + conceptIds.size(), + myTermValueSet.getUrl()); return true; } - public int getConceptsSaved() { + public int getConceptsSaved() { return myConceptsSaved; - } + } - // TODO: DM 2019-07-16 - We may need TermValueSetConceptProperty, similar to TermConceptProperty. + // TODO: DM 2019-07-16 - We may need TermValueSetConceptProperty, similar to TermConceptProperty. // TODO: DM 2019-07-16 - If so, we should also populate TermValueSetConceptProperty entities here. - // TODO: DM 2019-07-30 - Expansions don't include the properties themselves; they may be needed to facilitate filters and parameterized expansions. + // TODO: DM 2019-07-30 - Expansions don't include the properties themselves; they may be needed to facilitate + // filters and parameterized expansions. 
} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetExpansionComponentWithConceptAccumulator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetExpansionComponentWithConceptAccumulator.java index e8c3e996376..94a6184120c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetExpansionComponentWithConceptAccumulator.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetExpansionComponentWithConceptAccumulator.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.jpa.term; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.entity.TermConceptDesignation; import ca.uhn.fhir.jpa.term.ex.ExpansionTooCostlyException; import ca.uhn.fhir.model.api.annotation.Block; @@ -28,8 +28,6 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import org.apache.commons.lang3.StringUtils; import org.hl7.fhir.r4.model.ValueSet; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -38,11 +36,14 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.isNotBlank; @Block() -public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.ValueSetExpansionComponent implements IValueSetConceptAccumulator { +public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.ValueSetExpansionComponent + implements IValueSetConceptAccumulator { private final int myMaxCapacity; private final FhirContext myContext; private int mySkipCountRemaining; @@ -51,7 +52,8 @@ public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.V private int myAddedConcepts; private Integer myTotalConcepts; private Map mySourcePidToConcept = new HashMap<>(); - private Map myConceptToSourceDirectParentPids = new HashMap<>(); + private Map myConceptToSourceDirectParentPids = + new HashMap<>(); private boolean myTrackingHierarchy; /** @@ -61,7 +63,8 @@ public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.V * an {@link InternalErrorException} * @param theTrackingHierarchy */ - ValueSetExpansionComponentWithConceptAccumulator(FhirContext theContext, int theMaxCapacity, boolean theTrackingHierarchy) { + ValueSetExpansionComponentWithConceptAccumulator( + FhirContext theContext, int theMaxCapacity, boolean theTrackingHierarchy) { myMaxCapacity = theMaxCapacity; myContext = theContext; myTrackingHierarchy = theTrackingHierarchy; @@ -94,7 +97,13 @@ public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.V } @Override - public void includeConcept(String theSystem, String theCode, String theDisplay, Long theSourceConceptPid, String theSourceConceptDirectParentPids, String theCodeSystemVersion) { + public void includeConcept( + String theSystem, + String theCode, + String theDisplay, + Long theSourceConceptPid, + String theSourceConceptDirectParentPids, + String theCodeSystemVersion) { if (mySkipCountRemaining > 0) { mySkipCountRemaining--; return; @@ -110,7 +119,14 @@ public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.V } @Override - public void includeConceptWithDesignations(String theSystem, String theCode, String theDisplay, Collection theDesignations, Long theSourceConceptPid, 
String theSourceConceptDirectParentPids, String theCodeSystemVersion) { + public void includeConceptWithDesignations( + String theSystem, + String theCode, + String theDisplay, + Collection theDesignations, + Long theSourceConceptPid, + String theSourceConceptDirectParentPids, + String theCodeSystemVersion) { if (mySkipCountRemaining > 0) { mySkipCountRemaining--; return; @@ -137,14 +153,13 @@ public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.V if (theDesignations != null) { for (TermConceptDesignation termConceptDesignation : theDesignations) { - contains - .addDesignation() - .setValue(termConceptDesignation.getValue()) - .setLanguage(termConceptDesignation.getLanguage()) - .getUse() - .setSystem(termConceptDesignation.getUseSystem()) - .setCode(termConceptDesignation.getUseCode()) - .setDisplay(termConceptDesignation.getUseDisplay()); + contains.addDesignation() + .setValue(termConceptDesignation.getValue()) + .setLanguage(termConceptDesignation.getLanguage()) + .getUse() + .setSystem(termConceptDesignation.getUseSystem()) + .setCode(termConceptDesignation.getUseCode()) + .setDisplay(termConceptDesignation.getUseDisplay()); } } } @@ -173,14 +188,12 @@ public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.V excludeSystemVersion = null; } if (excludeSystemVersion != null) { - return this.getContains().removeIf(t -> - excludeSystem.equals(t.getSystem()) && - theCode.equals(t.getCode()) && - excludeSystemVersion.equals(t.getVersion())); + return this.getContains() + .removeIf(t -> excludeSystem.equals(t.getSystem()) + && theCode.equals(t.getCode()) + && excludeSystemVersion.equals(t.getVersion())); } else { - return this.getContains().removeIf(t -> - theSystem.equals(t.getSystem()) && - theCode.equals(t.getCode())); + return this.getContains().removeIf(t -> theSystem.equals(t.getSystem()) && theCode.equals(t.getCode())); } } @@ -193,7 +206,9 @@ public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.V } if (myHardExpansionMaximumSize > 0 && myAddedConcepts > myHardExpansionMaximumSize) { - String msg = myContext.getLocalizer().getMessage(TermReadSvcImpl.class, "expansionTooLarge", myHardExpansionMaximumSize); + String msg = myContext + .getLocalizer() + .getMessage(TermReadSvcImpl.class, "expansionTooLarge", myHardExpansionMaximumSize); msg = appendAccumulatorMessages(msg); throw new ExpansionTooCostlyException(Msg.code(832) + msg); } @@ -203,7 +218,7 @@ public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.V @Nonnull private String appendAccumulatorMessages(String msg) { - msg += getMessages().stream().map(t->" - " + t).collect(Collectors.joining()); + msg += getMessages().stream().map(t -> " - " + t).collect(Collectors.joining()); return msg; } @@ -224,7 +239,8 @@ public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.V } } - private void setSystemAndVersion(String theSystemAndVersion, ValueSet.ValueSetExpansionContainsComponent myComponent) { + private void setSystemAndVersion( + String theSystemAndVersion, ValueSet.ValueSetExpansionContainsComponent myComponent) { if (StringUtils.isNotEmpty((theSystemAndVersion))) { int versionSeparator = theSystemAndVersion.lastIndexOf('|'); if (versionSeparator != -1) { @@ -246,11 +262,14 @@ public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.V public void applyHierarchy() { for (int i = 0; i < this.getContains().size(); i++) { - ValueSet.ValueSetExpansionContainsComponent nextContains = 
this.getContains().get(i); + ValueSet.ValueSetExpansionContainsComponent nextContains = + this.getContains().get(i); String directParentPidsString = myConceptToSourceDirectParentPids.get(nextContains); if (isNotBlank(directParentPidsString) && !directParentPidsString.equals("NONE")) { - List directParentPids = Arrays.stream(directParentPidsString.split(" ")).map(t -> Long.parseLong(t)).collect(Collectors.toList()); + List directParentPids = Arrays.stream(directParentPidsString.split(" ")) + .map(t -> Long.parseLong(t)) + .collect(Collectors.toList()); boolean firstMatch = false; for (Long next : directParentPids) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermCodeSystemStorageSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermCodeSystemStorageSvc.java index 2d1cfc7ffae..4874ff73f8f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermCodeSystemStorageSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermCodeSystemStorageSvc.java @@ -40,41 +40,57 @@ public interface ITermCodeSystemStorageSvc { String MAKE_LOADING_VERSION_CURRENT = "make.loading.version.current"; - /** * Defaults to true when parameter is null or entry is not present in requestDetails.myUserData */ static boolean isMakeVersionCurrent(RequestDetails theRequestDetails) { - return theRequestDetails == null || - (boolean) theRequestDetails.getUserData().getOrDefault(MAKE_LOADING_VERSION_CURRENT, Boolean.TRUE); + return theRequestDetails == null + || (boolean) theRequestDetails.getUserData().getOrDefault(MAKE_LOADING_VERSION_CURRENT, Boolean.TRUE); } - void storeNewCodeSystemVersion(IResourcePersistentId theCodeSystemResourcePid, String theSystemUri, String theSystemName, - String theSystemVersionId, TermCodeSystemVersion theCodeSystemVersion, ResourceTable theCodeSystemResourceTable, - RequestDetails theRequestDetails); + void storeNewCodeSystemVersion( + IResourcePersistentId theCodeSystemResourcePid, + String theSystemUri, + String theSystemName, + String theSystemVersionId, + TermCodeSystemVersion theCodeSystemVersion, + ResourceTable theCodeSystemResourceTable, + RequestDetails theRequestDetails); /** * Default implementation supports previous signature of method which was added RequestDetails parameter */ @Transactional - default void storeNewCodeSystemVersion(IResourcePersistentId theCodeSystemResourcePid, String theSystemUri, String theSystemName, - String theSystemVersionId, TermCodeSystemVersion theCodeSystemVersion, ResourceTable theCodeSystemResourceTable) { + default void storeNewCodeSystemVersion( + IResourcePersistentId theCodeSystemResourcePid, + String theSystemUri, + String theSystemName, + String theSystemVersionId, + TermCodeSystemVersion theCodeSystemVersion, + ResourceTable theCodeSystemResourceTable) { - storeNewCodeSystemVersion(theCodeSystemResourcePid, theSystemUri, theSystemName, theSystemVersionId, - theCodeSystemVersion, theCodeSystemResourceTable, null); + storeNewCodeSystemVersion( + theCodeSystemResourcePid, + theSystemUri, + theSystemName, + theSystemVersionId, + theCodeSystemVersion, + theCodeSystemResourceTable, + null); } - /** * @return Returns the ID of the created/updated code system */ - IIdType storeNewCodeSystemVersion(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource, - TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails, List theValueSets, - List theConceptMaps); + IIdType storeNewCodeSystemVersion( + org.hl7.fhir.r4.model.CodeSystem 
theCodeSystemResource, + TermCodeSystemVersion theCodeSystemVersion, + RequestDetails theRequestDetails, + List theValueSets, + List theConceptMaps); - - - void storeNewCodeSystemVersionIfNeeded(CodeSystem theCodeSystem, ResourceTable theResourceEntity, RequestDetails theRequestDetails); + void storeNewCodeSystemVersionIfNeeded( + CodeSystem theCodeSystem, ResourceTable theResourceEntity, RequestDetails theRequestDetails); /** * Default implementation supports previous signature of method which was added RequestDetails parameter @@ -83,12 +99,9 @@ public interface ITermCodeSystemStorageSvc { storeNewCodeSystemVersionIfNeeded(theCodeSystem, theResourceEntity, null); } - - UploadStatistics applyDeltaCodeSystemsAdd(String theSystem, CustomTerminologySet theAdditions); UploadStatistics applyDeltaCodeSystemsRemove(String theSystem, CustomTerminologySet theRemovals); int saveConcept(TermConcept theNextConcept); - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermConceptMappingSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermConceptMappingSvc.java index 181d3f5fbb9..127861a1961 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermConceptMappingSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermConceptMappingSvc.java @@ -20,14 +20,13 @@ package ca.uhn.fhir.jpa.term.api; import ca.uhn.fhir.context.support.IValidationSupport; -import ca.uhn.fhir.jpa.api.model.TranslationRequest; import ca.uhn.fhir.context.support.TranslateConceptResults; +import ca.uhn.fhir.jpa.api.model.TranslationRequest; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import org.hl7.fhir.r4.model.ConceptMap; public interface ITermConceptMappingSvc extends IValidationSupport { - TranslateConceptResults translate(TranslationRequest theTranslationRequest); TranslateConceptResults translateWithReverse(TranslationRequest theTranslationRequest); @@ -35,7 +34,4 @@ public interface ITermConceptMappingSvc extends IValidationSupport { void deleteConceptMapAndChildren(ResourceTable theResourceTable); void storeTermConceptMapAndChildren(ResourceTable theResourceTable, ConceptMap theConceptMap); - - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermReadSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermReadSvc.java index f19cd90fdb7..52bbbd2cc2e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermReadSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermReadSvc.java @@ -38,11 +38,11 @@ import org.hl7.fhir.r4.model.CodeSystem; import org.hl7.fhir.r4.model.ValueSet; import org.springframework.transaction.annotation.Transactional; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.List; import java.util.Optional; import java.util.Set; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * This interface is the "read" interface for the terminology service. 
It handles things like @@ -56,32 +56,44 @@ import java.util.Set; */ public interface ITermReadSvc extends IValidationSupport { - ValueSet expandValueSet(@Nullable ValueSetExpansionOptions theExpansionOptions, @Nonnull String theValueSetCanonicalUrl); + ValueSet expandValueSet( + @Nullable ValueSetExpansionOptions theExpansionOptions, @Nonnull String theValueSetCanonicalUrl); - ValueSet expandValueSet(@Nullable ValueSetExpansionOptions theExpansionOptions, @Nonnull ValueSet theValueSetToExpand); + ValueSet expandValueSet( + @Nullable ValueSetExpansionOptions theExpansionOptions, @Nonnull ValueSet theValueSetToExpand); - void expandValueSet(@Nullable ValueSetExpansionOptions theExpansionOptions, ValueSet theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator); + void expandValueSet( + @Nullable ValueSetExpansionOptions theExpansionOptions, + ValueSet theValueSetToExpand, + IValueSetConceptAccumulator theValueSetCodeAccumulator); /** * Version independent */ - IBaseResource expandValueSet(@Nullable ValueSetExpansionOptions theExpansionOptions, IBaseResource theValueSetToExpand); + IBaseResource expandValueSet( + @Nullable ValueSetExpansionOptions theExpansionOptions, IBaseResource theValueSetToExpand); - void expandValueSet(@Nullable ValueSetExpansionOptions theExpansionOptions, IBaseResource theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator); + void expandValueSet( + @Nullable ValueSetExpansionOptions theExpansionOptions, + IBaseResource theValueSetToExpand, + IValueSetConceptAccumulator theValueSetCodeAccumulator); - List expandValueSetIntoConceptList(ValueSetExpansionOptions theExpansionOptions, String theValueSetCanonicalUrl); + List expandValueSetIntoConceptList( + ValueSetExpansionOptions theExpansionOptions, String theValueSetCanonicalUrl); Optional findCode(String theCodeSystem, String theCode); List findCodes(String theCodeSystem, List theCodes); - Set findCodesAbove(Long theCodeSystemResourcePid, Long theCodeSystemResourceVersionPid, String theCode); + Set findCodesAbove( + Long theCodeSystemResourcePid, Long theCodeSystemResourceVersionPid, String theCode); List findCodesAbove(String theSystem, String theCode); List findCodesAboveUsingBuiltInSystems(String theSystem, String theCode); - Set findCodesBelow(Long theCodeSystemResourcePid, Long theCodeSystemResourceVersionPid, String theCode); + Set findCodesBelow( + Long theCodeSystemResourcePid, Long theCodeSystemResourceVersionPid, String theCode); List findCodesBelow(String theSystem, String theCode); @@ -93,7 +105,12 @@ public interface ITermReadSvc extends IValidationSupport { void storeTermValueSet(ResourceTable theResourceTable, ValueSet theValueSet); - IFhirResourceDaoCodeSystem.SubsumesResult subsumes(IPrimitiveType theCodeA, IPrimitiveType theCodeB, IPrimitiveType theSystem, IBaseCoding theCodingA, IBaseCoding theCodingB); + IFhirResourceDaoCodeSystem.SubsumesResult subsumes( + IPrimitiveType theCodeA, + IPrimitiveType theCodeB, + IPrimitiveType theSystem, + IBaseCoding theCodingA, + IBaseCoding theCodingB); void preExpandDeferredValueSetsToTerminologyTables(); @@ -101,7 +118,14 @@ public interface ITermReadSvc extends IValidationSupport { * Version independent */ @Transactional() - CodeValidationResult validateCodeIsInPreExpandedValueSet(ConceptValidationOptions theOptions, IBaseResource theValueSet, String theSystem, String theCode, String theDisplay, IBaseDatatype theCoding, IBaseDatatype theCodeableConcept); + CodeValidationResult validateCodeIsInPreExpandedValueSet( + 
ConceptValidationOptions theOptions, + IBaseResource theValueSet, + String theSystem, + String theCode, + String theDisplay, + IBaseDatatype theCoding, + IBaseDatatype theCodeableConcept); boolean isValueSetPreExpandedForCodeValidation(ValueSet theValueSet); @@ -127,5 +151,4 @@ public interface ITermReadSvc extends IValidationSupport { * Recreates freetext indexes for TermConcept and nested TermConceptProperty */ ReindexTerminologyResult reindexTerminology() throws InterruptedException; - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermReindexingSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermReindexingSvc.java index 812af9e36a4..e6e20a52f8f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermReindexingSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermReindexingSvc.java @@ -22,5 +22,4 @@ package ca.uhn.fhir.jpa.term.api; public interface ITermReindexingSvc { void processReindexing(); - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermVersionAdapterSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermVersionAdapterSvc.java index 948b8f69706..b569f4cfa06 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermVersionAdapterSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermVersionAdapterSvc.java @@ -41,5 +41,4 @@ public interface ITermVersionAdapterSvc { void createOrUpdateConceptMap(ConceptMap theNextConceptMap); void createOrUpdateValueSet(ValueSet theValueSet); - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/TermCodeSystemDeleteJobSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/TermCodeSystemDeleteJobSvc.java index e47699cd401..4b8942758ae 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/TermCodeSystemDeleteJobSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/TermCodeSystemDeleteJobSvc.java @@ -117,10 +117,14 @@ public class TermCodeSystemDeleteJobSvc implements ITermCodeSystemDeleteJobSvc { ourLog.debug("Executing for codeSystemVersionId: {}", theVersionPid); // if TermCodeSystemVersion being deleted is current, disconnect it form TermCodeSystem - Optional codeSystemOpt = myCodeSystemDao.findWithCodeSystemVersionAsCurrentVersion(theVersionPid); + Optional codeSystemOpt = + myCodeSystemDao.findWithCodeSystemVersionAsCurrentVersion(theVersionPid); if (codeSystemOpt.isPresent()) { TermCodeSystem codeSystem = codeSystemOpt.get(); - ourLog.info("Removing code system version: {} as current version of code system: {}", theVersionPid, codeSystem.getPid()); + ourLog.info( + "Removing code system version: {} as current version of code system: {}", + theVersionPid, + codeSystem.getPid()); codeSystem.setCurrentVersion(null); myCodeSystemDao.save(codeSystem); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/custom/CustomTerminologySet.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/custom/CustomTerminologySet.java index 20c412abe20..72bd94f53a8 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/custom/CustomTerminologySet.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/custom/CustomTerminologySet.java @@ -30,7 +30,6 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import org.apache.commons.csv.QuoteMode; import 
org.apache.commons.lang3.Validate; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -40,6 +39,7 @@ import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class CustomTerminologySet { @@ -67,7 +67,8 @@ public class CustomTerminologySet { public TermConcept addRootConcept(String theCode, String theDisplay) { Validate.notBlank(theCode, "theCode must not be blank"); - Validate.isTrue(myRootConcepts.stream().noneMatch(t -> t.getCode().equals(theCode)), "Already have code %s", theCode); + Validate.isTrue( + myRootConcepts.stream().noneMatch(t -> t.getCode().equals(theCode)), "Already have code %s", theCode); TermConcept retVal = new TermConcept(); retVal.setCode(theCode); retVal.setDisplay(theDisplay); @@ -75,7 +76,6 @@ public class CustomTerminologySet { return retVal; } - public int getSize() { return mySize; } @@ -122,10 +122,7 @@ public class CustomTerminologySet { } public Set getRootConceptCodes() { - return getRootConcepts() - .stream() - .map(TermConcept::getCode) - .collect(Collectors.toSet()); + return getRootConcepts().stream().map(TermConcept::getCode).collect(Collectors.toSet()); } @Nonnull @@ -135,14 +132,26 @@ public class CustomTerminologySet { // Concepts IZipContentsHandlerCsv conceptHandler = new ConceptHandler(code2concept); - TermLoaderSvcImpl.iterateOverZipFileCsv(theDescriptors, TermLoaderSvcImpl.CUSTOM_CONCEPTS_FILE, conceptHandler, ',', QuoteMode.NON_NUMERIC, false); + TermLoaderSvcImpl.iterateOverZipFileCsv( + theDescriptors, + TermLoaderSvcImpl.CUSTOM_CONCEPTS_FILE, + conceptHandler, + ',', + QuoteMode.NON_NUMERIC, + false); if (theDescriptors.hasFile(TermLoaderSvcImpl.CUSTOM_PROPERTIES_FILE)) { Map> theCode2property = new LinkedHashMap<>(); IZipContentsHandlerCsv propertyHandler = new PropertyHandler(theCode2property); - TermLoaderSvcImpl.iterateOverZipFileCsv(theDescriptors, TermLoaderSvcImpl.CUSTOM_PROPERTIES_FILE, propertyHandler, ',', QuoteMode.NON_NUMERIC, false); + TermLoaderSvcImpl.iterateOverZipFileCsv( + theDescriptors, + TermLoaderSvcImpl.CUSTOM_PROPERTIES_FILE, + propertyHandler, + ',', + QuoteMode.NON_NUMERIC, + false); for (TermConcept termConcept : code2concept.values()) { - if (!theCode2property.isEmpty() && theCode2property.get(termConcept.getCode()) != null) { + if (!theCode2property.isEmpty() && theCode2property.get(termConcept.getCode()) != null) { theCode2property.get(termConcept.getCode()).forEach(property -> { termConcept.getProperties().add(property); }); @@ -159,7 +168,13 @@ public class CustomTerminologySet { // Hierarchy if (theDescriptors.hasFile(TermLoaderSvcImpl.CUSTOM_HIERARCHY_FILE)) { IZipContentsHandlerCsv hierarchyHandler = new HierarchyHandler(code2concept); - TermLoaderSvcImpl.iterateOverZipFileCsv(theDescriptors, TermLoaderSvcImpl.CUSTOM_HIERARCHY_FILE, hierarchyHandler, ',', QuoteMode.NON_NUMERIC, false); + TermLoaderSvcImpl.iterateOverZipFileCsv( + theDescriptors, + TermLoaderSvcImpl.CUSTOM_HIERARCHY_FILE, + hierarchyHandler, + ',', + QuoteMode.NON_NUMERIC, + false); } Map codesInOrder = new HashMap<>(); @@ -183,11 +198,9 @@ public class CustomTerminologySet { int order2 = codesInOrder.get(code2); return order1 - order2; }); - } return new CustomTerminologySet(code2concept.size(), rootConcepts); } } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/custom/PropertyHandler.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/custom/PropertyHandler.java index 6a5be2cd6e7..775ec1b7687 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/custom/PropertyHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/custom/PropertyHandler.java @@ -55,15 +55,17 @@ public class PropertyHandler implements IZipContentsHandlerCsv { String type = trim(theRecord.get(TYPE)); List conceptProperties = myCode2Properties.get(code); - if (conceptProperties == null) - conceptProperties = new ArrayList<>(); + if (conceptProperties == null) conceptProperties = new ArrayList<>(); - TermConceptProperty conceptProperty = TermLoaderSvcImpl.getOrCreateConceptProperty(myCode2Properties, code, key); - ValidateUtil.isNotNullOrThrowUnprocessableEntity(conceptProperty, "Concept property %s not found in file", conceptProperty); + TermConceptProperty conceptProperty = + TermLoaderSvcImpl.getOrCreateConceptProperty(myCode2Properties, code, key); + ValidateUtil.isNotNullOrThrowUnprocessableEntity( + conceptProperty, "Concept property %s not found in file", conceptProperty); conceptProperty.setKey(key); conceptProperty.setValue(value); - //TODO: check this for different types, other types should be added once TermConceptPropertyTypeEnum contain different types + // TODO: check this for different types, other types should be added once TermConceptPropertyTypeEnum + // contain different types conceptProperty.setType(TermConceptPropertyTypeEnum.STRING); conceptProperties.add(conceptProperty); myCode2Properties.put(code, conceptProperties); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ex/ExpansionTooCostlyException.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ex/ExpansionTooCostlyException.java index 03a941f44f2..32cd836322c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ex/ExpansionTooCostlyException.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ex/ExpansionTooCostlyException.java @@ -26,5 +26,4 @@ public class ExpansionTooCostlyException extends InternalErrorException { public ExpansionTooCostlyException(String theMessage) { super(theMessage); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/icd10/Icd10Loader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/icd10/Icd10Loader.java index 582836644b8..f7def558a67 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/icd10/Icd10Loader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/icd10/Icd10Loader.java @@ -19,7 +19,6 @@ */ package ca.uhn.fhir.jpa.term.icd10; - import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion; import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink; @@ -30,9 +29,7 @@ import org.xml.sax.SAXException; import java.io.IOException; import java.io.Reader; -import java.util.Collection; import java.util.HashMap; -import java.util.List; import java.util.Map; import java.util.Optional; @@ -87,11 +84,13 @@ public class Icd10Loader { // Preferred label and other properties for (Element rubric : getChildrenByTagName(aClass, "Rubric")) { String kind = rubric.getAttribute("kind"); - Optional firstLabel = getChildrenByTagName(rubric, "Label").stream().findFirst(); + Optional firstLabel = + getChildrenByTagName(rubric, "Label").stream().findFirst(); if (firstLabel.isPresent()) { String textContent = firstLabel.get().getTextContent(); if (textContent != null && 
!textContent.isEmpty()) { - textContent = textContent.replace("\n", "").replace("\r", "").replace("\t", ""); + textContent = + textContent.replace("\n", "").replace("\r", "").replace("\t", ""); if (kind.equals("preferred")) { termConcept.setDisplay(textContent); } else { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/icd10cm/Icd10CmLoader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/icd10cm/Icd10CmLoader.java index 73731b9e190..47d5d626b1b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/icd10cm/Icd10CmLoader.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/icd10cm/Icd10CmLoader.java @@ -52,7 +52,6 @@ public class Icd10CmLoader { myCodeSystemVersion = theCodeSystemVersion; } - public void load(Reader theReader) throws IOException, SAXException { myConceptCount = 0; @@ -75,10 +74,8 @@ public class Icd10CmLoader { } } } - } - private void extractCode(Element theDiagElement, TermConcept theParentConcept) { String code = theDiagElement.getElementsByTagName(NAME).item(0).getTextContent(); String display = theDiagElement.getElementsByTagName(DESC).item(0).getTextContent(); @@ -95,7 +92,7 @@ public class Icd10CmLoader { for (Element nextChildDiag : XmlUtil.getChildrenByTagName(theDiagElement, DIAG)) { extractCode(nextChildDiag, concept); - if (XmlUtil.getChildrenByTagName(theDiagElement, SEVEN_CHR_DEF).size() != 0){ + if (XmlUtil.getChildrenByTagName(theDiagElement, SEVEN_CHR_DEF).size() != 0) { extractExtension(theDiagElement, nextChildDiag, concept); } } @@ -104,9 +101,10 @@ public class Icd10CmLoader { } private void extractExtension(Element theDiagElement, Element theChildDiag, TermConcept theParentConcept) { - for (Element nextChrNote : XmlUtil.getChildrenByTagName(theDiagElement, SEVEN_CHR_DEF)){ - for (Element nextExtension : XmlUtil.getChildrenByTagName(nextChrNote, EXTENSION)){ - String baseCode = theChildDiag.getElementsByTagName(NAME).item(0).getTextContent(); + for (Element nextChrNote : XmlUtil.getChildrenByTagName(theDiagElement, SEVEN_CHR_DEF)) { + for (Element nextExtension : XmlUtil.getChildrenByTagName(nextChrNote, EXTENSION)) { + String baseCode = + theChildDiag.getElementsByTagName(NAME).item(0).getTextContent(); String sevenChar = nextExtension.getAttributes().item(0).getNodeValue(); String baseDef = theChildDiag.getElementsByTagName(DESC).item(0).getTextContent(); String sevenCharDef = nextExtension.getTextContent(); @@ -116,7 +114,6 @@ public class Icd10CmLoader { concept.setCode(getExtendedCode(baseCode, sevenChar)); concept.setDisplay(getExtendedDisplay(baseDef, sevenCharDef)); } - } } @@ -131,7 +128,7 @@ public class Icd10CmLoader { private String getExtendedCode(String theBaseCode, String theSevenChar) { String placeholder = "X"; String code = theBaseCode; - for (int i = code.length(); i < 7; i++){ + for (int i = code.length(); i < 7; i++) { code += placeholder; } code += theSevenChar; @@ -141,5 +138,4 @@ public class Icd10CmLoader { public int getConceptCount() { return myConceptCount; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/BaseLoincHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/BaseLoincHandler.java index e3f32c60ca7..a49bfa24969 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/BaseLoincHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/BaseLoincHandler.java @@ -45,6 +45,7 @@ public abstract class BaseLoincHandler implements 
IZipContentsHandlerCsv { * the website URL to LOINC. */ public static final String LOINC_WEBSITE_URL = "https://loinc.org"; + public static final String REGENSTRIEF_INSTITUTE_INC = "Regenstrief Institute, Inc."; private final List myConceptMaps; private final Map myIdToConceptMaps = new HashMap<>(); @@ -54,13 +55,20 @@ public abstract class BaseLoincHandler implements IZipContentsHandlerCsv { protected final Properties myUploadProperties; protected String myLoincCopyrightStatement; - BaseLoincHandler(Map theCode2Concept, List theValueSets, - List theConceptMaps, Properties theUploadProperties) { + BaseLoincHandler( + Map theCode2Concept, + List theValueSets, + List theConceptMaps, + Properties theUploadProperties) { this(theCode2Concept, theValueSets, theConceptMaps, theUploadProperties, null); } - BaseLoincHandler(Map theCode2Concept, List theValueSets, - List theConceptMaps, Properties theUploadProperties, String theCopyrightStatement) { + BaseLoincHandler( + Map theCode2Concept, + List theValueSets, + List theConceptMaps, + Properties theUploadProperties, + String theCopyrightStatement) { myValueSets = theValueSets; myValueSets.forEach(t -> myIdToValueSet.put(t.getId(), t)); myCode2Concept = theCode2Concept; @@ -102,15 +110,10 @@ public abstract class BaseLoincHandler implements IZipContentsHandlerCsv { } } - include - .addConcept() - .setCode(theCode) - .setDisplay(displayName); - + include.addConcept().setCode(theCode).setDisplay(displayName); } } - void addConceptMapEntry(ConceptMapping theMapping, String theExternalCopyright) { if (isBlank(theMapping.getSourceCode())) { return; @@ -127,16 +130,17 @@ public abstract class BaseLoincHandler implements IZipContentsHandlerCsv { conceptMap.setName(theMapping.getConceptMapName()); conceptMap.setVersion(theMapping.getConceptMapVersion()); conceptMap.setPublisher(REGENSTRIEF_INSTITUTE_INC); - conceptMap.addContact() - .setName(REGENSTRIEF_INSTITUTE_INC) - .addTelecom() - .setSystem(ContactPoint.ContactPointSystem.URL) - .setValue(LOINC_WEBSITE_URL); + conceptMap + .addContact() + .setName(REGENSTRIEF_INSTITUTE_INC) + .addTelecom() + .setSystem(ContactPoint.ContactPointSystem.URL) + .setValue(LOINC_WEBSITE_URL); String copyright = theExternalCopyright; if (!copyright.contains("LOINC")) { - copyright = myLoincCopyrightStatement + - (myLoincCopyrightStatement.endsWith(".") ? " " : ". ") + copyright; + copyright = + myLoincCopyrightStatement + (myLoincCopyrightStatement.endsWith(".") ? " " : ". 
") + copyright; } conceptMap.setCopyright(copyright); @@ -156,7 +160,8 @@ public abstract class BaseLoincHandler implements IZipContentsHandlerCsv { for (ConceptMap.ConceptMapGroupComponent next : conceptMap.getGroup()) { if (next.getSource().equals(theMapping.getSourceCodeSystem())) { if (next.getTarget().equals(theMapping.getTargetCodeSystem())) { - if (!defaultString(theMapping.getTargetCodeSystemVersion()).equals(defaultString(next.getTargetVersion()))) { + if (!defaultString(theMapping.getTargetCodeSystemVersion()) + .equals(defaultString(next.getTargetVersion()))) { continue; } group = next; @@ -190,17 +195,22 @@ public abstract class BaseLoincHandler implements IZipContentsHandlerCsv { } } if (!found) { - source - .addTarget() - .setCode(theMapping.getTargetCode()) - .setDisplay(theMapping.getTargetDisplay()) - .setEquivalence(theMapping.getEquivalence()); + source.addTarget() + .setCode(theMapping.getTargetCode()) + .setDisplay(theMapping.getTargetDisplay()) + .setEquivalence(theMapping.getEquivalence()); } else { - ourLog.info("Not going to add a mapping from [{}/{}] to [{}/{}] because one already exists", theMapping.getSourceCodeSystem(), theMapping.getSourceCode(), theMapping.getTargetCodeSystem(), theMapping.getTargetCode()); + ourLog.info( + "Not going to add a mapping from [{}/{}] to [{}/{}] because one already exists", + theMapping.getSourceCodeSystem(), + theMapping.getSourceCode(), + theMapping.getTargetCodeSystem(), + theMapping.getTargetCode()); } } - ValueSet getValueSet(String theValueSetId, String theValueSetUri, String theValueSetName, String theVersionPropertyName) { + ValueSet getValueSet( + String theValueSetId, String theValueSetUri, String theValueSetName, String theVersionPropertyName) { String version; String codeSystemVersion = myUploadProperties.getProperty(LOINC_CODESYSTEM_VERSION.getCode()); @@ -223,10 +233,10 @@ public abstract class BaseLoincHandler implements IZipContentsHandlerCsv { vs.setStatus(Enumerations.PublicationStatus.ACTIVE); vs.setPublisher(REGENSTRIEF_INSTITUTE_INC); vs.addContact() - .setName(REGENSTRIEF_INSTITUTE_INC) - .addTelecom() - .setSystem(ContactPoint.ContactPointSystem.URL) - .setValue(LOINC_WEBSITE_URL); + .setName(REGENSTRIEF_INSTITUTE_INC) + .addTelecom() + .setSystem(ContactPoint.ContactPointSystem.URL) + .setValue(LOINC_WEBSITE_URL); vs.setCopyright(myLoincCopyrightStatement); myIdToValueSet.put(theValueSetId, vs); myValueSets.add(vs); @@ -241,7 +251,6 @@ public abstract class BaseLoincHandler implements IZipContentsHandlerCsv { return vs; } - static class ConceptMapping { private String myCopyright; @@ -384,6 +393,5 @@ public abstract class BaseLoincHandler implements IZipContentsHandlerCsv { myTargetDisplay = theTargetDisplay; return this; } - } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/BaseLoincTop2000LabResultsHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/BaseLoincTop2000LabResultsHandler.java index f5be7c612d9..27da8453275 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/BaseLoincTop2000LabResultsHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/BaseLoincTop2000LabResultsHandler.java @@ -20,8 +20,8 @@ package ca.uhn.fhir.jpa.term.loinc; import ca.uhn.fhir.jpa.entity.TermConcept; -import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc; import ca.uhn.fhir.jpa.term.IZipContentsHandlerCsv; +import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc; import org.apache.commons.csv.CSVRecord; import 
org.hl7.fhir.r4.model.ConceptMap; import org.hl7.fhir.r4.model.ValueSet; @@ -39,9 +39,15 @@ public class BaseLoincTop2000LabResultsHandler extends BaseLoincHandler implemen private String myValueSetUri; private String myValueSetName; - public BaseLoincTop2000LabResultsHandler(Map theCode2concept, List theValueSets, - String theValueSetId, String theValueSetUri, String theValueSetName, List theConceptMaps, - Properties theUploadProperties, String theCopyrightStatement) { + public BaseLoincTop2000LabResultsHandler( + Map theCode2concept, + List theValueSets, + String theValueSetId, + String theValueSetUri, + String theValueSetName, + List theConceptMaps, + Properties theUploadProperties, + String theCopyrightStatement) { super(theCode2concept, theValueSets, theConceptMaps, theUploadProperties, theCopyrightStatement); String versionId = myUploadProperties.getProperty(LOINC_CODESYSTEM_VERSION.getCode()); if (versionId != null) { @@ -61,5 +67,4 @@ public class BaseLoincTop2000LabResultsHandler extends BaseLoincHandler implemen ValueSet valueSet = getValueSet(myValueSetId, myValueSetUri, myValueSetName, null); addCodeAsIncludeToValueSet(valueSet, ITermLoaderSvc.LOINC_URI, loincNumber, displayName); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincAnswerListHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincAnswerListHandler.java index 181cffca751..3ff98820035 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincAnswerListHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincAnswerListHandler.java @@ -39,8 +39,13 @@ public class LoincAnswerListHandler extends BaseLoincHandler { private final Map myCode2Concept; private final TermCodeSystemVersion myCodeSystemVersion; - public LoincAnswerListHandler(TermCodeSystemVersion theCodeSystemVersion, Map theCode2concept, - List theValueSets, List theConceptMaps, Properties theUploadProperties, String theCopyrightStatement) { + public LoincAnswerListHandler( + TermCodeSystemVersion theCodeSystemVersion, + Map theCode2concept, + List theValueSets, + List theConceptMaps, + Properties theUploadProperties, + String theCopyrightStatement) { super(theCode2concept, theValueSets, theConceptMaps, theUploadProperties, theCopyrightStatement); myCodeSystemVersion = theCodeSystemVersion; myCode2Concept = theCode2concept; @@ -65,7 +70,6 @@ public class LoincAnswerListHandler extends BaseLoincHandler { String extCodeSystem = trim(theRecord.get("ExtCodeSystem")); String extCodeSystemVersion = trim(theRecord.get("ExtCodeSystemVersion")); - // Answer list code if (!myCode2Concept.containsKey(answerListId)) { TermConcept concept = new TermConcept(myCodeSystemVersion, answerListId); @@ -81,11 +85,10 @@ public class LoincAnswerListHandler extends BaseLoincHandler { } else { valueSetId = answerListId; } - ValueSet vs = getValueSet(valueSetId, "http://loinc.org/vs/" + answerListId, answerListName, LOINC_ANSWERLIST_VERSION.getCode()); + ValueSet vs = getValueSet( + valueSetId, "http://loinc.org/vs/" + answerListId, answerListName, LOINC_ANSWERLIST_VERSION.getCode()); if (vs.getIdentifier().isEmpty()) { - vs.addIdentifier() - .setSystem("urn:ietf:rfc:3986") - .setValue("urn:oid:" + answerListOid); + vs.addIdentifier().setSystem("urn:ietf:rfc:3986").setValue("urn:oid:" + answerListOid); } if (isNotBlank(answerString)) { @@ -100,17 +103,13 @@ public class LoincAnswerListHandler extends BaseLoincHandler { myCode2Concept.put(answerString, 
concept); } - vs - .getCompose() - .getIncludeFirstRep() - .setSystem(ITermLoaderSvc.LOINC_URI) - .setVersion(codeSystemVersionId) - .addConcept() - .setCode(answerString) - .setDisplay(displayText); - + vs.getCompose() + .getIncludeFirstRep() + .setSystem(ITermLoaderSvc.LOINC_URI) + .setVersion(codeSystemVersionId) + .addConcept() + .setCode(answerString) + .setDisplay(displayText); } - } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincAnswerListLinkHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincAnswerListLinkHandler.java index 4c47c1b790f..e1494690b45 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincAnswerListLinkHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincAnswerListLinkHandler.java @@ -74,7 +74,5 @@ public class LoincAnswerListLinkHandler implements IZipContentsHandlerCsv { if (answerListCode != null) { answerListCode.addPropertyString("answers-for", loincNumber); } - } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincCodingPropertiesHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincCodingPropertiesHandler.java index 5fddbc8c053..24e2012c56b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincCodingPropertiesHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincCodingPropertiesHandler.java @@ -50,25 +50,25 @@ public class LoincCodingPropertiesHandler implements IZipContentsHandlerCsv { public static final String ASSOCIATED_OBSERVATIONS_PROP_NAME = "AssociatedObservations"; public static final String LOINC_NUM = "LOINC_NUM"; - private final Map myCode2Concept; private final Map myPropertyNameTypeMap; - - public LoincCodingPropertiesHandler(Map theCode2concept, - Map thePropertyNameTypeMap) { + public LoincCodingPropertiesHandler( + Map theCode2concept, Map thePropertyNameTypeMap) { myCode2Concept = theCode2concept; myPropertyNameTypeMap = thePropertyNameTypeMap; } - @Override public void accept(CSVRecord theRecord) { - if ( ! 
anyValidProperty()) { return; } + if (!anyValidProperty()) { + return; + } String code = trim(theRecord.get(LOINC_NUM)); - if (isBlank(code)) { return; } - + if (isBlank(code)) { + return; + } String askAtOrderEntryValue = trim(theRecord.get(ASK_AT_ORDER_ENTRY_PROP_NAME)); String associatedObservationsValue = trim(theRecord.get(ASSOCIATED_OBSERVATIONS_PROP_NAME)); @@ -89,37 +89,36 @@ public class LoincCodingPropertiesHandler implements IZipContentsHandlerCsv { } } - /** * Validates that at least one ot target properties is defined in loinc.xml file and is of type "CODING" */ private boolean anyValidProperty() { CodeSystem.PropertyType askAtOrderEntryPropType = myPropertyNameTypeMap.get(ASK_AT_ORDER_ENTRY_PROP_NAME); - CodeSystem.PropertyType associatedObservationsPropType = myPropertyNameTypeMap.get(ASSOCIATED_OBSERVATIONS_PROP_NAME); + CodeSystem.PropertyType associatedObservationsPropType = + myPropertyNameTypeMap.get(ASSOCIATED_OBSERVATIONS_PROP_NAME); return askAtOrderEntryPropType == CodeSystem.PropertyType.CODING - || associatedObservationsPropType == CodeSystem.PropertyType.CODING; + || associatedObservationsPropType == CodeSystem.PropertyType.CODING; } - private void addCodingProperties(TermConcept theSrcTermConcept, String thePropertyName, String thePropertyValue) { List propertyCodeValues = parsePropertyCodeValues(thePropertyValue); for (String propertyCodeValue : propertyCodeValues) { TermConcept targetTermConcept = myCode2Concept.get(propertyCodeValue); if (targetTermConcept == null) { - ourLog.error("Couldn't find TermConcept for code: '{}'. Display property set to blank for property: '{}'", - propertyCodeValue, thePropertyName); + ourLog.error( + "Couldn't find TermConcept for code: '{}'. Display property set to blank for property: '{}'", + propertyCodeValue, + thePropertyName); continue; } - theSrcTermConcept.addPropertyCoding(thePropertyName, ITermLoaderSvc.LOINC_URI, propertyCodeValue, targetTermConcept.getDisplay()); + theSrcTermConcept.addPropertyCoding( + thePropertyName, ITermLoaderSvc.LOINC_URI, propertyCodeValue, targetTermConcept.getDisplay()); ourLog.trace("Adding coding property: {} to concept.code {}", thePropertyName, theSrcTermConcept.getCode()); } } - private List parsePropertyCodeValues(String theValue) { - return Arrays.stream( theValue.split(";") ) - .map(String::trim) - .collect(Collectors.toList()); + return Arrays.stream(theValue.split(";")).map(String::trim).collect(Collectors.toList()); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincConsumerNameHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincConsumerNameHandler.java index c95f868aca9..3f5b1c4a33b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincConsumerNameHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincConsumerNameHandler.java @@ -38,7 +38,7 @@ public class LoincConsumerNameHandler implements IZipContentsHandlerCsv { @Override public void accept(CSVRecord theRecord) { - + String loincNumber = trim(theRecord.get("LoincNumber")); if (isBlank(loincNumber)) { return; @@ -48,15 +48,12 @@ public class LoincConsumerNameHandler implements IZipContentsHandlerCsv { if (isBlank(consumerName)) { return; } - + TermConcept loincCode = myCode2Concept.get(loincNumber); if (loincCode == null) { return; } - - loincCode.addDesignation() - .setUseDisplay("ConsumerName") - .setValue(consumerName); - } + 
loincCode.addDesignation().setUseDisplay("ConsumerName").setValue(consumerName); + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincDocumentOntologyHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincDocumentOntologyHandler.java index 624b7c621be..b428f1e6186 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincDocumentOntologyHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincDocumentOntologyHandler.java @@ -46,9 +46,13 @@ public class LoincDocumentOntologyHandler extends BaseLoincHandler implements IZ public static final String DOCUMENT_ONTOLOGY_CODES_VS_NAME = "LOINC Document Ontology Codes"; private final Map myCode2Concept; - public LoincDocumentOntologyHandler(Map theCode2concept, Map thePropertyNames, List theValueSets, List theConceptMaps, - Properties theUploadProperties, String theCopyrightStatement) { + public LoincDocumentOntologyHandler( + Map theCode2concept, + Map thePropertyNames, + List theValueSets, + List theConceptMaps, + Properties theUploadProperties, + String theCopyrightStatement) { super(theCode2concept, theValueSets, theConceptMaps, theUploadProperties, theCopyrightStatement); myCode2Concept = theCode2concept; } @@ -100,9 +104,5 @@ public class LoincDocumentOntologyHandler extends BaseLoincHandler implements IZ code.addPropertyCoding(loincCodePropName, ITermLoaderSvc.LOINC_URI, partNumber, partName); ourLog.debug("Adding coding property: {} to concept.code {}", loincCodePropName, partNumber); } - } - - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincGroupFileHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincGroupFileHandler.java index 110e37f7453..a316f29fcef 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincGroupFileHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincGroupFileHandler.java @@ -36,14 +36,18 @@ public class LoincGroupFileHandler extends BaseLoincHandler implements IZipConte public static final String VS_URI_PREFIX = "http://loinc.org/vs/"; - public LoincGroupFileHandler(Map theCode2concept, List theValueSets, - List theConceptMaps, Properties theUploadProperties, String theCopyrightStatement) { + public LoincGroupFileHandler( + Map theCode2concept, + List theValueSets, + List theConceptMaps, + Properties theUploadProperties, + String theCopyrightStatement) { super(theCode2concept, theValueSets, theConceptMaps, theUploadProperties, theCopyrightStatement); } @Override public void accept(CSVRecord theRecord) { - //"ParentGroupId","GroupId","Group","Archetype","Status","VersionFirstReleased" + // "ParentGroupId","GroupId","Group","Archetype","Status","VersionFirstReleased" String parentGroupId = trim(theRecord.get("ParentGroupId")); String groupId = trim(theRecord.get("GroupId")); String groupName = trim(theRecord.get("Group")); @@ -60,15 +64,10 @@ public class LoincGroupFileHandler extends BaseLoincHandler implements IZipConte } ValueSet parentValueSet = getValueSet(parentGroupValueSetId, VS_URI_PREFIX + parentGroupId, null, null); - parentValueSet - .getCompose() - .getIncludeFirstRep() - .addValueSet(VS_URI_PREFIX + groupId); + parentValueSet.getCompose().getIncludeFirstRep().addValueSet(VS_URI_PREFIX + groupId); // Create group to set its name (terms are added in a different // handler) getValueSet(groupValueSetId, VS_URI_PREFIX + groupId, groupName, null); } - - 
} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincGroupTermsFileHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincGroupTermsFileHandler.java index 00bed183749..a4e9f37e9c0 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincGroupTermsFileHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincGroupTermsFileHandler.java @@ -20,8 +20,8 @@ package ca.uhn.fhir.jpa.term.loinc; import ca.uhn.fhir.jpa.entity.TermConcept; -import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc; import ca.uhn.fhir.jpa.term.IZipContentsHandlerCsv; +import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc; import org.apache.commons.csv.CSVRecord; import org.hl7.fhir.r4.model.ConceptMap; import org.hl7.fhir.r4.model.ValueSet; @@ -35,13 +35,17 @@ import static org.apache.commons.lang3.StringUtils.trim; public class LoincGroupTermsFileHandler extends BaseLoincHandler implements IZipContentsHandlerCsv { - public LoincGroupTermsFileHandler(Map theCode2concept, List theValueSets, List theConceptMaps, Properties theUploadProperties) { + public LoincGroupTermsFileHandler( + Map theCode2concept, + List theValueSets, + List theConceptMaps, + Properties theUploadProperties) { super(theCode2concept, theValueSets, theConceptMaps, theUploadProperties); } @Override public void accept(CSVRecord theRecord) { - //"Category","GroupId","Archetype","LoincNumber","LongCommonName" + // "Category","GroupId","Archetype","LoincNumber","LongCommonName" String groupId = trim(theRecord.get("GroupId")); String codeSystemVersionId = myUploadProperties.getProperty(LOINC_CODESYSTEM_VERSION.getCode()); String valueSetId; @@ -55,6 +59,4 @@ public class LoincGroupTermsFileHandler extends BaseLoincHandler implements IZip ValueSet valueSet = getValueSet(valueSetId, LoincGroupFileHandler.VS_URI_PREFIX + groupId, null, null); addCodeAsIncludeToValueSet(valueSet, ITermLoaderSvc.LOINC_URI, loincNumber, null); } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincHandler.java index ea8cf45f299..618bb7fcac7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincHandler.java @@ -44,7 +44,11 @@ public class LoincHandler implements IZipContentsHandlerCsv { private final Map myPropertyNames; private final Map myPartTypeAndPartNameToPartNumber; - public LoincHandler(TermCodeSystemVersion theCodeSystemVersion, Map theCode2concept, Map thePropertyNames, Map thePartTypeAndPartNameToPartNumber) { + public LoincHandler( + TermCodeSystemVersion theCodeSystemVersion, + Map theCode2concept, + Map thePropertyNames, + Map thePartTypeAndPartNameToPartNumber) { myCodeSystemVersion = theCodeSystemVersion; myCode2Concept = theCode2concept; myPropertyNames = thePropertyNames; @@ -64,10 +68,7 @@ public class LoincHandler implements IZipContentsHandlerCsv { concept.setDisplay(display); if (isNotBlank(shortName) && !display.equalsIgnoreCase(shortName)) { - concept - .addDesignation() - .setUseDisplay("ShortName") - .setValue(shortName); + concept.addDesignation().setUseDisplay("ShortName").setValue(shortName); } for (String nextPropertyName : myPropertyNames.keySet()) { @@ -84,11 +85,15 @@ public class LoincHandler implements IZipContentsHandlerCsv { switch (nextPropertyType) { case STRING: 
concept.addPropertyString(nextPropertyName, nextPropertyValue); - ourLog.trace("Adding string property: {} to concept.code {}", nextPropertyName, concept.getCode()); + ourLog.trace( + "Adding string property: {} to concept.code {}", + nextPropertyName, + concept.getCode()); break; case CODING: - // "Coding" property types are handled by loincCodingProperties, partlink, hierarchy, RsnaPlaybook or DocumentOntology handlers + // "Coding" property types are handled by loincCodingProperties, partlink, hierarchy, + // RsnaPlaybook or DocumentOntology handlers break; case DECIMAL: @@ -97,9 +102,9 @@ public class LoincHandler implements IZipContentsHandlerCsv { case BOOLEAN: case DATETIME: case NULL: - throw new InternalErrorException(Msg.code(915) + "Don't know how to handle LOINC property of type: " + nextPropertyType); + throw new InternalErrorException(Msg.code(915) + + "Don't know how to handle LOINC property of type: " + nextPropertyType); } - } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincHierarchyHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincHierarchyHandler.java index 7b5493cabdb..b3e8c2bfa04 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincHierarchyHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincHierarchyHandler.java @@ -22,8 +22,8 @@ package ca.uhn.fhir.jpa.term.loinc; import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion; import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink; -import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc; import ca.uhn.fhir.jpa.term.IZipContentsHandlerCsv; +import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc; import org.apache.commons.csv.CSVRecord; import java.util.Map; @@ -33,50 +33,41 @@ import static org.apache.commons.lang3.StringUtils.trim; public class LoincHierarchyHandler implements IZipContentsHandlerCsv { - private Map myCode2Concept; - private TermCodeSystemVersion myCodeSystemVersion; + private Map myCode2Concept; + private TermCodeSystemVersion myCodeSystemVersion; - public LoincHierarchyHandler(TermCodeSystemVersion theCodeSystemVersion, Map theCode2concept) { - myCodeSystemVersion = theCodeSystemVersion; - myCode2Concept = theCode2concept; - } + public LoincHierarchyHandler(TermCodeSystemVersion theCodeSystemVersion, Map theCode2concept) { + myCodeSystemVersion = theCodeSystemVersion; + myCode2Concept = theCode2concept; + } - @Override - public void accept(CSVRecord theRecord) { - String parentCode = trim(theRecord.get("IMMEDIATE_PARENT")); - String childCode = trim(theRecord.get("CODE")); - String childCodeText = trim(theRecord.get("CODE_TEXT")); + @Override + public void accept(CSVRecord theRecord) { + String parentCode = trim(theRecord.get("IMMEDIATE_PARENT")); + String childCode = trim(theRecord.get("CODE")); + String childCodeText = trim(theRecord.get("CODE_TEXT")); - if (isNotBlank(parentCode) && isNotBlank(childCode)) { - TermConcept parent = getOrCreate(parentCode, "(unknown)"); - TermConcept child = getOrCreate(childCode, childCodeText); + if (isNotBlank(parentCode) && isNotBlank(childCode)) { + TermConcept parent = getOrCreate(parentCode, "(unknown)"); + TermConcept child = getOrCreate(childCode, childCodeText); - parent.addChild(child, TermConceptParentChildLink.RelationshipTypeEnum.ISA); + parent.addChild(child, TermConceptParentChildLink.RelationshipTypeEnum.ISA); - parent.addPropertyCoding( - "child", - ITermLoaderSvc.LOINC_URI, - 
child.getCode(), - child.getDisplay()); + parent.addPropertyCoding("child", ITermLoaderSvc.LOINC_URI, child.getCode(), child.getDisplay()); - child.addPropertyCoding( - "parent", - ITermLoaderSvc.LOINC_URI, - parent.getCode(), - parent.getDisplay()); - } - } - - private TermConcept getOrCreate(String theCode, String theDisplay) { - TermConcept retVal = myCode2Concept.get(theCode); - if (retVal == null) { - retVal = new TermConcept(); - retVal.setCodeSystemVersion(myCodeSystemVersion); - retVal.setCode(theCode); - retVal.setDisplay(theDisplay); - myCode2Concept.put(theCode, retVal); - } - return retVal; - } + child.addPropertyCoding("parent", ITermLoaderSvc.LOINC_URI, parent.getCode(), parent.getDisplay()); + } + } + private TermConcept getOrCreate(String theCode, String theDisplay) { + TermConcept retVal = myCode2Concept.get(theCode); + if (retVal == null) { + retVal = new TermConcept(); + retVal.setCodeSystemVersion(myCodeSystemVersion); + retVal.setCode(theCode); + retVal.setDisplay(theDisplay); + myCode2Concept.put(theCode, retVal); + } + return retVal; + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincIeeeMedicalDeviceCodeHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincIeeeMedicalDeviceCodeHandler.java index 85d4609e2c0..a59730349da 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincIeeeMedicalDeviceCodeHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincIeeeMedicalDeviceCodeHandler.java @@ -20,8 +20,8 @@ package ca.uhn.fhir.jpa.term.loinc; import ca.uhn.fhir.jpa.entity.TermConcept; -import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc; import ca.uhn.fhir.jpa.term.IZipContentsHandlerCsv; +import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc; import org.apache.commons.csv.CSVRecord; import org.hl7.fhir.r4.model.ConceptMap; import org.hl7.fhir.r4.model.Enumerations; @@ -40,13 +40,18 @@ public class LoincIeeeMedicalDeviceCodeHandler extends BaseLoincHandler implemen public static final String LOINC_IEEE_CM_ID = "loinc-to-ieee-11073-10101"; public static final String LOINC_IEEE_CM_URI = "http://loinc.org/cm/loinc-to-ieee-11073-10101"; public static final String LOINC_IEEE_CM_NAME = "LOINC/IEEE Device Code Mappings"; - private static final String CM_COPYRIGHT = "The LOINC/IEEE Medical Device Code Mapping Table contains content from IEEE (http://ieee.org), copyright © 2017 IEEE."; + private static final String CM_COPYRIGHT = + "The LOINC/IEEE Medical Device Code Mapping Table contains content from IEEE (http://ieee.org), copyright © 2017 IEEE."; /** * Constructor */ - public LoincIeeeMedicalDeviceCodeHandler(Map theCode2concept, List theValueSets, - List theConceptMaps, Properties theUploadProperties, String theCopyrightStatement) { + public LoincIeeeMedicalDeviceCodeHandler( + Map theCode2concept, + List theValueSets, + List theConceptMaps, + Properties theUploadProperties, + String theCopyrightStatement) { super(theCode2concept, theValueSets, theConceptMaps, theUploadProperties, theCopyrightStatement); } @@ -56,7 +61,8 @@ public class LoincIeeeMedicalDeviceCodeHandler extends BaseLoincHandler implemen String codeSystemVersionId = myUploadProperties.getProperty(LOINC_CODESYSTEM_VERSION.getCode()); String loincIeeeCmVersion; if (codeSystemVersionId != null) { - loincIeeeCmVersion = myUploadProperties.getProperty(LOINC_CONCEPTMAP_VERSION.getCode()) + "-" + codeSystemVersionId; + loincIeeeCmVersion = + 
myUploadProperties.getProperty(LOINC_CONCEPTMAP_VERSION.getCode()) + "-" + codeSystemVersionId; } else { loincIeeeCmVersion = myUploadProperties.getProperty(LOINC_CONCEPTMAP_VERSION.getCode()); } @@ -75,21 +81,19 @@ public class LoincIeeeMedicalDeviceCodeHandler extends BaseLoincHandler implemen conceptMapId = LOINC_IEEE_CM_ID; } addConceptMapEntry( - new ConceptMapping() - .setConceptMapId(conceptMapId) - .setConceptMapUri(LOINC_IEEE_CM_URI) - .setConceptMapVersion(loincIeeeCmVersion) - .setConceptMapName(LOINC_IEEE_CM_NAME) - .setSourceCodeSystem(sourceCodeSystemUri) - .setSourceCodeSystemVersion(codeSystemVersionId) - .setSourceCode(loincNumber) - .setSourceDisplay(longCommonName) - .setTargetCodeSystem(targetCodeSystemUri) - .setTargetCode(ieeeCode) - .setTargetDisplay(ieeeDisplayName) - .setEquivalence(Enumerations.ConceptMapEquivalence.EQUAL), - myLoincCopyrightStatement + " " + CM_COPYRIGHT); + new ConceptMapping() + .setConceptMapId(conceptMapId) + .setConceptMapUri(LOINC_IEEE_CM_URI) + .setConceptMapVersion(loincIeeeCmVersion) + .setConceptMapName(LOINC_IEEE_CM_NAME) + .setSourceCodeSystem(sourceCodeSystemUri) + .setSourceCodeSystemVersion(codeSystemVersionId) + .setSourceCode(loincNumber) + .setSourceDisplay(longCommonName) + .setTargetCodeSystem(targetCodeSystemUri) + .setTargetCode(ieeeCode) + .setTargetDisplay(ieeeDisplayName) + .setEquivalence(Enumerations.ConceptMapEquivalence.EQUAL), + myLoincCopyrightStatement + " " + CM_COPYRIGHT); } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincImagingDocumentCodeHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincImagingDocumentCodeHandler.java index 5611536a243..20f483e17cc 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincImagingDocumentCodeHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincImagingDocumentCodeHandler.java @@ -20,8 +20,8 @@ package ca.uhn.fhir.jpa.term.loinc; import ca.uhn.fhir.jpa.entity.TermConcept; -import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc; import ca.uhn.fhir.jpa.term.IZipContentsHandlerCsv; +import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc; import org.apache.commons.csv.CSVRecord; import org.hl7.fhir.r4.model.ConceptMap; import org.hl7.fhir.r4.model.ValueSet; @@ -39,7 +39,11 @@ public class LoincImagingDocumentCodeHandler extends BaseLoincHandler implements public static final String VS_URI = "http://loinc.org/vs/loinc-imaging-document-codes"; public static final String VS_NAME = "LOINC Imaging Document Codes"; - public LoincImagingDocumentCodeHandler(Map theCode2concept, List theValueSets, List theConceptMaps, Properties theUploadProperties) { + public LoincImagingDocumentCodeHandler( + Map theCode2concept, + List theValueSets, + List theConceptMaps, + Properties theUploadProperties) { super(theCode2concept, theValueSets, theConceptMaps, theUploadProperties); } @@ -56,9 +60,7 @@ public class LoincImagingDocumentCodeHandler extends BaseLoincHandler implements valueSetId = VS_ID_BASE; } - ValueSet valueSet = getValueSet(valueSetId, VS_URI, VS_NAME,null); + ValueSet valueSet = getValueSet(valueSetId, VS_URI, VS_NAME, null); addCodeAsIncludeToValueSet(valueSet, ITermLoaderSvc.LOINC_URI, loincNumber, displayName); } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincLinguisticVariantHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincLinguisticVariantHandler.java index 256a195db57..711779d47f0 
100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincLinguisticVariantHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincLinguisticVariantHandler.java @@ -19,17 +19,16 @@ */ package ca.uhn.fhir.jpa.term.loinc; -import static org.apache.commons.lang3.StringUtils.isBlank; -import static org.apache.commons.lang3.StringUtils.trim; -import static org.apache.commons.lang3.StringUtils.trimToEmpty; - -import java.util.Map; - -import org.apache.commons.csv.CSVRecord; - import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.term.IZipContentsHandlerCsv; import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc; +import org.apache.commons.csv.CSVRecord; + +import java.util.Map; + +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.apache.commons.lang3.StringUtils.trim; +import static org.apache.commons.lang3.StringUtils.trimToEmpty; public class LoincLinguisticVariantHandler implements IZipContentsHandlerCsv { @@ -43,7 +42,7 @@ public class LoincLinguisticVariantHandler implements IZipContentsHandlerCsv { @Override public void accept(CSVRecord theRecord) { - + String loincNumber = trim(theRecord.get("LOINC_NUM")); if (isBlank(loincNumber)) { return; @@ -53,14 +52,14 @@ public class LoincLinguisticVariantHandler implements IZipContentsHandlerCsv { if (concept == null) { return; } - + // The following should be created as designations for each term: - // COMPONENT:PROPERTY:TIME_ASPCT:SYSTEM:SCALE_TYP:METHOD_TYP (as colon-separated concatenation - FormalName) - // SHORTNAME - // LONG_COMMON_NAME - // LinguisticVariantDisplayName - - //-- add formalName designation + // COMPONENT:PROPERTY:TIME_ASPCT:SYSTEM:SCALE_TYP:METHOD_TYP (as colon-separated concatenation - FormalName) + // SHORTNAME + // LONG_COMMON_NAME + // LinguisticVariantDisplayName + + // -- add formalName designation StringBuilder fullySpecifiedName = new StringBuilder(); fullySpecifiedName.append(trimToEmpty(theRecord.get("COMPONENT") + ":")); fullySpecifiedName.append(trimToEmpty(theRecord.get("PROPERTY") + ":")); @@ -68,38 +67,37 @@ public class LoincLinguisticVariantHandler implements IZipContentsHandlerCsv { fullySpecifiedName.append(trimToEmpty(theRecord.get("SYSTEM") + ":")); fullySpecifiedName.append(trimToEmpty(theRecord.get("SCALE_TYP") + ":")); fullySpecifiedName.append(trimToEmpty(theRecord.get("METHOD_TYP"))); - + String fullySpecifiedNameStr = fullySpecifiedName.toString(); - + // skip if COMPONENT, PROPERTY, TIME_ASPCT, SYSTEM, SCALE_TYP and METHOD_TYP are all empty if (!fullySpecifiedNameStr.equals(":::::")) { concept.addDesignation() - .setLanguage(myLanguageCode) - .setUseSystem(ITermLoaderSvc.LOINC_URI) - .setUseCode("FullySpecifiedName") - .setUseDisplay("FullySpecifiedName") - .setValue(fullySpecifiedNameStr); + .setLanguage(myLanguageCode) + .setUseSystem(ITermLoaderSvc.LOINC_URI) + .setUseCode("FullySpecifiedName") + .setUseDisplay("FullySpecifiedName") + .setValue(fullySpecifiedNameStr); } - - //-- other designations + + // -- other designations addDesignation(theRecord, concept, "SHORTNAME"); - addDesignation(theRecord, concept, "LONG_COMMON_NAME"); + addDesignation(theRecord, concept, "LONG_COMMON_NAME"); addDesignation(theRecord, concept, "LinguisticVariantDisplayName"); - } private void addDesignation(CSVRecord theRecord, TermConcept concept, String fieldName) { - + String field = trim(theRecord.get(fieldName)); if (isBlank(field)) { return; } - + concept.addDesignation() - .setLanguage(myLanguageCode) - 
.setUseSystem(ITermLoaderSvc.LOINC_URI) - .setUseCode(fieldName) - .setUseDisplay(fieldName) - .setValue(field); + .setLanguage(myLanguageCode) + .setUseSystem(ITermLoaderSvc.LOINC_URI) + .setUseCode(fieldName) + .setUseDisplay(fieldName) + .setValue(field); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincLinguisticVariantsHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincLinguisticVariantsHandler.java index f89921f923c..1245dad647c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincLinguisticVariantsHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincLinguisticVariantsHandler.java @@ -22,8 +22,8 @@ package ca.uhn.fhir.jpa.term.loinc; import ca.uhn.fhir.jpa.term.IZipContentsHandlerCsv; import org.apache.commons.csv.CSVRecord; -import javax.annotation.Nonnull; import java.util.List; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.trim; @@ -38,7 +38,7 @@ public class LoincLinguisticVariantsHandler implements IZipContentsHandlerCsv { @Override public void accept(CSVRecord theRecord) { - + String id = trim(theRecord.get("ID")); if (isBlank(id)) { return; @@ -48,17 +48,17 @@ public class LoincLinguisticVariantsHandler implements IZipContentsHandlerCsv { if (isBlank(isoLanguage)) { return; } - + String isoCountry = trim(theRecord.get("ISO_COUNTRY")); if (isBlank(isoCountry)) { return; } - + String languageName = trim(theRecord.get("LANGUAGE_NAME")); if (isBlank(languageName)) { return; } - + LinguisticVariant linguisticVariant = new LinguisticVariant(id, isoLanguage, isoCountry, languageName); myLinguisticVariants.add(linguisticVariant); } @@ -70,7 +70,11 @@ public class LoincLinguisticVariantsHandler implements IZipContentsHandlerCsv { private String myIsoCountry; private String myLanguageName; - public LinguisticVariant(@Nonnull String theId, @Nonnull String theIsoLanguage, @Nonnull String theIsoCountry, @Nonnull String theLanguageName) { + public LinguisticVariant( + @Nonnull String theId, + @Nonnull String theIsoLanguage, + @Nonnull String theIsoCountry, + @Nonnull String theLanguageName) { this.myId = theId; this.myIsoLanguage = theIsoLanguage; this.myIsoCountry = theIsoCountry; @@ -84,10 +88,9 @@ public class LoincLinguisticVariantsHandler implements IZipContentsHandlerCsv { public String getLanguageName() { return myLanguageName; } - + public String getLanguageCode() { return myIsoLanguage + "-" + myIsoCountry; } } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincMapToHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincMapToHandler.java index 77e877a6e59..2140fdb81c2 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincMapToHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincMapToHandler.java @@ -65,14 +65,13 @@ public class LoincMapToHandler implements IZipContentsHandlerCsv { TermConcept concept = myCode2Concept.get(code); if (concept == null) { - ourLog.warn("A TermConcept was not found for MapTo '" + CONCEPT_CODE_PROP_NAME + - "' property: '" + code + "' MapTo record ignored."); + ourLog.warn("A TermConcept was not found for MapTo '" + CONCEPT_CODE_PROP_NAME + "' property: '" + code + + "' MapTo record ignored."); return; } concept.addPropertyCoding(MAP_TO_PROP_NAME, ITermLoaderSvc.LOINC_URI, mapTo, 
display); - ourLog.trace("Adding " + MAP_TO_PROP_NAME + " coding property: {} to concept.code {}", mapTo, concept.getCode()); + ourLog.trace( + "Adding " + MAP_TO_PROP_NAME + " coding property: {} to concept.code {}", mapTo, concept.getCode()); } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincParentGroupFileHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincParentGroupFileHandler.java index 3abf530c2dc..ce71879b2ae 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincParentGroupFileHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincParentGroupFileHandler.java @@ -34,7 +34,11 @@ import static org.apache.commons.lang3.StringUtils.trim; public class LoincParentGroupFileHandler extends BaseLoincHandler implements IZipContentsHandlerCsv { - public LoincParentGroupFileHandler(Map theCode2concept, List theValueSets, List theConceptMaps, Properties theUploadProperties) { + public LoincParentGroupFileHandler( + Map theCode2concept, + List theValueSets, + List theConceptMaps, + Properties theUploadProperties) { super(theCode2concept, theValueSets, theConceptMaps, theUploadProperties); } @@ -53,6 +57,4 @@ public class LoincParentGroupFileHandler extends BaseLoincHandler implements IZi getValueSet(valueSetId, LoincGroupFileHandler.VS_URI_PREFIX + parentGroupId, parentGroupName, null); } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincPartHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincPartHandler.java index 442a2302c2b..50401eab532 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincPartHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincPartHandler.java @@ -52,10 +52,10 @@ public class LoincPartHandler implements IZipContentsHandlerCsv { String partDisplayName = trim(theRecord.get("PartDisplayName")); // Per Dan's note, we include deprecated parts -// String status = trim(theRecord.get("Status")); -// if (!"ACTIVE".equals(status)) { -// return; -// } + // String status = trim(theRecord.get("Status")); + // if (!"ACTIVE".equals(status)) { + // return; + // } PartTypeAndPartName partTypeAndPartName = new PartTypeAndPartName(partTypeName, partName); String previousValue = myPartTypeAndPartNameToPartNumber.put(partTypeAndPartName, partNumber); @@ -70,16 +70,13 @@ public class LoincPartHandler implements IZipContentsHandlerCsv { if (isNotBlank(partDisplayName)) { concept.addDesignation() - .setConcept(concept) - .setUseDisplay("PartDisplayName") - .setValue(partDisplayName); + .setConcept(concept) + .setUseDisplay("PartDisplayName") + .setValue(partDisplayName); } - } public Map getPartTypeAndPartNameToPartNumber() { return myPartTypeAndPartNameToPartNumber; } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincPartLinkHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincPartLinkHandler.java index 8792d286eab..a8e60d3067e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincPartLinkHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincPartLinkHandler.java @@ -44,7 +44,10 @@ public class LoincPartLinkHandler implements IZipContentsHandlerCsv { private final Map myPropertyNames; private Long myPartCount; - public LoincPartLinkHandler(TermCodeSystemVersion theCodeSystemVersion, 
Map theCode2concept, Map thePropertyNames) { + public LoincPartLinkHandler( + TermCodeSystemVersion theCodeSystemVersion, + Map theCode2concept, + Map thePropertyNames) { myCodeSystemVersion = theCodeSystemVersion; myCode2Concept = theCode2concept; myPropertyNames = thePropertyNames; @@ -81,15 +84,14 @@ public class LoincPartLinkHandler implements IZipContentsHandlerCsv { } else if (propertyType == CodeSystem.PropertyType.CODING) { expectedValue = partNumber; } else { - throw new InternalErrorException(Msg.code(914) + "Don't know how to handle property of type: " + propertyType); + throw new InternalErrorException( + Msg.code(914) + "Don't know how to handle property of type: " + propertyType); } - Optional existingProprty = loincConcept - .getProperties() - .stream() - .filter(t -> t.getKey().equals(propertyPart)) - .filter(t -> t.getValue().equals(expectedValue)) - .findFirst(); + Optional existingProprty = loincConcept.getProperties().stream() + .filter(t -> t.getKey().equals(propertyPart)) + .filter(t -> t.getValue().equals(expectedValue)) + .findFirst(); if (existingProprty.isPresent()) { return; } @@ -100,6 +102,5 @@ public class LoincPartLinkHandler implements IZipContentsHandlerCsv { } else { loincConcept.addPropertyCoding(propertyPart, ITermLoaderSvc.LOINC_URI, partNumber, partName); } - } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincPartRelatedCodeMappingHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincPartRelatedCodeMappingHandler.java index 05384c4c72c..2da61c14e81 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincPartRelatedCodeMappingHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincPartRelatedCodeMappingHandler.java @@ -57,10 +57,15 @@ public class LoincPartRelatedCodeMappingHandler extends BaseLoincHandler impleme private static final String LOINC_PUBCHEM_PART_MAP_URI = "http://loinc.org/cm/loinc-parts-to-pubchem"; private static final String LOINC_PUBCHEM_PART_MAP_NAME = "LOINC Part Map to PubChem"; - private static final String CM_COPYRIGHT = "The LOINC Part File, LOINC/SNOMED CT Expression Association and Map Sets File, RELMA database and associated search index files include SNOMED Clinical Terms (SNOMED CT®) which is used by permission of the International Health Terminology Standards Development Organisation (IHTSDO) under license. All rights are reserved. SNOMED CT® was originally created by The College of American Pathologists. “SNOMED” and “SNOMED CT” are registered trademarks of the IHTSDO. Use of SNOMED CT content is subject to the terms and conditions set forth in the SNOMED CT Affiliate License Agreement. It is the responsibility of those implementing this product to ensure they are appropriately licensed and for more information on the license, including how to register as an Affiliate Licensee, please refer to http://www.snomed.org/snomed-ct/get-snomed-ct or info@snomed.org. Under the terms of the Affiliate License, use of SNOMED CT in countries that are not IHTSDO Members is subject to reporting and fee payment obligations. However, IHTSDO agrees to waive the requirements to report and pay fees for use of SNOMED CT content included in the LOINC Part Mapping and LOINC Term Associations for purposes that support or enable more effective use of LOINC. This material includes content from the US Edition to SNOMED CT, which is developed and maintained by the U.S. 
National Library of Medicine and is available to authorized UMLS Metathesaurus Licensees from the UTS Downloads site at https://uts.nlm.nih.gov."; + private static final String CM_COPYRIGHT = + "The LOINC Part File, LOINC/SNOMED CT Expression Association and Map Sets File, RELMA database and associated search index files include SNOMED Clinical Terms (SNOMED CT®) which is used by permission of the International Health Terminology Standards Development Organisation (IHTSDO) under license. All rights are reserved. SNOMED CT® was originally created by The College of American Pathologists. “SNOMED” and “SNOMED CT” are registered trademarks of the IHTSDO. Use of SNOMED CT content is subject to the terms and conditions set forth in the SNOMED CT Affiliate License Agreement. It is the responsibility of those implementing this product to ensure they are appropriately licensed and for more information on the license, including how to register as an Affiliate Licensee, please refer to http://www.snomed.org/snomed-ct/get-snomed-ct or info@snomed.org. Under the terms of the Affiliate License, use of SNOMED CT in countries that are not IHTSDO Members is subject to reporting and fee payment obligations. However, IHTSDO agrees to waive the requirements to report and pay fees for use of SNOMED CT content included in the LOINC Part Mapping and LOINC Term Associations for purposes that support or enable more effective use of LOINC. This material includes content from the US Edition to SNOMED CT, which is developed and maintained by the U.S. National Library of Medicine and is available to authorized UMLS Metathesaurus Licensees from the UTS Downloads site at https://uts.nlm.nih.gov."; - public LoincPartRelatedCodeMappingHandler(Map theCode2concept, List theValueSets, - List theConceptMaps, Properties theUploadProperties, String theCopyrightStatement) { + public LoincPartRelatedCodeMappingHandler( + Map theCode2concept, + List theValueSets, + List theConceptMaps, + Properties theUploadProperties, + String theCopyrightStatement) { super(theCode2concept, theValueSets, theConceptMaps, theUploadProperties, theCopyrightStatement); } @@ -84,11 +89,12 @@ public class LoincPartRelatedCodeMappingHandler extends BaseLoincHandler impleme // ConceptMap version from properties files String loincPartMapVersion; if (codeSystemVersionId != null) { - loincPartMapVersion = myUploadProperties.getProperty(LOINC_CONCEPTMAP_VERSION.getCode()) + "-" + codeSystemVersionId; + loincPartMapVersion = + myUploadProperties.getProperty(LOINC_CONCEPTMAP_VERSION.getCode()) + "-" + codeSystemVersionId; } else { loincPartMapVersion = myUploadProperties.getProperty(LOINC_CONCEPTMAP_VERSION.getCode()); } - + Enumerations.ConceptMapEquivalence equivalence; switch (trim(defaultString(mapType))) { case "": @@ -106,7 +112,8 @@ public class LoincPartRelatedCodeMappingHandler extends BaseLoincHandler impleme equivalence = Enumerations.ConceptMapEquivalence.RELATEDTO; break; default: - throw new InternalErrorException(Msg.code(916) + "Unknown equivalence '" + mapType + "' for PartNumber: " + partNumber); + throw new InternalErrorException( + Msg.code(916) + "Unknown equivalence '" + mapType + "' for PartNumber: " + partNumber); } String loincPartMapId; @@ -147,24 +154,21 @@ public class LoincPartRelatedCodeMappingHandler extends BaseLoincHandler impleme } addConceptMapEntry( - new ConceptMapping() - .setConceptMapId(conceptMapId) - .setConceptMapUri(loincPartMapUri) - .setConceptMapVersion(loincPartMapVersion) - .setConceptMapName(loincPartMapName) - 
.setSourceCodeSystem(ITermLoaderSvc.LOINC_URI) - .setSourceCodeSystemVersion(codeSystemVersionId) - .setSourceCode(partNumber) - .setSourceDisplay(partName) - .setTargetCodeSystem(extCodeSystem) - .setTargetCode(extCodeId) - .setTargetDisplay(extCodeDisplayName) - .setTargetCodeSystemVersion(extCodeSystemVersion) - .setEquivalence(equivalence) - .setCopyright(extCodeSystemCopyrightNotice), - myLoincCopyrightStatement + " " + CM_COPYRIGHT - ); - + new ConceptMapping() + .setConceptMapId(conceptMapId) + .setConceptMapUri(loincPartMapUri) + .setConceptMapVersion(loincPartMapVersion) + .setConceptMapName(loincPartMapName) + .setSourceCodeSystem(ITermLoaderSvc.LOINC_URI) + .setSourceCodeSystemVersion(codeSystemVersionId) + .setSourceCode(partNumber) + .setSourceDisplay(partName) + .setTargetCodeSystem(extCodeSystem) + .setTargetCode(extCodeId) + .setTargetDisplay(extCodeDisplayName) + .setTargetCodeSystemVersion(extCodeSystemVersion) + .setEquivalence(equivalence) + .setCopyright(extCodeSystemCopyrightNotice), + myLoincCopyrightStatement + " " + CM_COPYRIGHT); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincRsnaPlaybookHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincRsnaPlaybookHandler.java index 35b450f2431..5351809f937 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincRsnaPlaybookHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincRsnaPlaybookHandler.java @@ -21,8 +21,8 @@ package ca.uhn.fhir.jpa.term.loinc; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.entity.TermConcept; -import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc; import ca.uhn.fhir.jpa.term.IZipContentsHandlerCsv; +import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import org.apache.commons.csv.CSVRecord; import org.hl7.fhir.r4.model.ConceptMap; @@ -53,7 +53,8 @@ public class LoincRsnaPlaybookHandler extends BaseLoincHandler implements IZipCo * -ja */ public static final String RPID_CS_URI = RID_CS_URI; - private static final String CM_COPYRIGHT = "The LOINC/RSNA Radiology Playbook and the LOINC Part File contain content from RadLex® (http://rsna.org/RadLex.aspx), copyright © 2005-2017, The Radiological Society of North America, Inc., available at no cost under the license at http://www.rsna.org/uploadedFiles/RSNA/Content/Informatics/RadLex_License_Agreement_and_Terms_of_Use_V2_Final.pdf."; + private static final String CM_COPYRIGHT = + "The LOINC/RSNA Radiology Playbook and the LOINC Part File contain content from RadLex® (http://rsna.org/RadLex.aspx), copyright © 2005-2017, The Radiological Society of North America, Inc., available at no cost under the license at http://www.rsna.org/uploadedFiles/RSNA/Content/Informatics/RadLex_License_Agreement_and_Terms_of_Use_V2_Final.pdf."; private final Map myCode2Concept; private final List myValueSets; private final Map myIdToValueSet = new HashMap<>(); @@ -62,8 +63,12 @@ public class LoincRsnaPlaybookHandler extends BaseLoincHandler implements IZipCo /** * Constructor */ - public LoincRsnaPlaybookHandler(Map theCode2concept, List theValueSets, - List theConceptMaps, Properties theUploadProperties, String theCopyrightStatement) { + public LoincRsnaPlaybookHandler( + Map theCode2concept, + List theValueSets, + List theConceptMaps, + Properties theUploadProperties, + String theCopyrightStatement) { super(theCode2concept, theValueSets, theConceptMaps, theUploadProperties, 
theCopyrightStatement); myCode2Concept = theCode2concept; myValueSets = theValueSets; @@ -89,12 +94,12 @@ public class LoincRsnaPlaybookHandler extends BaseLoincHandler implements IZipCo // ConceptMap version from properties files String loincRsnaCmVersion; if (codeSystemVersionId != null) { - loincRsnaCmVersion = myUploadProperties.getProperty(LOINC_CONCEPTMAP_VERSION.getCode()) + "-" + codeSystemVersionId; + loincRsnaCmVersion = + myUploadProperties.getProperty(LOINC_CONCEPTMAP_VERSION.getCode()) + "-" + codeSystemVersionId; } else { loincRsnaCmVersion = myUploadProperties.getProperty(LOINC_CONCEPTMAP_VERSION.getCode()); } - // RSNA Codes VS ValueSet vs; String rsnaCodesValueSetId; @@ -117,14 +122,13 @@ public class LoincRsnaPlaybookHandler extends BaseLoincHandler implements IZipCo } if (!myCodesInRsnaPlaybookValueSet.contains(loincNumber)) { - vs - .getCompose() - .getIncludeFirstRep() - .setSystem(ITermLoaderSvc.LOINC_URI) - .setVersion(codeSystemVersionId) - .addConcept() - .setCode(loincNumber) - .setDisplay(longCommonName); + vs.getCompose() + .getIncludeFirstRep() + .setSystem(ITermLoaderSvc.LOINC_URI) + .setVersion(codeSystemVersionId) + .addConcept() + .setCode(loincNumber) + .setDisplay(longCommonName); myCodesInRsnaPlaybookValueSet.add(loincNumber); } @@ -196,8 +200,10 @@ public class LoincRsnaPlaybookHandler extends BaseLoincHandler implements IZipCo String partConceptMapId; String termConceptMapId; if (codeSystemVersionId != null) { - partConceptMapId = LoincPartRelatedCodeMappingHandler.LOINC_PART_TO_RID_PART_MAP_ID + "-" + codeSystemVersionId; - termConceptMapId = LoincPartRelatedCodeMappingHandler.LOINC_TERM_TO_RPID_PART_MAP_ID + "-" + codeSystemVersionId; + partConceptMapId = + LoincPartRelatedCodeMappingHandler.LOINC_PART_TO_RID_PART_MAP_ID + "-" + codeSystemVersionId; + termConceptMapId = + LoincPartRelatedCodeMappingHandler.LOINC_TERM_TO_RPID_PART_MAP_ID + "-" + codeSystemVersionId; } else { partConceptMapId = LoincPartRelatedCodeMappingHandler.LOINC_PART_TO_RID_PART_MAP_ID; termConceptMapId = LoincPartRelatedCodeMappingHandler.LOINC_TERM_TO_RPID_PART_MAP_ID; @@ -206,42 +212,39 @@ public class LoincRsnaPlaybookHandler extends BaseLoincHandler implements IZipCo // LOINC Part -> Radlex RID code mappings if (isNotBlank(rid)) { addConceptMapEntry( - new ConceptMapping() - .setConceptMapId(partConceptMapId) - .setConceptMapUri(LoincPartRelatedCodeMappingHandler.LOINC_PART_TO_RID_PART_MAP_URI) - .setConceptMapVersion(loincRsnaCmVersion) - .setConceptMapName(LoincPartRelatedCodeMappingHandler.LOINC_PART_TO_RID_PART_MAP_NAME) - .setSourceCodeSystem(ITermLoaderSvc.LOINC_URI) - .setSourceCodeSystemVersion(codeSystemVersionId) - .setSourceCode(partNumber) - .setSourceDisplay(partName) - .setTargetCodeSystem(RID_CS_URI) - .setTargetCode(rid) - .setTargetDisplay(preferredName) - .setEquivalence(Enumerations.ConceptMapEquivalence.EQUAL) - ,myLoincCopyrightStatement + " " + CM_COPYRIGHT); + new ConceptMapping() + .setConceptMapId(partConceptMapId) + .setConceptMapUri(LoincPartRelatedCodeMappingHandler.LOINC_PART_TO_RID_PART_MAP_URI) + .setConceptMapVersion(loincRsnaCmVersion) + .setConceptMapName(LoincPartRelatedCodeMappingHandler.LOINC_PART_TO_RID_PART_MAP_NAME) + .setSourceCodeSystem(ITermLoaderSvc.LOINC_URI) + .setSourceCodeSystemVersion(codeSystemVersionId) + .setSourceCode(partNumber) + .setSourceDisplay(partName) + .setTargetCodeSystem(RID_CS_URI) + .setTargetCode(rid) + .setTargetDisplay(preferredName) + .setEquivalence(Enumerations.ConceptMapEquivalence.EQUAL), + 
myLoincCopyrightStatement + " " + CM_COPYRIGHT); } // LOINC Term -> Radlex RPID code mappings if (isNotBlank(rpid)) { addConceptMapEntry( - new ConceptMapping() - .setConceptMapId(termConceptMapId) - .setConceptMapUri(LoincPartRelatedCodeMappingHandler.LOINC_TERM_TO_RPID_PART_MAP_URI) - .setConceptMapVersion(loincRsnaCmVersion) - .setConceptMapName(LoincPartRelatedCodeMappingHandler.LOINC_TERM_TO_RPID_PART_MAP_NAME) - .setSourceCodeSystem(ITermLoaderSvc.LOINC_URI) - .setSourceCodeSystemVersion(codeSystemVersionId) - .setSourceCode(loincNumber) - .setSourceDisplay(longCommonName) - .setTargetCodeSystem(RPID_CS_URI) - .setTargetCode(rpid) - .setTargetDisplay(longName) - .setEquivalence(Enumerations.ConceptMapEquivalence.EQUAL), - myLoincCopyrightStatement + " " + CM_COPYRIGHT); + new ConceptMapping() + .setConceptMapId(termConceptMapId) + .setConceptMapUri(LoincPartRelatedCodeMappingHandler.LOINC_TERM_TO_RPID_PART_MAP_URI) + .setConceptMapVersion(loincRsnaCmVersion) + .setConceptMapName(LoincPartRelatedCodeMappingHandler.LOINC_TERM_TO_RPID_PART_MAP_NAME) + .setSourceCodeSystem(ITermLoaderSvc.LOINC_URI) + .setSourceCodeSystemVersion(codeSystemVersionId) + .setSourceCode(loincNumber) + .setSourceDisplay(longCommonName) + .setTargetCodeSystem(RPID_CS_URI) + .setTargetCode(rpid) + .setTargetDisplay(longName) + .setEquivalence(Enumerations.ConceptMapEquivalence.EQUAL), + myLoincCopyrightStatement + " " + CM_COPYRIGHT); } - } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincTop2000LabResultsSiHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincTop2000LabResultsSiHandler.java index 26c29f7e563..6ecf7b8d958 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincTop2000LabResultsSiHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincTop2000LabResultsSiHandler.java @@ -33,11 +33,20 @@ public class LoincTop2000LabResultsSiHandler extends BaseLoincTop2000LabResultsH public static final String TOP_2000_SI_VS_URI = "http://loinc.org/vs/top-2000-lab-observations-si"; public static final String TOP_2000_SI_VS_NAME = "Top 2000 Lab Results SI"; - public LoincTop2000LabResultsSiHandler(Map theCode2concept, List theValueSets, - List theConceptMaps, Properties theUploadProperties, String theCopyrightStatement) { - super(theCode2concept, theValueSets, TOP_2000_SI_VS_ID, TOP_2000_SI_VS_URI, TOP_2000_SI_VS_NAME, - theConceptMaps, theUploadProperties, theCopyrightStatement); + public LoincTop2000LabResultsSiHandler( + Map theCode2concept, + List theValueSets, + List theConceptMaps, + Properties theUploadProperties, + String theCopyrightStatement) { + super( + theCode2concept, + theValueSets, + TOP_2000_SI_VS_ID, + TOP_2000_SI_VS_URI, + TOP_2000_SI_VS_NAME, + theConceptMaps, + theUploadProperties, + theCopyrightStatement); } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincTop2000LabResultsUsHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincTop2000LabResultsUsHandler.java index 48c2d5b23b5..47fe8f8af0e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincTop2000LabResultsUsHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincTop2000LabResultsUsHandler.java @@ -33,11 +33,20 @@ public class LoincTop2000LabResultsUsHandler extends BaseLoincTop2000LabResultsH public static final String TOP_2000_US_VS_URI = 
"http://loinc.org/vs/top-2000-lab-observations-us"; public static final String TOP_2000_US_VS_NAME = "Top 2000 Lab Results US"; - public LoincTop2000LabResultsUsHandler(Map theCode2concept, List theValueSets, - List theConceptMaps, Properties theUploadProperties, String theCopyrightStatement) { - super(theCode2concept, theValueSets, TOP_2000_US_VS_ID, TOP_2000_US_VS_URI, TOP_2000_US_VS_NAME, - theConceptMaps, theUploadProperties, theCopyrightStatement); + public LoincTop2000LabResultsUsHandler( + Map theCode2concept, + List theValueSets, + List theConceptMaps, + Properties theUploadProperties, + String theCopyrightStatement) { + super( + theCode2concept, + theValueSets, + TOP_2000_US_VS_ID, + TOP_2000_US_VS_URI, + TOP_2000_US_VS_NAME, + theConceptMaps, + theUploadProperties, + theCopyrightStatement); } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincUniversalOrderSetHandler.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincUniversalOrderSetHandler.java index b4ea99d6161..4255c1a0568 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincUniversalOrderSetHandler.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincUniversalOrderSetHandler.java @@ -20,8 +20,8 @@ package ca.uhn.fhir.jpa.term.loinc; import ca.uhn.fhir.jpa.entity.TermConcept; -import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc; import ca.uhn.fhir.jpa.term.IZipContentsHandlerCsv; +import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc; import org.apache.commons.csv.CSVRecord; import org.hl7.fhir.r4.model.ConceptMap; import org.hl7.fhir.r4.model.ValueSet; @@ -37,7 +37,11 @@ public class LoincUniversalOrderSetHandler extends BaseLoincHandler implements I public static final String VS_URI = "http://loinc.org/vs/loinc-universal-order-set"; public static final String VS_NAME = "LOINC Universal Order Set"; - public LoincUniversalOrderSetHandler(Map theCode2concept, List theValueSets, List theConceptMaps, Properties theUploadProperties) { + public LoincUniversalOrderSetHandler( + Map theCode2concept, + List theValueSets, + List theConceptMaps, + Properties theUploadProperties) { super(theCode2concept, theValueSets, theConceptMaps, theUploadProperties); } @@ -58,6 +62,4 @@ public class LoincUniversalOrderSetHandler extends BaseLoincHandler implements I ValueSet valueSet = getValueSet(valueSetId, VS_URI, VS_NAME, null); addCodeAsIncludeToValueSet(valueSet, ITermLoaderSvc.LOINC_URI, loincNumber, displayName); } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincUploadPropertiesEnum.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincUploadPropertiesEnum.java index 54c8beb1d19..752009f8100 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincUploadPropertiesEnum.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/LoincUploadPropertiesEnum.java @@ -30,7 +30,6 @@ public enum LoincUploadPropertiesEnum { /** * Sorting agnostic. 
*/ - LOINC_UPLOAD_PROPERTIES_FILE("loincupload.properties"), LOINC_XML_FILE("loinc.xml"), @@ -61,7 +60,8 @@ public enum LoincUploadPropertiesEnum { // IEEE medical device codes LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE("loinc.ieee.medical.device.code.mapping.table.file"), - LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE_DEFAULT("AccessoryFiles/LoincIeeeMedicalDeviceCodeMappingTable/LoincIeeeMedicalDeviceCodeMappingTable.csv"), + LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE_DEFAULT( + "AccessoryFiles/LoincIeeeMedicalDeviceCodeMappingTable/LoincIeeeMedicalDeviceCodeMappingTable.csv"), // Imaging document codes LOINC_IMAGING_DOCUMENT_CODES_FILE("loinc.imaging.document.codes.file"), @@ -96,7 +96,8 @@ public enum LoincUploadPropertiesEnum { // Universal lab order ValueSet LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE("loinc.universal.lab.order.valueset.file"), - LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE_DEFAULT("AccessoryFiles/LoincUniversalLabOrdersValueSet/LoincUniversalLabOrdersValueSet.csv"), + LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE_DEFAULT( + "AccessoryFiles/LoincUniversalLabOrdersValueSet/LoincUniversalLabOrdersValueSet.csv"), /* * OPTIONAL diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/PartTypeAndPartName.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/PartTypeAndPartName.java index 0baddcefbee..3ebf29692d1 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/PartTypeAndPartName.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/loinc/PartTypeAndPartName.java @@ -45,9 +45,9 @@ public class PartTypeAndPartName { PartTypeAndPartName that = (PartTypeAndPartName) theO; return new EqualsBuilder() - .append(myPartType, that.myPartType) - .append(myPartName, that.myPartName) - .isEquals(); + .append(myPartType, that.myPartType) + .append(myPartName, that.myPartName) + .isEquals(); } public String getPartName() { @@ -60,17 +60,14 @@ public class PartTypeAndPartName { @Override public int hashCode() { - return new HashCodeBuilder(17, 37) - .append(myPartType) - .append(myPartName) - .toHashCode(); + return new HashCodeBuilder(17, 37).append(myPartType).append(myPartName).toHashCode(); } @Override public String toString() { return new ToStringBuilder(this) - .append("partType", myPartType) - .append("partName", myPartName) - .toString(); + .append("partType", myPartType) + .append("partName", myPartName) + .toString(); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/snomedct/SctHandlerConcept.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/snomedct/SctHandlerConcept.java index 4208196fbae..23b5491cbe0 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/snomedct/SctHandlerConcept.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/snomedct/SctHandlerConcept.java @@ -28,27 +28,27 @@ import java.util.Set; public final class SctHandlerConcept implements IZipContentsHandlerCsv { - private Set myValidConceptIds; - private Map myConceptIdToMostRecentDate = new HashMap(); + private Set myValidConceptIds; + private Map myConceptIdToMostRecentDate = new HashMap(); - public SctHandlerConcept(Set theValidConceptIds) { - myValidConceptIds = theValidConceptIds; - } + public SctHandlerConcept(Set theValidConceptIds) { + myValidConceptIds = theValidConceptIds; + } - @Override - public void accept(CSVRecord theRecord) { - String id = theRecord.get("id"); - String date = theRecord.get("effectiveTime"); + @Override + 
public void accept(CSVRecord theRecord) { + String id = theRecord.get("id"); + String date = theRecord.get("effectiveTime"); - if (!myConceptIdToMostRecentDate.containsKey(id) || myConceptIdToMostRecentDate.get(id).compareTo(date) < 0) { - boolean active = "1".equals(theRecord.get("active")); - if (active) { - myValidConceptIds.add(id); - } else { - myValidConceptIds.remove(id); - } - myConceptIdToMostRecentDate.put(id, date); - } - - } + if (!myConceptIdToMostRecentDate.containsKey(id) + || myConceptIdToMostRecentDate.get(id).compareTo(date) < 0) { + boolean active = "1".equals(theRecord.get("active")); + if (active) { + myValidConceptIds.add(id); + } else { + myValidConceptIds.remove(id); + } + myConceptIdToMostRecentDate.put(id, date); + } + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/snomedct/SctHandlerDescription.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/snomedct/SctHandlerDescription.java index 5c5e9327818..5ca4da63e49 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/snomedct/SctHandlerDescription.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/snomedct/SctHandlerDescription.java @@ -29,36 +29,40 @@ import java.util.Map; import java.util.Set; public final class SctHandlerDescription implements IZipContentsHandlerCsv { - private final Map myCode2concept; - private final TermCodeSystemVersion myCodeSystemVersion; - private final Map myId2concept; - private Set myValidConceptIds; + private final Map myCode2concept; + private final TermCodeSystemVersion myCodeSystemVersion; + private final Map myId2concept; + private Set myValidConceptIds; - public SctHandlerDescription(Set theValidConceptIds, Map theCode2concept, Map theId2concept, TermCodeSystemVersion theCodeSystemVersion) { - myCode2concept = theCode2concept; - myId2concept = theId2concept; - myCodeSystemVersion = theCodeSystemVersion; - myValidConceptIds = theValidConceptIds; - } + public SctHandlerDescription( + Set theValidConceptIds, + Map theCode2concept, + Map theId2concept, + TermCodeSystemVersion theCodeSystemVersion) { + myCode2concept = theCode2concept; + myId2concept = theId2concept; + myCodeSystemVersion = theCodeSystemVersion; + myValidConceptIds = theValidConceptIds; + } - @Override - public void accept(CSVRecord theRecord) { - String id = theRecord.get("id"); - boolean active = "1".equals(theRecord.get("active")); - if (!active) { - return; - } - String conceptId = theRecord.get("conceptId"); - if (!myValidConceptIds.contains(conceptId)) { - return; - } + @Override + public void accept(CSVRecord theRecord) { + String id = theRecord.get("id"); + boolean active = "1".equals(theRecord.get("active")); + if (!active) { + return; + } + String conceptId = theRecord.get("conceptId"); + if (!myValidConceptIds.contains(conceptId)) { + return; + } - String term = theRecord.get("term"); + String term = theRecord.get("term"); - TermConcept concept = TermLoaderSvcImpl.getOrCreateConcept(myId2concept, id); - concept.setCode(conceptId); - concept.setDisplay(term); - concept.setCodeSystemVersion(myCodeSystemVersion); - myCode2concept.put(conceptId, concept); - } + TermConcept concept = TermLoaderSvcImpl.getOrCreateConcept(myId2concept, id); + concept.setCode(conceptId); + concept.setDisplay(term); + concept.setCodeSystemVersion(myCodeSystemVersion); + myCode2concept.put(conceptId, concept); + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/snomedct/SctHandlerRelationship.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/snomedct/SctHandlerRelationship.java index 99e8491f65f..cac6da48d4c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/snomedct/SctHandlerRelationship.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/snomedct/SctHandlerRelationship.java @@ -28,67 +28,71 @@ import org.apache.commons.csv.CSVRecord; import java.util.*; public final class SctHandlerRelationship implements IZipContentsHandlerCsv { - private final Map myCode2concept; - private final TermCodeSystemVersion myCodeSystemVersion; - private final Map myRootConcepts; + private final Map myCode2concept; + private final TermCodeSystemVersion myCodeSystemVersion; + private final Map myRootConcepts; - public SctHandlerRelationship(TermCodeSystemVersion theCodeSystemVersion, HashMap theRootConcepts, Map theCode2concept) { - myCodeSystemVersion = theCodeSystemVersion; - myRootConcepts = theRootConcepts; - myCode2concept = theCode2concept; - } + public SctHandlerRelationship( + TermCodeSystemVersion theCodeSystemVersion, + HashMap theRootConcepts, + Map theCode2concept) { + myCodeSystemVersion = theCodeSystemVersion; + myRootConcepts = theRootConcepts; + myCode2concept = theCode2concept; + } - @Override - public void accept(CSVRecord theRecord) { - Set ignoredTypes = new HashSet(); - ignoredTypes.add("Method (attribute)"); - ignoredTypes.add("Direct device (attribute)"); - ignoredTypes.add("Has focus (attribute)"); - ignoredTypes.add("Access instrument"); - ignoredTypes.add("Procedure site (attribute)"); - ignoredTypes.add("Causative agent (attribute)"); - ignoredTypes.add("Course (attribute)"); - ignoredTypes.add("Finding site (attribute)"); - ignoredTypes.add("Has definitional manifestation (attribute)"); + @Override + public void accept(CSVRecord theRecord) { + Set ignoredTypes = new HashSet(); + ignoredTypes.add("Method (attribute)"); + ignoredTypes.add("Direct device (attribute)"); + ignoredTypes.add("Has focus (attribute)"); + ignoredTypes.add("Access instrument"); + ignoredTypes.add("Procedure site (attribute)"); + ignoredTypes.add("Causative agent (attribute)"); + ignoredTypes.add("Course (attribute)"); + ignoredTypes.add("Finding site (attribute)"); + ignoredTypes.add("Has definitional manifestation (attribute)"); - String sourceId = theRecord.get("sourceId"); - String destinationId = theRecord.get("destinationId"); - String typeId = theRecord.get("typeId"); - boolean active = "1".equals(theRecord.get("active")); + String sourceId = theRecord.get("sourceId"); + String destinationId = theRecord.get("destinationId"); + String typeId = theRecord.get("typeId"); + boolean active = "1".equals(theRecord.get("active")); - TermConcept typeConcept = myCode2concept.get(typeId); - TermConcept sourceConcept = myCode2concept.get(sourceId); - TermConcept targetConcept = myCode2concept.get(destinationId); - if (sourceConcept != null && targetConcept != null && typeConcept != null) { - if (typeConcept.getDisplay().equals("Is a (attribute)")) { - TermConceptParentChildLink.RelationshipTypeEnum relationshipType = TermConceptParentChildLink.RelationshipTypeEnum.ISA; - if (!sourceId.equals(destinationId)) { - if (active) { - TermConceptParentChildLink link = new TermConceptParentChildLink(); - link.setChild(sourceConcept); - link.setParent(targetConcept); - link.setRelationshipType(relationshipType); - link.setCodeSystem(myCodeSystemVersion); - - targetConcept.addChild(sourceConcept, relationshipType); - } else { - // not active, so we're removing 
any existing links - for (TermConceptParentChildLink next : new ArrayList(targetConcept.getChildren())) { - if (next.getRelationshipType() == relationshipType) { - if (next.getChild().getCode().equals(sourceConcept.getCode())) { - next.getParent().getChildren().remove(next); - next.getChild().getParents().remove(next); - } - } - } - } - } - } else if (ignoredTypes.contains(typeConcept.getDisplay())) { - // ignore - } else { - // ourLog.warn("Unknown relationship type: {}/{}", typeId, typeConcept.getDisplay()); - } - } - } + TermConcept typeConcept = myCode2concept.get(typeId); + TermConcept sourceConcept = myCode2concept.get(sourceId); + TermConcept targetConcept = myCode2concept.get(destinationId); + if (sourceConcept != null && targetConcept != null && typeConcept != null) { + if (typeConcept.getDisplay().equals("Is a (attribute)")) { + TermConceptParentChildLink.RelationshipTypeEnum relationshipType = + TermConceptParentChildLink.RelationshipTypeEnum.ISA; + if (!sourceId.equals(destinationId)) { + if (active) { + TermConceptParentChildLink link = new TermConceptParentChildLink(); + link.setChild(sourceConcept); + link.setParent(targetConcept); + link.setRelationshipType(relationshipType); + link.setCodeSystem(myCodeSystemVersion); + targetConcept.addChild(sourceConcept, relationshipType); + } else { + // not active, so we're removing any existing links + for (TermConceptParentChildLink next : + new ArrayList(targetConcept.getChildren())) { + if (next.getRelationshipType() == relationshipType) { + if (next.getChild().getCode().equals(sourceConcept.getCode())) { + next.getParent().getChildren().remove(next); + next.getChild().getParents().remove(next); + } + } + } + } + } + } else if (ignoredTypes.contains(typeConcept.getDisplay())) { + // ignore + } else { + // ourLog.warn("Unknown relationship type: {}/{}", typeId, typeConcept.getDisplay()); + } + } + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/AddRemoveCount.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/AddRemoveCount.java index 2b096e863d6..334370de480 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/AddRemoveCount.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/AddRemoveCount.java @@ -24,7 +24,6 @@ public class AddRemoveCount { private int myAddCount; private int myRemoveCount; - public void addToAddCount(int theCount) { myAddCount += theCount; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/BaseIterator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/BaseIterator.java index 66220b6752d..d9d97f941f3 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/BaseIterator.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/BaseIterator.java @@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.util; import ca.uhn.fhir.i18n.Msg; + import java.util.Iterator; public abstract class BaseIterator implements Iterator { @@ -28,5 +29,4 @@ public abstract class BaseIterator implements Iterator { public void remove() { throw new UnsupportedOperationException(Msg.code(798)); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/CoordCalculator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/CoordCalculator.java index 5068f135126..2c8d4f7e01f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/CoordCalculator.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/CoordCalculator.java @@ -19,9 +19,8 @@ */ 
package ca.uhn.fhir.jpa.util; - -import org.hibernate.search.engine.spatial.GeoPoint; import org.hibernate.search.engine.spatial.GeoBoundingBox; +import org.hibernate.search.engine.spatial.GeoPoint; import org.slf4j.Logger; import static ca.uhn.fhir.jpa.searchparam.extractor.GeopointNormalizer.normalizeLatitude; @@ -30,22 +29,26 @@ import static org.slf4j.LoggerFactory.getLogger; public class CoordCalculator { private static final Logger ourLog = getLogger(CoordCalculator.class); - public static final double MAX_SUPPORTED_DISTANCE_KM = 10000.0; // Slightly less than a quarter of the earth's circumference + public static final double MAX_SUPPORTED_DISTANCE_KM = + 10000.0; // Slightly less than a quarter of the earth's circumference private static final double RADIUS_EARTH_KM = 6378.1; // Source: https://stackoverflow.com/questions/7222382/get-lat-long-given-current-point-distance-and-bearing - static GeoPoint findTarget(double theLatitudeDegrees, double theLongitudeDegrees, double theBearingDegrees, double theDistanceKm) { + static GeoPoint findTarget( + double theLatitudeDegrees, double theLongitudeDegrees, double theBearingDegrees, double theDistanceKm) { double latitudeRadians = Math.toRadians(normalizeLatitude(theLatitudeDegrees)); double longitudeRadians = Math.toRadians(normalizeLongitude(theLongitudeDegrees)); double bearingRadians = Math.toRadians(theBearingDegrees); double distanceRadians = theDistanceKm / RADIUS_EARTH_KM; - double targetLatitude = Math.asin( Math.sin(latitudeRadians) * Math.cos(distanceRadians) + - Math.cos(latitudeRadians) * Math.sin(distanceRadians) * Math.cos(bearingRadians)); + double targetLatitude = Math.asin(Math.sin(latitudeRadians) * Math.cos(distanceRadians) + + Math.cos(latitudeRadians) * Math.sin(distanceRadians) * Math.cos(bearingRadians)); - double targetLongitude = longitudeRadians + Math.atan2(Math.sin(bearingRadians) * Math.sin(distanceRadians) * Math.cos(latitudeRadians), - Math.cos(distanceRadians)-Math.sin(latitudeRadians) * Math.sin(targetLatitude)); + double targetLongitude = longitudeRadians + + Math.atan2( + Math.sin(bearingRadians) * Math.sin(distanceRadians) * Math.cos(latitudeRadians), + Math.cos(distanceRadians) - Math.sin(latitudeRadians) * Math.sin(targetLatitude)); double latitude = Math.toDegrees(targetLatitude); double longitude = Math.toDegrees(targetLongitude); @@ -61,8 +64,10 @@ public class CoordCalculator { public static GeoBoundingBox getBox(double theLatitudeDegrees, double theLongitudeDegrees, Double theDistanceKm) { double diagonalDistanceKm = theDistanceKm * Math.sqrt(2.0); - GeoPoint topLeft = CoordCalculator.findTarget(theLatitudeDegrees, theLongitudeDegrees, 315.0, diagonalDistanceKm); - GeoPoint bottomRight = CoordCalculator.findTarget(theLatitudeDegrees, theLongitudeDegrees, 135.0, diagonalDistanceKm); + GeoPoint topLeft = + CoordCalculator.findTarget(theLatitudeDegrees, theLongitudeDegrees, 315.0, diagonalDistanceKm); + GeoPoint bottomRight = + CoordCalculator.findTarget(theLatitudeDegrees, theLongitudeDegrees, 135.0, diagonalDistanceKm); return GeoBoundingBox.of(topLeft, bottomRight); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/Counter.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/Counter.java index ae9eb5bc8d4..62945642c07 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/Counter.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/Counter.java @@ -26,5 +26,4 @@ public class Counter { public long getThenAdd() { return 
myCount++; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/JpaHapiTransactionService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/JpaHapiTransactionService.java index 01cf764d192..ad465cb715e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/JpaHapiTransactionService.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/JpaHapiTransactionService.java @@ -40,5 +40,4 @@ public class JpaHapiTransactionService extends HapiTransactionService { } return myCustomIsolationSupported; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/LogicUtil.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/LogicUtil.java index 81bc79571ba..99059758df8 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/LogicUtil.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/LogicUtil.java @@ -46,5 +46,4 @@ public class LogicUtil { } return foundOne; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/MethodRequest.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/MethodRequest.java index 3558e4b6a91..b747df23d1a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/MethodRequest.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/MethodRequest.java @@ -23,8 +23,8 @@ package ca.uhn.fhir.jpa.util; * Created by Jeff on 2/8/2017. */ public enum MethodRequest { - POST, - GET, - PUT, - DELETE + POST, + GET, + PUT, + DELETE } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/QueryChunker.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/QueryChunker.java index 6a2d79cae7a..20bce83b8be 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/QueryChunker.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/QueryChunker.java @@ -22,7 +22,6 @@ package ca.uhn.fhir.jpa.util; import ca.uhn.fhir.jpa.search.builder.SearchBuilder; import ca.uhn.fhir.util.TaskChunker; -import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.function.Consumer; @@ -38,5 +37,4 @@ public class QueryChunker extends TaskChunker { public void chunk(Collection theInput, Consumer> theBatchConsumer) { chunk(theInput, SearchBuilder.getMaximumPageSize(), theBatchConsumer); } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/QueryParameterUtils.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/QueryParameterUtils.java index 7e668aa7006..545935497c8 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/QueryParameterUtils.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/QueryParameterUtils.java @@ -24,7 +24,6 @@ import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser; import ca.uhn.fhir.jpa.entity.Search; import ca.uhn.fhir.jpa.entity.SearchInclude; import ca.uhn.fhir.jpa.entity.SearchTypeEnum; -import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.search.SearchStatusEnum; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; @@ -46,23 +45,17 @@ import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import javax.persistence.EntityManager; -import javax.persistence.TypedQuery; -import javax.persistence.criteria.CriteriaBuilder; -import 
javax.persistence.criteria.CriteriaQuery; -import javax.persistence.criteria.From; -import javax.persistence.criteria.Predicate; -import javax.persistence.criteria.Root; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; import java.util.Date; import java.util.List; import java.util.Objects; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import javax.persistence.criteria.CriteriaBuilder; +import javax.persistence.criteria.From; +import javax.persistence.criteria.Predicate; import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; @@ -74,11 +67,13 @@ public class QueryParameterUtils { public static final Condition[] EMPTY_CONDITION_ARRAY = new Condition[0]; static { - DualHashBidiMap compareOperationToParamPrefix = new DualHashBidiMap<>(); + DualHashBidiMap compareOperationToParamPrefix = + new DualHashBidiMap<>(); compareOperationToParamPrefix.put(SearchFilterParser.CompareOperation.ap, ParamPrefixEnum.APPROXIMATE); compareOperationToParamPrefix.put(SearchFilterParser.CompareOperation.eq, ParamPrefixEnum.EQUAL); compareOperationToParamPrefix.put(SearchFilterParser.CompareOperation.gt, ParamPrefixEnum.GREATERTHAN); - compareOperationToParamPrefix.put(SearchFilterParser.CompareOperation.ge, ParamPrefixEnum.GREATERTHAN_OR_EQUALS); + compareOperationToParamPrefix.put( + SearchFilterParser.CompareOperation.ge, ParamPrefixEnum.GREATERTHAN_OR_EQUALS); compareOperationToParamPrefix.put(SearchFilterParser.CompareOperation.lt, ParamPrefixEnum.LESSTHAN); compareOperationToParamPrefix.put(SearchFilterParser.CompareOperation.le, ParamPrefixEnum.LESSTHAN_OR_EQUALS); compareOperationToParamPrefix.put(SearchFilterParser.CompareOperation.ne, ParamPrefixEnum.NOT_EQUAL); @@ -87,111 +82,112 @@ public class QueryParameterUtils { ourCompareOperationToParamPrefix = UnmodifiableBidiMap.unmodifiableBidiMap(compareOperationToParamPrefix); } + @Nullable + public static Condition toAndPredicate(List theAndPredicates) { + List andPredicates = + theAndPredicates.stream().filter(Objects::nonNull).collect(Collectors.toList()); + if (andPredicates.size() == 0) { + return null; + } else if (andPredicates.size() == 1) { + return andPredicates.get(0); + } else { + return ComboCondition.and(andPredicates.toArray(EMPTY_CONDITION_ARRAY)); + } + } @Nullable - public static Condition toAndPredicate(List theAndPredicates) { - List andPredicates = theAndPredicates - .stream() - .filter(Objects::nonNull) - .collect(Collectors.toList()); - if (andPredicates.size() == 0) { - return null; - } else if (andPredicates.size() == 1) { - return andPredicates.get(0); - } else { - return ComboCondition.and(andPredicates.toArray(EMPTY_CONDITION_ARRAY)); - } - } + public static Condition toOrPredicate(List theOrPredicates) { + List orPredicates = + theOrPredicates.stream().filter(t -> t != null).collect(Collectors.toList()); + if (orPredicates.size() == 0) { + return null; + } else if (orPredicates.size() == 1) { + return orPredicates.get(0); + } else { + return ComboCondition.or(orPredicates.toArray(EMPTY_CONDITION_ARRAY)); + } + } - @Nullable - public static Condition toOrPredicate(List theOrPredicates) { - List orPredicates = theOrPredicates.stream().filter(t -> t != null).collect(Collectors.toList()); - if (orPredicates.size() == 0) { - return null; - } else if (orPredicates.size() == 1) { - return orPredicates.get(0); - } else { - return ComboCondition.or(orPredicates.toArray(EMPTY_CONDITION_ARRAY)); - } - } + 
@Nullable + public static Condition toOrPredicate(Condition... theOrPredicates) { + return toOrPredicate(Arrays.asList(theOrPredicates)); + } - @Nullable - public static Condition toOrPredicate(Condition... theOrPredicates) { - return toOrPredicate(Arrays.asList(theOrPredicates)); - } + @Nullable + public static Condition toAndPredicate(Condition... theAndPredicates) { + return toAndPredicate(Arrays.asList(theAndPredicates)); + } - @Nullable - public static Condition toAndPredicate(Condition... theAndPredicates) { - return toAndPredicate(Arrays.asList(theAndPredicates)); - } + @Nonnull + public static Condition toEqualToOrInPredicate( + DbColumn theColumn, List theValuePlaceholders, boolean theInverse) { + if (theInverse) { + return toNotEqualToOrNotInPredicate(theColumn, theValuePlaceholders); + } else { + return toEqualToOrInPredicate(theColumn, theValuePlaceholders); + } + } - @Nonnull - public static Condition toEqualToOrInPredicate(DbColumn theColumn, List theValuePlaceholders, boolean theInverse) { - if (theInverse) { - return toNotEqualToOrNotInPredicate(theColumn, theValuePlaceholders); - } else { - return toEqualToOrInPredicate(theColumn, theValuePlaceholders); - } - } + @Nonnull + public static Condition toEqualToOrInPredicate(DbColumn theColumn, List theValuePlaceholders) { + if (theValuePlaceholders.size() == 1) { + return BinaryCondition.equalTo(theColumn, theValuePlaceholders.get(0)); + } + return new InCondition(theColumn, theValuePlaceholders); + } - @Nonnull - public static Condition toEqualToOrInPredicate(DbColumn theColumn, List theValuePlaceholders) { - if (theValuePlaceholders.size() == 1) { - return BinaryCondition.equalTo(theColumn, theValuePlaceholders.get(0)); - } - return new InCondition(theColumn, theValuePlaceholders); - } + @Nonnull + public static Condition toNotEqualToOrNotInPredicate(DbColumn theColumn, List theValuePlaceholders) { + if (theValuePlaceholders.size() == 1) { + return BinaryCondition.notEqualTo(theColumn, theValuePlaceholders.get(0)); + } + return new InCondition(theColumn, theValuePlaceholders).setNegate(true); + } - @Nonnull - public static Condition toNotEqualToOrNotInPredicate(DbColumn theColumn, List theValuePlaceholders) { - if (theValuePlaceholders.size() == 1) { - return BinaryCondition.notEqualTo(theColumn, theValuePlaceholders.get(0)); - } - return new InCondition(theColumn, theValuePlaceholders).setNegate(true); - } + public static SearchFilterParser.CompareOperation toOperation(ParamPrefixEnum thePrefix) { + SearchFilterParser.CompareOperation retVal = null; + if (thePrefix != null && ourCompareOperationToParamPrefix.containsValue(thePrefix)) { + retVal = ourCompareOperationToParamPrefix.getKey(thePrefix); + } + return ObjectUtils.defaultIfNull(retVal, SearchFilterParser.CompareOperation.eq); + } - public static SearchFilterParser.CompareOperation toOperation(ParamPrefixEnum thePrefix) { - SearchFilterParser.CompareOperation retVal = null; - if (thePrefix != null && ourCompareOperationToParamPrefix.containsValue(thePrefix)) { - retVal = ourCompareOperationToParamPrefix.getKey(thePrefix); - } - return ObjectUtils.defaultIfNull(retVal, SearchFilterParser.CompareOperation.eq); - } + public static ParamPrefixEnum fromOperation(SearchFilterParser.CompareOperation thePrefix) { + ParamPrefixEnum retVal = null; + if (thePrefix != null && ourCompareOperationToParamPrefix.containsKey(thePrefix)) { + retVal = ourCompareOperationToParamPrefix.get(thePrefix); + } + return ObjectUtils.defaultIfNull(retVal, ParamPrefixEnum.EQUAL); + } - public 
static ParamPrefixEnum fromOperation(SearchFilterParser.CompareOperation thePrefix) { - ParamPrefixEnum retVal = null; - if (thePrefix != null && ourCompareOperationToParamPrefix.containsKey(thePrefix)) { - retVal = ourCompareOperationToParamPrefix.get(thePrefix); - } - return ObjectUtils.defaultIfNull(retVal, ParamPrefixEnum.EQUAL); - } + public static String getChainedPart(String parameter) { + return parameter.substring(parameter.indexOf(".") + 1); + } - public static String getChainedPart(String parameter) { - return parameter.substring(parameter.indexOf(".") + 1); - } + public static String getParamNameWithPrefix(String theSpnamePrefix, String theParamName) { - public static String getParamNameWithPrefix(String theSpnamePrefix, String theParamName) { + if (StringUtils.isBlank(theSpnamePrefix)) return theParamName; - if (StringUtils.isBlank(theSpnamePrefix)) - return theParamName; + return theSpnamePrefix + "." + theParamName; + } - return theSpnamePrefix + "." + theParamName; - } + public static Predicate[] toPredicateArray(List thePredicates) { + return thePredicates.toArray(new Predicate[0]); + } - public static Predicate[] toPredicateArray(List thePredicates) { - return thePredicates.toArray(new Predicate[0]); - } - - private static List createLastUpdatedPredicates(final DateRangeParam theLastUpdated, CriteriaBuilder builder, From from) { + private static List createLastUpdatedPredicates( + final DateRangeParam theLastUpdated, CriteriaBuilder builder, From from) { List lastUpdatedPredicates = new ArrayList<>(); if (theLastUpdated != null) { if (theLastUpdated.getLowerBoundAsInstant() != null) { ourLog.debug("LastUpdated lower bound: {}", new InstantDt(theLastUpdated.getLowerBoundAsInstant())); - Predicate predicateLower = builder.greaterThanOrEqualTo(from.get("myUpdated"), theLastUpdated.getLowerBoundAsInstant()); + Predicate predicateLower = + builder.greaterThanOrEqualTo(from.get("myUpdated"), theLastUpdated.getLowerBoundAsInstant()); lastUpdatedPredicates.add(predicateLower); } if (theLastUpdated.getUpperBoundAsInstant() != null) { - Predicate predicateUpper = builder.lessThanOrEqualTo(from.get("myUpdated"), theLastUpdated.getUpperBoundAsInstant()); + Predicate predicateUpper = + builder.lessThanOrEqualTo(from.get("myUpdated"), theLastUpdated.getUpperBoundAsInstant()); lastUpdatedPredicates.add(predicateUpper); } } @@ -208,14 +204,21 @@ public class QueryParameterUtils { } } - public static void populateSearchEntity(SearchParameterMap theParams, String theResourceType, String theSearchUuid, String theQueryString, Search theSearch, RequestPartitionId theRequestPartitionId) { + public static void populateSearchEntity( + SearchParameterMap theParams, + String theResourceType, + String theSearchUuid, + String theQueryString, + Search theSearch, + RequestPartitionId theRequestPartitionId) { theSearch.setDeleted(false); theSearch.setUuid(theSearchUuid); theSearch.setCreated(new Date()); theSearch.setTotalCount(null); theSearch.setNumFound(0); theSearch.setPreferredPageSize(theParams.getCount()); - theSearch.setSearchType(theParams.getEverythingMode() != null ? SearchTypeEnum.EVERYTHING : SearchTypeEnum.SEARCH); + theSearch.setSearchType( + theParams.getEverythingMode() != null ? 
SearchTypeEnum.EVERYTHING : SearchTypeEnum.SEARCH); theSearch.setLastUpdated(theParams.getLastUpdated()); theSearch.setResourceType(theResourceType); theSearch.setStatus(SearchStatusEnum.LOADING); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/RegexpGsonBuilderUtil.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/RegexpGsonBuilderUtil.java index f0172075d15..b3229fd2135 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/RegexpGsonBuilderUtil.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/RegexpGsonBuilderUtil.java @@ -30,8 +30,7 @@ import com.google.gson.JsonPrimitive; */ public class RegexpGsonBuilderUtil { - - private RegexpGsonBuilderUtil() { } + private RegexpGsonBuilderUtil() {} /** * Builds a json object as this sample: @@ -50,6 +49,4 @@ public class RegexpGsonBuilderUtil { JsonArray a = new JsonArray(); return regexpJO; } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/ResourceCountCache.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/ResourceCountCache.java index 7ea7836e289..be94ac5a0a1 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/ResourceCountCache.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/ResourceCountCache.java @@ -118,6 +118,4 @@ public class ResourceCountCache implements IHasScheduledJobs { static void setNowForUnitTest(Long theNowForUnitTest) { ourNowForUnitTest = theNowForUnitTest; } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/ScrollableResultsIterator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/ScrollableResultsIterator.java index b892f36e8db..b201778ebc8 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/ScrollableResultsIterator.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/ScrollableResultsIterator.java @@ -61,7 +61,6 @@ public class ScrollableResultsIterator extends BaseIterator return next; } - @Override public void close() { if (myScroll != null) { @@ -69,5 +68,4 @@ public class ScrollableResultsIterator extends BaseIterator myScroll = null; } } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SearchParameterMapCalculator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SearchParameterMapCalculator.java index dd11b79f13e..97df1ba7990 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SearchParameterMapCalculator.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SearchParameterMapCalculator.java @@ -27,17 +27,15 @@ import static ca.uhn.fhir.jpa.searchparam.SearchParameterMap.INTEGER_0; public class SearchParameterMapCalculator { - static public boolean isWantCount(SearchParameterMap myParams) { + public static boolean isWantCount(SearchParameterMap myParams) { return isWantCount(myParams.getSearchTotalMode()); } - static public boolean isWantCount(SearchTotalModeEnum theSearchTotalModeEnum){ + public static boolean isWantCount(SearchTotalModeEnum theSearchTotalModeEnum) { return SearchTotalModeEnum.ACCURATE.equals(theSearchTotalModeEnum); } - static public boolean isWantOnlyCount(SearchParameterMap myParams) { - return SummaryEnum.COUNT.equals(myParams.getSummaryMode()) - | INTEGER_0.equals(myParams.getCount()); + public static boolean isWantOnlyCount(SearchParameterMap myParams) { + return SummaryEnum.COUNT.equals(myParams.getSummaryMode()) | INTEGER_0.equals(myParams.getCount()); } 
- } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SpringObjectCaster.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SpringObjectCaster.java index 81702c3089d..3e498d4af40 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SpringObjectCaster.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SpringObjectCaster.java @@ -28,19 +28,20 @@ import org.springframework.aop.support.AopUtils; */ public class SpringObjectCaster { - /** - * Retrieve the Spring proxy object's target object - * @param proxy - * @param clazz - * @param - * @return - * @throws Exception - */ - public static T getTargetObject(Object proxy, Class clazz) throws Exception { - while( (AopUtils.isJdkDynamicProxy(proxy))) { - return clazz.cast(getTargetObject(((Advised)proxy).getTargetSource().getTarget(), clazz)); - } + /** + * Retrieve the Spring proxy object's target object + * @param proxy + * @param clazz + * @param + * @return + * @throws Exception + */ + public static T getTargetObject(Object proxy, Class clazz) throws Exception { + while ((AopUtils.isJdkDynamicProxy(proxy))) { + return clazz.cast( + getTargetObject(((Advised) proxy).getTargetSource().getTarget(), clazz)); + } - return clazz.cast(proxy); - } + return clazz.cast(proxy); + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SubscriptionsRequireManualActivationInterceptorDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SubscriptionsRequireManualActivationInterceptorDstu2.java index 7c0e9c1e871..8cfaafda7ac 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SubscriptionsRequireManualActivationInterceptorDstu2.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SubscriptionsRequireManualActivationInterceptorDstu2.java @@ -60,12 +60,12 @@ public class SubscriptionsRequireManualActivationInterceptorDstu2 extends Server } } - public void setDao(IFhirResourceDao theDao) { myDao = theDao; } - private void verifyStatusOk(RestOperationTypeEnum theOperation, IBaseResource theOldResourceOrNull, IBaseResource theResource) { + private void verifyStatusOk( + RestOperationTypeEnum theOperation, IBaseResource theOldResourceOrNull, IBaseResource theResource) { Subscription subscription = (Subscription) theResource; SubscriptionStatusEnum newStatus = subscription.getStatusElement().getValueAsEnum(); @@ -75,13 +75,16 @@ public class SubscriptionsRequireManualActivationInterceptorDstu2 extends Server if (newStatus == null) { String actualCode = subscription.getStatusElement().getValueAsString(); - throw new UnprocessableEntityException(Msg.code(800) + "Can not " + theOperation.getCode() + " resource: Subscription.status must be populated on this server" + ((isNotBlank(actualCode)) ? " (invalid value " + actualCode + ")" : "")); + throw new UnprocessableEntityException(Msg.code(800) + "Can not " + theOperation.getCode() + + " resource: Subscription.status must be populated on this server" + + ((isNotBlank(actualCode)) ? 
" (invalid value " + actualCode + ")" : "")); } if (theOldResourceOrNull != null) { try { Subscription existing = (Subscription) theOldResourceOrNull; - SubscriptionStatusEnum existingStatus = existing.getStatusElement().getValueAsEnum(); + SubscriptionStatusEnum existingStatus = + existing.getStatusElement().getValueAsEnum(); if (existingStatus != newStatus) { verifyActiveStatus(theOperation, subscription, newStatus, existingStatus); } @@ -93,8 +96,13 @@ public class SubscriptionsRequireManualActivationInterceptorDstu2 extends Server } } - private void verifyActiveStatus(RestOperationTypeEnum theOperation, Subscription theSubscription, SubscriptionStatusEnum newStatus, SubscriptionStatusEnum theExistingStatus) { - SubscriptionChannelTypeEnum channelType = theSubscription.getChannel().getTypeElement().getValueAsEnum(); + private void verifyActiveStatus( + RestOperationTypeEnum theOperation, + Subscription theSubscription, + SubscriptionStatusEnum newStatus, + SubscriptionStatusEnum theExistingStatus) { + SubscriptionChannelTypeEnum channelType = + theSubscription.getChannel().getTypeElement().getValueAsEnum(); if (channelType == null) { throw new UnprocessableEntityException(Msg.code(801) + "Subscription.channel.type must be populated"); @@ -105,14 +113,19 @@ public class SubscriptionsRequireManualActivationInterceptorDstu2 extends Server } if (theExistingStatus != null) { - throw new UnprocessableEntityException(Msg.code(802) + "Subscription.status can not be changed from " + describeStatus(theExistingStatus) + " to " + describeStatus(newStatus)); + throw new UnprocessableEntityException(Msg.code(802) + "Subscription.status can not be changed from " + + describeStatus(theExistingStatus) + " to " + describeStatus(newStatus)); } if (theSubscription.getStatus() == null) { - throw new UnprocessableEntityException(Msg.code(803) + "Can not " + theOperation.getCode().toLowerCase() + " resource: Subscription.status must be populated on this server"); + throw new UnprocessableEntityException( + Msg.code(803) + "Can not " + theOperation.getCode().toLowerCase() + + " resource: Subscription.status must be populated on this server"); } - throw new UnprocessableEntityException(Msg.code(804) + "Subscription.status must be '" + SubscriptionStatusEnum.OFF.getCode() + "' or '" + SubscriptionStatusEnum.REQUESTED.getCode() + "' on a newly created subscription"); + throw new UnprocessableEntityException( + Msg.code(804) + "Subscription.status must be '" + SubscriptionStatusEnum.OFF.getCode() + "' or '" + + SubscriptionStatusEnum.REQUESTED.getCode() + "' on a newly created subscription"); } private String describeStatus(SubscriptionStatusEnum existingStatus) { @@ -124,5 +137,4 @@ public class SubscriptionsRequireManualActivationInterceptorDstu2 extends Server } return existingStatusString; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SubscriptionsRequireManualActivationInterceptorDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SubscriptionsRequireManualActivationInterceptorDstu3.java index 1f16114273f..4762daf87d2 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SubscriptionsRequireManualActivationInterceptorDstu3.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SubscriptionsRequireManualActivationInterceptorDstu3.java @@ -60,12 +60,12 @@ public class SubscriptionsRequireManualActivationInterceptorDstu3 extends Server } } - public void setDao(IFhirResourceDao theDao) { myDao = theDao; } - private void 
verifyStatusOk(RestOperationTypeEnum theOperation, IBaseResource theOldResourceOrNull, IBaseResource theResource) { + private void verifyStatusOk( + RestOperationTypeEnum theOperation, IBaseResource theOldResourceOrNull, IBaseResource theResource) { Subscription subscription = (Subscription) theResource; SubscriptionStatus newStatus = subscription.getStatusElement().getValue(); @@ -75,7 +75,9 @@ public class SubscriptionsRequireManualActivationInterceptorDstu3 extends Server if (newStatus == null) { String actualCode = subscription.getStatusElement().getValueAsString(); - throw new UnprocessableEntityException(Msg.code(812) + "Can not " + theOperation.getCode() + " resource: Subscription.status must be populated on this server" + ((isNotBlank(actualCode)) ? " (invalid value " + actualCode + ")" : "")); + throw new UnprocessableEntityException(Msg.code(812) + "Can not " + theOperation.getCode() + + " resource: Subscription.status must be populated on this server" + + ((isNotBlank(actualCode)) ? " (invalid value " + actualCode + ")" : "")); } if (theOldResourceOrNull != null) { @@ -93,8 +95,13 @@ public class SubscriptionsRequireManualActivationInterceptorDstu3 extends Server } } - private void verifyActiveStatus(RestOperationTypeEnum theOperation, Subscription theSubscription, SubscriptionStatus newStatus, SubscriptionStatus theExistingStatus) { - SubscriptionChannelType channelType = theSubscription.getChannel().getTypeElement().getValue(); + private void verifyActiveStatus( + RestOperationTypeEnum theOperation, + Subscription theSubscription, + SubscriptionStatus newStatus, + SubscriptionStatus theExistingStatus) { + SubscriptionChannelType channelType = + theSubscription.getChannel().getTypeElement().getValue(); if (channelType == null) { throw new UnprocessableEntityException(Msg.code(813) + "Subscription.channel.type must be populated"); @@ -105,14 +112,19 @@ public class SubscriptionsRequireManualActivationInterceptorDstu3 extends Server } if (theExistingStatus != null) { - throw new UnprocessableEntityException(Msg.code(814) + "Subscription.status can not be changed from " + describeStatus(theExistingStatus) + " to " + describeStatus(newStatus)); + throw new UnprocessableEntityException(Msg.code(814) + "Subscription.status can not be changed from " + + describeStatus(theExistingStatus) + " to " + describeStatus(newStatus)); } if (theSubscription.getStatus() == null) { - throw new UnprocessableEntityException(Msg.code(815) + "Can not " + theOperation.getCode().toLowerCase() + " resource: Subscription.status must be populated on this server"); + throw new UnprocessableEntityException( + Msg.code(815) + "Can not " + theOperation.getCode().toLowerCase() + + " resource: Subscription.status must be populated on this server"); } - throw new UnprocessableEntityException(Msg.code(816) + "Subscription.status must be '" + SubscriptionStatus.OFF.toCode() + "' or '" + SubscriptionStatus.REQUESTED.toCode() + "' on a newly created subscription"); + throw new UnprocessableEntityException( + Msg.code(816) + "Subscription.status must be '" + SubscriptionStatus.OFF.toCode() + "' or '" + + SubscriptionStatus.REQUESTED.toCode() + "' on a newly created subscription"); } private String describeStatus(SubscriptionStatus existingStatus) { @@ -124,5 +136,4 @@ public class SubscriptionsRequireManualActivationInterceptorDstu3 extends Server } return existingStatusString; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SubscriptionsRequireManualActivationInterceptorR4.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SubscriptionsRequireManualActivationInterceptorR4.java index d5999ac829f..7f04e5ecba9 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SubscriptionsRequireManualActivationInterceptorR4.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SubscriptionsRequireManualActivationInterceptorR4.java @@ -60,12 +60,12 @@ public class SubscriptionsRequireManualActivationInterceptorR4 extends ServerOpe } } - public void setDao(IFhirResourceDao theDao) { myDao = theDao; } - private void verifyStatusOk(RestOperationTypeEnum theOperation, IBaseResource theOldResourceOrNull, IBaseResource theResource) { + private void verifyStatusOk( + RestOperationTypeEnum theOperation, IBaseResource theOldResourceOrNull, IBaseResource theResource) { Subscription subscription = (Subscription) theResource; SubscriptionStatus newStatus = subscription.getStatusElement().getValue(); @@ -75,7 +75,9 @@ public class SubscriptionsRequireManualActivationInterceptorR4 extends ServerOpe if (newStatus == null) { String actualCode = subscription.getStatusElement().getValueAsString(); - throw new UnprocessableEntityException(Msg.code(807) + "Can not " + theOperation.getCode() + " resource: Subscription.status must be populated on this server" + ((isNotBlank(actualCode)) ? " (invalid value " + actualCode + ")" : "")); + throw new UnprocessableEntityException(Msg.code(807) + "Can not " + theOperation.getCode() + + " resource: Subscription.status must be populated on this server" + + ((isNotBlank(actualCode)) ? " (invalid value " + actualCode + ")" : "")); } if (theOldResourceOrNull != null) { @@ -93,8 +95,13 @@ public class SubscriptionsRequireManualActivationInterceptorR4 extends ServerOpe } } - private void verifyActiveStatus(RestOperationTypeEnum theOperation, Subscription theSubscription, SubscriptionStatus newStatus, SubscriptionStatus theExistingStatus) { - SubscriptionChannelType channelType = theSubscription.getChannel().getTypeElement().getValue(); + private void verifyActiveStatus( + RestOperationTypeEnum theOperation, + Subscription theSubscription, + SubscriptionStatus newStatus, + SubscriptionStatus theExistingStatus) { + SubscriptionChannelType channelType = + theSubscription.getChannel().getTypeElement().getValue(); if (channelType == null) { throw new UnprocessableEntityException(Msg.code(808) + "Subscription.channel.type must be populated"); @@ -105,14 +112,19 @@ public class SubscriptionsRequireManualActivationInterceptorR4 extends ServerOpe } if (theExistingStatus != null) { - throw new UnprocessableEntityException(Msg.code(809) + "Subscription.status can not be changed from " + describeStatus(theExistingStatus) + " to " + describeStatus(newStatus)); + throw new UnprocessableEntityException(Msg.code(809) + "Subscription.status can not be changed from " + + describeStatus(theExistingStatus) + " to " + describeStatus(newStatus)); } if (theSubscription.getStatus() == null) { - throw new UnprocessableEntityException(Msg.code(810) + "Can not " + theOperation.getCode().toLowerCase() + " resource: Subscription.status must be populated on this server"); + throw new UnprocessableEntityException( + Msg.code(810) + "Can not " + theOperation.getCode().toLowerCase() + + " resource: Subscription.status must be populated on this server"); } - throw new UnprocessableEntityException(Msg.code(811) + "Subscription.status must be '" + SubscriptionStatus.OFF.toCode() + "' or '" + SubscriptionStatus.REQUESTED.toCode() + "' on a newly 
created subscription"); + throw new UnprocessableEntityException( + Msg.code(811) + "Subscription.status must be '" + SubscriptionStatus.OFF.toCode() + "' or '" + + SubscriptionStatus.REQUESTED.toCode() + "' on a newly created subscription"); } private String describeStatus(SubscriptionStatus existingStatus) { @@ -124,5 +136,4 @@ public class SubscriptionsRequireManualActivationInterceptorR4 extends ServerOpe } return existingStatusString; } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/jsonpatch/JsonPatchUtils.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/jsonpatch/JsonPatchUtils.java index b3a48b40b5f..97c67dc3664 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/jsonpatch/JsonPatchUtils.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/jsonpatch/JsonPatchUtils.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.jpa.util.jsonpatch; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.parser.IParser; import ca.uhn.fhir.parser.StrictErrorHandler; @@ -40,7 +40,8 @@ import static org.apache.commons.lang3.StringUtils.defaultString; public class JsonPatchUtils { - public static T apply(FhirContext theCtx, T theResourceToUpdate, @Language("JSON") String thePatchBody) { + public static T apply( + FhirContext theCtx, T theResourceToUpdate, @Language("JSON") String thePatchBody) { // Parse the patch ObjectMapper mapper = new ObjectMapper(); mapper.configure(JsonParser.Feature.INCLUDE_SOURCE_IN_LOCATION, false); @@ -53,7 +54,8 @@ public class JsonPatchUtils { JsonNode jsonPatchNode = mapper.readTree(parser); patch = JsonPatch.fromJson(jsonPatchNode); - JsonNode originalJsonDocument = mapper.readTree(theCtx.newJsonParser().encodeResourceToString(theResourceToUpdate)); + JsonNode originalJsonDocument = + mapper.readTree(theCtx.newJsonParser().encodeResourceToString(theResourceToUpdate)); JsonNode after = patch.apply(originalJsonDocument); @SuppressWarnings("unchecked") @@ -68,10 +70,14 @@ public class JsonPatchUtils { try { retVal = fhirJsonParser.parseResource(clazz, postPatchedContent); } catch (DataFormatException e) { - String resourceId = theResourceToUpdate.getIdElement().toUnqualifiedVersionless().getValue(); + String resourceId = theResourceToUpdate + .getIdElement() + .toUnqualifiedVersionless() + .getValue(); String resourceType = theCtx.getResourceType(theResourceToUpdate); resourceId = defaultString(resourceId, resourceType); - String msg = theCtx.getLocalizer().getMessage(JsonPatchUtils.class, "failedToApplyPatch", resourceId, e.getMessage()); + String msg = theCtx.getLocalizer() + .getMessage(JsonPatchUtils.class, "failedToApplyPatch", resourceId, e.getMessage()); throw new InvalidRequestException(Msg.code(818) + msg); } return retVal; @@ -79,7 +85,5 @@ public class JsonPatchUtils { } catch (IOException | JsonPatchException theE) { throw new InvalidRequestException(Msg.code(819) + theE.getMessage()); } - } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/xmlpatch/XmlPatchUtils.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/xmlpatch/XmlPatchUtils.java index d579d972e39..abec669ac89 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/xmlpatch/XmlPatchUtils.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/xmlpatch/XmlPatchUtils.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.jpa.util.xmlpatch; -import 
ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import com.github.dnault.xmlpatch.Patcher; @@ -33,24 +33,25 @@ import java.io.IOException; public class XmlPatchUtils { public static T apply(FhirContext theCtx, T theResourceToUpdate, String thePatchBody) { - + @SuppressWarnings("unchecked") Class clazz = (Class) theResourceToUpdate.getClass(); - + String inputResource = theCtx.newXmlParser().encodeResourceToString(theResourceToUpdate); - + ByteArrayOutputStream result = new ByteArrayOutputStream(); try { - Patcher.patch(new ByteArrayInputStream(inputResource.getBytes(Constants.CHARSET_UTF8)), new ByteArrayInputStream(thePatchBody.getBytes(Constants.CHARSET_UTF8)), result); + Patcher.patch( + new ByteArrayInputStream(inputResource.getBytes(Constants.CHARSET_UTF8)), + new ByteArrayInputStream(thePatchBody.getBytes(Constants.CHARSET_UTF8)), + result); } catch (IOException e) { throw new InternalErrorException(Msg.code(817) + e); } - + String resultString = new String(result.toByteArray(), Constants.CHARSET_UTF8); T retVal = theCtx.newXmlParser().parseResource(clazz, resultString); - + return retVal; } - - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/validation/JpaValidationSupportChain.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/validation/JpaValidationSupportChain.java index 39df15bbf69..9a42f1d5873 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/validation/JpaValidationSupportChain.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/validation/JpaValidationSupportChain.java @@ -46,12 +46,16 @@ public class JpaValidationSupportChain extends ValidationSupportChain { @Qualifier("myDefaultProfileValidationSupport") @Autowired private IValidationSupport myDefaultProfileValidationSupport; + @Autowired private ITermReadSvc myTerminologyService; + @Autowired private NpmJpaValidationSupport myNpmJpaValidationSupport; + @Autowired private ITermConceptMappingSvc myConceptMappingSvc; + @Autowired private UnknownCodeSystemWarningValidationSupport myUnknownCodeSystemWarningValidationSupport; @@ -86,5 +90,4 @@ public class JpaValidationSupportChain extends ValidationSupportChain { // This needs to be last in the chain, it was designed for that addValidationSupport(myUnknownCodeSystemWarningValidationSupport); } - } diff --git a/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/api/IIpsGenerationStrategy.java b/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/api/IIpsGenerationStrategy.java index 6d3322e9b72..4a61fba6231 100644 --- a/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/api/IIpsGenerationStrategy.java +++ b/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/api/IIpsGenerationStrategy.java @@ -24,10 +24,10 @@ import ca.uhn.fhir.model.api.Include; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.List; import java.util.Set; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * This interface is the primary configuration and strategy provider for the @@ -111,7 +111,8 @@ public interface IIpsGenerationStrategy { * being searched for. * @param theSearchParameterMap The map to manipulate. 
*/ - void massageResourceSearch(IpsContext.IpsSectionContext theIpsSectionContext, SearchParameterMap theSearchParameterMap); + void massageResourceSearch( + IpsContext.IpsSectionContext theIpsSectionContext, SearchParameterMap theSearchParameterMap); /** * Return a set of Include directives to be added to the resource search @@ -131,5 +132,4 @@ public interface IIpsGenerationStrategy { * IPS document. The strategy can decide whether to include it or not. */ boolean shouldInclude(IpsContext.IpsSectionContext theIpsSectionContext, IBaseResource theCandidate); - } diff --git a/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/api/IpsContext.java b/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/api/IpsContext.java index 92492b44da5..a0cf6666ced 100644 --- a/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/api/IpsContext.java +++ b/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/api/IpsContext.java @@ -67,7 +67,8 @@ public class IpsContext { private final IpsSectionEnum mySection; private final String myResourceType; - private IpsSectionContext(IBaseResource theSubject, IIdType theSubjectId, IpsSectionEnum theSection, String theResourceType) { + private IpsSectionContext( + IBaseResource theSubject, IIdType theSubjectId, IpsSectionEnum theSection, String theResourceType) { super(theSubject, theSubjectId); mySection = theSection; myResourceType = theResourceType; @@ -81,5 +82,4 @@ public class IpsContext { return mySection; } } - } diff --git a/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/api/IpsSectionEnum.java b/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/api/IpsSectionEnum.java index f7b23cc9a4c..c6d6a9f8b70 100644 --- a/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/api/IpsSectionEnum.java +++ b/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/api/IpsSectionEnum.java @@ -20,18 +20,18 @@ package ca.uhn.fhir.jpa.ips.api; public enum IpsSectionEnum { - ALLERGY_INTOLERANCE, - MEDICATION_SUMMARY, - PROBLEM_LIST, - IMMUNIZATIONS, - PROCEDURES, - MEDICAL_DEVICES, - DIAGNOSTIC_RESULTS, - VITAL_SIGNS, - ILLNESS_HISTORY, - PREGNANCY, - SOCIAL_HISTORY, - FUNCTIONAL_STATUS, - PLAN_OF_CARE, - ADVANCE_DIRECTIVES - } + ALLERGY_INTOLERANCE, + MEDICATION_SUMMARY, + PROBLEM_LIST, + IMMUNIZATIONS, + PROCEDURES, + MEDICAL_DEVICES, + DIAGNOSTIC_RESULTS, + VITAL_SIGNS, + ILLNESS_HISTORY, + PREGNANCY, + SOCIAL_HISTORY, + FUNCTIONAL_STATUS, + PLAN_OF_CARE, + ADVANCE_DIRECTIVES +} diff --git a/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/api/SectionRegistry.java b/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/api/SectionRegistry.java index de6957227ce..2e7e4f53b4f 100644 --- a/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/api/SectionRegistry.java +++ b/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/api/SectionRegistry.java @@ -30,13 +30,13 @@ import org.hl7.fhir.r4.model.MedicationStatement; import org.hl7.fhir.r4.model.Reference; import org.hl7.fhir.r4.model.ResourceType; -import javax.annotation.Nullable; -import javax.annotation.PostConstruct; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.function.Consumer; +import javax.annotation.Nullable; +import javax.annotation.PostConstruct; /** * This class is the registry for sections for the IPS document. 
It can be extended @@ -103,150 +103,149 @@ public class SectionRegistry { protected void addSectionAllergyIntolerance() { addSection(IpsSectionEnum.ALLERGY_INTOLERANCE) - .withTitle("Allergies and Intolerances") - .withSectionCode("48765-2") - .withSectionDisplay("Allergies and Adverse Reactions") - .withResourceTypes(ResourceType.AllergyIntolerance.name()) - .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/AllergiesAndIntolerances-uv-ips") - .withNoInfoGenerator(new AllergyIntoleranceNoInfoR4Generator()) - .build(); + .withTitle("Allergies and Intolerances") + .withSectionCode("48765-2") + .withSectionDisplay("Allergies and Adverse Reactions") + .withResourceTypes(ResourceType.AllergyIntolerance.name()) + .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/AllergiesAndIntolerances-uv-ips") + .withNoInfoGenerator(new AllergyIntoleranceNoInfoR4Generator()) + .build(); } protected void addSectionMedicationSummary() { addSection(IpsSectionEnum.MEDICATION_SUMMARY) - .withTitle("Medication List") - .withSectionCode("10160-0") - .withSectionDisplay("Medication List") - .withResourceTypes( - ResourceType.MedicationStatement.name(), - ResourceType.MedicationRequest.name(), - ResourceType.MedicationAdministration.name(), - ResourceType.MedicationDispense.name() - ) - .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/MedicationSummary-uv-ips") - .withNoInfoGenerator(new MedicationNoInfoR4Generator()) - .build(); + .withTitle("Medication List") + .withSectionCode("10160-0") + .withSectionDisplay("Medication List") + .withResourceTypes( + ResourceType.MedicationStatement.name(), + ResourceType.MedicationRequest.name(), + ResourceType.MedicationAdministration.name(), + ResourceType.MedicationDispense.name()) + .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/MedicationSummary-uv-ips") + .withNoInfoGenerator(new MedicationNoInfoR4Generator()) + .build(); } protected void addSectionProblemList() { addSection(IpsSectionEnum.PROBLEM_LIST) - .withTitle("Problem List") - .withSectionCode("11450-4") - .withSectionDisplay("Problem List") - .withResourceTypes(ResourceType.Condition.name()) - .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/ProblemList-uv-ips") - .withNoInfoGenerator(new ProblemNoInfoR4Generator()) - .build(); + .withTitle("Problem List") + .withSectionCode("11450-4") + .withSectionDisplay("Problem List") + .withResourceTypes(ResourceType.Condition.name()) + .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/ProblemList-uv-ips") + .withNoInfoGenerator(new ProblemNoInfoR4Generator()) + .build(); } protected void addSectionImmunizations() { addSection(IpsSectionEnum.IMMUNIZATIONS) - .withTitle("History of Immunizations") - .withSectionCode("11369-6") - .withSectionDisplay("History of Immunizations") - .withResourceTypes(ResourceType.Immunization.name()) - .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/Immunizations-uv-ips") - .build(); + .withTitle("History of Immunizations") + .withSectionCode("11369-6") + .withSectionDisplay("History of Immunizations") + .withResourceTypes(ResourceType.Immunization.name()) + .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/Immunizations-uv-ips") + .build(); } protected void addSectionProcedures() { addSection(IpsSectionEnum.PROCEDURES) - .withTitle("History of Procedures") - .withSectionCode("47519-4") - .withSectionDisplay("History of Procedures") - .withResourceTypes(ResourceType.Procedure.name()) - 
.withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/HistoryOfProcedures-uv-ips") - .build(); + .withTitle("History of Procedures") + .withSectionCode("47519-4") + .withSectionDisplay("History of Procedures") + .withResourceTypes(ResourceType.Procedure.name()) + .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/HistoryOfProcedures-uv-ips") + .build(); } protected void addSectionMedicalDevices() { addSection(IpsSectionEnum.MEDICAL_DEVICES) - .withTitle("Medical Devices") - .withSectionCode("46240-8") - .withSectionDisplay("Medical Devices") - .withResourceTypes(ResourceType.DeviceUseStatement.name()) - .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/MedicalDevices-uv-ips") - .build(); + .withTitle("Medical Devices") + .withSectionCode("46240-8") + .withSectionDisplay("Medical Devices") + .withResourceTypes(ResourceType.DeviceUseStatement.name()) + .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/MedicalDevices-uv-ips") + .build(); } protected void addSectionDiagnosticResults() { addSection(IpsSectionEnum.DIAGNOSTIC_RESULTS) - .withTitle("Diagnostic Results") - .withSectionCode("30954-2") - .withSectionDisplay("Diagnostic Results") - .withResourceTypes(ResourceType.DiagnosticReport.name(), ResourceType.Observation.name()) - .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/DiagnosticResults-uv-ips") - .build(); + .withTitle("Diagnostic Results") + .withSectionCode("30954-2") + .withSectionDisplay("Diagnostic Results") + .withResourceTypes(ResourceType.DiagnosticReport.name(), ResourceType.Observation.name()) + .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/DiagnosticResults-uv-ips") + .build(); } protected void addSectionVitalSigns() { addSection(IpsSectionEnum.VITAL_SIGNS) - .withTitle("Vital Signs") - .withSectionCode("8716-3") - .withSectionDisplay("Vital Signs") - .withResourceTypes(ResourceType.Observation.name()) - .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/VitalSigns-uv-ips") - .build(); + .withTitle("Vital Signs") + .withSectionCode("8716-3") + .withSectionDisplay("Vital Signs") + .withResourceTypes(ResourceType.Observation.name()) + .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/VitalSigns-uv-ips") + .build(); } protected void addSectionPregnancy() { addSection(IpsSectionEnum.PREGNANCY) - .withTitle("Pregnancy Information") - .withSectionCode("10162-6") - .withSectionDisplay("Pregnancy Information") - .withResourceTypes(ResourceType.Observation.name()) - .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/Pregnancy-uv-ips") - .build(); + .withTitle("Pregnancy Information") + .withSectionCode("10162-6") + .withSectionDisplay("Pregnancy Information") + .withResourceTypes(ResourceType.Observation.name()) + .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/Pregnancy-uv-ips") + .build(); } protected void addSectionSocialHistory() { addSection(IpsSectionEnum.SOCIAL_HISTORY) - .withTitle("Social History") - .withSectionCode("29762-2") - .withSectionDisplay("Social History") - .withResourceTypes(ResourceType.Observation.name()) - .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/SocialHistory-uv-ips") - .build(); + .withTitle("Social History") + .withSectionCode("29762-2") + .withSectionDisplay("Social History") + .withResourceTypes(ResourceType.Observation.name()) + .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/SocialHistory-uv-ips") + .build(); } protected void addSectionIllnessHistory() { addSection(IpsSectionEnum.ILLNESS_HISTORY) - 
.withTitle("History of Past Illness") - .withSectionCode("11348-0") - .withSectionDisplay("History of Past Illness") - .withResourceTypes(ResourceType.Condition.name()) - .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/PastHistoryOfIllnesses-uv-ips") - .build(); + .withTitle("History of Past Illness") + .withSectionCode("11348-0") + .withSectionDisplay("History of Past Illness") + .withResourceTypes(ResourceType.Condition.name()) + .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/PastHistoryOfIllnesses-uv-ips") + .build(); } protected void addSectionFunctionalStatus() { addSection(IpsSectionEnum.FUNCTIONAL_STATUS) - .withTitle("Functional Status") - .withSectionCode("47420-5") - .withSectionDisplay("Functional Status") - .withResourceTypes(ResourceType.ClinicalImpression.name()) - .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/FunctionalStatus-uv-ips") - .build(); + .withTitle("Functional Status") + .withSectionCode("47420-5") + .withSectionDisplay("Functional Status") + .withResourceTypes(ResourceType.ClinicalImpression.name()) + .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/FunctionalStatus-uv-ips") + .build(); } protected void addSectionPlanOfCare() { addSection(IpsSectionEnum.PLAN_OF_CARE) - .withTitle("Plan of Care") - .withSectionCode("18776-5") - .withSectionDisplay("Plan of Care") - .withResourceTypes(ResourceType.CarePlan.name()) - .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/PlanOfCare-uv-ips") - .build(); + .withTitle("Plan of Care") + .withSectionCode("18776-5") + .withSectionDisplay("Plan of Care") + .withResourceTypes(ResourceType.CarePlan.name()) + .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/PlanOfCare-uv-ips") + .build(); } protected void addSectionAdvanceDirectives() { addSection(IpsSectionEnum.ADVANCE_DIRECTIVES) - .withTitle("Advance Directives") - .withSectionCode("42349-0") - .withSectionDisplay("Advance Directives") - .withResourceTypes(ResourceType.Consent.name()) - .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/AdvanceDirectives-uv-ips") - .build(); + .withTitle("Advance Directives") + .withSectionCode("42349-0") + .withSectionDisplay("Advance Directives") + .withResourceTypes(ResourceType.Consent.name()) + .withProfile("http://hl7.org/fhir/uv/ips/StructureDefinition/AdvanceDirectives-uv-ips") + .build(); } private SectionBuilder addSection(IpsSectionEnum theSectionEnum) { @@ -265,10 +264,12 @@ public class SectionRegistry { } public Section getSection(IpsSectionEnum theSectionEnum) { - return getSections().stream().filter(t -> t.getSectionEnum() == theSectionEnum).findFirst().orElseThrow(() -> new IllegalArgumentException("No section for type: " + theSectionEnum)); + return getSections().stream() + .filter(t -> t.getSectionEnum() == theSectionEnum) + .findFirst() + .orElseThrow(() -> new IllegalArgumentException("No section for type: " + theSectionEnum)); } - public interface INoInfoGenerator { /** @@ -276,7 +277,6 @@ public class SectionRegistry { * although it can if it is a resource found in the repository. 
*/ IBaseResource generate(IIdType theSubjectId); - } public class SectionBuilder { @@ -330,7 +330,14 @@ public class SectionRegistry { public void build() { myGlobalCustomizers.forEach(t -> t.accept(this)); - mySections.add(new Section(mySectionEnum, myTitle, mySectionCode, mySectionDisplay, myResourceTypes, myProfile, myNoInfoGenerator)); + mySections.add(new Section( + mySectionEnum, + myTitle, + mySectionCode, + mySectionDisplay, + myResourceTypes, + myProfile, + myNoInfoGenerator)); } } @@ -338,9 +345,16 @@ public class SectionRegistry { @Override public IBaseResource generate(IIdType theSubjectId) { AllergyIntolerance allergy = new AllergyIntolerance(); - allergy.setCode(new CodeableConcept().addCoding(new Coding().setCode("no-allergy-info").setSystem("http://hl7.org/fhir/uv/ips/CodeSystem/absent-unknown-uv-ips").setDisplay("No information about allergies"))) - .setPatient(new Reference(theSubjectId)) - .setClinicalStatus(new CodeableConcept().addCoding(new Coding().setCode("active").setSystem("http://terminology.hl7.org/CodeSystem/allergyintolerance-clinical"))); + allergy.setCode(new CodeableConcept() + .addCoding(new Coding() + .setCode("no-allergy-info") + .setSystem("http://hl7.org/fhir/uv/ips/CodeSystem/absent-unknown-uv-ips") + .setDisplay("No information about allergies"))) + .setPatient(new Reference(theSubjectId)) + .setClinicalStatus(new CodeableConcept() + .addCoding(new Coding() + .setCode("active") + .setSystem("http://terminology.hl7.org/CodeSystem/allergyintolerance-clinical"))); return allergy; } } @@ -350,10 +364,16 @@ public class SectionRegistry { public IBaseResource generate(IIdType theSubjectId) { MedicationStatement medication = new MedicationStatement(); // setMedicationCodeableConcept is not available - medication.setMedication(new CodeableConcept().addCoding(new Coding().setCode("no-medication-info").setSystem("http://hl7.org/fhir/uv/ips/CodeSystem/absent-unknown-uv-ips").setDisplay("No information about medications"))) - .setSubject(new Reference(theSubjectId)) - .setStatus(MedicationStatement.MedicationStatementStatus.UNKNOWN); - // .setEffective(new Period().addExtension().setUrl("http://hl7.org/fhir/StructureDefinition/data-absent-reason").setValue((new Coding().setCode("not-applicable")))) + medication + .setMedication(new CodeableConcept() + .addCoding(new Coding() + .setCode("no-medication-info") + .setSystem("http://hl7.org/fhir/uv/ips/CodeSystem/absent-unknown-uv-ips") + .setDisplay("No information about medications"))) + .setSubject(new Reference(theSubjectId)) + .setStatus(MedicationStatement.MedicationStatementStatus.UNKNOWN); + // .setEffective(new + // Period().addExtension().setUrl("http://hl7.org/fhir/StructureDefinition/data-absent-reason").setValue((new Coding().setCode("not-applicable")))) return medication; } } @@ -362,9 +382,17 @@ public class SectionRegistry { @Override public IBaseResource generate(IIdType theSubjectId) { Condition condition = new Condition(); - condition.setCode(new CodeableConcept().addCoding(new Coding().setCode("no-problem-info").setSystem("http://hl7.org/fhir/uv/ips/CodeSystem/absent-unknown-uv-ips").setDisplay("No information about problems"))) - .setSubject(new Reference(theSubjectId)) - .setClinicalStatus(new CodeableConcept().addCoding(new Coding().setCode("active").setSystem("http://terminology.hl7.org/CodeSystem/condition-clinical"))); + condition + .setCode(new CodeableConcept() + .addCoding(new Coding() + .setCode("no-problem-info") + 
.setSystem("http://hl7.org/fhir/uv/ips/CodeSystem/absent-unknown-uv-ips") + .setDisplay("No information about problems"))) + .setSubject(new Reference(theSubjectId)) + .setClinicalStatus(new CodeableConcept() + .addCoding(new Coding() + .setCode("active") + .setSystem("http://terminology.hl7.org/CodeSystem/condition-clinical"))); return condition; } } @@ -379,7 +407,14 @@ public class SectionRegistry { private final String myProfile; private final INoInfoGenerator myNoInfoGenerator; - public Section(IpsSectionEnum theSectionEnum, String theTitle, String theSectionCode, String theSectionDisplay, List theResourceTypes, String theProfile, INoInfoGenerator theNoInfoGenerator) { + public Section( + IpsSectionEnum theSectionEnum, + String theTitle, + String theSectionCode, + String theSectionDisplay, + List theResourceTypes, + String theProfile, + INoInfoGenerator theNoInfoGenerator) { mySectionEnum = theSectionEnum; myTitle = theTitle; mySectionCode = theSectionCode; diff --git a/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/generator/IpsGeneratorSvcImpl.java b/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/generator/IpsGeneratorSvcImpl.java index b308d06b026..7177f71fe6f 100644 --- a/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/generator/IpsGeneratorSvcImpl.java +++ b/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/generator/IpsGeneratorSvcImpl.java @@ -45,8 +45,6 @@ import ca.uhn.fhir.util.ResourceReferenceInfo; import ca.uhn.fhir.util.ValidateUtil; import com.google.common.collect.BiMap; import com.google.common.collect.HashBiMap; -import org.apache.commons.collections4.BidiMap; -import org.apache.commons.collections4.bidimap.DualHashBidiMap; import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.instance.model.api.IBaseExtension; @@ -63,8 +61,6 @@ import org.hl7.fhir.r4.model.Resource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -72,6 +68,8 @@ import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static ca.uhn.fhir.jpa.term.api.ITermLoaderSvc.LOINC_URI; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -87,7 +85,8 @@ public class IpsGeneratorSvcImpl implements IIpsGeneratorSvc { /** * Constructor */ - public IpsGeneratorSvcImpl(FhirContext theFhirContext, IIpsGenerationStrategy theGenerationStrategy, DaoRegistry theDaoRegistry) { + public IpsGeneratorSvcImpl( + FhirContext theFhirContext, IIpsGenerationStrategy theGenerationStrategy, DaoRegistry theDaoRegistry) { myGenerationStrategy = theGenerationStrategy; myDaoRegistry = theDaoRegistry; myFhirContext = theFhirContext; @@ -95,24 +94,22 @@ public class IpsGeneratorSvcImpl implements IIpsGeneratorSvc { @Override public IBaseBundle generateIps(RequestDetails theRequestDetails, IIdType thePatientId) { - IBaseResource patient = myDaoRegistry - .getResourceDao("Patient") - .read(thePatientId, theRequestDetails); + IBaseResource patient = myDaoRegistry.getResourceDao("Patient").read(thePatientId, theRequestDetails); return generateIpsForPatient(theRequestDetails, patient); } @Override public IBaseBundle generateIps(RequestDetails theRequestDetails, TokenParam thePatientIdentifier) { - SearchParameterMap searchParameterMap = new 
SearchParameterMap() - .setLoadSynchronousUpTo(2) - .add(Patient.SP_IDENTIFIER, thePatientIdentifier); - IBundleProvider searchResults = myDaoRegistry - .getResourceDao("Patient") - .search(searchParameterMap, theRequestDetails); + SearchParameterMap searchParameterMap = + new SearchParameterMap().setLoadSynchronousUpTo(2).add(Patient.SP_IDENTIFIER, thePatientIdentifier); + IBundleProvider searchResults = + myDaoRegistry.getResourceDao("Patient").search(searchParameterMap, theRequestDetails); - ValidateUtil.isTrueOrThrowInvalidRequest(searchResults.sizeOrThrowNpe() > 0, "No Patient could be found matching given identifier"); - ValidateUtil.isTrueOrThrowInvalidRequest(searchResults.sizeOrThrowNpe() == 1, "Multiple Patient resources were found matching given identifier"); + ValidateUtil.isTrueOrThrowInvalidRequest( + searchResults.sizeOrThrowNpe() > 0, "No Patient could be found matching given identifier"); + ValidateUtil.isTrueOrThrowInvalidRequest( + searchResults.sizeOrThrowNpe() == 1, "Multiple Patient resources were found matching given identifier"); IBaseResource patient = searchResults.getResources(0, 1).get(0); @@ -120,7 +117,11 @@ public class IpsGeneratorSvcImpl implements IIpsGeneratorSvc { } private IBaseBundle generateIpsForPatient(RequestDetails theRequestDetails, IBaseResource thePatient) { - IIdType originalSubjectId = myFhirContext.getVersion().newIdType().setValue(thePatient.getIdElement().getValue()).toUnqualifiedVersionless(); + IIdType originalSubjectId = myFhirContext + .getVersion() + .newIdType() + .setValue(thePatient.getIdElement().getValue()) + .toUnqualifiedVersionless(); massageResourceId(null, thePatient); IpsContext context = new IpsContext(thePatient, originalSubjectId); @@ -131,7 +132,8 @@ public class IpsGeneratorSvcImpl implements IIpsGeneratorSvc { massageResourceId(context, author); CompositionBuilder compositionBuilder = createComposition(thePatient, context, author); - determineInclusions(theRequestDetails, originalSubjectId, context, compositionBuilder, globalResourcesToInclude); + determineInclusions( + theRequestDetails, originalSubjectId, context, compositionBuilder, globalResourcesToInclude); IBaseResource composition = compositionBuilder.getComposition(); @@ -142,7 +144,8 @@ public class IpsGeneratorSvcImpl implements IIpsGeneratorSvc { return createCompositionDocument(author, composition, globalResourcesToInclude); } - private IBaseBundle createCompositionDocument(IBaseResource author, IBaseResource composition, ResourceInclusionCollection theResourcesToInclude) { + private IBaseBundle createCompositionDocument( + IBaseResource author, IBaseResource composition, ResourceInclusionCollection theResourcesToInclude) { BundleBuilder bundleBuilder = new BundleBuilder(myFhirContext); bundleBuilder.setType(Bundle.BundleType.DOCUMENT.toCode()); bundleBuilder.setIdentifier("urn:ietf:rfc:4122", UUID.randomUUID().toString()); @@ -163,15 +166,32 @@ public class IpsGeneratorSvcImpl implements IIpsGeneratorSvc { } @Nonnull - private ResourceInclusionCollection determineInclusions(RequestDetails theRequestDetails, IIdType originalSubjectId, IpsContext context, CompositionBuilder theCompositionBuilder, ResourceInclusionCollection theGlobalResourcesToInclude) { + private ResourceInclusionCollection determineInclusions( + RequestDetails theRequestDetails, + IIdType originalSubjectId, + IpsContext context, + CompositionBuilder theCompositionBuilder, + ResourceInclusionCollection theGlobalResourcesToInclude) { SectionRegistry sectionRegistry = 
myGenerationStrategy.getSectionRegistry(); for (SectionRegistry.Section nextSection : sectionRegistry.getSections()) { - determineInclusionsForSection(theRequestDetails, originalSubjectId, context, theCompositionBuilder, theGlobalResourcesToInclude, nextSection); + determineInclusionsForSection( + theRequestDetails, + originalSubjectId, + context, + theCompositionBuilder, + theGlobalResourcesToInclude, + nextSection); } return theGlobalResourcesToInclude; } - private void determineInclusionsForSection(RequestDetails theRequestDetails, IIdType theOriginalSubjectId, IpsContext theIpsContext, CompositionBuilder theCompositionBuilder, ResourceInclusionCollection theGlobalResourcesToInclude, SectionRegistry.Section theSection) { + private void determineInclusionsForSection( + RequestDetails theRequestDetails, + IIdType theOriginalSubjectId, + IpsContext theIpsContext, + CompositionBuilder theCompositionBuilder, + ResourceInclusionCollection theGlobalResourcesToInclude, + SectionRegistry.Section theSection) { ResourceInclusionCollection sectionResourcesToInclude = new ResourceInclusionCollection(); for (String nextResourceType : theSection.getResourceTypes()) { @@ -180,7 +200,8 @@ public class IpsGeneratorSvcImpl implements IIpsGeneratorSvc { searchParameterMap.add(subjectSp, new ReferenceParam(theOriginalSubjectId)); IpsSectionEnum sectionEnum = theSection.getSectionEnum(); - IpsContext.IpsSectionContext ipsSectionContext = theIpsContext.newSectionContext(sectionEnum, nextResourceType); + IpsContext.IpsSectionContext ipsSectionContext = + theIpsContext.newSectionContext(sectionEnum, nextResourceType); myGenerationStrategy.massageResourceSearch(ipsSectionContext, searchParameterMap); Set includes = myGenerationStrategy.provideResourceSearchIncludes(ipsSectionContext); @@ -199,7 +220,8 @@ public class IpsGeneratorSvcImpl implements IIpsGeneratorSvc { boolean include; - if (ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.get(nextCandidate) == BundleEntrySearchModeEnum.INCLUDE) { + if (ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.get(nextCandidate) + == BundleEntrySearchModeEnum.INCLUDE) { include = true; } else { include = myGenerationStrategy.shouldInclude(ipsSectionContext, nextCandidate); @@ -207,15 +229,21 @@ public class IpsGeneratorSvcImpl implements IIpsGeneratorSvc { if (include) { - String originalResourceId = nextCandidate.getIdElement().toUnqualifiedVersionless().getValue(); + String originalResourceId = nextCandidate + .getIdElement() + .toUnqualifiedVersionless() + .getValue(); // Check if we already have this resource included so that we don't // include it twice - IBaseResource previouslyExistingResource = theGlobalResourcesToInclude.getResourceByOriginalId(originalResourceId); + IBaseResource previouslyExistingResource = + theGlobalResourcesToInclude.getResourceByOriginalId(originalResourceId); if (previouslyExistingResource != null) { - BundleEntrySearchModeEnum candidateSearchEntryMode = ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.get(nextCandidate); + BundleEntrySearchModeEnum candidateSearchEntryMode = + ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.get(nextCandidate); if (candidateSearchEntryMode == BundleEntrySearchModeEnum.MATCH) { - ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(previouslyExistingResource, BundleEntrySearchModeEnum.MATCH); + ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put( + previouslyExistingResource, BundleEntrySearchModeEnum.MATCH); } nextCandidate = previouslyExistingResource; @@ -225,15 +253,13 @@ public class IpsGeneratorSvcImpl implements IIpsGeneratorSvc { } else { 
IIdType id = myGenerationStrategy.massageResourceId(theIpsContext, nextCandidate); nextCandidate.setId(id); - theGlobalResourcesToInclude.addResourceIfNotAlreadyPresent(nextCandidate, originalResourceId); + theGlobalResourcesToInclude.addResourceIfNotAlreadyPresent( + nextCandidate, originalResourceId); sectionResourcesToInclude.addResourceIfNotAlreadyPresent(nextCandidate, originalResourceId); } } - } - } - } if (sectionResourcesToInclude.isEmpty() && theSection.getNoInfoGenerator() != null) { @@ -243,7 +269,9 @@ public class IpsGeneratorSvcImpl implements IIpsGeneratorSvc { noInfoResource.setId(id); } ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(noInfoResource, BundleEntrySearchModeEnum.MATCH); - theGlobalResourcesToInclude.addResourceIfNotAlreadyPresent(noInfoResource, noInfoResource.getIdElement().toUnqualifiedVersionless().getValue()); + theGlobalResourcesToInclude.addResourceIfNotAlreadyPresent( + noInfoResource, + noInfoResource.getIdElement().toUnqualifiedVersionless().getValue()); sectionResourcesToInclude.addResourceIfNotAlreadyPresent(noInfoResource, id); } @@ -256,9 +284,14 @@ public class IpsGeneratorSvcImpl implements IIpsGeneratorSvc { for (IBaseResource nextResource : sectionResourcesToInclude.getResources()) { List references = myFhirContext.newTerser().getAllResourceReferences(nextResource); for (ResourceReferenceInfo nextReference : references) { - String existingReference = nextReference.getResourceReference().getReferenceElement().getValue(); + String existingReference = nextReference + .getResourceReference() + .getReferenceElement() + .getValue(); if (isNotBlank(existingReference)) { - existingReference = new IdType(existingReference).toUnqualifiedVersionless().getValue(); + existingReference = new IdType(existingReference) + .toUnqualifiedVersionless() + .getValue(); String replacement = theGlobalResourcesToInclude.getIdSubstitution(existingReference); if (isNotBlank(replacement)) { if (!replacement.equals(existingReference)) { @@ -278,7 +311,11 @@ public class IpsGeneratorSvcImpl implements IIpsGeneratorSvc { } @SuppressWarnings("unchecked") - private void addSection(SectionRegistry.Section theSection, CompositionBuilder theCompositionBuilder, ResourceInclusionCollection theResourcesToInclude, ResourceInclusionCollection theGlobalResourcesToInclude) { + private void addSection( + SectionRegistry.Section theSection, + CompositionBuilder theCompositionBuilder, + ResourceInclusionCollection theResourcesToInclude, + ResourceInclusionCollection theGlobalResourcesToInclude) { CompositionBuilder.SectionBuilder sectionBuilder = theCompositionBuilder.addSection(); @@ -292,12 +329,14 @@ public class IpsGeneratorSvcImpl implements IIpsGeneratorSvc { IBaseExtension narrativeLink = ((IBaseHasExtensions) next).addExtension(); narrativeLink.setUrl("http://hl7.org/fhir/StructureDefinition/narrativeLink"); - String narrativeLinkValue = theCompositionBuilder.getComposition().getIdElement().getValue() - + "#" - + myFhirContext.getResourceType(next) - + "-" - + next.getIdElement().getValue(); - IPrimitiveType narrativeLinkUri = (IPrimitiveType) myFhirContext.getElementDefinition("uri").newInstance(); + String narrativeLinkValue = + theCompositionBuilder.getComposition().getIdElement().getValue() + + "#" + + myFhirContext.getResourceType(next) + + "-" + + next.getIdElement().getValue(); + IPrimitiveType narrativeLinkUri = (IPrimitiveType) + myFhirContext.getElementDefinition("uri").newInstance(); narrativeLinkUri.setValueAsString(narrativeLinkValue); 
narrativeLink.setValue(narrativeLinkUri); @@ -325,10 +364,9 @@ public class IpsGeneratorSvcImpl implements IIpsGeneratorSvc { private String determinePatientCompartmentSearchParameterName(String theResourceType) { RuntimeResourceDefinition resourceDef = myFhirContext.getResourceDefinition(theResourceType); - Set searchParams = resourceDef.getSearchParamsForCompartmentName("Patient") - .stream() - .map(RuntimeSearchParam::getName) - .collect(Collectors.toSet()); + Set searchParams = resourceDef.getSearchParamsForCompartmentName("Patient").stream() + .map(RuntimeSearchParam::getName) + .collect(Collectors.toSet()); // Prefer "patient", then "subject" then anything else if (searchParams.contains(Observation.SP_PATIENT)) { return Observation.SP_PATIENT; @@ -344,7 +382,10 @@ public class IpsGeneratorSvcImpl implements IIpsGeneratorSvc { theResource.setId(id); } - private String createSectionNarrative(SectionRegistry.Section theSection, ResourceInclusionCollection theResources, ResourceInclusionCollection theGlobalResourceCollection) { + private String createSectionNarrative( + SectionRegistry.Section theSection, + ResourceInclusionCollection theResources, + ResourceInclusionCollection theGlobalResourceCollection) { CustomThymeleafNarrativeGenerator generator = newNarrativeGenerator(theGlobalResourceCollection); Bundle bundle = new Bundle(); @@ -362,7 +403,8 @@ public class IpsGeneratorSvcImpl implements IIpsGeneratorSvc { } @Nonnull - private CustomThymeleafNarrativeGenerator newNarrativeGenerator(ResourceInclusionCollection theGlobalResourceCollection) { + private CustomThymeleafNarrativeGenerator newNarrativeGenerator( + ResourceInclusionCollection theGlobalResourceCollection) { List narrativePropertyFiles = myGenerationStrategy.getNarrativePropertyFiles(); CustomThymeleafNarrativeGenerator generator = new CustomThymeleafNarrativeGenerator(narrativePropertyFiles); generator.setFhirPathEvaluationContext(new IFhirPathEvaluationContext() { @@ -375,167 +417,151 @@ public class IpsGeneratorSvcImpl implements IIpsGeneratorSvc { return generator; } + /* + private static HashMap> hashPrimaries(List resourceList) { + HashMap> iPSResourceMap = new HashMap>(); - - - - - - - - -/* - - - - - - - private static HashMap> hashPrimaries(List resourceList) { - HashMap> iPSResourceMap = new HashMap>(); - - for (Resource resource : resourceList) { - for (PatientSummary.IPSSection iPSSection : PatientSummary.IPSSection.values()) { - if ( SectionTypes.get(iPSSection).contains(resource.getResourceType()) ) { - if ( !(resource.getResourceType() == ResourceType.Observation) || isObservationinSection(iPSSection, (Observation) resource)) { - if (iPSResourceMap.get(iPSSection) == null) { - iPSResourceMap.put(iPSSection, new ArrayList()); + for (Resource resource : resourceList) { + for (PatientSummary.IPSSection iPSSection : PatientSummary.IPSSection.values()) { + if ( SectionTypes.get(iPSSection).contains(resource.getResourceType()) ) { + if ( !(resource.getResourceType() == ResourceType.Observation) || isObservationinSection(iPSSection, (Observation) resource)) { + if (iPSResourceMap.get(iPSSection) == null) { + iPSResourceMap.put(iPSSection, new ArrayList()); + } + iPSResourceMap.get(iPSSection).add(resource); } - iPSResourceMap.get(iPSSection).add(resource); } } } + + return iPSResourceMap; } - return iPSResourceMap; - } - - private static HashMap> filterPrimaries(HashMap> sectionPrimaries) { - HashMap> filteredPrimaries = new HashMap>(); - for ( PatientSummary.IPSSection section : sectionPrimaries.keySet() ) { - 
List filteredList = new ArrayList(); - for (Resource resource : sectionPrimaries.get(section)) { - if (passesFilter(section, resource)) { - filteredList.add(resource); + private static HashMap> filterPrimaries(HashMap> sectionPrimaries) { + HashMap> filteredPrimaries = new HashMap>(); + for ( PatientSummary.IPSSection section : sectionPrimaries.keySet() ) { + List filteredList = new ArrayList(); + for (Resource resource : sectionPrimaries.get(section)) { + if (passesFilter(section, resource)) { + filteredList.add(resource); + } + } + if (filteredList.size() > 0) { + filteredPrimaries.put(section, filteredList); } } - if (filteredList.size() > 0) { - filteredPrimaries.put(section, filteredList); + return filteredPrimaries; + } + + private static List pruneResources(Patient patient, List resources, HashMap> sectionPrimaries, FhirContext ctx) { + List resourceIds = new ArrayList(); + List followedIds = new ArrayList(); + + HashMap resourcesById = new HashMap(); + for (Resource resource : resources) { + resourcesById.put(resource.getIdElement().getIdPart(), resource); } - } - return filteredPrimaries; - } + String patientId = patient.getIdElement().getIdPart(); + resourcesById.put(patientId, patient); - private static List pruneResources(Patient patient, List resources, HashMap> sectionPrimaries, FhirContext ctx) { - List resourceIds = new ArrayList(); - List followedIds = new ArrayList(); + recursivePrune(patientId, resourceIds, followedIds, resourcesById, ctx); - HashMap resourcesById = new HashMap(); - for (Resource resource : resources) { - resourcesById.put(resource.getIdElement().getIdPart(), resource); - } - String patientId = patient.getIdElement().getIdPart(); - resourcesById.put(patientId, patient); - - recursivePrune(patientId, resourceIds, followedIds, resourcesById, ctx); - - for (PatientSummary.IPSSection section : sectionPrimaries.keySet()) { - for (Resource resource : sectionPrimaries.get(section)) { - String resourceId = resource.getIdElement().getIdPart(); - recursivePrune(resourceId, resourceIds, followedIds, resourcesById, ctx); + for (PatientSummary.IPSSection section : sectionPrimaries.keySet()) { + for (Resource resource : sectionPrimaries.get(section)) { + String resourceId = resource.getIdElement().getIdPart(); + recursivePrune(resourceId, resourceIds, followedIds, resourcesById, ctx); + } } - } - List prunedResources = new ArrayList(); + List prunedResources = new ArrayList(); - for (Resource resource : resources) { - if (resourceIds.contains(resource.getIdElement().getIdPart())) { - prunedResources.add(resource); + for (Resource resource : resources) { + if (resourceIds.contains(resource.getIdElement().getIdPart())) { + prunedResources.add(resource); + } } + + return prunedResources; } - return prunedResources; - } - - private static Void recursivePrune(String resourceId, List resourceIds, List followedIds, HashMap resourcesById, FhirContext ctx) { - if (!resourceIds.contains(resourceId)) { - resourceIds.add(resourceId); - } - - Resource resource = resourcesById.get(resourceId); - if (resource != null) { - ctx.newTerser().getAllResourceReferences(resource).stream() - .map( r -> r.getResourceReference().getReferenceElement().getIdPart() ) - .forEach( id -> { - if (!followedIds.contains(id)) { - followedIds.add(id); - recursivePrune(id, resourceIds, followedIds, resourcesById, ctx); - } - }); - } - - return null; - } - - private static List addLinkToResources(List resources, HashMap> sectionPrimaries, Composition composition) { - List linkedResources = new ArrayList(); 
- HashMap valueUrls = new HashMap(); - - String url = "http://hl7.org/fhir/StructureDefinition/narrativeLink"; - String valueUrlBase = composition.getId() + "#"; - - for (PatientSummary.IPSSection section : sectionPrimaries.keySet()) { - String profile = SectionProfiles.get(section); - String[] arr = profile.split("/"); - String profileName = arr[arr.length - 1]; - String sectionValueUrlBase = valueUrlBase + profileName.split("-uv-")[0]; - - for (Resource resource : sectionPrimaries.get(section)) { - String valueUrl = sectionValueUrlBase + "-" + resource.getIdElement().getIdPart(); - valueUrls.put(resource.getIdElement().getIdPart(), valueUrl); + private static Void recursivePrune(String resourceId, List resourceIds, List followedIds, HashMap resourcesById, FhirContext ctx) { + if (!resourceIds.contains(resourceId)) { + resourceIds.add(resourceId); } - } - for (Resource resource : resources) { - if (valueUrls.containsKey(resource.getIdElement().getIdPart())) { - String valueUrl = valueUrls.get(resource.getIdElement().getIdPart()); - Extension extension = new Extension(); - extension.setUrl(url); - extension.setValue(new UriType(valueUrl)); - DomainResource domainResource = (DomainResource) resource; - domainResource.addExtension(extension); - resource = (Resource) domainResource; + Resource resource = resourcesById.get(resourceId); + if (resource != null) { + ctx.newTerser().getAllResourceReferences(resource).stream() + .map( r -> r.getResourceReference().getReferenceElement().getIdPart() ) + .forEach( id -> { + if (!followedIds.contains(id)) { + followedIds.add(id); + recursivePrune(id, resourceIds, followedIds, resourcesById, ctx); + } + }); } - linkedResources.add(resource); + + return null; } - return linkedResources; - } + private static List addLinkToResources(List resources, HashMap> sectionPrimaries, Composition composition) { + List linkedResources = new ArrayList(); + HashMap valueUrls = new HashMap(); - private static HashMap createNarratives(HashMap> sectionPrimaries, List resources, FhirContext ctx) { - HashMap hashedNarratives = new HashMap(); + String url = "http://hl7.org/fhir/StructureDefinition/narrativeLink"; + String valueUrlBase = composition.getId() + "#"; - for (PatientSummary.IPSSection section : sectionPrimaries.keySet()) { - String narrative = createSectionNarrative(section, resources, ctx); - hashedNarratives.put(section, narrative); + for (PatientSummary.IPSSection section : sectionPrimaries.keySet()) { + String profile = SectionProfiles.get(section); + String[] arr = profile.split("/"); + String profileName = arr[arr.length - 1]; + String sectionValueUrlBase = valueUrlBase + profileName.split("-uv-")[0]; + + for (Resource resource : sectionPrimaries.get(section)) { + String valueUrl = sectionValueUrlBase + "-" + resource.getIdElement().getIdPart(); + valueUrls.put(resource.getIdElement().getIdPart(), valueUrl); + } + } + + for (Resource resource : resources) { + if (valueUrls.containsKey(resource.getIdElement().getIdPart())) { + String valueUrl = valueUrls.get(resource.getIdElement().getIdPart()); + Extension extension = new Extension(); + extension.setUrl(url); + extension.setValue(new UriType(valueUrl)); + DomainResource domainResource = (DomainResource) resource; + domainResource.addExtension(extension); + resource = (Resource) domainResource; + } + linkedResources.add(resource); + } + + return linkedResources; } - return hashedNarratives; - } + private static HashMap createNarratives(HashMap> sectionPrimaries, List resources, FhirContext ctx) { + HashMap 
hashedNarratives = new HashMap(); + + for (PatientSummary.IPSSection section : sectionPrimaries.keySet()) { + String narrative = createSectionNarrative(section, resources, ctx); + hashedNarratives.put(section, narrative); + } + + return hashedNarratives; + } -*/ - + */ private static class ResourceInclusionCollection { @@ -551,9 +577,11 @@ public class IpsGeneratorSvcImpl implements IIpsGeneratorSvc { * @param theOriginalResourceId Must be an unqualified versionless ID */ public void addResourceIfNotAlreadyPresent(IBaseResource theResource, String theOriginalResourceId) { - assert theOriginalResourceId.matches("([A-Z][a-z]([A-Za-z]+)/[a-zA-Z0-9._-]+)|(urn:uuid:[0-9a-z-]+)") : "Not an unqualified versionless ID: " + theOriginalResourceId; + assert theOriginalResourceId.matches("([A-Z][a-z]([A-Za-z]+)/[a-zA-Z0-9._-]+)|(urn:uuid:[0-9a-z-]+)") + : "Not an unqualified versionless ID: " + theOriginalResourceId; - String resourceId = theResource.getIdElement().toUnqualifiedVersionless().getValue(); + String resourceId = + theResource.getIdElement().toUnqualifiedVersionless().getValue(); if (myIdToResource.containsKey(resourceId)) { return; } @@ -592,6 +620,4 @@ public class IpsGeneratorSvcImpl implements IIpsGeneratorSvc { return myResources.isEmpty(); } } - - } diff --git a/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/provider/IpsOperationProvider.java b/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/provider/IpsOperationProvider.java index 8bb9845d151..dd5f3a0a47c 100644 --- a/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/provider/IpsOperationProvider.java +++ b/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/provider/IpsOperationProvider.java @@ -42,33 +42,36 @@ public class IpsOperationProvider { myIpsGeneratorSvc = theIpsGeneratorSvc; } - /** * Patient/123/$summary */ - @Operation(name = JpaConstants.OPERATION_SUMMARY, idempotent = true, bundleType = BundleTypeEnum.DOCUMENT, typeName = "Patient", canonicalUrl = JpaConstants.SUMMARY_OPERATION_URL) - public IBaseBundle patientInstanceSummary( - @IdParam - IIdType thePatientId, + @Operation( + name = JpaConstants.OPERATION_SUMMARY, + idempotent = true, + bundleType = BundleTypeEnum.DOCUMENT, + typeName = "Patient", + canonicalUrl = JpaConstants.SUMMARY_OPERATION_URL) + public IBaseBundle patientInstanceSummary(@IdParam IIdType thePatientId, RequestDetails theRequestDetails) { - RequestDetails theRequestDetails - ) { return myIpsGeneratorSvc.generateIps(theRequestDetails, thePatientId); } /** * /Patient/$summary?identifier=foo|bar */ - @Operation(name = JpaConstants.OPERATION_SUMMARY, idempotent = true, bundleType = BundleTypeEnum.DOCUMENT, typeName = "Patient", canonicalUrl = JpaConstants.SUMMARY_OPERATION_URL) + @Operation( + name = JpaConstants.OPERATION_SUMMARY, + idempotent = true, + bundleType = BundleTypeEnum.DOCUMENT, + typeName = "Patient", + canonicalUrl = JpaConstants.SUMMARY_OPERATION_URL) public IBaseBundle patientTypeSummary( - - @Description(shortDefinition = "When the logical id of the patient is not used, servers MAY choose to support patient selection based on provided identifier") - @OperationParam(name = "identifier", min = 0, max = 1) - TokenParam thePatientIdentifier, - - RequestDetails theRequestDetails - ) { + @Description( + shortDefinition = + "When the logical id of the patient is not used, servers MAY choose to support patient selection based on provided identifier") + @OperationParam(name = "identifier", min = 0, max = 1) + TokenParam thePatientIdentifier, + 
RequestDetails theRequestDetails) { return myIpsGeneratorSvc.generateIps(theRequestDetails, thePatientIdentifier); } - } diff --git a/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/strategy/DefaultIpsGenerationStrategy.java b/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/strategy/DefaultIpsGenerationStrategy.java index db477611b11..df4781660b7 100644 --- a/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/strategy/DefaultIpsGenerationStrategy.java +++ b/hapi-fhir-jpaserver-ips/src/main/java/ca/uhn/fhir/jpa/ips/strategy/DefaultIpsGenerationStrategy.java @@ -32,20 +32,21 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.*; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.time.LocalDate; import java.time.format.DateTimeFormatter; import java.util.Collections; import java.util.List; import java.util.Set; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static ca.uhn.fhir.jpa.term.api.ITermLoaderSvc.LOINC_URI; @SuppressWarnings({"EnhancedSwitchMigration", "HttpUrlsUsage"}) public class DefaultIpsGenerationStrategy implements IIpsGenerationStrategy { - public static final String DEFAULT_IPS_NARRATIVES_PROPERTIES = "classpath:ca/uhn/fhir/jpa/ips/narrative/ips-narratives.properties"; + public static final String DEFAULT_IPS_NARRATIVES_PROPERTIES = + "classpath:ca/uhn/fhir/jpa/ips/narrative/ips-narratives.properties"; private SectionRegistry mySectionRegistry; /** @@ -69,27 +70,27 @@ public class DefaultIpsGenerationStrategy implements IIpsGenerationStrategy { @Override public List getNarrativePropertyFiles() { - return Lists.newArrayList( - DEFAULT_IPS_NARRATIVES_PROPERTIES - ); + return Lists.newArrayList(DEFAULT_IPS_NARRATIVES_PROPERTIES); } @Override public IBaseResource createAuthor() { Organization organization = new Organization(); - organization.setName("eHealthLab - University of Cyprus") - .addAddress(new Address() - .addLine("1 University Avenue") - .setCity("Nicosia") - .setPostalCode("2109") - .setCountry("CY")) - .setId(IdType.newRandomUuid()); + organization + .setName("eHealthLab - University of Cyprus") + .addAddress(new Address() + .addLine("1 University Avenue") + .setCity("Nicosia") + .setPostalCode("2109") + .setCountry("CY")) + .setId(IdType.newRandomUuid()); return organization; } @Override public String createTitle(IpsContext theContext) { - return "Patient Summary as of " + DateTimeFormatter.ofPattern("MM/dd/yyyy").format(LocalDate.now()); + return "Patient Summary as of " + + DateTimeFormatter.ofPattern("MM/dd/yyyy").format(LocalDate.now()); } @Override @@ -103,7 +104,8 @@ public class DefaultIpsGenerationStrategy implements IIpsGenerationStrategy { } @Override - public void massageResourceSearch(IpsContext.IpsSectionContext theIpsSectionContext, SearchParameterMap theSearchParameterMap) { + public void massageResourceSearch( + IpsContext.IpsSectionContext theIpsSectionContext, SearchParameterMap theSearchParameterMap) { switch (theIpsSectionContext.getSection()) { case ALLERGY_INTOLERANCE: case PROBLEM_LIST: @@ -115,17 +117,23 @@ public class DefaultIpsGenerationStrategy implements IIpsGenerationStrategy { return; case VITAL_SIGNS: if (theIpsSectionContext.getResourceType().equals(ResourceType.Observation.name())) { - theSearchParameterMap.add(Observation.SP_CATEGORY, new TokenOrListParam() - .addOr(new TokenParam("http://terminology.hl7.org/CodeSystem/observation-category", "vital-signs")) - ); + 
theSearchParameterMap.add( + Observation.SP_CATEGORY, + new TokenOrListParam() + .addOr(new TokenParam( + "http://terminology.hl7.org/CodeSystem/observation-category", + "vital-signs"))); return; } break; case SOCIAL_HISTORY: if (theIpsSectionContext.getResourceType().equals(ResourceType.Observation.name())) { - theSearchParameterMap.add(Observation.SP_CATEGORY, new TokenOrListParam() - .addOr(new TokenParam("http://terminology.hl7.org/CodeSystem/observation-category", "social-history")) - ); + theSearchParameterMap.add( + Observation.SP_CATEGORY, + new TokenOrListParam() + .addOr(new TokenParam( + "http://terminology.hl7.org/CodeSystem/observation-category", + "social-history"))); return; } break; @@ -133,83 +141,133 @@ public class DefaultIpsGenerationStrategy implements IIpsGenerationStrategy { if (theIpsSectionContext.getResourceType().equals(ResourceType.DiagnosticReport.name())) { return; } else if (theIpsSectionContext.getResourceType().equals(ResourceType.Observation.name())) { - theSearchParameterMap.add(Observation.SP_CATEGORY, new TokenOrListParam() - .addOr(new TokenParam("http://terminology.hl7.org/CodeSystem/observation-category", "laboratory")) - ); + theSearchParameterMap.add( + Observation.SP_CATEGORY, + new TokenOrListParam() + .addOr(new TokenParam( + "http://terminology.hl7.org/CodeSystem/observation-category", + "laboratory"))); return; } break; case PREGNANCY: if (theIpsSectionContext.getResourceType().equals(ResourceType.Observation.name())) { - theSearchParameterMap.add(Observation.SP_CODE, new TokenOrListParam() - .addOr(new TokenParam(LOINC_URI, "82810-3")) - .addOr(new TokenParam(LOINC_URI, "11636-8")) - .addOr(new TokenParam(LOINC_URI, "11637-6")) - .addOr(new TokenParam(LOINC_URI, "11638-4")) - .addOr(new TokenParam(LOINC_URI, "11639-2")) - .addOr(new TokenParam(LOINC_URI, "11640-0")) - .addOr(new TokenParam(LOINC_URI, "11612-9")) - .addOr(new TokenParam(LOINC_URI, "11613-7")) - .addOr(new TokenParam(LOINC_URI, "11614-5")) - .addOr(new TokenParam(LOINC_URI, "33065-4")) - ); + theSearchParameterMap.add( + Observation.SP_CODE, + new TokenOrListParam() + .addOr(new TokenParam(LOINC_URI, "82810-3")) + .addOr(new TokenParam(LOINC_URI, "11636-8")) + .addOr(new TokenParam(LOINC_URI, "11637-6")) + .addOr(new TokenParam(LOINC_URI, "11638-4")) + .addOr(new TokenParam(LOINC_URI, "11639-2")) + .addOr(new TokenParam(LOINC_URI, "11640-0")) + .addOr(new TokenParam(LOINC_URI, "11612-9")) + .addOr(new TokenParam(LOINC_URI, "11613-7")) + .addOr(new TokenParam(LOINC_URI, "11614-5")) + .addOr(new TokenParam(LOINC_URI, "33065-4"))); return; } break; case MEDICATION_SUMMARY: if (theIpsSectionContext.getResourceType().equals(ResourceType.MedicationStatement.name())) { - theSearchParameterMap.add(MedicationStatement.SP_STATUS, new TokenOrListParam() - .addOr(new TokenParam(MedicationStatement.MedicationStatementStatus.ACTIVE.getSystem(), MedicationStatement.MedicationStatementStatus.ACTIVE.toCode())) - .addOr(new TokenParam(MedicationStatement.MedicationStatementStatus.INTENDED.getSystem(), MedicationStatement.MedicationStatementStatus.INTENDED.toCode())) - .addOr(new TokenParam(MedicationStatement.MedicationStatementStatus.UNKNOWN.getSystem(), MedicationStatement.MedicationStatementStatus.UNKNOWN.toCode())) - .addOr(new TokenParam(MedicationStatement.MedicationStatementStatus.ONHOLD.getSystem(), MedicationStatement.MedicationStatementStatus.ONHOLD.toCode())) - ); + theSearchParameterMap.add( + MedicationStatement.SP_STATUS, + new TokenOrListParam() + .addOr(new TokenParam( + 
MedicationStatement.MedicationStatementStatus.ACTIVE.getSystem(), + MedicationStatement.MedicationStatementStatus.ACTIVE.toCode())) + .addOr(new TokenParam( + MedicationStatement.MedicationStatementStatus.INTENDED.getSystem(), + MedicationStatement.MedicationStatementStatus.INTENDED.toCode())) + .addOr(new TokenParam( + MedicationStatement.MedicationStatementStatus.UNKNOWN.getSystem(), + MedicationStatement.MedicationStatementStatus.UNKNOWN.toCode())) + .addOr(new TokenParam( + MedicationStatement.MedicationStatementStatus.ONHOLD.getSystem(), + MedicationStatement.MedicationStatementStatus.ONHOLD.toCode()))); return; } else if (theIpsSectionContext.getResourceType().equals(ResourceType.MedicationRequest.name())) { - theSearchParameterMap.add(MedicationRequest.SP_STATUS, new TokenOrListParam() - .addOr(new TokenParam(MedicationRequest.MedicationRequestStatus.ACTIVE.getSystem(), MedicationRequest.MedicationRequestStatus.ACTIVE.toCode())) - .addOr(new TokenParam(MedicationRequest.MedicationRequestStatus.UNKNOWN.getSystem(), MedicationRequest.MedicationRequestStatus.UNKNOWN.toCode())) - .addOr(new TokenParam(MedicationRequest.MedicationRequestStatus.ONHOLD.getSystem(), MedicationRequest.MedicationRequestStatus.ONHOLD.toCode())) - ); + theSearchParameterMap.add( + MedicationRequest.SP_STATUS, + new TokenOrListParam() + .addOr(new TokenParam( + MedicationRequest.MedicationRequestStatus.ACTIVE.getSystem(), + MedicationRequest.MedicationRequestStatus.ACTIVE.toCode())) + .addOr(new TokenParam( + MedicationRequest.MedicationRequestStatus.UNKNOWN.getSystem(), + MedicationRequest.MedicationRequestStatus.UNKNOWN.toCode())) + .addOr(new TokenParam( + MedicationRequest.MedicationRequestStatus.ONHOLD.getSystem(), + MedicationRequest.MedicationRequestStatus.ONHOLD.toCode()))); return; - } else if (theIpsSectionContext.getResourceType().equals(ResourceType.MedicationAdministration.name())) { - theSearchParameterMap.add(MedicationAdministration.SP_STATUS, new TokenOrListParam() - .addOr(new TokenParam(MedicationAdministration.MedicationAdministrationStatus.INPROGRESS.getSystem(), MedicationAdministration.MedicationAdministrationStatus.INPROGRESS.toCode())) - .addOr(new TokenParam(MedicationAdministration.MedicationAdministrationStatus.UNKNOWN.getSystem(), MedicationAdministration.MedicationAdministrationStatus.UNKNOWN.toCode())) - .addOr(new TokenParam(MedicationAdministration.MedicationAdministrationStatus.ONHOLD.getSystem(), MedicationAdministration.MedicationAdministrationStatus.ONHOLD.toCode())) - ); + } else if (theIpsSectionContext + .getResourceType() + .equals(ResourceType.MedicationAdministration.name())) { + theSearchParameterMap.add( + MedicationAdministration.SP_STATUS, + new TokenOrListParam() + .addOr(new TokenParam( + MedicationAdministration.MedicationAdministrationStatus.INPROGRESS + .getSystem(), + MedicationAdministration.MedicationAdministrationStatus.INPROGRESS + .toCode())) + .addOr(new TokenParam( + MedicationAdministration.MedicationAdministrationStatus.UNKNOWN.getSystem(), + MedicationAdministration.MedicationAdministrationStatus.UNKNOWN.toCode())) + .addOr(new TokenParam( + MedicationAdministration.MedicationAdministrationStatus.ONHOLD.getSystem(), + MedicationAdministration.MedicationAdministrationStatus.ONHOLD.toCode()))); return; } else if (theIpsSectionContext.getResourceType().equals(ResourceType.MedicationDispense.name())) { - theSearchParameterMap.add(MedicationDispense.SP_STATUS, new TokenOrListParam() - .addOr(new 
TokenParam(MedicationDispense.MedicationDispenseStatus.INPROGRESS.getSystem(), MedicationDispense.MedicationDispenseStatus.INPROGRESS.toCode())) - .addOr(new TokenParam(MedicationDispense.MedicationDispenseStatus.UNKNOWN.getSystem(), MedicationDispense.MedicationDispenseStatus.UNKNOWN.toCode())) - .addOr(new TokenParam(MedicationDispense.MedicationDispenseStatus.ONHOLD.getSystem(), MedicationDispense.MedicationDispenseStatus.ONHOLD.toCode())) - ); + theSearchParameterMap.add( + MedicationDispense.SP_STATUS, + new TokenOrListParam() + .addOr(new TokenParam( + MedicationDispense.MedicationDispenseStatus.INPROGRESS.getSystem(), + MedicationDispense.MedicationDispenseStatus.INPROGRESS.toCode())) + .addOr(new TokenParam( + MedicationDispense.MedicationDispenseStatus.UNKNOWN.getSystem(), + MedicationDispense.MedicationDispenseStatus.UNKNOWN.toCode())) + .addOr(new TokenParam( + MedicationDispense.MedicationDispenseStatus.ONHOLD.getSystem(), + MedicationDispense.MedicationDispenseStatus.ONHOLD.toCode()))); return; } break; case PLAN_OF_CARE: if (theIpsSectionContext.getResourceType().equals(ResourceType.CarePlan.name())) { - theSearchParameterMap.add(CarePlan.SP_STATUS, new TokenOrListParam() - .addOr(new TokenParam(CarePlan.CarePlanStatus.ACTIVE.getSystem(), CarePlan.CarePlanStatus.ACTIVE.toCode())) - .addOr(new TokenParam(CarePlan.CarePlanStatus.ONHOLD.getSystem(), CarePlan.CarePlanStatus.ONHOLD.toCode())) - .addOr(new TokenParam(CarePlan.CarePlanStatus.UNKNOWN.getSystem(), CarePlan.CarePlanStatus.UNKNOWN.toCode())) - ); + theSearchParameterMap.add( + CarePlan.SP_STATUS, + new TokenOrListParam() + .addOr(new TokenParam( + CarePlan.CarePlanStatus.ACTIVE.getSystem(), + CarePlan.CarePlanStatus.ACTIVE.toCode())) + .addOr(new TokenParam( + CarePlan.CarePlanStatus.ONHOLD.getSystem(), + CarePlan.CarePlanStatus.ONHOLD.toCode())) + .addOr(new TokenParam( + CarePlan.CarePlanStatus.UNKNOWN.getSystem(), + CarePlan.CarePlanStatus.UNKNOWN.toCode()))); return; } break; case ADVANCE_DIRECTIVES: if (theIpsSectionContext.getResourceType().equals(ResourceType.Consent.name())) { - theSearchParameterMap.add(Consent.SP_STATUS, new TokenOrListParam() - .addOr(new TokenParam(Consent.ConsentState.ACTIVE.getSystem(), Consent.ConsentState.ACTIVE.toCode())) - ); + theSearchParameterMap.add( + Consent.SP_STATUS, + new TokenOrListParam() + .addOr(new TokenParam( + Consent.ConsentState.ACTIVE.getSystem(), + Consent.ConsentState.ACTIVE.toCode()))); return; } break; } // Shouldn't happen: This means none of the above switches handled the Section+resourceType combination - assert false : "Don't know how to handle " + theIpsSectionContext.getSection() + "/" + theIpsSectionContext.getResourceType(); + assert false + : "Don't know how to handle " + theIpsSectionContext.getSection() + "/" + + theIpsSectionContext.getResourceType(); } @Nonnull @@ -218,31 +276,21 @@ public class DefaultIpsGenerationStrategy implements IIpsGenerationStrategy { switch (theIpsSectionContext.getSection()) { case MEDICATION_SUMMARY: if (ResourceType.MedicationStatement.name().equals(theIpsSectionContext.getResourceType())) { - return Sets.newHashSet( - MedicationStatement.INCLUDE_MEDICATION - ); + return Sets.newHashSet(MedicationStatement.INCLUDE_MEDICATION); } if (ResourceType.MedicationRequest.name().equals(theIpsSectionContext.getResourceType())) { - return Sets.newHashSet( - MedicationRequest.INCLUDE_MEDICATION - ); + return Sets.newHashSet(MedicationRequest.INCLUDE_MEDICATION); } if 
(ResourceType.MedicationAdministration.name().equals(theIpsSectionContext.getResourceType())) { - return Sets.newHashSet( - MedicationAdministration.INCLUDE_MEDICATION - ); + return Sets.newHashSet(MedicationAdministration.INCLUDE_MEDICATION); } if (ResourceType.MedicationDispense.name().equals(theIpsSectionContext.getResourceType())) { - return Sets.newHashSet( - MedicationDispense.INCLUDE_MEDICATION - ); + return Sets.newHashSet(MedicationDispense.INCLUDE_MEDICATION); } break; case MEDICAL_DEVICES: if (ResourceType.DeviceUseStatement.name().equals(theIpsSectionContext.getResourceType())) { - return Sets.newHashSet( - DeviceUseStatement.INCLUDE_DEVICE - ); + return Sets.newHashSet(DeviceUseStatement.INCLUDE_DEVICE); } break; case ALLERGY_INTOLERANCE: @@ -274,17 +322,34 @@ public class DefaultIpsGenerationStrategy implements IIpsGenerationStrategy { case ALLERGY_INTOLERANCE: if (theIpsSectionContext.getResourceType().equals(ResourceType.AllergyIntolerance.name())) { AllergyIntolerance allergyIntolerance = (AllergyIntolerance) theCandidate; - return !allergyIntolerance.getClinicalStatus().hasCoding("http://terminology.hl7.org/CodeSystem/allergyintolerance-clinical", "inactive") - && !allergyIntolerance.getClinicalStatus().hasCoding("http://terminology.hl7.org/CodeSystem/allergyintolerance-clinical", "resolved") - && !allergyIntolerance.getVerificationStatus().hasCoding("http://terminology.hl7.org/CodeSystem/allergyintolerance-verification", "entered-in-error"); + return !allergyIntolerance + .getClinicalStatus() + .hasCoding( + "http://terminology.hl7.org/CodeSystem/allergyintolerance-clinical", + "inactive") + && !allergyIntolerance + .getClinicalStatus() + .hasCoding( + "http://terminology.hl7.org/CodeSystem/allergyintolerance-clinical", + "resolved") + && !allergyIntolerance + .getVerificationStatus() + .hasCoding( + "http://terminology.hl7.org/CodeSystem/allergyintolerance-verification", + "entered-in-error"); } break; case PROBLEM_LIST: if (theIpsSectionContext.getResourceType().equals(ResourceType.Condition.name())) { Condition prob = (Condition) theCandidate; - return !prob.getClinicalStatus().hasCoding("http://terminology.hl7.org/CodeSystem/condition-clinical", "inactive") - && !prob.getClinicalStatus().hasCoding("http://terminology.hl7.org/CodeSystem/condition-clinical", "resolved") - && !prob.getVerificationStatus().hasCoding("http://terminology.hl7.org/CodeSystem/condition-ver-status", "entered-in-error"); + return !prob.getClinicalStatus() + .hasCoding("http://terminology.hl7.org/CodeSystem/condition-clinical", "inactive") + && !prob.getClinicalStatus() + .hasCoding("http://terminology.hl7.org/CodeSystem/condition-clinical", "resolved") + && !prob.getVerificationStatus() + .hasCoding( + "http://terminology.hl7.org/CodeSystem/condition-ver-status", + "entered-in-error"); } break; case IMMUNIZATIONS: @@ -297,7 +362,7 @@ public class DefaultIpsGenerationStrategy implements IIpsGenerationStrategy { if (theIpsSectionContext.getResourceType().equals(ResourceType.Procedure.name())) { Procedure proc = (Procedure) theCandidate; return proc.getStatus() != Procedure.ProcedureStatus.ENTEREDINERROR - && proc.getStatus() != Procedure.ProcedureStatus.NOTDONE; + && proc.getStatus() != Procedure.ProcedureStatus.NOTDONE; } break; case MEDICAL_DEVICES: @@ -326,12 +391,21 @@ public class DefaultIpsGenerationStrategy implements IIpsGenerationStrategy { case ILLNESS_HISTORY: if (theIpsSectionContext.getResourceType().equals(ResourceType.Condition.name())) { Condition prob = (Condition) theCandidate; 
- if (prob.getVerificationStatus().hasCoding("http://terminology.hl7.org/CodeSystem/condition-ver-status", "entered-in-error")) { + if (prob.getVerificationStatus() + .hasCoding( + "http://terminology.hl7.org/CodeSystem/condition-ver-status", "entered-in-error")) { return false; } else { - return prob.getClinicalStatus().hasCoding("http://terminology.hl7.org/CodeSystem/condition-clinical", "inactive") - || prob.getClinicalStatus().hasCoding("http://terminology.hl7.org/CodeSystem/condition-clinical", "resolved") - || prob.getClinicalStatus().hasCoding("http://terminology.hl7.org/CodeSystem/condition-clinical", "remission"); + return prob.getClinicalStatus() + .hasCoding( + "http://terminology.hl7.org/CodeSystem/condition-clinical", "inactive") + || prob.getClinicalStatus() + .hasCoding( + "http://terminology.hl7.org/CodeSystem/condition-clinical", "resolved") + || prob.getClinicalStatus() + .hasCoding( + "http://terminology.hl7.org/CodeSystem/condition-clinical", + "remission"); } } break; @@ -353,12 +427,12 @@ public class DefaultIpsGenerationStrategy implements IIpsGenerationStrategy { if (theIpsSectionContext.getResourceType().equals(ResourceType.ClinicalImpression.name())) { ClinicalImpression clinicalImpression = (ClinicalImpression) theCandidate; return clinicalImpression.getStatus() != ClinicalImpression.ClinicalImpressionStatus.INPROGRESS - && clinicalImpression.getStatus() != ClinicalImpression.ClinicalImpressionStatus.ENTEREDINERROR; + && clinicalImpression.getStatus() + != ClinicalImpression.ClinicalImpressionStatus.ENTEREDINERROR; } break; } return true; } - } diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/broker/MdmMessageHandler.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/broker/MdmMessageHandler.java index a04e45b5e4e..2a266400ff5 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/broker/MdmMessageHandler.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/broker/MdmMessageHandler.java @@ -54,14 +54,19 @@ public class MdmMessageHandler implements MessageHandler { @Autowired private MdmMatchLinkSvc myMdmMatchLinkSvc; + @Autowired private IInterceptorBroadcaster myInterceptorBroadcaster; + @Autowired private FhirContext myFhirContext; + @Autowired private MdmResourceFilteringSvc myMdmResourceFilteringSvc; + @Autowired private IMdmSettings myMdmSettings; + @Autowired private IMdmModelConverterSvc myModelConverter; @@ -96,7 +101,7 @@ public class MdmMessageHandler implements MessageHandler { String resourceType = theSourceResource.getIdElement().getResourceType(); validateResourceType(resourceType); - if (myInterceptorBroadcaster.hasHooks(Pointcut.MDM_BEFORE_PERSISTED_RESOURCE_CHECKED)){ + if (myInterceptorBroadcaster.hasHooks(Pointcut.MDM_BEFORE_PERSISTED_RESOURCE_CHECKED)) { HookParams params = new HookParams().add(IBaseResource.class, theSourceResource); myInterceptorBroadcaster.callHooks(Pointcut.MDM_BEFORE_PERSISTED_RESOURCE_CHECKED, params); } @@ -123,16 +128,17 @@ public class MdmMessageHandler implements MessageHandler { } finally { // Interceptor call: MDM_AFTER_PERSISTED_RESOURCE_CHECKED HookParams params = new HookParams() - .add(ResourceOperationMessage.class, getOutgoingMessage(theMsg)) - .add(TransactionLogMessages.class, mdmContext.getTransactionLogMessages()) - .add(MdmLinkEvent.class, buildLinkChangeEvent(mdmContext)); + .add(ResourceOperationMessage.class, getOutgoingMessage(theMsg)) + .add(TransactionLogMessages.class, mdmContext.getTransactionLogMessages()) + 
.add(MdmLinkEvent.class, buildLinkChangeEvent(mdmContext)); myInterceptorBroadcaster.callHooks(Pointcut.MDM_AFTER_PERSISTED_RESOURCE_CHECKED, params); } } private MdmTransactionContext createMdmContext(ResourceModifiedMessage theMsg, String theResourceType) { - TransactionLogMessages transactionLogMessages = TransactionLogMessages.createFromTransactionGuid(theMsg.getTransactionId()); + TransactionLogMessages transactionLogMessages = + TransactionLogMessages.createFromTransactionGuid(theMsg.getTransactionId()); MdmTransactionContext.OperationType mdmOperation; switch (theMsg.getOperationType()) { case CREATE: @@ -147,23 +153,25 @@ public class MdmMessageHandler implements MessageHandler { case DELETE: default: ourLog.trace("Not creating an MdmTransactionContext for {}", theMsg.getOperationType()); - throw new InvalidRequestException(Msg.code(734) + "We can't handle non-update/create operations in MDM"); + throw new InvalidRequestException( + Msg.code(734) + "We can't handle non-update/create operations in MDM"); } return new MdmTransactionContext(transactionLogMessages, mdmOperation, theResourceType); } private void validateResourceType(String theResourceType) { if (!myMdmSettings.isSupportedMdmType(theResourceType)) { - throw new IllegalStateException(Msg.code(735) + "Unsupported resource type submitted to MDM matching queue: " + theResourceType); + throw new IllegalStateException( + Msg.code(735) + "Unsupported resource type submitted to MDM matching queue: " + theResourceType); } } private void handleCreateResource(IBaseResource theResource, MdmTransactionContext theMdmTransactionContext) { - myMdmMatchLinkSvc.updateMdmLinksForMdmSource((IAnyResource)theResource, theMdmTransactionContext); + myMdmMatchLinkSvc.updateMdmLinksForMdmSource((IAnyResource) theResource, theMdmTransactionContext); } private void handleUpdateResource(IBaseResource theResource, MdmTransactionContext theMdmTransactionContext) { - myMdmMatchLinkSvc.updateMdmLinksForMdmSource((IAnyResource)theResource, theMdmTransactionContext); + myMdmMatchLinkSvc.updateMdmLinksForMdmSource((IAnyResource) theResource, theMdmTransactionContext); } private void log(MdmTransactionContext theMdmContext, String theMessage, Exception theException) { @@ -173,21 +181,19 @@ public class MdmMessageHandler implements MessageHandler { private MdmLinkEvent buildLinkChangeEvent(MdmTransactionContext theMdmContext) { MdmLinkEvent linkChangeEvent = new MdmLinkEvent(); - theMdmContext.getMdmLinks() - .stream() - .forEach(l -> { - linkChangeEvent.addMdmLink(myModelConverter.toJson(l)); - }); + theMdmContext.getMdmLinks().stream().forEach(l -> { + linkChangeEvent.addMdmLink(myModelConverter.toJson(l)); + }); return linkChangeEvent; } private ResourceOperationMessage getOutgoingMessage(ResourceModifiedMessage theMsg) { IBaseResource targetResource = theMsg.getPayload(myFhirContext); - ResourceOperationMessage outgoingMsg = new ResourceOperationMessage(myFhirContext, targetResource, theMsg.getOperationType()); + ResourceOperationMessage outgoingMsg = + new ResourceOperationMessage(myFhirContext, targetResource, theMsg.getOperationType()); outgoingMsg.setTransactionId(theMsg.getTransactionId()); return outgoingMsg; } - } diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/broker/MdmMessageKeySvc.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/broker/MdmMessageKeySvc.java index e0f92fe0588..5f3f1c381f3 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/broker/MdmMessageKeySvc.java +++ 
b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/broker/MdmMessageKeySvc.java @@ -26,8 +26,8 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; -import javax.annotation.Nullable; import java.util.List; +import javax.annotation.Nullable; @Service public class MdmMessageKeySvc implements ISubscriptionMessageKeySvc { diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/broker/MdmQueueConsumerLoader.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/broker/MdmQueueConsumerLoader.java index 310fa64e846..1acf874d65d 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/broker/MdmQueueConsumerLoader.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/broker/MdmQueueConsumerLoader.java @@ -41,7 +41,8 @@ public class MdmQueueConsumerLoader { protected IChannelReceiver myMdmChannel; - public MdmQueueConsumerLoader(IChannelFactory theChannelFactory, IMdmSettings theMdmSettings, MdmMessageHandler theMdmMessageHandler) { + public MdmQueueConsumerLoader( + IChannelFactory theChannelFactory, IMdmSettings theMdmSettings, MdmMessageHandler theMdmMessageHandler) { myChannelFactory = theChannelFactory; myMdmSettings = theMdmSettings; myMdmMessageHandler = theMdmMessageHandler; @@ -49,19 +50,22 @@ public class MdmQueueConsumerLoader { startListeningToMdmChannel(); } - private void startListeningToMdmChannel() { if (myMdmChannel == null) { ChannelConsumerSettings config = new ChannelConsumerSettings(); - + config.setConcurrentConsumers(myMdmSettings.getConcurrentConsumers()); - myMdmChannel = myChannelFactory.getOrCreateReceiver(IMdmSettings.EMPI_CHANNEL_NAME, ResourceModifiedJsonMessage.class, config); + myMdmChannel = myChannelFactory.getOrCreateReceiver( + IMdmSettings.EMPI_CHANNEL_NAME, ResourceModifiedJsonMessage.class, config); if (myMdmChannel == null) { ourLog.error("Unable to create receiver for {}", IMdmSettings.EMPI_CHANNEL_NAME); } else { myMdmChannel.subscribe(myMdmMessageHandler); - ourLog.info("MDM Matching Consumer subscribed to Matching Channel {} with name {}", myMdmChannel.getClass().getName(), myMdmChannel.getName()); + ourLog.info( + "MDM Matching Consumer subscribed to Matching Channel {} with name {}", + myMdmChannel.getClass().getName(), + myMdmChannel.getName()); } } } @@ -72,7 +76,10 @@ public class MdmQueueConsumerLoader { if (myMdmChannel != null) { // JMS channel needs to be destroyed to avoid dangling receivers myMdmChannel.destroy(); - ourLog.info("MDM Matching Consumer unsubscribed from Matching Channel {} with name {}", myMdmChannel.getClass().getName(), myMdmChannel.getName()); + ourLog.info( + "MDM Matching Consumer unsubscribed from Matching Channel {} with name {}", + myMdmChannel.getClass().getName(), + myMdmChannel.getName()); } } diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmCommonConfig.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmCommonConfig.java index f8e593db87b..2daa3d9ec20 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmCommonConfig.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmCommonConfig.java @@ -39,6 +39,7 @@ public class MdmCommonConfig { MdmRuleValidator mdmRuleValidator(FhirContext theFhirContext, ISearchParamRegistry theSearchParamRetriever) { return new MdmRuleValidator(theFhirContext, theSearchParamRetriever); } + @Bean @Lazy public 
MdmSearchExpandingInterceptor mdmSearchExpandingInterceptor() { @@ -53,24 +54,14 @@ public class MdmCommonConfig { @Bean @Lazy MdmResourceMatcherSvc mdmResourceComparatorSvc( - FhirContext theFhirContext, - IMatcherFactory theIMatcherFactory, - IMdmSettings theMdmSettings - ) { + FhirContext theFhirContext, IMatcherFactory theIMatcherFactory, IMdmSettings theMdmSettings) { return new MdmResourceMatcherSvc(theFhirContext, theIMatcherFactory, theMdmSettings); } @Bean @Lazy public IMatcherFactory matcherFactory( - FhirContext theFhirContext, - IMdmSettings theSettings, - INicknameSvc theNicknameSvc - ) { - return new MdmMatcherFactory( - theFhirContext, - theSettings, - theNicknameSvc - ); + FhirContext theFhirContext, IMdmSettings theSettings, INicknameSvc theNicknameSvc) { + return new MdmMatcherFactory(theFhirContext, theSettings, theNicknameSvc); } } diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmConsumerConfig.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmConsumerConfig.java index 6e67cb21839..6c963e29b75 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmConsumerConfig.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmConsumerConfig.java @@ -94,7 +94,8 @@ public class MdmConsumerConfig { } @Bean - MdmQueueConsumerLoader mdmQueueConsumerLoader(IChannelFactory theChannelFactory, IMdmSettings theMdmSettings, MdmMessageHandler theMdmMessageHandler) { + MdmQueueConsumerLoader mdmQueueConsumerLoader( + IChannelFactory theChannelFactory, IMdmSettings theMdmSettings, MdmMessageHandler theMdmMessageHandler) { return new MdmQueueConsumerLoader(theChannelFactory, theMdmSettings, theMdmMessageHandler); } @@ -168,7 +169,6 @@ public class MdmConsumerConfig { return new MdmProviderLoader(); } - @Bean IMdmMatchFinderSvc mdmMatchFinderSvc() { return new MdmMatchFinderSvcImpl(); @@ -179,7 +179,6 @@ public class MdmConsumerConfig { return new GoldenResourceMergerSvcImpl(); } - @Bean IMdmLinkQuerySvc mdmLinkQuerySvc() { return new MdmLinkQuerySvcImplSvc(); @@ -190,14 +189,14 @@ public class MdmConsumerConfig { return new MdmModelConverterSvcImpl(); } - @Bean MdmCandidateSearchSvc mdmCandidateSearchSvc() { return new MdmCandidateSearchSvc(); } @Bean - CandidateSearcher candidateSearcher(DaoRegistry theDaoRegistry, IMdmSettings theMdmSettings, MdmSearchParamSvc theMdmSearchParamSvc) { + CandidateSearcher candidateSearcher( + DaoRegistry theDaoRegistry, IMdmSettings theMdmSettings, MdmSearchParamSvc theMdmSearchParamSvc) { return new CandidateSearcher(theDaoRegistry, theMdmSettings, theMdmSearchParamSvc); } @@ -231,7 +230,6 @@ public class MdmConsumerConfig { return new MdmLinkCreateSvcImpl(); } - @Bean MdmLoader mdmLoader() { return new MdmLoader(); @@ -243,18 +241,20 @@ public class MdmConsumerConfig { } @Bean - MdmControllerHelper mdmProviderHelper(FhirContext theFhirContext, - IResourceLoader theResourceLoader, - IMdmSettings theMdmSettings, - IMdmMatchFinderSvc theMdmMatchFinderSvc, - MessageHelper messageHelper, - IRequestPartitionHelperSvc partitionHelperSvc) { - return new MdmControllerHelper(theFhirContext, - theResourceLoader, - theMdmMatchFinderSvc, - theMdmSettings, - messageHelper, - partitionHelperSvc); + MdmControllerHelper mdmProviderHelper( + FhirContext theFhirContext, + IResourceLoader theResourceLoader, + IMdmSettings theMdmSettings, + IMdmMatchFinderSvc theMdmMatchFinderSvc, + MessageHelper messageHelper, + IRequestPartitionHelperSvc partitionHelperSvc) { + return new 
MdmControllerHelper( + theFhirContext, + theResourceLoader, + theMdmMatchFinderSvc, + theMdmSettings, + messageHelper, + partitionHelperSvc); } @Bean @@ -263,8 +263,7 @@ public class MdmConsumerConfig { } @Bean - MdmPartitionHelper mdmPartitionHelper(MessageHelper theMessageHelper, - IMdmSettings theMdmSettings) { + MdmPartitionHelper mdmPartitionHelper(MessageHelper theMessageHelper, IMdmSettings theMdmSettings) { return new MdmPartitionHelper(theMessageHelper, theMdmSettings); } diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmLoader.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmLoader.java index 8423514526d..83b0e0c4ed1 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmLoader.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmLoader.java @@ -36,8 +36,10 @@ public class MdmLoader { @Autowired IMdmSettings myMdmSettings; + @Autowired MdmProviderLoader myMdmProviderLoader; + @Autowired MdmSubscriptionLoader myMdmSubscriptionLoader; diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmSubscriptionLoader.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmSubscriptionLoader.java index 4079b0192d2..4cf52bb8f8a 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmSubscriptionLoader.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/config/MdmSubscriptionLoader.java @@ -49,44 +49,50 @@ public class MdmSubscriptionLoader { public static final String MDM_SUBSCIPRION_ID_PREFIX = "mdm-"; private static final Logger ourLog = Logs.getMdmTroubleshootingLog(); + @Autowired public FhirContext myFhirContext; + @Autowired public DaoRegistry myDaoRegistry; + @Autowired IChannelNamer myChannelNamer; + @Autowired private SubscriptionLoader mySubscriptionLoader; + @Autowired private IMdmSettings myMdmSettings; private IFhirResourceDao mySubscriptionDao; - synchronized public void daoUpdateMdmSubscriptions() { + public synchronized void daoUpdateMdmSubscriptions() { List subscriptions; List mdmResourceTypes = myMdmSettings.getMdmRules().getMdmTypes(); switch (myFhirContext.getVersion().getVersion()) { case DSTU3: - subscriptions = mdmResourceTypes - .stream() - .map(resourceType -> buildMdmSubscriptionDstu3(MDM_SUBSCIPRION_ID_PREFIX + resourceType, resourceType + "?")) - .collect(Collectors.toList()); + subscriptions = mdmResourceTypes.stream() + .map(resourceType -> + buildMdmSubscriptionDstu3(MDM_SUBSCIPRION_ID_PREFIX + resourceType, resourceType + "?")) + .collect(Collectors.toList()); break; case R4: - subscriptions = mdmResourceTypes - .stream() - .map(resourceType -> buildMdmSubscriptionR4(MDM_SUBSCIPRION_ID_PREFIX + resourceType, resourceType + "?")) - .collect(Collectors.toList()); + subscriptions = mdmResourceTypes.stream() + .map(resourceType -> + buildMdmSubscriptionR4(MDM_SUBSCIPRION_ID_PREFIX + resourceType, resourceType + "?")) + .collect(Collectors.toList()); break; default: - throw new ConfigurationException(Msg.code(736) + "MDM not supported for FHIR version " + myFhirContext.getVersion().getVersion()); + throw new ConfigurationException(Msg.code(736) + "MDM not supported for FHIR version " + + myFhirContext.getVersion().getVersion()); } mySubscriptionDao = myDaoRegistry.getResourceDao("Subscription"); for (IBaseResource subscription : subscriptions) { updateIfNotPresent(subscription); } - //After loading all the subscriptions, sync the subscriptions to the registry. 
+ // After loading all the subscriptions, sync the subscriptions to the registry. if (subscriptions != null && subscriptions.size() > 0) { mySubscriptionLoader.syncDatabaseToCache(); } @@ -107,11 +113,17 @@ public class MdmSubscriptionLoader { retval.setReason("MDM"); retval.setStatus(org.hl7.fhir.dstu3.model.Subscription.SubscriptionStatus.REQUESTED); retval.setCriteria(theCriteria); - retval.getMeta().addTag().setSystem(MdmConstants.SYSTEM_MDM_MANAGED).setCode(MdmConstants.CODE_HAPI_MDM_MANAGED); - retval.addExtension().setUrl(HapiExtensions.EXTENSION_SUBSCRIPTION_CROSS_PARTITION).setValue(new org.hl7.fhir.dstu3.model.BooleanType().setValue(true)); + retval.getMeta() + .addTag() + .setSystem(MdmConstants.SYSTEM_MDM_MANAGED) + .setCode(MdmConstants.CODE_HAPI_MDM_MANAGED); + retval.addExtension() + .setUrl(HapiExtensions.EXTENSION_SUBSCRIPTION_CROSS_PARTITION) + .setValue(new org.hl7.fhir.dstu3.model.BooleanType().setValue(true)); org.hl7.fhir.dstu3.model.Subscription.SubscriptionChannelComponent channel = retval.getChannel(); channel.setType(org.hl7.fhir.dstu3.model.Subscription.SubscriptionChannelType.MESSAGE); - channel.setEndpoint("channel:" + myChannelNamer.getChannelName(IMdmSettings.EMPI_CHANNEL_NAME, new ChannelProducerSettings())); + channel.setEndpoint("channel:" + + myChannelNamer.getChannelName(IMdmSettings.EMPI_CHANNEL_NAME, new ChannelProducerSettings())); channel.setPayload("application/json"); return retval; } @@ -122,11 +134,17 @@ public class MdmSubscriptionLoader { retval.setReason("MDM"); retval.setStatus(Subscription.SubscriptionStatus.REQUESTED); retval.setCriteria(theCriteria); - retval.getMeta().addTag().setSystem(MdmConstants.SYSTEM_MDM_MANAGED).setCode(MdmConstants.CODE_HAPI_MDM_MANAGED); - retval.addExtension().setUrl(HapiExtensions.EXTENSION_SUBSCRIPTION_CROSS_PARTITION).setValue(new BooleanType().setValue(true)); + retval.getMeta() + .addTag() + .setSystem(MdmConstants.SYSTEM_MDM_MANAGED) + .setCode(MdmConstants.CODE_HAPI_MDM_MANAGED); + retval.addExtension() + .setUrl(HapiExtensions.EXTENSION_SUBSCRIPTION_CROSS_PARTITION) + .setValue(new BooleanType().setValue(true)); Subscription.SubscriptionChannelComponent channel = retval.getChannel(); channel.setType(Subscription.SubscriptionChannelType.MESSAGE); - channel.setEndpoint("channel:" + myChannelNamer.getChannelName(IMdmSettings.EMPI_CHANNEL_NAME, new ChannelProducerSettings())); + channel.setEndpoint("channel:" + + myChannelNamer.getChannelName(IMdmSettings.EMPI_CHANNEL_NAME, new ChannelProducerSettings())); channel.setPayload("application/json"); return retval; } diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java index f2e79de7983..167dfe55e8f 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java @@ -46,12 +46,12 @@ import org.springframework.data.history.Revisions; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.Collections; import java.util.Date; import java.util.List; import java.util.Optional; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class MdmLinkDaoSvc
    > { @@ -59,32 +59,47 @@ public class MdmLinkDaoSvc
    myMdmLinkDao; + @Autowired private MdmLinkFactory myMdmLinkFactory; + @Autowired private IIdHelperService
    myIdHelperService; + @Autowired private FhirContext myFhirContext; @Transactional - public M createOrUpdateLinkEntity(IAnyResource theGoldenResource, IAnyResource theSourceResource, MdmMatchOutcome theMatchOutcome, MdmLinkSourceEnum theLinkSource, @Nullable MdmTransactionContext theMdmTransactionContext) { + public M createOrUpdateLinkEntity( + IAnyResource theGoldenResource, + IAnyResource theSourceResource, + MdmMatchOutcome theMatchOutcome, + MdmLinkSourceEnum theLinkSource, + @Nullable MdmTransactionContext theMdmTransactionContext) { M mdmLink = getOrCreateMdmLinkByGoldenResourceAndSourceResource(theGoldenResource, theSourceResource); mdmLink.setLinkSource(theLinkSource); mdmLink.setMatchResult(theMatchOutcome.getMatchResultEnum()); // Preserve these flags for link updates mdmLink.setEidMatch(theMatchOutcome.isEidMatch() | mdmLink.isEidMatchPresent()); - mdmLink.setHadToCreateNewGoldenResource(theMatchOutcome.isCreatedNewResource() | mdmLink.getHadToCreateNewGoldenResource()); + mdmLink.setHadToCreateNewGoldenResource( + theMatchOutcome.isCreatedNewResource() | mdmLink.getHadToCreateNewGoldenResource()); mdmLink.setMdmSourceType(myFhirContext.getResourceType(theSourceResource)); setScoreProperties(theMatchOutcome, mdmLink); // Add partition for the mdm link if it's available in the source resource - RequestPartitionId partitionId = (RequestPartitionId) theSourceResource.getUserData(Constants.RESOURCE_PARTITION_ID); + RequestPartitionId partitionId = + (RequestPartitionId) theSourceResource.getUserData(Constants.RESOURCE_PARTITION_ID); if (partitionId != null && partitionId.getFirstPartitionIdOrNull() != null) { - mdmLink.setPartitionId(new PartitionablePartitionId(partitionId.getFirstPartitionIdOrNull(), partitionId.getPartitionDate())); + mdmLink.setPartitionId(new PartitionablePartitionId( + partitionId.getFirstPartitionIdOrNull(), partitionId.getPartitionDate())); } - String message = String.format("Creating %s link from %s to Golden Resource %s.", mdmLink.getMatchResult(), theSourceResource.getIdElement().toUnqualifiedVersionless(), theGoldenResource.getIdElement().toUnqualifiedVersionless()); + String message = String.format( + "Creating %s link from %s to Golden Resource %s.", + mdmLink.getMatchResult(), + theSourceResource.getIdElement().toUnqualifiedVersionless(), + theGoldenResource.getIdElement().toUnqualifiedVersionless()); theMdmTransactionContext.addTransactionLogMessage(message); ourLog.debug(message); save(mdmLink); @@ -93,26 +108,28 @@ public class MdmLinkDaoSvc
    oExisting = getLinkByGoldenResourcePidAndSourceResourcePid(goldenResourcePid, sourceResourcePid); @@ -135,8 +152,10 @@ public class MdmLinkDaoSvc
    getLinkByGoldenResourcePidAndSourceResourcePid(Long theGoldenResourcePid, Long theSourceResourcePid) { - return getLinkByGoldenResourcePidAndSourceResourcePid(myIdHelperService.newPid(theGoldenResourcePid), myIdHelperService.newPid(theSourceResourcePid)); + public Optional getLinkByGoldenResourcePidAndSourceResourcePid( + Long theGoldenResourcePid, Long theSourceResourcePid) { + return getLinkByGoldenResourcePidAndSourceResourcePid( + myIdHelperService.newPid(theGoldenResourcePid), myIdHelperService.newPid(theSourceResourcePid)); } /** @@ -153,7 +172,7 @@ public class MdmLinkDaoSvc
    example = Example.of(link); return myMdmLinkDao.findOne(example); @@ -224,13 +243,14 @@ public class MdmLinkDaoSvc
    getMdmLinksByGoldenResourcePidSourcePidAndMatchResult(Long theGoldenResourcePid, - Long theSourcePid, MdmMatchResultEnum theMatchResult) { - return getMdmLinksByGoldenResourcePidSourcePidAndMatchResult(myIdHelperService.newPid(theGoldenResourcePid), myIdHelperService.newPid(theSourcePid), theMatchResult); + public Optional getMdmLinksByGoldenResourcePidSourcePidAndMatchResult( + Long theGoldenResourcePid, Long theSourcePid, MdmMatchResultEnum theMatchResult) { + return getMdmLinksByGoldenResourcePidSourcePidAndMatchResult( + myIdHelperService.newPid(theGoldenResourcePid), myIdHelperService.newPid(theSourcePid), theMatchResult); } - public Optional getMdmLinksByGoldenResourcePidSourcePidAndMatchResult(P theGoldenResourcePid, - P theSourcePid, MdmMatchResultEnum theMatchResult) { + public Optional getMdmLinksByGoldenResourcePidSourcePidAndMatchResult( + P theGoldenResourcePid, P theSourcePid, MdmMatchResultEnum theMatchResult) { M exampleLink = myMdmLinkFactory.newMdmLinkVersionless(); exampleLink.setGoldenResourcePersistenceId(theGoldenResourcePid); exampleLink.setSourcePersistenceId(theSourcePid); @@ -261,7 +281,6 @@ public class MdmLinkDaoSvc
    example = Example.of(exampleLink); return myMdmLinkDao.findOne(example); - } /** * Delete a given {@link IMdmLink}. Note that this does not clear out the Golden resource. @@ -375,13 +394,14 @@ public class MdmLinkDaoSvc
    getLinkByGoldenResourceAndSourceResource(@Nullable IAnyResource theGoldenResource, @Nullable IAnyResource theSourceResource) { + public Optional getLinkByGoldenResourceAndSourceResource( + @Nullable IAnyResource theGoldenResource, @Nullable IAnyResource theSourceResource) { if (theGoldenResource == null || theSourceResource == null) { return Optional.empty(); } return getLinkByGoldenResourcePidAndSourceResourcePid( - myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), theGoldenResource), - myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), theSourceResource)); + myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), theGoldenResource), + myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), theSourceResource)); } @Transactional(propagation = Propagation.MANDATORY) diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/GoldenResourceMergerSvcImpl.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/GoldenResourceMergerSvcImpl.java index 550e9fd475e..b71ddbadf7e 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/GoldenResourceMergerSvcImpl.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/GoldenResourceMergerSvcImpl.java @@ -23,7 +23,6 @@ import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; import ca.uhn.fhir.jpa.mdm.dao.MdmLinkDaoSvc; -import ca.uhn.fhir.mdm.util.MdmPartitionHelper; import ca.uhn.fhir.mdm.api.IGoldenResourceMergerSvc; import ca.uhn.fhir.mdm.api.IMdmLink; import ca.uhn.fhir.mdm.api.IMdmLinkSvc; @@ -33,6 +32,7 @@ import ca.uhn.fhir.mdm.api.MdmMatchResultEnum; import ca.uhn.fhir.mdm.log.Logs; import ca.uhn.fhir.mdm.model.MdmTransactionContext; import ca.uhn.fhir.mdm.util.GoldenResourceHelper; +import ca.uhn.fhir.mdm.util.MdmPartitionHelper; import ca.uhn.fhir.mdm.util.MdmResourceUtil; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; @@ -55,57 +55,79 @@ public class GoldenResourceMergerSvcImpl implements IGoldenResourceMergerSvc { @Autowired GoldenResourceHelper myGoldenResourceHelper; + @Autowired MdmLinkDaoSvc myMdmLinkDaoSvc; + @Autowired IMdmLinkSvc myMdmLinkSvc; + @Autowired IIdHelperService myIdHelperService; + @Autowired MdmResourceDaoSvc myMdmResourceDaoSvc; + @Autowired MdmPartitionHelper myMdmPartitionHelper; @Override @Transactional - public IAnyResource mergeGoldenResources(IAnyResource theFromGoldenResource, IAnyResource theMergedResource, IAnyResource theToGoldenResource, MdmTransactionContext theMdmTransactionContext) { + public IAnyResource mergeGoldenResources( + IAnyResource theFromGoldenResource, + IAnyResource theMergedResource, + IAnyResource theToGoldenResource, + MdmTransactionContext theMdmTransactionContext) { String resourceType = theMdmTransactionContext.getResourceType(); if (theMergedResource != null) { if (myGoldenResourceHelper.hasIdentifier(theMergedResource)) { - throw new IllegalArgumentException(Msg.code(751) + "Manually merged resource can not contain identifiers"); + throw new IllegalArgumentException( + Msg.code(751) + "Manually merged resource can not contain identifiers"); } - myGoldenResourceHelper.mergeIndentifierFields(theFromGoldenResource, theMergedResource, theMdmTransactionContext); - myGoldenResourceHelper.mergeIndentifierFields(theToGoldenResource, theMergedResource, theMdmTransactionContext); + myGoldenResourceHelper.mergeIndentifierFields( + 
theFromGoldenResource, theMergedResource, theMdmTransactionContext); + myGoldenResourceHelper.mergeIndentifierFields( + theToGoldenResource, theMergedResource, theMdmTransactionContext); theMergedResource.setId(theToGoldenResource.getId()); - theToGoldenResource = (IAnyResource) myMdmResourceDaoSvc.upsertGoldenResource(theMergedResource, resourceType).getResource(); + theToGoldenResource = (IAnyResource) myMdmResourceDaoSvc + .upsertGoldenResource(theMergedResource, resourceType) + .getResource(); } else { - myGoldenResourceHelper.mergeIndentifierFields(theFromGoldenResource, theToGoldenResource, theMdmTransactionContext); - myGoldenResourceHelper.mergeNonIdentiferFields(theFromGoldenResource, theToGoldenResource, theMdmTransactionContext); - //Save changes to the golden resource + myGoldenResourceHelper.mergeIndentifierFields( + theFromGoldenResource, theToGoldenResource, theMdmTransactionContext); + myGoldenResourceHelper.mergeNonIdentiferFields( + theFromGoldenResource, theToGoldenResource, theMdmTransactionContext); + // Save changes to the golden resource myMdmResourceDaoSvc.upsertGoldenResource(theToGoldenResource, resourceType); } myMdmPartitionHelper.validateMdmResourcesPartitionMatches(theFromGoldenResource, theToGoldenResource); - //Merge the links from the FROM to the TO resource. Clean up dangling links. - mergeGoldenResourceLinks(theFromGoldenResource, theToGoldenResource, theFromGoldenResource.getIdElement(), theMdmTransactionContext); + // Merge the links from the FROM to the TO resource. Clean up dangling links. + mergeGoldenResourceLinks( + theFromGoldenResource, + theToGoldenResource, + theFromGoldenResource.getIdElement(), + theMdmTransactionContext); - //Create the new REDIRECT link + // Create the new REDIRECT link addMergeLink(theToGoldenResource, theFromGoldenResource, resourceType, theMdmTransactionContext); - //Strip the golden resource tag from the now-deprecated resource. + // Strip the golden resource tag from the now-deprecated resource. myMdmResourceDaoSvc.removeGoldenResourceTag(theFromGoldenResource, resourceType); - //Add the REDIRECT tag to that same deprecated resource. + // Add the REDIRECT tag to that same deprecated resource. MdmResourceUtil.setGoldenResourceRedirected(theFromGoldenResource); - //Save the deprecated resource. + // Save the deprecated resource. 
myMdmResourceDaoSvc.upsertGoldenResource(theFromGoldenResource, resourceType); - log(theMdmTransactionContext, "Merged " + theFromGoldenResource.getIdElement().toVersionless() - + " into " + theToGoldenResource.getIdElement().toVersionless()); + log( + theMdmTransactionContext, + "Merged " + theFromGoldenResource.getIdElement().toVersionless() + " into " + + theToGoldenResource.getIdElement().toVersionless()); return theToGoldenResource; } @@ -122,21 +144,19 @@ public class GoldenResourceMergerSvcImpl implements IGoldenResourceMergerSvc { * GR -> TR */ private void addMergeLink( - IAnyResource theGoldenResource, - IAnyResource theTargetResource, - String theResourceType, - MdmTransactionContext theMdmTransactionContext - ) { - myMdmLinkSvc.deleteLink(theGoldenResource, theTargetResource, - theMdmTransactionContext); + IAnyResource theGoldenResource, + IAnyResource theTargetResource, + String theResourceType, + MdmTransactionContext theMdmTransactionContext) { + myMdmLinkSvc.deleteLink(theGoldenResource, theTargetResource, theMdmTransactionContext); myMdmLinkDaoSvc.createOrUpdateLinkEntity( - theTargetResource, // golden / LHS - theGoldenResource, // source / RHS - new MdmMatchOutcome(null, null).setMatchResultEnum(MdmMatchResultEnum.REDIRECT), - MdmLinkSourceEnum.MANUAL, - theMdmTransactionContext // mdm transaction context - ); + theTargetResource, // golden / LHS + theGoldenResource, // source / RHS + new MdmMatchOutcome(null, null).setMatchResultEnum(MdmMatchResultEnum.REDIRECT), + MdmLinkSourceEnum.MANUAL, + theMdmTransactionContext // mdm transaction context + ); } private RequestPartitionId getPartitionIdForResource(IAnyResource theResource) { @@ -165,11 +185,10 @@ public class GoldenResourceMergerSvcImpl implements IGoldenResourceMergerSvc { * @param theMdmTransactionContext */ private void mergeGoldenResourceLinks( - IAnyResource theFromResource, - IAnyResource theToResource, - IIdType theToResourcePid, - MdmTransactionContext theMdmTransactionContext - ) { + IAnyResource theFromResource, + IAnyResource theToResource, + IIdType theToResourcePid, + MdmTransactionContext theMdmTransactionContext) { // fromLinks - links from theFromResource to any resource List fromLinks = myMdmLinkDaoSvc.findMdmLinksByGoldenResource(theFromResource); // toLinks - links from theToResource to any resource @@ -177,10 +196,9 @@ public class GoldenResourceMergerSvcImpl implements IGoldenResourceMergerSvc { List toDelete = new ArrayList<>(); IResourcePersistentId goldenResourcePid = myIdHelperService.resolveResourcePersistentIds( - getPartitionIdForResource(theToResource), - theToResource.getIdElement().getResourceType(), - theToResource.getIdElement().getIdPart() - ); + getPartitionIdForResource(theToResource), + theToResource.getIdElement().getResourceType(), + theToResource.getIdElement().getIdPart()); // reassign links: // to <- from @@ -193,13 +211,17 @@ public class GoldenResourceMergerSvcImpl implements IGoldenResourceMergerSvc { if (fromLink.isManual()) { switch (toLink.getLinkSource()) { case AUTO: - //3 - log(theMdmTransactionContext, String.format("MANUAL overrides AUT0. Deleting link %s", toLink.toString())); + // 3 + log( + theMdmTransactionContext, + String.format("MANUAL overrides AUT0. 
Deleting link %s", toLink.toString())); myMdmLinkDaoSvc.deleteLink(toLink); break; case MANUAL: if (fromLink.getMatchResult() != toLink.getMatchResult()) { - throw new InvalidRequestException(Msg.code(752) + "A MANUAL " + fromLink.getMatchResult() + " link may not be merged into a MANUAL " + toLink.getMatchResult() + " link for the same target"); + throw new InvalidRequestException(Msg.code(752) + "A MANUAL " + + fromLink.getMatchResult() + " link may not be merged into a MANUAL " + + toLink.getMatchResult() + " link for the same target"); } } } else { @@ -225,10 +247,12 @@ public class GoldenResourceMergerSvcImpl implements IGoldenResourceMergerSvc { toDelete.forEach(link -> myMdmLinkDaoSvc.deleteLink(link)); } - private Optional findFirstLinkWithMatchingSource(List theMdmLinks, IMdmLink theLinkWithSourceToMatch) { + private Optional findFirstLinkWithMatchingSource( + List theMdmLinks, IMdmLink theLinkWithSourceToMatch) { return theMdmLinks.stream() - .filter(mdmLink -> mdmLink.getSourcePersistenceId().equals(theLinkWithSourceToMatch.getSourcePersistenceId())) - .findFirst(); + .filter(mdmLink -> + mdmLink.getSourcePersistenceId().equals(theLinkWithSourceToMatch.getSourcePersistenceId())) + .findFirst(); } private void log(MdmTransactionContext theMdmTransactionContext, String theMessage) { diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/GoldenResourceSearchSvcImpl.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/GoldenResourceSearchSvcImpl.java index 444ac198185..51712154b23 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/GoldenResourceSearchSvcImpl.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/GoldenResourceSearchSvcImpl.java @@ -27,13 +27,13 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.pid.HomogeneousResourcePidList; import ca.uhn.fhir.jpa.api.pid.IResourcePidList; import ca.uhn.fhir.jpa.api.svc.IGoldenResourceSearchSvc; -import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.mdm.api.MdmConstants; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.SortOrderEnum; import ca.uhn.fhir.rest.api.SortSpec; +import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; import ca.uhn.fhir.rest.param.DateRangeParam; import ca.uhn.fhir.rest.param.TokenParam; @@ -41,10 +41,10 @@ import ca.uhn.fhir.util.DateRangeUtil; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.annotation.Transactional; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.Date; import java.util.List; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class GoldenResourceSearchSvcImpl implements IGoldenResourceSearchSvc { @Autowired @@ -58,20 +58,33 @@ public class GoldenResourceSearchSvcImpl implements IGoldenResourceSearchSvc { @Override @Transactional - public IResourcePidList fetchGoldenResourceIdsPage(Date theStart, Date theEnd, @Nonnull Integer thePageSize, @Nullable RequestPartitionId theRequestPartitionId, @Nonnull String theResourceType) { - return fetchResourceIdsPageWithResourceType(theStart, theEnd, thePageSize, theResourceType, theRequestPartitionId); + public IResourcePidList fetchGoldenResourceIdsPage( + Date theStart, + Date theEnd, + @Nonnull Integer thePageSize, + 
@Nullable RequestPartitionId theRequestPartitionId, + @Nonnull String theResourceType) { + return fetchResourceIdsPageWithResourceType( + theStart, theEnd, thePageSize, theResourceType, theRequestPartitionId); } - private IResourcePidList fetchResourceIdsPageWithResourceType(Date theStart, Date theEnd, int thePageSize, String theResourceType, RequestPartitionId theRequestPartitionId) { + private IResourcePidList fetchResourceIdsPageWithResourceType( + Date theStart, + Date theEnd, + int thePageSize, + String theResourceType, + RequestPartitionId theRequestPartitionId) { RuntimeResourceDefinition def = myFhirContext.getResourceDefinition(theResourceType); SearchParameterMap searchParamMap = myMatchUrlService.translateMatchUrl(theResourceType, def); searchParamMap.setSort(new SortSpec(Constants.PARAM_LASTUPDATED, SortOrderEnum.ASC)); - DateRangeParam chunkDateRange = DateRangeUtil.narrowDateRange(searchParamMap.getLastUpdated(), theStart, theEnd); + DateRangeParam chunkDateRange = + DateRangeUtil.narrowDateRange(searchParamMap.getLastUpdated(), theStart, theEnd); searchParamMap.setLastUpdated(chunkDateRange); searchParamMap.setCount(thePageSize); // request this many pids - searchParamMap.add("_tag", new TokenParam(MdmConstants.SYSTEM_GOLDEN_RECORD_STATUS, MdmConstants.CODE_GOLDEN_RECORD)); + searchParamMap.add( + "_tag", new TokenParam(MdmConstants.SYSTEM_GOLDEN_RECORD_STATUS, MdmConstants.CODE_GOLDEN_RECORD)); IFhirResourceDao dao = myDaoRegistry.getResourceDao(theResourceType); SystemRequestDetails request = new SystemRequestDetails(); diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/IMdmModelConverterSvc.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/IMdmModelConverterSvc.java index d986e80b635..4ec9923d4d8 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/IMdmModelConverterSvc.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/IMdmModelConverterSvc.java @@ -21,8 +21,8 @@ package ca.uhn.fhir.jpa.mdm.svc; import ca.uhn.fhir.mdm.api.IMdmLink; import ca.uhn.fhir.mdm.api.MdmLinkJson; -import ca.uhn.fhir.mdm.api.MdmLinkWithRevisionJson; import ca.uhn.fhir.mdm.api.MdmLinkWithRevision; +import ca.uhn.fhir.mdm.api.MdmLinkWithRevisionJson; /** * Contract for decoupling API dependency from the base / JPA modules. diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmControllerSvcImpl.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmControllerSvcImpl.java index ab6e4df700e..9887dac3f83 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmControllerSvcImpl.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmControllerSvcImpl.java @@ -56,12 +56,12 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.stereotype.Service; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.math.BigDecimal; import java.util.HashSet; import java.util.List; import java.util.Set; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * This class acts as a layer between MdmProviders and MDM services to support a REST API that's not a FHIR Operation API. 
@@ -71,62 +71,90 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc { @Autowired FhirContext myFhirContext; + @Autowired MdmControllerHelper myMdmControllerHelper; + @Autowired IGoldenResourceMergerSvc myGoldenResourceMergerSvc; + @Autowired IMdmLinkQuerySvc myMdmLinkQuerySvc; + @Autowired IMdmLinkUpdaterSvc myIMdmLinkUpdaterSvc; + @Autowired IMdmLinkCreateSvc myIMdmLinkCreateSvc; + @Autowired IRequestPartitionHelperSvc myRequestPartitionHelperSvc; + @Autowired IJobCoordinator myJobCoordinator; - public MdmControllerSvcImpl() { - } + public MdmControllerSvcImpl() {} @Override - public IAnyResource mergeGoldenResources(String theFromGoldenResourceId, String theToGoldenResourceId, IAnyResource theManuallyMergedGoldenResource, MdmTransactionContext theMdmTransactionContext) { - IAnyResource fromGoldenResource = myMdmControllerHelper.getLatestGoldenResourceFromIdOrThrowException(ProviderConstants.MDM_MERGE_GR_FROM_GOLDEN_RESOURCE_ID, theFromGoldenResourceId); - IAnyResource toGoldenResource = myMdmControllerHelper.getLatestGoldenResourceFromIdOrThrowException(ProviderConstants.MDM_MERGE_GR_TO_GOLDEN_RESOURCE_ID, theToGoldenResourceId); + public IAnyResource mergeGoldenResources( + String theFromGoldenResourceId, + String theToGoldenResourceId, + IAnyResource theManuallyMergedGoldenResource, + MdmTransactionContext theMdmTransactionContext) { + IAnyResource fromGoldenResource = myMdmControllerHelper.getLatestGoldenResourceFromIdOrThrowException( + ProviderConstants.MDM_MERGE_GR_FROM_GOLDEN_RESOURCE_ID, theFromGoldenResourceId); + IAnyResource toGoldenResource = myMdmControllerHelper.getLatestGoldenResourceFromIdOrThrowException( + ProviderConstants.MDM_MERGE_GR_TO_GOLDEN_RESOURCE_ID, theToGoldenResourceId); myMdmControllerHelper.validateMergeResources(fromGoldenResource, toGoldenResource); myMdmControllerHelper.validateSameVersion(fromGoldenResource, theFromGoldenResourceId); myMdmControllerHelper.validateSameVersion(toGoldenResource, theToGoldenResourceId); - return myGoldenResourceMergerSvc.mergeGoldenResources(fromGoldenResource, theManuallyMergedGoldenResource, toGoldenResource, theMdmTransactionContext); + return myGoldenResourceMergerSvc.mergeGoldenResources( + fromGoldenResource, theManuallyMergedGoldenResource, toGoldenResource, theMdmTransactionContext); } @Override @Deprecated - public Page queryLinks(@Nullable String theGoldenResourceId, @Nullable String theSourceResourceId, @Nullable String theMatchResult, @Nullable String theLinkSource, MdmTransactionContext theMdmTransactionContext, MdmPageRequest thePageRequest) { + public Page queryLinks( + @Nullable String theGoldenResourceId, + @Nullable String theSourceResourceId, + @Nullable String theMatchResult, + @Nullable String theLinkSource, + MdmTransactionContext theMdmTransactionContext, + MdmPageRequest thePageRequest) { MdmQuerySearchParameters mdmQuerySearchParameters = new MdmQuerySearchParameters(thePageRequest) - .setGoldenResourceId(theGoldenResourceId) - .setSourceId(theSourceResourceId) - .setMatchResult(theMatchResult) - .setLinkSource(theLinkSource); + .setGoldenResourceId(theGoldenResourceId) + .setSourceId(theSourceResourceId) + .setMatchResult(theMatchResult) + .setLinkSource(theLinkSource); return queryLinksFromPartitionList(mdmQuerySearchParameters, theMdmTransactionContext); } @Override @Deprecated - public Page queryLinks(@Nullable String theGoldenResourceId, @Nullable String theSourceResourceId, - @Nullable String theMatchResult, @Nullable String theLinkSource, MdmTransactionContext 
theMdmTransactionContext, - MdmPageRequest thePageRequest, @Nullable RequestDetails theRequestDetails) { + public Page queryLinks( + @Nullable String theGoldenResourceId, + @Nullable String theSourceResourceId, + @Nullable String theMatchResult, + @Nullable String theLinkSource, + MdmTransactionContext theMdmTransactionContext, + MdmPageRequest thePageRequest, + @Nullable RequestDetails theRequestDetails) { MdmQuerySearchParameters mdmQuerySearchParameters = new MdmQuerySearchParameters(thePageRequest) - .setGoldenResourceId(theGoldenResourceId) - .setSourceId(theSourceResourceId) - .setMatchResult(theMatchResult) - .setLinkSource(theLinkSource); + .setGoldenResourceId(theGoldenResourceId) + .setSourceId(theSourceResourceId) + .setMatchResult(theMatchResult) + .setLinkSource(theLinkSource); return queryLinks(mdmQuerySearchParameters, theMdmTransactionContext, theRequestDetails); } @Override - public Page queryLinks(MdmQuerySearchParameters theMdmQuerySearchParameters, MdmTransactionContext theMdmTransactionContext, RequestDetails theRequestDetails) { - RequestPartitionId theReadPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, null); + public Page queryLinks( + MdmQuerySearchParameters theMdmQuerySearchParameters, + MdmTransactionContext theMdmTransactionContext, + RequestDetails theRequestDetails) { + RequestPartitionId theReadPartitionId = + myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, null); Page resultPage; if (theReadPartitionId.hasPartitionIds()) { theMdmQuerySearchParameters.setPartitionIds(theReadPartitionId.getPartitionIds()); @@ -138,44 +166,61 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc { } @Override - public List queryLinkHistory(MdmHistorySearchParameters theMdmHistorySearchParameters, RequestDetails theRequestDetails) { + public List queryLinkHistory( + MdmHistorySearchParameters theMdmHistorySearchParameters, RequestDetails theRequestDetails) { return myMdmLinkQuerySvc.queryLinkHistory(theMdmHistorySearchParameters); } @Override @Deprecated - public Page queryLinksFromPartitionList(@Nullable String theGoldenResourceId, @Nullable String theSourceResourceId, - @Nullable String theMatchResult, @Nullable String theLinkSource, - MdmTransactionContext theMdmTransactionContext, MdmPageRequest thePageRequest, - @Nullable List thePartitionIds) { + public Page queryLinksFromPartitionList( + @Nullable String theGoldenResourceId, + @Nullable String theSourceResourceId, + @Nullable String theMatchResult, + @Nullable String theLinkSource, + MdmTransactionContext theMdmTransactionContext, + MdmPageRequest thePageRequest, + @Nullable List thePartitionIds) { MdmQuerySearchParameters mdmQuerySearchParameters = new MdmQuerySearchParameters(thePageRequest) - .setGoldenResourceId(theGoldenResourceId) - .setSourceId(theSourceResourceId) - .setMatchResult(theMatchResult) - .setLinkSource(theLinkSource) - .setPartitionIds(thePartitionIds); + .setGoldenResourceId(theGoldenResourceId) + .setSourceId(theSourceResourceId) + .setMatchResult(theMatchResult) + .setLinkSource(theLinkSource) + .setPartitionIds(thePartitionIds); return queryLinksFromPartitionList(mdmQuerySearchParameters, theMdmTransactionContext); } @Override - public Page queryLinksFromPartitionList(MdmQuerySearchParameters theMdmQuerySearchParameters, MdmTransactionContext theMdmTransactionContext) { + public Page queryLinksFromPartitionList( + MdmQuerySearchParameters theMdmQuerySearchParameters, MdmTransactionContext 
theMdmTransactionContext) { return myMdmLinkQuerySvc.queryLinks(theMdmQuerySearchParameters, theMdmTransactionContext); } @Override - public Page getDuplicateGoldenResources(MdmTransactionContext theMdmTransactionContext, MdmPageRequest thePageRequest) { + public Page getDuplicateGoldenResources( + MdmTransactionContext theMdmTransactionContext, MdmPageRequest thePageRequest) { return myMdmLinkQuerySvc.getDuplicateGoldenResources(theMdmTransactionContext, thePageRequest, null, null); } @Override - public Page getDuplicateGoldenResources(MdmTransactionContext theMdmTransactionContext, MdmPageRequest thePageRequest, RequestDetails theRequestDetails, String theRequestResourceType) { + public Page getDuplicateGoldenResources( + MdmTransactionContext theMdmTransactionContext, + MdmPageRequest thePageRequest, + RequestDetails theRequestDetails, + String theRequestResourceType) { Page resultPage; - RequestPartitionId readPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, null); + RequestPartitionId readPartitionId = + myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, null); if (readPartitionId.isAllPartitions()) { - resultPage = myMdmLinkQuerySvc.getDuplicateGoldenResources(theMdmTransactionContext, thePageRequest, null, theRequestResourceType); + resultPage = myMdmLinkQuerySvc.getDuplicateGoldenResources( + theMdmTransactionContext, thePageRequest, null, theRequestResourceType); } else { - resultPage = myMdmLinkQuerySvc.getDuplicateGoldenResources(theMdmTransactionContext, thePageRequest, readPartitionId.getPartitionIds(), theRequestResourceType); + resultPage = myMdmLinkQuerySvc.getDuplicateGoldenResources( + theMdmTransactionContext, + thePageRequest, + readPartitionId.getPartitionIds(), + theRequestResourceType); } validateMdmQueryPermissions(readPartitionId, resultPage.getContent(), theRequestDetails); @@ -184,10 +229,16 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc { } @Override - public IAnyResource updateLink(String theGoldenResourceId, String theSourceResourceId, String theMatchResult, MdmTransactionContext theMdmTransactionContext) { + public IAnyResource updateLink( + String theGoldenResourceId, + String theSourceResourceId, + String theMatchResult, + MdmTransactionContext theMdmTransactionContext) { MdmMatchResultEnum matchResult = MdmControllerUtil.extractMatchResultOrNull(theMatchResult); - IAnyResource goldenResource = myMdmControllerHelper.getLatestGoldenResourceFromIdOrThrowException(ProviderConstants.MDM_UPDATE_LINK_GOLDEN_RESOURCE_ID, theGoldenResourceId); - IAnyResource source = myMdmControllerHelper.getLatestSourceFromIdOrThrowException(ProviderConstants.MDM_UPDATE_LINK_RESOURCE_ID, theSourceResourceId); + IAnyResource goldenResource = myMdmControllerHelper.getLatestGoldenResourceFromIdOrThrowException( + ProviderConstants.MDM_UPDATE_LINK_GOLDEN_RESOURCE_ID, theGoldenResourceId); + IAnyResource source = myMdmControllerHelper.getLatestSourceFromIdOrThrowException( + ProviderConstants.MDM_UPDATE_LINK_RESOURCE_ID, theSourceResourceId); myMdmControllerHelper.validateSameVersion(goldenResource, theGoldenResourceId); myMdmControllerHelper.validateSameVersion(source, theSourceResourceId); @@ -195,10 +246,16 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc { } @Override - public IAnyResource createLink(String theGoldenResourceId, String theSourceResourceId, @Nullable String theMatchResult, MdmTransactionContext theMdmTransactionContext) { + public IAnyResource createLink( + 
String theGoldenResourceId, + String theSourceResourceId, + @Nullable String theMatchResult, + MdmTransactionContext theMdmTransactionContext) { MdmMatchResultEnum matchResult = MdmControllerUtil.extractMatchResultOrNull(theMatchResult); - IAnyResource goldenResource = myMdmControllerHelper.getLatestGoldenResourceFromIdOrThrowException(ProviderConstants.MDM_CREATE_LINK_GOLDEN_RESOURCE_ID, theGoldenResourceId); - IAnyResource source = myMdmControllerHelper.getLatestSourceFromIdOrThrowException(ProviderConstants.MDM_CREATE_LINK_RESOURCE_ID, theSourceResourceId); + IAnyResource goldenResource = myMdmControllerHelper.getLatestGoldenResourceFromIdOrThrowException( + ProviderConstants.MDM_CREATE_LINK_GOLDEN_RESOURCE_ID, theGoldenResourceId); + IAnyResource source = myMdmControllerHelper.getLatestSourceFromIdOrThrowException( + ProviderConstants.MDM_CREATE_LINK_RESOURCE_ID, theSourceResourceId); myMdmControllerHelper.validateSameVersion(goldenResource, theGoldenResourceId); myMdmControllerHelper.validateSameVersion(source, theSourceResourceId); @@ -206,15 +263,22 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc { } @Override - public IBaseParameters submitMdmClearJob(@Nonnull List theResourceNames, IPrimitiveType theBatchSize, ServletRequestDetails theRequestDetails) { + public IBaseParameters submitMdmClearJob( + @Nonnull List theResourceNames, + IPrimitiveType theBatchSize, + ServletRequestDetails theRequestDetails) { MdmClearJobParameters params = new MdmClearJobParameters(); params.setResourceNames(theResourceNames); - if (theBatchSize != null && theBatchSize.getValue() != null && theBatchSize.getValue().longValue() > 0) { + if (theBatchSize != null + && theBatchSize.getValue() != null + && theBatchSize.getValue().longValue() > 0) { params.setBatchSize(theBatchSize.getValue().intValue()); } - ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_MDM_CLEAR); - RequestPartitionId requestPartition = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, details); + ReadPartitionIdRequestDetails details = + ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_MDM_CLEAR); + RequestPartitionId requestPartition = + myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, details); params.setRequestPartitionId(requestPartition); JobInstanceStartRequest request = new JobInstanceStartRequest(); @@ -224,16 +288,19 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc { String id = response.getInstanceId(); IBaseParameters retVal = ParametersUtil.newInstance(myFhirContext); - ParametersUtil.addParameterToParametersString(myFhirContext, retVal, ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, id); + ParametersUtil.addParameterToParametersString( + myFhirContext, retVal, ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, id); return retVal; } - @Override - public IBaseParameters submitMdmSubmitJob(List theUrls, IPrimitiveType theBatchSize, ServletRequestDetails theRequestDetails) { + public IBaseParameters submitMdmSubmitJob( + List theUrls, IPrimitiveType theBatchSize, ServletRequestDetails theRequestDetails) { MdmSubmitJobParameters params = new MdmSubmitJobParameters(); - if (theBatchSize != null && theBatchSize.getValue() != null && theBatchSize.getValue().longValue() > 0) { + if (theBatchSize != null + && theBatchSize.getValue() != null + && theBatchSize.getValue().longValue() > 0) { 
params.setBatchSize(theBatchSize.getValue().intValue()); } params.setRequestPartitionId(RequestPartitionId.allPartitions()); @@ -248,25 +315,35 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc { String id = batch2JobStartResponse.getInstanceId(); IBaseParameters retVal = ParametersUtil.newInstance(myFhirContext); - ParametersUtil.addParameterToParametersString(myFhirContext, retVal, ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, id); + ParametersUtil.addParameterToParametersString( + myFhirContext, retVal, ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, id); return retVal; } @Override - public void notDuplicateGoldenResource(String theGoldenResourceId, String theTargetGoldenResourceId, MdmTransactionContext theMdmTransactionContext) { - IAnyResource goldenResource = myMdmControllerHelper.getLatestGoldenResourceFromIdOrThrowException(ProviderConstants.MDM_UPDATE_LINK_GOLDEN_RESOURCE_ID, theGoldenResourceId); - IAnyResource target = myMdmControllerHelper.getLatestGoldenResourceFromIdOrThrowException(ProviderConstants.MDM_UPDATE_LINK_RESOURCE_ID, theTargetGoldenResourceId); + public void notDuplicateGoldenResource( + String theGoldenResourceId, + String theTargetGoldenResourceId, + MdmTransactionContext theMdmTransactionContext) { + IAnyResource goldenResource = myMdmControllerHelper.getLatestGoldenResourceFromIdOrThrowException( + ProviderConstants.MDM_UPDATE_LINK_GOLDEN_RESOURCE_ID, theGoldenResourceId); + IAnyResource target = myMdmControllerHelper.getLatestGoldenResourceFromIdOrThrowException( + ProviderConstants.MDM_UPDATE_LINK_RESOURCE_ID, theTargetGoldenResourceId); myIMdmLinkUpdaterSvc.notDuplicateGoldenResource(goldenResource, target, theMdmTransactionContext); } - private void validateMdmQueryPermissions(RequestPartitionId theRequestPartitionId, List theMdmLinkJsonList, RequestDetails theRequestDetails) { + private void validateMdmQueryPermissions( + RequestPartitionId theRequestPartitionId, + List theMdmLinkJsonList, + RequestDetails theRequestDetails) { Set seenResourceTypes = new HashSet<>(); for (MdmLinkJson mdmLinkJson : theMdmLinkJsonList) { IdDt idDt = new IdDt(mdmLinkJson.getSourceId()); if (!seenResourceTypes.contains(idDt.getResourceType())) { - myRequestPartitionHelperSvc.validateHasPartitionPermissions(theRequestDetails, idDt.getResourceType(), theRequestPartitionId); + myRequestPartitionHelperSvc.validateHasPartitionPermissions( + theRequestDetails, idDt.getResourceType(), theRequestPartitionId); seenResourceTypes.add(idDt.getResourceType()); } } diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmEidUpdateService.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmEidUpdateService.java index 8c414adaa38..1f43b464b1f 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmEidUpdateService.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmEidUpdateService.java @@ -41,9 +41,9 @@ import org.slf4j.Logger; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; -import javax.annotation.Nullable; import java.util.List; import java.util.Optional; +import javax.annotation.Nullable; @Service public class MdmEidUpdateService { @@ -52,88 +52,165 @@ public class MdmEidUpdateService { @Autowired private MdmResourceDaoSvc myMdmResourceDaoSvc; + @Autowired private IMdmLinkSvc myMdmLinkSvc; + @Autowired private MdmGoldenResourceFindingSvc myMdmGoldenResourceFindingSvc; + @Autowired private GoldenResourceHelper 
myGoldenResourceHelper; + @Autowired private EIDHelper myEIDHelper; + @Autowired private MdmLinkDaoSvc myMdmLinkDaoSvc; + @Autowired private IMdmSettings myMdmSettings; + @Autowired private IMdmSurvivorshipService myMdmSurvivorshipService; - void handleMdmUpdate(IAnyResource theTargetResource, MatchedGoldenResourceCandidate theMatchedGoldenResourceCandidate, MdmTransactionContext theMdmTransactionContext) { + void handleMdmUpdate( + IAnyResource theTargetResource, + MatchedGoldenResourceCandidate theMatchedGoldenResourceCandidate, + MdmTransactionContext theMdmTransactionContext) { MdmUpdateContext updateContext = new MdmUpdateContext(theMatchedGoldenResourceCandidate, theTargetResource); - myMdmSurvivorshipService.applySurvivorshipRulesToGoldenResource(theTargetResource, updateContext.getMatchedGoldenResource(), theMdmTransactionContext); + myMdmSurvivorshipService.applySurvivorshipRulesToGoldenResource( + theTargetResource, updateContext.getMatchedGoldenResource(), theMdmTransactionContext); if (updateContext.isRemainsMatchedToSameGoldenResource()) { // Copy over any new external EIDs which don't already exist. if (!updateContext.isIncomingResourceHasAnEid() || updateContext.isHasEidsInCommon()) { - //update to patient that uses internal EIDs only. - myMdmLinkSvc.updateLink(updateContext.getMatchedGoldenResource(), theTargetResource, theMatchedGoldenResourceCandidate.getMatchResult(), MdmLinkSourceEnum.AUTO, theMdmTransactionContext); + // update to patient that uses internal EIDs only. + myMdmLinkSvc.updateLink( + updateContext.getMatchedGoldenResource(), + theTargetResource, + theMatchedGoldenResourceCandidate.getMatchResult(), + MdmLinkSourceEnum.AUTO, + theMdmTransactionContext); } else if (!updateContext.isHasEidsInCommon()) { - handleNoEidsInCommon(theTargetResource, theMatchedGoldenResourceCandidate, theMdmTransactionContext, updateContext); + handleNoEidsInCommon( + theTargetResource, theMatchedGoldenResourceCandidate, theMdmTransactionContext, updateContext); } } else { - //This is a new linking scenario. we have to break the existing link and link to the new Golden Resource. For now, we create duplicate. - //updated patient has an EID that matches to a new candidate. Link them, and set the Golden Resources possible duplicates + // This is a new linking scenario. we have to break the existing link and link to the new Golden Resource. + // For now, we create duplicate. + // updated patient has an EID that matches to a new candidate. Link them, and set the Golden Resources + // possible duplicates IAnyResource theOldGoldenResource = updateContext.getExistingGoldenResource(); if (theOldGoldenResource == null) { - throw new InternalErrorException(Msg.code(2362) + "Old golden resource was null while updating MDM links with new golden resource. It is likely that a $mdm-clear was performed without a $mdm-submit. Link will not be updated."); + throw new InternalErrorException( + Msg.code(2362) + + "Old golden resource was null while updating MDM links with new golden resource. It is likely that a $mdm-clear was performed without a $mdm-submit. 
Link will not be updated."); } else { - linkToNewGoldenResourceAndFlagAsDuplicate(theTargetResource, theMatchedGoldenResourceCandidate.getMatchResult(), theOldGoldenResource, updateContext.getMatchedGoldenResource(), theMdmTransactionContext); + linkToNewGoldenResourceAndFlagAsDuplicate( + theTargetResource, + theMatchedGoldenResourceCandidate.getMatchResult(), + theOldGoldenResource, + updateContext.getMatchedGoldenResource(), + theMdmTransactionContext); - myMdmSurvivorshipService.applySurvivorshipRulesToGoldenResource(theTargetResource, updateContext.getMatchedGoldenResource(), theMdmTransactionContext); - myMdmResourceDaoSvc.upsertGoldenResource(updateContext.getMatchedGoldenResource(), theMdmTransactionContext.getResourceType()); + myMdmSurvivorshipService.applySurvivorshipRulesToGoldenResource( + theTargetResource, updateContext.getMatchedGoldenResource(), theMdmTransactionContext); + myMdmResourceDaoSvc.upsertGoldenResource( + updateContext.getMatchedGoldenResource(), theMdmTransactionContext.getResourceType()); } } } - private void handleNoEidsInCommon(IAnyResource theResource, MatchedGoldenResourceCandidate theMatchedGoldenResourceCandidate, MdmTransactionContext theMdmTransactionContext, MdmUpdateContext theUpdateContext) { + private void handleNoEidsInCommon( + IAnyResource theResource, + MatchedGoldenResourceCandidate theMatchedGoldenResourceCandidate, + MdmTransactionContext theMdmTransactionContext, + MdmUpdateContext theUpdateContext) { // the user is simply updating their EID. We propagate this change to the GoldenResource. - //overwrite. No EIDS in common, but still same GoldenResource. + // overwrite. No EIDS in common, but still same GoldenResource. if (myMdmSettings.isPreventMultipleEids()) { - if (myMdmLinkDaoSvc.findMdmMatchLinksByGoldenResource(theUpdateContext.getMatchedGoldenResource()).size() <= 1) { // If there is only 0/1 link on the GoldenResource, we can safely overwrite the EID. - handleExternalEidOverwrite(theUpdateContext.getMatchedGoldenResource(), theResource, theMdmTransactionContext); - } else { // If the GoldenResource has multiple targets tied to it, we can't just overwrite the EID, so we split the GoldenResource. - createNewGoldenResourceAndFlagAsDuplicate(theResource, theMdmTransactionContext, theUpdateContext.getExistingGoldenResource()); + if (myMdmLinkDaoSvc + .findMdmMatchLinksByGoldenResource(theUpdateContext.getMatchedGoldenResource()) + .size() + <= 1) { // If there is only 0/1 link on the GoldenResource, we can safely overwrite the EID. + handleExternalEidOverwrite( + theUpdateContext.getMatchedGoldenResource(), theResource, theMdmTransactionContext); + } else { // If the GoldenResource has multiple targets tied to it, we can't just overwrite the EID, so we + // split the GoldenResource. 
+ createNewGoldenResourceAndFlagAsDuplicate( + theResource, theMdmTransactionContext, theUpdateContext.getExistingGoldenResource()); } } else { - myGoldenResourceHelper.handleExternalEidAddition(theUpdateContext.getMatchedGoldenResource(), theResource, theMdmTransactionContext); + myGoldenResourceHelper.handleExternalEidAddition( + theUpdateContext.getMatchedGoldenResource(), theResource, theMdmTransactionContext); } - myMdmLinkSvc.updateLink(theUpdateContext.getMatchedGoldenResource(), theResource, theMatchedGoldenResourceCandidate.getMatchResult(), MdmLinkSourceEnum.AUTO, theMdmTransactionContext); + myMdmLinkSvc.updateLink( + theUpdateContext.getMatchedGoldenResource(), + theResource, + theMatchedGoldenResourceCandidate.getMatchResult(), + MdmLinkSourceEnum.AUTO, + theMdmTransactionContext); } - private void handleExternalEidOverwrite(IAnyResource theGoldenResource, IAnyResource theResource, MdmTransactionContext theMdmTransactionContext) { + private void handleExternalEidOverwrite( + IAnyResource theGoldenResource, IAnyResource theResource, MdmTransactionContext theMdmTransactionContext) { List eidFromResource = myEIDHelper.getExternalEid(theResource); if (!eidFromResource.isEmpty()) { myGoldenResourceHelper.overwriteExternalEids(theGoldenResource, eidFromResource); } } - private boolean candidateIsSameAsMdmLinkGoldenResource(IMdmLink theExistingMatchLink, MatchedGoldenResourceCandidate theGoldenResourceCandidate) { - return theExistingMatchLink.getGoldenResourcePersistenceId().equals(theGoldenResourceCandidate.getCandidateGoldenResourcePid()); + private boolean candidateIsSameAsMdmLinkGoldenResource( + IMdmLink theExistingMatchLink, MatchedGoldenResourceCandidate theGoldenResourceCandidate) { + return theExistingMatchLink + .getGoldenResourcePersistenceId() + .equals(theGoldenResourceCandidate.getCandidateGoldenResourcePid()); } - private void createNewGoldenResourceAndFlagAsDuplicate(IAnyResource theResource, MdmTransactionContext theMdmTransactionContext, IAnyResource theOldGoldenResource) { - log(theMdmTransactionContext, "Duplicate detected based on the fact that both resources have different external EIDs."); - IAnyResource newGoldenResource = myGoldenResourceHelper.createGoldenResourceFromMdmSourceResource(theResource, theMdmTransactionContext); + private void createNewGoldenResourceAndFlagAsDuplicate( + IAnyResource theResource, + MdmTransactionContext theMdmTransactionContext, + IAnyResource theOldGoldenResource) { + log( + theMdmTransactionContext, + "Duplicate detected based on the fact that both resources have different external EIDs."); + IAnyResource newGoldenResource = + myGoldenResourceHelper.createGoldenResourceFromMdmSourceResource(theResource, theMdmTransactionContext); - myMdmLinkSvc.updateLink(newGoldenResource, theResource, MdmMatchOutcome.NEW_GOLDEN_RESOURCE_MATCH, MdmLinkSourceEnum.AUTO, theMdmTransactionContext); - myMdmLinkSvc.updateLink(newGoldenResource, theOldGoldenResource, MdmMatchOutcome.POSSIBLE_DUPLICATE, MdmLinkSourceEnum.AUTO, theMdmTransactionContext); + myMdmLinkSvc.updateLink( + newGoldenResource, + theResource, + MdmMatchOutcome.NEW_GOLDEN_RESOURCE_MATCH, + MdmLinkSourceEnum.AUTO, + theMdmTransactionContext); + myMdmLinkSvc.updateLink( + newGoldenResource, + theOldGoldenResource, + MdmMatchOutcome.POSSIBLE_DUPLICATE, + MdmLinkSourceEnum.AUTO, + theMdmTransactionContext); } - private void linkToNewGoldenResourceAndFlagAsDuplicate(IAnyResource theResource, MdmMatchOutcome theMatchResult, IAnyResource theOldGoldenResource, IAnyResource 
theNewGoldenResource, MdmTransactionContext theMdmTransactionContext) { + private void linkToNewGoldenResourceAndFlagAsDuplicate( + IAnyResource theResource, + MdmMatchOutcome theMatchResult, + IAnyResource theOldGoldenResource, + IAnyResource theNewGoldenResource, + MdmTransactionContext theMdmTransactionContext) { log(theMdmTransactionContext, "Changing a match link!"); myMdmLinkSvc.deleteLink(theOldGoldenResource, theResource, theMdmTransactionContext); - myMdmLinkSvc.updateLink(theNewGoldenResource, theResource, theMatchResult, MdmLinkSourceEnum.AUTO, theMdmTransactionContext); - log(theMdmTransactionContext, "Duplicate detected based on the fact that both resources have different external EIDs."); - myMdmLinkSvc.updateLink(theNewGoldenResource, theOldGoldenResource, MdmMatchOutcome.POSSIBLE_DUPLICATE, MdmLinkSourceEnum.AUTO, theMdmTransactionContext); + myMdmLinkSvc.updateLink( + theNewGoldenResource, theResource, theMatchResult, MdmLinkSourceEnum.AUTO, theMdmTransactionContext); + log( + theMdmTransactionContext, + "Duplicate detected based on the fact that both resources have different external EIDs."); + myMdmLinkSvc.updateLink( + theNewGoldenResource, + theOldGoldenResource, + MdmMatchOutcome.POSSIBLE_DUPLICATE, + MdmLinkSourceEnum.AUTO, + theMdmTransactionContext); } private void log(MdmTransactionContext theMdmTransactionContext, String theMessage) { @@ -141,8 +218,12 @@ public class MdmEidUpdateService { ourLog.debug(theMessage); } - public void applySurvivorshipRulesAndSaveGoldenResource(IAnyResource theTargetResource, IAnyResource theGoldenResource, MdmTransactionContext theMdmTransactionContext) { - myMdmSurvivorshipService.applySurvivorshipRulesToGoldenResource(theTargetResource, theGoldenResource, theMdmTransactionContext); + public void applySurvivorshipRulesAndSaveGoldenResource( + IAnyResource theTargetResource, + IAnyResource theGoldenResource, + MdmTransactionContext theMdmTransactionContext) { + myMdmSurvivorshipService.applySurvivorshipRulesToGoldenResource( + theTargetResource, theGoldenResource, theMdmTransactionContext); myMdmResourceDaoSvc.upsertGoldenResource(theGoldenResource, theMdmTransactionContext.getResourceType()); } @@ -163,19 +244,24 @@ public class MdmEidUpdateService { MdmUpdateContext(MatchedGoldenResourceCandidate theMatchedGoldenResourceCandidate, IAnyResource theResource) { final String resourceType = theResource.getIdElement().getResourceType(); - myMatchedGoldenResource = myMdmGoldenResourceFindingSvc.getGoldenResourceFromMatchedGoldenResourceCandidate(theMatchedGoldenResourceCandidate, resourceType); + myMatchedGoldenResource = myMdmGoldenResourceFindingSvc.getGoldenResourceFromMatchedGoldenResourceCandidate( + theMatchedGoldenResourceCandidate, resourceType); myHasEidsInCommon = myEIDHelper.hasEidOverlap(myMatchedGoldenResource, theResource); - myIncomingResourceHasAnEid = !myEIDHelper.getExternalEid(theResource).isEmpty(); + myIncomingResourceHasAnEid = + !myEIDHelper.getExternalEid(theResource).isEmpty(); - Optional theExistingMatchOrPossibleMatchLink = myMdmLinkDaoSvc.getMatchedOrPossibleMatchedLinkForSource(theResource); + Optional theExistingMatchOrPossibleMatchLink = + myMdmLinkDaoSvc.getMatchedOrPossibleMatchedLinkForSource(theResource); myExistingGoldenResource = null; if (theExistingMatchOrPossibleMatchLink.isPresent()) { IMdmLink mdmLink = theExistingMatchOrPossibleMatchLink.get(); IResourcePersistentId existingGoldenResourcePid = mdmLink.getGoldenResourcePersistenceId(); - myExistingGoldenResource = 
myMdmResourceDaoSvc.readGoldenResourceByPid(existingGoldenResourcePid, resourceType); - myRemainsMatchedToSameGoldenResource = candidateIsSameAsMdmLinkGoldenResource(mdmLink, theMatchedGoldenResourceCandidate); + myExistingGoldenResource = + myMdmResourceDaoSvc.readGoldenResourceByPid(existingGoldenResourcePid, resourceType); + myRemainsMatchedToSameGoldenResource = + candidateIsSameAsMdmLinkGoldenResource(mdmLink, theMatchedGoldenResourceCandidate); } else { myRemainsMatchedToSameGoldenResource = false; } diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmLinkCreateSvcImpl.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmLinkCreateSvcImpl.java index bae39ee1514..54d55d2a908 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmLinkCreateSvcImpl.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmLinkCreateSvcImpl.java @@ -24,7 +24,6 @@ import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; import ca.uhn.fhir.jpa.mdm.dao.MdmLinkDaoSvc; -import ca.uhn.fhir.mdm.util.MdmPartitionHelper; import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId; import ca.uhn.fhir.mdm.api.IMdmLink; import ca.uhn.fhir.mdm.api.IMdmLinkCreateSvc; @@ -33,6 +32,7 @@ import ca.uhn.fhir.mdm.api.MdmLinkSourceEnum; import ca.uhn.fhir.mdm.api.MdmMatchResultEnum; import ca.uhn.fhir.mdm.log.Logs; import ca.uhn.fhir.mdm.model.MdmTransactionContext; +import ca.uhn.fhir.mdm.util.MdmPartitionHelper; import ca.uhn.fhir.mdm.util.MdmResourceUtil; import ca.uhn.fhir.mdm.util.MessageHelper; import ca.uhn.fhir.rest.api.Constants; @@ -52,20 +52,29 @@ public class MdmLinkCreateSvcImpl implements IMdmLinkCreateSvc { @Autowired FhirContext myFhirContext; + @Autowired IIdHelperService myIdHelperService; + @Autowired MdmLinkDaoSvc myMdmLinkDaoSvc; + @Autowired IMdmSettings myMdmSettings; + @Autowired MessageHelper myMessageHelper; + @Autowired MdmPartitionHelper myMdmPartitionHelper; @Transactional @Override - public IAnyResource createLink(IAnyResource theGoldenResource, IAnyResource theSourceResource, MdmMatchResultEnum theMatchResult, MdmTransactionContext theMdmContext) { + public IAnyResource createLink( + IAnyResource theGoldenResource, + IAnyResource theSourceResource, + MdmMatchResultEnum theMatchResult, + MdmTransactionContext theMdmContext) { String sourceType = myFhirContext.getResourceType(theSourceResource); validateCreateLinkRequest(theGoldenResource, theSourceResource, sourceType); @@ -76,17 +85,22 @@ public class MdmLinkCreateSvcImpl implements IMdmLinkCreateSvc { // check if the golden resource and the source resource are in the same partition, throw error if not myMdmPartitionHelper.validateMdmResourcesPartitionMatches(theGoldenResource, theSourceResource); - Optional optionalMdmLink = myMdmLinkDaoSvc.getLinkByGoldenResourcePidAndSourceResourcePid(goldenResourceId, targetId); + Optional optionalMdmLink = + myMdmLinkDaoSvc.getLinkByGoldenResourcePidAndSourceResourcePid(goldenResourceId, targetId); if (optionalMdmLink.isPresent()) { - throw new InvalidRequestException(Msg.code(753) + myMessageHelper.getMessageForPresentLink(theGoldenResource, theSourceResource)); + throw new InvalidRequestException( + Msg.code(753) + myMessageHelper.getMessageForPresentLink(theGoldenResource, theSourceResource)); } - List mdmLinks = myMdmLinkDaoSvc.getMdmLinksBySourcePidAndMatchResult(targetId, MdmMatchResultEnum.MATCH); + List mdmLinks = + 
myMdmLinkDaoSvc.getMdmLinksBySourcePidAndMatchResult(targetId, MdmMatchResultEnum.MATCH); if (mdmLinks.size() > 0 && theMatchResult == MdmMatchResultEnum.MATCH) { - throw new InvalidRequestException(Msg.code(754) + myMessageHelper.getMessageForMultipleGoldenRecords(theSourceResource)); + throw new InvalidRequestException( + Msg.code(754) + myMessageHelper.getMessageForMultipleGoldenRecords(theSourceResource)); } - IMdmLink mdmLink = myMdmLinkDaoSvc.getOrCreateMdmLinkByGoldenResourceAndSourceResource(theGoldenResource, theSourceResource); + IMdmLink mdmLink = myMdmLinkDaoSvc.getOrCreateMdmLinkByGoldenResourceAndSourceResource( + theGoldenResource, theSourceResource); mdmLink.setLinkSource(MdmLinkSourceEnum.MANUAL); mdmLink.setMdmSourceType(sourceType); if (theMatchResult == null) { @@ -95,30 +109,40 @@ public class MdmLinkCreateSvcImpl implements IMdmLinkCreateSvc { mdmLink.setMatchResult(theMatchResult); } // Add partition for the mdm link if it doesn't exist - RequestPartitionId goldenResourcePartitionId = (RequestPartitionId) theGoldenResource.getUserData(Constants.RESOURCE_PARTITION_ID); - if (goldenResourcePartitionId != null && goldenResourcePartitionId.hasPartitionIds() && goldenResourcePartitionId.getFirstPartitionIdOrNull() != null && - (mdmLink.getPartitionId() == null || mdmLink.getPartitionId().getPartitionId() == null)) { - mdmLink.setPartitionId(new PartitionablePartitionId(goldenResourcePartitionId.getFirstPartitionIdOrNull(), goldenResourcePartitionId.getPartitionDate())); + RequestPartitionId goldenResourcePartitionId = + (RequestPartitionId) theGoldenResource.getUserData(Constants.RESOURCE_PARTITION_ID); + if (goldenResourcePartitionId != null + && goldenResourcePartitionId.hasPartitionIds() + && goldenResourcePartitionId.getFirstPartitionIdOrNull() != null + && (mdmLink.getPartitionId() == null || mdmLink.getPartitionId().getPartitionId() == null)) { + mdmLink.setPartitionId(new PartitionablePartitionId( + goldenResourcePartitionId.getFirstPartitionIdOrNull(), + goldenResourcePartitionId.getPartitionDate())); } - ourLog.info("Manually creating a " + theGoldenResource.getIdElement().toVersionless() + " to " + theSourceResource.getIdElement().toVersionless() + " mdm link."); + ourLog.info("Manually creating a " + theGoldenResource.getIdElement().toVersionless() + " to " + + theSourceResource.getIdElement().toVersionless() + " mdm link."); myMdmLinkDaoSvc.save(mdmLink); return theGoldenResource; } - private void validateCreateLinkRequest(IAnyResource theGoldenRecord, IAnyResource theSourceResource, String theSourceType) { + private void validateCreateLinkRequest( + IAnyResource theGoldenRecord, IAnyResource theSourceResource, String theSourceType) { String goldenRecordType = myFhirContext.getResourceType(theGoldenRecord); if (!myMdmSettings.isSupportedMdmType(goldenRecordType)) { - throw new InvalidRequestException(Msg.code(755) + myMessageHelper.getMessageForUnsupportedFirstArgumentTypeInUpdate(goldenRecordType)); + throw new InvalidRequestException(Msg.code(755) + + myMessageHelper.getMessageForUnsupportedFirstArgumentTypeInUpdate(goldenRecordType)); } if (!myMdmSettings.isSupportedMdmType(theSourceType)) { - throw new InvalidRequestException(Msg.code(756) + myMessageHelper.getMessageForUnsupportedSecondArgumentTypeInUpdate(theSourceType)); + throw new InvalidRequestException( + Msg.code(756) + myMessageHelper.getMessageForUnsupportedSecondArgumentTypeInUpdate(theSourceType)); } if (!Objects.equals(goldenRecordType, theSourceType)) { - throw new 
InvalidRequestException(Msg.code(757) + myMessageHelper.getMessageForArgumentTypeMismatchInUpdate(goldenRecordType, theSourceType)); + throw new InvalidRequestException(Msg.code(757) + + myMessageHelper.getMessageForArgumentTypeMismatchInUpdate(goldenRecordType, theSourceType)); } if (!MdmResourceUtil.isMdmManaged(theGoldenRecord)) { diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmLinkQuerySvcImplSvc.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmLinkQuerySvcImplSvc.java index cb0ba8e5159..91f6a90801e 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmLinkQuerySvcImplSvc.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmLinkQuerySvcImplSvc.java @@ -24,9 +24,9 @@ import ca.uhn.fhir.mdm.api.IMdmLink; import ca.uhn.fhir.mdm.api.IMdmLinkQuerySvc; import ca.uhn.fhir.mdm.api.MdmHistorySearchParameters; import ca.uhn.fhir.mdm.api.MdmLinkJson; -import ca.uhn.fhir.mdm.api.MdmLinkWithRevisionJson; import ca.uhn.fhir.mdm.api.MdmLinkSourceEnum; import ca.uhn.fhir.mdm.api.MdmLinkWithRevision; +import ca.uhn.fhir.mdm.api.MdmLinkWithRevisionJson; import ca.uhn.fhir.mdm.api.MdmMatchResultEnum; import ca.uhn.fhir.mdm.api.MdmQuerySearchParameters; import ca.uhn.fhir.mdm.api.paging.MdmPageRequest; @@ -54,12 +54,18 @@ public class MdmLinkQuerySvcImplSvc implements IMdmLinkQuerySvc { @Override @Deprecated @Transactional - public Page queryLinks(IIdType theGoldenResourceId, IIdType theSourceResourceId, MdmMatchResultEnum theMatchResult, MdmLinkSourceEnum theLinkSource, MdmTransactionContext theMdmContext, MdmPageRequest thePageRequest) { + public Page queryLinks( + IIdType theGoldenResourceId, + IIdType theSourceResourceId, + MdmMatchResultEnum theMatchResult, + MdmLinkSourceEnum theLinkSource, + MdmTransactionContext theMdmContext, + MdmPageRequest thePageRequest) { MdmQuerySearchParameters mdmQuerySearchParameters = new MdmQuerySearchParameters(thePageRequest) - .setGoldenResourceId(theGoldenResourceId) - .setSourceId(theSourceResourceId) - .setMatchResult(theMatchResult) - .setLinkSource(theLinkSource); + .setGoldenResourceId(theGoldenResourceId) + .setSourceId(theSourceResourceId) + .setMatchResult(theMatchResult) + .setLinkSource(theLinkSource); return queryLinks(mdmQuerySearchParameters, theMdmContext); } @@ -67,40 +73,51 @@ public class MdmLinkQuerySvcImplSvc implements IMdmLinkQuerySvc { @Override @Deprecated @Transactional - public Page queryLinks(IIdType theGoldenResourceId, IIdType theSourceResourceId, MdmMatchResultEnum theMatchResult, MdmLinkSourceEnum theLinkSource, MdmTransactionContext theMdmContext, MdmPageRequest thePageRequest, List thePartitionIds) { + public Page queryLinks( + IIdType theGoldenResourceId, + IIdType theSourceResourceId, + MdmMatchResultEnum theMatchResult, + MdmLinkSourceEnum theLinkSource, + MdmTransactionContext theMdmContext, + MdmPageRequest thePageRequest, + List thePartitionIds) { MdmQuerySearchParameters mdmQuerySearchParameters = new MdmQuerySearchParameters(thePageRequest) - .setGoldenResourceId(theGoldenResourceId) - .setSourceId(theSourceResourceId) - .setMatchResult(theMatchResult) - .setLinkSource(theLinkSource) - .setPartitionIds(thePartitionIds); + .setGoldenResourceId(theGoldenResourceId) + .setSourceId(theSourceResourceId) + .setMatchResult(theMatchResult) + .setLinkSource(theLinkSource) + .setPartitionIds(thePartitionIds); return queryLinks(mdmQuerySearchParameters, theMdmContext); } @Override @Transactional - public Page 
queryLinks(MdmQuerySearchParameters theMdmQuerySearchParameters, MdmTransactionContext theMdmContext) { + public Page queryLinks( + MdmQuerySearchParameters theMdmQuerySearchParameters, MdmTransactionContext theMdmContext) { @SuppressWarnings("unchecked") Page mdmLinks = myMdmLinkDaoSvc.executeTypedQuery(theMdmQuerySearchParameters); return mdmLinks.map(myMdmModelConverterSvc::toJson); } - @Override @Transactional - public Page getDuplicateGoldenResources(MdmTransactionContext theMdmContext, MdmPageRequest thePageRequest) { + public Page getDuplicateGoldenResources( + MdmTransactionContext theMdmContext, MdmPageRequest thePageRequest) { return getDuplicateGoldenResources(theMdmContext, thePageRequest, null, null); } @Override @Transactional - public Page getDuplicateGoldenResources(MdmTransactionContext theMdmContext, MdmPageRequest thePageRequest, - List thePartitionIds, String theRequestResourceType) { + public Page getDuplicateGoldenResources( + MdmTransactionContext theMdmContext, + MdmPageRequest thePageRequest, + List thePartitionIds, + String theRequestResourceType) { MdmQuerySearchParameters mdmQuerySearchParameters = new MdmQuerySearchParameters(thePageRequest) - .setMatchResult(MdmMatchResultEnum.POSSIBLE_DUPLICATE) - .setPartitionIds(thePartitionIds) - .setResourceType(theRequestResourceType); + .setMatchResult(MdmMatchResultEnum.POSSIBLE_DUPLICATE) + .setPartitionIds(thePartitionIds) + .setResourceType(theRequestResourceType); @SuppressWarnings("unchecked") Page mdmLinkPage = myMdmLinkDaoSvc.executeTypedQuery(mdmQuerySearchParameters); @@ -109,10 +126,11 @@ public class MdmLinkQuerySvcImplSvc implements IMdmLinkQuerySvc { @Override public List queryLinkHistory(MdmHistorySearchParameters theMdmHistorySearchParameters) { - final List>> mdmLinkHistoryFromDao = myMdmLinkDaoSvc.findMdmLinkHistory(theMdmHistorySearchParameters); + final List>> mdmLinkHistoryFromDao = + myMdmLinkDaoSvc.findMdmLinkHistory(theMdmHistorySearchParameters); return mdmLinkHistoryFromDao.stream() - .map(myMdmModelConverterSvc::toJson) - .collect(Collectors.toUnmodifiableList()); + .map(myMdmModelConverterSvc::toJson) + .collect(Collectors.toUnmodifiableList()); } } diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmLinkSvcImpl.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmLinkSvcImpl.java index 40324395d5f..3870b5d3393 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmLinkSvcImpl.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmLinkSvcImpl.java @@ -38,9 +38,9 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; -import javax.annotation.Nonnull; import java.util.List; import java.util.Optional; +import javax.annotation.Nonnull; /** * This class is in charge of managing MdmLinks between Golden Resources and source resources @@ -52,19 +52,28 @@ public class MdmLinkSvcImpl implements IMdmLinkSvc { @Autowired private MdmResourceDaoSvc myMdmResourceDaoSvc; + @Autowired private MdmLinkDaoSvc myMdmLinkDaoSvc; + @Autowired private IIdHelperService myIdHelperService; @Override @Transactional - public void updateLink(@Nonnull IAnyResource theGoldenResource, @Nonnull IAnyResource theSourceResource, MdmMatchOutcome theMatchOutcome, MdmLinkSourceEnum theLinkSource, MdmTransactionContext theMdmTransactionContext) { - if (theMatchOutcome.isPossibleDuplicate() && 
goldenResourceLinkedAsNoMatch(theGoldenResource, theSourceResource)) { - log(theMdmTransactionContext, theGoldenResource.getIdElement().toUnqualifiedVersionless() + - " is linked as NO_MATCH with " + - theSourceResource.getIdElement().toUnqualifiedVersionless() + - " not linking as POSSIBLE_DUPLICATE."); + public void updateLink( + @Nonnull IAnyResource theGoldenResource, + @Nonnull IAnyResource theSourceResource, + MdmMatchOutcome theMatchOutcome, + MdmLinkSourceEnum theLinkSource, + MdmTransactionContext theMdmTransactionContext) { + if (theMatchOutcome.isPossibleDuplicate() + && goldenResourceLinkedAsNoMatch(theGoldenResource, theSourceResource)) { + log( + theMdmTransactionContext, + theGoldenResource.getIdElement().toUnqualifiedVersionless() + " is linked as NO_MATCH with " + + theSourceResource.getIdElement().toUnqualifiedVersionless() + + " not linking as POSSIBLE_DUPLICATE."); return; } @@ -72,7 +81,8 @@ public class MdmLinkSvcImpl implements IMdmLinkSvc { validateRequestIsLegal(theGoldenResource, theSourceResource, matchResultEnum, theLinkSource); myMdmResourceDaoSvc.upsertGoldenResource(theGoldenResource, theMdmTransactionContext.getResourceType()); - IMdmLink link = createOrUpdateLinkEntity(theGoldenResource, theSourceResource, theMatchOutcome, theLinkSource, theMdmTransactionContext); + IMdmLink link = createOrUpdateLinkEntity( + theGoldenResource, theSourceResource, theMatchOutcome, theLinkSource, theMdmTransactionContext); theMdmTransactionContext.addMdmLink(link); } @@ -80,19 +90,33 @@ public class MdmLinkSvcImpl implements IMdmLinkSvc { IResourcePersistentId goldenResourceId = myIdHelperService.getPidOrThrowException(theGoldenResource); IResourcePersistentId sourceId = myIdHelperService.getPidOrThrowException(theSourceResource); // TODO perf collapse into one query - return myMdmLinkDaoSvc.getMdmLinksByGoldenResourcePidSourcePidAndMatchResult(goldenResourceId, sourceId, MdmMatchResultEnum.NO_MATCH).isPresent() || - myMdmLinkDaoSvc.getMdmLinksByGoldenResourcePidSourcePidAndMatchResult(sourceId, goldenResourceId, MdmMatchResultEnum.NO_MATCH).isPresent(); + return myMdmLinkDaoSvc + .getMdmLinksByGoldenResourcePidSourcePidAndMatchResult( + goldenResourceId, sourceId, MdmMatchResultEnum.NO_MATCH) + .isPresent() + || myMdmLinkDaoSvc + .getMdmLinksByGoldenResourcePidSourcePidAndMatchResult( + sourceId, goldenResourceId, MdmMatchResultEnum.NO_MATCH) + .isPresent(); } @Override - public void deleteLink(IAnyResource theGoldenResource, IAnyResource theSourceResource, MdmTransactionContext theMdmTransactionContext) { + public void deleteLink( + IAnyResource theGoldenResource, + IAnyResource theSourceResource, + MdmTransactionContext theMdmTransactionContext) { if (theGoldenResource == null) { return; } - Optional optionalMdmLink = getMdmLinkForGoldenResourceSourceResourcePair(theGoldenResource, theSourceResource); + Optional optionalMdmLink = + getMdmLinkForGoldenResourceSourceResourcePair(theGoldenResource, theSourceResource); if (optionalMdmLink.isPresent()) { IMdmLink mdmLink = optionalMdmLink.get(); - log(theMdmTransactionContext, "Deleting MdmLink [" + theGoldenResource.getIdElement().toVersionless() + " -> " + theSourceResource.getIdElement().toVersionless() + "] with result: " + mdmLink.getMatchResult()); + log( + theMdmTransactionContext, + "Deleting MdmLink [" + theGoldenResource.getIdElement().toVersionless() + " -> " + + theSourceResource.getIdElement().toVersionless() + "] with result: " + + mdmLink.getMatchResult()); myMdmLinkDaoSvc.deleteLink(mdmLink); 
theMdmTransactionContext.addMdmLink(mdmLink); } @@ -107,14 +131,21 @@ public class MdmLinkSvcImpl implements IMdmLinkSvc { /** * Helper function which runs various business rules about what types of requests are allowed. */ - private void validateRequestIsLegal(IAnyResource theGoldenResource, IAnyResource theResource, MdmMatchResultEnum theMatchResult, MdmLinkSourceEnum theLinkSource) { - Optional oExistingLink = getMdmLinkForGoldenResourceSourceResourcePair(theGoldenResource, theResource); + private void validateRequestIsLegal( + IAnyResource theGoldenResource, + IAnyResource theResource, + MdmMatchResultEnum theMatchResult, + MdmLinkSourceEnum theLinkSource) { + Optional oExistingLink = + getMdmLinkForGoldenResourceSourceResourcePair(theGoldenResource, theResource); if (oExistingLink.isPresent() && systemIsAttemptingToModifyManualLink(theLinkSource, oExistingLink.get())) { - throw new InternalErrorException(Msg.code(760) + "MDM system is not allowed to modify links on manually created links"); + throw new InternalErrorException( + Msg.code(760) + "MDM system is not allowed to modify links on manually created links"); } if (systemIsAttemptingToAddNoMatch(theLinkSource, theMatchResult)) { - throw new InternalErrorException(Msg.code(761) + "MDM system is not allowed to automatically NO_MATCH a resource"); + throw new InternalErrorException( + Msg.code(761) + "MDM system is not allowed to automatically NO_MATCH a resource"); } } @@ -128,23 +159,31 @@ public class MdmLinkSvcImpl implements IMdmLinkSvc { /** * Helper function to let us catch when System MDM rules are attempting to override a manually defined link. */ - private boolean systemIsAttemptingToModifyManualLink(MdmLinkSourceEnum theIncomingSource, IMdmLink theExistingSource) { + private boolean systemIsAttemptingToModifyManualLink( + MdmLinkSourceEnum theIncomingSource, IMdmLink theExistingSource) { return theIncomingSource == MdmLinkSourceEnum.AUTO && theExistingSource.isManual(); } - private Optional getMdmLinkForGoldenResourceSourceResourcePair(@Nonnull IAnyResource theGoldenResource, @Nonnull IAnyResource theCandidate) { - if (theGoldenResource.getIdElement().getIdPart() == null || theCandidate.getIdElement().getIdPart() == null) { + private Optional getMdmLinkForGoldenResourceSourceResourcePair( + @Nonnull IAnyResource theGoldenResource, @Nonnull IAnyResource theCandidate) { + if (theGoldenResource.getIdElement().getIdPart() == null + || theCandidate.getIdElement().getIdPart() == null) { return Optional.empty(); } else { return myMdmLinkDaoSvc.getLinkByGoldenResourcePidAndSourceResourcePid( - myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), theGoldenResource), - myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), theCandidate) - ); + myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), theGoldenResource), + myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), theCandidate)); } } - private IMdmLink createOrUpdateLinkEntity(IAnyResource theGoldenResource, IAnyResource theSourceResource, MdmMatchOutcome theMatchOutcome, MdmLinkSourceEnum theLinkSource, MdmTransactionContext theMdmTransactionContext) { - return myMdmLinkDaoSvc.createOrUpdateLinkEntity(theGoldenResource, theSourceResource, theMatchOutcome, theLinkSource, theMdmTransactionContext); + private IMdmLink createOrUpdateLinkEntity( + IAnyResource theGoldenResource, + IAnyResource theSourceResource, + MdmMatchOutcome theMatchOutcome, + MdmLinkSourceEnum theLinkSource, + MdmTransactionContext 
theMdmTransactionContext) { + return myMdmLinkDaoSvc.createOrUpdateLinkEntity( + theGoldenResource, theSourceResource, theMatchOutcome, theLinkSource, theMdmTransactionContext); } private void log(MdmTransactionContext theMdmTransactionContext, String theMessage) { diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmLinkUpdaterSvcImpl.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmLinkUpdaterSvcImpl.java index 972cde76c21..1302253aeec 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmLinkUpdaterSvcImpl.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmLinkUpdaterSvcImpl.java @@ -24,7 +24,6 @@ import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; import ca.uhn.fhir.jpa.mdm.dao.MdmLinkDaoSvc; -import ca.uhn.fhir.mdm.util.MdmPartitionHelper; import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId; import ca.uhn.fhir.mdm.api.IMdmLink; import ca.uhn.fhir.mdm.api.IMdmLinkUpdaterSvc; @@ -34,6 +33,7 @@ import ca.uhn.fhir.mdm.api.MdmLinkSourceEnum; import ca.uhn.fhir.mdm.api.MdmMatchResultEnum; import ca.uhn.fhir.mdm.log.Logs; import ca.uhn.fhir.mdm.model.MdmTransactionContext; +import ca.uhn.fhir.mdm.util.MdmPartitionHelper; import ca.uhn.fhir.mdm.util.MdmResourceUtil; import ca.uhn.fhir.mdm.util.MessageHelper; import ca.uhn.fhir.rest.api.Constants; @@ -55,26 +55,38 @@ public class MdmLinkUpdaterSvcImpl implements IMdmLinkUpdaterSvc { @Autowired FhirContext myFhirContext; + @Autowired IIdHelperService myIdHelperService; + @Autowired MdmLinkDaoSvc myMdmLinkDaoSvc; + @Autowired MdmResourceDaoSvc myMdmResourceDaoSvc; + @Autowired MdmMatchLinkSvc myMdmMatchLinkSvc; + @Autowired IMdmSettings myMdmSettings; + @Autowired MessageHelper myMessageHelper; + @Autowired IMdmSurvivorshipService myMdmSurvivorshipService; + @Autowired MdmPartitionHelper myMdmPartitionHelper; @Transactional @Override - public IAnyResource updateLink(IAnyResource theGoldenResource, IAnyResource theSourceResource, MdmMatchResultEnum theMatchResult, MdmTransactionContext theMdmContext) { + public IAnyResource updateLink( + IAnyResource theGoldenResource, + IAnyResource theSourceResource, + MdmMatchResultEnum theMatchResult, + MdmTransactionContext theMdmContext) { String sourceType = myFhirContext.getResourceType(theSourceResource); validateUpdateLinkRequest(theGoldenResource, theSourceResource, theMatchResult, sourceType); @@ -82,12 +94,15 @@ public class MdmLinkUpdaterSvcImpl implements IMdmLinkUpdaterSvc { IResourcePersistentId goldenResourceId = myIdHelperService.getPidOrThrowException(theGoldenResource); IResourcePersistentId sourceResourceId = myIdHelperService.getPidOrThrowException(theSourceResource); - // check if the golden resource and the source resource are in the same partition if cross partition mdm is not allowed, throw error if not + // check if the golden resource and the source resource are in the same partition if cross partition mdm is not + // allowed, throw error if not myMdmPartitionHelper.validateMdmResourcesPartitionMatches(theGoldenResource, theSourceResource); - Optional optionalMdmLink = myMdmLinkDaoSvc.getLinkByGoldenResourcePidAndSourceResourcePid(goldenResourceId, sourceResourceId); + Optional optionalMdmLink = + myMdmLinkDaoSvc.getLinkByGoldenResourcePidAndSourceResourcePid(goldenResourceId, sourceResourceId); if (optionalMdmLink.isEmpty()) { - throw new InvalidRequestException(Msg.code(738) + 
myMessageHelper.getMessageForNoLink(theGoldenResource, theSourceResource)); + throw new InvalidRequestException( + Msg.code(738) + myMessageHelper.getMessageForNoLink(theGoldenResource, theSourceResource)); } IMdmLink mdmLink = optionalMdmLink.get(); @@ -95,31 +110,44 @@ public class MdmLinkUpdaterSvcImpl implements IMdmLinkUpdaterSvc { validateNoMatchPresentWhenAcceptingPossibleMatch(theSourceResource, goldenResourceId, theMatchResult); if (mdmLink.getMatchResult() == theMatchResult) { - ourLog.warn("MDM Link for " + theGoldenResource.getIdElement().toVersionless() + ", " + theSourceResource.getIdElement().toVersionless() + " already has value " + theMatchResult + ". Nothing to do."); + ourLog.warn("MDM Link for " + theGoldenResource.getIdElement().toVersionless() + ", " + + theSourceResource.getIdElement().toVersionless() + " already has value " + theMatchResult + + ". Nothing to do."); return theGoldenResource; } - ourLog.info("Manually updating MDM Link for " + theGoldenResource.getIdElement().toVersionless() + ", " + theSourceResource.getIdElement().toVersionless() + " from " + mdmLink.getMatchResult() + " to " + theMatchResult + "."); + ourLog.info("Manually updating MDM Link for " + + theGoldenResource.getIdElement().toVersionless() + ", " + + theSourceResource.getIdElement().toVersionless() + " from " + mdmLink.getMatchResult() + " to " + + theMatchResult + "."); mdmLink.setMatchResult(theMatchResult); mdmLink.setLinkSource(MdmLinkSourceEnum.MANUAL); // Add partition for the mdm link if it doesn't exist - RequestPartitionId goldenResourcePartitionId = (RequestPartitionId) theGoldenResource.getUserData(Constants.RESOURCE_PARTITION_ID); - if (goldenResourcePartitionId != null && goldenResourcePartitionId.hasPartitionIds() && goldenResourcePartitionId.getFirstPartitionIdOrNull() != null && - (mdmLink.getPartitionId() == null || mdmLink.getPartitionId().getPartitionId() == null)) { - mdmLink.setPartitionId(new PartitionablePartitionId(goldenResourcePartitionId.getFirstPartitionIdOrNull(), goldenResourcePartitionId.getPartitionDate())); + RequestPartitionId goldenResourcePartitionId = + (RequestPartitionId) theGoldenResource.getUserData(Constants.RESOURCE_PARTITION_ID); + if (goldenResourcePartitionId != null + && goldenResourcePartitionId.hasPartitionIds() + && goldenResourcePartitionId.getFirstPartitionIdOrNull() != null + && (mdmLink.getPartitionId() == null || mdmLink.getPartitionId().getPartitionId() == null)) { + mdmLink.setPartitionId(new PartitionablePartitionId( + goldenResourcePartitionId.getFirstPartitionIdOrNull(), + goldenResourcePartitionId.getPartitionDate())); } myMdmLinkDaoSvc.save(mdmLink); if (theMatchResult == MdmMatchResultEnum.MATCH) { // only apply survivorship rules in case of a match - myMdmSurvivorshipService.applySurvivorshipRulesToGoldenResource(theSourceResource, theGoldenResource, theMdmContext); + myMdmSurvivorshipService.applySurvivorshipRulesToGoldenResource( + theSourceResource, theGoldenResource, theMdmContext); } myMdmResourceDaoSvc.upsertGoldenResource(theGoldenResource, theMdmContext.getResourceType()); if (theMatchResult == MdmMatchResultEnum.NO_MATCH) { // We need to return no match for when a Golden Resource has already been found elsewhere - if (myMdmLinkDaoSvc.getMdmLinksBySourcePidAndMatchResult(sourceResourceId, MdmMatchResultEnum.MATCH).isEmpty()) { + if (myMdmLinkDaoSvc + .getMdmLinksBySourcePidAndMatchResult(sourceResourceId, MdmMatchResultEnum.MATCH) + .isEmpty()) { // Need to find a new Golden Resource to link this target to 
myMdmMatchLinkSvc.updateMdmLinksForMdmSource(theSourceResource, theMdmContext); } @@ -131,45 +159,55 @@ public class MdmLinkUpdaterSvcImpl implements IMdmLinkUpdaterSvc { * When updating POSSIBLE_MATCH link to a MATCH we need to validate that a MATCH to a different golden resource * doesn't exist, because a resource mustn't be a MATCH to more than one golden resource */ - private void validateNoMatchPresentWhenAcceptingPossibleMatch(IAnyResource theSourceResource, - IResourcePersistentId theGoldenResourceId, MdmMatchResultEnum theMatchResult) { + private void validateNoMatchPresentWhenAcceptingPossibleMatch( + IAnyResource theSourceResource, + IResourcePersistentId theGoldenResourceId, + MdmMatchResultEnum theMatchResult) { // if theMatchResult != MATCH, we are not accepting POSSIBLE_MATCH so there is nothing to validate - if (theMatchResult != MdmMatchResultEnum.MATCH) { return; } + if (theMatchResult != MdmMatchResultEnum.MATCH) { + return; + } IResourcePersistentId sourceResourceId = myIdHelperService.getPidOrThrowException(theSourceResource); - List mdmLinks = myMdmLinkDaoSvc - .getMdmLinksBySourcePidAndMatchResult(sourceResourceId, MdmMatchResultEnum.MATCH); + List mdmLinks = + myMdmLinkDaoSvc.getMdmLinksBySourcePidAndMatchResult(sourceResourceId, MdmMatchResultEnum.MATCH); // if a link for a different golden resource exists, throw an exception for (IMdmLink mdmLink : mdmLinks) { if (mdmLink.getGoldenResourcePersistenceId() != theGoldenResourceId) { - IAnyResource existingGolden = myMdmResourceDaoSvc.readGoldenResourceByPid(mdmLink.getGoldenResourcePersistenceId(), mdmLink.getMdmSourceType()); - throw new InvalidRequestException(Msg.code(2218) + - myMessageHelper.getMessageForAlreadyAcceptedLink(existingGolden, theSourceResource)); + IAnyResource existingGolden = myMdmResourceDaoSvc.readGoldenResourceByPid( + mdmLink.getGoldenResourcePersistenceId(), mdmLink.getMdmSourceType()); + throw new InvalidRequestException(Msg.code(2218) + + myMessageHelper.getMessageForAlreadyAcceptedLink(existingGolden, theSourceResource)); } } } - - private void validateUpdateLinkRequest(IAnyResource theGoldenRecord, IAnyResource theSourceResource, MdmMatchResultEnum theMatchResult, String theSourceType) { + private void validateUpdateLinkRequest( + IAnyResource theGoldenRecord, + IAnyResource theSourceResource, + MdmMatchResultEnum theMatchResult, + String theSourceType) { String goldenRecordType = myFhirContext.getResourceType(theGoldenRecord); - if (theMatchResult != MdmMatchResultEnum.NO_MATCH && - theMatchResult != MdmMatchResultEnum.MATCH) { + if (theMatchResult != MdmMatchResultEnum.NO_MATCH && theMatchResult != MdmMatchResultEnum.MATCH) { throw new InvalidRequestException(Msg.code(739) + myMessageHelper.getMessageForUnsupportedMatchResult()); } if (!myMdmSettings.isSupportedMdmType(goldenRecordType)) { - throw new InvalidRequestException(Msg.code(740) + myMessageHelper.getMessageForUnsupportedFirstArgumentTypeInUpdate(goldenRecordType)); + throw new InvalidRequestException(Msg.code(740) + + myMessageHelper.getMessageForUnsupportedFirstArgumentTypeInUpdate(goldenRecordType)); } if (!myMdmSettings.isSupportedMdmType(theSourceType)) { - throw new InvalidRequestException(Msg.code(741) + myMessageHelper.getMessageForUnsupportedSecondArgumentTypeInUpdate(theSourceType)); + throw new InvalidRequestException( + Msg.code(741) + myMessageHelper.getMessageForUnsupportedSecondArgumentTypeInUpdate(theSourceType)); } if (!Objects.equals(goldenRecordType, theSourceType)) { - throw new 
InvalidRequestException(Msg.code(742) + myMessageHelper.getMessageForArgumentTypeMismatchInUpdate(goldenRecordType, theSourceType)); + throw new InvalidRequestException(Msg.code(742) + + myMessageHelper.getMessageForArgumentTypeMismatchInUpdate(goldenRecordType, theSourceType)); } if (!MdmResourceUtil.isMdmManaged(theGoldenRecord)) { @@ -183,20 +221,27 @@ public class MdmLinkUpdaterSvcImpl implements IMdmLinkUpdaterSvc { @Transactional @Override - public void notDuplicateGoldenResource(IAnyResource theGoldenResource, IAnyResource theTargetGoldenResource, MdmTransactionContext theMdmContext) { + public void notDuplicateGoldenResource( + IAnyResource theGoldenResource, IAnyResource theTargetGoldenResource, MdmTransactionContext theMdmContext) { validateNotDuplicateGoldenResourceRequest(theGoldenResource, theTargetGoldenResource); IResourcePersistentId goldenResourceId = myIdHelperService.getPidOrThrowException(theGoldenResource); IResourcePersistentId targetId = myIdHelperService.getPidOrThrowException(theTargetGoldenResource); - Optional oMdmLink = myMdmLinkDaoSvc.getLinkByGoldenResourcePidAndSourceResourcePid(goldenResourceId, targetId); + Optional oMdmLink = + myMdmLinkDaoSvc.getLinkByGoldenResourcePidAndSourceResourcePid(goldenResourceId, targetId); if (oMdmLink.isEmpty()) { - throw new InvalidRequestException(Msg.code(745) + "No link exists between " + theGoldenResource.getIdElement().toVersionless() + " and " + theTargetGoldenResource.getIdElement().toVersionless()); + throw new InvalidRequestException(Msg.code(745) + "No link exists between " + + theGoldenResource.getIdElement().toVersionless() + " and " + + theTargetGoldenResource.getIdElement().toVersionless()); } IMdmLink mdmLink = oMdmLink.get(); if (!mdmLink.isPossibleDuplicate()) { - throw new InvalidRequestException(Msg.code(746) + theGoldenResource.getIdElement().toVersionless() + " and " + theTargetGoldenResource.getIdElement().toVersionless() + " are not linked as POSSIBLE_DUPLICATE."); + throw new InvalidRequestException( + Msg.code(746) + theGoldenResource.getIdElement().toVersionless() + " and " + + theTargetGoldenResource.getIdElement().toVersionless() + + " are not linked as POSSIBLE_DUPLICATE."); } mdmLink.setMatchResult(MdmMatchResultEnum.NO_MATCH); mdmLink.setLinkSource(MdmLinkSourceEnum.MANUAL); @@ -210,11 +255,15 @@ public class MdmLinkUpdaterSvcImpl implements IMdmLinkUpdaterSvc { String goldenResourceType = myFhirContext.getResourceType(theGoldenResource); String targetType = myFhirContext.getResourceType(theTarget); if (!goldenResourceType.equalsIgnoreCase(targetType)) { - throw new InvalidRequestException(Msg.code(747) + "First argument to " + ProviderConstants.MDM_UPDATE_LINK + " must be the same resource type as the second argument. Was " + goldenResourceType + "/" + targetType); + throw new InvalidRequestException(Msg.code(747) + "First argument to " + ProviderConstants.MDM_UPDATE_LINK + + " must be the same resource type as the second argument. Was " + goldenResourceType + "/" + + targetType); } if (!MdmResourceUtil.isMdmManaged(theGoldenResource) || !MdmResourceUtil.isMdmManaged(theTarget)) { - throw new InvalidRequestException(Msg.code(748) + "Only MDM Managed Golden Resources may be updated via this operation. The resource provided is not tagged as managed by HAPI-MDM"); + throw new InvalidRequestException( + Msg.code(748) + + "Only MDM Managed Golden Resources may be updated via this operation. 
The resource provided is not tagged as managed by HAPI-MDM"); } } } diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmMatchFinderSvcImpl.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmMatchFinderSvcImpl.java index 30fe27dccb5..0d3ee482998 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmMatchFinderSvcImpl.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmMatchFinderSvcImpl.java @@ -29,12 +29,12 @@ import org.hl7.fhir.instance.model.api.IAnyResource; import org.slf4j.Logger; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; - -import javax.annotation.Nonnull; import org.springframework.transaction.annotation.Transactional; + import java.util.Collection; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import static ca.uhn.fhir.jpa.mdm.svc.candidate.CandidateSearcher.idOrType; @@ -45,21 +45,24 @@ public class MdmMatchFinderSvcImpl implements IMdmMatchFinderSvc { @Autowired private MdmCandidateSearchSvc myMdmCandidateSearchSvc; + @Autowired private MdmResourceMatcherSvc myMdmResourceMatcherSvc; @Override @Nonnull @Transactional - public List getMatchedTargets(String theResourceType, IAnyResource theResource, RequestPartitionId theRequestPartitionId) { - Collection targetCandidates = myMdmCandidateSearchSvc.findCandidates(theResourceType, theResource, theRequestPartitionId); + public List getMatchedTargets( + String theResourceType, IAnyResource theResource, RequestPartitionId theRequestPartitionId) { + Collection targetCandidates = + myMdmCandidateSearchSvc.findCandidates(theResourceType, theResource, theRequestPartitionId); List matches = targetCandidates.stream() - .map(candidate -> new MatchedTarget(candidate, myMdmResourceMatcherSvc.getMatchResult(theResource, candidate))) - .collect(Collectors.toList()); + .map(candidate -> + new MatchedTarget(candidate, myMdmResourceMatcherSvc.getMatchResult(theResource, candidate))) + .collect(Collectors.toList()); ourLog.trace("Found {} matched targets for {}.", matches.size(), idOrType(theResource, theResourceType)); return matches; } - } diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmMatchLinkSvc.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmMatchLinkSvc.java index b70155ce793..a1db8b8234e 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmMatchLinkSvc.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmMatchLinkSvc.java @@ -53,10 +53,13 @@ public class MdmMatchLinkSvc { @Autowired private IMdmLinkSvc myMdmLinkSvc; + @Autowired private MdmGoldenResourceFindingSvc myMdmGoldenResourceFindingSvc; + @Autowired private GoldenResourceHelper myGoldenResourceHelper; + @Autowired private MdmEidUpdateService myEidUpdateService; @@ -70,7 +73,8 @@ public class MdmMatchLinkSvc { * @return an {@link TransactionLogMessages} which contains all informational messages related to MDM processing of this resource. 
*/ @Transactional - public MdmTransactionContext updateMdmLinksForMdmSource(IAnyResource theResource, MdmTransactionContext theMdmTransactionContext) { + public MdmTransactionContext updateMdmLinksForMdmSource( + IAnyResource theResource, MdmTransactionContext theMdmTransactionContext) { if (MdmResourceUtil.isMdmAllowed(theResource)) { return doMdmUpdate(theResource, theMdmTransactionContext); } else { @@ -78,7 +82,8 @@ public class MdmMatchLinkSvc { } } - private MdmTransactionContext doMdmUpdate(IAnyResource theResource, MdmTransactionContext theMdmTransactionContext) { + private MdmTransactionContext doMdmUpdate( + IAnyResource theResource, MdmTransactionContext theMdmTransactionContext) { CandidateList candidateList = myMdmGoldenResourceFindingSvc.findGoldenResourceCandidates(theResource); if (candidateList.isEmpty()) { @@ -91,47 +96,62 @@ public class MdmMatchLinkSvc { return theMdmTransactionContext; } - private void handleMdmWithMultipleCandidates(IAnyResource theResource, CandidateList theCandidateList, MdmTransactionContext theMdmTransactionContext) { + private void handleMdmWithMultipleCandidates( + IAnyResource theResource, CandidateList theCandidateList, MdmTransactionContext theMdmTransactionContext) { MatchedGoldenResourceCandidate firstMatch = theCandidateList.getFirstMatch(); IResourcePersistentId sampleGoldenResourcePid = firstMatch.getCandidateGoldenResourcePid(); boolean allSameGoldenResource = theCandidateList.stream() - .allMatch(candidate -> candidate.getCandidateGoldenResourcePid().equals(sampleGoldenResourcePid)); + .allMatch(candidate -> candidate.getCandidateGoldenResourcePid().equals(sampleGoldenResourcePid)); if (allSameGoldenResource) { - log(theMdmTransactionContext, "MDM received multiple match candidates, but they are all linked to the same Golden Resource."); + log( + theMdmTransactionContext, + "MDM received multiple match candidates, but they are all linked to the same Golden Resource."); handleMdmWithSingleCandidate(theResource, firstMatch, theMdmTransactionContext); } else { - log(theMdmTransactionContext, "MDM received multiple match candidates, that were linked to different Golden Resources. Setting POSSIBLE_DUPLICATES and POSSIBLE_MATCHES."); + log( + theMdmTransactionContext, + "MDM received multiple match candidates, that were linked to different Golden Resources. Setting POSSIBLE_DUPLICATES and POSSIBLE_MATCHES."); - //Set them all as POSSIBLE_MATCH - List goldenResources = createPossibleMatches(theResource, theCandidateList, theMdmTransactionContext); + // Set them all as POSSIBLE_MATCH + List goldenResources = + createPossibleMatches(theResource, theCandidateList, theMdmTransactionContext); - //Set all GoldenResources as POSSIBLE_DUPLICATE of the last GoldenResource. + // Set all GoldenResources as POSSIBLE_DUPLICATE of the last GoldenResource. 
IAnyResource firstGoldenResource = goldenResources.get(0); - goldenResources.subList(1, goldenResources.size()) - .forEach(possibleDuplicateGoldenResource -> { - MdmMatchOutcome outcome = MdmMatchOutcome.POSSIBLE_DUPLICATE; - outcome.setEidMatch(theCandidateList.isEidMatch()); - myMdmLinkSvc.updateLink(firstGoldenResource, possibleDuplicateGoldenResource, outcome, MdmLinkSourceEnum.AUTO, theMdmTransactionContext); - }); + goldenResources.subList(1, goldenResources.size()).forEach(possibleDuplicateGoldenResource -> { + MdmMatchOutcome outcome = MdmMatchOutcome.POSSIBLE_DUPLICATE; + outcome.setEidMatch(theCandidateList.isEidMatch()); + myMdmLinkSvc.updateLink( + firstGoldenResource, + possibleDuplicateGoldenResource, + outcome, + MdmLinkSourceEnum.AUTO, + theMdmTransactionContext); + }); } } - private List createPossibleMatches(IAnyResource theResource, CandidateList theCandidateList, MdmTransactionContext theMdmTransactionContext) { + private List createPossibleMatches( + IAnyResource theResource, CandidateList theCandidateList, MdmTransactionContext theMdmTransactionContext) { List goldenResources = new ArrayList<>(); for (MatchedGoldenResourceCandidate matchedGoldenResourceCandidate : theCandidateList.getCandidates()) { - IAnyResource goldenResource = myMdmGoldenResourceFindingSvc - .getGoldenResourceFromMatchedGoldenResourceCandidate(matchedGoldenResourceCandidate, theMdmTransactionContext.getResourceType()); + IAnyResource goldenResource = + myMdmGoldenResourceFindingSvc.getGoldenResourceFromMatchedGoldenResourceCandidate( + matchedGoldenResourceCandidate, theMdmTransactionContext.getResourceType()); - MdmMatchOutcome outcome = new MdmMatchOutcome(matchedGoldenResourceCandidate.getMatchResult().getVector(), - matchedGoldenResourceCandidate.getMatchResult().getScore()) - .setMdmRuleCount( matchedGoldenResourceCandidate.getMatchResult().getMdmRuleCount()); + MdmMatchOutcome outcome = new MdmMatchOutcome( + matchedGoldenResourceCandidate.getMatchResult().getVector(), + matchedGoldenResourceCandidate.getMatchResult().getScore()) + .setMdmRuleCount( + matchedGoldenResourceCandidate.getMatchResult().getMdmRuleCount()); outcome.setMatchResultEnum(MdmMatchResultEnum.POSSIBLE_MATCH); outcome.setEidMatch(theCandidateList.isEidMatch()); - myMdmLinkSvc.updateLink(goldenResource, theResource, outcome, MdmLinkSourceEnum.AUTO, theMdmTransactionContext); + myMdmLinkSvc.updateLink( + goldenResource, theResource, outcome, MdmLinkSourceEnum.AUTO, theMdmTransactionContext); goldenResources.add(goldenResource); } @@ -139,38 +159,75 @@ public class MdmMatchLinkSvc { } private void handleMdmWithNoCandidates(IAnyResource theResource, MdmTransactionContext theMdmTransactionContext) { - log(theMdmTransactionContext, String.format("There were no matched candidates for MDM, creating a new %s Golden Resource.", theResource.getIdElement().getResourceType())); - IAnyResource newGoldenResource = myGoldenResourceHelper.createGoldenResourceFromMdmSourceResource(theResource, theMdmTransactionContext); + log( + theMdmTransactionContext, + String.format( + "There were no matched candidates for MDM, creating a new %s Golden Resource.", + theResource.getIdElement().getResourceType())); + IAnyResource newGoldenResource = + myGoldenResourceHelper.createGoldenResourceFromMdmSourceResource(theResource, theMdmTransactionContext); // TODO GGG :) // 1. Get the right helper // 2. Create source resource for the MDM source // 3. 
UPDATE MDM LINK TABLE - myMdmLinkSvc.updateLink(newGoldenResource, theResource, MdmMatchOutcome.NEW_GOLDEN_RESOURCE_MATCH, MdmLinkSourceEnum.AUTO, theMdmTransactionContext); + myMdmLinkSvc.updateLink( + newGoldenResource, + theResource, + MdmMatchOutcome.NEW_GOLDEN_RESOURCE_MATCH, + MdmLinkSourceEnum.AUTO, + theMdmTransactionContext); } - private void handleMdmCreate(IAnyResource theTargetResource, MatchedGoldenResourceCandidate theGoldenResourceCandidate, MdmTransactionContext theMdmTransactionContext) { - IAnyResource goldenResource = myMdmGoldenResourceFindingSvc.getGoldenResourceFromMatchedGoldenResourceCandidate(theGoldenResourceCandidate, theMdmTransactionContext.getResourceType()); + private void handleMdmCreate( + IAnyResource theTargetResource, + MatchedGoldenResourceCandidate theGoldenResourceCandidate, + MdmTransactionContext theMdmTransactionContext) { + IAnyResource goldenResource = myMdmGoldenResourceFindingSvc.getGoldenResourceFromMatchedGoldenResourceCandidate( + theGoldenResourceCandidate, theMdmTransactionContext.getResourceType()); if (myGoldenResourceHelper.isPotentialDuplicate(goldenResource, theTargetResource)) { - log(theMdmTransactionContext, "Duplicate detected based on the fact that both resources have different external EIDs."); - IAnyResource newGoldenResource = myGoldenResourceHelper.createGoldenResourceFromMdmSourceResource(theTargetResource, theMdmTransactionContext); + log( + theMdmTransactionContext, + "Duplicate detected based on the fact that both resources have different external EIDs."); + IAnyResource newGoldenResource = myGoldenResourceHelper.createGoldenResourceFromMdmSourceResource( + theTargetResource, theMdmTransactionContext); - myMdmLinkSvc.updateLink(newGoldenResource, theTargetResource, MdmMatchOutcome.NEW_GOLDEN_RESOURCE_MATCH, MdmLinkSourceEnum.AUTO, theMdmTransactionContext); - myMdmLinkSvc.updateLink(newGoldenResource, goldenResource, MdmMatchOutcome.POSSIBLE_DUPLICATE, MdmLinkSourceEnum.AUTO, theMdmTransactionContext); + myMdmLinkSvc.updateLink( + newGoldenResource, + theTargetResource, + MdmMatchOutcome.NEW_GOLDEN_RESOURCE_MATCH, + MdmLinkSourceEnum.AUTO, + theMdmTransactionContext); + myMdmLinkSvc.updateLink( + newGoldenResource, + goldenResource, + MdmMatchOutcome.POSSIBLE_DUPLICATE, + MdmLinkSourceEnum.AUTO, + theMdmTransactionContext); } else { log(theMdmTransactionContext, "MDM has narrowed down to one candidate for matching."); if (theGoldenResourceCandidate.isMatch()) { - myGoldenResourceHelper.handleExternalEidAddition(goldenResource, theTargetResource, theMdmTransactionContext); - myEidUpdateService.applySurvivorshipRulesAndSaveGoldenResource(theTargetResource, goldenResource, theMdmTransactionContext); + myGoldenResourceHelper.handleExternalEidAddition( + goldenResource, theTargetResource, theMdmTransactionContext); + myEidUpdateService.applySurvivorshipRulesAndSaveGoldenResource( + theTargetResource, goldenResource, theMdmTransactionContext); } - myMdmLinkSvc.updateLink(goldenResource, theTargetResource, theGoldenResourceCandidate.getMatchResult(), MdmLinkSourceEnum.AUTO, theMdmTransactionContext); + myMdmLinkSvc.updateLink( + goldenResource, + theTargetResource, + theGoldenResourceCandidate.getMatchResult(), + MdmLinkSourceEnum.AUTO, + theMdmTransactionContext); } } - private void handleMdmWithSingleCandidate(IAnyResource theResource, MatchedGoldenResourceCandidate theGoldenResourceCandidate, MdmTransactionContext theMdmTransactionContext) { + private void handleMdmWithSingleCandidate( + IAnyResource theResource, + 
MatchedGoldenResourceCandidate theGoldenResourceCandidate, + MdmTransactionContext theMdmTransactionContext) { if (theMdmTransactionContext.getRestOperation().equals(MdmTransactionContext.OperationType.UPDATE_RESOURCE)) { log(theMdmTransactionContext, "MDM has narrowed down to one candidate for matching."); myEidUpdateService.handleMdmUpdate(theResource, theGoldenResourceCandidate, theMdmTransactionContext); diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmModelConverterSvcImpl.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmModelConverterSvcImpl.java index 6d0b0a0ac42..2124f814f2e 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmModelConverterSvcImpl.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmModelConverterSvcImpl.java @@ -22,8 +22,8 @@ package ca.uhn.fhir.jpa.mdm.svc; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; import ca.uhn.fhir.mdm.api.IMdmLink; import ca.uhn.fhir.mdm.api.MdmLinkJson; -import ca.uhn.fhir.mdm.api.MdmLinkWithRevisionJson; import ca.uhn.fhir.mdm.api.MdmLinkWithRevision; +import ca.uhn.fhir.mdm.api.MdmLinkWithRevisionJson; import org.springframework.beans.factory.annotation.Autowired; public class MdmModelConverterSvcImpl implements IMdmModelConverterSvc { @@ -34,9 +34,15 @@ public class MdmModelConverterSvcImpl implements IMdmModelConverterSvc { @Override public MdmLinkJson toJson(IMdmLink theLink) { MdmLinkJson retVal = new MdmLinkJson(); - String sourceId = myIdHelperService.resourceIdFromPidOrThrowException(theLink.getSourcePersistenceId(), theLink.getMdmSourceType()).toVersionless().getValue(); + String sourceId = myIdHelperService + .resourceIdFromPidOrThrowException(theLink.getSourcePersistenceId(), theLink.getMdmSourceType()) + .toVersionless() + .getValue(); retVal.setSourceId(sourceId); - String goldenResourceId = myIdHelperService.resourceIdFromPidOrThrowException(theLink.getGoldenResourcePersistenceId(), theLink.getMdmSourceType()).toVersionless().getValue(); + String goldenResourceId = myIdHelperService + .resourceIdFromPidOrThrowException(theLink.getGoldenResourcePersistenceId(), theLink.getMdmSourceType()) + .toVersionless() + .getValue(); retVal.setGoldenResourceId(goldenResourceId); retVal.setCreated(theLink.getCreated()); retVal.setEidMatch(theLink.getEidMatch()); @@ -55,6 +61,9 @@ public class MdmModelConverterSvcImpl implements IMdmModelConverterSvc { public MdmLinkWithRevisionJson toJson(MdmLinkWithRevision> theMdmLinkRevision) { final MdmLinkJson mdmLinkJson = toJson(theMdmLinkRevision.getMdmLink()); - return new MdmLinkWithRevisionJson(mdmLinkJson, theMdmLinkRevision.getEnversRevision().getRevisionNumber(), theMdmLinkRevision.getEnversRevision().getRevisionTimestamp()); + return new MdmLinkWithRevisionJson( + mdmLinkJson, + theMdmLinkRevision.getEnversRevision().getRevisionNumber(), + theMdmLinkRevision.getEnversRevision().getRevisionTimestamp()); } } diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmResourceDaoSvc.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmResourceDaoSvc.java index 2f7811111e9..90d581ca649 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmResourceDaoSvc.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmResourceDaoSvc.java @@ -25,13 +25,13 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; import 
ca.uhn.fhir.jpa.model.entity.TagTypeEnum; -import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.mdm.api.IMdmSettings; import ca.uhn.fhir.mdm.api.MdmConstants; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; import ca.uhn.fhir.rest.param.TokenParam; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; @@ -40,9 +40,9 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; -import javax.annotation.Nonnull; import java.util.List; import java.util.Optional; +import javax.annotation.Nonnull; @Service public class MdmResourceDaoSvc { @@ -51,12 +51,14 @@ public class MdmResourceDaoSvc { @Autowired DaoRegistry myDaoRegistry; + @Autowired IMdmSettings myMdmSettings; public DaoMethodOutcome upsertGoldenResource(IAnyResource theGoldenResource, String theResourceType) { IFhirResourceDao resourceDao = myDaoRegistry.getResourceDao(theResourceType); - RequestDetails requestDetails = new SystemRequestDetails().setRequestPartitionId((RequestPartitionId) theGoldenResource.getUserData(Constants.RESOURCE_PARTITION_ID)); + RequestDetails requestDetails = new SystemRequestDetails().setRequestPartitionId((RequestPartitionId) + theGoldenResource.getUserData(Constants.RESOURCE_PARTITION_ID)); if (theGoldenResource.getIdElement().hasIdPart()) { return resourceDao.update(theGoldenResource, requestDetails); } else { @@ -72,8 +74,14 @@ public class MdmResourceDaoSvc { */ public void removeGoldenResourceTag(IAnyResource theGoldenResource, String theResourcetype) { IFhirResourceDao resourceDao = myDaoRegistry.getResourceDao(theResourcetype); - RequestDetails requestDetails = new SystemRequestDetails().setRequestPartitionId((RequestPartitionId) theGoldenResource.getUserData(Constants.RESOURCE_PARTITION_ID)); - resourceDao.removeTag(theGoldenResource.getIdElement(), TagTypeEnum.TAG, MdmConstants.SYSTEM_GOLDEN_RECORD_STATUS, MdmConstants.CODE_GOLDEN_RECORD, requestDetails); + RequestDetails requestDetails = new SystemRequestDetails().setRequestPartitionId((RequestPartitionId) + theGoldenResource.getUserData(Constants.RESOURCE_PARTITION_ID)); + resourceDao.removeTag( + theGoldenResource.getIdElement(), + TagTypeEnum.TAG, + MdmConstants.SYSTEM_GOLDEN_RECORD_STATUS, + MdmConstants.CODE_GOLDEN_RECORD, + requestDetails); } public IAnyResource readGoldenResourceByPid(IResourcePersistentId theGoldenResourcePid, String theResourceType) { @@ -85,7 +93,8 @@ public class MdmResourceDaoSvc { return this.searchGoldenResourceByEID(theEid, theResourceType, null); } - public Optional searchGoldenResourceByEID(String theEid, String theResourceType, RequestPartitionId thePartitionId) { + public Optional searchGoldenResourceByEID( + String theEid, String theResourceType, RequestPartitionId thePartitionId) { SearchParameterMap map = buildEidSearchParameterMap(theEid, theResourceType); IFhirResourceDao resourceDao = myDaoRegistry.getResourceDao(theResourceType); @@ -97,15 +106,14 @@ public class MdmResourceDaoSvc { if (resources.isEmpty()) { return Optional.empty(); } else if (resources.size() > 1) { - throw new InternalErrorException(Msg.code(737) + "Found more than one active " + - MdmConstants.CODE_HAPI_MDM_MANAGED + - " Golden 
Resource with EID " + - theEid + - ": " + - resources.get(0).getIdElement().getValue() + - ", " + - resources.get(1).getIdElement().getValue() - ); + throw new InternalErrorException( + Msg.code(737) + "Found more than one active " + MdmConstants.CODE_HAPI_MDM_MANAGED + + " Golden Resource with EID " + + theEid + + ": " + + resources.get(0).getIdElement().getValue() + + ", " + + resources.get(1).getIdElement().getValue()); } else { return Optional.of((IAnyResource) resources.get(0)); } @@ -115,7 +123,10 @@ public class MdmResourceDaoSvc { private SearchParameterMap buildEidSearchParameterMap(String theEid, String theResourceType) { SearchParameterMap map = new SearchParameterMap(); map.setLoadSynchronous(true); - map.add("identifier", new TokenParam(myMdmSettings.getMdmRules().getEnterpriseEIDSystemForResourceType(theResourceType), theEid)); + map.add( + "identifier", + new TokenParam( + myMdmSettings.getMdmRules().getEnterpriseEIDSystemForResourceType(theResourceType), theEid)); map.add("_tag", new TokenParam(MdmConstants.SYSTEM_GOLDEN_RECORD_STATUS, MdmConstants.CODE_GOLDEN_RECORD)); return map; } diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmResourceFilteringSvc.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmResourceFilteringSvc.java index 33c0a68bfba..d9d3523c34d 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmResourceFilteringSvc.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmResourceFilteringSvc.java @@ -38,8 +38,10 @@ public class MdmResourceFilteringSvc { @Autowired private IMdmSettings myMdmSettings; + @Autowired MdmSearchParamSvc myMdmSearchParamSvc; + @Autowired FhirContext myFhirContext; @@ -62,17 +64,19 @@ public class MdmResourceFilteringSvc { } String resourceType = myFhirContext.getResourceType(theResource); - List candidateSearchParams = myMdmSettings.getMdmRules().getCandidateSearchParams(); + List candidateSearchParams = + myMdmSettings.getMdmRules().getCandidateSearchParams(); if (candidateSearchParams.isEmpty()) { return true; } boolean containsValueForSomeSearchParam = candidateSearchParams.stream() - .filter(csp -> myMdmSearchParamSvc.searchParamTypeIsValidForResourceType(csp.getResourceType(), resourceType)) - .flatMap(csp -> csp.getSearchParams().stream()) - .map(searchParam -> myMdmSearchParamSvc.getValueFromResourceForSearchParam(theResource, searchParam)) - .anyMatch(valueList -> !valueList.isEmpty()); + .filter(csp -> + myMdmSearchParamSvc.searchParamTypeIsValidForResourceType(csp.getResourceType(), resourceType)) + .flatMap(csp -> csp.getSearchParams().stream()) + .map(searchParam -> myMdmSearchParamSvc.getValueFromResourceForSearchParam(theResource, searchParam)) + .anyMatch(valueList -> !valueList.isEmpty()); ourLog.trace("Is {} suitable for MDM processing? 
: {}", theResource.getId(), containsValueForSomeSearchParam); return containsValueForSomeSearchParam; diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmSurvivorshipSvcImpl.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmSurvivorshipSvcImpl.java index 4f04a51d466..e1624d7999e 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmSurvivorshipSvcImpl.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmSurvivorshipSvcImpl.java @@ -43,13 +43,22 @@ public class MdmSurvivorshipSvcImpl implements IMdmSurvivorshipService { * @param */ @Override - public void applySurvivorshipRulesToGoldenResource(T theTargetResource, T theGoldenResource, MdmTransactionContext theMdmTransactionContext) { + public void applySurvivorshipRulesToGoldenResource( + T theTargetResource, T theGoldenResource, MdmTransactionContext theMdmTransactionContext) { switch (theMdmTransactionContext.getRestOperation()) { case MERGE_GOLDEN_RESOURCES: - TerserUtil.mergeFields(myFhirContext, (IBaseResource) theTargetResource, (IBaseResource) theGoldenResource, TerserUtil.EXCLUDE_IDS_AND_META); + TerserUtil.mergeFields( + myFhirContext, + (IBaseResource) theTargetResource, + (IBaseResource) theGoldenResource, + TerserUtil.EXCLUDE_IDS_AND_META); break; default: - TerserUtil.replaceFields(myFhirContext, (IBaseResource) theTargetResource, (IBaseResource) theGoldenResource, TerserUtil.EXCLUDE_IDS_AND_META); + TerserUtil.replaceFields( + myFhirContext, + (IBaseResource) theTargetResource, + (IBaseResource) theGoldenResource, + TerserUtil.EXCLUDE_IDS_AND_META); break; } } diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/BaseCandidateFinder.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/BaseCandidateFinder.java index 52c14beb005..be27dd2f758 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/BaseCandidateFinder.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/BaseCandidateFinder.java @@ -29,6 +29,7 @@ import java.util.List; public abstract class BaseCandidateFinder { @Autowired IIdHelperService myIdHelperService; + @Autowired MdmLinkDaoSvc myMdmLinkDaoSvc; diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/CandidateList.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/CandidateList.java index 5357af0304c..5ee2d019531 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/CandidateList.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/CandidateList.java @@ -40,7 +40,9 @@ public class CandidateList { return myList.isEmpty(); } - public void addAll(List theList) { myList.addAll(theList); } + public void addAll(List theList) { + myList.addAll(theList); + } public MatchedGoldenResourceCandidate getOnlyMatch() { assert myList.size() == 1; @@ -48,7 +50,7 @@ public class CandidateList { } public boolean exactlyOneMatch() { - return myList.size()== 1; + return myList.size() == 1; } public Stream stream() { @@ -63,11 +65,11 @@ public class CandidateList { return myList.get(0); } - public boolean isEidMatch() { + public boolean isEidMatch() { return myStrategy.isEidMatch(); - } + } - public int size() { + public int size() { return myList.size(); - } + } } diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/CandidateSearcher.java 
b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/CandidateSearcher.java index feff31940b5..999483f6c01 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/CandidateSearcher.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/CandidateSearcher.java @@ -41,7 +41,8 @@ public class CandidateSearcher { private final MdmSearchParamSvc myMdmSearchParamSvc; @Autowired - public CandidateSearcher(DaoRegistry theDaoRegistry, IMdmSettings theMdmSettings, MdmSearchParamSvc theMdmSearchParamSvc) { + public CandidateSearcher( + DaoRegistry theDaoRegistry, IMdmSettings theMdmSettings, MdmSearchParamSvc theMdmSearchParamSvc) { myDaoRegistry = theDaoRegistry; myMdmSettings = theMdmSettings; myMdmSearchParamSvc = theMdmSearchParamSvc; @@ -56,8 +57,10 @@ public class CandidateSearcher { * @return Optional.empty() if >= IMdmSettings.getCandidateSearchLimit() candidates are found, otherwise * return the bundle provider for the search results. */ - public Optional search(String theResourceType, String theResourceCriteria, RequestPartitionId partitionId) { - SearchParameterMap searchParameterMap = myMdmSearchParamSvc.mapFromCriteria(theResourceType, theResourceCriteria); + public Optional search( + String theResourceType, String theResourceCriteria, RequestPartitionId partitionId) { + SearchParameterMap searchParameterMap = + myMdmSearchParamSvc.mapFromCriteria(theResourceType, theResourceCriteria); searchParameterMap.setLoadSynchronousUpTo(myMdmSettings.getCandidateSearchLimit()); diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/CandidateStrategyEnum.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/CandidateStrategyEnum.java index 4337fc62b73..6b83b59a418 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/CandidateStrategyEnum.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/CandidateStrategyEnum.java @@ -27,7 +27,7 @@ public enum CandidateStrategyEnum { /** Find Golden Resource candidates based on other sources that match the incoming source using the MDM Matching rules */ SCORE; - public boolean isEidMatch() { - return this == EID; - } + public boolean isEidMatch() { + return this == EID; + } } diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/FindCandidateByEidSvc.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/FindCandidateByEidSvc.java index f7107ba9085..2ab9e19ace0 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/FindCandidateByEidSvc.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/FindCandidateByEidSvc.java @@ -22,12 +22,12 @@ package ca.uhn.fhir.jpa.mdm.svc.candidate; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.mdm.dao.MdmLinkDaoSvc; import ca.uhn.fhir.jpa.mdm.svc.MdmResourceDaoSvc; -import ca.uhn.fhir.mdm.util.MdmPartitionHelper; import ca.uhn.fhir.mdm.api.IMdmLink; import ca.uhn.fhir.mdm.api.MdmMatchOutcome; import ca.uhn.fhir.mdm.log.Logs; import ca.uhn.fhir.mdm.model.CanonicalEID; import ca.uhn.fhir.mdm.util.EIDHelper; +import ca.uhn.fhir.mdm.util.MdmPartitionHelper; import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; import org.hl7.fhir.instance.model.api.IAnyResource; import org.slf4j.Logger; @@ -45,10 +45,13 @@ public class FindCandidateByEidSvc extends BaseCandidateFinder { @Autowired private 
EIDHelper myEIDHelper; + @Autowired private MdmResourceDaoSvc myMdmResourceDaoSvc; + @Autowired private MdmLinkDaoSvc myMdmLinkDaoSvc; + @Autowired MdmPartitionHelper myMdmPartitionHelper; @@ -59,16 +62,25 @@ public class FindCandidateByEidSvc extends BaseCandidateFinder { List eidFromResource = myEIDHelper.getExternalEid(theIncomingResource); if (!eidFromResource.isEmpty()) { for (CanonicalEID eid : eidFromResource) { - Optional oFoundGoldenResource = myMdmResourceDaoSvc.searchGoldenResourceByEID(eid.getValue(), theIncomingResource.getIdElement().getResourceType(), myMdmPartitionHelper.getRequestPartitionIdFromResourceForSearch(theIncomingResource)); + Optional oFoundGoldenResource = myMdmResourceDaoSvc.searchGoldenResourceByEID( + eid.getValue(), + theIncomingResource.getIdElement().getResourceType(), + myMdmPartitionHelper.getRequestPartitionIdFromResourceForSearch(theIncomingResource)); if (oFoundGoldenResource.isPresent()) { IAnyResource foundGoldenResource = oFoundGoldenResource.get(); // Exclude manually declared NO_MATCH links from candidates if (isNoMatch(foundGoldenResource, theIncomingResource)) { continue; } - IResourcePersistentId pidOrNull = myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), foundGoldenResource); - MatchedGoldenResourceCandidate mpc = new MatchedGoldenResourceCandidate(pidOrNull, MdmMatchOutcome.EID_MATCH); - ourLog.debug("Incoming Resource {} matched Golden Resource {} by EID {}", theIncomingResource.getIdElement().toUnqualifiedVersionless(), foundGoldenResource.getIdElement().toUnqualifiedVersionless(), eid); + IResourcePersistentId pidOrNull = + myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), foundGoldenResource); + MatchedGoldenResourceCandidate mpc = + new MatchedGoldenResourceCandidate(pidOrNull, MdmMatchOutcome.EID_MATCH); + ourLog.debug( + "Incoming Resource {} matched Golden Resource {} by EID {}", + theIncomingResource.getIdElement().toUnqualifiedVersionless(), + foundGoldenResource.getIdElement().toUnqualifiedVersionless(), + eid); retval.add(mpc); } @@ -78,7 +90,8 @@ public class FindCandidateByEidSvc extends BaseCandidateFinder { } private boolean isNoMatch(IAnyResource theGoldenResource, IAnyResource theSourceResource) { - Optional oLink = myMdmLinkDaoSvc.getLinkByGoldenResourceAndSourceResource(theGoldenResource, theSourceResource); + Optional oLink = + myMdmLinkDaoSvc.getLinkByGoldenResourceAndSourceResource(theGoldenResource, theSourceResource); if (oLink.isEmpty()) { return false; } diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/FindCandidateByExampleSvc.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/FindCandidateByExampleSvc.java index d969de563dd..464239aa97c 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/FindCandidateByExampleSvc.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/FindCandidateByExampleSvc.java @@ -23,12 +23,12 @@ import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; import ca.uhn.fhir.jpa.mdm.dao.MdmLinkDaoSvc; -import ca.uhn.fhir.mdm.util.MdmPartitionHelper; import ca.uhn.fhir.mdm.api.IMdmLink; import ca.uhn.fhir.mdm.api.IMdmMatchFinderSvc; import ca.uhn.fhir.mdm.api.MatchedTarget; import ca.uhn.fhir.mdm.api.MdmMatchResultEnum; import ca.uhn.fhir.mdm.log.Logs; +import ca.uhn.fhir.mdm.util.MdmPartitionHelper; import 
ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; import org.hl7.fhir.instance.model.api.IAnyResource; import org.hl7.fhir.instance.model.api.IBaseResource; @@ -44,14 +44,19 @@ import java.util.stream.Collectors; @Service public class FindCandidateByExampleSvc
    extends BaseCandidateFinder { private static final Logger ourLog = Logs.getMdmTroubleshootingLog(); + @Autowired IIdHelperService
    myIdHelperService; + @Autowired private FhirContext myFhirContext; + @Autowired private MdmLinkDaoSvc> myMdmLinkDaoSvc; + @Autowired private IMdmMatchFinderSvc myMdmMatchFinderSvc; + @Autowired MdmPartitionHelper myMdmPartitionHelper; @@ -69,45 +74,58 @@ public class FindCandidateByExampleSvc
    extends List
    goldenResourcePidsToExclude = getNoMatchGoldenResourcePids(theTarget); - List matchedCandidates = myMdmMatchFinderSvc.getMatchedTargets(myFhirContext.getResourceType(theTarget), theTarget, myMdmPartitionHelper.getRequestPartitionIdFromResourceForSearch(theTarget)); + List matchedCandidates = myMdmMatchFinderSvc.getMatchedTargets( + myFhirContext.getResourceType(theTarget), + theTarget, + myMdmPartitionHelper.getRequestPartitionIdFromResourceForSearch(theTarget)); // Convert all possible match targets to their equivalent Golden Resources by looking up in the MdmLink table, // while ensuring that the matches aren't in our NO_MATCH list. // The data flow is as follows -> // MatchedTargetCandidate -> Golden Resource -> MdmLink -> MatchedGoldenResourceCandidate - matchedCandidates = matchedCandidates.stream().filter(mc -> mc.isMatch() || mc.isPossibleMatch()).collect(Collectors.toList()); + matchedCandidates = matchedCandidates.stream() + .filter(mc -> mc.isMatch() || mc.isPossibleMatch()) + .collect(Collectors.toList()); List skippedLogMessages = new ArrayList<>(); List matchedLogMessages = new ArrayList<>(); for (MatchedTarget match : matchedCandidates) { - Optional optionalMdmLink = myMdmLinkDaoSvc.getMatchedLinkForSourcePid(myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), match.getTarget())); + Optional optionalMdmLink = myMdmLinkDaoSvc.getMatchedLinkForSourcePid( + myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), match.getTarget())); if (!optionalMdmLink.isPresent()) { if (ourLog.isDebugEnabled()) { - skippedLogMessages.add(String.format("%s does not link to a Golden Resource (it may be a Golden Resource itself). Removing candidate.", match.getTarget().getIdElement().toUnqualifiedVersionless())); + skippedLogMessages.add(String.format( + "%s does not link to a Golden Resource (it may be a Golden Resource itself). 
Removing candidate.", + match.getTarget().getIdElement().toUnqualifiedVersionless())); } continue; } - IMdmLink matchMdmLink = optionalMdmLink.get(); if (goldenResourcePidsToExclude.contains(matchMdmLink.getGoldenResourcePersistenceId())) { - skippedLogMessages.add(String.format("Skipping MDM on candidate Golden Resource with PID %s due to manual NO_MATCH", matchMdmLink.getGoldenResourcePersistenceId().toString())); + skippedLogMessages.add(String.format( + "Skipping MDM on candidate Golden Resource with PID %s due to manual NO_MATCH", + matchMdmLink.getGoldenResourcePersistenceId().toString())); continue; } - MatchedGoldenResourceCandidate candidate = new MatchedGoldenResourceCandidate(matchMdmLink.getGoldenResourcePersistenceId(), match.getMatchResult()); + MatchedGoldenResourceCandidate candidate = new MatchedGoldenResourceCandidate( + matchMdmLink.getGoldenResourcePersistenceId(), match.getMatchResult()); if (ourLog.isDebugEnabled()) { - matchedLogMessages.add(String.format("Navigating from matched resource %s to its Golden Resource %s", match.getTarget().getIdElement().toUnqualifiedVersionless(), matchMdmLink.getGoldenResourcePersistenceId().toString())); + matchedLogMessages.add(String.format( + "Navigating from matched resource %s to its Golden Resource %s", + match.getTarget().getIdElement().toUnqualifiedVersionless(), + matchMdmLink.getGoldenResourcePersistenceId().toString())); } retval.add(candidate); } if (ourLog.isDebugEnabled()) { - for (String logMessage: skippedLogMessages) { + for (String logMessage : skippedLogMessages) { ourLog.debug(logMessage); } - for (String logMessage: matchedLogMessages) { + for (String logMessage : matchedLogMessages) { ourLog.debug(logMessage); } } @@ -116,10 +134,9 @@ public class FindCandidateByExampleSvc
    extends private List
    getNoMatchGoldenResourcePids(IBaseResource theBaseResource) { P targetPid = myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), theBaseResource); - return myMdmLinkDaoSvc.getMdmLinksBySourcePidAndMatchResult(targetPid, MdmMatchResultEnum.NO_MATCH) - .stream() - .map(IMdmLink::getGoldenResourcePersistenceId) - .collect(Collectors.toList()); + return myMdmLinkDaoSvc.getMdmLinksBySourcePidAndMatchResult(targetPid, MdmMatchResultEnum.NO_MATCH).stream() + .map(IMdmLink::getGoldenResourcePersistenceId) + .collect(Collectors.toList()); } @Override diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/FindCandidateByLinkSvc.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/FindCandidateByLinkSvc.java index 2f848ccfc14..3c03ac81f13 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/FindCandidateByLinkSvc.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/FindCandidateByLinkSvc.java @@ -51,7 +51,7 @@ public class FindCandidateByLinkSvc extends BaseCandidateFinder { if (oLink.isPresent()) { IResourcePersistentId goldenResourcePid = oLink.get().getGoldenResourcePersistenceId(); ourLog.debug("Resource previously linked. Using existing link."); - retval.add(new MatchedGoldenResourceCandidate(goldenResourcePid, oLink.get())); + retval.add(new MatchedGoldenResourceCandidate(goldenResourcePid, oLink.get())); } } return retval; diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/MatchedGoldenResourceCandidate.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/MatchedGoldenResourceCandidate.java index 8854c6a500b..b349ae448a9 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/MatchedGoldenResourceCandidate.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/MatchedGoldenResourceCandidate.java @@ -35,7 +35,8 @@ public class MatchedGoldenResourceCandidate { public MatchedGoldenResourceCandidate(IResourcePersistentId theGoldenResourcePid, IMdmLink theMdmLink) { myCandidateGoldenResourcePid = theGoldenResourcePid; - myMdmMatchOutcome = new MdmMatchOutcome(theMdmLink.getVector(), theMdmLink.getScore()).setMatchResultEnum(theMdmLink.getMatchResult()); + myMdmMatchOutcome = new MdmMatchOutcome(theMdmLink.getVector(), theMdmLink.getScore()) + .setMatchResultEnum(theMdmLink.getMatchResult()); } public IResourcePersistentId getCandidateGoldenResourcePid() { diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/MdmCandidateSearchCriteriaBuilderSvc.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/MdmCandidateSearchCriteriaBuilderSvc.java index 2dea91d2ab4..7eddf52c5c3 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/MdmCandidateSearchCriteriaBuilderSvc.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/MdmCandidateSearchCriteriaBuilderSvc.java @@ -26,12 +26,12 @@ import org.hl7.fhir.instance.model.api.IAnyResource; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; @Service public class MdmCandidateSearchCriteriaBuilderSvc 
{ @@ -46,32 +46,37 @@ public class MdmCandidateSearchCriteriaBuilderSvc { * Patient?active=true&name.given=Gary,Grant&name.family=Graham */ @Nonnull - public Optional buildResourceQueryString(String theResourceType, IAnyResource theResource, List theFilterCriteria, @Nullable MdmResourceSearchParamJson resourceSearchParam) { + public Optional buildResourceQueryString( + String theResourceType, + IAnyResource theResource, + List theFilterCriteria, + @Nullable MdmResourceSearchParamJson resourceSearchParam) { List criteria = new ArrayList<>(); // If there are candidate search params, then make use of them, otherwise, search with only the filters. if (resourceSearchParam != null) { resourceSearchParam.iterator().forEachRemaining(searchParam -> { - //to compare it to all known GOLDEN_RESOURCE objects, using the overlapping search parameters that they have. - List valuesFromResourceForSearchParam = myMdmSearchParamSvc.getValueFromResourceForSearchParam(theResource, searchParam); + // to compare it to all known GOLDEN_RESOURCE objects, using the overlapping search parameters that they + // have. + List valuesFromResourceForSearchParam = + myMdmSearchParamSvc.getValueFromResourceForSearchParam(theResource, searchParam); if (!valuesFromResourceForSearchParam.isEmpty()) { criteria.add(buildResourceMatchQuery(searchParam, valuesFromResourceForSearchParam)); } }); if (criteria.isEmpty()) { - //TODO GGG/KHS, re-evaluate whether we should early drop here. + // TODO GGG/KHS, re-evaluate whether we should early drop here. return Optional.empty(); } } criteria.addAll(theFilterCriteria); - return Optional.of(theResourceType + "?" + String.join("&", criteria)); + return Optional.of(theResourceType + "?" + String.join("&", criteria)); } private String buildResourceMatchQuery(String theSearchParamName, List theResourceValues) { - String nameValueOrList = theResourceValues.stream() - .map(UrlUtil::escapeUrlParam) - .collect(Collectors.joining(",")); + String nameValueOrList = + theResourceValues.stream().map(UrlUtil::escapeUrlParam).collect(Collectors.joining(",")); return theSearchParamName + "=" + nameValueOrList; } } diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/MdmCandidateSearchSvc.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/MdmCandidateSearchSvc.java index 8c29830d5d4..aafdff442c6 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/MdmCandidateSearchSvc.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/MdmCandidateSearchSvc.java @@ -53,15 +53,17 @@ public class MdmCandidateSearchSvc { @Autowired private IMdmSettings myMdmSettings; + @Autowired private IIdHelperService myIdHelperService; + @Autowired private MdmCandidateSearchCriteriaBuilderSvc myMdmCandidateSearchCriteriaBuilderSvc; + @Autowired private CandidateSearcher myCandidateSearcher; - public MdmCandidateSearchSvc() { - } + public MdmCandidateSearchSvc() {} /** * Given a source resource, search for all resources that are considered an MDM match based on defined MDM rules. 
@@ -73,16 +75,20 @@ public class MdmCandidateSearchSvc { * @return the list of candidate {@link IBaseResource} which could be matches to theResource */ @Transactional - public Collection findCandidates(String theResourceType, IAnyResource theResource, RequestPartitionId theRequestPartitionId) { + public Collection findCandidates( + String theResourceType, IAnyResource theResource, RequestPartitionId theRequestPartitionId) { Map matchedPidsToResources = new HashMap<>(); - List filterSearchParams = myMdmSettings.getMdmRules().getCandidateFilterSearchParams(); + List filterSearchParams = + myMdmSettings.getMdmRules().getCandidateFilterSearchParams(); List filterCriteria = buildFilterQuery(filterSearchParams, theResourceType); - List candidateSearchParams = myMdmSettings.getMdmRules().getCandidateSearchParams(); + List candidateSearchParams = + myMdmSettings.getMdmRules().getCandidateSearchParams(); - //If there are zero MdmResourceSearchParamJson, we end up only making a single search, otherwise we - //must perform one search per MdmResourceSearchParamJson. + // If there are zero MdmResourceSearchParamJson, we end up only making a single search, otherwise we + // must perform one search per MdmResourceSearchParamJson. if (candidateSearchParams.isEmpty()) { - searchForIdsAndAddToMap(theResourceType, theResource, matchedPidsToResources, filterCriteria, null, theRequestPartitionId); + searchForIdsAndAddToMap( + theResourceType, theResource, matchedPidsToResources, filterCriteria, null, theRequestPartitionId); } else { for (MdmResourceSearchParamJson resourceSearchParam : candidateSearchParams) { @@ -90,19 +96,32 @@ public class MdmCandidateSearchSvc { continue; } - searchForIdsAndAddToMap(theResourceType, theResource, matchedPidsToResources, filterCriteria, resourceSearchParam, theRequestPartitionId); + searchForIdsAndAddToMap( + theResourceType, + theResource, + matchedPidsToResources, + filterCriteria, + resourceSearchParam, + theRequestPartitionId); } } // Obviously we don't want to consider the incoming resource as a potential candidate. // Sometimes, we are running this function on a resource that has not yet been persisted, // so it may not have an ID yet, precluding the need to remove it. if (theResource.getIdElement().getIdPart() != null) { - if (matchedPidsToResources.remove(myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), theResource)) != null) { - ourLog.debug("Removing incoming resource {} from list of candidates.", theResource.getIdElement().toUnqualifiedVersionless()); + if (matchedPidsToResources.remove( + myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), theResource)) + != null) { + ourLog.debug( + "Removing incoming resource {} from list of candidates.", + theResource.getIdElement().toUnqualifiedVersionless()); } } - ourLog.info("Candidate search found {} matching resources for {}", matchedPidsToResources.size(), idOrType(theResource, theResourceType)); + ourLog.info( + "Candidate search found {} matching resources for {}", + matchedPidsToResources.size(), + idOrType(theResource, theResourceType)); return matchedPidsToResources.values(); } @@ -119,26 +138,36 @@ public class MdmCandidateSearchSvc { * 4. Store all results in `theMatchedPidsToResources` */ @SuppressWarnings("rawtypes") - private void searchForIdsAndAddToMap(String theResourceType, IAnyResource theResource, Map theMatchedPidsToResources, List theFilterCriteria, MdmResourceSearchParamJson resourceSearchParam, RequestPartitionId theRequestPartitionId) { - //1. 
- Optional oResourceCriteria = myMdmCandidateSearchCriteriaBuilderSvc.buildResourceQueryString(theResourceType, theResource, theFilterCriteria, resourceSearchParam); + private void searchForIdsAndAddToMap( + String theResourceType, + IAnyResource theResource, + Map theMatchedPidsToResources, + List theFilterCriteria, + MdmResourceSearchParamJson resourceSearchParam, + RequestPartitionId theRequestPartitionId) { + // 1. + Optional oResourceCriteria = myMdmCandidateSearchCriteriaBuilderSvc.buildResourceQueryString( + theResourceType, theResource, theFilterCriteria, resourceSearchParam); if (!oResourceCriteria.isPresent()) { return; } String resourceCriteria = oResourceCriteria.get(); ourLog.debug("Searching for {} candidates with {}", theResourceType, resourceCriteria); - //2. - Optional bundleProvider = myCandidateSearcher.search(theResourceType, resourceCriteria, theRequestPartitionId); + // 2. + Optional bundleProvider = + myCandidateSearcher.search(theResourceType, resourceCriteria, theRequestPartitionId); if (!bundleProvider.isPresent()) { - throw new TooManyCandidatesException(Msg.code(762) + "More than " + myMdmSettings.getCandidateSearchLimit() + " candidate matches found for " + resourceCriteria + ". Aborting mdm matching."); + throw new TooManyCandidatesException(Msg.code(762) + "More than " + myMdmSettings.getCandidateSearchLimit() + + " candidate matches found for " + resourceCriteria + ". Aborting mdm matching."); } List resources = bundleProvider.get().getAllResources(); int initialSize = theMatchedPidsToResources.size(); - //4. - resources.forEach(resource -> theMatchedPidsToResources.put(myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), resource), (IAnyResource) resource)); + // 4. + resources.forEach(resource -> theMatchedPidsToResources.put( + myIdHelperService.getPidOrNull(RequestPartitionId.allPartitions(), resource), (IAnyResource) resource)); int newSize = theMatchedPidsToResources.size(); @@ -147,15 +176,17 @@ public class MdmCandidateSearchSvc { } } - private List buildFilterQuery(List theFilterSearchParams, String theResourceType) { + private List buildFilterQuery( + List theFilterSearchParams, String theResourceType) { return Collections.unmodifiableList(theFilterSearchParams.stream() - .filter(spFilterJson -> paramIsOnCorrectType(theResourceType, spFilterJson)) - .map(this::convertToQueryString) - .collect(Collectors.toList())); + .filter(spFilterJson -> paramIsOnCorrectType(theResourceType, spFilterJson)) + .map(this::convertToQueryString) + .collect(Collectors.toList())); } private boolean paramIsOnCorrectType(String theResourceType, MdmFilterSearchParamJson spFilterJson) { - return spFilterJson.getResourceType().equals(theResourceType) || spFilterJson.getResourceType().equalsIgnoreCase(ALL_RESOURCE_SEARCH_PARAM_TYPE); + return spFilterJson.getResourceType().equals(theResourceType) + || spFilterJson.getResourceType().equalsIgnoreCase(ALL_RESOURCE_SEARCH_PARAM_TYPE); } private String convertToQueryString(MdmFilterSearchParamJson theSpFilterJson) { diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/MdmGoldenResourceFindingSvc.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/MdmGoldenResourceFindingSvc.java index 0d7b55cb049..b72d3addea3 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/MdmGoldenResourceFindingSvc.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/MdmGoldenResourceFindingSvc.java @@ -67,15 +67,16 @@ 
public class MdmGoldenResourceFindingSvc { } if (matchedGoldenResourceCandidates.isEmpty()) { - //OK, so we have not found any links in the MdmLink table with us as a source. Next, let's find - //possible Golden Resources matches by following MDM rules. + // OK, so we have not found any links in the MdmLink table with us as a source. Next, let's find + // possible Golden Resources matches by following MDM rules. matchedGoldenResourceCandidates = myFindCandidateByExampleSvc.findCandidates(theResource); } return matchedGoldenResourceCandidates; } - public IAnyResource getGoldenResourceFromMatchedGoldenResourceCandidate(MatchedGoldenResourceCandidate theMatchedGoldenResourceCandidate, String theResourceType) { + public IAnyResource getGoldenResourceFromMatchedGoldenResourceCandidate( + MatchedGoldenResourceCandidate theMatchedGoldenResourceCandidate, String theResourceType) { IResourcePersistentId goldenResourcePid = theMatchedGoldenResourceCandidate.getCandidateGoldenResourcePid(); return myMdmResourceDaoSvc.readGoldenResourceByPid(goldenResourcePid, theResourceType); } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/config/PartitionSettings.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/config/PartitionSettings.java index f5f07acb083..31820b2a8cb 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/config/PartitionSettings.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/config/PartitionSettings.java @@ -45,7 +45,8 @@ public class PartitionSettings { * * @since 6.6.0 */ - public void setAlwaysOpenNewTransactionForDifferentPartition(boolean theAlwaysOpenNewTransactionForDifferentPartition) { + public void setAlwaysOpenNewTransactionForDifferentPartition( + boolean theAlwaysOpenNewTransactionForDifferentPartition) { myAlwaysOpenNewTransactionForDifferentPartition = theAlwaysOpenNewTransactionForDifferentPartition; } @@ -158,7 +159,8 @@ public class PartitionSettings { * If enabled the JPA server will allow unqualified cross partition reference */ public boolean isAllowUnqualifiedCrossPartitionReference() { - return myAllowReferencesAcrossPartitions.equals(PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED); + return myAllowReferencesAcrossPartitions.equals( + PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED); } public enum CrossPartitionReferenceMode { @@ -173,7 +175,5 @@ public class PartitionSettings { * will be managed by the database. 
*/ ALLOWED_UNQUALIFIED, - } - } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IResourceLookup.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IResourceLookup.java index e3efe12595c..129da6e2344 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IResourceLookup.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IResourceLookup.java @@ -32,5 +32,4 @@ public interface IResourceLookup> { Date getDeleted(); T getPersistentId(); - } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirPostgres94Dialect.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirPostgres94Dialect.java index e79c31488f5..57f42b5322a 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirPostgres94Dialect.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiFhirPostgres94Dialect.java @@ -30,7 +30,6 @@ public class HapiFhirPostgres94Dialect extends PostgreSQL94Dialect { public HapiFhirPostgres94Dialect() { super(); - registerColumnType( Types.CLOB, "oid" ); + registerColumnType(Types.CLOB, "oid"); } - } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiSequenceStyleGenerator.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiSequenceStyleGenerator.java index 5eaf92ea409..9e373c05e5d 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiSequenceStyleGenerator.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiSequenceStyleGenerator.java @@ -44,10 +44,13 @@ import java.util.Properties; * and by default will therefore work exactly as the default would, but allows for customization. 
*/ @SuppressWarnings("unused") -public class HapiSequenceStyleGenerator implements IdentifierGenerator, PersistentIdentifierGenerator, BulkInsertionCapableIdentifierGenerator { +public class HapiSequenceStyleGenerator + implements IdentifierGenerator, PersistentIdentifierGenerator, BulkInsertionCapableIdentifierGenerator { private final SequenceStyleGenerator myGen = new SequenceStyleGenerator(); + @Autowired private StorageSettings myStorageSettings; + private ISequenceValueMassager myIdMassager; private boolean myConfigured; private String myGeneratorName; @@ -63,7 +66,8 @@ public class HapiSequenceStyleGenerator implements IdentifierGenerator, Persiste } @Override - public Serializable generate(SharedSessionContractImplementor theSession, Object theObject) throws HibernateException { + public Serializable generate(SharedSessionContractImplementor theSession, Object theObject) + throws HibernateException { Long retVal = myIdMassager.generate(myGeneratorName); if (retVal == null) { Long next = (Long) myGen.generate(theSession, theObject); @@ -73,7 +77,8 @@ public class HapiSequenceStyleGenerator implements IdentifierGenerator, Persiste } @Override - public void configure(Type theType, Properties theParams, ServiceRegistry theServiceRegistry) throws MappingException { + public void configure(Type theType, Properties theParams, ServiceRegistry theServiceRegistry) + throws MappingException { // Instantiate the ID massager // StorageSettings should only be null when running in the DDL generation maven plugin @@ -109,5 +114,4 @@ public class HapiSequenceStyleGenerator implements IdentifierGenerator, Persiste public boolean supportsJdbcBatchInserts() { return myGen.supportsJdbcBatchInserts(); } - } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/ISequenceValueMassager.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/ISequenceValueMassager.java index 8c815ebad07..df4c3a7ba52 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/ISequenceValueMassager.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/ISequenceValueMassager.java @@ -42,15 +42,11 @@ public interface ISequenceValueMassager { return null; } - final class NoopSequenceValueMassager implements ISequenceValueMassager { @Override public Long massage(String theGeneratorName, Long theId) { return theId; } - } - - } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/AuditableBasePartitionable.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/AuditableBasePartitionable.java index 60629e3a56e..473235d6e8d 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/AuditableBasePartitionable.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/AuditableBasePartitionable.java @@ -23,11 +23,11 @@ package ca.uhn.fhir.jpa.model.entity; import org.hibernate.envers.Audited; import org.hibernate.envers.NotAudited; +import java.io.Serializable; import javax.annotation.Nullable; import javax.persistence.Column; import javax.persistence.Embedded; import javax.persistence.MappedSuperclass; -import java.io.Serializable; /** * This is a copy of (@link {@link BasePartitionable} used ONLY for entities that are audited by Hibernate Envers. 
@@ -57,5 +57,4 @@ public class AuditableBasePartitionable implements Serializable { public void setPartitionId(PartitionablePartitionId thePartitionId) { myPartitionId = thePartitionId; } - } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseHasResource.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseHasResource.java index 1888ea041e1..ad3205f2795 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseHasResource.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseHasResource.java @@ -25,6 +25,8 @@ import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.model.primitive.InstantDt; import org.hibernate.annotations.OptimisticLock; +import java.util.Collection; +import java.util.Date; import javax.persistence.Column; import javax.persistence.EnumType; import javax.persistence.Enumerated; @@ -32,16 +34,14 @@ import javax.persistence.MappedSuperclass; import javax.persistence.Temporal; import javax.persistence.TemporalType; import javax.persistence.Transient; -import java.util.Collection; -import java.util.Date; - -import static org.apache.commons.lang3.StringUtils.defaultString; @MappedSuperclass -public abstract class BaseHasResource extends BasePartitionable implements IBaseResourceEntity, IBasePersistedResource { +public abstract class BaseHasResource extends BasePartitionable + implements IBaseResourceEntity, IBasePersistedResource { public static final String RES_PUBLISHED = "RES_PUBLISHED"; public static final String RES_UPDATED = "RES_UPDATED"; + @Column(name = "RES_DELETED_AT", nullable = true) @Temporal(TemporalType.TIMESTAMP) private Date myDeleted; @@ -80,7 +80,6 @@ public abstract class BaseHasResource extends BasePartitionable implements IBase myTransientForcedId = theTransientForcedId; } - public abstract BaseTag addTag(TagDefinition theDef); @Override @@ -97,15 +96,13 @@ public abstract class BaseHasResource extends BasePartitionable implements IBase myFhirVersion = theFhirVersion; } - abstract public ForcedId getForcedId(); + public abstract ForcedId getForcedId(); - abstract public void setForcedId(ForcedId theForcedId); + public abstract void setForcedId(ForcedId theForcedId); @Override public abstract Long getId(); - - public void setDeleted(Date theDate) { myDeleted = theDate; } @@ -172,5 +169,4 @@ public abstract class BaseHasResource extends BasePartitionable implements IBase } return retVal; } - } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BasePartitionable.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BasePartitionable.java index de78932d586..0c3fe7ceed0 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BasePartitionable.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BasePartitionable.java @@ -19,11 +19,11 @@ */ package ca.uhn.fhir.jpa.model.entity; +import java.io.Serializable; import javax.annotation.Nullable; import javax.persistence.Column; import javax.persistence.Embedded; import javax.persistence.MappedSuperclass; -import java.io.Serializable; /** * This is the base class for entities with partitioning that does NOT include Hibernate Envers logging. 
@@ -33,7 +33,6 @@ import java.io.Serializable; @MappedSuperclass public abstract class BasePartitionable implements Serializable { - @Embedded private PartitionablePartitionId myPartitionId; @@ -55,9 +54,8 @@ public abstract class BasePartitionable implements Serializable { @Override public String toString() { - return "BasePartitionable{" + - "myPartitionId=" + myPartitionId + - ", myPartitionIdValue=" + myPartitionIdValue + - '}'; + return "BasePartitionable{" + "myPartitionId=" + + myPartitionId + ", myPartitionIdValue=" + + myPartitionIdValue + '}'; } } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseResourceIndex.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseResourceIndex.java index 1dfff3b9f2e..9107f06862a 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseResourceIndex.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseResourceIndex.java @@ -21,8 +21,8 @@ package ca.uhn.fhir.jpa.model.entity; import org.apache.commons.lang3.ObjectUtils; -import javax.persistence.MappedSuperclass; import java.io.Serializable; +import javax.persistence.MappedSuperclass; @MappedSuperclass public abstract class BaseResourceIndex extends BasePartitionable implements Serializable { @@ -56,5 +56,4 @@ public abstract class BaseResourceIndex extends BasePartitionable implements Ser public abstract boolean equals(Object obj); public abstract void copyMutableValuesFrom(T theSource); - } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseResourceIndexedSearchParam.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseResourceIndexedSearchParam.java index 39146b8ce0e..bdb31c75c8d 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseResourceIndexedSearchParam.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseResourceIndexedSearchParam.java @@ -35,13 +35,13 @@ import org.apache.commons.lang3.StringUtils; import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField; import org.hibernate.search.mapper.pojo.mapping.definition.annotation.GenericField; +import java.util.Date; +import java.util.List; import javax.persistence.Column; import javax.persistence.MappedSuperclass; import javax.persistence.Temporal; import javax.persistence.TemporalType; import javax.persistence.Transient; -import java.util.Date; -import java.util.List; @MappedSuperclass public abstract class BaseResourceIndexedSearchParam extends BaseResourceIndex { @@ -55,6 +55,7 @@ public abstract class BaseResourceIndexedSearchParam extends BaseResourceIndex { * Don't make this public 'cause nobody better be able to modify it! 
*/ private static final byte[] DELIMITER_BYTES = "|".getBytes(Charsets.UTF_8); + private static final long serialVersionUID = 1L; @GenericField @@ -102,6 +103,7 @@ public abstract class BaseResourceIndexedSearchParam extends BaseResourceIndex { * Get the Resource this SP indexes */ public abstract ResourceTable getResource(); + public abstract BaseResourceIndexedSearchParam setResource(ResourceTable theResource); @Override @@ -168,16 +170,29 @@ public abstract class BaseResourceIndexedSearchParam extends BaseResourceIndex { return this; } - public static long calculateHashIdentity(PartitionSettings thePartitionSettings, PartitionablePartitionId theRequestPartitionId, String theResourceType, String theParamName) { + public static long calculateHashIdentity( + PartitionSettings thePartitionSettings, + PartitionablePartitionId theRequestPartitionId, + String theResourceType, + String theParamName) { RequestPartitionId requestPartitionId = PartitionablePartitionId.toRequestPartitionId(theRequestPartitionId); return calculateHashIdentity(thePartitionSettings, requestPartitionId, theResourceType, theParamName); } - public static long calculateHashIdentity(PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, String theResourceType, String theParamName) { + public static long calculateHashIdentity( + PartitionSettings thePartitionSettings, + RequestPartitionId theRequestPartitionId, + String theResourceType, + String theParamName) { return hash(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName); } - public static long calculateHashIdentity(PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, String theResourceType, String theParamName, List<String> theAdditionalValues) { + public static long calculateHashIdentity( + PartitionSettings thePartitionSettings, + RequestPartitionId theRequestPartitionId, + String theResourceType, + String theParamName, + List<String> theAdditionalValues) { String[] values = new String[theAdditionalValues.size() + 2]; values[0] = theResourceType; values[1] = theParamName; @@ -191,12 +206,16 @@ public abstract class BaseResourceIndexedSearchParam extends BaseResourceIndex { /** * Applies a fast and consistent hashing algorithm to a set of strings */ - static long hash(PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, String... theValues) { + static long hash( + PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, String... 
theValues) { Hasher hasher = HASH_FUNCTION.newHasher(); - if (thePartitionSettings.isPartitioningEnabled() && thePartitionSettings.isIncludePartitionInSearchHashes() && theRequestPartitionId != null) { + if (thePartitionSettings.isPartitioningEnabled() + && thePartitionSettings.isIncludePartitionInSearchHashes() + && theRequestPartitionId != null) { if (theRequestPartitionId.getPartitionIds().size() > 1) { - throw new InternalErrorException(Msg.code(1527) + "Can not search multiple partitions when partitions are included in search hashes"); + throw new InternalErrorException(Msg.code(1527) + + "Can not search multiple partitions when partitions are included in search hashes"); } Integer partitionId = theRequestPartitionId.getFirstPartitionIdOrNull(); if (partitionId != null) { diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseResourceIndexedSearchParamQuantity.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseResourceIndexedSearchParamQuantity.java index 38408384911..ac1d27332b0 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseResourceIndexedSearchParamQuantity.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseResourceIndexedSearchParamQuantity.java @@ -33,11 +33,11 @@ public abstract class BaseResourceIndexedSearchParamQuantity extends BaseResourc private static final int MAX_LENGTH = 200; private static final long serialVersionUID = 1L; - + @Column(name = "SP_SYSTEM", nullable = true, length = MAX_LENGTH) @FullTextField public String mySystem; - + @Column(name = "SP_UNITS", nullable = true, length = MAX_LENGTH) @FullTextField public String myUnits; @@ -83,8 +83,10 @@ public abstract class BaseResourceIndexedSearchParamQuantity extends BaseResourc String units = getUnits(); String system = getSystem(); setHashIdentity(calculateHashIdentity(getPartitionSettings(), getPartitionId(), resourceType, paramName)); - setHashIdentityAndUnits(calculateHashUnits(getPartitionSettings(), getPartitionId(), resourceType, paramName, units)); - setHashIdentitySystemAndUnits(calculateHashSystemAndUnits(getPartitionSettings(), getPartitionId(), resourceType, paramName, system, units)); + setHashIdentityAndUnits( + calculateHashUnits(getPartitionSettings(), getPartitionId(), resourceType, paramName, units)); + setHashIdentitySystemAndUnits(calculateHashSystemAndUnits( + getPartitionSettings(), getPartitionId(), resourceType, paramName, system, units)); } public Long getHashIdentity() { @@ -138,23 +140,44 @@ public abstract class BaseResourceIndexedSearchParamQuantity extends BaseResourc return b.toHashCode(); } - - public static long calculateHashSystemAndUnits(PartitionSettings thePartitionSettings, PartitionablePartitionId theRequestPartitionId, String theResourceType, String theParamName, String theSystem, String theUnits) { + public static long calculateHashSystemAndUnits( + PartitionSettings thePartitionSettings, + PartitionablePartitionId theRequestPartitionId, + String theResourceType, + String theParamName, + String theSystem, + String theUnits) { RequestPartitionId requestPartitionId = PartitionablePartitionId.toRequestPartitionId(theRequestPartitionId); - return calculateHashSystemAndUnits(thePartitionSettings, requestPartitionId, theResourceType, theParamName, theSystem, theUnits); + return calculateHashSystemAndUnits( + thePartitionSettings, requestPartitionId, theResourceType, theParamName, theSystem, theUnits); } - public static long 
calculateHashSystemAndUnits(PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, String theResourceType, String theParamName, String theSystem, String theUnits) { + public static long calculateHashSystemAndUnits( + PartitionSettings thePartitionSettings, + RequestPartitionId theRequestPartitionId, + String theResourceType, + String theParamName, + String theSystem, + String theUnits) { return hash(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, theSystem, theUnits); } - public static long calculateHashUnits(PartitionSettings thePartitionSettings, PartitionablePartitionId theRequestPartitionId, String theResourceType, String theParamName, String theUnits) { + public static long calculateHashUnits( + PartitionSettings thePartitionSettings, + PartitionablePartitionId theRequestPartitionId, + String theResourceType, + String theParamName, + String theUnits) { RequestPartitionId requestPartitionId = PartitionablePartitionId.toRequestPartitionId(theRequestPartitionId); return calculateHashUnits(thePartitionSettings, requestPartitionId, theResourceType, theParamName, theUnits); } - public static long calculateHashUnits(PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, String theResourceType, String theParamName, String theUnits) { + public static long calculateHashUnits( + PartitionSettings thePartitionSettings, + RequestPartitionId theRequestPartitionId, + String theResourceType, + String theParamName, + String theUnits) { return hash(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, theUnits); } - } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseTag.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseTag.java index fc507f05fe2..bc4405d2d1a 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseTag.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseTag.java @@ -19,11 +19,11 @@ */ package ca.uhn.fhir.jpa.model.entity; +import java.io.Serializable; import javax.persistence.Column; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import javax.persistence.MappedSuperclass; -import java.io.Serializable; @MappedSuperclass public abstract class BaseTag extends BasePartitionable implements Serializable { @@ -48,5 +48,4 @@ public abstract class BaseTag extends BasePartitionable implements Serializable public void setTag(TagDefinition theTag) { myTag = theTag; } - } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BinaryStorageEntity.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BinaryStorageEntity.java index 48b1e42764c..d44d8e446f5 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BinaryStorageEntity.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BinaryStorageEntity.java @@ -19,6 +19,8 @@ */ package ca.uhn.fhir.jpa.model.entity; +import java.sql.Blob; +import java.util.Date; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.Id; @@ -26,8 +28,6 @@ import javax.persistence.Lob; import javax.persistence.Table; import javax.persistence.Temporal; import javax.persistence.TemporalType; -import java.sql.Blob; -import java.util.Date; @Entity @Table(name = "HFJ_BINARY_STORAGE_BLOB") @@ -35,20 +35,26 @@ public class BinaryStorageEntity { @Id @Column(name = "BLOB_ID", length = 200, 
nullable = false) - //N.B GGG: Note that the `blob id` is the same as the `externalized binary id`. + // N.B GGG: Note that the `blob id` is the same as the `externalized binary id`. private String myBlobId; + @Column(name = "RESOURCE_ID", length = 100, nullable = false) private String myResourceId; + @Column(name = "BLOB_SIZE", nullable = true) private long mySize; + @Column(name = "CONTENT_TYPE", nullable = false, length = 100) private String myBlobContentType; + @Lob @Column(name = "BLOB_DATA", nullable = false, insertable = true, updatable = false) private Blob myBlob; + @Temporal(TemporalType.TIMESTAMP) @Column(name = "PUBLISHED_DATE", nullable = false) private Date myPublished; + @Column(name = "BLOB_HASH", length = 128, nullable = true) private String myHash; diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/EnversRevision.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/EnversRevision.java index 4bc7867b891..8d915978a43 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/EnversRevision.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/EnversRevision.java @@ -55,7 +55,9 @@ public class EnversRevision { if (this == theO) return true; if (theO == null || getClass() != theO.getClass()) return false; final EnversRevision that = (EnversRevision) theO; - return myRevisionNumber == that.myRevisionNumber && myRevisionTimestamp == that.myRevisionTimestamp && myRevisionType == that.myRevisionType; + return myRevisionNumber == that.myRevisionNumber + && myRevisionTimestamp == that.myRevisionTimestamp + && myRevisionType == that.myRevisionType; } @Override @@ -66,9 +68,9 @@ public class EnversRevision { @Override public String toString() { return new ToStringBuilder(this) - .append("myRevisionType", myRevisionType) - .append("myRevisionNumber", myRevisionNumber) - .append("myRevisionTimestamp", myRevisionTimestamp) - .toString(); + .append("myRevisionType", myRevisionType) + .append("myRevisionNumber", myRevisionNumber) + .append("myRevisionTimestamp", myRevisionTimestamp) + .toString(); } } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ForcedId.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ForcedId.java index f5a9d42354a..7046e8c5c3e 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ForcedId.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ForcedId.java @@ -38,27 +38,34 @@ import javax.persistence.Table; import javax.persistence.UniqueConstraint; @Entity() -@Table(name = ForcedId.HFJ_FORCED_ID, uniqueConstraints = { - @UniqueConstraint(name = "IDX_FORCEDID_RESID", columnNames = {"RESOURCE_PID"}), - /* - * This index is called IDX_FORCEDID_TYPE_FID and guarantees - * uniqueness of RESOURCE_TYPE,FORCED_ID. This doesn't make sense - * for partitioned servers, so we replace it on those servers - * with IDX_FORCEDID_TYPE_PFID covering - * PARTITION_ID,RESOURCE_TYPE,FORCED_ID - */ - @UniqueConstraint(name = ForcedId.IDX_FORCEDID_TYPE_FID, columnNames = {"RESOURCE_TYPE", "FORCED_ID"}) -}, indexes = { - /* - * NB: We previously had indexes named - * - IDX_FORCEDID_TYPE_FORCEDID - * - IDX_FORCEDID_TYPE_RESID - * so don't reuse these names - */ - @Index(name = "IDX_FORCEID_FID", columnList = "FORCED_ID"), - //@Index(name = "IDX_FORCEID_RESID", columnList = "RESOURCE_PID"), - //TODO GGG potentiall add a type + res_id index here, specifically for deletion? 
-}) +@Table( + name = ForcedId.HFJ_FORCED_ID, + uniqueConstraints = { + @UniqueConstraint( + name = "IDX_FORCEDID_RESID", + columnNames = {"RESOURCE_PID"}), + /* + * This index is called IDX_FORCEDID_TYPE_FID and guarantees + * uniqueness of RESOURCE_TYPE,FORCED_ID. This doesn't make sense + * for partitioned servers, so we replace it on those servers + * with IDX_FORCEDID_TYPE_PFID covering + * PARTITION_ID,RESOURCE_TYPE,FORCED_ID + */ + @UniqueConstraint( + name = ForcedId.IDX_FORCEDID_TYPE_FID, + columnNames = {"RESOURCE_TYPE", "FORCED_ID"}) + }, + indexes = { + /* + * NB: We previously had indexes named + * - IDX_FORCEDID_TYPE_FORCEDID + * - IDX_FORCEDID_TYPE_RESID + * so don't reuse these names + */ + @Index(name = "IDX_FORCEID_FID", columnList = "FORCED_ID"), + // @Index(name = "IDX_FORCEID_RESID", columnList = "RESOURCE_PID"), + // TODO GGG potentiall add a type + res_id index here, specifically for deletion? + }) public class ForcedId extends BasePartitionable { public static final int MAX_FORCED_ID_LENGTH = 100; @@ -74,7 +81,11 @@ public class ForcedId extends BasePartitionable { @Column(name = "PID") private Long myId; - @JoinColumn(name = "RESOURCE_PID", nullable = false, updatable = false, foreignKey = @ForeignKey(name = "FK_FORCEDID_RESOURCE")) + @JoinColumn( + name = "RESOURCE_PID", + nullable = false, + updatable = false, + foreignKey = @ForeignKey(name = "FK_FORCEDID_RESOURCE")) @OneToOne(fetch = FetchType.LAZY) private ResourceTable myResource; diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/IBaseResourceEntity.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/IBaseResourceEntity.java index bbdff924f65..2da32bd3661 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/IBaseResourceEntity.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/IBaseResourceEntity.java @@ -20,12 +20,11 @@ package ca.uhn.fhir.jpa.model.entity; import ca.uhn.fhir.context.FhirVersionEnum; -import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.model.primitive.InstantDt; -import javax.annotation.Nullable; import java.util.Date; +import javax.annotation.Nullable; public interface IBaseResourceEntity { diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/IResourceIndexComboSearchParameter.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/IResourceIndexComboSearchParameter.java index ac9c3fa577e..a23748eb5bf 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/IResourceIndexComboSearchParameter.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/IResourceIndexComboSearchParameter.java @@ -21,7 +21,6 @@ package ca.uhn.fhir.jpa.model.entity; import org.hl7.fhir.instance.model.api.IIdType; - /** * Provides a common interface used to extract Combo Unique ({@link ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboStringUnique}) * and Combo Non-Unique ({@link ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboTokenNonUnique}) SearchParameters diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/NormalizedQuantitySearchLevel.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/NormalizedQuantitySearchLevel.java index 647bb423eb5..8b235926d20 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/NormalizedQuantitySearchLevel.java +++ 
b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/NormalizedQuantitySearchLevel.java @@ -21,44 +21,41 @@ package ca.uhn.fhir.jpa.model.entity; /** * Support different UCUM services level for FHIR Quantity data type. - * + * * @since 5.3.0 */ - public enum NormalizedQuantitySearchLevel { /** - * default, Quantity is stored in {@link ResourceIndexedSearchParamQuantity} only and it is used by searching. + * default, Quantity is stored in {@link ResourceIndexedSearchParamQuantity} only and it is used by searching. */ NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED, - + /** - * Quantity is stored in both {@link ResourceIndexedSearchParamQuantity} - * and {@link ResourceIndexedSearchParamQuantityNormalized}, + * Quantity is stored in both {@link ResourceIndexedSearchParamQuantity} + * and {@link ResourceIndexedSearchParamQuantityNormalized}, * but {@link ResourceIndexedSearchParamQuantity} is used by searching. */ NORMALIZED_QUANTITY_STORAGE_SUPPORTED, /** - * Quantity is stored in both {@link ResourceIndexedSearchParamQuantity} - * and {@link ResourceIndexedSearchParamQuantityNormalized}, + * Quantity is stored in both {@link ResourceIndexedSearchParamQuantity} + * and {@link ResourceIndexedSearchParamQuantityNormalized}, * {@link ResourceIndexedSearchParamQuantityNormalized} is used by searching. */ NORMALIZED_QUANTITY_SEARCH_SUPPORTED; /** - * Quantity is stored in only in {@link ResourceIndexedSearchParamQuantityNormalized}, + * Quantity is stored in only in {@link ResourceIndexedSearchParamQuantityNormalized}, * {@link ResourceIndexedSearchParamQuantityNormalized} is used by searching. - * The existing non normalized quantity will be not supported + * The existing non normalized quantity will be not supported * NOTE: this option is not supported in this release */ // When this is enabled, we can enable testSortByQuantityWithNormalizedQuantitySearchFullSupported() - //NORMALIZED_QUANTITY_SEARCH_FULL_SUPPORTED, + // NORMALIZED_QUANTITY_SEARCH_FULL_SUPPORTED, public boolean storageOrSearchSupported() { - return this.equals(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_STORAGE_SUPPORTED) - || this.equals(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_SUPPORTED); + return this.equals(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_STORAGE_SUPPORTED) + || this.equals(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_SUPPORTED); } - - } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/NpmPackageEntity.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/NpmPackageEntity.java index 6a8e2f843b2..0c637ff84d5 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/NpmPackageEntity.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/NpmPackageEntity.java @@ -22,6 +22,8 @@ package ca.uhn.fhir.jpa.model.entity; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; +import java.util.Date; +import java.util.List; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; @@ -34,13 +36,11 @@ import javax.persistence.Temporal; import javax.persistence.TemporalType; import javax.persistence.UniqueConstraint; import javax.persistence.Version; -import java.util.Date; -import java.util.List; @Entity() -@Table(name = "NPM_PACKAGE", uniqueConstraints = { - @UniqueConstraint(name = "IDX_PACK_ID", columnNames = "PACKAGE_ID") -}) +@Table( + name = "NPM_PACKAGE", + 
uniqueConstraints = {@UniqueConstraint(name = "IDX_PACK_ID", columnNames = "PACKAGE_ID")}) public class NpmPackageEntity { protected static final int PACKAGE_ID_LENGTH = 200; @@ -50,16 +50,21 @@ public class NpmPackageEntity { @Id @Column(name = "PID") private Long myId; + @Column(name = "PACKAGE_ID", length = PACKAGE_ID_LENGTH, nullable = false) private String myPackageId; + @Column(name = "CUR_VERSION_ID", length = NpmPackageVersionEntity.VERSION_ID_LENGTH, nullable = true) private String myCurrentVersionId; + @Temporal(TemporalType.TIMESTAMP) @Version @Column(name = "UPDATED_TIME", nullable = false) private Date myVersion; + @Column(name = "PACKAGE_DESC", length = NpmPackageVersionEntity.VERSION_ID_LENGTH, nullable = true) private String myDescription; + @OneToMany(mappedBy = "myPackage") private List myVersions; @@ -91,16 +96,12 @@ public class NpmPackageEntity { NpmPackageEntity that = (NpmPackageEntity) theO; - return new EqualsBuilder() - .append(myPackageId, that.myPackageId) - .isEquals(); + return new EqualsBuilder().append(myPackageId, that.myPackageId).isEquals(); } @Override public int hashCode() { - return new HashCodeBuilder(17, 37) - .append(myPackageId) - .toHashCode(); + return new HashCodeBuilder(17, 37).append(myPackageId).toHashCode(); } public String getCurrentVersionId() { diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/NpmPackageVersionEntity.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/NpmPackageVersionEntity.java index 0fa66f73e4e..00eee555884 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/NpmPackageVersionEntity.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/NpmPackageVersionEntity.java @@ -24,6 +24,8 @@ import ca.uhn.fhir.util.StringUtil; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; +import java.util.Date; +import java.util.List; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.EnumType; @@ -42,16 +44,16 @@ import javax.persistence.Table; import javax.persistence.Temporal; import javax.persistence.TemporalType; import javax.persistence.Version; -import java.util.Date; -import java.util.List; @Entity() -@Table(name = "NPM_PACKAGE_VER", uniqueConstraints = { -}, indexes = { - @Index(name = "IDX_PACKVER", columnList = "PACKAGE_ID,VERSION_ID", unique = true), - @Index(name = "FK_NPM_PKV_PKG", columnList = "PACKAGE_PID"), - @Index(name = "FK_NPM_PKV_RESID", columnList = "BINARY_RES_ID") -}) +@Table( + name = "NPM_PACKAGE_VER", + uniqueConstraints = {}, + indexes = { + @Index(name = "IDX_PACKVER", columnList = "PACKAGE_ID,VERSION_ID", unique = true), + @Index(name = "FK_NPM_PKV_PKG", columnList = "PACKAGE_PID"), + @Index(name = "FK_NPM_PKV_RESID", columnList = "BINARY_RES_ID") + }) public class NpmPackageVersionEntity { public static final int VERSION_ID_LENGTH = 200; @@ -64,36 +66,53 @@ public class NpmPackageVersionEntity { @Id @Column(name = "PID") private Long myId; + @Column(name = "PACKAGE_ID", length = NpmPackageEntity.PACKAGE_ID_LENGTH, nullable = false) private String myPackageId; + @Column(name = "VERSION_ID", length = NpmPackageVersionEntity.VERSION_ID_LENGTH, nullable = false) private String myVersionId; + @ManyToOne @JoinColumn(name = "PACKAGE_PID", nullable = false, foreignKey = @ForeignKey(name = "FK_NPM_PKV_PKG")) private NpmPackageEntity myPackage; + @OneToOne - @JoinColumn(name = "BINARY_RES_ID", 
referencedColumnName = "RES_ID", nullable = false, foreignKey = @ForeignKey(name = "FK_NPM_PKV_RESID")) + @JoinColumn( + name = "BINARY_RES_ID", + referencedColumnName = "RES_ID", + nullable = false, + foreignKey = @ForeignKey(name = "FK_NPM_PKV_RESID")) private ResourceTable myPackageBinary; + @Temporal(TemporalType.TIMESTAMP) @Column(name = "SAVED_TIME", nullable = false) private Date mySavedTime; + @Column(name = "PKG_DESC", nullable = true, length = PACKAGE_DESC_LENGTH) private String myDescription; + @Column(name = "DESC_UPPER", nullable = true, length = PACKAGE_DESC_LENGTH) private String myDescriptionUpper; + @Column(name = "CURRENT_VERSION", nullable = false) private boolean myCurrentVersion; + @Column(name = "FHIR_VERSION_ID", length = NpmPackageVersionEntity.FHIR_VERSION_ID_LENGTH, nullable = false) private String myFhirVersionId; + @Enumerated(EnumType.STRING) @Column(name = "FHIR_VERSION", length = NpmPackageVersionEntity.FHIR_VERSION_LENGTH, nullable = false) private FhirVersionEnum myFhirVersion; + @Column(name = "PACKAGE_SIZE_BYTES", nullable = false) private long myPackageSizeBytes; + @Temporal(TemporalType.TIMESTAMP) @Version @Column(name = "UPDATED_TIME", nullable = false) private Date myUpdatedTime; + @OneToMany(mappedBy = "myPackageVersion") private List myResources; @@ -181,12 +200,12 @@ public class NpmPackageVersionEntity { @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("myId", myId) - .append("myPackageId", myPackageId) - .append("myVersionId", myVersionId) - .append("myDescriptionUpper", myDescriptionUpper) - .append("myFhirVersionId", myFhirVersionId) - .toString(); + .append("myId", myId) + .append("myPackageId", myPackageId) + .append("myVersionId", myVersionId) + .append("myDescriptionUpper", myDescriptionUpper) + .append("myFhirVersionId", myFhirVersionId) + .toString(); } public List getResources() { diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/NpmPackageVersionResourceEntity.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/NpmPackageVersionResourceEntity.java index 1e19757f4ba..d9d9f0d8b5a 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/NpmPackageVersionResourceEntity.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/NpmPackageVersionResourceEntity.java @@ -23,6 +23,7 @@ import ca.uhn.fhir.context.FhirVersionEnum; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; +import java.util.Date; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.EnumType; @@ -40,15 +41,16 @@ import javax.persistence.Table; import javax.persistence.Temporal; import javax.persistence.TemporalType; import javax.persistence.Version; -import java.util.Date; @Entity() -@Table(name = "NPM_PACKAGE_VER_RES", uniqueConstraints = { -}, indexes = { - @Index(name = "IDX_PACKVERRES_URL", columnList = "CANONICAL_URL"), - @Index(name = "FK_NPM_PACKVERRES_PACKVER", columnList = "PACKVER_PID"), - @Index(name = "FK_NPM_PKVR_RESID", columnList = "BINARY_RES_ID") -}) +@Table( + name = "NPM_PACKAGE_VER_RES", + uniqueConstraints = {}, + indexes = { + @Index(name = "IDX_PACKVERRES_URL", columnList = "CANONICAL_URL"), + @Index(name = "FK_NPM_PACKVERRES_PACKVER", columnList = "PACKVER_PID"), + @Index(name = "FK_NPM_PKVR_RESID", columnList = "BINARY_RES_ID") + }) public class NpmPackageVersionResourceEntity { @Id @@ 
-56,29 +58,48 @@ public class NpmPackageVersionResourceEntity { @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_NPM_PACKVERRES") @Column(name = "PID") private Long myId; + @ManyToOne - @JoinColumn(name = "PACKVER_PID", referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_NPM_PACKVERRES_PACKVER"), nullable = false) + @JoinColumn( + name = "PACKVER_PID", + referencedColumnName = "PID", + foreignKey = @ForeignKey(name = "FK_NPM_PACKVERRES_PACKVER"), + nullable = false) private NpmPackageVersionEntity myPackageVersion; + @OneToOne - @JoinColumn(name = "BINARY_RES_ID", referencedColumnName = "RES_ID", nullable = false, foreignKey = @ForeignKey(name = "FK_NPM_PKVR_RESID")) + @JoinColumn( + name = "BINARY_RES_ID", + referencedColumnName = "RES_ID", + nullable = false, + foreignKey = @ForeignKey(name = "FK_NPM_PKVR_RESID")) private ResourceTable myResourceBinary; + @Column(name = "FILE_DIR", length = 200) private String myDirectory; + @Column(name = "FILE_NAME", length = 200) private String myFilename; + @Column(name = "RES_TYPE", length = ResourceTable.RESTYPE_LEN, nullable = false) private String myResourceType; + @Column(name = "CANONICAL_URL", length = 200) private String myCanonicalUrl; + @Column(name = "CANONICAL_VERSION", length = 200) private String myCanonicalVersion; + @Enumerated(EnumType.STRING) @Column(name = "FHIR_VERSION", length = NpmPackageVersionEntity.FHIR_VERSION_LENGTH, nullable = false) private FhirVersionEnum myFhirVersion; + @Column(name = "FHIR_VERSION_ID", length = NpmPackageVersionEntity.FHIR_VERSION_ID_LENGTH, nullable = false) private String myFhirVersionId; + @Column(name = "RES_SIZE_BYTES", nullable = false) private long myResSizeBytes; + @Temporal(TemporalType.TIMESTAMP) @Version @Column(name = "UPDATED_TIME", nullable = false) @@ -164,16 +185,15 @@ public class NpmPackageVersionResourceEntity { public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("myId", myId) - .append("myCanonicalUrl", myCanonicalUrl) - .append("myCanonicalVersion", myCanonicalVersion) - .append("myResourceType", myResourceType) - .append("myDirectory", myDirectory) - .append("myFilename", myFilename) - .append("myPackageVersion", myPackageVersion) - .append("myResSizeBytes", myResSizeBytes) - .append("myVersion", myVersion) - .toString(); + .append("myId", myId) + .append("myCanonicalUrl", myCanonicalUrl) + .append("myCanonicalVersion", myCanonicalVersion) + .append("myResourceType", myResourceType) + .append("myDirectory", myDirectory) + .append("myFilename", myFilename) + .append("myPackageVersion", myPackageVersion) + .append("myResSizeBytes", myResSizeBytes) + .append("myVersion", myVersion) + .toString(); } - } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/PartitionablePartitionId.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/PartitionablePartitionId.java index ccadafb7e83..30c8660c1f7 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/PartitionablePartitionId.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/PartitionablePartitionId.java @@ -24,11 +24,11 @@ import ca.uhn.fhir.jpa.model.config.PartitionSettings; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; +import java.time.LocalDate; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.persistence.Column; import javax.persistence.Embeddable; 
-import java.time.LocalDate; @Embeddable public class PartitionablePartitionId implements Cloneable { @@ -37,6 +37,7 @@ public class PartitionablePartitionId implements Cloneable { @Column(name = PARTITION_ID, nullable = true, insertable = true, updatable = false) private Integer myPartitionId; + @Column(name = "PARTITION_DATE", nullable = true, insertable = true, updatable = false) private LocalDate myPartitionDate; @@ -72,12 +73,18 @@ public class PartitionablePartitionId implements Cloneable { } PartitionablePartitionId that = (PartitionablePartitionId) theO; - return new EqualsBuilder().append(myPartitionId, that.myPartitionId).append(myPartitionDate, that.myPartitionDate).isEquals(); + return new EqualsBuilder() + .append(myPartitionId, that.myPartitionId) + .append(myPartitionDate, that.myPartitionDate) + .isEquals(); } @Override public int hashCode() { - return new HashCodeBuilder(17, 37).append(myPartitionId).append(myPartitionDate).toHashCode(); + return new HashCodeBuilder(17, 37) + .append(myPartitionId) + .append(myPartitionDate) + .toHashCode(); } @Nullable @@ -93,9 +100,7 @@ public class PartitionablePartitionId implements Cloneable { @SuppressWarnings({"CloneDoesntDeclareCloneNotSupportedException", "MethodDoesntCallSuperMethod"}) @Override protected PartitionablePartitionId clone() { - return new PartitionablePartitionId() - .setPartitionId(getPartitionId()) - .setPartitionDate(getPartitionDate()); + return new PartitionablePartitionId().setPartitionId(getPartitionId()).setPartitionDate(getPartitionDate()); } public RequestPartitionId toPartitionId() { @@ -104,10 +109,9 @@ public class PartitionablePartitionId implements Cloneable { @Override public String toString() { - return "PartitionablePartitionId{" + - "myPartitionId=" + myPartitionId + - ", myPartitionDate=" + myPartitionDate + - '}'; + return "PartitionablePartitionId{" + "myPartitionId=" + + myPartitionId + ", myPartitionDate=" + + myPartitionDate + '}'; } @Nonnull @@ -120,12 +124,12 @@ public class PartitionablePartitionId implements Cloneable { } @Nonnull - public static PartitionablePartitionId toStoragePartition(@Nonnull RequestPartitionId theRequestPartitionId, @Nonnull PartitionSettings thePartitionSettings) { + public static PartitionablePartitionId toStoragePartition( + @Nonnull RequestPartitionId theRequestPartitionId, @Nonnull PartitionSettings thePartitionSettings) { Integer partitionId = theRequestPartitionId.getFirstPartitionIdOrNull(); if (partitionId == null) { partitionId = thePartitionSettings.getDefaultPartitionId(); } return new PartitionablePartitionId(partitionId, theRequestPartitionId.getPartitionDate()); } - } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceEncodingEnum.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceEncodingEnum.java index f9f15e1a329..87d16b44b0d 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceEncodingEnum.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceEncodingEnum.java @@ -54,5 +54,4 @@ public enum ResourceEncodingEnum { public IParser newParser(FhirContext theContext) { return theContext.newJsonParser(); } - } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryProvenanceEntity.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryProvenanceEntity.java index ff77948ab88..754ab9b613c 100644 --- 
a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryProvenanceEntity.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryProvenanceEntity.java @@ -34,31 +34,45 @@ import javax.persistence.ManyToOne; import javax.persistence.MapsId; import javax.persistence.OneToOne; import javax.persistence.Table; -import javax.persistence.UniqueConstraint; import static ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable.SOURCE_URI_LENGTH; -@Table(name = "HFJ_RES_VER_PROV", indexes = { - @Index(name = "IDX_RESVERPROV_SOURCEURI", columnList = "SOURCE_URI"), - @Index(name = "IDX_RESVERPROV_REQUESTID", columnList = "REQUEST_ID"), - @Index(name = "IDX_RESVERPROV_RES_PID", columnList = "RES_PID") -}) +@Table( + name = "HFJ_RES_VER_PROV", + indexes = { + @Index(name = "IDX_RESVERPROV_SOURCEURI", columnList = "SOURCE_URI"), + @Index(name = "IDX_RESVERPROV_REQUESTID", columnList = "REQUEST_ID"), + @Index(name = "IDX_RESVERPROV_RES_PID", columnList = "RES_PID") + }) @Entity public class ResourceHistoryProvenanceEntity extends BasePartitionable { - @Id @Column(name = "RES_VER_PID") private Long myId; + @OneToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "RES_VER_PID", referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_RESVERPROV_RESVER_PID"), nullable = false, insertable = false, updatable = false) + @JoinColumn( + name = "RES_VER_PID", + referencedColumnName = "PID", + foreignKey = @ForeignKey(name = "FK_RESVERPROV_RESVER_PID"), + nullable = false, + insertable = false, + updatable = false) @MapsId private ResourceHistoryTable myResourceHistoryTable; + @ManyToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "RES_PID", referencedColumnName = "RES_ID", foreignKey = @ForeignKey(name = "FK_RESVERPROV_RES_PID"), nullable = false) + @JoinColumn( + name = "RES_PID", + referencedColumnName = "RES_ID", + foreignKey = @ForeignKey(name = "FK_RESVERPROV_RES_PID"), + nullable = false) private ResourceTable myResourceTable; + @Column(name = "SOURCE_URI", length = SOURCE_URI_LENGTH, nullable = true) private String mySourceUri; + @Column(name = "REQUEST_ID", length = Constants.REQUEST_ID_LENGTH, nullable = true) private String myRequestId; @@ -105,6 +119,4 @@ public class ResourceHistoryProvenanceEntity extends BasePartitionable { public Long getId() { return myId; } - - } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java index 5d3f7c27f91..12ceee4a4d4 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java @@ -27,19 +27,24 @@ import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; import org.hibernate.annotations.OptimisticLock; -import javax.persistence.*; import java.io.Serializable; import java.util.ArrayList; import java.util.Collection; +import javax.persistence.*; @Entity -@Table(name = ResourceHistoryTable.HFJ_RES_VER, uniqueConstraints = { - @UniqueConstraint(name = ResourceHistoryTable.IDX_RESVER_ID_VER, columnNames = {"RES_ID", "RES_VER"}) -}, indexes = { - @Index(name = "IDX_RESVER_TYPE_DATE", columnList = "RES_TYPE,RES_UPDATED"), - @Index(name = "IDX_RESVER_ID_DATE", columnList = "RES_ID,RES_UPDATED"), - @Index(name = "IDX_RESVER_DATE", columnList = "RES_UPDATED") -}) 
+@Table( + name = ResourceHistoryTable.HFJ_RES_VER, + uniqueConstraints = { + @UniqueConstraint( + name = ResourceHistoryTable.IDX_RESVER_ID_VER, + columnNames = {"RES_ID", "RES_VER"}) + }, + indexes = { + @Index(name = "IDX_RESVER_TYPE_DATE", columnList = "RES_TYPE,RES_UPDATED"), + @Index(name = "IDX_RESVER_ID_DATE", columnList = "RES_ID,RES_UPDATED"), + @Index(name = "IDX_RESVER_DATE", columnList = "RES_UPDATED") + }) public class ResourceHistoryTable extends BaseHasResource implements Serializable { public static final String IDX_RESVER_ID_VER = "IDX_RESVER_ID_VER"; public static final int SOURCE_URI_LENGTH = 100; @@ -49,38 +54,55 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl // Don't reduce the visibility here, we reference this from Smile @SuppressWarnings("WeakerAccess") public static final int ENCODING_COL_LENGTH = 5; + public static final String HFJ_RES_VER = "HFJ_RES_VER"; public static final int RES_TEXT_VC_MAX_LENGTH = 4000; private static final long serialVersionUID = 1L; + @Id @SequenceGenerator(name = "SEQ_RESOURCE_HISTORY_ID", sequenceName = "SEQ_RESOURCE_HISTORY_ID") @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_RESOURCE_HISTORY_ID") @Column(name = "PID") private Long myId; + @ManyToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "RES_ID", nullable = false, updatable = false, foreignKey = @ForeignKey(name = "FK_RESOURCE_HISTORY_RESOURCE")) + @JoinColumn( + name = "RES_ID", + nullable = false, + updatable = false, + foreignKey = @ForeignKey(name = "FK_RESOURCE_HISTORY_RESOURCE")) private ResourceTable myResourceTable; + @Column(name = "RES_ID", nullable = false, updatable = false, insertable = false) private Long myResourceId; + @Column(name = "RES_TYPE", length = ResourceTable.RESTYPE_LEN, nullable = false) private String myResourceType; + @Column(name = "RES_VER", nullable = false) private Long myResourceVersion; + @OneToMany(mappedBy = "myResourceHistory", cascade = CascadeType.ALL, fetch = FetchType.LAZY, orphanRemoval = true) private Collection myTags; + @Column(name = "RES_TEXT", length = Integer.MAX_VALUE - 1, nullable = true) @Lob() @OptimisticLock(excluded = true) private byte[] myResource; + @Column(name = "RES_TEXT_VC", length = RES_TEXT_VC_MAX_LENGTH, nullable = true) @org.hibernate.annotations.Type(type = JpaConstants.ORG_HIBERNATE_TYPE_TEXT_TYPE) @OptimisticLock(excluded = true) private String myResourceTextVc; + @Column(name = "RES_ENCODING", nullable = false, length = ENCODING_COL_LENGTH) @Enumerated(EnumType.STRING) @OptimisticLock(excluded = true) private ResourceEncodingEnum myEncoding; - @OneToOne(mappedBy = "myResourceHistoryTable", cascade = {CascadeType.REMOVE}) + + @OneToOne( + mappedBy = "myResourceHistoryTable", + cascade = {CascadeType.REMOVE}) private ResourceHistoryProvenanceEntity myProvenance; // TODO: This was added in 6.8.0 - In the future we should drop ResourceHistoryProvenanceEntity @Column(name = "SOURCE_URI", length = SOURCE_URI_LENGTH, nullable = true) @@ -115,11 +137,11 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("resourceId", myResourceId) - .append("resourceType", myResourceType) - .append("resourceVersion", myResourceVersion) - .append("pid", myId) - .toString(); + .append("resourceId", myResourceId) + .append("resourceType", myResourceType) + .append("resourceVersion", myResourceVersion) + .append("pid", myId) + 
.toString(); } public String getResourceTextVc() { diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTag.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTag.java index 96b54015bb2..98db54f6cf4 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTag.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTag.java @@ -19,6 +19,7 @@ */ package ca.uhn.fhir.jpa.model.entity; +import java.io.Serializable; import javax.persistence.Column; import javax.persistence.Embeddable; import javax.persistence.Entity; @@ -32,15 +33,17 @@ import javax.persistence.ManyToOne; import javax.persistence.SequenceGenerator; import javax.persistence.Table; import javax.persistence.UniqueConstraint; -import java.io.Serializable; @Embeddable @Entity -@Table(name = "HFJ_HISTORY_TAG", uniqueConstraints = { - @UniqueConstraint(name = "IDX_RESHISTTAG_TAGID", columnNames = {"RES_VER_PID", "TAG_ID"}), -}, indexes = { - @Index(name = "IDX_RESHISTTAG_RESID", columnList = "RES_ID") -}) +@Table( + name = "HFJ_HISTORY_TAG", + uniqueConstraints = { + @UniqueConstraint( + name = "IDX_RESHISTTAG_TAGID", + columnNames = {"RES_VER_PID", "TAG_ID"}), + }, + indexes = {@Index(name = "IDX_RESHISTTAG_RESID", columnList = "RES_ID")}) public class ResourceHistoryTag extends BaseTag implements Serializable { private static final long serialVersionUID = 1L; @@ -52,7 +55,11 @@ public class ResourceHistoryTag extends BaseTag implements Serializable { private Long myId; @ManyToOne() - @JoinColumn(name = "RES_VER_PID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name = "FK_HISTORYTAG_HISTORY")) + @JoinColumn( + name = "RES_VER_PID", + referencedColumnName = "PID", + nullable = false, + foreignKey = @ForeignKey(name = "FK_HISTORYTAG_HISTORY")) private ResourceHistoryTable myResourceHistory; @Column(name = "RES_VER_PID", insertable = false, updatable = false, nullable = false) @@ -64,11 +71,12 @@ public class ResourceHistoryTag extends BaseTag implements Serializable { @Column(name = "RES_ID", nullable = false) private Long myResourceId; - public ResourceHistoryTag() { - } + public ResourceHistoryTag() {} - - public ResourceHistoryTag(ResourceHistoryTable theResourceHistoryTable, TagDefinition theTag, PartitionablePartitionId theRequestPartitionId) { + public ResourceHistoryTag( + ResourceHistoryTable theResourceHistoryTable, + TagDefinition theTag, + PartitionablePartitionId theRequestPartitionId) { setTag(theTag); setResource(theResourceHistoryTable); setResourceId(theResourceHistoryTable.getResourceId()); diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedComboStringUnique.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedComboStringUnique.java index 3d76fedd04a..1b386c23afb 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedComboStringUnique.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedComboStringUnique.java @@ -30,11 +30,20 @@ import org.hl7.fhir.instance.model.api.IIdType; import javax.persistence.*; @Entity() -@Table(name = "HFJ_IDX_CMP_STRING_UNIQ", indexes = { - @Index(name = ResourceIndexedComboStringUnique.IDX_IDXCMPSTRUNIQ_STRING, columnList = "IDX_STRING", unique = true), - @Index(name = ResourceIndexedComboStringUnique.IDX_IDXCMPSTRUNIQ_RESOURCE, 
columnList = "RES_ID", unique = false) -}) -public class ResourceIndexedComboStringUnique extends BasePartitionable implements Comparable, IResourceIndexComboSearchParameter { +@Table( + name = "HFJ_IDX_CMP_STRING_UNIQ", + indexes = { + @Index( + name = ResourceIndexedComboStringUnique.IDX_IDXCMPSTRUNIQ_STRING, + columnList = "IDX_STRING", + unique = true), + @Index( + name = ResourceIndexedComboStringUnique.IDX_IDXCMPSTRUNIQ_RESOURCE, + columnList = "RES_ID", + unique = false) + }) +public class ResourceIndexedComboStringUnique extends BasePartitionable + implements Comparable, IResourceIndexComboSearchParameter { public static final int MAX_STRING_LENGTH = 500; public static final String IDX_IDXCMPSTRUNIQ_STRING = "IDX_IDXCMPSTRUNIQ_STRING"; @@ -45,11 +54,17 @@ public class ResourceIndexedComboStringUnique extends BasePartitionable implemen @Id @Column(name = "PID") private Long myId; + @ManyToOne - @JoinColumn(name = "RES_ID", referencedColumnName = "RES_ID", foreignKey = @ForeignKey(name = "FK_IDXCMPSTRUNIQ_RES_ID")) + @JoinColumn( + name = "RES_ID", + referencedColumnName = "RES_ID", + foreignKey = @ForeignKey(name = "FK_IDXCMPSTRUNIQ_RES_ID")) private ResourceTable myResource; + @Column(name = "RES_ID", insertable = false, updatable = false) private Long myResourceId; + @Column(name = "IDX_STRING", nullable = false, length = MAX_STRING_LENGTH) private String myIndexString; @@ -59,6 +74,7 @@ public class ResourceIndexedComboStringUnique extends BasePartitionable implemen @SuppressWarnings("unused") @Column(name = PartitionablePartitionId.PARTITION_ID, insertable = false, updatable = false, nullable = true) private Integer myPartitionIdValue; + @Transient private IIdType mySearchParameterId; @@ -72,7 +88,8 @@ public class ResourceIndexedComboStringUnique extends BasePartitionable implemen /** * Constructor */ - public ResourceIndexedComboStringUnique(ResourceTable theResource, String theIndexString, IIdType theSearchParameterId) { + public ResourceIndexedComboStringUnique( + ResourceTable theResource, String theIndexString, IIdType theSearchParameterId) { setResource(theResource); setIndexString(theIndexString); setPartitionId(theResource.getPartitionId()); @@ -96,9 +113,7 @@ public class ResourceIndexedComboStringUnique extends BasePartitionable implemen ResourceIndexedComboStringUnique that = (ResourceIndexedComboStringUnique) theO; - return new EqualsBuilder() - .append(myIndexString, that.myIndexString) - .isEquals(); + return new EqualsBuilder().append(myIndexString, that.myIndexString).isEquals(); } @Override @@ -121,19 +136,17 @@ public class ResourceIndexedComboStringUnique extends BasePartitionable implemen @Override public int hashCode() { - return new HashCodeBuilder(17, 37) - .append(myIndexString) - .toHashCode(); + return new HashCodeBuilder(17, 37).append(myIndexString).toHashCode(); } @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("id", myId) - .append("resourceId", myResourceId) - .append("indexString", myIndexString) - .append("partition", getPartitionId()) - .toString(); + .append("id", myId) + .append("resourceId", myResourceId) + .append("indexString", myIndexString) + .append("partition", getPartitionId()) + .toString(); } /** diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedComboTokenNonUnique.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedComboTokenNonUnique.java index bafd39e7ae0..d4f96c2f7d0 100644 --- 
a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedComboTokenNonUnique.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedComboTokenNonUnique.java @@ -43,11 +43,14 @@ import javax.persistence.Transient; import static ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam.hash; @Entity -@Table(name = "HFJ_IDX_CMB_TOK_NU", indexes = { - @Index(name = "IDX_IDXCMBTOKNU_STR", columnList = "IDX_STRING", unique = false), - @Index(name = "IDX_IDXCMBTOKNU_RES", columnList = "RES_ID", unique = false) -}) -public class ResourceIndexedComboTokenNonUnique extends BaseResourceIndex implements Comparable, IResourceIndexComboSearchParameter { +@Table( + name = "HFJ_IDX_CMB_TOK_NU", + indexes = { + @Index(name = "IDX_IDXCMBTOKNU_STR", columnList = "IDX_STRING", unique = false), + @Index(name = "IDX_IDXCMBTOKNU_RES", columnList = "RES_ID", unique = false) + }) +public class ResourceIndexedComboTokenNonUnique extends BaseResourceIndex + implements Comparable, IResourceIndexComboSearchParameter { @SequenceGenerator(name = "SEQ_IDXCMBTOKNU_ID", sequenceName = "SEQ_IDXCMBTOKNU_ID") @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_IDXCMBTOKNU_ID") @@ -56,7 +59,10 @@ public class ResourceIndexedComboTokenNonUnique extends BaseResourceIndex implem private Long myId; @ManyToOne - @JoinColumn(name = "RES_ID", referencedColumnName = "RES_ID", foreignKey = @ForeignKey(name = "FK_IDXCMBTOKNU_RES_ID")) + @JoinColumn( + name = "RES_ID", + referencedColumnName = "RES_ID", + foreignKey = @ForeignKey(name = "FK_IDXCMBTOKNU_RES_ID")) private ResourceTable myResource; @Column(name = "RES_ID", insertable = false, updatable = false) @@ -81,7 +87,8 @@ public class ResourceIndexedComboTokenNonUnique extends BaseResourceIndex implem super(); } - public ResourceIndexedComboTokenNonUnique(PartitionSettings thePartitionSettings, ResourceTable theEntity, String theQueryString) { + public ResourceIndexedComboTokenNonUnique( + PartitionSettings thePartitionSettings, ResourceTable theEntity, String theQueryString) { myPartitionSettings = thePartitionSettings; myResource = theEntity; myIndexString = theQueryString; @@ -151,9 +158,7 @@ public class ResourceIndexedComboTokenNonUnique extends BaseResourceIndex implem @Override public int hashCode() { - return new HashCodeBuilder(17, 37) - .append(myIndexString) - .toHashCode(); + return new HashCodeBuilder(17, 37).append(myIndexString).toHashCode(); } public PartitionSettings getPartitionSettings() { @@ -192,19 +197,21 @@ public class ResourceIndexedComboTokenNonUnique extends BaseResourceIndex implem @Override public String toString() { return new ToStringBuilder(this) - .append("id", myId) - .append("resourceId", myResourceId) - .append("hashComplete", myHashComplete) - .append("indexString", myIndexString) - .toString(); + .append("id", myId) + .append("resourceId", myResourceId) + .append("hashComplete", myHashComplete) + .append("indexString", myIndexString) + .toString(); } - public static long calculateHashComplete(PartitionSettings partitionSettings, PartitionablePartitionId thePartitionId, String queryString) { + public static long calculateHashComplete( + PartitionSettings partitionSettings, PartitionablePartitionId thePartitionId, String queryString) { RequestPartitionId requestPartitionId = PartitionablePartitionId.toRequestPartitionId(thePartitionId); return hash(partitionSettings, requestPartitionId, queryString); } - public static long calculateHashComplete(PartitionSettings 
partitionSettings, RequestPartitionId partitionId, String queryString) { + public static long calculateHashComplete( + PartitionSettings partitionSettings, RequestPartitionId partitionId, String queryString) { return hash(partitionSettings, partitionId, queryString); } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamCoords.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamCoords.java index 01bfddd6dac..dbad03ccc81 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamCoords.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamCoords.java @@ -42,22 +42,29 @@ import javax.persistence.Table; @Embeddable @Entity -@Table(name = "HFJ_SPIDX_COORDS", indexes = { - @Index(name = "IDX_SP_COORDS_HASH_V2", columnList = "HASH_IDENTITY,SP_LATITUDE,SP_LONGITUDE,RES_ID,PARTITION_ID"), - @Index(name = "IDX_SP_COORDS_UPDATED", columnList = "SP_UPDATED"), - @Index(name = "IDX_SP_COORDS_RESID", columnList = "RES_ID") -}) +@Table( + name = "HFJ_SPIDX_COORDS", + indexes = { + @Index( + name = "IDX_SP_COORDS_HASH_V2", + columnList = "HASH_IDENTITY,SP_LATITUDE,SP_LONGITUDE,RES_ID,PARTITION_ID"), + @Index(name = "IDX_SP_COORDS_UPDATED", columnList = "SP_UPDATED"), + @Index(name = "IDX_SP_COORDS_RESID", columnList = "RES_ID") + }) public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchParam { public static final int MAX_LENGTH = 100; private static final long serialVersionUID = 1L; + @Column(name = "SP_LATITUDE") - //@FullTextField + // @FullTextField public double myLatitude; + @Column(name = "SP_LONGITUDE") - //@FullTextField + // @FullTextField public double myLongitude; + @Id @SequenceGenerator(name = "SEQ_SPIDX_COORDS", sequenceName = "SEQ_SPIDX_COORDS") @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SPIDX_COORDS") @@ -69,15 +76,25 @@ public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchP @Column(name = "HASH_IDENTITY", nullable = true) private Long myHashIdentity; - @ManyToOne(optional = false, fetch = FetchType.LAZY, cascade = {}) - @JoinColumn(foreignKey = @ForeignKey(name = "FKC97MPK37OKWU8QVTCEG2NH9VN"), - name = "RES_ID", referencedColumnName = "RES_ID", nullable = false) + @ManyToOne( + optional = false, + fetch = FetchType.LAZY, + cascade = {}) + @JoinColumn( + foreignKey = @ForeignKey(name = "FKC97MPK37OKWU8QVTCEG2NH9VN"), + name = "RES_ID", + referencedColumnName = "RES_ID", + nullable = false) private ResourceTable myResource; - public ResourceIndexedSearchParamCoords() { - } + public ResourceIndexedSearchParamCoords() {} - public ResourceIndexedSearchParamCoords(PartitionSettings thePartitionSettings, String theResourceType, String theParamName, double theLatitude, double theLongitude) { + public ResourceIndexedSearchParamCoords( + PartitionSettings thePartitionSettings, + String theResourceType, + String theParamName, + double theLatitude, + double theLongitude) { setPartitionSettings(thePartitionSettings); setResourceType(theResourceType); setParamName(theParamName); @@ -146,7 +163,6 @@ public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchP myId = theId; } - public double getLatitude() { return myLatitude; } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamDate.java 
b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamDate.java index 1df92e2275c..01fcab6f275 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamDate.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamDate.java @@ -19,10 +19,24 @@ */ package ca.uhn.fhir.jpa.model.entity; +import ca.uhn.fhir.jpa.model.config.PartitionSettings; +import ca.uhn.fhir.model.api.IQueryParameterType; +import ca.uhn.fhir.model.api.TemporalPrecisionEnum; +import ca.uhn.fhir.model.primitive.InstantDt; +import ca.uhn.fhir.rest.param.DateParam; +import ca.uhn.fhir.rest.param.DateRangeParam; +import ca.uhn.fhir.util.DateUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.builder.EqualsBuilder; +import org.apache.commons.lang3.builder.HashCodeBuilder; +import org.apache.commons.lang3.builder.ToStringBuilder; +import org.apache.commons.lang3.builder.ToStringStyle; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField; +import org.hl7.fhir.r4.model.DateTimeType; + import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Date; - import javax.persistence.Column; import javax.persistence.Embeddable; import javax.persistence.Entity; @@ -40,32 +54,28 @@ import javax.persistence.Temporal; import javax.persistence.TemporalType; import javax.persistence.Transient; -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.builder.EqualsBuilder; -import org.apache.commons.lang3.builder.HashCodeBuilder; -import org.apache.commons.lang3.builder.ToStringBuilder; -import org.apache.commons.lang3.builder.ToStringStyle; -import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField; -import org.hl7.fhir.r4.model.DateTimeType; - -import ca.uhn.fhir.jpa.model.config.PartitionSettings; -import ca.uhn.fhir.model.api.IQueryParameterType; -import ca.uhn.fhir.model.api.TemporalPrecisionEnum; -import ca.uhn.fhir.model.primitive.InstantDt; -import ca.uhn.fhir.rest.param.DateParam; -import ca.uhn.fhir.rest.param.DateRangeParam; -import ca.uhn.fhir.util.DateUtils; - @Embeddable @Entity -@Table(name = "HFJ_SPIDX_DATE", indexes = { - // We previously had an index called IDX_SP_DATE - Dont reuse - @Index(name = "IDX_SP_DATE_HASH_V2", columnList = "HASH_IDENTITY,SP_VALUE_LOW,SP_VALUE_HIGH,RES_ID,PARTITION_ID"), - @Index(name = "IDX_SP_DATE_HASH_HIGH_V2", columnList = "HASH_IDENTITY,SP_VALUE_HIGH,RES_ID,PARTITION_ID"), - @Index(name = "IDX_SP_DATE_ORD_HASH_V2", columnList = "HASH_IDENTITY,SP_VALUE_LOW_DATE_ORDINAL,SP_VALUE_HIGH_DATE_ORDINAL,RES_ID,PARTITION_ID"), - @Index(name = "IDX_SP_DATE_ORD_HASH_HIGH_V2", columnList = "HASH_IDENTITY,SP_VALUE_HIGH_DATE_ORDINAL,RES_ID,PARTITION_ID"), - @Index(name = "IDX_SP_DATE_RESID_V2", columnList = "RES_ID,HASH_IDENTITY,SP_VALUE_LOW,SP_VALUE_HIGH,SP_VALUE_LOW_DATE_ORDINAL,SP_VALUE_HIGH_DATE_ORDINAL,PARTITION_ID"), -}) +@Table( + name = "HFJ_SPIDX_DATE", + indexes = { + // We previously had an index called IDX_SP_DATE - Dont reuse + @Index( + name = "IDX_SP_DATE_HASH_V2", + columnList = "HASH_IDENTITY,SP_VALUE_LOW,SP_VALUE_HIGH,RES_ID,PARTITION_ID"), + @Index(name = "IDX_SP_DATE_HASH_HIGH_V2", columnList = "HASH_IDENTITY,SP_VALUE_HIGH,RES_ID,PARTITION_ID"), + @Index( + name = "IDX_SP_DATE_ORD_HASH_V2", + columnList = + "HASH_IDENTITY,SP_VALUE_LOW_DATE_ORDINAL,SP_VALUE_HIGH_DATE_ORDINAL,RES_ID,PARTITION_ID"), + @Index( + name = 
"IDX_SP_DATE_ORD_HASH_HIGH_V2", + columnList = "HASH_IDENTITY,SP_VALUE_HIGH_DATE_ORDINAL,RES_ID,PARTITION_ID"), + @Index( + name = "IDX_SP_DATE_RESID_V2", + columnList = + "RES_ID,HASH_IDENTITY,SP_VALUE_LOW,SP_VALUE_HIGH,SP_VALUE_LOW_DATE_ORDINAL,SP_VALUE_HIGH_DATE_ORDINAL,PARTITION_ID"), + }) public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchParam { private static final long serialVersionUID = 1L; @@ -86,11 +96,13 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar */ @Column(name = "SP_VALUE_LOW_DATE_ORDINAL") public Integer myValueLowDateOrdinal; + @Column(name = "SP_VALUE_HIGH_DATE_ORDINAL") public Integer myValueHighDateOrdinal; @Transient private transient String myOriginalValue; + @Id @SequenceGenerator(name = "SEQ_SPIDX_DATE", sequenceName = "SEQ_SPIDX_DATE") @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SPIDX_DATE") @@ -105,10 +117,15 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar @Column(name = "HASH_IDENTITY", nullable = true) private Long myHashIdentity; - @ManyToOne(optional = false, fetch = FetchType.LAZY, cascade = {}) - @JoinColumn( nullable = false, - name = "RES_ID", referencedColumnName = "RES_ID", - foreignKey = @ForeignKey(name="FK_SP_DATE_RES")) + @ManyToOne( + optional = false, + fetch = FetchType.LAZY, + cascade = {}) + @JoinColumn( + nullable = false, + name = "RES_ID", + referencedColumnName = "RES_ID", + foreignKey = @ForeignKey(name = "FK_SP_DATE_RES")) private ResourceTable myResource; /** @@ -121,7 +138,15 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar /** * Constructor */ - public ResourceIndexedSearchParamDate(PartitionSettings thePartitionSettings, String theResourceType, String theParamName, Date theLow, String theLowString, Date theHigh, String theHighString, String theOriginalValue) { + public ResourceIndexedSearchParamDate( + PartitionSettings thePartitionSettings, + String theResourceType, + String theParamName, + Date theLow, + String theLowString, + Date theHigh, + String theHighString, + String theOriginalValue) { setPartitionSettings(thePartitionSettings); setResourceType(theResourceType); setParamName(theParamName); @@ -147,44 +172,44 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar } private void reComputeValueHighDate(Date theHigh, String theHighString) { - if (StringUtils.isBlank(theHighString) || theHigh == null) - return; + if (StringUtils.isBlank(theHighString) || theHigh == null) return; // FT : 2021-09-10 not very comfortable to set the high value to the last second // Timezone? existing data? 
// if YYYY or YYYY-MM or YYYY-MM-DD add the last second if (theHighString.length() == 4 || theHighString.length() == 7 || theHighString.length() == 10) { - - String theCompleteDateStr = DateUtils.getCompletedDate(theHighString).getRight(); + + String theCompleteDateStr = + DateUtils.getCompletedDate(theHighString).getRight(); try { - Date complateDate = new SimpleDateFormat("yyyy-MM-dd").parse(theCompleteDateStr); - this.myValueHigh = DateUtils.getEndOfDay(complateDate); + Date complateDate = new SimpleDateFormat("yyyy-MM-dd").parse(theCompleteDateStr); + this.myValueHigh = DateUtils.getEndOfDay(complateDate); } catch (ParseException e) { - // do nothing; + // do nothing; } } - } + private int generateLowOrdinalDateInteger(String theDateString) { if (theDateString.contains("T")) { theDateString = theDateString.substring(0, theDateString.indexOf("T")); } - + theDateString = DateUtils.getCompletedDate(theDateString).getLeft(); theDateString = theDateString.replace("-", ""); return Integer.valueOf(theDateString); } private int generateHighOrdinalDateInteger(String theDateString) { - + if (theDateString.contains("T")) { theDateString = theDateString.substring(0, theDateString.indexOf("T")); } - + theDateString = DateUtils.getCompletedDate(theDateString).getRight(); theDateString = theDateString.replace("-", ""); return Integer.valueOf(theDateString); } - + private void computeValueLowDateOrdinal(String theLow) { if (StringUtils.isNotBlank(theLow)) { this.myValueLowDateOrdinal = generateLowOrdinalDateInteger(theLow); @@ -331,7 +356,6 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar DateParam dateParam = (DateParam) theParam; DateRangeParam range = new DateRangeParam(dateParam); - boolean result; if (dateParam.getPrecision().ordinal() <= TemporalPrecisionEnum.DAY.ordinal()) { result = matchesOrdinalDateBounds(range); @@ -369,18 +393,21 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar return false; } if (lowerBoundAsDateInteger != null) { - //TODO as we run into equality issues - result &= (myValueLowDateOrdinal.equals(lowerBoundAsDateInteger) || myValueLowDateOrdinal > lowerBoundAsDateInteger); - result &= (myValueHighDateOrdinal.equals(lowerBoundAsDateInteger) || myValueHighDateOrdinal > lowerBoundAsDateInteger); + // TODO as we run into equality issues + result &= (myValueLowDateOrdinal.equals(lowerBoundAsDateInteger) + || myValueLowDateOrdinal > lowerBoundAsDateInteger); + result &= (myValueHighDateOrdinal.equals(lowerBoundAsDateInteger) + || myValueHighDateOrdinal > lowerBoundAsDateInteger); } if (upperBoundAsDateInteger != null) { - result &= (myValueHighDateOrdinal.equals(upperBoundAsDateInteger) || myValueHighDateOrdinal < upperBoundAsDateInteger); - result &= (myValueLowDateOrdinal.equals(upperBoundAsDateInteger) || myValueLowDateOrdinal < upperBoundAsDateInteger); + result &= (myValueHighDateOrdinal.equals(upperBoundAsDateInteger) + || myValueHighDateOrdinal < upperBoundAsDateInteger); + result &= (myValueLowDateOrdinal.equals(upperBoundAsDateInteger) + || myValueLowDateOrdinal < upperBoundAsDateInteger); } return result; } - public static Long calculateOrdinalValue(Date theDate) { if (theDate == null) { return null; diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamNumber.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamNumber.java index 253b8d7fe7c..e98508339ce 100644 --- 
a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamNumber.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamNumber.java @@ -28,6 +28,8 @@ import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; import org.hibernate.search.mapper.pojo.mapping.definition.annotation.ScaledNumberField; +import java.math.BigDecimal; +import java.util.Objects; import javax.persistence.Column; import javax.persistence.Embeddable; import javax.persistence.Entity; @@ -41,19 +43,20 @@ import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import javax.persistence.SequenceGenerator; import javax.persistence.Table; -import java.math.BigDecimal; -import java.util.Objects; @Embeddable @Entity -@Table(name = "HFJ_SPIDX_NUMBER", indexes = { -// We used to have an index with name IDX_SP_NUMBER - Dont reuse - @Index(name = "IDX_SP_NUMBER_HASH_VAL_V2", columnList = "HASH_IDENTITY,SP_VALUE,RES_ID,PARTITION_ID"), - @Index(name = "IDX_SP_NUMBER_RESID_V2", columnList = "RES_ID, HASH_IDENTITY, SP_VALUE, PARTITION_ID") -}) +@Table( + name = "HFJ_SPIDX_NUMBER", + indexes = { + // We used to have an index with name IDX_SP_NUMBER - Dont reuse + @Index(name = "IDX_SP_NUMBER_HASH_VAL_V2", columnList = "HASH_IDENTITY,SP_VALUE,RES_ID,PARTITION_ID"), + @Index(name = "IDX_SP_NUMBER_RESID_V2", columnList = "RES_ID, HASH_IDENTITY, SP_VALUE, PARTITION_ID") + }) public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchParam { private static final long serialVersionUID = 1L; + @Column(name = "SP_VALUE", nullable = true) @ScaledNumberField public BigDecimal myValue; @@ -69,15 +72,21 @@ public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchP @Column(name = "HASH_IDENTITY", nullable = true) private Long myHashIdentity; - @ManyToOne(optional = false, fetch = FetchType.LAZY, cascade = {}) - @JoinColumn(foreignKey = @ForeignKey(name = "FK_SP_NUMBER_RES"), - name = "RES_ID", referencedColumnName = "RES_ID", nullable = false) + @ManyToOne( + optional = false, + fetch = FetchType.LAZY, + cascade = {}) + @JoinColumn( + foreignKey = @ForeignKey(name = "FK_SP_NUMBER_RES"), + name = "RES_ID", + referencedColumnName = "RES_ID", + nullable = false) private ResourceTable myResource; - public ResourceIndexedSearchParamNumber() { - } + public ResourceIndexedSearchParamNumber() {} - public ResourceIndexedSearchParamNumber(PartitionSettings thePartitionSettings, String theResourceType, String theParamName, BigDecimal theValue) { + public ResourceIndexedSearchParamNumber( + PartitionSettings thePartitionSettings, String theResourceType, String theParamName, BigDecimal theValue) { setPartitionSettings(thePartitionSettings); setResourceType(theResourceType); setParamName(theParamName); diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantity.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantity.java index c47b448f0eb..80841720449 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantity.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantity.java @@ -27,6 +27,8 @@ import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; import 
org.hibernate.search.mapper.pojo.mapping.definition.annotation.ScaledNumberField; +import java.math.BigDecimal; +import java.util.Objects; import javax.persistence.Column; import javax.persistence.Embeddable; import javax.persistence.Entity; @@ -40,46 +42,65 @@ import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import javax.persistence.SequenceGenerator; import javax.persistence.Table; -import java.math.BigDecimal; -import java.util.Objects; import static org.apache.commons.lang3.StringUtils.defaultString; import static org.apache.commons.lang3.StringUtils.isBlank; -//@formatter:off +// @formatter:off @Embeddable @Entity -@Table(name = "HFJ_SPIDX_QUANTITY", indexes = { -// We used to have an index named IDX_SP_QUANTITY - Dont reuse - @Index(name = "IDX_SP_QUANTITY_HASH_V2", columnList = "HASH_IDENTITY,SP_VALUE,RES_ID,PARTITION_ID"), - @Index(name = "IDX_SP_QUANTITY_HASH_UN_V2", columnList = "HASH_IDENTITY_AND_UNITS,SP_VALUE,RES_ID,PARTITION_ID"), - @Index(name = "IDX_SP_QUANTITY_HASH_SYSUN_V2", columnList = "HASH_IDENTITY_SYS_UNITS,SP_VALUE,RES_ID,PARTITION_ID"), - @Index(name = "IDX_SP_QUANTITY_RESID_V2", columnList = "RES_ID,HASH_IDENTITY,HASH_IDENTITY_SYS_UNITS,HASH_IDENTITY_AND_UNITS,SP_VALUE,PARTITION_ID") -}) +@Table( + name = "HFJ_SPIDX_QUANTITY", + indexes = { + // We used to have an index named IDX_SP_QUANTITY - Dont reuse + @Index(name = "IDX_SP_QUANTITY_HASH_V2", columnList = "HASH_IDENTITY,SP_VALUE,RES_ID,PARTITION_ID"), + @Index( + name = "IDX_SP_QUANTITY_HASH_UN_V2", + columnList = "HASH_IDENTITY_AND_UNITS,SP_VALUE,RES_ID,PARTITION_ID"), + @Index( + name = "IDX_SP_QUANTITY_HASH_SYSUN_V2", + columnList = "HASH_IDENTITY_SYS_UNITS,SP_VALUE,RES_ID,PARTITION_ID"), + @Index( + name = "IDX_SP_QUANTITY_RESID_V2", + columnList = + "RES_ID,HASH_IDENTITY,HASH_IDENTITY_SYS_UNITS,HASH_IDENTITY_AND_UNITS,SP_VALUE,PARTITION_ID") + }) public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearchParamQuantity { private static final long serialVersionUID = 1L; - + @Id @SequenceGenerator(name = "SEQ_SPIDX_QUANTITY", sequenceName = "SEQ_SPIDX_QUANTITY") @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SPIDX_QUANTITY") @Column(name = "SP_ID") private Long myId; - + @Column(name = "SP_VALUE", nullable = true) @ScaledNumberField public Double myValue; - @ManyToOne(optional = false, fetch = FetchType.LAZY, cascade = {}) - @JoinColumn(foreignKey = @ForeignKey(name = "FK_SP_QUANTITY_RES"), - name = "RES_ID", referencedColumnName = "RES_ID", nullable = false) + @ManyToOne( + optional = false, + fetch = FetchType.LAZY, + cascade = {}) + @JoinColumn( + foreignKey = @ForeignKey(name = "FK_SP_QUANTITY_RES"), + name = "RES_ID", + referencedColumnName = "RES_ID", + nullable = false) private ResourceTable myResource; public ResourceIndexedSearchParamQuantity() { super(); } - public ResourceIndexedSearchParamQuantity(PartitionSettings thePartitionSettings, String theResourceType, String theParamName, BigDecimal theValue, String theSystem, String theUnits) { + public ResourceIndexedSearchParamQuantity( + PartitionSettings thePartitionSettings, + String theResourceType, + String theParamName, + BigDecimal theValue, + String theSystem, + String theUnits) { this(); setPartitionSettings(thePartitionSettings); setResourceType(theResourceType); @@ -101,7 +122,7 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearc setHashIdentityAndUnits(source.getHashIdentityAndUnits()); 
setHashIdentitySystemAndUnits(source.getHashIdentitySystemAndUnits()); } - + public BigDecimal getValue() { return myValue != null ? new BigDecimal(myValue) : null; } @@ -110,7 +131,7 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearc myValue = theValue != null ? theValue.doubleValue() : null; return this; } - + @Override public Long getId() { return myId; @@ -120,7 +141,7 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearc public void setId(Long theId) { myId = theId; } - + @Override public IQueryParameterType toQueryParameterType() { return new QuantityParam(null, getValue(), getSystem(), getUnits()); @@ -161,10 +182,10 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearc b.append(getValue(), obj.getValue()); return b.isEquals(); } - + @Override public boolean matches(IQueryParameterType theParam) { - + if (!(theParam instanceof QuantityParam)) { return false; } @@ -180,24 +201,24 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearc } else { String unitsString = defaultString(getUnits()); if (quantity.getSystem() == null) { - if (unitsString.equalsIgnoreCase(quantityUnitsString) && - Objects.equals(getValue(), quantity.getValue())) { + if (unitsString.equalsIgnoreCase(quantityUnitsString) + && Objects.equals(getValue(), quantity.getValue())) { retval = true; } } else if (isBlank(quantityUnitsString)) { - if (getSystem().equalsIgnoreCase(quantity.getSystem()) && - Objects.equals(getValue(), quantity.getValue())) { + if (getSystem().equalsIgnoreCase(quantity.getSystem()) + && Objects.equals(getValue(), quantity.getValue())) { retval = true; } } else { - if (getSystem().equalsIgnoreCase(quantity.getSystem()) && - unitsString.equalsIgnoreCase(quantityUnitsString) && - Objects.equals(getValue(), quantity.getValue())) { + if (getSystem().equalsIgnoreCase(quantity.getSystem()) + && unitsString.equalsIgnoreCase(quantityUnitsString) + && Objects.equals(getValue(), quantity.getValue())) { retval = true; } } } - + return retval; } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantityNormalized.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantityNormalized.java index 991dd1fc83e..2a7ca219558 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantityNormalized.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantityNormalized.java @@ -29,6 +29,8 @@ import org.apache.commons.lang3.builder.ToStringStyle; import org.fhir.ucum.Pair; import org.hibernate.search.mapper.pojo.mapping.definition.annotation.ScaledNumberField; +import java.math.BigDecimal; +import java.util.Objects; import javax.persistence.Column; import javax.persistence.Embeddable; import javax.persistence.Entity; @@ -42,21 +44,28 @@ import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import javax.persistence.SequenceGenerator; import javax.persistence.Table; -import java.math.BigDecimal; -import java.util.Objects; import static org.apache.commons.lang3.StringUtils.defaultString; import static org.apache.commons.lang3.StringUtils.isBlank; -//@formatter:off +// @formatter:off @Embeddable @Entity -@Table(name = "HFJ_SPIDX_QUANTITY_NRML", indexes = { - @Index(name = "IDX_SP_QNTY_NRML_HASH_V2", columnList = "HASH_IDENTITY,SP_VALUE,RES_ID,PARTITION_ID"), - @Index(name 
= "IDX_SP_QNTY_NRML_HASH_UN_V2", columnList = "HASH_IDENTITY_AND_UNITS,SP_VALUE,RES_ID,PARTITION_ID"), - @Index(name = "IDX_SP_QNTY_NRML_HASH_SYSUN_V2", columnList = "HASH_IDENTITY_SYS_UNITS,SP_VALUE,RES_ID,PARTITION_ID"), - @Index(name = "IDX_SP_QNTY_NRML_RESID_V2", columnList = "RES_ID,HASH_IDENTITY,HASH_IDENTITY_SYS_UNITS,HASH_IDENTITY_AND_UNITS,SP_VALUE,PARTITION_ID") -}) +@Table( + name = "HFJ_SPIDX_QUANTITY_NRML", + indexes = { + @Index(name = "IDX_SP_QNTY_NRML_HASH_V2", columnList = "HASH_IDENTITY,SP_VALUE,RES_ID,PARTITION_ID"), + @Index( + name = "IDX_SP_QNTY_NRML_HASH_UN_V2", + columnList = "HASH_IDENTITY_AND_UNITS,SP_VALUE,RES_ID,PARTITION_ID"), + @Index( + name = "IDX_SP_QNTY_NRML_HASH_SYSUN_V2", + columnList = "HASH_IDENTITY_SYS_UNITS,SP_VALUE,RES_ID,PARTITION_ID"), + @Index( + name = "IDX_SP_QNTY_NRML_RESID_V2", + columnList = + "RES_ID,HASH_IDENTITY,HASH_IDENTITY_SYS_UNITS,HASH_IDENTITY_AND_UNITS,SP_VALUE,PARTITION_ID") + }) /** * Support UCUM service * @since 5.3.0 @@ -72,22 +81,35 @@ public class ResourceIndexedSearchParamQuantityNormalized extends BaseResourceIn @Column(name = "SP_ID") private Long myId; - // Changed to double here for storing the value after converted to the CanonicalForm due to BigDecimal maps NUMBER(19,2) + // Changed to double here for storing the value after converted to the CanonicalForm due to BigDecimal maps + // NUMBER(19,2) // The precision may lost even to store 1.2cm which is 0.012m in the CanonicalForm @Column(name = "SP_VALUE", nullable = true) @ScaledNumberField public Double myValue; - @ManyToOne(optional = false, fetch = FetchType.LAZY, cascade = {}) - @JoinColumn(foreignKey = @ForeignKey(name = "FK_SP_QUANTITYNM_RES"), - name = "RES_ID", referencedColumnName = "RES_ID", nullable = false) + @ManyToOne( + optional = false, + fetch = FetchType.LAZY, + cascade = {}) + @JoinColumn( + foreignKey = @ForeignKey(name = "FK_SP_QUANTITYNM_RES"), + name = "RES_ID", + referencedColumnName = "RES_ID", + nullable = false) private ResourceTable myResource; public ResourceIndexedSearchParamQuantityNormalized() { super(); } - public ResourceIndexedSearchParamQuantityNormalized(PartitionSettings thePartitionSettings, String theResourceType, String theParamName, double theValue, String theSystem, String theUnits) { + public ResourceIndexedSearchParamQuantityNormalized( + PartitionSettings thePartitionSettings, + String theResourceType, + String theParamName, + double theValue, + String theSystem, + String theUnits) { this(); setPartitionSettings(thePartitionSettings); setResourceType(theResourceType); @@ -110,10 +132,11 @@ public class ResourceIndexedSearchParamQuantityNormalized extends BaseResourceIn setHashIdentitySystemAndUnits(source.getHashIdentitySystemAndUnits()); } - //- myValue + // - myValue public Double getValue() { return myValue; } + public ResourceIndexedSearchParamQuantityNormalized setValue(Double theValue) { myValue = theValue; return this; @@ -124,11 +147,12 @@ public class ResourceIndexedSearchParamQuantityNormalized extends BaseResourceIn return this; } - //-- myId + // -- myId @Override public Long getId() { return myId; } + @Override public void setId(Long theId) { myId = theId; @@ -187,11 +211,10 @@ public class ResourceIndexedSearchParamQuantityNormalized extends BaseResourceIn String quantitySystem = quantity.getSystem(); BigDecimal quantityValue = quantity.getValue(); Double quantityDoubleValue = null; - if (quantityValue != null) - quantityDoubleValue = quantityValue.doubleValue(); + if (quantityValue != null) quantityDoubleValue 
= quantityValue.doubleValue(); String quantityUnits = defaultString(quantity.getUnits()); - //-- convert the value/unit to the canonical form if any, otherwise store the original value/units pair + // -- convert the value/unit to the canonical form if any, otherwise store the original value/units pair Pair canonicalForm = UcumServiceUtil.getCanonicalForm(quantitySystem, quantityValue, quantityUnits); if (canonicalForm != null) { quantityDoubleValue = Double.parseDouble(canonicalForm.getValue().asDecimal()); @@ -206,19 +229,17 @@ public class ResourceIndexedSearchParamQuantityNormalized extends BaseResourceIn } else { String unitsString = defaultString(getUnits()); if (quantitySystem == null) { - if (unitsString.equalsIgnoreCase(quantityUnits) && - Objects.equals(getValue(), quantityDoubleValue)) { + if (unitsString.equalsIgnoreCase(quantityUnits) && Objects.equals(getValue(), quantityDoubleValue)) { retval = true; } } else if (isBlank(quantityUnits)) { - if (getSystem().equalsIgnoreCase(quantitySystem) && - Objects.equals(getValue(), quantityDoubleValue)) { + if (getSystem().equalsIgnoreCase(quantitySystem) && Objects.equals(getValue(), quantityDoubleValue)) { retval = true; } } else { - if (getSystem().equalsIgnoreCase(quantitySystem) && - unitsString.equalsIgnoreCase(quantityUnits) && - Objects.equals(getValue(), quantityDoubleValue)) { + if (getSystem().equalsIgnoreCase(quantitySystem) + && unitsString.equalsIgnoreCase(quantityUnits) + && Objects.equals(getValue(), quantityDoubleValue)) { retval = true; } } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamString.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamString.java index 2b2df9f1f7b..870c367a418 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamString.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamString.java @@ -45,24 +45,26 @@ import javax.persistence.Table; import static org.apache.commons.lang3.StringUtils.defaultString; -//@formatter:off +// @formatter:off @Embeddable @Entity -@Table(name = "HFJ_SPIDX_STRING", indexes = { - /* - * Note: We previously had indexes with the following names, - * do not reuse these names: - * IDX_SP_STRING - */ +@Table( + name = "HFJ_SPIDX_STRING", + indexes = { + /* + * Note: We previously had indexes with the following names, + * do not reuse these names: + * IDX_SP_STRING + */ - // This is used for sorting, and for :contains queries currently - @Index(name = "IDX_SP_STRING_HASH_IDENT_V2", columnList = "HASH_IDENTITY,RES_ID,PARTITION_ID"), - - @Index(name = "IDX_SP_STRING_HASH_NRM_V2", columnList = "HASH_NORM_PREFIX,SP_VALUE_NORMALIZED,RES_ID,PARTITION_ID"), - @Index(name = "IDX_SP_STRING_HASH_EXCT_V2", columnList = "HASH_EXACT,RES_ID,PARTITION_ID"), - - @Index(name = "IDX_SP_STRING_RESID_V2", columnList = "RES_ID,HASH_NORM_PREFIX,PARTITION_ID") -}) + // This is used for sorting, and for :contains queries currently + @Index(name = "IDX_SP_STRING_HASH_IDENT_V2", columnList = "HASH_IDENTITY,RES_ID,PARTITION_ID"), + @Index( + name = "IDX_SP_STRING_HASH_NRM_V2", + columnList = "HASH_NORM_PREFIX,SP_VALUE_NORMALIZED,RES_ID,PARTITION_ID"), + @Index(name = "IDX_SP_STRING_HASH_EXCT_V2", columnList = "HASH_EXACT,RES_ID,PARTITION_ID"), + @Index(name = "IDX_SP_STRING_RESID_V2", columnList = "RES_ID,HASH_NORM_PREFIX,PARTITION_ID") + }) public class ResourceIndexedSearchParamString 
extends BaseResourceIndexedSearchParam { /* @@ -71,6 +73,7 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP public static final int MAX_LENGTH = 200; public static final int HASH_PREFIX_LENGTH = 1; private static final long serialVersionUID = 1L; + @Id @SequenceGenerator(name = "SEQ_SPIDX_STRING", sequenceName = "SEQ_SPIDX_STRING") @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SPIDX_STRING") @@ -78,8 +81,11 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP private Long myId; @ManyToOne(optional = false) - @JoinColumn(name = "RES_ID", referencedColumnName = "RES_ID", nullable = false, - foreignKey = @ForeignKey(name = "FK_SPIDXSTR_RESOURCE")) + @JoinColumn( + name = "RES_ID", + referencedColumnName = "RES_ID", + nullable = false, + foreignKey = @ForeignKey(name = "FK_SPIDXSTR_RESOURCE")) private ResourceTable myResource; @Column(name = "SP_VALUE_EXACT", length = MAX_LENGTH, nullable = true) @@ -107,7 +113,13 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP super(); } - public ResourceIndexedSearchParamString(PartitionSettings thePartitionSettings, StorageSettings theStorageSettings, String theResourceType, String theParamName, String theValueNormalized, String theValueExact) { + public ResourceIndexedSearchParamString( + PartitionSettings thePartitionSettings, + StorageSettings theStorageSettings, + String theResourceType, + String theParamName, + String theValueNormalized, + String theValueExact) { setPartitionSettings(thePartitionSettings); setStorageSettings(theStorageSettings); setResourceType(theResourceType); @@ -135,7 +147,6 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP myHashExact = null; } - @Override public void calculateHashes() { if (myHashIdentity != null || myHashExact != null || myHashNormalizedPrefix != null) { @@ -146,7 +157,13 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP String paramName = getParamName(); String valueNormalized = getValueNormalized(); String valueExact = getValueExact(); - setHashNormalizedPrefix(calculateHashNormalized(getPartitionSettings(), getPartitionId(), getStorageSettings(), resourceType, paramName, valueNormalized)); + setHashNormalizedPrefix(calculateHashNormalized( + getPartitionSettings(), + getPartitionId(), + getStorageSettings(), + resourceType, + paramName, + valueNormalized)); setHashExact(calculateHashExact(getPartitionSettings(), getPartitionId(), resourceType, paramName, valueExact)); setHashIdentity(calculateHashIdentity(getPartitionSettings(), getPartitionId(), resourceType, paramName)); } @@ -208,7 +225,6 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP myId = theId; } - public String getValueExact() { return myValueExact; } @@ -274,21 +290,50 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP return defaultString(getValueNormalized()).startsWith(normalizedString); } - public static long calculateHashExact(PartitionSettings thePartitionSettings, PartitionablePartitionId theRequestPartitionId, String theResourceType, String theParamName, String theValueExact) { + public static long calculateHashExact( + PartitionSettings thePartitionSettings, + PartitionablePartitionId theRequestPartitionId, + String theResourceType, + String theParamName, + String theValueExact) { RequestPartitionId requestPartitionId = 
PartitionablePartitionId.toRequestPartitionId(theRequestPartitionId); - return calculateHashExact(thePartitionSettings, requestPartitionId, theResourceType, theParamName, theValueExact); + return calculateHashExact( + thePartitionSettings, requestPartitionId, theResourceType, theParamName, theValueExact); } - public static long calculateHashExact(PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, String theResourceType, String theParamName, String theValueExact) { + public static long calculateHashExact( + PartitionSettings thePartitionSettings, + RequestPartitionId theRequestPartitionId, + String theResourceType, + String theParamName, + String theValueExact) { return hash(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, theValueExact); } - public static long calculateHashNormalized(PartitionSettings thePartitionSettings, PartitionablePartitionId theRequestPartitionId, StorageSettings theStorageSettings, String theResourceType, String theParamName, String theValueNormalized) { + public static long calculateHashNormalized( + PartitionSettings thePartitionSettings, + PartitionablePartitionId theRequestPartitionId, + StorageSettings theStorageSettings, + String theResourceType, + String theParamName, + String theValueNormalized) { RequestPartitionId requestPartitionId = PartitionablePartitionId.toRequestPartitionId(theRequestPartitionId); - return calculateHashNormalized(thePartitionSettings, requestPartitionId, theStorageSettings, theResourceType, theParamName, theValueNormalized); + return calculateHashNormalized( + thePartitionSettings, + requestPartitionId, + theStorageSettings, + theResourceType, + theParamName, + theValueNormalized); } - public static long calculateHashNormalized(PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, StorageSettings theStorageSettings, String theResourceType, String theParamName, String theValueNormalized) { + public static long calculateHashNormalized( + PartitionSettings thePartitionSettings, + RequestPartitionId theRequestPartitionId, + StorageSettings theStorageSettings, + String theResourceType, + String theParamName, + String theValueNormalized) { /* * If we're not allowing contained searches, we'll add the first * bit of the normalized value to the hash. 
This helps to diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamToken.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamToken.java index 5a3fc976566..098f167a909 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamToken.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamToken.java @@ -51,21 +51,24 @@ import static org.apache.commons.lang3.StringUtils.trim; @Embeddable @Entity -@Table(name = "HFJ_SPIDX_TOKEN", indexes = { - /* - * Note: We previously had indexes with the following names, - * do not reuse these names: - * IDX_SP_TOKEN - * IDX_SP_TOKEN_UNQUAL - */ +@Table( + name = "HFJ_SPIDX_TOKEN", + indexes = { + /* + * Note: We previously had indexes with the following names, + * do not reuse these names: + * IDX_SP_TOKEN + * IDX_SP_TOKEN_UNQUAL + */ - @Index(name = "IDX_SP_TOKEN_HASH_V2", columnList = "HASH_IDENTITY,SP_SYSTEM,SP_VALUE,RES_ID,PARTITION_ID"), - @Index(name = "IDX_SP_TOKEN_HASH_S_V2", columnList = "HASH_SYS,RES_ID,PARTITION_ID"), - @Index(name = "IDX_SP_TOKEN_HASH_SV_V2", columnList = "HASH_SYS_AND_VALUE,RES_ID,PARTITION_ID"), - @Index(name = "IDX_SP_TOKEN_HASH_V_V2", columnList = "HASH_VALUE,RES_ID,PARTITION_ID"), - - @Index(name = "IDX_SP_TOKEN_RESID_V2", columnList = "RES_ID,HASH_SYS_AND_VALUE,HASH_VALUE,HASH_SYS,HASH_IDENTITY,PARTITION_ID") -}) + @Index(name = "IDX_SP_TOKEN_HASH_V2", columnList = "HASH_IDENTITY,SP_SYSTEM,SP_VALUE,RES_ID,PARTITION_ID"), + @Index(name = "IDX_SP_TOKEN_HASH_S_V2", columnList = "HASH_SYS,RES_ID,PARTITION_ID"), + @Index(name = "IDX_SP_TOKEN_HASH_SV_V2", columnList = "HASH_SYS_AND_VALUE,RES_ID,PARTITION_ID"), + @Index(name = "IDX_SP_TOKEN_HASH_V_V2", columnList = "HASH_VALUE,RES_ID,PARTITION_ID"), + @Index( + name = "IDX_SP_TOKEN_RESID_V2", + columnList = "RES_ID,HASH_SYS_AND_VALUE,HASH_VALUE,HASH_SYS,HASH_IDENTITY,PARTITION_ID") + }) public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchParam { public static final int MAX_LENGTH = 200; @@ -107,9 +110,15 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa @Column(name = "HASH_VALUE", nullable = true) private Long myHashValue; - @ManyToOne(optional = false, fetch = FetchType.LAZY, cascade = {}) - @JoinColumn(foreignKey = @ForeignKey(name="FK_SP_TOKEN_RES"), - name = "RES_ID", referencedColumnName = "RES_ID", nullable = false) + @ManyToOne( + optional = false, + fetch = FetchType.LAZY, + cascade = {}) + @JoinColumn( + foreignKey = @ForeignKey(name = "FK_SP_TOKEN_RES"), + name = "RES_ID", + referencedColumnName = "RES_ID", + nullable = false) private ResourceTable myResource; /** @@ -122,7 +131,12 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa /** * Constructor */ - public ResourceIndexedSearchParamToken(PartitionSettings thePartitionSettings, String theResourceType, String theParamName, String theSystem, String theValue) { + public ResourceIndexedSearchParamToken( + PartitionSettings thePartitionSettings, + String theResourceType, + String theParamName, + String theSystem, + String theValue) { super(); setPartitionSettings(thePartitionSettings); setResourceType(theResourceType); @@ -135,7 +149,8 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa /** * Constructor */ - public ResourceIndexedSearchParamToken(PartitionSettings thePartitionSettings, String 
theResourceType, String theParamName, boolean theMissing) { + public ResourceIndexedSearchParamToken( + PartitionSettings thePartitionSettings, String theResourceType, String theParamName, boolean theMissing) { super(); setPartitionSettings(thePartitionSettings); setResourceType(theResourceType); @@ -165,7 +180,6 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa myHashValue = null; } - @Override public void calculateHashes() { if (myHashIdentity != null || myHashSystem != null || myHashValue != null || myHashSystemAndValue != null) { @@ -177,13 +191,15 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa String system = getSystem(); String value = getValue(); setHashIdentity(calculateHashIdentity(getPartitionSettings(), getPartitionId(), resourceType, paramName)); - setHashSystemAndValue(calculateHashSystemAndValue(getPartitionSettings(), getPartitionId(), resourceType, paramName, system, value)); + setHashSystemAndValue(calculateHashSystemAndValue( + getPartitionSettings(), getPartitionId(), resourceType, paramName, system, value)); // Searches using the :of-type modifier can never be partial (system-only or value-only) so don't // bother saving these boolean calculatePartialHashes = !StringUtils.endsWith(paramName, Constants.PARAMQUALIFIER_TOKEN_OF_TYPE); if (calculatePartialHashes) { - setHashSystem(calculateHashSystem(getPartitionSettings(), getPartitionId(), resourceType, paramName, system)); + setHashSystem( + calculateHashSystem(getPartitionSettings(), getPartitionId(), resourceType, paramName, system)); setHashValue(calculateHashValue(getPartitionSettings(), getPartitionId(), resourceType, paramName, value)); } } @@ -322,44 +338,80 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa retVal = true; } } else { - if (token.getSystem().equalsIgnoreCase(getSystem()) && - valueString.equalsIgnoreCase(tokenValueString)) { + if (token.getSystem().equalsIgnoreCase(getSystem()) && valueString.equalsIgnoreCase(tokenValueString)) { retVal = true; } } return retVal; } - - public static long calculateHashSystem(PartitionSettings thePartitionSettings, PartitionablePartitionId theRequestPartitionId, String theResourceType, String theParamName, String theSystem) { + public static long calculateHashSystem( + PartitionSettings thePartitionSettings, + PartitionablePartitionId theRequestPartitionId, + String theResourceType, + String theParamName, + String theSystem) { RequestPartitionId requestPartitionId = PartitionablePartitionId.toRequestPartitionId(theRequestPartitionId); return calculateHashSystem(thePartitionSettings, requestPartitionId, theResourceType, theParamName, theSystem); } - public static long calculateHashSystem(PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, String theResourceType, String theParamName, String theSystem) { + public static long calculateHashSystem( + PartitionSettings thePartitionSettings, + RequestPartitionId theRequestPartitionId, + String theResourceType, + String theParamName, + String theSystem) { return hash(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, trim(theSystem)); } - public static long calculateHashSystemAndValue(PartitionSettings thePartitionSettings, PartitionablePartitionId theRequestPartitionId, String theResourceType, String theParamName, String theSystem, String theValue) { + public static long calculateHashSystemAndValue( + PartitionSettings thePartitionSettings, + PartitionablePartitionId 
theRequestPartitionId, + String theResourceType, + String theParamName, + String theSystem, + String theValue) { RequestPartitionId requestPartitionId = PartitionablePartitionId.toRequestPartitionId(theRequestPartitionId); - return calculateHashSystemAndValue(thePartitionSettings, requestPartitionId, theResourceType, theParamName, theSystem, theValue); + return calculateHashSystemAndValue( + thePartitionSettings, requestPartitionId, theResourceType, theParamName, theSystem, theValue); } - public static long calculateHashSystemAndValue(PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, String theResourceType, String theParamName, String theSystem, String theValue) { - return hash(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, defaultString(trim(theSystem)), trim(theValue)); + public static long calculateHashSystemAndValue( + PartitionSettings thePartitionSettings, + RequestPartitionId theRequestPartitionId, + String theResourceType, + String theParamName, + String theSystem, + String theValue) { + return hash( + thePartitionSettings, + theRequestPartitionId, + theResourceType, + theParamName, + defaultString(trim(theSystem)), + trim(theValue)); } - public static long calculateHashValue(PartitionSettings thePartitionSettings, PartitionablePartitionId theRequestPartitionId, String theResourceType, String theParamName, String theValue) { + public static long calculateHashValue( + PartitionSettings thePartitionSettings, + PartitionablePartitionId theRequestPartitionId, + String theResourceType, + String theParamName, + String theValue) { RequestPartitionId requestPartitionId = PartitionablePartitionId.toRequestPartitionId(theRequestPartitionId); return calculateHashValue(thePartitionSettings, requestPartitionId, theResourceType, theParamName, theValue); } - public static long calculateHashValue(PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, String theResourceType, String theParamName, String theValue) { + public static long calculateHashValue( + PartitionSettings thePartitionSettings, + RequestPartitionId theRequestPartitionId, + String theResourceType, + String theParamName, + String theValue) { String value = trim(theValue); return hash(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, value); } - @Override public ResourceTable getResource() { return myResource; @@ -373,10 +425,10 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa } @PrePersist -/** - * We truncate the fields at the last moment because the tables have limited size. - * We don't truncate earlier in the flow because the index hashes MUST be calculated on the full string. - */ + /** + * We truncate the fields at the last moment because the tables have limited size. + * We don't truncate earlier in the flow because the index hashes MUST be calculated on the full string. 
+ */ public void truncateFieldsForDB() { mySystem = StringUtils.truncate(mySystem, MAX_LENGTH); myValue = StringUtils.truncate(myValue, MAX_LENGTH); diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamUri.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamUri.java index 17135f0f56f..f2a2d8fe33c 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamUri.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamUri.java @@ -47,14 +47,19 @@ import static org.apache.commons.lang3.StringUtils.defaultString; @Embeddable @Entity -@Table(name = "HFJ_SPIDX_URI", indexes = { - // for queries - @Index(name = "IDX_SP_URI_HASH_URI_V2", columnList = "HASH_URI,RES_ID,PARTITION_ID", unique = true), - // for sorting - @Index(name = "IDX_SP_URI_HASH_IDENTITY_V2", columnList = "HASH_IDENTITY,SP_URI,RES_ID,PARTITION_ID", unique = true), - // for index create/delete - @Index(name = "IDX_SP_URI_COORDS", columnList = "RES_ID") -}) +@Table( + name = "HFJ_SPIDX_URI", + indexes = { + // for queries + @Index(name = "IDX_SP_URI_HASH_URI_V2", columnList = "HASH_URI,RES_ID,PARTITION_ID", unique = true), + // for sorting + @Index( + name = "IDX_SP_URI_HASH_IDENTITY_V2", + columnList = "HASH_IDENTITY,SP_URI,RES_ID,PARTITION_ID", + unique = true), + // for index create/delete + @Index(name = "IDX_SP_URI_COORDS", columnList = "RES_ID") + }) public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchParam { /* @@ -65,12 +70,11 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchPara public static final int MAX_LENGTH = 500; private static final long serialVersionUID = 1L; + @Column(name = "SP_URI", nullable = true, length = MAX_LENGTH) @FullTextField public String myUri; - - @Id @SequenceGenerator(name = "SEQ_SPIDX_URI", sequenceName = "SEQ_SPIDX_URI") @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SPIDX_URI") @@ -87,9 +91,15 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchPara @Column(name = "HASH_IDENTITY", nullable = true) private Long myHashIdentity; - @ManyToOne(optional = false, fetch = FetchType.LAZY, cascade = {}) - @JoinColumn(foreignKey = @ForeignKey(name = "FKGXSREUTYMMFJUWDSWV3Y887DO"), - name = "RES_ID", referencedColumnName = "RES_ID", nullable = false) + @ManyToOne( + optional = false, + fetch = FetchType.LAZY, + cascade = {}) + @JoinColumn( + foreignKey = @ForeignKey(name = "FKGXSREUTYMMFJUWDSWV3Y887DO"), + name = "RES_ID", + referencedColumnName = "RES_ID", + nullable = false) private ResourceTable myResource; /** @@ -102,7 +112,8 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchPara /** * Constructor */ - public ResourceIndexedSearchParamUri(PartitionSettings thePartitionSettings, String theResourceType, String theParamName, String theUri) { + public ResourceIndexedSearchParamUri( + PartitionSettings thePartitionSettings, String theResourceType, String theParamName, String theUri) { setPartitionSettings(thePartitionSettings); setResourceType(theResourceType); setParamName(theParamName); @@ -125,7 +136,6 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchPara myHashUri = null; } - @Override public void calculateHashes() { if (myHashIdentity != null || myHashUri != null) { @@ -186,7 +196,6 @@ public class ResourceIndexedSearchParamUri extends 
BaseResourceIndexedSearchPara myId = theId; } - public String getUri() { return myUri; } @@ -232,16 +241,25 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchPara return defaultString(getUri()).equalsIgnoreCase(uri.getValueNotNull()); } - public static long calculateHashUri(PartitionSettings thePartitionSettings, PartitionablePartitionId theRequestPartitionId, String theResourceType, String theParamName, String theUri) { + public static long calculateHashUri( + PartitionSettings thePartitionSettings, + PartitionablePartitionId theRequestPartitionId, + String theResourceType, + String theParamName, + String theUri) { RequestPartitionId requestPartitionId = PartitionablePartitionId.toRequestPartitionId(theRequestPartitionId); return calculateHashUri(thePartitionSettings, requestPartitionId, theResourceType, theParamName, theUri); } - public static long calculateHashUri(PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, String theResourceType, String theParamName, String theUri) { + public static long calculateHashUri( + PartitionSettings thePartitionSettings, + RequestPartitionId theRequestPartitionId, + String theResourceType, + String theParamName, + String theUri) { return hash(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, theUri); } - @Override public ResourceTable getResource() { return myResource; diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceLink.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceLink.java index 10c69a0b565..8877dd90dbb 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceLink.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceLink.java @@ -25,6 +25,7 @@ import org.apache.commons.lang3.builder.HashCodeBuilder; import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField; import org.hl7.fhir.instance.model.api.IIdType; +import java.util.Date; import javax.annotation.Nullable; import javax.persistence.Column; import javax.persistence.Entity; @@ -41,20 +42,26 @@ import javax.persistence.Table; import javax.persistence.Temporal; import javax.persistence.TemporalType; import javax.persistence.Transient; -import java.util.Date; @Entity -@Table(name = "HFJ_RES_LINK", indexes = { - // We need to join both ways, so index from src->tgt and from tgt->src. - // From src->tgt, rows are usually written all together as part of ingestion - keep the index small, and read blocks as needed. - @Index(name = "IDX_RL_SRC", columnList = "SRC_RESOURCE_ID"), - // But from tgt->src, include all the match columns. Targets will usually be randomly distributed - each row in separate block. - @Index(name = "IDX_RL_TGT_v2", columnList = "TARGET_RESOURCE_ID, SRC_PATH, SRC_RESOURCE_ID, TARGET_RESOURCE_TYPE,PARTITION_ID") -}) +@Table( + name = "HFJ_RES_LINK", + indexes = { + // We need to join both ways, so index from src->tgt and from tgt->src. + // From src->tgt, rows are usually written all together as part of ingestion - keep the index small, and + // read blocks as needed. + @Index(name = "IDX_RL_SRC", columnList = "SRC_RESOURCE_ID"), + // But from tgt->src, include all the match columns. Targets will usually be randomly distributed - each row + // in separate block. 
+ @Index( + name = "IDX_RL_TGT_v2", + columnList = "TARGET_RESOURCE_ID, SRC_PATH, SRC_RESOURCE_ID, TARGET_RESOURCE_TYPE,PARTITION_ID") + }) public class ResourceLink extends BaseResourceIndex { public static final int SRC_PATH_LENGTH = 500; private static final long serialVersionUID = 1L; + @SequenceGenerator(name = "SEQ_RESLINK_ID", sequenceName = "SEQ_RESLINK_ID") @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_RESLINK_ID") @Id @@ -65,7 +72,11 @@ public class ResourceLink extends BaseResourceIndex { private String mySourcePath; @ManyToOne(optional = false, fetch = FetchType.LAZY) - @JoinColumn(name = "SRC_RESOURCE_ID", referencedColumnName = "RES_ID", nullable = false, foreignKey = @ForeignKey(name = "FK_RESLINK_SOURCE")) + @JoinColumn( + name = "SRC_RESOURCE_ID", + referencedColumnName = "RES_ID", + nullable = false, + foreignKey = @ForeignKey(name = "FK_RESLINK_SOURCE")) private ResourceTable mySourceResource; @Column(name = "SRC_RESOURCE_ID", insertable = false, updatable = false, nullable = false) @@ -76,7 +87,13 @@ public class ResourceLink extends BaseResourceIndex { private String mySourceResourceType; @ManyToOne(optional = true, fetch = FetchType.LAZY) - @JoinColumn(name = "TARGET_RESOURCE_ID", referencedColumnName = "RES_ID", nullable = true, insertable = false, updatable = false, foreignKey = @ForeignKey(name = "FK_RESLINK_TARGET")) + @JoinColumn( + name = "TARGET_RESOURCE_ID", + referencedColumnName = "RES_ID", + nullable = true, + insertable = false, + updatable = false, + foreignKey = @ForeignKey(name = "FK_RESLINK_TARGET")) private ResourceTable myTargetResource; @Column(name = "TARGET_RESOURCE_ID", insertable = true, updatable = true, nullable = true) @@ -90,12 +107,15 @@ public class ResourceLink extends BaseResourceIndex { @Column(name = "TARGET_RESOURCE_URL", length = 200, nullable = true) @FullTextField private String myTargetResourceUrl; + @Column(name = "TARGET_RESOURCE_VERSION", nullable = true) private Long myTargetResourceVersion; + @FullTextField @Column(name = "SP_UPDATED", nullable = true) // TODO: make this false after HAPI 2.3 @Temporal(TemporalType.TIMESTAMP) private Date myUpdated; + @Transient private transient String myTargetResourceId; @@ -147,7 +167,8 @@ public class ResourceLink extends BaseResourceIndex { b.append(myTargetResourceUrl, obj.myTargetResourceUrl); b.append(myTargetResourceType, obj.myTargetResourceType); b.append(myTargetResourceVersion, obj.myTargetResourceVersion); - // In cases where we are extracting links from a resource that has not yet been persisted, the target resource pid + // In cases where we are extracting links from a resource that has not yet been persisted, the target resource + // pid // will be null so we use the target resource id to differentiate instead if (getTargetResourcePid() == null) { b.append(getTargetResourceId(), obj.getTargetResourceId()); @@ -207,14 +228,14 @@ public class ResourceLink extends BaseResourceIndex { Validate.isTrue(theTargetResourceUrl.hasBaseUrl()); Validate.isTrue(theTargetResourceUrl.hasResourceType()); -// if (theTargetResourceUrl.hasIdPart()) { + // if (theTargetResourceUrl.hasIdPart()) { // do nothing -// } else { + // } else { // Must have set an url like http://example.org/something // We treat 'something' as the resource type because of fix for #659. Prior to #659 fix, 'something' was // treated as the id and 'example.org' was treated as the resource type // Maybe log a warning? 
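The commented-out branch above concerns absolute references that carry no id part; for a well-formed absolute reference, the decomposition that feeds the target resource type and TARGET_RESOURCE_URL can be pictured with a small sketch. This assumes the concrete r4 IdType and its URL parsing behave as shown, which should be checked against the HAPI version in use:

    import org.hl7.fhir.r4.model.IdType;

    class ResourceLinkUrlSketch {
        public static void main(String[] args) {
            // Assumed parsing of an absolute reference; only hasBaseUrl/hasResourceType/getResourceType/getValue
            // are confirmed by the surrounding code, the concrete IdType is an illustrative choice.
            IdType ref = new IdType("http://example.org/fhir/Patient/123");
            System.out.println(ref.hasBaseUrl());      // true
            System.out.println(ref.getResourceType()); // Patient
            System.out.println(ref.getValue());        // the full URL, as stored in TARGET_RESOURCE_URL
        }
    }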
-// } + // } myTargetResourceType = theTargetResourceUrl.getResourceType(); myTargetResourceUrl = theTargetResourceUrl.getValue(); @@ -267,7 +288,8 @@ public class ResourceLink extends BaseResourceIndex { b.append(myTargetResourceUrl); b.append(myTargetResourceVersion); - // In cases where we are extracting links from a resource that has not yet been persisted, the target resource pid + // In cases where we are extracting links from a resource that has not yet been persisted, the target resource + // pid // will be null so we use the target resource id to differentiate instead if (getTargetResourcePid() == null) { b.append(getTargetResourceId()); @@ -318,7 +340,8 @@ public class ResourceLink extends BaseResourceIndex { return retVal; } - public static ResourceLink forAbsoluteReference(String theSourcePath, ResourceTable theSourceResource, IIdType theTargetResourceUrl, Date theUpdated) { + public static ResourceLink forAbsoluteReference( + String theSourcePath, ResourceTable theSourceResource, IIdType theTargetResourceUrl, Date theUpdated) { ResourceLink retVal = new ResourceLink(); retVal.setSourcePath(theSourcePath); retVal.setSourceResource(theSourceResource); @@ -330,7 +353,8 @@ public class ResourceLink extends BaseResourceIndex { /** * Factory for canonical URL */ - public static ResourceLink forLogicalReference(String theSourcePath, ResourceTable theSourceResource, String theTargetResourceUrl, Date theUpdated) { + public static ResourceLink forLogicalReference( + String theSourcePath, ResourceTable theSourceResource, String theTargetResourceUrl, Date theUpdated) { ResourceLink retVal = new ResourceLink(); retVal.setSourcePath(theSourcePath); retVal.setSourceResource(theSourceResource); @@ -342,7 +366,14 @@ public class ResourceLink extends BaseResourceIndex { /** * @param theTargetResourceVersion This should only be populated if the reference actually had a version */ - public static ResourceLink forLocalReference(String theSourcePath, ResourceTable theSourceResource, String theTargetResourceType, Long theTargetResourcePid, String theTargetResourceId, Date theUpdated, @Nullable Long theTargetResourceVersion) { + public static ResourceLink forLocalReference( + String theSourcePath, + ResourceTable theSourceResource, + String theTargetResourceType, + Long theTargetResourcePid, + String theTargetResourceId, + Date theUpdated, + @Nullable Long theTargetResourceVersion) { ResourceLink retVal = new ResourceLink(); retVal.setSourcePath(theSourcePath); retVal.setSourceResource(theSourceResource); @@ -351,5 +382,4 @@ public class ResourceLink extends BaseResourceIndex { retVal.setUpdated(theUpdated); return retVal; } - } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceSearchUrlEntity.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceSearchUrlEntity.java index 8a576d74cef..681d20222f8 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceSearchUrlEntity.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceSearchUrlEntity.java @@ -19,6 +19,7 @@ */ package ca.uhn.fhir.jpa.model.entity; +import java.util.Date; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.Id; @@ -26,14 +27,14 @@ import javax.persistence.Index; import javax.persistence.Table; import javax.persistence.Temporal; import javax.persistence.TemporalType; -import java.util.Date; @Entity -@Table(name = "HFJ_RES_SEARCH_URL", - indexes = { - 
@Index(name = "IDX_RESSEARCHURL_RES", columnList = "RES_ID"), - @Index(name = "IDX_RESSEARCHURL_TIME", columnList = "CREATED_TIME") -}) +@Table( + name = "HFJ_RES_SEARCH_URL", + indexes = { + @Index(name = "IDX_RESSEARCHURL_RES", columnList = "RES_ID"), + @Index(name = "IDX_RESSEARCHURL_TIME", columnList = "CREATED_TIME") + }) public class ResourceSearchUrlEntity { public static final String RES_SEARCH_URL_COLUMN_NAME = "RES_SEARCH_URL"; @@ -53,9 +54,9 @@ public class ResourceSearchUrlEntity { public static ResourceSearchUrlEntity from(String theUrl, Long theId) { return new ResourceSearchUrlEntity() - .setResourcePid(theId) - .setSearchUrl(theUrl) - .setCreatedTime(new Date()); + .setResourcePid(theId) + .setSearchUrl(theUrl) + .setCreatedTime(new Date()); } public Long getResourcePid() { @@ -85,5 +86,3 @@ public class ResourceSearchUrlEntity { return this; } } - - diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java index 196418b3468..a379fbd9931 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java @@ -50,6 +50,13 @@ import org.hibernate.tuple.ValueGenerator; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.InstantType; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Entity; @@ -67,23 +74,20 @@ import javax.persistence.PreUpdate; import javax.persistence.Table; import javax.persistence.Transient; import javax.persistence.Version; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Date; -import java.util.HashSet; -import java.util.Objects; -import java.util.Set; -import java.util.stream.Collectors; @Indexed(routingBinder = @RoutingBinderRef(type = ResourceTableRoutingBinder.class)) @Entity -@Table(name = ResourceTable.HFJ_RESOURCE, uniqueConstraints = {}, indexes = { - // Do not reuse previously used index name: IDX_INDEXSTATUS, IDX_RES_TYPE - @Index(name = "IDX_RES_DATE", columnList = BaseHasResource.RES_UPDATED), - @Index(name = "IDX_RES_TYPE_DEL_UPDATED", columnList = "RES_TYPE,RES_DELETED_AT,RES_UPDATED,PARTITION_ID,RES_ID"), - @Index(name = "IDX_RES_RESID_UPDATED", columnList = "RES_ID,RES_UPDATED,PARTITION_ID"), -}) +@Table( + name = ResourceTable.HFJ_RESOURCE, + uniqueConstraints = {}, + indexes = { + // Do not reuse previously used index name: IDX_INDEXSTATUS, IDX_RES_TYPE + @Index(name = "IDX_RES_DATE", columnList = BaseHasResource.RES_UPDATED), + @Index( + name = "IDX_RES_TYPE_DEL_UPDATED", + columnList = "RES_TYPE,RES_DELETED_AT,RES_UPDATED,PARTITION_ID,RES_ID"), + @Index(name = "IDX_RES_RESID_UPDATED", columnList = "RES_ID,RES_UPDATED,PARTITION_ID"), + }) @NamedEntityGraph(name = "Resource.noJoins") public class ResourceTable extends BaseHasResource implements Serializable, IBasePersistedResource { public static final int RESTYPE_LEN = 40; @@ -99,10 +103,26 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas * Note that we depend on `myVersion` updated for this field to be indexed. 
*/ @Transient - @FullTextField(name = "myContentText", searchable = Searchable.YES, projectable = Projectable.YES, analyzer = "standardAnalyzer") - @FullTextField(name = "myContentTextEdgeNGram", searchable = Searchable.YES, projectable = Projectable.NO, analyzer = "autocompleteEdgeAnalyzer") - @FullTextField(name = "myContentTextNGram", searchable = Searchable.YES, projectable = Projectable.NO, analyzer = "autocompleteNGramAnalyzer") - @FullTextField(name = "myContentTextPhonetic", searchable = Searchable.YES, projectable = Projectable.NO, analyzer = "autocompletePhoneticAnalyzer") + @FullTextField( + name = "myContentText", + searchable = Searchable.YES, + projectable = Projectable.YES, + analyzer = "standardAnalyzer") + @FullTextField( + name = "myContentTextEdgeNGram", + searchable = Searchable.YES, + projectable = Projectable.NO, + analyzer = "autocompleteEdgeAnalyzer") + @FullTextField( + name = "myContentTextNGram", + searchable = Searchable.YES, + projectable = Projectable.NO, + analyzer = "autocompleteNGramAnalyzer") + @FullTextField( + name = "myContentTextPhonetic", + searchable = Searchable.YES, + projectable = Projectable.NO, + analyzer = "autocompletePhoneticAnalyzer") @OptimisticLock(excluded = true) @IndexingDependency(derivedFrom = @ObjectPath(@PropertyValue(propertyName = "myVersion"))) private String myContentText; @@ -135,10 +155,26 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas * Holds the narrative text only - Used for Fulltext searching but not directly stored in the DB */ @Transient() - @FullTextField(name = "myNarrativeText", searchable = Searchable.YES, projectable = Projectable.YES, analyzer = "standardAnalyzer") - @FullTextField(name = "myNarrativeTextEdgeNGram", searchable = Searchable.YES, projectable = Projectable.NO, analyzer = "autocompleteEdgeAnalyzer") - @FullTextField(name = "myNarrativeTextNGram", searchable = Searchable.YES, projectable = Projectable.NO, analyzer = "autocompleteNGramAnalyzer") - @FullTextField(name = "myNarrativeTextPhonetic", searchable = Searchable.YES, projectable = Projectable.NO, analyzer = "autocompletePhoneticAnalyzer") + @FullTextField( + name = "myNarrativeText", + searchable = Searchable.YES, + projectable = Projectable.YES, + analyzer = "standardAnalyzer") + @FullTextField( + name = "myNarrativeTextEdgeNGram", + searchable = Searchable.YES, + projectable = Projectable.NO, + analyzer = "autocompleteEdgeAnalyzer") + @FullTextField( + name = "myNarrativeTextNGram", + searchable = Searchable.YES, + projectable = Projectable.NO, + analyzer = "autocompleteNGramAnalyzer") + @FullTextField( + name = "myNarrativeTextPhonetic", + searchable = Searchable.YES, + projectable = Projectable.NO, + analyzer = "autocompletePhoneticAnalyzer") @OptimisticLock(excluded = true) @IndexingDependency(derivedFrom = @ObjectPath(@PropertyValue(propertyName = "myVersion"))) private String myNarrativeText; @@ -148,7 +184,11 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas @PropertyBinding(binder = @PropertyBinderRef(type = SearchParamTextPropertyBinder.class)) private ExtendedHSearchIndexData myLuceneIndexData; - @OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false) + @OneToMany( + mappedBy = "myResource", + cascade = {}, + fetch = FetchType.LAZY, + orphanRemoval = false) @OptimisticLock(excluded = true) private Collection myParamsCoords; @@ -156,7 +196,11 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas 
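For readers skimming the rewrapped annotations above: myContentText (and myNarrativeText below it) is mapped to four full-text fields, one per analyzer, so a single extracted string can serve standard, edge-n-gram, n-gram and phonetic matching at query time. A stripped-down version of the same repeatable-annotation pattern; the entity and field names are invented for illustration, and the analyzer names are assumed to be registered by the analysis configurers, as they are for ResourceTable:

    import javax.persistence.Entity;
    import javax.persistence.Id;
    import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField;
    import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed;

    @Entity
    @Indexed
    class NoteDocument {
        @Id
        Long id;

        // One stored string, three analyzed views of it.
        @FullTextField(name = "text", analyzer = "standardAnalyzer")
        @FullTextField(name = "textNGram", analyzer = "autocompleteNGramAnalyzer")
        @FullTextField(name = "textPhonetic", analyzer = "autocompletePhoneticAnalyzer")
        String text;
    }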
@OptimisticLock(excluded = true) private boolean myParamsCoordsPopulated; - @OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false) + @OneToMany( + mappedBy = "myResource", + cascade = {}, + fetch = FetchType.LAZY, + orphanRemoval = false) @OptimisticLock(excluded = true) private Collection myParamsDate; @@ -165,14 +209,22 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas private boolean myParamsDatePopulated; @OptimisticLock(excluded = true) - @OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false) + @OneToMany( + mappedBy = "myResource", + cascade = {}, + fetch = FetchType.LAZY, + orphanRemoval = false) private Collection myParamsNumber; @Column(name = "SP_NUMBER_PRESENT") @OptimisticLock(excluded = true) private boolean myParamsNumberPopulated; - @OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false) + @OneToMany( + mappedBy = "myResource", + cascade = {}, + fetch = FetchType.LAZY, + orphanRemoval = false) @OptimisticLock(excluded = true) private Collection myParamsQuantity; @@ -184,7 +236,11 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas * Added to support UCUM conversion * since 5.3.0 */ - @OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false) + @OneToMany( + mappedBy = "myResource", + cascade = {}, + fetch = FetchType.LAZY, + orphanRemoval = false) @OptimisticLock(excluded = true) private Collection myParamsQuantityNormalized; @@ -197,7 +253,11 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas @OptimisticLock(excluded = true) private Boolean myParamsQuantityNormalizedPopulated = Boolean.FALSE; - @OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false) + @OneToMany( + mappedBy = "myResource", + cascade = {}, + fetch = FetchType.LAZY, + orphanRemoval = false) @OptimisticLock(excluded = true) private Collection myParamsString; @@ -205,7 +265,11 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas @OptimisticLock(excluded = true) private boolean myParamsStringPopulated; - @OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false) + @OneToMany( + mappedBy = "myResource", + cascade = {}, + fetch = FetchType.LAZY, + orphanRemoval = false) @OptimisticLock(excluded = true) private Collection myParamsToken; @@ -213,7 +277,11 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas @OptimisticLock(excluded = true) private boolean myParamsTokenPopulated; - @OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false) + @OneToMany( + mappedBy = "myResource", + cascade = {}, + fetch = FetchType.LAZY, + orphanRemoval = false) @OptimisticLock(excluded = true) private Collection myParamsUri; @@ -226,7 +294,11 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas @Column(name = "SP_CMPSTR_UNIQ_PRESENT") private Boolean myParamsComboStringUniquePresent = false; - @OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false) + @OneToMany( + mappedBy = "myResource", + cascade = {}, + fetch = FetchType.LAZY, + orphanRemoval = false) @OptimisticLock(excluded = true) private Collection myParamsComboStringUnique; @@ -235,11 +307,19 @@ public class ResourceTable extends BaseHasResource implements Serializable, 
IBas @Column(name = "SP_CMPTOKS_PRESENT") private Boolean myParamsComboTokensNonUniquePresent = false; - @OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false) + @OneToMany( + mappedBy = "myResource", + cascade = {}, + fetch = FetchType.LAZY, + orphanRemoval = false) @OptimisticLock(excluded = true) private Collection myParamsComboTokensNonUnique; - @OneToMany(mappedBy = "mySourceResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false) + @OneToMany( + mappedBy = "mySourceResource", + cascade = {}, + fetch = FetchType.LAZY, + orphanRemoval = false) @OptimisticLock(excluded = true) private Collection myResourceLinks; @@ -261,7 +341,11 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas @IndexingDependency(derivedFrom = @ObjectPath(@PropertyValue(propertyName = "myResourceLinks"))) private String myResourceLinksField; - @OneToMany(mappedBy = "myTargetResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false) + @OneToMany( + mappedBy = "myTargetResource", + cascade = {}, + fetch = FetchType.LAZY, + orphanRemoval = false) @OptimisticLock(excluded = true) private Collection myResourceLinksAsTarget; @@ -286,11 +370,12 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas * Will contain either the client-assigned id, or the sequence value. * Will be null during insert time until the first read. */ - @Column(name = "FHIR_ID", - // [A-Za-z0-9\-\.]{1,64} - https://www.hl7.org/fhir/datatypes.html#id - length = 64, - // we never update this after insert, and the Generator will otherwise "dirty" the object. - updatable = false) + @Column( + name = "FHIR_ID", + // [A-Za-z0-9\-\.]{1,64} - https://www.hl7.org/fhir/datatypes.html#id + length = 64, + // we never update this after insert, and the Generator will otherwise "dirty" the object. + updatable = false) // inject the pk for server-assigned sequence ids. 
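The FHIR_ID column reflowed just above is sized and locked down to match the FHIR id datatype: values must match [A-Za-z0-9\-\.]{1,64} (hence length = 64), and the column is not updatable so the insert-time generator cannot dirty the entity afterwards. A tiny illustration of the constraint the comment cites; the helper class and method are made up:

    import java.util.regex.Pattern;

    class FhirIdCheck {
        private static final Pattern FHIR_ID = Pattern.compile("[A-Za-z0-9\\-\\.]{1,64}");

        static boolean isValidFhirId(String id) {
            return id != null && FHIR_ID.matcher(id).matches();
        }

        public static void main(String[] args) {
            System.out.println(isValidFhirId("Patient-123.v2")); // true
            System.out.println(isValidFhirId("has spaces"));     // false
        }
    }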
@GeneratorType(when = GenerationTime.INSERT, type = FhirIdGenerator.class) @@ -310,6 +395,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas @Version @Column(name = "RES_VER") private long myVersion; + @OneToMany(mappedBy = "myResourceTable", fetch = FetchType.LAZY) private Collection myProvenance; @@ -322,7 +408,12 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas @Transient private transient boolean myVersionUpdatedInCurrentTransaction; - @OneToOne(optional = true, fetch = FetchType.EAGER, cascade = {}, orphanRemoval = false, mappedBy = "myResource") + @OneToOne( + optional = true, + fetch = FetchType.EAGER, + cascade = {}, + orphanRemoval = false, + mappedBy = "myResource") @OptimisticLock(excluded = true) private ForcedId myForcedId; @@ -487,7 +578,8 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas return myParamsQuantityNormalized; } - public void setParamsQuantityNormalized(Collection theQuantityNormalizedParams) { + public void setParamsQuantityNormalized( + Collection theQuantityNormalizedParams) { if (!isParamsQuantityNormalizedPopulated() && theQuantityNormalizedParams.isEmpty()) { return; } @@ -602,7 +694,6 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas myVersion = theVersion; } - @Override public boolean isDeleted() { return getDeleted() != null; @@ -693,17 +784,13 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas } public Boolean isParamsQuantityNormalizedPopulated() { - if (myParamsQuantityNormalizedPopulated == null) - return Boolean.FALSE; - else - return myParamsQuantityNormalizedPopulated; + if (myParamsQuantityNormalizedPopulated == null) return Boolean.FALSE; + else return myParamsQuantityNormalizedPopulated; } public void setParamsQuantityNormalizedPopulated(Boolean theParamsQuantityNormalizedPopulated) { - if (theParamsQuantityNormalizedPopulated == null) - myParamsQuantityNormalizedPopulated = Boolean.FALSE; - else - myParamsQuantityNormalizedPopulated = theParamsQuantityNormalizedPopulated; + if (theParamsQuantityNormalizedPopulated == null) myParamsQuantityNormalizedPopulated = Boolean.FALSE; + else myParamsQuantityNormalizedPopulated = theParamsQuantityNormalizedPopulated; } public boolean isParamsStringPopulated() { @@ -828,12 +915,11 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas @PreUpdate public void preSave() { if (myHasLinks && myResourceLinks != null) { - myResourceLinksField = getResourceLinks() - .stream() - .map(ResourceLink::getTargetResourcePid) - .filter(Objects::nonNull) - .map(Object::toString) - .collect(Collectors.joining(" ")); + myResourceLinksField = getResourceLinks().stream() + .map(ResourceLink::getTargetResourcePid) + .filter(Objects::nonNull) + .map(Object::toString) + .collect(Collectors.joining(" ")); } else { myResourceLinksField = null; } @@ -888,7 +974,13 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas retVal.setValue(getResourceType() + '/' + myFhirId + '/' + Constants.PARAM_HISTORY + '/' + getVersion()); } else if (getTransientForcedId() != null) { // Avoid a join query if possible - retVal.setValue(getResourceType() + '/' + getTransientForcedId() + '/' + Constants.PARAM_HISTORY + '/' + getVersion()); + retVal.setValue(getResourceType() + + '/' + + getTransientForcedId() + + '/' + + Constants.PARAM_HISTORY + + '/' + + getVersion()); } else if (getForcedId() == null) { Long id = 
this.getResourceId(); retVal.setValue(getResourceType() + '/' + id + '/' + Constants.PARAM_HISTORY + '/' + getVersion()); diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTag.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTag.java index 3f7512a583e..ba7f24a91ae 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTag.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTag.java @@ -40,13 +40,16 @@ import javax.persistence.UniqueConstraint; @Entity @Table( - name = "HFJ_RES_TAG", - indexes = { - @Index(name = "IDX_RES_TAG_RES_TAG", columnList = "RES_ID, TAG_ID, PARTITION_ID"), - @Index(name = "IDX_RES_TAG_TAG_RES", columnList = "TAG_ID, RES_ID, PARTITION_ID") - }, - uniqueConstraints = { @UniqueConstraint(name = "IDX_RESTAG_TAGID", columnNames = {"RES_ID", "TAG_ID"}) -}) + name = "HFJ_RES_TAG", + indexes = { + @Index(name = "IDX_RES_TAG_RES_TAG", columnList = "RES_ID, TAG_ID, PARTITION_ID"), + @Index(name = "IDX_RES_TAG_TAG_RES", columnList = "TAG_ID, RES_ID, PARTITION_ID") + }, + uniqueConstraints = { + @UniqueConstraint( + name = "IDX_RESTAG_TAGID", + columnNames = {"RES_ID", "TAG_ID"}) + }) public class ResourceTag extends BaseTag { private static final long serialVersionUID = 1L; @@ -57,7 +60,9 @@ public class ResourceTag extends BaseTag { @Column(name = "PID") private Long myId; - @ManyToOne(cascade = {}, fetch = FetchType.LAZY) + @ManyToOne( + cascade = {}, + fetch = FetchType.LAZY) @JoinColumn(name = "RES_ID", referencedColumnName = "RES_ID", foreignKey = @ForeignKey(name = "FK_RESTAG_RESOURCE")) private ResourceTable myResource; @@ -77,7 +82,8 @@ public class ResourceTag extends BaseTag { /** * Constructor */ - public ResourceTag(ResourceTable theResourceTable, TagDefinition theTag, PartitionablePartitionId theRequestPartitionId) { + public ResourceTag( + ResourceTable theResourceTable, TagDefinition theTag, PartitionablePartitionId theRequestPartitionId) { setTag(theTag); setResource(theResourceTable); setResourceId(theResourceTable.getId()); @@ -145,5 +151,4 @@ public class ResourceTag extends BaseTag { b.append("tag", getTag().getId()); return b.build(); } - } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/SearchParamPresentEntity.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/SearchParamPresentEntity.java index b23be685bc1..b4ceebd9235 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/SearchParamPresentEntity.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/SearchParamPresentEntity.java @@ -27,15 +27,17 @@ import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; -import javax.persistence.*; import java.io.Serializable; +import javax.persistence.*; @Entity -@Table(name = "HFJ_RES_PARAM_PRESENT", indexes = { - // We used to have a constraint named IDX_RESPARMPRESENT_SPID_RESID - Don't reuse - @Index(name = "IDX_RESPARMPRESENT_RESID", columnList = "RES_ID"), - @Index(name = "IDX_RESPARMPRESENT_HASHPRES", columnList = "HASH_PRESENCE") -}) +@Table( + name = "HFJ_RES_PARAM_PRESENT", + indexes = { + // We used to have a constraint named IDX_RESPARMPRESENT_SPID_RESID - Don't reuse + @Index(name = "IDX_RESPARMPRESENT_RESID", columnList = "RES_ID"), + @Index(name = "IDX_RESPARMPRESENT_HASHPRES", columnList 
= "HASH_PRESENCE") + }) public class SearchParamPresentEntity extends BasePartitionable implements Serializable { private static final long serialVersionUID = 1L; @@ -45,17 +47,27 @@ public class SearchParamPresentEntity extends BasePartitionable implements Seria @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_RESPARMPRESENT_ID") @Column(name = "PID") private Long myId; + @Column(name = "SP_PRESENT", nullable = false) private boolean myPresent; + @ManyToOne() - @JoinColumn(name = "RES_ID", referencedColumnName = "RES_ID", nullable = false, foreignKey = @ForeignKey(name = "FK_RESPARMPRES_RESID")) + @JoinColumn( + name = "RES_ID", + referencedColumnName = "RES_ID", + nullable = false, + foreignKey = @ForeignKey(name = "FK_RESPARMPRES_RESID")) private ResourceTable myResource; + @Column(name = "RES_ID", nullable = false, insertable = false, updatable = false) private Long myResourcePid; + @Transient private transient String myParamName; + @Column(name = "HASH_PRESENCE") private Long myHashPresence; + @Transient private transient PartitionSettings myPartitionSettings; @@ -81,7 +93,8 @@ public class SearchParamPresentEntity extends BasePartitionable implements Seria String resourceType = getResource().getResourceType(); String paramName = getParamName(); boolean present = myPresent; - setHashPresence(calculateHashPresence(getPartitionSettings(), getPartitionId(), resourceType, paramName, present)); + setHashPresence( + calculateHashPresence(getPartitionSettings(), getPartitionId(), resourceType, paramName, present)); } } @@ -169,14 +182,25 @@ public class SearchParamPresentEntity extends BasePartitionable implements Seria setPresent(theSource.isPresent()); } - public static long calculateHashPresence(PartitionSettings thePartitionSettings, PartitionablePartitionId theRequestPartitionId, String theResourceType, String theParamName, Boolean thePresent) { + public static long calculateHashPresence( + PartitionSettings thePartitionSettings, + PartitionablePartitionId theRequestPartitionId, + String theResourceType, + String theParamName, + Boolean thePresent) { RequestPartitionId requestPartitionId = PartitionablePartitionId.toRequestPartitionId(theRequestPartitionId); - return calculateHashPresence(thePartitionSettings, requestPartitionId, theResourceType, theParamName, thePresent); + return calculateHashPresence( + thePartitionSettings, requestPartitionId, theResourceType, theParamName, thePresent); } - public static long calculateHashPresence(PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, String theResourceType, String theParamName, Boolean thePresent) { + public static long calculateHashPresence( + PartitionSettings thePartitionSettings, + RequestPartitionId theRequestPartitionId, + String theResourceType, + String theParamName, + Boolean thePresent) { String string = thePresent != null ? 
Boolean.toString(thePresent) : Boolean.toString(false); - return BaseResourceIndexedSearchParam.hash(thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, string); + return BaseResourceIndexedSearchParam.hash( + thePartitionSettings, theRequestPartitionId, theResourceType, theParamName, string); } - } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/StorageSettings.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/StorageSettings.java index 7382f6bf82c..0bdf6704fae 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/StorageSettings.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/StorageSettings.java @@ -51,6 +51,7 @@ public class StorageSettings { */ // Thread Pool size used by batch in bundle public static final int DEFAULT_BUNDLE_BATCH_POOL_SIZE = 20; // 1 for single thread + public static final int DEFAULT_BUNDLE_BATCH_MAX_POOL_SIZE = 100; // 1 for single thread /** * Default {@link #getTreatReferencesAsLogical() logical URL bases}. Includes the following @@ -61,7 +62,13 @@ public class StorageSettings { *

 * <li>"http://hl7.org/fhir/ValueSet/*"</li>
 * <li>"http://hl7.org/fhir/CodeSystem/*"</li>
 * <li>"http://hl7.org/fhir/valueset-*"</li>
 * <li>"http://hl7.org/fhir/codesystem-*"</li>
 * <li>"http://hl7.org/fhir/StructureDefinition/*"</li>
  • * */ - public static final Set DEFAULT_LOGICAL_BASE_URLS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList("http://hl7.org/fhir/ValueSet/*", "http://hl7.org/fhir/CodeSystem/*", "http://hl7.org/fhir/valueset-*", "http://hl7.org/fhir/codesystem-*", "http://hl7.org/fhir/StructureDefinition/*"))); + public static final Set DEFAULT_LOGICAL_BASE_URLS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList( + "http://hl7.org/fhir/ValueSet/*", + "http://hl7.org/fhir/CodeSystem/*", + "http://hl7.org/fhir/valueset-*", + "http://hl7.org/fhir/codesystem-*", + "http://hl7.org/fhir/StructureDefinition/*"))); + public static final String DEFAULT_WEBSOCKET_CONTEXT_PATH = "/websocket"; /* *
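The DEFAULT_LOGICAL_BASE_URLS entries above all use a trailing '*', the same trailing-wildcard convention that validateTreatBaseUrlsAsLocal further down enforces for locally-treated base URLs (a '*' is only allowed as the last character). A minimal sketch of how such a trailing-wildcard base URL can be matched; the class and method names are illustrative, not the StorageSettings API:

    class WildcardBaseUrlSketch {
        static boolean matches(String pattern, String url) {
            if (pattern.endsWith("*")) {
                return url.startsWith(pattern.substring(0, pattern.length() - 1));
            }
            return pattern.equals(url);
        }

        public static void main(String[] args) {
            System.out.println(matches("http://hl7.org/fhir/ValueSet/*",
                    "http://hl7.org/fhir/ValueSet/administrative-gender")); // true
        }
    }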

    @@ -82,6 +89,7 @@ public class StorageSettings { * update setter javadoc if default changes */ private boolean myAllowContainsSearches = false; + private boolean myAllowExternalReferences = false; private Set myTreatBaseUrlsAsLocal = new HashSet<>(); private Set myTreatReferencesAsLogical = new HashSet<>(DEFAULT_LOGICAL_BASE_URLS); @@ -102,6 +110,7 @@ public class StorageSettings { * Update setter javadoc if default changes. */ private boolean myUseOrdinalDatesForDayPrecisionSearches = true; + private boolean mySuppressStringIndexingInTokens = false; private Class mySequenceValueMassagerClass; private IPrimitiveType myPeriodIndexStartOfTime; @@ -754,12 +763,12 @@ public class StorageSettings { return this; } - /** * This setting indicates which subscription channel types are supported by the server. Any subscriptions submitted * to the server matching these types will be activated. */ - public StorageSettings addSupportedSubscriptionType(Subscription.SubscriptionChannelType theSubscriptionChannelType) { + public StorageSettings addSupportedSubscriptionType( + Subscription.SubscriptionChannelType theSubscriptionChannelType) { mySupportedSubscriptionTypes.add(theSubscriptionChannelType); return this; } @@ -780,7 +789,6 @@ public class StorageSettings { /** * If e-mail subscriptions are supported, the From address used when sending e-mails */ - public String getEmailFromAddress() { return myEmailFromAddress; } @@ -788,7 +796,6 @@ public class StorageSettings { /** * If e-mail subscriptions are supported, the From address used when sending e-mails */ - public void setEmailFromAddress(String theEmailFromAddress) { myEmailFromAddress = theEmailFromAddress; } @@ -796,7 +803,6 @@ public class StorageSettings { /** * If websocket subscriptions are enabled, this specifies the context path that listens to them. Default value "/websocket". */ - public String getWebsocketContextPath() { return myWebsocketContextPath; } @@ -804,7 +810,6 @@ public class StorageSettings { /** * If websocket subscriptions are enabled, this specifies the context path that listens to them. Default value "/websocket". 
*/ - public void setWebsocketContextPath(String theWebsocketContextPath) { myWebsocketContextPath = theWebsocketContextPath; } @@ -1097,7 +1102,6 @@ public class StorageSettings { byType.computeIfAbsent(type, t -> new HashSet<>()).add(nextPath); } - myAutoVersionReferenceAtPaths = paths; myTypeToAutoVersionReferenceAtPaths = byType; } @@ -1281,23 +1285,20 @@ public class StorageSettings { return myQualifySubscriptionMatchingChannelName; } - - private static void validateTreatBaseUrlsAsLocal(String theUrl) { Validate.notBlank(theUrl, "Base URL must not be null or empty"); int starIdx = theUrl.indexOf('*'); if (starIdx != -1) { if (starIdx != theUrl.length() - 1) { - throw new IllegalArgumentException(Msg.code(1525) + "Base URL wildcard character (*) can only appear at the end of the string: " + theUrl); + throw new IllegalArgumentException(Msg.code(1525) + + "Base URL wildcard character (*) can only appear at the end of the string: " + theUrl); } } - } public enum IndexEnabledEnum { ENABLED, DISABLED } - } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/TagDefinition.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/TagDefinition.java index f562684ae63..111c1c83e04 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/TagDefinition.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/TagDefinition.java @@ -25,6 +25,8 @@ import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; +import java.io.Serializable; +import java.util.Collection; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.EnumType; @@ -38,19 +40,19 @@ import javax.persistence.OneToMany; import javax.persistence.SequenceGenerator; import javax.persistence.Table; import javax.persistence.Transient; -import java.io.Serializable; -import java.util.Collection; @Entity @Table( - name = "HFJ_TAG_DEF", - indexes = { - @Index(name = "IDX_TAG_DEF_TP_CD_SYS", columnList = "TAG_TYPE, TAG_CODE, TAG_SYSTEM, TAG_ID, TAG_VERSION, TAG_USER_SELECTED"), - } -) + name = "HFJ_TAG_DEF", + indexes = { + @Index( + name = "IDX_TAG_DEF_TP_CD_SYS", + columnList = "TAG_TYPE, TAG_CODE, TAG_SYSTEM, TAG_ID, TAG_VERSION, TAG_USER_SELECTED"), + }) public class TagDefinition implements Serializable { private static final long serialVersionUID = 1L; + @Column(name = "TAG_CODE", length = 200) private String myCode; @@ -63,10 +65,16 @@ public class TagDefinition implements Serializable { @Column(name = "TAG_ID") private Long myId; - @OneToMany(cascade = {}, fetch = FetchType.LAZY, mappedBy = "myTag") + @OneToMany( + cascade = {}, + fetch = FetchType.LAZY, + mappedBy = "myTag") private Collection myResources; - @OneToMany(cascade = {}, fetch = FetchType.LAZY, mappedBy = "myTag") + @OneToMany( + cascade = {}, + fetch = FetchType.LAZY, + mappedBy = "myTag") private Collection myResourceVersions; @Column(name = "TAG_SYSTEM", length = 200) @@ -168,7 +176,6 @@ public class TagDefinition implements Serializable { myUserSelected = theUserSelected; } - @Override public boolean equals(Object obj) { if (this == obj) { diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/TagTypeEnum.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/TagTypeEnum.java index 454d52a6b01..1a53c87cc32 100644 --- 
a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/TagTypeEnum.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/TagTypeEnum.java @@ -25,11 +25,10 @@ public enum TagTypeEnum { * Do not reorder these! If you add to this list, add to the * TagTypeEnumTest test too! */ - - TAG, - - PROFILE, - - SECURITY_LABEL + TAG, + + PROFILE, + + SECURITY_LABEL } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/DateSearchIndexData.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/DateSearchIndexData.java index bea6c843c27..5769babcc2a 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/DateSearchIndexData.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/DateSearchIndexData.java @@ -27,7 +27,8 @@ public class DateSearchIndexData { private final Date myUpperBoundDate; private final int myUpperBoundOrdinal; - public DateSearchIndexData(Date theLowerBoundDate, int theLowerBoundOrdinal, Date theUpperBoundDate, int theUpperBoundOrdinal) { + public DateSearchIndexData( + Date theLowerBoundDate, int theLowerBoundOrdinal, Date theUpperBoundDate, int theUpperBoundOrdinal) { myLowerBoundDate = theLowerBoundDate; myLowerBoundOrdinal = theLowerBoundOrdinal; myUpperBoundDate = theUpperBoundDate; diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/ExtendedHSearchIndexData.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/ExtendedHSearchIndexData.java index 79b19e67ff9..bcc948bc794 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/ExtendedHSearchIndexData.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/ExtendedHSearchIndexData.java @@ -58,17 +58,18 @@ public class ExtendedHSearchIndexData { private String myResourceJSON; private IBaseResource myResource; - public ExtendedHSearchIndexData(FhirContext theFhirContext, StorageSettings theStorageSettings, IBaseResource theResource) { + public ExtendedHSearchIndexData( + FhirContext theFhirContext, StorageSettings theStorageSettings, IBaseResource theResource) { this.myFhirContext = theFhirContext; this.myStorageSettings = theStorageSettings; myResource = theResource; } private BiConsumer ifNotContained(BiConsumer theIndexWriter) { - return (s,v) -> { + return (s, v) -> { // Ignore contained resources for now. if (!s.contains(".")) { - theIndexWriter.accept(s,v); + theIndexWriter.accept(s, v); } }; } @@ -111,9 +112,9 @@ public class ExtendedHSearchIndexData { /** * Add if not already present. 
*/ - public void addTokenIndexDataIfNotPresent(String theSpName, String theSystem, String theValue) { + public void addTokenIndexDataIfNotPresent(String theSpName, String theSystem, String theValue) { boolean isPresent = mySearchParamTokens.get(theSpName).stream() - .anyMatch(c -> Objects.equals(c.getSystem(), theSystem) && Objects.equals(c.getCode(), theValue)); + .anyMatch(c -> Objects.equals(c.getSystem(), theSystem) && Objects.equals(c.getCode(), theValue)); if (!isPresent) { addTokenIndexData(theSpName, new CodingDt(theSystem, theValue)); } @@ -131,15 +132,24 @@ public class ExtendedHSearchIndexData { mySearchParamLinks.put(theSpName, theTargetResourceId); } - public void addDateIndexData(String theSpName, Date theLowerBound, int theLowerBoundOrdinal, Date theUpperBound, int theUpperBoundOrdinal) { - addDateIndexData(theSpName, new DateSearchIndexData(theLowerBound, theLowerBoundOrdinal, theUpperBound, theUpperBoundOrdinal)); + public void addDateIndexData( + String theSpName, + Date theLowerBound, + int theLowerBoundOrdinal, + Date theUpperBound, + int theUpperBoundOrdinal) { + addDateIndexData( + theSpName, + new DateSearchIndexData(theLowerBound, theLowerBoundOrdinal, theUpperBound, theUpperBoundOrdinal)); } public void addDateIndexData(String theSpName, DateSearchIndexData value) { mySearchParamDates.put(theSpName, value); } - public SetMultimap getDateIndexData() { return mySearchParamDates; } + public SetMultimap getDateIndexData() { + return mySearchParamDates; + } public void addNumberIndexDataIfNotPresent(String theParamName, BigDecimal theValue) { mySearchParamNumbers.put(theParamName, theValue); @@ -149,7 +159,9 @@ public class ExtendedHSearchIndexData { mySearchParamQuantities.put(theSpName, value); } - public SetMultimap getQuantityIndexData () {return mySearchParamQuantities;} + public SetMultimap getQuantityIndexData() { + return mySearchParamQuantities; + } public void setForcedId(String theForcedId) { myForcedId = theForcedId; @@ -160,8 +172,8 @@ public class ExtendedHSearchIndexData { } public void setRawResourceData(String theResourceJSON) { - myResourceJSON = theResourceJSON; - } + myResourceJSON = theResourceJSON; + } public SetMultimap getSearchParamComposites() { return mySearchParamComposites; diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/HSearchElementCache.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/HSearchElementCache.java index 124caeb3a63..c96ee902c79 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/HSearchElementCache.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/HSearchElementCache.java @@ -23,11 +23,11 @@ import org.hibernate.search.engine.backend.document.DocumentElement; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; +import javax.annotation.Nonnull; /** * Provide a lookup of created Hibernate Search DocumentElement entries. 
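The addTokenIndexDataIfNotPresent change in ExtendedHSearchIndexData above keeps its explicit duplicate check: the system/code pair is compared against what has already been recorded for the search parameter before anything new is added. A condensed sketch of that guard, assuming a Guava SetMultimap as the accessors in this class suggest; the Token holder type is invented for the example:

    import com.google.common.collect.HashMultimap;
    import com.google.common.collect.SetMultimap;
    import java.util.Objects;

    class TokenDedupSketch {
        static final class Token {
            final String system;
            final String code;
            Token(String system, String code) {
                this.system = system;
                this.code = code;
            }
        }

        final SetMultimap<String, Token> tokensByParam = HashMultimap.create();

        void addIfNotPresent(String paramName, String system, String code) {
            boolean present = tokensByParam.get(paramName).stream()
                    .anyMatch(t -> Objects.equals(t.system, system) && Objects.equals(t.code, code));
            if (!present) {
                tokensByParam.put(paramName, new Token(system, code));
            }
        }
    }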
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/HSearchIndexWriter.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/HSearchIndexWriter.java index b9a0b51eff0..02ab28cc9b2 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/HSearchIndexWriter.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/HSearchIndexWriter.java @@ -61,13 +61,11 @@ public class HSearchIndexWriter { public static final String NUMBER_VALUE = "number-value"; - public static final String DATE_LOWER_ORD = "lower-ord"; public static final String DATE_LOWER = "lower"; public static final String DATE_UPPER_ORD = "upper-ord"; public static final String DATE_UPPER = "upper"; - final HSearchElementCache myNodeCache; final StorageSettings myStorageSettings; @@ -87,7 +85,8 @@ public class HSearchIndexWriter { public void writeStringIndex(String theSearchParam, String theValue) { DocumentElement stringIndexNode = getSearchParamIndexNode(theSearchParam, INDEX_TYPE_STRING); - // we are assuming that our analyzer matches StringUtil.normalizeStringForSearchIndexing(theValue).toLowerCase(Locale.ROOT)) + // we are assuming that our analyzer matches + // StringUtil.normalizeStringForSearchIndexing(theValue).toLowerCase(Locale.ROOT)) writeBasicStringFields(stringIndexNode, theValue); addDocumentValue(stringIndexNode, IDX_STRING_EXACT, theValue); addDocumentValue(stringIndexNode, IDX_STRING_TEXT, theValue); @@ -151,7 +150,6 @@ public class HSearchIndexWriter { addDocumentValue(dateIndexNode, DATE_UPPER, theValue.getUpperBoundDate().toInstant()); } - public void writeQuantityIndex(String theSearchParam, QuantitySearchIndexData theValue) { DocumentElement nestedRoot = myNodeCache.getObjectElement(NESTED_SEARCH_PARAM_ROOT); @@ -160,7 +158,6 @@ public class HSearchIndexWriter { ourLog.trace("Adding Search Param Quantity: {} -- {}", theSearchParam, theValue); writeQuantityFields(nestedQtyNode, theValue); - } public void writeQuantityFields(DocumentElement nestedQtyNode, QuantitySearchIndexData theValue) { @@ -168,13 +165,13 @@ public class HSearchIndexWriter { addDocumentValue(nestedQtyNode, QTY_SYSTEM, theValue.getSystem()); addDocumentValue(nestedQtyNode, QTY_VALUE, theValue.getValue()); - if ( ! 
myStorageSettings.getNormalizedQuantitySearchLevel().storageOrSearchSupported()) { + if (!myStorageSettings.getNormalizedQuantitySearchLevel().storageOrSearchSupported()) { return; } - //-- convert the value/unit to the canonical form if any - Pair canonicalForm = UcumServiceUtil.getCanonicalForm(theValue.getSystem(), - BigDecimal.valueOf(theValue.getValue()), theValue.getCode()); + // -- convert the value/unit to the canonical form if any + Pair canonicalForm = UcumServiceUtil.getCanonicalForm( + theValue.getSystem(), BigDecimal.valueOf(theValue.getValue()), theValue.getCode()); if (canonicalForm == null) { return; } @@ -186,9 +183,9 @@ public class HSearchIndexWriter { addDocumentValue(nestedQtyNode, QTY_VALUE_NORM, canonicalValue); } - public void writeUriIndex(String theParamName, Collection theUriValueCollection) { - DocumentElement uriNode = myNodeCache.getObjectElement(SEARCH_PARAM_ROOT).addObject(theParamName); + DocumentElement uriNode = + myNodeCache.getObjectElement(SEARCH_PARAM_ROOT).addObject(theParamName); for (String uriSearchIndexValue : theUriValueCollection) { ourLog.trace("Adding Search Param Uri: {} -- {}", theParamName, uriSearchIndexValue); writeUriFields(uriNode, uriSearchIndexValue); @@ -200,7 +197,8 @@ public class HSearchIndexWriter { } public void writeNumberIndex(String theParamName, Collection theNumberValueCollection) { - DocumentElement numberNode = myNodeCache.getObjectElement(SEARCH_PARAM_ROOT).addObject(theParamName); + DocumentElement numberNode = + myNodeCache.getObjectElement(SEARCH_PARAM_ROOT).addObject(theParamName); for (BigDecimal numberSearchIndexValue : theNumberValueCollection) { ourLog.trace("Adding Search Param Number: {} -- {}", theParamName, numberSearchIndexValue); writeNumberFields(numberNode, numberSearchIndexValue); @@ -215,11 +213,11 @@ public class HSearchIndexWriter { * @param ignoredParamName unused - for consistent api * @param theCompositeSearchIndexData extracted index data for this sp */ - public void writeCompositeIndex(String ignoredParamName, Set theCompositeSearchIndexData) { + public void writeCompositeIndex( + String ignoredParamName, Set theCompositeSearchIndexData) { // must be nested. 
for (CompositeSearchIndexData compositeSearchIndexDatum : theCompositeSearchIndexData) { compositeSearchIndexDatum.writeIndexEntry(this, myNodeCache); } - } } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/QuantitySearchIndexData.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/QuantitySearchIndexData.java index af0ee905e9d..133902c351b 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/QuantitySearchIndexData.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/QuantitySearchIndexData.java @@ -26,17 +26,21 @@ public class QuantitySearchIndexData { private final String mySystem; private final double myValue; - public QuantitySearchIndexData(String theCode, String theSystem, double theValue) { myCode = theCode; mySystem = theSystem; myValue = theValue; } + public String getCode() { + return myCode; + } - public String getCode() { return myCode; } + public String getSystem() { + return mySystem; + } - public String getSystem() { return mySystem; } - - public double getValue() { return myValue; } + public double getValue() { + return myValue; + } } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/ResourceTableRoutingBinder.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/ResourceTableRoutingBinder.java index 675d437c9e9..1f6f1f7a6df 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/ResourceTableRoutingBinder.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/ResourceTableRoutingBinder.java @@ -36,8 +36,12 @@ public class ResourceTableRoutingBinder implements RoutingBinder { private static class ResourceTableBridge implements RoutingBridge { @Override - public void route(DocumentRoutes theDocumentRoutes, Object theO, ResourceTable theResourceTable, RoutingBridgeRouteContext theRoutingBridgeRouteContext) { - if (theResourceTable.getDeleted() == null && theResourceTable.getIndexStatus() != null ) { + public void route( + DocumentRoutes theDocumentRoutes, + Object theO, + ResourceTable theResourceTable, + RoutingBridgeRouteContext theRoutingBridgeRouteContext) { + if (theResourceTable.getDeleted() == null && theResourceTable.getIndexStatus() != null) { theDocumentRoutes.addRoute(); } else { theDocumentRoutes.notIndexed(); @@ -45,7 +49,11 @@ public class ResourceTableRoutingBinder implements RoutingBinder { } @Override - public void previousRoutes(DocumentRoutes theDocumentRoutes, Object theO, ResourceTable theResourceTable, RoutingBridgeRouteContext theRoutingBridgeRouteContext) { + public void previousRoutes( + DocumentRoutes theDocumentRoutes, + Object theO, + ResourceTable theResourceTable, + RoutingBridgeRouteContext theRoutingBridgeRouteContext) { theDocumentRoutes.addRoute(); } } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/SearchParamTextPropertyBinder.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/SearchParamTextPropertyBinder.java index d5b385fa2e7..7da9c909955 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/SearchParamTextPropertyBinder.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/SearchParamTextPropertyBinder.java @@ -51,7 +51,6 @@ import static ca.uhn.fhir.jpa.model.search.HSearchIndexWriter.QTY_VALUE; import static ca.uhn.fhir.jpa.model.search.HSearchIndexWriter.QTY_VALUE_NORM; import static 
ca.uhn.fhir.jpa.model.search.HSearchIndexWriter.URI_VALUE; - /** * Allows hibernate search to index *

    @@ -69,9 +68,7 @@ public class SearchParamTextPropertyBinder implements PropertyBinder, PropertyBr public void bind(PropertyBindingContext thePropertyBindingContext) { // TODO Is it safe to use object identity of the Map to track dirty? // N.B. GGG I would hazard that it is not, we could potentially use Version of the resource. - thePropertyBindingContext.dependencies() - .use("mySearchParamStrings") - .use("mySearchParamQuantities"); + thePropertyBindingContext.dependencies().use("mySearchParamStrings").use("mySearchParamQuantities"); defineIndexingTemplate(thePropertyBindingContext); @@ -81,159 +78,248 @@ public class SearchParamTextPropertyBinder implements PropertyBinder, PropertyBr private void defineIndexingTemplate(PropertyBindingContext thePropertyBindingContext) { IndexSchemaElement indexSchemaElement = thePropertyBindingContext.indexSchemaElement(); - //In order to support dynamic fields, we have to use field templates. We _must_ define the template at bootstrap time and cannot - //create them adhoc. https://docs.jboss.org/hibernate/search/6.0/reference/en-US/html_single/#mapper-orm-bridge-index-field-dsl-dynamic - //I _think_ im doing the right thing here by indicating that everything matching this template uses this analyzer. + // In order to support dynamic fields, we have to use field templates. We _must_ define the template at + // bootstrap time and cannot + // create them adhoc. + // https://docs.jboss.org/hibernate/search/6.0/reference/en-US/html_single/#mapper-orm-bridge-index-field-dsl-dynamic + // I _think_ im doing the right thing here by indicating that everything matching this template uses this + // analyzer. IndexFieldTypeFactory indexFieldTypeFactory = thePropertyBindingContext.typeFactory(); - // TODO mb Once Ken finishes extracting a common base, we can share these constants with HapiElasticsearchAnalysisConfigurer and HapiLuceneAnalysisConfigurer - StringIndexFieldTypeOptionsStep standardAnalyzer = indexFieldTypeFactory.asString() - .analyzer("standardAnalyzer") - .projectable(Projectable.NO); + // TODO mb Once Ken finishes extracting a common base, we can share these constants with + // HapiElasticsearchAnalysisConfigurer and HapiLuceneAnalysisConfigurer + StringIndexFieldTypeOptionsStep standardAnalyzer = + indexFieldTypeFactory.asString().analyzer("standardAnalyzer").projectable(Projectable.NO); - StringIndexFieldTypeOptionsStep lowerCaseNormalizer = - indexFieldTypeFactory.asString() + StringIndexFieldTypeOptionsStep lowerCaseNormalizer = indexFieldTypeFactory + .asString() .normalizer(LOWERCASE_ASCIIFOLDING_NORMALIZER) .sortable(Sortable.YES) .projectable(Projectable.YES); - StringIndexFieldTypeOptionsStep exactAnalyzer = - indexFieldTypeFactory.asString() + StringIndexFieldTypeOptionsStep exactAnalyzer = indexFieldTypeFactory + .asString() .analyzer("exactAnalyzer") // default max-length is 256. Is that enough for code system uris? 
.projectable(Projectable.NO); - StringIndexFieldTypeOptionsStep normStringAnalyzer = indexFieldTypeFactory.asString() - .analyzer("normStringAnalyzer") - .projectable(Projectable.NO); + StringIndexFieldTypeOptionsStep normStringAnalyzer = + indexFieldTypeFactory.asString().analyzer("normStringAnalyzer").projectable(Projectable.NO); - StringIndexFieldTypeOptionsStep keywordFieldType = indexFieldTypeFactory.asString() - // TODO JB: may have to add normalizer to support case insensitive searches depending on token flags - .projectable(Projectable.NO) - .sortable(Sortable.YES) - .aggregable(Aggregable.YES); + StringIndexFieldTypeOptionsStep keywordFieldType = indexFieldTypeFactory + .asString() + // TODO JB: may have to add normalizer to support case insensitive searches depending on token flags + .projectable(Projectable.NO) + .sortable(Sortable.YES) + .aggregable(Aggregable.YES); - StandardIndexFieldTypeOptionsStep dateTimeFieldType = indexFieldTypeFactory.asInstant() - .projectable(Projectable.NO) - .sortable(Sortable.YES); + StandardIndexFieldTypeOptionsStep dateTimeFieldType = + indexFieldTypeFactory.asInstant().projectable(Projectable.NO).sortable(Sortable.YES); - StandardIndexFieldTypeOptionsStep dateTimeOrdinalFieldType = indexFieldTypeFactory.asInteger() - .projectable(Projectable.NO) - .sortable(Sortable.YES); + StandardIndexFieldTypeOptionsStep dateTimeOrdinalFieldType = + indexFieldTypeFactory.asInteger().projectable(Projectable.NO).sortable(Sortable.YES); - StandardIndexFieldTypeOptionsStep bigDecimalFieldType = indexFieldTypeFactory.asDouble() - .projectable(Projectable.NO) - .sortable(Sortable.YES); + StandardIndexFieldTypeOptionsStep bigDecimalFieldType = + indexFieldTypeFactory.asDouble().projectable(Projectable.NO).sortable(Sortable.YES); - StringIndexFieldTypeOptionsStep forcedIdType = indexFieldTypeFactory.asString() - .projectable(Projectable.YES) - .aggregable(Aggregable.NO); + StringIndexFieldTypeOptionsStep forcedIdType = + indexFieldTypeFactory.asString().projectable(Projectable.YES).aggregable(Aggregable.NO); // type to store payload fields that do not participate in search, only results - StringIndexFieldTypeOptionsStep stringStorageType = indexFieldTypeFactory.asString() - .searchable(Searchable.NO) - .projectable(Projectable.YES) - .aggregable(Aggregable.NO); + StringIndexFieldTypeOptionsStep stringStorageType = indexFieldTypeFactory + .asString() + .searchable(Searchable.NO) + .projectable(Projectable.YES) + .aggregable(Aggregable.NO); // the old style for _text and _contains indexSchemaElement - .fieldTemplate("SearchParamText", standardAnalyzer) - .matchingPathGlob(SEARCH_PARAM_TEXT_PREFIX + "*"); - + .fieldTemplate("SearchParamText", standardAnalyzer) + .matchingPathGlob(SEARCH_PARAM_TEXT_PREFIX + "*"); indexSchemaElement.field("myForcedId", forcedIdType).toReference(); indexSchemaElement.field("myRawResource", stringStorageType).toReference(); - // The following section is a bit ugly. We need to enforce order and dependency or the object matches will be too big. + // The following section is a bit ugly. We need to enforce order and dependency or the object matches will be + // too big. 
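The binder in SearchParamTextPropertyBinder relies on Hibernate Search field templates because search-parameter names are not known at bootstrap: a template is declared once with a type and analyzer, and any dynamically named field whose path matches the glob inherits it, which is the constraint the rewrapped comment above links to in the Hibernate Search documentation. A cut-down sketch of the same declaration style, using only calls that appear in this binder; the template name and path glob are made up:

    import org.hibernate.search.engine.backend.document.model.dsl.IndexSchemaElement;
    import org.hibernate.search.engine.backend.types.Projectable;
    import org.hibernate.search.engine.backend.types.dsl.IndexFieldTypeFactory;

    class FieldTemplateSketch {
        static void defineTemplates(IndexSchemaElement root, IndexFieldTypeFactory types) {
            // Every field later written under a path matching the glob picks up this type/analyzer.
            root.fieldTemplate("anyStringText",
                            types.asString().analyzer("standardAnalyzer").projectable(Projectable.NO))
                    .matchingPathGlob("sp.*.string.text")
                    .multiValued();
        }
    }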
{ - IndexSchemaObjectField spfield = indexSchemaElement.objectField(HSearchIndexWriter.SEARCH_PARAM_ROOT, ObjectStructure.FLATTENED); + IndexSchemaObjectField spfield = + indexSchemaElement.objectField(HSearchIndexWriter.SEARCH_PARAM_ROOT, ObjectStructure.FLATTENED); spfield.toReference(); - IndexSchemaObjectField nestedSpField = indexSchemaElement.objectField(HSearchIndexWriter.NESTED_SEARCH_PARAM_ROOT, ObjectStructure.FLATTENED); + IndexSchemaObjectField nestedSpField = indexSchemaElement.objectField( + HSearchIndexWriter.NESTED_SEARCH_PARAM_ROOT, ObjectStructure.FLATTENED); nestedSpField.toReference(); // Note: the lucene/elastic independent api is hurting a bit here. - // For lucene, we need a separate field for each analyzer. So we'll add string (for :exact), and text (for :text). + // For lucene, we need a separate field for each analyzer. So we'll add string (for :exact), and text (for + // :text). // They aren't marked stored, so there's no space cost beyond the index for each. - // But for elastic, we'd rather have a single field defined, with multi-field sub-fields. The index cost is the same, + // But for elastic, we'd rather have a single field defined, with multi-field sub-fields. The index cost is + // the same, // but elastic will actually store all fields in the source document and consume disk. // So triplicate the storage for now. :-( String stringPathGlob = "*.string"; - spfield.objectFieldTemplate("stringIndex", ObjectStructure.FLATTENED).matchingPathGlob(stringPathGlob); - spfield.fieldTemplate("string-norm", normStringAnalyzer).matchingPathGlob(stringPathGlob + "." + IDX_STRING_NORMALIZED).multiValued(); - spfield.fieldTemplate("string-exact", exactAnalyzer).matchingPathGlob(stringPathGlob + "." + IDX_STRING_EXACT).multiValued(); - spfield.fieldTemplate("string-text", standardAnalyzer).matchingPathGlob(stringPathGlob + "." + IDX_STRING_TEXT).multiValued(); - spfield.fieldTemplate("string-lower", lowerCaseNormalizer).matchingPathGlob(stringPathGlob + "." + IDX_STRING_LOWER).multiValued(); + spfield.objectFieldTemplate("stringIndex", ObjectStructure.FLATTENED) + .matchingPathGlob(stringPathGlob); + spfield.fieldTemplate("string-norm", normStringAnalyzer) + .matchingPathGlob(stringPathGlob + "." + IDX_STRING_NORMALIZED) + .multiValued(); + spfield.fieldTemplate("string-exact", exactAnalyzer) + .matchingPathGlob(stringPathGlob + "." + IDX_STRING_EXACT) + .multiValued(); + spfield.fieldTemplate("string-text", standardAnalyzer) + .matchingPathGlob(stringPathGlob + "." + IDX_STRING_TEXT) + .multiValued(); + spfield.fieldTemplate("string-lower", lowerCaseNormalizer) + .matchingPathGlob(stringPathGlob + "." + IDX_STRING_LOWER) + .multiValued(); - nestedSpField.objectFieldTemplate("nestedStringIndex", ObjectStructure.FLATTENED).matchingPathGlob(stringPathGlob); - nestedSpField.fieldTemplate("string-norm", normStringAnalyzer).matchingPathGlob(stringPathGlob + "." + IDX_STRING_NORMALIZED).multiValued(); - nestedSpField.fieldTemplate("string-text", standardAnalyzer).matchingPathGlob(stringPathGlob + "." + IDX_STRING_TEXT).multiValued(); + nestedSpField + .objectFieldTemplate("nestedStringIndex", ObjectStructure.FLATTENED) + .matchingPathGlob(stringPathGlob); + nestedSpField + .fieldTemplate("string-norm", normStringAnalyzer) + .matchingPathGlob(stringPathGlob + "." + IDX_STRING_NORMALIZED) + .multiValued(); + nestedSpField + .fieldTemplate("string-text", standardAnalyzer) + .matchingPathGlob(stringPathGlob + "." 
+ IDX_STRING_TEXT) + .multiValued(); // token // Ideally, we'd store a single code-system string and use a custom tokenizer to // generate "system|" "|code" and "system|code" tokens to support all three. - // But the standard tokenizers aren't that flexible. As second best, it would be nice to use elastic multi-fields + // But the standard tokenizers aren't that flexible. As second best, it would be nice to use elastic + // multi-fields // to apply three different tokenizers to a single value. // Instead, just be simple and expand into three full fields for now String tokenPathGlob = "*.token"; spfield.objectFieldTemplate("tokenIndex", ObjectStructure.FLATTENED).matchingPathGlob(tokenPathGlob); - spfield.fieldTemplate("token-code", keywordFieldType).matchingPathGlob(tokenPathGlob + ".code").multiValued(); - spfield.fieldTemplate("token-code-system", keywordFieldType).matchingPathGlob(tokenPathGlob + ".code-system").multiValued(); - spfield.fieldTemplate("token-system", keywordFieldType).matchingPathGlob(tokenPathGlob + ".system").multiValued(); + spfield.fieldTemplate("token-code", keywordFieldType) + .matchingPathGlob(tokenPathGlob + ".code") + .multiValued(); + spfield.fieldTemplate("token-code-system", keywordFieldType) + .matchingPathGlob(tokenPathGlob + ".code-system") + .multiValued(); + spfield.fieldTemplate("token-system", keywordFieldType) + .matchingPathGlob(tokenPathGlob + ".system") + .multiValued(); - nestedSpField.objectFieldTemplate("nestedTokenIndex", ObjectStructure.FLATTENED).matchingPathGlob(tokenPathGlob); - nestedSpField.fieldTemplate("token-code", keywordFieldType).matchingPathGlob(tokenPathGlob + ".code").multiValued(); - nestedSpField.fieldTemplate("token-code-system", keywordFieldType).matchingPathGlob(tokenPathGlob + ".code-system").multiValued(); - nestedSpField.fieldTemplate("token-system", keywordFieldType).matchingPathGlob(tokenPathGlob + ".system").multiValued(); + nestedSpField + .objectFieldTemplate("nestedTokenIndex", ObjectStructure.FLATTENED) + .matchingPathGlob(tokenPathGlob); + nestedSpField + .fieldTemplate("token-code", keywordFieldType) + .matchingPathGlob(tokenPathGlob + ".code") + .multiValued(); + nestedSpField + .fieldTemplate("token-code-system", keywordFieldType) + .matchingPathGlob(tokenPathGlob + ".code-system") + .multiValued(); + nestedSpField + .fieldTemplate("token-system", keywordFieldType) + .matchingPathGlob(tokenPathGlob + ".system") + .multiValued(); // reference - spfield.fieldTemplate("reference-value", keywordFieldType).matchingPathGlob("*.reference.value").multiValued(); + spfield.fieldTemplate("reference-value", keywordFieldType) + .matchingPathGlob("*.reference.value") + .multiValued(); // uri - spfield.fieldTemplate("uriValueTemplate", keywordFieldType).matchingPathGlob("*." + URI_VALUE).multiValued(); - nestedSpField.fieldTemplate("uriValueTemplate", keywordFieldType).matchingPathGlob("*." + URI_VALUE).multiValued(); + spfield.fieldTemplate("uriValueTemplate", keywordFieldType) + .matchingPathGlob("*." + URI_VALUE) + .multiValued(); + nestedSpField + .fieldTemplate("uriValueTemplate", keywordFieldType) + .matchingPathGlob("*." + URI_VALUE) + .multiValued(); // number spfield.fieldTemplate("numberValueTemplate", bigDecimalFieldType).matchingPathGlob("*." + NUMBER_VALUE); - nestedSpField.fieldTemplate("numberValueTemplate", bigDecimalFieldType).matchingPathGlob("*." + NUMBER_VALUE); + nestedSpField + .fieldTemplate("numberValueTemplate", bigDecimalFieldType) + .matchingPathGlob("*." 
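As an orientation aid for the token templates above, the following minimal sketch shows how a token value written through Hibernate Search's DocumentElement API would pick up those templates by path-glob match. The search parameter name ("identifier"), the example values, and the package of HSearchIndexWriter are illustrative assumptions; the real document layout is produced by HSearchIndexWriter, not by this snippet.

    import ca.uhn.fhir.jpa.model.search.HSearchIndexWriter; // package assumed
    import org.hibernate.search.engine.backend.document.DocumentElement;

    class TokenWriteSketch {
        // Writes one token under the search-param root; field types come from the templates declared above.
        void write(DocumentElement theDocument) {
            DocumentElement sp = theDocument.addObject(HSearchIndexWriter.SEARCH_PARAM_ROOT);
            DocumentElement token = sp.addObject("identifier").addObject("token"); // relative path "identifier.token" matches the "*.token" glob
            token.addValue("code", "1234-5");                         // typed by the "token-code" template (keywordFieldType)
            token.addValue("system", "http://loinc.org");             // typed by the "token-system" template
            token.addValue("code-system", "http://loinc.org|1234-5"); // typed by the "token-code-system" template
        }
    }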
+ NUMBER_VALUE); - //quantity + // quantity String quantityPathGlob = "*.quantity"; - nestedSpField.objectFieldTemplate("quantityTemplate", ObjectStructure.FLATTENED).matchingPathGlob(quantityPathGlob); - nestedSpField.fieldTemplate(QTY_SYSTEM, keywordFieldType).matchingPathGlob(quantityPathGlob + "." + QTY_SYSTEM); + nestedSpField + .objectFieldTemplate("quantityTemplate", ObjectStructure.FLATTENED) + .matchingPathGlob(quantityPathGlob); + nestedSpField + .fieldTemplate(QTY_SYSTEM, keywordFieldType) + .matchingPathGlob(quantityPathGlob + "." + QTY_SYSTEM); nestedSpField.fieldTemplate(QTY_CODE, keywordFieldType).matchingPathGlob(quantityPathGlob + "." + QTY_CODE); - nestedSpField.fieldTemplate(QTY_VALUE, bigDecimalFieldType).matchingPathGlob(quantityPathGlob + "." + QTY_VALUE); - nestedSpField.fieldTemplate(QTY_CODE_NORM, keywordFieldType).matchingPathGlob(quantityPathGlob + "." + QTY_CODE_NORM); - nestedSpField.fieldTemplate(QTY_VALUE_NORM, bigDecimalFieldType).matchingPathGlob(quantityPathGlob + "." + QTY_VALUE_NORM); + nestedSpField + .fieldTemplate(QTY_VALUE, bigDecimalFieldType) + .matchingPathGlob(quantityPathGlob + "." + QTY_VALUE); + nestedSpField + .fieldTemplate(QTY_CODE_NORM, keywordFieldType) + .matchingPathGlob(quantityPathGlob + "." + QTY_CODE_NORM); + nestedSpField + .fieldTemplate(QTY_VALUE_NORM, bigDecimalFieldType) + .matchingPathGlob(quantityPathGlob + "." + QTY_VALUE_NORM); // date String dateTimePathGlob = "*.dt"; - spfield.objectFieldTemplate("datetimeIndex", ObjectStructure.FLATTENED).matchingPathGlob(dateTimePathGlob); - spfield.fieldTemplate("datetime-lower-ordinal", dateTimeOrdinalFieldType).matchingPathGlob(dateTimePathGlob + ".lower-ord").multiValued(); - spfield.fieldTemplate("datetime-lower-value", dateTimeFieldType).matchingPathGlob(dateTimePathGlob + ".lower").multiValued(); - spfield.fieldTemplate("datetime-upper-ordinal", dateTimeOrdinalFieldType).matchingPathGlob(dateTimePathGlob + ".upper-ord").multiValued(); - spfield.fieldTemplate("datetime-upper-value", dateTimeFieldType).matchingPathGlob(dateTimePathGlob + ".upper").multiValued(); + spfield.objectFieldTemplate("datetimeIndex", ObjectStructure.FLATTENED) + .matchingPathGlob(dateTimePathGlob); + spfield.fieldTemplate("datetime-lower-ordinal", dateTimeOrdinalFieldType) + .matchingPathGlob(dateTimePathGlob + ".lower-ord") + .multiValued(); + spfield.fieldTemplate("datetime-lower-value", dateTimeFieldType) + .matchingPathGlob(dateTimePathGlob + ".lower") + .multiValued(); + spfield.fieldTemplate("datetime-upper-ordinal", dateTimeOrdinalFieldType) + .matchingPathGlob(dateTimePathGlob + ".upper-ord") + .multiValued(); + spfield.fieldTemplate("datetime-upper-value", dateTimeFieldType) + .matchingPathGlob(dateTimePathGlob + ".upper") + .multiValued(); - nestedSpField.objectFieldTemplate("nestedDatetimeIndex", ObjectStructure.FLATTENED).matchingPathGlob(dateTimePathGlob); - nestedSpField.fieldTemplate("datetime-lower-ordinal", dateTimeOrdinalFieldType).matchingPathGlob(dateTimePathGlob + ".lower-ord").multiValued(); - nestedSpField.fieldTemplate("datetime-lower-value", dateTimeFieldType).matchingPathGlob(dateTimePathGlob + ".lower").multiValued(); - nestedSpField.fieldTemplate("datetime-upper-ordinal", dateTimeOrdinalFieldType).matchingPathGlob(dateTimePathGlob + ".upper-ord").multiValued(); - nestedSpField.fieldTemplate("datetime-upper-value", dateTimeFieldType).matchingPathGlob(dateTimePathGlob + ".upper").multiValued(); + nestedSpField + .objectFieldTemplate("nestedDatetimeIndex", ObjectStructure.FLATTENED) 
+ .matchingPathGlob(dateTimePathGlob); + nestedSpField + .fieldTemplate("datetime-lower-ordinal", dateTimeOrdinalFieldType) + .matchingPathGlob(dateTimePathGlob + ".lower-ord") + .multiValued(); + nestedSpField + .fieldTemplate("datetime-lower-value", dateTimeFieldType) + .matchingPathGlob(dateTimePathGlob + ".lower") + .multiValued(); + nestedSpField + .fieldTemplate("datetime-upper-ordinal", dateTimeOrdinalFieldType) + .matchingPathGlob(dateTimePathGlob + ".upper-ord") + .multiValued(); + nestedSpField + .fieldTemplate("datetime-upper-value", dateTimeFieldType) + .matchingPathGlob(dateTimePathGlob + ".upper") + .multiValued(); // last, since the globs are matched in declaration order, and * matches even nested nodes. spfield.objectFieldTemplate("spObject", ObjectStructure.FLATTENED).matchingPathGlob("*"); // we use nested search params for the autocomplete search. - nestedSpField.objectFieldTemplate("nestedSpSubObject", ObjectStructure.FLATTENED).matchingPathGlob("*.*").multiValued(); - nestedSpField.objectFieldTemplate("nestedSpObject", ObjectStructure.NESTED).matchingPathGlob("*").multiValued(); + nestedSpField + .objectFieldTemplate("nestedSpSubObject", ObjectStructure.FLATTENED) + .matchingPathGlob("*.*") + .multiValued(); + nestedSpField + .objectFieldTemplate("nestedSpObject", ObjectStructure.NESTED) + .matchingPathGlob("*") + .multiValued(); } } @Override - public void write(DocumentElement theDocument, ExtendedHSearchIndexData theIndexData, PropertyBridgeWriteContext thePropertyBridgeWriteContext) { + public void write( + DocumentElement theDocument, + ExtendedHSearchIndexData theIndexData, + PropertyBridgeWriteContext thePropertyBridgeWriteContext) { if (theIndexData != null) { ourLog.trace("Writing index data for {}", theIndexData); theIndexData.writeIndexElements(theDocument); } } - } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/CodeSystemHash.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/CodeSystemHash.java index 221b11adf9c..d40f8e28f64 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/CodeSystemHash.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/CodeSystemHash.java @@ -27,26 +27,26 @@ import com.google.common.hash.Hasher; import com.google.common.hash.Hashing; public class CodeSystemHash { - private static final HashFunction HASH_FUNCTION = Hashing.murmur3_128(0); - private static final byte[] DELIMITER_BYTES = "|".getBytes(Charsets.UTF_8); + private static final HashFunction HASH_FUNCTION = Hashing.murmur3_128(0); + private static final byte[] DELIMITER_BYTES = "|".getBytes(Charsets.UTF_8); - static public long hashCodeSystem( String system, String code ) { - Hasher hasher = HASH_FUNCTION.newHasher(); - addStringToHasher(hasher, system); - addStringToHasher(hasher, code); + public static long hashCodeSystem(String system, String code) { + Hasher hasher = HASH_FUNCTION.newHasher(); + addStringToHasher(hasher, system); + addStringToHasher(hasher, code); - HashCode hashCode = hasher.hash(); - return hashCode.asLong(); - } + HashCode hashCode = hasher.hash(); + return hashCode.asLong(); + } - static private void addStringToHasher(Hasher hasher, String next) { - if (next == null) { - hasher.putByte((byte) 0); - } else { - next = UrlUtil.escapeUrlParam(next); - byte[] bytes = next.getBytes(Charsets.UTF_8); - hasher.putBytes(bytes); - } - hasher.putBytes(DELIMITER_BYTES); - } + private static void addStringToHasher(Hasher hasher, String next) { + if (next == 
null) { + hasher.putByte((byte) 0); + } else { + next = UrlUtil.escapeUrlParam(next); + byte[] bytes = next.getBytes(Charsets.UTF_8); + hasher.putBytes(bytes); + } + hasher.putBytes(DELIMITER_BYTES); + } } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java index b1445d9e6ef..ec8dc48f8cc 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java @@ -28,7 +28,8 @@ public class JpaConstants { /** * Userdata key for tracking the fact that a resource ID was assigned by the server */ - public static final String RESOURCE_ID_SERVER_ASSIGNED = JpaConstants.class.getName() + "_RESOURCE_ID_SERVER_ASSIGNED"; + public static final String RESOURCE_ID_SERVER_ASSIGNED = + JpaConstants.class.getName() + "_RESOURCE_ID_SERVER_ASSIGNED"; /** * Operation name for the $apply-codesystem-delta-add operation */ @@ -58,22 +59,26 @@ public class JpaConstants { * @deprecated Replace with {@link ProviderConstants#OPERATION_EXPUNGE_OUT_PARAM_EXPUNGE_COUNT} */ @Deprecated - public static final String OPERATION_EXPUNGE_PARAM_EXPUNGE_DELETED_RESOURCES = ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_DELETED_RESOURCES; + public static final String OPERATION_EXPUNGE_PARAM_EXPUNGE_DELETED_RESOURCES = + ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_DELETED_RESOURCES; /** * @deprecated Replace with {@link ProviderConstants#OPERATION_EXPUNGE_OUT_PARAM_EXPUNGE_COUNT} */ @Deprecated - public static final String OPERATION_EXPUNGE_PARAM_EXPUNGE_PREVIOUS_VERSIONS = ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_PREVIOUS_VERSIONS; + public static final String OPERATION_EXPUNGE_PARAM_EXPUNGE_PREVIOUS_VERSIONS = + ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_PREVIOUS_VERSIONS; /** * @deprecated Replace with {@link ProviderConstants#OPERATION_EXPUNGE_OUT_PARAM_EXPUNGE_COUNT} */ @Deprecated - public static final String OPERATION_EXPUNGE_PARAM_EXPUNGE_EVERYTHING = ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_EVERYTHING; + public static final String OPERATION_EXPUNGE_PARAM_EXPUNGE_EVERYTHING = + ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_EVERYTHING; /** * @deprecated Replace with {@link ProviderConstants#OPERATION_EXPUNGE_OUT_PARAM_EXPUNGE_COUNT} */ @Deprecated - public static final String OPERATION_EXPUNGE_OUT_PARAM_EXPUNGE_COUNT = ProviderConstants.OPERATION_EXPUNGE_OUT_PARAM_EXPUNGE_COUNT; + public static final String OPERATION_EXPUNGE_OUT_PARAM_EXPUNGE_COUNT = + ProviderConstants.OPERATION_EXPUNGE_OUT_PARAM_EXPUNGE_COUNT; /** * Header name for the "X-Meta-Snapshot-Mode" header, which * specifies that properties in meta (tags, profiles, security labels) @@ -100,7 +105,8 @@ public class JpaConstants { /** * Operation name for the $validate operation */ - // NB don't delete this, it's used in Smile as well, even though hapi-fhir-server uses the version from Constants.java + // NB don't delete this, it's used in Smile as well, even though hapi-fhir-server uses the version from + // Constants.java public static final String OPERATION_VALIDATE = Constants.EXTOP_VALIDATE; /** * Operation name for the $everything operation @@ -210,8 +216,6 @@ public class JpaConstants { */ public static final String PARAM_EXPORT_PATIENT = "patient"; - - /** * Parameter for the $import operation */ @@ -246,13 +250,13 @@ public class JpaConstants { /** * Parameter for delete 
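The reformatted CodeSystemHash above hashes the URL-escaped system and code, appending a '|' delimiter after each component. A small hypothetical usage sketch (values are arbitrary):

    import ca.uhn.fhir.jpa.model.util.CodeSystemHash;

    class CodeSystemHashSketch {
        public static void main(String[] args) {
            long a = CodeSystemHash.hashCodeSystem("http://loinc.org", "1234-5");
            long b = CodeSystemHash.hashCodeSystem("http://loinc.org|1234", "-5");
            // The per-component delimiter (plus URL escaping of '|') keeps these two inputs
            // from producing the same byte stream, so the hashes differ.
            System.out.println(a + " " + b);
        }
    }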
to indicate the deleted resources should also be expunged */ - public static final String PARAM_DELETE_EXPUNGE = "_expunge"; /** * URL for extension on a SearchParameter indicating that text values should not be indexed */ - public static final String EXTENSION_EXT_SYSTEMDEFINED = JpaConstants.class.getName() + "_EXTENSION_EXT_SYSTEMDEFINED"; + public static final String EXTENSION_EXT_SYSTEMDEFINED = + JpaConstants.class.getName() + "_EXTENSION_EXT_SYSTEMDEFINED"; /** * Deprecated. Please use {@link HapiExtensions#EXT_SEARCHPARAM_PHONETIC_ENCODER} instead. @@ -276,11 +280,11 @@ public class JpaConstants { * Parameter for the $expand operation */ public static final String OPERATION_EXPAND_PARAM_INCLUDE_HIERARCHY = "includeHierarchy"; + public static final String OPERATION_EXPAND_PARAM_DISPLAY_LANGUAGE = "displayLanguage"; public static final String HEADER_UPSERT_EXISTENCE_CHECK = "X-Upsert-Extistence-Check"; public static final String HEADER_UPSERT_EXISTENCE_CHECK_DISABLED = "disabled"; - /** * Parameters for the rewrite history operation */ @@ -295,12 +299,15 @@ public class JpaConstants { * IPS Generation operation URL */ public static final String SUMMARY_OPERATION_URL = "http://hl7.org/fhir/uv/ips/OperationDefinition/summary"; - public static final String ORG_HIBERNATE_TYPE_TEXT_TYPE = "org.hibernate.type.TextType"; - public static final String BULK_META_EXTENSION_EXPORT_IDENTIFIER = "https://hapifhir.org/NamingSystem/bulk-export-identifier"; - public static final String BULK_META_EXTENSION_JOB_ID = "https://hapifhir.org/NamingSystem/bulk-export-job-id"; - public static final String BULK_META_EXTENSION_RESOURCE_TYPE = "https://hapifhir.org/NamingSystem/bulk-export-binary-resource-type"; - /** + public static final String ORG_HIBERNATE_TYPE_TEXT_TYPE = "org.hibernate.type.TextType"; + public static final String BULK_META_EXTENSION_EXPORT_IDENTIFIER = + "https://hapifhir.org/NamingSystem/bulk-export-identifier"; + public static final String BULK_META_EXTENSION_JOB_ID = "https://hapifhir.org/NamingSystem/bulk-export-job-id"; + public static final String BULK_META_EXTENSION_RESOURCE_TYPE = + "https://hapifhir.org/NamingSystem/bulk-export-binary-resource-type"; + + /** * Non-instantiable */ private JpaConstants() { diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/UcumServiceUtil.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/UcumServiceUtil.java index c9268b94754..56ef214852e 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/UcumServiceUtil.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/UcumServiceUtil.java @@ -19,14 +19,10 @@ */ package ca.uhn.fhir.jpa.model.util; -import java.io.InputStream; -import java.math.BigDecimal; -import java.math.MathContext; -import java.math.RoundingMode; - import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.param.QuantityParam; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import ca.uhn.fhir.util.ClasspathUtil; import org.fhir.ucum.Decimal; import org.fhir.ucum.Pair; import org.fhir.ucum.UcumEssenceService; @@ -34,8 +30,9 @@ import org.fhir.ucum.UcumException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import ca.uhn.fhir.util.ClasspathUtil; - +import java.io.InputStream; +import java.math.BigDecimal; +import java.math.RoundingMode; import javax.annotation.Nullable; /** @@ -55,14 +52,12 @@ public class UcumServiceUtil { private static UcumEssenceService myUcumEssenceService = null; - private 
UcumServiceUtil() { - } + private UcumServiceUtil() {} // lazy load UCUM_SOURCE only once private static void init() { - if (myUcumEssenceService != null) - return; + if (myUcumEssenceService != null) return; synchronized (UcumServiceUtil.class) { InputStream input = ClasspathUtil.loadResourceAsStream(UCUM_SOURCE); @@ -80,9 +75,9 @@ public class UcumServiceUtil { /** * Get the canonical form of a code, it's define at * http://unitsofmeasure.org - * + * * e.g. 12cm -> 0.12m where m is the canonical form of the length. - * + * * @param theSystem must be http://unitsofmeasure.org * @param theValue the value in the original form e.g. 0.12 * @param theCode the code in the original form e.g. 'cm' @@ -91,14 +86,17 @@ public class UcumServiceUtil { public static Pair getCanonicalForm(String theSystem, BigDecimal theValue, String theCode) { // -- only for http://unitsofmeasure.org - if (!UCUM_CODESYSTEM_URL.equals(theSystem) || theValue == null || theCode == null) - return null; + if (!UCUM_CODESYSTEM_URL.equals(theSystem) || theValue == null || theCode == null) return null; - if ( isCelsiusOrFahrenheit(theCode) ) { + if (isCelsiusOrFahrenheit(theCode)) { try { return getCanonicalFormForCelsiusOrFahrenheit(theValue, theCode); } catch (UcumException theE) { - ourLog.error("Exception when trying to obtain canonical form for value {} and code {}: {}", theValue, theCode, theE.getMessage()); + ourLog.error( + "Exception when trying to obtain canonical form for value {} and code {}: {}", + theValue, + theCode, + theE.getMessage()); return null; } } @@ -110,8 +108,7 @@ public class UcumServiceUtil { Decimal theDecimal = new Decimal(theValue.toPlainString(), theValue.precision()); theCanonicalPair = myUcumEssenceService.getCanonicalForm(new Pair(theDecimal, theCode)); // For some reason code [degF], degree Fahrenheit, can't be converted. it returns value null. - if (theCanonicalPair.getValue() == null) - return null; + if (theCanonicalPair.getValue() == null) return null; } catch (UcumException e) { return null; } @@ -119,11 +116,9 @@ public class UcumServiceUtil { return theCanonicalPair; } - - private static Pair getCanonicalFormForCelsiusOrFahrenheit(BigDecimal theValue, String theCode) throws UcumException { - return theCode.equals(CELSIUS_CODE) - ? canonicalizeCelsius(theValue) - : canonicalizeFahrenheit(theValue); + private static Pair getCanonicalFormForCelsiusOrFahrenheit(BigDecimal theValue, String theCode) + throws UcumException { + return theCode.equals(CELSIUS_CODE) ? 
canonicalizeCelsius(theValue) : canonicalizeFahrenheit(theValue); } /** @@ -131,10 +126,9 @@ public class UcumServiceUtil { * Formula is K = (x°F − 32) × 5/9 + 273.15 */ private static Pair canonicalizeFahrenheit(BigDecimal theValue) throws UcumException { - BigDecimal converted = theValue - .subtract( BigDecimal.valueOf(32) ) - .multiply( BigDecimal.valueOf(5f / 9f) ) - .add( BigDecimal.valueOf(CELSIUS_KELVIN_DIFF) ); + BigDecimal converted = theValue.subtract(BigDecimal.valueOf(32)) + .multiply(BigDecimal.valueOf(5f / 9f)) + .add(BigDecimal.valueOf(CELSIUS_KELVIN_DIFF)); // disallow precision larger than input, as it matters when defining ranges BigDecimal adjusted = converted.setScale(theValue.precision(), RoundingMode.HALF_UP); @@ -147,33 +141,31 @@ public class UcumServiceUtil { */ private static Pair canonicalizeCelsius(BigDecimal theValue) throws UcumException { Decimal valueDec = new Decimal(theValue.toPlainString(), theValue.precision()); - Decimal converted = valueDec - .add(new Decimal(Float.toString(CELSIUS_KELVIN_DIFF))); + Decimal converted = valueDec.add(new Decimal(Float.toString(CELSIUS_KELVIN_DIFF))); return new Pair(converted, "K"); } - private static boolean isCelsiusOrFahrenheit(String theCode) { return theCode.equals(CELSIUS_CODE) || theCode.equals(FAHRENHEIT_CODE); } - @Nullable - public static QuantityParam toCanonicalQuantityOrNull(QuantityParam theQuantityParam) { - Pair canonicalForm = getCanonicalForm(theQuantityParam.getSystem(), theQuantityParam.getValue(), theQuantityParam.getUnits()); - if (canonicalForm != null) { - BigDecimal valueValue = new BigDecimal(canonicalForm.getValue().asDecimal()); - String unitsValue = canonicalForm.getCode(); - return new QuantityParam() - .setSystem(theQuantityParam.getSystem()) - .setValue(valueValue) - .setUnits(unitsValue) - .setPrefix(theQuantityParam.getPrefix()); - } else { - return null; - } - } + public static QuantityParam toCanonicalQuantityOrNull(QuantityParam theQuantityParam) { + Pair canonicalForm = getCanonicalForm( + theQuantityParam.getSystem(), theQuantityParam.getValue(), theQuantityParam.getUnits()); + if (canonicalForm != null) { + BigDecimal valueValue = new BigDecimal(canonicalForm.getValue().asDecimal()); + String unitsValue = canonicalForm.getCode(); + return new QuantityParam() + .setSystem(theQuantityParam.getSystem()) + .setValue(valueValue) + .setUnits(unitsValue) + .setPrefix(theQuantityParam.getPrefix()); + } else { + return null; + } + } public static double convert(double theDistanceKm, String theSourceUnits, String theTargetUnits) { init(); diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/interceptor/model/ReadPartitionIdRequestDetails.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/interceptor/model/ReadPartitionIdRequestDetails.java index de2111e8045..4f80dd9bbc2 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/interceptor/model/ReadPartitionIdRequestDetails.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/interceptor/model/ReadPartitionIdRequestDetails.java @@ -38,16 +38,27 @@ public class ReadPartitionIdRequestDetails extends PartitionIdRequestDetails { private final String myResourceType; private final RestOperationTypeEnum myRestOperationType; private final IIdType myReadResourceId; + @Nullable private final SearchParameterMap mySearchParams; + @Nullable private final IBaseResource myConditionalTargetOrNull; + @Nullable private final String mySearchUuid; + @Nullable private final String 
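A brief sketch of the canonicalization path in UcumServiceUtil as reformatted above; the UCUM codes and values are arbitrary examples. Ordinary units go through the UCUM essence service, while [degF] takes the special-cased branch that applies K = (x - 32) × 5/9 + 273.15:

    import ca.uhn.fhir.jpa.model.util.UcumServiceUtil;
    import org.fhir.ucum.Pair;

    import java.math.BigDecimal;

    class UcumCanonicalFormSketch {
        public static void main(String[] args) {
            // 12 cm canonicalizes to 0.12 m (length in metres)
            Pair cm = UcumServiceUtil.getCanonicalForm(
                    "http://unitsofmeasure.org", new BigDecimal("12"), "cm");
            // 98.6 [degF] canonicalizes to roughly 310.15 K via the Fahrenheit branch
            Pair degF = UcumServiceUtil.getCanonicalForm(
                    "http://unitsofmeasure.org", new BigDecimal("98.6"), "[degF]");
            System.out.println(cm.getValue() + " " + cm.getCode());
            System.out.println(degF.getValue() + " " + degF.getCode());
        }
    }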
myExtendedOperationName; - private ReadPartitionIdRequestDetails(String theResourceType, RestOperationTypeEnum theRestOperationType, IIdType theReadResourceId, @Nullable SearchParameterMap theSearchParams, @Nullable IBaseResource theConditionalTargetOrNull, @Nullable String theSearchUuid, String theExtendedOperationName) { + private ReadPartitionIdRequestDetails( + String theResourceType, + RestOperationTypeEnum theRestOperationType, + IIdType theReadResourceId, + @Nullable SearchParameterMap theSearchParams, + @Nullable IBaseResource theConditionalTargetOrNull, + @Nullable String theSearchUuid, + String theExtendedOperationName) { myResourceType = theResourceType; myRestOperationType = theRestOperationType; myReadResourceId = theReadResourceId; @@ -98,7 +109,8 @@ public class ReadPartitionIdRequestDetails extends PartitionIdRequestDetails { return forRead(theId.getResourceType(), theId, false); } - public static ReadPartitionIdRequestDetails forOperation(@Nullable String theResourceType, @Nullable IIdType theId, @Nonnull String theExtendedOperationName) { + public static ReadPartitionIdRequestDetails forOperation( + @Nullable String theResourceType, @Nullable IIdType theId, @Nonnull String theExtendedOperationName) { RestOperationTypeEnum op; if (theId != null) { op = RestOperationTypeEnum.EXTENDED_OPERATION_INSTANCE; @@ -111,13 +123,23 @@ public class ReadPartitionIdRequestDetails extends PartitionIdRequestDetails { return new ReadPartitionIdRequestDetails(theResourceType, op, null, null, null, null, theExtendedOperationName); } - public static ReadPartitionIdRequestDetails forRead(String theResourceType, @Nonnull IIdType theId, boolean theIsVread) { + public static ReadPartitionIdRequestDetails forRead( + String theResourceType, @Nonnull IIdType theId, boolean theIsVread) { RestOperationTypeEnum op = theIsVread ? 
RestOperationTypeEnum.VREAD : RestOperationTypeEnum.READ; - return new ReadPartitionIdRequestDetails(theResourceType, op, theId.withResourceType(theResourceType), null, null, null, null); + return new ReadPartitionIdRequestDetails( + theResourceType, op, theId.withResourceType(theResourceType), null, null, null, null); } - public static ReadPartitionIdRequestDetails forSearchType(String theResourceType, SearchParameterMap theParams, IBaseResource theConditionalOperationTargetOrNull) { - return new ReadPartitionIdRequestDetails(theResourceType, RestOperationTypeEnum.SEARCH_TYPE, null, theParams, theConditionalOperationTargetOrNull, null, null); + public static ReadPartitionIdRequestDetails forSearchType( + String theResourceType, SearchParameterMap theParams, IBaseResource theConditionalOperationTargetOrNull) { + return new ReadPartitionIdRequestDetails( + theResourceType, + RestOperationTypeEnum.SEARCH_TYPE, + null, + theParams, + theConditionalOperationTargetOrNull, + null, + null); } public static ReadPartitionIdRequestDetails forHistory(String theResourceType, IIdType theIdType) { @@ -129,7 +151,8 @@ public class ReadPartitionIdRequestDetails extends PartitionIdRequestDetails { } else { restOperationTypeEnum = RestOperationTypeEnum.HISTORY_SYSTEM; } - return new ReadPartitionIdRequestDetails(theResourceType, restOperationTypeEnum, theIdType, null, null, null, null); + return new ReadPartitionIdRequestDetails( + theResourceType, restOperationTypeEnum, theIdType, null, null, null, null); } public static ReadPartitionIdRequestDetails forSearchUuid(String theUuid) { diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/IResourceChangeEvent.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/IResourceChangeEvent.java index 76c9ac5d0c2..2ab2483c29f 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/IResourceChangeEvent.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/IResourceChangeEvent.java @@ -29,7 +29,9 @@ import java.util.List; */ public interface IResourceChangeEvent { List getCreatedResourceIds(); + List getUpdatedResourceIds(); + List getDeletedResourceIds(); /** diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/IResourceChangeListenerRegistry.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/IResourceChangeListenerRegistry.java index f9e72033f9b..fa6ba39cf72 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/IResourceChangeListenerRegistry.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/IResourceChangeListenerRegistry.java @@ -47,7 +47,11 @@ public interface IResourceChangeListenerRegistry { * @throws IllegalArgumentException if theSearchParamMap cannot be evaluated in-memory * @return RegisteredResourceChangeListener a handle to the created cache that can be used to manually refresh the cache if required */ - IResourceChangeListenerCache registerResourceResourceChangeListener(String theResourceName, SearchParameterMap theSearchParameterMap, IResourceChangeListener theResourceChangeListener, long theRemoteRefreshIntervalMs); + IResourceChangeListenerCache registerResourceResourceChangeListener( + String theResourceName, + SearchParameterMap theSearchParameterMap, + IResourceChangeListener theResourceChangeListener, + long theRemoteRefreshIntervalMs); /** * Unregister a listener from this service @@ -78,7 +82,6 @@ public interface IResourceChangeListenerRegistry 
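For context on the ReadPartitionIdRequestDetails factory methods being wrapped here, a minimal sketch of building details for a plain (non-version) read; the resource id is a hypothetical example:

    import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
    import org.hl7.fhir.instance.model.api.IIdType;
    import org.hl7.fhir.r4.model.IdType;

    class PartitionDetailsSketch {
        ReadPartitionIdRequestDetails forPatientRead() {
            IIdType id = new IdType("Patient", "123");
            // theIsVread = false, so the details carry RestOperationTypeEnum.READ rather than VREAD
            return ReadPartitionIdRequestDetails.forRead("Patient", id, false);
        }
    }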
{ * caches so their listeners are notified the next time the caches are refreshed. * @param theResource the resource that changed that might trigger a refresh */ - void requestRefreshIfWatching(IBaseResource theResource); /** diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/IResourceVersionSvc.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/IResourceVersionSvc.java index f2921864b39..56c554c1ab8 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/IResourceVersionSvc.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/IResourceVersionSvc.java @@ -23,8 +23,8 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import org.hl7.fhir.instance.model.api.IIdType; -import javax.annotation.Nonnull; import java.util.List; +import javax.annotation.Nonnull; /** * This interface is used by the {@literal IResourceChangeListenerCacheRefresher} to read resources matching the provided @@ -32,7 +32,8 @@ import java.util.List; */ public interface IResourceVersionSvc { @Nonnull - ResourceVersionMap getVersionMap(RequestPartitionId theRequestPartitionId, String theResourceName, SearchParameterMap theSearchParamMap); + ResourceVersionMap getVersionMap( + RequestPartitionId theRequestPartitionId, String theResourceName, SearchParameterMap theSearchParamMap); @Nonnull default ResourceVersionMap getVersionMap(String theResourceName, SearchParameterMap theSearchParamMap) { diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeEvent.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeEvent.java index 6f4081219b1..47c39e0262a 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeEvent.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeEvent.java @@ -36,13 +36,19 @@ public class ResourceChangeEvent implements IResourceChangeEvent { private final List myUpdatedResourceIds; private final List myDeletedResourceIds; - private ResourceChangeEvent(Collection theCreatedResourceIds, Collection theUpdatedResourceIds, Collection theDeletedResourceIds) { + private ResourceChangeEvent( + Collection theCreatedResourceIds, + Collection theUpdatedResourceIds, + Collection theDeletedResourceIds) { myCreatedResourceIds = copyFrom(theCreatedResourceIds); myUpdatedResourceIds = copyFrom(theUpdatedResourceIds); myDeletedResourceIds = copyFrom(theDeletedResourceIds); } - public static ResourceChangeEvent fromCreatedUpdatedDeletedResourceIds(List theCreatedResourceIds, List theUpdatedResourceIds, List theDeletedResourceIds) { + public static ResourceChangeEvent fromCreatedUpdatedDeletedResourceIds( + List theCreatedResourceIds, + List theUpdatedResourceIds, + List theDeletedResourceIds) { return new ResourceChangeEvent(theCreatedResourceIds, theUpdatedResourceIds, theDeletedResourceIds); } @@ -75,9 +81,9 @@ public class ResourceChangeEvent implements IResourceChangeEvent { @Override public String toString() { return new ToStringBuilder(this) - .append("myCreatedResourceIds", myCreatedResourceIds) - .append("myUpdatedResourceIds", myUpdatedResourceIds) - .append("myDeletedResourceIds", myDeletedResourceIds) - .toString(); + .append("myCreatedResourceIds", myCreatedResourceIds) + .append("myUpdatedResourceIds", myUpdatedResourceIds) + .append("myDeletedResourceIds", myDeletedResourceIds) + .toString(); } } diff 
--git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeListenerCache.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeListenerCache.java index d1a60768ce9..1efec3226db 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeListenerCache.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeListenerCache.java @@ -49,6 +49,7 @@ public class ResourceChangeListenerCache implements IResourceChangeListenerCache @Autowired IResourceChangeListenerCacheRefresher myResourceChangeListenerCacheRefresher; + @Autowired SearchParamMatcher mySearchParamMatcher; @@ -61,7 +62,11 @@ public class ResourceChangeListenerCache implements IResourceChangeListenerCache private boolean myInitialized = false; private Instant myNextRefreshTime = Instant.MIN; - public ResourceChangeListenerCache(String theResourceName, IResourceChangeListener theResourceChangeListener, SearchParameterMap theSearchParameterMap, long theRemoteRefreshIntervalMs) { + public ResourceChangeListenerCache( + String theResourceName, + IResourceChangeListener theResourceChangeListener, + SearchParameterMap theSearchParameterMap, + long theRemoteRefreshIntervalMs) { myResourceName = theResourceName; myResourceChangeListener = theResourceChangeListener; mySearchParameterMap = SerializationUtils.clone(theSearchParameterMap); @@ -99,7 +104,8 @@ public class ResourceChangeListenerCache implements IResourceChangeListenerCache InMemoryMatchResult result = mySearchParamMatcher.match(mySearchParameterMap, theResource); if (!result.supported()) { // This should never happen since we enforce only in-memory SearchParamMaps at registration time - throw new IllegalStateException(Msg.code(483) + "Search Parameter Map " + mySearchParameterMap + " cannot be processed in-memory: " + result.getUnsupportedReason()); + throw new IllegalStateException(Msg.code(483) + "Search Parameter Map " + mySearchParameterMap + + " cannot be processed in-memory: " + result.getUnsupportedReason()); } return result.matched(); } @@ -135,16 +141,19 @@ public class ResourceChangeListenerCache implements IResourceChangeListenerCache } @VisibleForTesting - public void setResourceChangeListenerCacheRefresher(IResourceChangeListenerCacheRefresher theResourceChangeListenerCacheRefresher) { + public void setResourceChangeListenerCacheRefresher( + IResourceChangeListenerCacheRefresher theResourceChangeListenerCacheRefresher) { myResourceChangeListenerCacheRefresher = theResourceChangeListenerCacheRefresher; } private ResourceChangeResult refreshCacheAndNotifyListenersWithRetry() { - Retrier refreshCacheRetrier = new Retrier<>(() -> { - synchronized (this) { - return myResourceChangeListenerCacheRefresher.refreshCacheAndNotifyListener(this); - } - }, MAX_RETRIES); + Retrier refreshCacheRetrier = new Retrier<>( + () -> { + synchronized (this) { + return myResourceChangeListenerCacheRefresher.refreshCacheAndNotifyListener(this); + } + }, + MAX_RETRIES); return refreshCacheRetrier.runWithRetry(); } @@ -209,9 +218,9 @@ public class ResourceChangeListenerCache implements IResourceChangeListenerCache @Override public String toString() { return new ToStringBuilder(this) - .append("myResourceName", myResourceName) - .append("mySearchParameterMap", mySearchParameterMap) - .append("myInitialized", myInitialized) - .toString(); + .append("myResourceName", myResourceName) + .append("mySearchParameterMap", mySearchParameterMap) + .append("myInitialized", 
myInitialized) + .toString(); } } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeListenerCacheFactory.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeListenerCacheFactory.java index ce7e61187e9..92e8f725db1 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeListenerCacheFactory.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeListenerCacheFactory.java @@ -29,7 +29,16 @@ public class ResourceChangeListenerCacheFactory { @Autowired ApplicationContext myApplicationContext; - public ResourceChangeListenerCache newResourceChangeListenerCache(String theResourceName, SearchParameterMap theMap, IResourceChangeListener theResourceChangeListener, long theRemoteRefreshIntervalMs) { - return myApplicationContext.getBean(ResourceChangeListenerCache.class, theResourceName, theResourceChangeListener, theMap, theRemoteRefreshIntervalMs); + public ResourceChangeListenerCache newResourceChangeListenerCache( + String theResourceName, + SearchParameterMap theMap, + IResourceChangeListener theResourceChangeListener, + long theRemoteRefreshIntervalMs) { + return myApplicationContext.getBean( + ResourceChangeListenerCache.class, + theResourceName, + theResourceChangeListener, + theMap, + theRemoteRefreshIntervalMs); } } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeListenerCacheRefresherImpl.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeListenerCacheRefresherImpl.java index af354dc074a..131b5a6851d 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeListenerCacheRefresherImpl.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeListenerCacheRefresherImpl.java @@ -50,7 +50,8 @@ import java.util.List; * if any entries in the new cache are different from the last time that cache was loaded. 
*/ @Service -public class ResourceChangeListenerCacheRefresherImpl implements IResourceChangeListenerCacheRefresher, IHasScheduledJobs { +public class ResourceChangeListenerCacheRefresherImpl + implements IResourceChangeListenerCacheRefresher, IHasScheduledJobs { private static final Logger ourLog = LoggerFactory.getLogger(ResourceChangeListenerCacheRefresherImpl.class); /** @@ -60,8 +61,10 @@ public class ResourceChangeListenerCacheRefresherImpl implements IResourceChange @Autowired private IResourceVersionSvc myResourceVersionSvc; + @Autowired private ResourceChangeListenerRegistryImpl myResourceChangeListenerRegistry; + private boolean myStopping = false; @Override @@ -105,7 +108,8 @@ public class ResourceChangeListenerCacheRefresherImpl implements IResourceChange } @VisibleForTesting - public void setResourceChangeListenerRegistry(ResourceChangeListenerRegistryImpl theResourceChangeListenerRegistry) { + public void setResourceChangeListenerRegistry( + ResourceChangeListenerRegistryImpl theResourceChangeListenerRegistry) { myResourceChangeListenerRegistry = theResourceChangeListenerRegistry; } @@ -114,7 +118,6 @@ public class ResourceChangeListenerCacheRefresherImpl implements IResourceChange myResourceVersionSvc = theResourceVersionSvc; } - @EventListener(ContextRefreshedEvent.class) public void start() { myStopping = false; @@ -141,7 +144,8 @@ public class ResourceChangeListenerCacheRefresherImpl implements IResourceChange return retVal; } SearchParameterMap searchParamMap = theCache.getSearchParameterMap(); - ResourceVersionMap newResourceVersionMap = myResourceVersionSvc.getVersionMap(theCache.getResourceName(), searchParamMap); + ResourceVersionMap newResourceVersionMap = + myResourceVersionSvc.getVersionMap(theCache.getResourceName(), searchParamMap); retVal = retVal.plus(notifyListener(theCache, newResourceVersionMap)); return retVal; @@ -154,12 +158,14 @@ public class ResourceChangeListenerCacheRefresherImpl implements IResourceChange * @param theNewResourceVersionMap the measured new resources * @return the list of created, updated and deleted ids */ - ResourceChangeResult notifyListener(IResourceChangeListenerCache theCache, ResourceVersionMap theNewResourceVersionMap) { + ResourceChangeResult notifyListener( + IResourceChangeListenerCache theCache, ResourceVersionMap theNewResourceVersionMap) { ResourceChangeResult retval; ResourceChangeListenerCache cache = (ResourceChangeListenerCache) theCache; IResourceChangeListener resourceChangeListener = cache.getResourceChangeListener(); if (theCache.isInitialized()) { - retval = compareLastVersionMapToNewVersionMapAndNotifyListenerOfChanges(resourceChangeListener, cache.getResourceVersionCache(), theNewResourceVersionMap); + retval = compareLastVersionMapToNewVersionMapAndNotifyListenerOfChanges( + resourceChangeListener, cache.getResourceVersionCache(), theNewResourceVersionMap); } else { cache.getResourceVersionCache().initialize(theNewResourceVersionMap); resourceChangeListener.handleInit(theNewResourceVersionMap.getSourceIds()); @@ -169,15 +175,17 @@ public class ResourceChangeListenerCacheRefresherImpl implements IResourceChange return retval; } - private ResourceChangeResult compareLastVersionMapToNewVersionMapAndNotifyListenerOfChanges(IResourceChangeListener theListener, ResourceVersionCache theOldResourceVersionCache, ResourceVersionMap theNewResourceVersionMap) { + private ResourceChangeResult compareLastVersionMapToNewVersionMapAndNotifyListenerOfChanges( + IResourceChangeListener theListener, + ResourceVersionCache 
theOldResourceVersionCache, + ResourceVersionMap theNewResourceVersionMap) { // If the new ResourceVersionMap does not have the old key - delete it List deletedIds = new ArrayList<>(); - theOldResourceVersionCache.keySet() - .forEach(id -> { - if (!theNewResourceVersionMap.containsKey(id)) { - deletedIds.add(id); - } - }); + theOldResourceVersionCache.keySet().forEach(id -> { + if (!theNewResourceVersionMap.containsKey(id)) { + deletedIds.add(id); + } + }); deletedIds.forEach(theOldResourceVersionCache::removeResourceId); List createdIds = new ArrayList<>(); @@ -193,7 +201,8 @@ public class ResourceChangeListenerCacheRefresherImpl implements IResourceChange } } - IResourceChangeEvent resourceChangeEvent = ResourceChangeEvent.fromCreatedUpdatedDeletedResourceIds(createdIds, updatedIds, deletedIds); + IResourceChangeEvent resourceChangeEvent = + ResourceChangeEvent.fromCreatedUpdatedDeletedResourceIds(createdIds, updatedIds, deletedIds); if (!resourceChangeEvent.isEmpty()) { theListener.handleChange(resourceChangeEvent); } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeListenerRegistryImpl.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeListenerRegistryImpl.java index d7f72315c24..8b30b1af72d 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeListenerRegistryImpl.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeListenerRegistryImpl.java @@ -31,12 +31,12 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; -import javax.annotation.Nonnull; import java.util.Iterator; import java.util.Queue; import java.util.Set; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.stream.Collectors; +import javax.annotation.Nonnull; /** * This component holds an in-memory list of all registered {@link IResourceChangeListener} instances along @@ -53,7 +53,10 @@ public class ResourceChangeListenerRegistryImpl implements IResourceChangeListen private final ResourceChangeListenerCacheFactory myResourceChangeListenerCacheFactory; private InMemoryResourceMatcher myInMemoryResourceMatcher; - public ResourceChangeListenerRegistryImpl(FhirContext theFhirContext, ResourceChangeListenerCacheFactory theResourceChangeListenerCacheFactory, InMemoryResourceMatcher theInMemoryResourceMatcher) { + public ResourceChangeListenerRegistryImpl( + FhirContext theFhirContext, + ResourceChangeListenerCacheFactory theResourceChangeListenerCacheFactory, + InMemoryResourceMatcher theInMemoryResourceMatcher) { myFhirContext = theFhirContext; myResourceChangeListenerCacheFactory = theResourceChangeListenerCacheFactory; myInMemoryResourceMatcher = theInMemoryResourceMatcher; @@ -74,12 +77,19 @@ public class ResourceChangeListenerRegistryImpl implements IResourceChangeListen * @throws IllegalArgumentException if theSearchParamMap cannot be evaluated in-memory */ @Override - public IResourceChangeListenerCache registerResourceResourceChangeListener(String theResourceName, SearchParameterMap theSearchParameterMap, IResourceChangeListener theResourceChangeListener, long theRemoteRefreshIntervalMs) { + public IResourceChangeListenerCache registerResourceResourceChangeListener( + String theResourceName, + SearchParameterMap theSearchParameterMap, + IResourceChangeListener theResourceChangeListener, + long theRemoteRefreshIntervalMs) { // Clone searchparameter map RuntimeResourceDefinition resourceDef = 
myFhirContext.getResourceDefinition(theResourceName); - InMemoryMatchResult inMemoryMatchResult = myInMemoryResourceMatcher.canBeEvaluatedInMemory(theSearchParameterMap, resourceDef); + InMemoryMatchResult inMemoryMatchResult = + myInMemoryResourceMatcher.canBeEvaluatedInMemory(theSearchParameterMap, resourceDef); if (!inMemoryMatchResult.supported()) { - throw new IllegalArgumentException(Msg.code(482) + "SearchParameterMap " + theSearchParameterMap + " cannot be evaluated in-memory: " + inMemoryMatchResult.getUnsupportedReason() + ". Only search parameter maps that can be evaluated in-memory may be registered."); + throw new IllegalArgumentException(Msg.code(482) + "SearchParameterMap " + theSearchParameterMap + + " cannot be evaluated in-memory: " + inMemoryMatchResult.getUnsupportedReason() + + ". Only search parameter maps that can be evaluated in-memory may be registered."); } return add(theResourceName, theResourceChangeListener, theSearchParameterMap, theRemoteRefreshIntervalMs); } @@ -99,8 +109,13 @@ public class ResourceChangeListenerRegistryImpl implements IResourceChangeListen myListenerEntries.remove(theResourceChangeListenerCache); } - private IResourceChangeListenerCache add(String theResourceName, IResourceChangeListener theResourceChangeListener, SearchParameterMap theMap, long theRemoteRefreshIntervalMs) { - ResourceChangeListenerCache retval = myResourceChangeListenerCacheFactory.newResourceChangeListenerCache(theResourceName, theMap, theResourceChangeListener, theRemoteRefreshIntervalMs); + private IResourceChangeListenerCache add( + String theResourceName, + IResourceChangeListener theResourceChangeListener, + SearchParameterMap theMap, + long theRemoteRefreshIntervalMs) { + ResourceChangeListenerCache retval = myResourceChangeListenerCacheFactory.newResourceChangeListenerCache( + theResourceName, theMap, theResourceChangeListener, theRemoteRefreshIntervalMs); myListenerEntries.add(retval); return retval; } @@ -146,8 +161,8 @@ public class ResourceChangeListenerRegistryImpl implements IResourceChangeListen @Override public Set getWatchedResourceNames() { return myListenerEntries.stream() - .map(ResourceChangeListenerCache::getResourceName) - .collect(Collectors.toSet()); + .map(ResourceChangeListenerCache::getResourceName) + .collect(Collectors.toSet()); } @Override diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeListenerRegistryInterceptor.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeListenerRegistryInterceptor.java index 07ecf3a5ca0..feec3df43ae 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeListenerRegistryInterceptor.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeListenerRegistryInterceptor.java @@ -41,10 +41,10 @@ import javax.annotation.PreDestroy; public class ResourceChangeListenerRegistryInterceptor { @Autowired private IInterceptorService myInterceptorBroadcaster; + @Autowired private IResourceChangeListenerRegistry myResourceChangeListenerRegistry; - @EventListener(classes = {ContextRefreshedEvent.class}) @Order(IHapiBootOrder.REGISTER_INTERCEPTORS) public void start() { diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeResult.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeResult.java index e3cb663cbd7..c5ca4cffa6d 100644 --- 
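A hedged sketch of calling the reformatted registration path above. The resource name, refresh interval, and listener body are placeholders, and the IResourceChangeListener callback signatures are assumed rather than taken from this patch; an empty SearchParameterMap should satisfy the in-memory check guarded by Msg.code(482):

    import ca.uhn.fhir.jpa.cache.IResourceChangeEvent;
    import ca.uhn.fhir.jpa.cache.IResourceChangeListener;
    import ca.uhn.fhir.jpa.cache.IResourceChangeListenerCache;
    import ca.uhn.fhir.jpa.cache.IResourceChangeListenerRegistry;
    import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
    import org.hl7.fhir.instance.model.api.IIdType;

    import java.util.Collection;

    class ListenerRegistrationSketch {
        IResourceChangeListenerCache register(IResourceChangeListenerRegistry theRegistry) {
            return theRegistry.registerResourceResourceChangeListener(
                    "SearchParameter",
                    new SearchParameterMap(),
                    new IResourceChangeListener() {
                        @Override
                        public void handleInit(Collection<IIdType> theResourceIds) {
                            // seed local caches from the initial id set
                        }

                        @Override
                        public void handleChange(IResourceChangeEvent theResourceChangeEvent) {
                            // react to created/updated/deleted ids
                        }
                    },
                    60_000); // remote refresh interval in ms (placeholder)
        }
    }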
a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeResult.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceChangeResult.java @@ -47,19 +47,23 @@ public class ResourceChangeResult { } public static ResourceChangeResult fromResourceChangeEvent(IResourceChangeEvent theResourceChangeEvent) { - return new ResourceChangeResult(theResourceChangeEvent.getCreatedResourceIds().size(), theResourceChangeEvent.getUpdatedResourceIds().size(), theResourceChangeEvent.getDeletedResourceIds().size()); + return new ResourceChangeResult( + theResourceChangeEvent.getCreatedResourceIds().size(), + theResourceChangeEvent.getUpdatedResourceIds().size(), + theResourceChangeEvent.getDeletedResourceIds().size()); } public ResourceChangeResult plus(ResourceChangeResult theResult) { - return new ResourceChangeResult(created + theResult.created, updated + theResult.updated, deleted + theResult.deleted); + return new ResourceChangeResult( + created + theResult.created, updated + theResult.updated, deleted + theResult.deleted); } @Override public String toString() { return new ToStringBuilder(this) - .append("created", created) - .append("updated", updated) - .append("deleted", deleted) - .toString(); + .append("created", created) + .append("updated", updated) + .append("deleted", deleted) + .toString(); } } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourcePersistentIdMap.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourcePersistentIdMap.java index 7cf97dde1f3..071dc8f1f20 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourcePersistentIdMap.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourcePersistentIdMap.java @@ -29,7 +29,8 @@ import java.util.Map; public class ResourcePersistentIdMap { private final Map myMap = new HashMap<>(); - public static ResourcePersistentIdMap fromResourcePersistentIds(List theResourcePersistentIds) { + public static ResourcePersistentIdMap fromResourcePersistentIds( + List theResourcePersistentIds) { ResourcePersistentIdMap retval = new ResourcePersistentIdMap(); theResourcePersistentIds.forEach(retval::add); return retval; diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceVersionMap.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceVersionMap.java index 0ed0502b0e9..81b7b4f580f 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceVersionMap.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceVersionMap.java @@ -41,6 +41,7 @@ public class ResourceVersionMap { private final Set mySourceIds = new HashSet<>(); // Key versionless id, value version private final Map myMap = new HashMap<>(); + private ResourceVersionMap() {} public static ResourceVersionMap fromResourceTableEntities(List theEntities) { diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/partition/IRequestPartitionHelperSvc.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/partition/IRequestPartitionHelperSvc.java index 2191fac8ca2..ea03d2cb32d 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/partition/IRequestPartitionHelperSvc.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/partition/IRequestPartitionHelperSvc.java @@ -26,45 +26,54 @@ import ca.uhn.fhir.rest.api.server.RequestDetails; 
import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; +import java.util.Set; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.Set; public interface IRequestPartitionHelperSvc { @Nonnull - RequestPartitionId determineReadPartitionForRequest(@Nullable RequestDetails theRequest, ReadPartitionIdRequestDetails theDetails); + RequestPartitionId determineReadPartitionForRequest( + @Nullable RequestDetails theRequest, ReadPartitionIdRequestDetails theDetails); @Nonnull - default RequestPartitionId determineReadPartitionForRequestForRead(RequestDetails theRequest, String theResourceType, @Nonnull IIdType theId) { - ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forRead(theResourceType, theId, theId.hasVersionIdPart()); + default RequestPartitionId determineReadPartitionForRequestForRead( + RequestDetails theRequest, String theResourceType, @Nonnull IIdType theId) { + ReadPartitionIdRequestDetails details = + ReadPartitionIdRequestDetails.forRead(theResourceType, theId, theId.hasVersionIdPart()); return determineReadPartitionForRequest(theRequest, details); } @Nonnull - default RequestPartitionId determineReadPartitionForRequestForSearchType(RequestDetails theRequest, String theResourceType, SearchParameterMap theParams, IBaseResource theConditionalOperationTargetOrNull) { - ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forSearchType(theResourceType, theParams, theConditionalOperationTargetOrNull); + default RequestPartitionId determineReadPartitionForRequestForSearchType( + RequestDetails theRequest, + String theResourceType, + SearchParameterMap theParams, + IBaseResource theConditionalOperationTargetOrNull) { + ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forSearchType( + theResourceType, theParams, theConditionalOperationTargetOrNull); return determineReadPartitionForRequest(theRequest, details); } RequestPartitionId determineGenericPartitionForRequest(RequestDetails theRequestDetails); @Nonnull - default RequestPartitionId determineReadPartitionForRequestForHistory(RequestDetails theRequest, String theResourceType, IIdType theIdType) { + default RequestPartitionId determineReadPartitionForRequestForHistory( + RequestDetails theRequest, String theResourceType, IIdType theIdType) { ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forHistory(theResourceType, theIdType); return determineReadPartitionForRequest(theRequest, details); } @Nonnull - default void validateHasPartitionPermissions(RequestDetails theRequest, String theResourceType, RequestPartitionId theRequestPartitionId) { - } + default void validateHasPartitionPermissions( + RequestDetails theRequest, String theResourceType, RequestPartitionId theRequestPartitionId) {} @Nonnull - RequestPartitionId determineCreatePartitionForRequest(@Nullable RequestDetails theRequest, @Nonnull IBaseResource theResource, @Nonnull String theResourceType); + RequestPartitionId determineCreatePartitionForRequest( + @Nullable RequestDetails theRequest, @Nonnull IBaseResource theResource, @Nonnull String theResourceType); @Nonnull Set toReadPartitions(@Nonnull RequestPartitionId theRequestPartitionId); boolean isResourcePartitionable(String theResourceType); - } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/MatchUrlService.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/MatchUrlService.java index 
02662779596..82a812d2864 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/MatchUrlService.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/MatchUrlService.java @@ -52,10 +52,12 @@ public class MatchUrlService { @Autowired private FhirContext myFhirContext; + @Autowired private ISearchParamRegistry mySearchParamRegistry; - public SearchParameterMap translateMatchUrl(String theMatchUrl, RuntimeResourceDefinition theResourceDefinition, Flag... theFlags) { + public SearchParameterMap translateMatchUrl( + String theMatchUrl, RuntimeResourceDefinition theResourceDefinition, Flag... theFlags) { SearchParameterMap paramMap = new SearchParameterMap(); List parameters = UrlUtil.translateMatchUrl(theMatchUrl); @@ -78,7 +80,8 @@ public class MatchUrlService { } } - QualifiedParamList paramList = QualifiedParamList.splitQueryStringByCommasIgnoreEscape(qualifier, next.getValue()); + QualifiedParamList paramList = + QualifiedParamList.splitQueryStringByCommasIgnoreEscape(qualifier, next.getValue()); nameToParamLists.put(paramName, paramList); } @@ -94,7 +97,9 @@ public class MatchUrlService { if (Constants.PARAM_LASTUPDATED.equals(nextParamName)) { if (paramList != null && paramList.size() > 0) { if (paramList.size() > 2) { - throw new InvalidRequestException(Msg.code(484) + "Failed to parse match URL[" + theMatchUrl + "] - Can not have more than 2 " + Constants.PARAM_LASTUPDATED + " parameter repetitions"); + throw new InvalidRequestException(Msg.code(484) + "Failed to parse match URL[" + theMatchUrl + + "] - Can not have more than 2 " + Constants.PARAM_LASTUPDATED + + " parameter repetitions"); } else { DateRangeParam p1 = new DateRangeParam(); p1.setValuesAsQueryTokens(myFhirContext, nextParamName, paramList); @@ -102,59 +107,75 @@ public class MatchUrlService { } } } else if (Constants.PARAM_HAS.equals(nextParamName)) { - IQueryParameterAnd param = JpaParamUtil.parseQueryParams(myFhirContext, RestSearchParameterTypeEnum.HAS, nextParamName, paramList); + IQueryParameterAnd param = JpaParamUtil.parseQueryParams( + myFhirContext, RestSearchParameterTypeEnum.HAS, nextParamName, paramList); paramMap.add(nextParamName, param); } else if (Constants.PARAM_COUNT.equals(nextParamName)) { - if (paramList != null && paramList.size() > 0 && paramList.get(0).size() > 0) { + if (paramList != null + && paramList.size() > 0 + && paramList.get(0).size() > 0) { String intString = paramList.get(0).get(0); try { paramMap.setCount(Integer.parseInt(intString)); } catch (NumberFormatException e) { - throw new InvalidRequestException(Msg.code(485) + "Invalid " + Constants.PARAM_COUNT + " value: " + intString); + throw new InvalidRequestException( + Msg.code(485) + "Invalid " + Constants.PARAM_COUNT + " value: " + intString); } } } else if (Constants.PARAM_SEARCH_TOTAL_MODE.equals(nextParamName)) { - if (paramList != null && ! paramList.isEmpty() && ! 
paramList.get(0).isEmpty()) { + if (paramList != null + && !paramList.isEmpty() + && !paramList.get(0).isEmpty()) { String totalModeEnumStr = paramList.get(0).get(0); try { paramMap.setSearchTotalMode(SearchTotalModeEnum.valueOf(totalModeEnumStr)); } catch (IllegalArgumentException e) { - throw new InvalidRequestException(Msg.code(2078) + "Invalid " + Constants.PARAM_SEARCH_TOTAL_MODE + " value: " + totalModeEnumStr); + throw new InvalidRequestException(Msg.code(2078) + "Invalid " + + Constants.PARAM_SEARCH_TOTAL_MODE + " value: " + totalModeEnumStr); } } } else if (Constants.PARAM_OFFSET.equals(nextParamName)) { - if (paramList != null && paramList.size() > 0 && paramList.get(0).size() > 0) { + if (paramList != null + && paramList.size() > 0 + && paramList.get(0).size() > 0) { String intString = paramList.get(0).get(0); try { paramMap.setOffset(Integer.parseInt(intString)); } catch (NumberFormatException e) { - throw new InvalidRequestException(Msg.code(486) + "Invalid " + Constants.PARAM_OFFSET + " value: " + intString); + throw new InvalidRequestException( + Msg.code(486) + "Invalid " + Constants.PARAM_OFFSET + " value: " + intString); } } } else if (ResourceMetaParams.RESOURCE_META_PARAMS.containsKey(nextParamName)) { - if (isNotBlank(paramList.get(0).getQualifier()) && paramList.get(0).getQualifier().startsWith(".")) { - throw new InvalidRequestException(Msg.code(487) + "Invalid parameter chain: " + nextParamName + paramList.get(0).getQualifier()); + if (isNotBlank(paramList.get(0).getQualifier()) + && paramList.get(0).getQualifier().startsWith(".")) { + throw new InvalidRequestException(Msg.code(487) + "Invalid parameter chain: " + nextParamName + + paramList.get(0).getQualifier()); } IQueryParameterAnd type = newInstanceAnd(nextParamName); type.setValuesAsQueryTokens(myFhirContext, nextParamName, (paramList)); paramMap.add(nextParamName, type); } else if (Constants.PARAM_SOURCE.equals(nextParamName)) { - IQueryParameterAnd param = JpaParamUtil.parseQueryParams(myFhirContext, RestSearchParameterTypeEnum.URI, nextParamName, paramList); + IQueryParameterAnd param = JpaParamUtil.parseQueryParams( + myFhirContext, RestSearchParameterTypeEnum.URI, nextParamName, paramList); paramMap.add(nextParamName, param); } else if (JpaConstants.PARAM_DELETE_EXPUNGE.equals(nextParamName)) { paramMap.setDeleteExpunge(true); } else if (Constants.PARAM_LIST.equals(nextParamName)) { - IQueryParameterAnd param = JpaParamUtil.parseQueryParams(myFhirContext, RestSearchParameterTypeEnum.TOKEN, nextParamName, paramList); + IQueryParameterAnd param = JpaParamUtil.parseQueryParams( + myFhirContext, RestSearchParameterTypeEnum.TOKEN, nextParamName, paramList); paramMap.add(nextParamName, param); } else if (nextParamName.startsWith("_")) { // ignore these since they aren't search params (e.g. 
_sort) } else { - RuntimeSearchParam paramDef = mySearchParamRegistry.getActiveSearchParam(theResourceDefinition.getName(), nextParamName); + RuntimeSearchParam paramDef = + mySearchParamRegistry.getActiveSearchParam(theResourceDefinition.getName(), nextParamName); if (paramDef == null) { throw throwUnrecognizedParamException(theMatchUrl, theResourceDefinition, nextParamName); } - IQueryParameterAnd param = JpaParamUtil.parseQueryParams(mySearchParamRegistry, myFhirContext, paramDef, nextParamName, paramList); + IQueryParameterAnd param = JpaParamUtil.parseQueryParams( + mySearchParamRegistry, myFhirContext, paramDef, nextParamName, paramList); paramMap.add(nextParamName, param); } } @@ -180,8 +201,14 @@ public class MatchUrlService { return myParamName; } } - private InvalidRequestException throwUnrecognizedParamException(String theMatchUrl, RuntimeResourceDefinition theResourceDefinition, String nextParamName) { - return new UnrecognizedSearchParameterException(Msg.code(488) + "Failed to parse match URL[" + theMatchUrl + "] - Resource type " + theResourceDefinition.getName() + " does not have a parameter with name: " + nextParamName, theResourceDefinition.getName(), nextParamName); + + private InvalidRequestException throwUnrecognizedParamException( + String theMatchUrl, RuntimeResourceDefinition theResourceDefinition, String nextParamName) { + return new UnrecognizedSearchParameterException( + Msg.code(488) + "Failed to parse match URL[" + theMatchUrl + "] - Resource type " + + theResourceDefinition.getName() + " does not have a parameter with name: " + nextParamName, + theResourceDefinition.getName(), + nextParamName); } private IQueryParameterAnd newInstanceAnd(String theParamType) { @@ -214,8 +241,8 @@ public class MatchUrlService { // nothing } - abstract void process(String theParamName, List theValues, SearchParameterMap theMapToPopulate); - + abstract void process( + String theParamName, List theValues, SearchParameterMap theMapToPopulate); } /** @@ -229,19 +256,19 @@ public class MatchUrlService { if (Constants.PARAM_INCLUDE.equals(theParamName)) { for (QualifiedParamList nextQualifiedList : theValues) { for (String nextValue : nextQualifiedList) { - theMapToPopulate.addInclude(new Include(nextValue, ParameterUtil.isIncludeIterate(nextQualifiedList.getQualifier()))); + theMapToPopulate.addInclude(new Include( + nextValue, ParameterUtil.isIncludeIterate(nextQualifiedList.getQualifier()))); } } } else if (Constants.PARAM_REVINCLUDE.equals(theParamName)) { for (QualifiedParamList nextQualifiedList : theValues) { for (String nextValue : nextQualifiedList) { - theMapToPopulate.addRevInclude(new Include(nextValue, ParameterUtil.isIncludeIterate(nextQualifiedList.getQualifier()))); + theMapToPopulate.addRevInclude(new Include( + nextValue, ParameterUtil.isIncludeIterate(nextQualifiedList.getQualifier()))); } } } - } }; } - } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/ResourceSearch.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/ResourceSearch.java index d62683929e1..e37924741bf 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/ResourceSearch.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/ResourceSearch.java @@ -31,7 +31,10 @@ public class ResourceSearch { private final SearchParameterMap mySearchParameterMap; private final RequestPartitionId myRequestPartitionId; - public ResourceSearch(RuntimeResourceDefinition 
theRuntimeResourceDefinition, SearchParameterMap theSearchParameterMap, RequestPartitionId theRequestPartitionId) { + public ResourceSearch( + RuntimeResourceDefinition theRuntimeResourceDefinition, + SearchParameterMap theSearchParameterMap, + RequestPartitionId theRequestPartitionId) { myRuntimeResourceDefinition = theRuntimeResourceDefinition; mySearchParameterMap = theSearchParameterMap; myRequestPartitionId = theRequestPartitionId; diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/SearchParameterMap.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/SearchParameterMap.java index c1f84d6e388..0d5784a5818 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/SearchParameterMap.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/SearchParameterMap.java @@ -43,7 +43,6 @@ import org.apache.commons.lang3.builder.CompareToBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; -import javax.annotation.Nonnull; import java.io.Serializable; import java.util.ArrayList; import java.util.Collection; @@ -56,6 +55,7 @@ import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import static ca.uhn.fhir.rest.param.ParamPrefixEnum.GREATERTHAN_OR_EQUALS; import static ca.uhn.fhir.rest.param.ParamPrefixEnum.LESSTHAN_OR_EQUALS; @@ -134,7 +134,6 @@ public class SearchParameterMap implements Serializable { map.put(entry.getKey(), newAndParams); } - return map; } @@ -404,7 +403,6 @@ public class SearchParameterMap implements Serializable { return this; } - /** * If set, tells the server the maximum number of observations to return for each * observation code in the result set of a lastn operation @@ -422,7 +420,6 @@ public class SearchParameterMap implements Serializable { return this; } - /** * This method creates a URL query string representation of the parameters in this * object, excluding the part before the parameters, e.g. @@ -455,7 +452,6 @@ public class SearchParameterMap implements Serializable { if (nextValuesOrsOut.size() > 0) { nextValuesAndsOut.add(nextValuesOrsOut); } - } // for AND nextValuesAndsOut.sort(new QueryParameterOrComparator(theCtx)); @@ -492,7 +488,6 @@ public class SearchParameterMap implements Serializable { b.append(UrlUtil.escapeUrlParam(valueAsQueryToken)); } } - } // for keys SortSpec sort = getSort(); @@ -565,8 +560,9 @@ public class SearchParameterMap implements Serializable { b.append(getSearchTotalMode().getCode()); } - //Contained mode - //For some reason, instead of null here, we default to false. That said, ommitting it is identical to setting it to false. + // Contained mode + // For some reason, instead of null here, we default to false. That said, ommitting it is identical to setting + // it to false. 
if (getSearchContainedMode() != SearchContainedModeEnum.FALSE) { addUrlParamSeparator(b); b.append(Constants.PARAM_CONTAINED); @@ -582,7 +578,10 @@ public class SearchParameterMap implements Serializable { } private boolean isNotEqualsComparator(DateParam theLowerBound, DateParam theUpperBound) { - return theLowerBound != null && theUpperBound != null && theLowerBound.getPrefix().equals(NOT_EQUAL) && theUpperBound.getPrefix().equals(NOT_EQUAL); + return theLowerBound != null + && theUpperBound != null + && theLowerBound.getPrefix().equals(NOT_EQUAL) + && theUpperBound.getPrefix().equals(NOT_EQUAL); } /** @@ -611,7 +610,6 @@ public class SearchParameterMap implements Serializable { return b.toString(); } - public void clean() { for (Map.Entry>> nextParamEntry : this.entrySet()) { String nextParamName = nextParamEntry.getKey(); @@ -624,19 +622,16 @@ public class SearchParameterMap implements Serializable { * Given a particular named parameter, e.g. `name`, iterate over AndOrParams and remove any which are empty. */ private void cleanParameter(String theParamName, List> theAndOrParams) { - theAndOrParams - .forEach( - orList -> { - List emptyParameters = orList.stream() - .filter(nextOr -> nextOr.getMissing() == null) - .filter(nextOr -> nextOr instanceof QuantityParam) - .filter(nextOr -> isBlank(((QuantityParam) nextOr).getValueAsString())) - .collect(Collectors.toList()); + theAndOrParams.forEach(orList -> { + List emptyParameters = orList.stream() + .filter(nextOr -> nextOr.getMissing() == null) + .filter(nextOr -> nextOr instanceof QuantityParam) + .filter(nextOr -> isBlank(((QuantityParam) nextOr).getValueAsString())) + .collect(Collectors.toList()); - ourLog.debug("Ignoring empty parameter: {}", theParamName); - orList.removeAll(emptyParameters); - } - ); + ourLog.debug("Ignoring empty parameter: {}", theParamName); + orList.removeAll(emptyParameters); + }); theAndOrParams.removeIf(List::isEmpty); } @@ -719,9 +714,9 @@ public class SearchParameterMap implements Serializable { List> andList = mySearchParameterMap.remove(theName); if (andList != null) { for (List orList : andList) { - if (!orList.isEmpty() && - StringUtils.defaultString(orList.get(0).getQueryParameterQualifier(), "") - .equals(theModifier)) { + if (!orList.isEmpty() + && StringUtils.defaultString(orList.get(0).getQueryParameterQualifier(), "") + .equals(theModifier)) { matchingParameters.add(orList); } else { remainderParameters.add(orList); @@ -734,10 +729,10 @@ public class SearchParameterMap implements Serializable { mySearchParameterMap.put(theName, remainderParameters); } return matchingParameters; - } - public List> removeByNameAndModifier(String theName, @Nonnull TokenParamModifier theModifier) { + public List> removeByNameAndModifier( + String theName, @Nonnull TokenParamModifier theModifier) { return removeByNameAndModifier(theName, theModifier.getValue()); } @@ -767,7 +762,6 @@ public class SearchParameterMap implements Serializable { } return retVal; - } public Map>> removeByQualifier(@Nonnull TokenParamModifier theModifier) { @@ -871,7 +865,6 @@ public class SearchParameterMap implements Serializable { } return retVal; } - } public static class QueryParameterOrComparator implements Comparator> { @@ -886,7 +879,6 @@ public class SearchParameterMap implements Serializable { // These lists will never be empty return SearchParameterMap.compare(myCtx, theO1.get(0), theO2.get(0)); } - } public static class QueryParameterTypeComparator implements Comparator { @@ -901,8 +893,5 @@ public class SearchParameterMap 
implements Serializable { public int compare(IQueryParameterType theO1, IQueryParameterType theO2) { return SearchParameterMap.compare(myCtx, theO1, theO2); } - } - - } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/config/SearchParamConfig.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/config/SearchParamConfig.java index fa2b85d238f..f1522621382 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/config/SearchParamConfig.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/config/SearchParamConfig.java @@ -59,7 +59,6 @@ public class SearchParamConfig { @Bean public ISearchParamExtractor searchParamExtractor() { switch (myFhirContext.getVersion().getVersion()) { - case DSTU2: return new SearchParamExtractorDstu2(); case DSTU3: @@ -73,7 +72,8 @@ public class SearchParamConfig { case DSTU2_HL7ORG: case DSTU2_1: default: - throw new IllegalStateException(Msg.code(501) + "Can not handle version: " + myFhirContext.getVersion().getVersion()); + throw new IllegalStateException(Msg.code(501) + "Can not handle version: " + + myFhirContext.getVersion().getVersion()); } } @@ -115,8 +115,12 @@ public class SearchParamConfig { } @Bean - IResourceChangeListenerRegistry resourceChangeListenerRegistry(FhirContext theFhirContext, ResourceChangeListenerCacheFactory theResourceChangeListenerCacheFactory, InMemoryResourceMatcher theInMemoryResourceMatcher) { - return new ResourceChangeListenerRegistryImpl(theFhirContext, theResourceChangeListenerCacheFactory, theInMemoryResourceMatcher); + IResourceChangeListenerRegistry resourceChangeListenerRegistry( + FhirContext theFhirContext, + ResourceChangeListenerCacheFactory theResourceChangeListenerCacheFactory, + InMemoryResourceMatcher theInMemoryResourceMatcher) { + return new ResourceChangeListenerRegistryImpl( + theFhirContext, theResourceChangeListenerCacheFactory, theInMemoryResourceMatcher); } @Bean @@ -131,8 +135,13 @@ public class SearchParamConfig { @Bean @Scope("prototype") - ResourceChangeListenerCache registeredResourceChangeListener(String theResourceName, IResourceChangeListener theResourceChangeListener, SearchParameterMap theSearchParameterMap, long theRemoteRefreshIntervalMs) { - return new ResourceChangeListenerCache(theResourceName, theResourceChangeListener, theSearchParameterMap, theRemoteRefreshIntervalMs); + ResourceChangeListenerCache registeredResourceChangeListener( + String theResourceName, + IResourceChangeListener theResourceChangeListener, + SearchParameterMap theSearchParameterMap, + long theRemoteRefreshIntervalMs) { + return new ResourceChangeListenerCache( + theResourceName, theResourceChangeListener, theSearchParameterMap, theRemoteRefreshIntervalMs); } @Bean @@ -140,5 +149,4 @@ public class SearchParamConfig { SearchParameterHelper searchParameterHelper(FhirContext theFhirContext) { return new SearchParameterHelper(searchParameterCanonicalizer(theFhirContext)); } - } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java index 3136e862fa7..ee5228052c5 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java @@ -29,8 
+29,8 @@ import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.context.RuntimeSearchParam; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.model.config.PartitionSettings; -import ca.uhn.fhir.jpa.model.entity.*; import ca.uhn.fhir.jpa.model.entity.StorageSettings; +import ca.uhn.fhir.jpa.model.entity.*; import ca.uhn.fhir.jpa.model.util.UcumServiceUtil; import ca.uhn.fhir.jpa.searchparam.SearchParamConstants; import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil; @@ -63,16 +63,9 @@ import org.hl7.fhir.instance.model.api.IBaseReference; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import org.hl7.fhir.r5.model.Base; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import javax.annotation.PostConstruct; -import javax.measure.quantity.Quantity; -import javax.measure.unit.NonSI; -import javax.measure.unit.Unit; import java.math.BigDecimal; import java.util.ArrayList; import java.util.Arrays; @@ -86,12 +79,17 @@ import java.util.Optional; import java.util.Set; import java.util.TreeSet; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import javax.annotation.PostConstruct; +import javax.measure.quantity.Quantity; +import javax.measure.unit.NonSI; +import javax.measure.unit.Unit; import static ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum.DATE; import static ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum.REFERENCE; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; -import static org.apache.commons.lang3.StringUtils.startsWith; import static org.apache.commons.lang3.StringUtils.trim; public abstract class BaseSearchParamExtractor implements ISearchParamExtractor { @@ -106,14 +104,19 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor @Autowired protected ApplicationContext myApplicationContext; + @Autowired private FhirContext myContext; + @Autowired private ISearchParamRegistry mySearchParamRegistry; + @Autowired private StorageSettings myStorageSettings; + @Autowired private PartitionSettings myPartitionSettings; + private Set myIgnoredForSearchDatatypes; private BaseRuntimeChildDefinition myQuantityValueValueChild; private BaseRuntimeChildDefinition myQuantitySystemValueChild; @@ -169,7 +172,11 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor /** * UNIT TEST constructor */ - BaseSearchParamExtractor(StorageSettings theStorageSettings, PartitionSettings thePartitionSettings, FhirContext theCtx, ISearchParamRegistry theSearchParamRegistry) { + BaseSearchParamExtractor( + StorageSettings theStorageSettings, + PartitionSettings thePartitionSettings, + FhirContext theCtx, + ISearchParamRegistry theSearchParamRegistry) { Validate.notNull(theStorageSettings); Validate.notNull(theCtx); Validate.notNull(theSearchParamRegistry); @@ -183,7 +190,12 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor @Override public SearchParamSet extractResourceLinks(IBaseResource theResource, boolean theWantLocalReferences) { IExtractor extractor = createReferenceExtractor(); - return extractSearchParams(theResource, extractor, RestSearchParameterTypeEnum.REFERENCE, theWantLocalReferences, ISearchParamExtractor.ALL_PARAMS); + 
return extractSearchParams( + theResource, + extractor, + RestSearchParameterTypeEnum.REFERENCE, + theWantLocalReferences, + ISearchParamExtractor.ALL_PARAMS); } private IExtractor createReferenceExtractor() { @@ -205,7 +217,6 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor } else { return extractParamsAsQueryTokens(theSearchParam, theResource, extractor); } - } @Nonnull @@ -239,12 +250,14 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor case COMPOSITE: case HAS: default: - throw new UnsupportedOperationException(Msg.code(503) + "Type " + theSearchParam.getParamType() + " not supported for extraction"); + throw new UnsupportedOperationException( + Msg.code(503) + "Type " + theSearchParam.getParamType() + " not supported for extraction"); } return extractor; } - private List extractReferenceParamsAsQueryTokens(RuntimeSearchParam theSearchParam, IBaseResource theResource, IExtractor theExtractor) { + private List extractReferenceParamsAsQueryTokens( + RuntimeSearchParam theSearchParam, IBaseResource theResource, IExtractor theExtractor) { SearchParamSet params = new SearchParamSet<>(); extractSearchParam(theSearchParam, theResource, theExtractor, params, false); return refsToStringList(params); @@ -252,12 +265,13 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor private List refsToStringList(SearchParamSet theParams) { return theParams.stream() - .map(PathAndRef::getRef) - .map(ref -> ref.getReferenceElement().toUnqualifiedVersionless().getValue()) - .collect(Collectors.toList()); + .map(PathAndRef::getRef) + .map(ref -> ref.getReferenceElement().toUnqualifiedVersionless().getValue()) + .collect(Collectors.toList()); } - private List extractParamsAsQueryTokens(RuntimeSearchParam theSearchParam, IBaseResource theResource, IExtractor theExtractor) { + private List extractParamsAsQueryTokens( + RuntimeSearchParam theSearchParam, IBaseResource theResource, IExtractor theExtractor) { SearchParamSet params = new SearchParamSet<>(); extractSearchParam(theSearchParam, theResource, theExtractor, params, false); return toStringList(params); @@ -265,19 +279,20 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor private List toStringList(SearchParamSet theParams) { return theParams.stream() - .map(param -> param.toQueryParameterType().getValueAsQueryToken(myContext)) - .collect(Collectors.toList()); + .map(param -> param.toQueryParameterType().getValueAsQueryToken(myContext)) + .collect(Collectors.toList()); } @Override - public SearchParamSet extractSearchParamComposites(IBaseResource theResource, ISearchParamFilter theSearchParamFilter) { + public SearchParamSet extractSearchParamComposites( + IBaseResource theResource, ISearchParamFilter theSearchParamFilter) { IExtractor extractor = createCompositeExtractor(theResource); - return extractSearchParams(theResource, extractor, RestSearchParameterTypeEnum.COMPOSITE, false, theSearchParamFilter); + return extractSearchParams( + theResource, extractor, RestSearchParameterTypeEnum.COMPOSITE, false, theSearchParamFilter); } private IExtractor createCompositeExtractor(IBaseResource theResource) { return new CompositeExtractor(theResource); - } /** @@ -303,11 +318,18 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor * @param theWantLocalReferences passed down to reference extraction */ @Override - public void extract(SearchParamSet theParams, RuntimeSearchParam theCompositeSearchParam, IBase 
theValue, String thePath, boolean theWantLocalReferences) { + public void extract( + SearchParamSet theParams, + RuntimeSearchParam theCompositeSearchParam, + IBase theValue, + String thePath, + boolean theWantLocalReferences) { // skip broken SPs if (!isExtractableComposite(theCompositeSearchParam)) { - ourLog.info("CompositeExtractor - skipping unsupported search parameter {}", theCompositeSearchParam.getName()); + ourLog.info( + "CompositeExtractor - skipping unsupported search parameter {}", + theCompositeSearchParam.getName()); return; } @@ -321,9 +343,15 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor String expression = component.getExpression(); RuntimeSearchParam componentSp = mySearchParamRegistry.getActiveSearchParamByUrl(componentSpRef); - Validate.notNull(componentSp, "Misconfigured SP %s - failed to load component %s", compositeSpName, componentSpRef); + Validate.notNull( + componentSp, + "Misconfigured SP %s - failed to load component %s", + compositeSpName, + componentSpRef); - SearchParamSet componentIndexedSearchParams = extractCompositeComponentIndexData(theValue, componentSp, expression, theWantLocalReferences, theCompositeSearchParam); + SearchParamSet componentIndexedSearchParams = + extractCompositeComponentIndexData( + theValue, componentSp, expression, theWantLocalReferences, theCompositeSearchParam); if (componentIndexedSearchParams.isEmpty()) { // If any of the components are empty, no search can ever match. Short circuit, and bail out. return; @@ -347,12 +375,26 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor * @return the extracted index beans for theFocusElement */ @Nonnull - private SearchParamSet extractCompositeComponentIndexData(IBase theFocusElement, RuntimeSearchParam theComponentSearchParam, String theSubPathExpression, boolean theWantLocalReferences, RuntimeSearchParam theCompositeSearchParam) { + private SearchParamSet extractCompositeComponentIndexData( + IBase theFocusElement, + RuntimeSearchParam theComponentSearchParam, + String theSubPathExpression, + boolean theWantLocalReferences, + RuntimeSearchParam theCompositeSearchParam) { IExtractor componentExtractor = createExtractor(theComponentSearchParam, myResource); SearchParamSet componentIndexData = new SearchParamSet<>(); - extractSearchParam(theComponentSearchParam, theSubPathExpression, theFocusElement, componentExtractor, componentIndexData, theWantLocalReferences); - ourLog.trace("CompositeExtractor - extracted {} index values for {}", componentIndexData.size(), theComponentSearchParam.getName()); + extractSearchParam( + theComponentSearchParam, + theSubPathExpression, + theFocusElement, + componentExtractor, + componentIndexData, + theWantLocalReferences); + ourLog.trace( + "CompositeExtractor - extracted {} index values for {}", + componentIndexData.size(), + theComponentSearchParam.getName()); return componentIndexData; } @@ -365,43 +407,51 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor */ private boolean isExtractableComposite(RuntimeSearchParam theSearchParam) { // this is a composite SP - return RestSearchParameterTypeEnum.COMPOSITE.equals(theSearchParam.getParamType()) && - theSearchParam.getComponents().stream() - .noneMatch(this::isNotExtractableCompositeComponent); + return RestSearchParameterTypeEnum.COMPOSITE.equals(theSearchParam.getParamType()) + && theSearchParam.getComponents().stream().noneMatch(this::isNotExtractableCompositeComponent); } private boolean 
isNotExtractableCompositeComponent(RuntimeSearchParam.Component c) { RuntimeSearchParam componentSearchParam = mySearchParamRegistry.getActiveSearchParamByUrl(c.getReference()); return // Does the sub-param link work? - componentSearchParam == null || + componentSearchParam == null + || // Is this the right type? - RestSearchParameterTypeEnum.COMPOSITE.equals(componentSearchParam.getParamType()) || + RestSearchParameterTypeEnum.COMPOSITE.equals(componentSearchParam.getParamType()) + || - // Bug workaround: the component expressions are null in the FhirContextSearchParamRegistry. We can't do anything with them. - c.getExpression() == null || + // Bug workaround: the component expressions are null in the FhirContextSearchParamRegistry. We + // can't do anything with them. + c.getExpression() == null + || - // TODO mb Bug workaround: we don't support the %resource variable, but standard SPs on MolecularSequence use it. + // TODO mb Bug workaround: we don't support the %resource variable, but standard SPs on + // MolecularSequence use it. // Skip them for now. c.getExpression().contains("%resource"); } - } @Override - public SearchParamSet extractSearchParamComboUnique(String theResourceType, ResourceIndexedSearchParams theParams){ + public SearchParamSet extractSearchParamComboUnique( + String theResourceType, ResourceIndexedSearchParams theParams) { SearchParamSet retVal = new SearchParamSet<>(); - List runtimeComboUniqueParams = mySearchParamRegistry.getActiveComboSearchParams(theResourceType, ComboSearchParamType.UNIQUE); + List runtimeComboUniqueParams = + mySearchParamRegistry.getActiveComboSearchParams(theResourceType, ComboSearchParamType.UNIQUE); for (RuntimeSearchParam runtimeParam : runtimeComboUniqueParams) { - Set comboUniqueParams = createComboUniqueParam(theResourceType, theParams, runtimeParam); + Set comboUniqueParams = + createComboUniqueParam(theResourceType, theParams, runtimeParam); retVal.addAll(comboUniqueParams); } return retVal; } - private SearchParamSet createComboUniqueParam(String theResourceType, ResourceIndexedSearchParams theParams, RuntimeSearchParam theRuntimeParam) { + private SearchParamSet createComboUniqueParam( + String theResourceType, ResourceIndexedSearchParams theParams, RuntimeSearchParam theRuntimeParam) { SearchParamSet retVal = new SearchParamSet<>(); - Set queryStringsToPopulate = extractParameterCombinationsForComboParam(theParams, theResourceType, theRuntimeParam); + Set queryStringsToPopulate = + extractParameterCombinationsForComboParam(theParams, theResourceType, theRuntimeParam); for (String nextQueryString : queryStringsToPopulate) { ourLog.trace("Adding composite unique SP: {}", nextQueryString); @@ -414,20 +464,25 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor } @Override - public SearchParamSet extractSearchParamComboNonUnique(String theResourceType, ResourceIndexedSearchParams theParams){ + public SearchParamSet extractSearchParamComboNonUnique( + String theResourceType, ResourceIndexedSearchParams theParams) { SearchParamSet retVal = new SearchParamSet<>(); - List runtimeComboNonUniqueParams = mySearchParamRegistry.getActiveComboSearchParams(theResourceType, ComboSearchParamType.NON_UNIQUE); + List runtimeComboNonUniqueParams = + mySearchParamRegistry.getActiveComboSearchParams(theResourceType, ComboSearchParamType.NON_UNIQUE); for (RuntimeSearchParam runtimeParam : runtimeComboNonUniqueParams) { - Set comboNonUniqueParams = createComboNonUniqueParam(theResourceType, theParams, runtimeParam); + Set 
comboNonUniqueParams = + createComboNonUniqueParam(theResourceType, theParams, runtimeParam); retVal.addAll(comboNonUniqueParams); } return retVal; } - private SearchParamSet createComboNonUniqueParam(String theResourceType, ResourceIndexedSearchParams theParams, RuntimeSearchParam theRuntimeParam) { + private SearchParamSet createComboNonUniqueParam( + String theResourceType, ResourceIndexedSearchParams theParams, RuntimeSearchParam theRuntimeParam) { SearchParamSet retVal = new SearchParamSet<>(); - Set queryStringsToPopulate = extractParameterCombinationsForComboParam(theParams, theResourceType, theRuntimeParam); + Set queryStringsToPopulate = + extractParameterCombinationsForComboParam(theParams, theResourceType, theRuntimeParam); for (String nextQueryString : queryStringsToPopulate) { ourLog.trace("Adding composite unique SP: {}", nextQueryString); @@ -441,12 +496,15 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor } @Nonnull - private Set extractParameterCombinationsForComboParam(ResourceIndexedSearchParams theParams, String theResourceType, RuntimeSearchParam theParam) { + private Set extractParameterCombinationsForComboParam( + ResourceIndexedSearchParams theParams, String theResourceType, RuntimeSearchParam theParam) { List> partsChoices = new ArrayList<>(); - List compositeComponents = JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, theParam); + List compositeComponents = + JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, theParam); for (RuntimeSearchParam nextCompositeOf : compositeComponents) { - Collection paramsListForCompositePart = findParameterIndexes(theParams, nextCompositeOf); + Collection paramsListForCompositePart = + findParameterIndexes(theParams, nextCompositeOf); Collection linksForCompositePart = null; switch (nextCompositeOf.getParamType()) { @@ -492,7 +550,9 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor String value = nextParamAsClientParam.getValueAsQueryToken(myContext); RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(theResourceType, key); - if (theParam.getComboSearchParamType() == ComboSearchParamType.NON_UNIQUE && param != null && param.getParamType() == RestSearchParameterTypeEnum.STRING) { + if (theParam.getComboSearchParamType() == ComboSearchParamType.NON_UNIQUE + && param != null + && param.getParamType() == RestSearchParameterTypeEnum.STRING) { value = StringUtil.normalizeStringForSearchIndexing(value); } @@ -521,7 +581,8 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor } @Nullable - private Collection findParameterIndexes(ResourceIndexedSearchParams theParams, RuntimeSearchParam nextCompositeOf) { + private Collection findParameterIndexes( + ResourceIndexedSearchParams theParams, RuntimeSearchParam nextCompositeOf) { Collection paramsListForCompositePart = null; switch (nextCompositeOf.getParamType()) { case NUMBER: @@ -549,22 +610,24 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor break; } if (paramsListForCompositePart != null) { - paramsListForCompositePart = paramsListForCompositePart - .stream() - .filter(t -> t.getParamName().equals(nextCompositeOf.getName())) - .collect(Collectors.toList()); + paramsListForCompositePart = paramsListForCompositePart.stream() + .filter(t -> t.getParamName().equals(nextCompositeOf.getName())) + .collect(Collectors.toList()); } return paramsListForCompositePart; } @Override - public SearchParamSet 
extractSearchParamTokens(IBaseResource theResource, ISearchParamFilter theSearchParamFilter) { + public SearchParamSet extractSearchParamTokens( + IBaseResource theResource, ISearchParamFilter theSearchParamFilter) { IExtractor extractor = createTokenExtractor(theResource); - return extractSearchParams(theResource, extractor, RestSearchParameterTypeEnum.TOKEN, false, theSearchParamFilter); + return extractSearchParams( + theResource, extractor, RestSearchParameterTypeEnum.TOKEN, false, theSearchParamFilter); } @Override - public SearchParamSet extractSearchParamTokens(IBaseResource theResource, RuntimeSearchParam theSearchParam) { + public SearchParamSet extractSearchParamTokens( + IBaseResource theResource, RuntimeSearchParam theSearchParam) { IExtractor extractor = createTokenExtractor(theResource); SearchParamSet setToPopulate = new SearchParamSet<>(); extractSearchParam(theSearchParam, theResource, extractor, setToPopulate, false); @@ -576,7 +639,8 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor String useSystem; if (getContext().getVersion().getVersion().equals(FhirVersionEnum.DSTU2)) { if (resourceTypeName.equals("ValueSet")) { - ca.uhn.fhir.model.dstu2.resource.ValueSet dstu2ValueSet = (ca.uhn.fhir.model.dstu2.resource.ValueSet) theResource; + ca.uhn.fhir.model.dstu2.resource.ValueSet dstu2ValueSet = + (ca.uhn.fhir.model.dstu2.resource.ValueSet) theResource; useSystem = dstu2ValueSet.getCodeSystem().getSystem(); } else { useSystem = null; @@ -593,10 +657,12 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor } @Override - public SearchParamSet extractSearchParamSpecial(IBaseResource theResource, ISearchParamFilter theSearchParamFilter) { + public SearchParamSet extractSearchParamSpecial( + IBaseResource theResource, ISearchParamFilter theSearchParamFilter) { String resourceTypeName = toRootTypeName(theResource); IExtractor extractor = createSpecialExtractor(resourceTypeName); - return extractSearchParams(theResource, extractor, RestSearchParameterTypeEnum.SPECIAL, false, theSearchParamFilter); + return extractSearchParams( + theResource, extractor, RestSearchParameterTypeEnum.SPECIAL, false, theSearchParamFilter); } private IExtractor createSpecialExtractor(String theResourceTypeName) { @@ -607,15 +673,20 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor }; } - private void addUnexpectedDatatypeWarning(SearchParamSet theParams, RuntimeSearchParam theSearchParam, IBase theValue, String thePath) { + private void addUnexpectedDatatypeWarning( + SearchParamSet theParams, RuntimeSearchParam theSearchParam, IBase theValue, String thePath) { String typeDesc = myContext.getElementDefinition(theValue.getClass()).getName(); - theParams.addWarning("Search param " + theSearchParam.getBase() + "#" + theSearchParam.getName() + " is unable to index value of type " + typeDesc + " as a " + theSearchParam.getParamType().name() + " at path: " + thePath); + theParams.addWarning("Search param " + theSearchParam.getBase() + "#" + theSearchParam.getName() + + " is unable to index value of type " + typeDesc + " as a " + + theSearchParam.getParamType().name() + " at path: " + thePath); } @Override - public SearchParamSet extractSearchParamUri(IBaseResource theResource, ISearchParamFilter theSearchParamFilter) { + public SearchParamSet extractSearchParamUri( + IBaseResource theResource, ISearchParamFilter theSearchParamFilter) { IExtractor extractor = createUriExtractor(theResource); - return 
extractSearchParams(theResource, extractor, RestSearchParameterTypeEnum.URI, false, theSearchParamFilter); + return extractSearchParams( + theResource, extractor, RestSearchParameterTypeEnum.URI, false, theSearchParamFilter); } private IExtractor createUriExtractor(IBaseResource theResource) { @@ -638,7 +709,8 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor } @Override - public SearchParamSet extractSearchParamDates(IBaseResource theResource, ISearchParamFilter theSearchParamFilter) { + public SearchParamSet extractSearchParamDates( + IBaseResource theResource, ISearchParamFilter theSearchParamFilter) { IExtractor extractor = createDateExtractor(theResource); return extractSearchParams(theResource, extractor, DATE, false, theSearchParamFilter); } @@ -654,9 +726,11 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor } @Override - public SearchParamSet extractSearchParamNumber(IBaseResource theResource, ISearchParamFilter theSearchParamFilter) { + public SearchParamSet extractSearchParamNumber( + IBaseResource theResource, ISearchParamFilter theSearchParamFilter) { IExtractor extractor = createNumberExtractor(theResource); - return extractSearchParams(theResource, extractor, RestSearchParameterTypeEnum.NUMBER, false, theSearchParamFilter); + return extractSearchParams( + theResource, extractor, RestSearchParameterTypeEnum.NUMBER, false, theSearchParamFilter); } private IExtractor createNumberExtractor(IBaseResource theResource) { @@ -689,26 +763,29 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor } @Override - public SearchParamSet extractSearchParamQuantity(IBaseResource theResource, ISearchParamFilter theSearchParamFilter) { + public SearchParamSet extractSearchParamQuantity( + IBaseResource theResource, ISearchParamFilter theSearchParamFilter) { IExtractor extractor = createQuantityUnnormalizedExtractor(theResource); - return extractSearchParams(theResource, extractor, RestSearchParameterTypeEnum.QUANTITY, false, theSearchParamFilter); + return extractSearchParams( + theResource, extractor, RestSearchParameterTypeEnum.QUANTITY, false, theSearchParamFilter); } - @Override - public SearchParamSet extractSearchParamQuantityNormalized(IBaseResource theResource, ISearchParamFilter theSearchParamFilter) { - IExtractor extractor = createQuantityNormalizedExtractor(theResource); - return extractSearchParams(theResource, extractor, RestSearchParameterTypeEnum.QUANTITY, false, theSearchParamFilter); + public SearchParamSet extractSearchParamQuantityNormalized( + IBaseResource theResource, ISearchParamFilter theSearchParamFilter) { + IExtractor extractor = + createQuantityNormalizedExtractor(theResource); + return extractSearchParams( + theResource, extractor, RestSearchParameterTypeEnum.QUANTITY, false, theSearchParamFilter); } @Nonnull - private IExtractor createQuantityExtractor(IBaseResource theResource) { + private IExtractor createQuantityExtractor( + IBaseResource theResource) { IExtractor result; if (myStorageSettings.getNormalizedQuantitySearchLevel().storageOrSearchSupported()) { result = new MultiplexExtractor( - createQuantityUnnormalizedExtractor(theResource), - createQuantityNormalizedExtractor(theResource) - ); + createQuantityUnnormalizedExtractor(theResource), createQuantityNormalizedExtractor(theResource)); } else { result = createQuantityUnnormalizedExtractor(theResource); } @@ -716,7 +793,8 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor } 
@Nonnull - private IExtractor createQuantityUnnormalizedExtractor(IBaseResource theResource) { + private IExtractor createQuantityUnnormalizedExtractor( + IBaseResource theResource) { String resourceType = toRootTypeName(theResource); return (params, searchParam, value, path, theWantLocalReferences) -> { if (value.getClass().equals(myLocationPositionDefinition.getImplementingClass())) { @@ -741,7 +819,8 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor }; } - private IExtractor createQuantityNormalizedExtractor(IBaseResource theResource) { + private IExtractor createQuantityNormalizedExtractor( + IBaseResource theResource) { return (params, searchParam, value, path, theWantLocalReferences) -> { if (value.getClass().equals(myLocationPositionDefinition.getImplementingClass())) { @@ -768,16 +847,17 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor } @Override - public SearchParamSet extractSearchParamStrings(IBaseResource theResource, ISearchParamFilter theSearchParamFilter) { + public SearchParamSet extractSearchParamStrings( + IBaseResource theResource, ISearchParamFilter theSearchParamFilter) { IExtractor extractor = createStringExtractor(theResource); - return extractSearchParams(theResource, extractor, RestSearchParameterTypeEnum.STRING, false, theSearchParamFilter); + return extractSearchParams( + theResource, extractor, RestSearchParameterTypeEnum.STRING, false, theSearchParamFilter); } private IExtractor createStringExtractor(IBaseResource theResource) { String resourceType = toRootTypeName(theResource); return (params, searchParam, value, path, theWantLocalReferences) -> { - if (value instanceof IPrimitiveType) { IPrimitiveType nextValue = (IPrimitiveType) value; String valueAsString = nextValue.getValueAsString(); @@ -825,7 +905,7 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor // This path is hard to parse and isn't likely to produce anything useful anyway if (myContext.getVersion().getVersion().equals(FhirVersionEnum.DSTU2) - && nextPath.equals("Bundle.entry.resource(0)")) { + && nextPath.equals("Bundle.entry.resource(0)")) { continue; } @@ -834,7 +914,9 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor try { allValues = allValuesFunc.get(); } catch (Exception e) { - String msg = getContext().getLocalizer().getMessage(BaseSearchParamExtractor.class, "failedToExtractPaths", nextPath, e.toString()); + String msg = getContext() + .getLocalizer() + .getMessage(BaseSearchParamExtractor.class, "failedToExtractPaths", nextPath, e.toString()); throw new InternalErrorException(Msg.code(504) + msg, e); } @@ -879,46 +961,70 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor private Collection getSearchParams(IBaseResource theResource) { RuntimeResourceDefinition def = getContext().getResourceDefinition(theResource); - Collection retVal = mySearchParamRegistry.getActiveSearchParams(def.getName()).values(); + Collection retVal = + mySearchParamRegistry.getActiveSearchParams(def.getName()).values(); List defaultList = Collections.emptyList(); retVal = ObjectUtils.defaultIfNull(retVal, defaultList); return retVal; } - private void addQuantity_Quantity(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { - Optional> valueField = myQuantityValueValueChild.getAccessor().getFirstValueOrNull(theValue); + private void addQuantity_Quantity( + String theResourceType, + Set theParams, + 
RuntimeSearchParam theSearchParam, + IBase theValue) { + Optional> valueField = + myQuantityValueValueChild.getAccessor().getFirstValueOrNull(theValue); if (valueField.isPresent() && valueField.get().getValue() != null) { BigDecimal nextValueValue = valueField.get().getValue(); String system = extractValueAsString(myQuantitySystemValueChild, theValue); String code = extractValueAsString(myQuantityCodeValueChild, theValue); - ResourceIndexedSearchParamQuantity nextEntity = new ResourceIndexedSearchParamQuantity(myPartitionSettings, theResourceType, theSearchParam.getName(), nextValueValue, system, code); + ResourceIndexedSearchParamQuantity nextEntity = new ResourceIndexedSearchParamQuantity( + myPartitionSettings, theResourceType, theSearchParam.getName(), nextValueValue, system, code); theParams.add(nextEntity); } } - private void addQuantity_QuantityNormalized(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { - Optional> valueField = myQuantityValueValueChild.getAccessor().getFirstValueOrNull(theValue); + private void addQuantity_QuantityNormalized( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { + Optional> valueField = + myQuantityValueValueChild.getAccessor().getFirstValueOrNull(theValue); if (valueField.isPresent() && valueField.get().getValue() != null) { BigDecimal nextValueValue = valueField.get().getValue(); String system = extractValueAsString(myQuantitySystemValueChild, theValue); String code = extractValueAsString(myQuantityCodeValueChild, theValue); - //-- convert the value/unit to the canonical form if any + // -- convert the value/unit to the canonical form if any Pair canonicalForm = UcumServiceUtil.getCanonicalForm(system, nextValueValue, code); if (canonicalForm != null) { - double canonicalValue = Double.parseDouble(canonicalForm.getValue().asDecimal()); + double canonicalValue = + Double.parseDouble(canonicalForm.getValue().asDecimal()); String canonicalUnits = canonicalForm.getCode(); - ResourceIndexedSearchParamQuantityNormalized nextEntity = new ResourceIndexedSearchParamQuantityNormalized(myPartitionSettings, theResourceType, theSearchParam.getName(), canonicalValue, system, canonicalUnits); + ResourceIndexedSearchParamQuantityNormalized nextEntity = + new ResourceIndexedSearchParamQuantityNormalized( + myPartitionSettings, + theResourceType, + theSearchParam.getName(), + canonicalValue, + system, + canonicalUnits); theParams.add(nextEntity); } - } } - private void addQuantity_Money(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { - Optional> valueField = myMoneyValueChild.getAccessor().getFirstValueOrNull(theValue); + private void addQuantity_Money( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { + Optional> valueField = + myMoneyValueChild.getAccessor().getFirstValueOrNull(theValue); if (valueField.isPresent() && valueField.get().getValue() != null) { BigDecimal nextValueValue = valueField.get().getValue(); @@ -926,13 +1032,24 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor String nextValueCode = extractValueAsString(myMoneyCurrencyChild, theValue); String searchParamName = theSearchParam.getName(); - ResourceIndexedSearchParamQuantity nextEntity = new ResourceIndexedSearchParamQuantity(myPartitionSettings, theResourceType, searchParamName, nextValueValue, nextValueString, nextValueCode); + ResourceIndexedSearchParamQuantity nextEntity = new 
ResourceIndexedSearchParamQuantity( + myPartitionSettings, + theResourceType, + searchParamName, + nextValueValue, + nextValueString, + nextValueCode); theParams.add(nextEntity); } } - private void addQuantity_MoneyNormalized(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { - Optional> valueField = myMoneyValueChild.getAccessor().getFirstValueOrNull(theValue); + private void addQuantity_MoneyNormalized( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { + Optional> valueField = + myMoneyValueChild.getAccessor().getFirstValueOrNull(theValue); if (valueField.isPresent() && valueField.get().getValue() != null) { BigDecimal nextValueValue = valueField.get().getValue(); @@ -940,12 +1057,23 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor String nextValueCode = extractValueAsString(myMoneyCurrencyChild, theValue); String searchParamName = theSearchParam.getName(); - ResourceIndexedSearchParamQuantityNormalized nextEntityNormalized = new ResourceIndexedSearchParamQuantityNormalized(myPartitionSettings, theResourceType, searchParamName, nextValueValue.doubleValue(), nextValueString, nextValueCode); + ResourceIndexedSearchParamQuantityNormalized nextEntityNormalized = + new ResourceIndexedSearchParamQuantityNormalized( + myPartitionSettings, + theResourceType, + searchParamName, + nextValueValue.doubleValue(), + nextValueString, + nextValueCode); theParams.add(nextEntityNormalized); } } - private void addQuantity_Range(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { + private void addQuantity_Range( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { Optional low = myRangeLowValueChild.getAccessor().getFirstValueOrNull(theValue); low.ifPresent(theIBase -> addQuantity_Quantity(theResourceType, theParams, theSearchParam, theIBase)); @@ -953,16 +1081,25 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor high.ifPresent(theIBase -> addQuantity_Quantity(theResourceType, theParams, theSearchParam, theIBase)); } - private void addQuantity_RangeNormalized(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { + private void addQuantity_RangeNormalized( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { Optional low = myRangeLowValueChild.getAccessor().getFirstValueOrNull(theValue); low.ifPresent(theIBase -> addQuantity_QuantityNormalized(theResourceType, theParams, theSearchParam, theIBase)); Optional high = myRangeHighValueChild.getAccessor().getFirstValueOrNull(theValue); - high.ifPresent(theIBase -> addQuantity_QuantityNormalized(theResourceType, theParams, theSearchParam, theIBase)); + high.ifPresent( + theIBase -> addQuantity_QuantityNormalized(theResourceType, theParams, theSearchParam, theIBase)); } @SuppressWarnings("unchecked") - private void addToken_Identifier(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { + private void addToken_Identifier( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { String system = extractValueAsString(myIdentifierSystemValueChild, theValue); String value = extractValueAsString(myIdentifierValueValueChild, theValue); if (isNotBlank(value)) { @@ -972,31 +1109,43 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor if 
(indexIdentifierType) { Optional type = myIdentifierTypeValueChild.getAccessor().getFirstValueOrNull(theValue); if (type.isPresent()) { - List codings = myCodeableConceptCodingValueChild.getAccessor().getValues(type.get()); + List codings = + myCodeableConceptCodingValueChild.getAccessor().getValues(type.get()); for (IBase nextCoding : codings) { - String typeSystem = myCodingSystemValueChild.getAccessor().getFirstValueOrNull(nextCoding).map(t -> ((IPrimitiveType) t).getValue()).orElse(null); - String typeValue = myCodingCodeValueChild.getAccessor().getFirstValueOrNull(nextCoding).map(t -> ((IPrimitiveType) t).getValue()).orElse(null); + String typeSystem = myCodingSystemValueChild + .getAccessor() + .getFirstValueOrNull(nextCoding) + .map(t -> ((IPrimitiveType) t).getValue()) + .orElse(null); + String typeValue = myCodingCodeValueChild + .getAccessor() + .getFirstValueOrNull(nextCoding) + .map(t -> ((IPrimitiveType) t).getValue()) + .orElse(null); if (isNotBlank(typeSystem) && isNotBlank(typeValue)) { String paramName = theSearchParam.getName() + Constants.PARAMQUALIFIER_TOKEN_OF_TYPE; - ResourceIndexedSearchParamToken token = createTokenIndexIfNotBlank(theResourceType, typeSystem, typeValue + "|" + value, paramName); + ResourceIndexedSearchParamToken token = createTokenIndexIfNotBlank( + theResourceType, typeSystem, typeValue + "|" + value, paramName); if (token != null) { theParams.add(token); } } - } } } } - } protected boolean shouldIndexTextComponentOfToken(RuntimeSearchParam theSearchParam) { return tokenTextIndexingEnabledForSearchParam(myStorageSettings, theSearchParam); } - private void addToken_CodeableConcept(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { + private void addToken_CodeableConcept( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { List codings = getCodingsFromCodeableConcept(theValue); for (IBase nextCoding : codings) { addToken_Coding(theResourceType, theParams, theSearchParam, nextCoding); @@ -1030,8 +1179,13 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor } } - private void addToken_Coding(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { - ResourceIndexedSearchParamToken resourceIndexedSearchParamToken = createSearchParamForCoding(theResourceType, theSearchParam, theValue); + private void addToken_Coding( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { + ResourceIndexedSearchParamToken resourceIndexedSearchParamToken = + createSearchParamForCoding(theResourceType, theSearchParam, theValue); if (resourceIndexedSearchParamToken != null) { theParams.add(resourceIndexedSearchParamToken); } @@ -1043,7 +1197,8 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor } @Override - public ResourceIndexedSearchParamToken createSearchParamForCoding(String theResourceType, RuntimeSearchParam theSearchParam, IBase theValue) { + public ResourceIndexedSearchParamToken createSearchParamForCoding( + String theResourceType, RuntimeSearchParam theSearchParam, IBase theValue) { String nextType = BaseSearchParamExtractor.this.toRootTypeName(theValue); if ("Coding".equals(nextType)) { String system = extractValueAsString(myCodingSystemValueChild, theValue); @@ -1064,32 +1219,54 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor } } - private void addToken_ContactPoint(String theResourceType, Set 
theParams, RuntimeSearchParam theSearchParam, IBase theValue) { + private void addToken_ContactPoint( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { String system = extractValueAsString(myContactPointSystemValueChild, theValue); String value = extractValueAsString(myContactPointValueValueChild, theValue); createTokenIndexIfNotBlankAndAdd(theResourceType, theParams, theSearchParam, system, value); } - private void addToken_CodeableReference(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { + private void addToken_CodeableReference( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { Optional conceptOpt = myCodeableReferenceConcept.getAccessor().getFirstValueOrNull(theValue); conceptOpt.ifPresent(concept -> addToken_CodeableConcept(theResourceType, theParams, theSearchParam, concept)); } - private void addToken_PatientCommunication(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { - List values = myPatientCommunicationLanguageValueChild.getAccessor().getValues(theValue); + private void addToken_PatientCommunication( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { + List values = + myPatientCommunicationLanguageValueChild.getAccessor().getValues(theValue); for (IBase next : values) { addToken_CodeableConcept(theResourceType, theParams, theSearchParam, next); } } - private void addToken_CapabilityStatementRestSecurity(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { - List values = myCapabilityStatementRestSecurityServiceValueChild.getAccessor().getValues(theValue); + private void addToken_CapabilityStatementRestSecurity( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { + List values = + myCapabilityStatementRestSecurityServiceValueChild.getAccessor().getValues(theValue); for (IBase nextValue : values) { addToken_CodeableConcept(theResourceType, theParams, theSearchParam, nextValue); } } - private void addDate_Period(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { + private void addDate_Period( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { Date start = extractValueAsDate(myPeriodStartValueChild, theValue); String startAsString = extractValueAsString(myPeriodStartValueChild, theValue); Date end = extractValueAsDate(myPeriodEndValueChild, theValue); @@ -1106,12 +1283,24 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor endAsString = myStorageSettings.getPeriodIndexEndOfTime().getValueAsString(); } - ResourceIndexedSearchParamDate nextEntity = new ResourceIndexedSearchParamDate(myPartitionSettings, theResourceType, theSearchParam.getName(), start, startAsString, end, endAsString, startAsString); + ResourceIndexedSearchParamDate nextEntity = new ResourceIndexedSearchParamDate( + myPartitionSettings, + theResourceType, + theSearchParam.getName(), + start, + startAsString, + end, + endAsString, + startAsString); theParams.add(nextEntity); } } - private void addDate_Timing(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { + private void addDate_Timing( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { List> values = 
extractValuesAsFhirDates(myTimingEventValueChild, theValue); TreeSet dates = new TreeSet<>(); @@ -1130,7 +1319,8 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor Optional repeat = myTimingRepeatValueChild.getAccessor().getFirstValueOrNull(theValue); if (repeat.isPresent()) { - Optional bounds = myTimingRepeatBoundsValueChild.getAccessor().getFirstValueOrNull(repeat.get()); + Optional bounds = + myTimingRepeatBoundsValueChild.getAccessor().getFirstValueOrNull(repeat.get()); if (bounds.isPresent()) { String boundsType = toRootTypeName(bounds.get()); if ("Period".equals(boundsType)) { @@ -1139,7 +1329,7 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor String endString = extractValueAsString(myPeriodEndValueChild, bounds.get()); dates.add(start); dates.add(end); - //TODO Check if this logic is valid. Does the start of the first period indicate a lower bound?? + // TODO Check if this logic is valid. Does the start of the first period indicate a lower bound?? if (firstValue == null) { firstValue = extractValueAsString(myPeriodStartValueChild, bounds.get()); } @@ -1149,35 +1339,57 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor } if (!dates.isEmpty()) { - ResourceIndexedSearchParamDate nextEntity = new ResourceIndexedSearchParamDate(myPartitionSettings, theResourceType, theSearchParam.getName(), dates.first(), firstValue, dates.last(), finalValue, firstValue); + ResourceIndexedSearchParamDate nextEntity = new ResourceIndexedSearchParamDate( + myPartitionSettings, + theResourceType, + theSearchParam.getName(), + dates.first(), + firstValue, + dates.last(), + finalValue, + firstValue); theParams.add(nextEntity); } } - private void addNumber_Duration(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { + private void addNumber_Duration( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { String system = extractValueAsString(myDurationSystemValueChild, theValue); String code = extractValueAsString(myDurationCodeValueChild, theValue); BigDecimal value = extractValueAsBigDecimal(myDurationValueValueChild, theValue); if (value != null) { value = normalizeQuantityContainingTimeUnitsIntoDaysForNumberParam(system, code, value); - ResourceIndexedSearchParamNumber nextEntity = new ResourceIndexedSearchParamNumber(myPartitionSettings, theResourceType, theSearchParam.getName(), value); + ResourceIndexedSearchParamNumber nextEntity = new ResourceIndexedSearchParamNumber( + myPartitionSettings, theResourceType, theSearchParam.getName(), value); theParams.add(nextEntity); } } - private void addNumber_Quantity(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { + private void addNumber_Quantity( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { BigDecimal value = extractValueAsBigDecimal(myQuantityValueValueChild, theValue); if (value != null) { String system = extractValueAsString(myQuantitySystemValueChild, theValue); String code = extractValueAsString(myQuantityCodeValueChild, theValue); value = normalizeQuantityContainingTimeUnitsIntoDaysForNumberParam(system, code, value); - ResourceIndexedSearchParamNumber nextEntity = new ResourceIndexedSearchParamNumber(myPartitionSettings, theResourceType, theSearchParam.getName(), value); + ResourceIndexedSearchParamNumber nextEntity = new ResourceIndexedSearchParamNumber( + myPartitionSettings, 
theResourceType, theSearchParam.getName(), value); theParams.add(nextEntity); } } @SuppressWarnings("unchecked") - private void addNumber_Range(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { + private void addNumber_Range( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { Optional low = myRangeLowValueChild.getAccessor().getFirstValueOrNull(theValue); low.ifPresent(value -> addNumber_Quantity(theResourceType, theParams, theSearchParam, value)); @@ -1185,43 +1397,57 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor high.ifPresent(value -> addNumber_Quantity(theResourceType, theParams, theSearchParam, value)); } - @SuppressWarnings("unchecked") - private void addNumber_Integer(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { + private void addNumber_Integer( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { IPrimitiveType value = (IPrimitiveType) theValue; if (value.getValue() != null) { BigDecimal valueDecimal = new BigDecimal(value.getValue()); - ResourceIndexedSearchParamNumber nextEntity = new ResourceIndexedSearchParamNumber(myPartitionSettings, theResourceType, theSearchParam.getName(), valueDecimal); + ResourceIndexedSearchParamNumber nextEntity = new ResourceIndexedSearchParamNumber( + myPartitionSettings, theResourceType, theSearchParam.getName(), valueDecimal); theParams.add(nextEntity); } - } @SuppressWarnings("unchecked") - private void addNumber_Decimal(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { + private void addNumber_Decimal( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { IPrimitiveType value = (IPrimitiveType) theValue; if (value.getValue() != null) { BigDecimal valueDecimal = value.getValue(); - ResourceIndexedSearchParamNumber nextEntity = new ResourceIndexedSearchParamNumber(myPartitionSettings, theResourceType, theSearchParam.getName(), valueDecimal); + ResourceIndexedSearchParamNumber nextEntity = new ResourceIndexedSearchParamNumber( + myPartitionSettings, theResourceType, theSearchParam.getName(), valueDecimal); theParams.add(nextEntity); } - } - private void addCoords_Position(String theResourceType, SearchParamSet theParams, RuntimeSearchParam theSearchParam, IBase theValue) { + private void addCoords_Position( + String theResourceType, + SearchParamSet theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { BigDecimal latitude = null; BigDecimal longitude = null; if (theValue instanceof org.hl7.fhir.dstu3.model.Location.LocationPositionComponent) { - org.hl7.fhir.dstu3.model.Location.LocationPositionComponent value = (org.hl7.fhir.dstu3.model.Location.LocationPositionComponent) theValue; + org.hl7.fhir.dstu3.model.Location.LocationPositionComponent value = + (org.hl7.fhir.dstu3.model.Location.LocationPositionComponent) theValue; latitude = value.getLatitude(); longitude = value.getLongitude(); } else if (theValue instanceof org.hl7.fhir.r4.model.Location.LocationPositionComponent) { - org.hl7.fhir.r4.model.Location.LocationPositionComponent value = (org.hl7.fhir.r4.model.Location.LocationPositionComponent) theValue; + org.hl7.fhir.r4.model.Location.LocationPositionComponent value = + (org.hl7.fhir.r4.model.Location.LocationPositionComponent) theValue; latitude = value.getLatitude(); longitude = value.getLongitude(); } else if (theValue 
instanceof org.hl7.fhir.r5.model.Location.LocationPositionComponent) { - org.hl7.fhir.r5.model.Location.LocationPositionComponent value = (org.hl7.fhir.r5.model.Location.LocationPositionComponent) theValue; + org.hl7.fhir.r5.model.Location.LocationPositionComponent value = + (org.hl7.fhir.r5.model.Location.LocationPositionComponent) theValue; latitude = value.getLatitude(); longitude = value.getLongitude(); } @@ -1229,13 +1455,27 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor if (latitude != null && longitude != null) { double normalizedLatitude = GeopointNormalizer.normalizeLatitude(latitude.doubleValue()); double normalizedLongitude = GeopointNormalizer.normalizeLongitude(longitude.doubleValue()); - ResourceIndexedSearchParamCoords nextEntity = new ResourceIndexedSearchParamCoords(myPartitionSettings, theResourceType, theSearchParam.getName(), normalizedLatitude, normalizedLongitude); + ResourceIndexedSearchParamCoords nextEntity = new ResourceIndexedSearchParamCoords( + myPartitionSettings, + theResourceType, + theSearchParam.getName(), + normalizedLatitude, + normalizedLongitude); theParams.add(nextEntity); } } - private void addString_HumanName(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { - List myHumanNameChildren = Arrays.asList(myHumanNameFamilyValueChild, myHumanNameGivenValueChild, myHumanNameTextValueChild, myHumanNamePrefixValueChild, myHumanNameSuffixValueChild); + private void addString_HumanName( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { + List myHumanNameChildren = Arrays.asList( + myHumanNameFamilyValueChild, + myHumanNameGivenValueChild, + myHumanNameTextValueChild, + myHumanNamePrefixValueChild, + myHumanNameSuffixValueChild); for (BaseRuntimeChildDefinition theChild : myHumanNameChildren) { List indices = extractValuesAsStrings(theChild, theValue); for (String next : indices) { @@ -1244,19 +1484,31 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor } } - private void addString_Quantity(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { + private void addString_Quantity( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { BigDecimal value = extractValueAsBigDecimal(myQuantityValueValueChild, theValue); if (value != null) { createStringIndexIfNotBlank(theResourceType, theParams, theSearchParam, value.toPlainString()); } } - private void addString_Range(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { + private void addString_Range( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { Optional value = myRangeLowValueChild.getAccessor().getFirstValueOrNull(theValue); - value.ifPresent(t->addString_Quantity(theResourceType, theParams, theSearchParam, t)); + value.ifPresent(t -> addString_Quantity(theResourceType, theParams, theSearchParam, t)); } - private void addString_ContactPoint(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { + private void addString_ContactPoint( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { String value = extractValueAsString(myContactPointValueValueChild, theValue); if (isNotBlank(value)) { @@ -1264,7 +1516,11 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor } } - private 
void addString_Address(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { + private void addString_Address( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { List allNames = new ArrayList<>(extractValuesAsStrings(myAddressLineValueChild, theValue)); @@ -1296,7 +1552,6 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor for (String nextName : allNames) { createStringIndexIfNotBlank(theResourceType, theParams, theSearchParam, nextName); } - } /** @@ -1322,7 +1577,12 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor * This is the only way you could actually specify a FhirPath expression for those * prior to 6.2.0 so this isn't a breaking change. */ - SearchParamSet extractSearchParams(IBaseResource theResource, IExtractor theExtractor, RestSearchParameterTypeEnum theSearchParamType, boolean theWantLocalReferences, ISearchParamFilter theSearchParamFilter) { + SearchParamSet extractSearchParams( + IBaseResource theResource, + IExtractor theExtractor, + RestSearchParameterTypeEnum theSearchParamType, + boolean theWantLocalReferences, + ISearchParamFilter theSearchParamFilter) { SearchParamSet retVal = new SearchParamSet<>(); Collection searchParams = getSearchParams(theResource); @@ -1348,16 +1608,16 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor return retVal; } - /** * Helper function to determine if a set of SPs for a resource uses a resolve as part of its fhir path. */ - private boolean anySearchParameterUsesResolve(Collection searchParams, RestSearchParameterTypeEnum theSearchParamType) { + private boolean anySearchParameterUsesResolve( + Collection searchParams, RestSearchParameterTypeEnum theSearchParamType) { return searchParams.stream() - .filter(param -> param.getParamType() != theSearchParamType) - .map(RuntimeSearchParam::getPath) - .filter(Objects::nonNull) - .anyMatch(path -> path.contains("resolve")); + .filter(param -> param.getParamType() != theSearchParamType) + .map(RuntimeSearchParam::getPath) + .filter(Objects::nonNull) + .anyMatch(path -> path.contains("resolve")); } /** @@ -1369,32 +1629,56 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor * Doing this cleanup isn't hugely expensive, but it's not completely free either so we only do it * if we think there's actually a chance */ - private void cleanUpContainedResourceReferences(IBaseResource theResource, RestSearchParameterTypeEnum theSearchParamType, Collection searchParams) { - boolean havePathWithResolveExpression = - myStorageSettings.isIndexOnContainedResources() + private void cleanUpContainedResourceReferences( + IBaseResource theResource, + RestSearchParameterTypeEnum theSearchParamType, + Collection searchParams) { + boolean havePathWithResolveExpression = myStorageSettings.isIndexOnContainedResources() || anySearchParameterUsesResolve(searchParams, theSearchParamType); if (havePathWithResolveExpression && myContext.getParserOptions().isAutoContainReferenceTargetsWithNoId()) { - //TODO GGG/JA: At this point, if the Task.basedOn.reference.resource does _not_ have an ID, we will attempt to contain it internally. Wild - myContext.newTerser().containResources(theResource, FhirTerser.OptionsEnum.MODIFY_RESOURCE, FhirTerser.OptionsEnum.STORE_AND_REUSE_RESULTS); + // TODO GGG/JA: At this point, if the Task.basedOn.reference.resource does _not_ have an ID, we will attempt + // to contain it internally. 
Wild + myContext + .newTerser() + .containResources( + theResource, + FhirTerser.OptionsEnum.MODIFY_RESOURCE, + FhirTerser.OptionsEnum.STORE_AND_REUSE_RESULTS); } } - /** * extract for normal SP */ @VisibleForTesting - public void extractSearchParam(RuntimeSearchParam theSearchParameterDef, IBase theResource, IExtractor theExtractor, SearchParamSet theSetToPopulate, boolean theWantLocalReferences) { + public void extractSearchParam( + RuntimeSearchParam theSearchParameterDef, + IBase theResource, + IExtractor theExtractor, + SearchParamSet theSetToPopulate, + boolean theWantLocalReferences) { String nextPathUnsplit = theSearchParameterDef.getPath(); - extractSearchParam(theSearchParameterDef, nextPathUnsplit, theResource, theExtractor, theSetToPopulate, theWantLocalReferences); + extractSearchParam( + theSearchParameterDef, + nextPathUnsplit, + theResource, + theExtractor, + theSetToPopulate, + theWantLocalReferences); } /** * extract for SP, but with possibly different expression. * Allows composite SPs to use sub-paths. */ - private void extractSearchParam(RuntimeSearchParam theSearchParameterDef, String thePathExpression, IBase theResource, IExtractor theExtractor, SearchParamSet theSetToPopulate, boolean theWantLocalReferences) { + private void extractSearchParam( + RuntimeSearchParam theSearchParameterDef, + String thePathExpression, + IBase theResource, + IExtractor theExtractor, + SearchParamSet theSetToPopulate, + boolean theWantLocalReferences) { if (isBlank(thePathExpression)) { return; } @@ -1406,7 +1690,8 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor if (nextObject != null) { String typeName = toRootTypeName(nextObject); if (!myIgnoredForSearchDatatypes.contains(typeName)) { - theExtractor.extract(theSetToPopulate, theSearchParameterDef, nextObject, nextPath, theWantLocalReferences); + theExtractor.extract( + theSetToPopulate, theSearchParameterDef, nextObject, nextPath, theWantLocalReferences); } } } @@ -1426,17 +1711,26 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor return elementDefinition.getName(); } - private void addUri_Uri(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { + private void addUri_Uri( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { IPrimitiveType value = (IPrimitiveType) theValue; String valueAsString = value.getValueAsString(); if (isNotBlank(valueAsString)) { - ResourceIndexedSearchParamUri nextEntity = new ResourceIndexedSearchParamUri(myPartitionSettings, theResourceType, theSearchParam.getName(), valueAsString); + ResourceIndexedSearchParamUri nextEntity = new ResourceIndexedSearchParamUri( + myPartitionSettings, theResourceType, theSearchParam.getName(), valueAsString); theParams.add(nextEntity); } } @SuppressWarnings({"unchecked", "UnnecessaryLocalVariable"}) - private void createStringIndexIfNotBlank(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, String theValue) { + private void createStringIndexIfNotBlank( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + String theValue) { String value = theValue; if (isNotBlank(value)) { if (value.length() > ResourceIndexedSearchParamString.MAX_LENGTH) { @@ -1451,15 +1745,22 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor valueEncoded = valueEncoded.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH); } - ResourceIndexedSearchParamString nextEntity 
= new ResourceIndexedSearchParamString(myPartitionSettings, getStorageSettings(), theResourceType, searchParamName, valueEncoded, value); + ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString( + myPartitionSettings, getStorageSettings(), theResourceType, searchParamName, valueEncoded, value); Set params = theParams; params.add(nextEntity); } } - private void createTokenIndexIfNotBlankAndAdd(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, String theSystem, String theValue) { - ResourceIndexedSearchParamToken nextEntity = createTokenIndexIfNotBlank(theResourceType, theSystem, theValue, theSearchParam.getName()); + private void createTokenIndexIfNotBlankAndAdd( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + String theSystem, + String theValue) { + ResourceIndexedSearchParamToken nextEntity = + createTokenIndexIfNotBlank(theResourceType, theSystem, theValue, theSearchParam.getName()); if (nextEntity != null) { theParams.add(nextEntity); } @@ -1470,10 +1771,12 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor myPartitionSettings = thePartitionSettings; } - private ResourceIndexedSearchParamToken createTokenIndexIfNotBlank(String theResourceType, String theSystem, String theValue, String searchParamName) { + private ResourceIndexedSearchParamToken createTokenIndexIfNotBlank( + String theResourceType, String theSystem, String theValue, String searchParamName) { ResourceIndexedSearchParamToken nextEntity = null; if (isNotBlank(theSystem) || isNotBlank(theValue)) { - nextEntity = new ResourceIndexedSearchParamToken(myPartitionSettings, theResourceType, searchParamName, theSystem, theValue); + nextEntity = new ResourceIndexedSearchParamToken( + myPartitionSettings, theResourceType, searchParamName, theSystem, theValue); } return nextEntity; } @@ -1483,7 +1786,7 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor if (shouldAttemptToSplitPath(thePaths)) { return splitOutOfParensOrs(thePaths); } else { - return new String[]{thePaths}; + return new String[] {thePaths}; } } @@ -1493,7 +1796,7 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor return true; } } else { - //DSTU 3 and below used "or" as well as "|" + // DSTU 3 and below used "or" as well as "|" if (thePath.contains("|") || thePath.contains(" or ")) { return true; } @@ -1515,8 +1818,8 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor private String[] splitOutOfParensOrs(String thePaths) { List topLevelOrExpressions = splitOutOfParensToken(thePaths, " or "); List retVal = topLevelOrExpressions.stream() - .flatMap(s -> splitOutOfParensToken(s, " |").stream()) - .collect(Collectors.toList()); + .flatMap(s -> splitOutOfParensToken(s, " |").stream()) + .collect(Collectors.toList()); return retVal.toArray(new String[retVal.size()]); } @@ -1543,7 +1846,8 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor return open == close; } - private BigDecimal normalizeQuantityContainingTimeUnitsIntoDaysForNumberParam(String theSystem, String theCode, BigDecimal theValue) { + private BigDecimal normalizeQuantityContainingTimeUnitsIntoDaysForNumberParam( + String theSystem, String theCode, BigDecimal theValue) { if (SearchParamConstants.UCUM_NS.equals(theSystem)) { if (isNotBlank(theCode)) { Unit unit = Unit.valueOf(theCode); @@ -1572,24 +1876,30 @@ public abstract class BaseSearchParamExtractor implements 
ISearchParamExtractor * search param extraction happens a whole heck of a lot at runtime.. */ - BaseRuntimeElementCompositeDefinition quantityDefinition = (BaseRuntimeElementCompositeDefinition) getContext().getElementDefinition("Quantity"); + BaseRuntimeElementCompositeDefinition quantityDefinition = + (BaseRuntimeElementCompositeDefinition) getContext().getElementDefinition("Quantity"); myQuantityValueValueChild = quantityDefinition.getChildByName("value"); myQuantitySystemValueChild = quantityDefinition.getChildByName("system"); myQuantityCodeValueChild = quantityDefinition.getChildByName("code"); - BaseRuntimeElementCompositeDefinition moneyDefinition = (BaseRuntimeElementCompositeDefinition) getContext().getElementDefinition("Money"); + BaseRuntimeElementCompositeDefinition moneyDefinition = + (BaseRuntimeElementCompositeDefinition) getContext().getElementDefinition("Money"); myMoneyValueChild = moneyDefinition.getChildByName("value"); myMoneyCurrencyChild = moneyDefinition.getChildByName("currency"); - BaseRuntimeElementCompositeDefinition locationDefinition = getContext().getResourceDefinition("Location"); + BaseRuntimeElementCompositeDefinition locationDefinition = + getContext().getResourceDefinition("Location"); BaseRuntimeChildDefinition locationPositionValueChild = locationDefinition.getChildByName("position"); - myLocationPositionDefinition = (BaseRuntimeElementCompositeDefinition) locationPositionValueChild.getChildByName("position"); + myLocationPositionDefinition = + (BaseRuntimeElementCompositeDefinition) locationPositionValueChild.getChildByName("position"); - BaseRuntimeElementCompositeDefinition rangeDefinition = (BaseRuntimeElementCompositeDefinition) getContext().getElementDefinition("Range"); + BaseRuntimeElementCompositeDefinition rangeDefinition = + (BaseRuntimeElementCompositeDefinition) getContext().getElementDefinition("Range"); myRangeLowValueChild = rangeDefinition.getChildByName("low"); myRangeHighValueChild = rangeDefinition.getChildByName("high"); - BaseRuntimeElementCompositeDefinition addressDefinition = (BaseRuntimeElementCompositeDefinition) getContext().getElementDefinition("Address"); + BaseRuntimeElementCompositeDefinition addressDefinition = + (BaseRuntimeElementCompositeDefinition) getContext().getElementDefinition("Address"); myAddressLineValueChild = addressDefinition.getChildByName("line"); myAddressCityValueChild = addressDefinition.getChildByName("city"); myAddressDistrictValueChild = addressDefinition.getChildByName("district"); @@ -1597,51 +1907,64 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor myAddressCountryValueChild = addressDefinition.getChildByName("country"); myAddressPostalCodeValueChild = addressDefinition.getChildByName("postalCode"); - BaseRuntimeElementCompositeDefinition periodDefinition = (BaseRuntimeElementCompositeDefinition) getContext().getElementDefinition("Period"); + BaseRuntimeElementCompositeDefinition periodDefinition = + (BaseRuntimeElementCompositeDefinition) getContext().getElementDefinition("Period"); myPeriodStartValueChild = periodDefinition.getChildByName("start"); myPeriodEndValueChild = periodDefinition.getChildByName("end"); - BaseRuntimeElementCompositeDefinition timingDefinition = (BaseRuntimeElementCompositeDefinition) getContext().getElementDefinition("Timing"); + BaseRuntimeElementCompositeDefinition timingDefinition = + (BaseRuntimeElementCompositeDefinition) getContext().getElementDefinition("Timing"); myTimingEventValueChild = 
timingDefinition.getChildByName("event"); myTimingRepeatValueChild = timingDefinition.getChildByName("repeat"); - BaseRuntimeElementCompositeDefinition timingRepeatDefinition = (BaseRuntimeElementCompositeDefinition) myTimingRepeatValueChild.getChildByName("repeat"); + BaseRuntimeElementCompositeDefinition timingRepeatDefinition = + (BaseRuntimeElementCompositeDefinition) myTimingRepeatValueChild.getChildByName("repeat"); myTimingRepeatBoundsValueChild = timingRepeatDefinition.getChildByName("bounds[x]"); - BaseRuntimeElementCompositeDefinition durationDefinition = (BaseRuntimeElementCompositeDefinition) getContext().getElementDefinition("Duration"); + BaseRuntimeElementCompositeDefinition durationDefinition = + (BaseRuntimeElementCompositeDefinition) getContext().getElementDefinition("Duration"); myDurationSystemValueChild = durationDefinition.getChildByName("system"); myDurationCodeValueChild = durationDefinition.getChildByName("code"); myDurationValueValueChild = durationDefinition.getChildByName("value"); - BaseRuntimeElementCompositeDefinition humanNameDefinition = (BaseRuntimeElementCompositeDefinition) getContext().getElementDefinition("HumanName"); + BaseRuntimeElementCompositeDefinition humanNameDefinition = + (BaseRuntimeElementCompositeDefinition) getContext().getElementDefinition("HumanName"); myHumanNameFamilyValueChild = humanNameDefinition.getChildByName("family"); myHumanNameGivenValueChild = humanNameDefinition.getChildByName("given"); myHumanNameTextValueChild = humanNameDefinition.getChildByName("text"); myHumanNamePrefixValueChild = humanNameDefinition.getChildByName("prefix"); myHumanNameSuffixValueChild = humanNameDefinition.getChildByName("suffix"); - BaseRuntimeElementCompositeDefinition contactPointDefinition = (BaseRuntimeElementCompositeDefinition) getContext().getElementDefinition("ContactPoint"); + BaseRuntimeElementCompositeDefinition contactPointDefinition = + (BaseRuntimeElementCompositeDefinition) getContext().getElementDefinition("ContactPoint"); myContactPointValueValueChild = contactPointDefinition.getChildByName("value"); myContactPointSystemValueChild = contactPointDefinition.getChildByName("system"); - BaseRuntimeElementCompositeDefinition identifierDefinition = (BaseRuntimeElementCompositeDefinition) getContext().getElementDefinition("Identifier"); + BaseRuntimeElementCompositeDefinition identifierDefinition = + (BaseRuntimeElementCompositeDefinition) getContext().getElementDefinition("Identifier"); myIdentifierSystemValueChild = identifierDefinition.getChildByName("system"); myIdentifierValueValueChild = identifierDefinition.getChildByName("value"); myIdentifierTypeValueChild = identifierDefinition.getChildByName("type"); - BaseRuntimeElementCompositeDefinition identifierTypeDefinition = (BaseRuntimeElementCompositeDefinition) myIdentifierTypeValueChild.getChildByName("type"); + BaseRuntimeElementCompositeDefinition identifierTypeDefinition = + (BaseRuntimeElementCompositeDefinition) myIdentifierTypeValueChild.getChildByName("type"); myIdentifierTypeTextValueChild = identifierTypeDefinition.getChildByName("text"); - BaseRuntimeElementCompositeDefinition codeableConceptDefinition = (BaseRuntimeElementCompositeDefinition) getContext().getElementDefinition("CodeableConcept"); + BaseRuntimeElementCompositeDefinition codeableConceptDefinition = + (BaseRuntimeElementCompositeDefinition) getContext().getElementDefinition("CodeableConcept"); myCodeableConceptCodingValueChild = codeableConceptDefinition.getChildByName("coding"); 
 		myCodeableConceptTextValueChild = codeableConceptDefinition.getChildByName("text");
 
-		BaseRuntimeElementCompositeDefinition codingDefinition = (BaseRuntimeElementCompositeDefinition) getContext().getElementDefinition("Coding");
+		BaseRuntimeElementCompositeDefinition codingDefinition =
+				(BaseRuntimeElementCompositeDefinition) getContext().getElementDefinition("Coding");
 		myCodingSystemValueChild = codingDefinition.getChildByName("system");
 		myCodingCodeValueChild = codingDefinition.getChildByName("code");
 		myCodingDisplayValueChild = codingDefinition.getChildByName("display");
 
-		BaseRuntimeElementCompositeDefinition patientDefinition = getContext().getResourceDefinition("Patient");
+		BaseRuntimeElementCompositeDefinition patientDefinition =
+				getContext().getResourceDefinition("Patient");
 		BaseRuntimeChildDefinition patientCommunicationValueChild = patientDefinition.getChildByName("communication");
-		BaseRuntimeElementCompositeDefinition patientCommunicationDefinition = (BaseRuntimeElementCompositeDefinition) patientCommunicationValueChild.getChildByName("communication");
+		BaseRuntimeElementCompositeDefinition patientCommunicationDefinition =
+				(BaseRuntimeElementCompositeDefinition)
+						patientCommunicationValueChild.getChildByName("communication");
 		myPatientCommunicationLanguageValueChild = patientCommunicationDefinition.getChildByName("language");
 
 		// DSTU3+
@@ -1651,23 +1974,29 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
 			assert codeSystemDefinition != null;
 			myCodeSystemUrlValueChild = codeSystemDefinition.getChildByName("url");
 
-			BaseRuntimeElementCompositeDefinition capabilityStatementDefinition = getContext().getResourceDefinition("CapabilityStatement");
-			BaseRuntimeChildDefinition capabilityStatementRestChild = capabilityStatementDefinition.getChildByName("rest");
-			BaseRuntimeElementCompositeDefinition capabilityStatementRestDefinition = (BaseRuntimeElementCompositeDefinition) capabilityStatementRestChild.getChildByName("rest");
-			BaseRuntimeChildDefinition capabilityStatementRestSecurityValueChild = capabilityStatementRestDefinition.getChildByName("security");
-			BaseRuntimeElementCompositeDefinition capabilityStatementRestSecurityDefinition = (BaseRuntimeElementCompositeDefinition) capabilityStatementRestSecurityValueChild.getChildByName("security");
-			myCapabilityStatementRestSecurityServiceValueChild = capabilityStatementRestSecurityDefinition.getChildByName("service");
+			BaseRuntimeElementCompositeDefinition capabilityStatementDefinition =
+					getContext().getResourceDefinition("CapabilityStatement");
+			BaseRuntimeChildDefinition capabilityStatementRestChild =
+					capabilityStatementDefinition.getChildByName("rest");
+			BaseRuntimeElementCompositeDefinition capabilityStatementRestDefinition =
+					(BaseRuntimeElementCompositeDefinition) capabilityStatementRestChild.getChildByName("rest");
+			BaseRuntimeChildDefinition capabilityStatementRestSecurityValueChild =
+					capabilityStatementRestDefinition.getChildByName("security");
+			BaseRuntimeElementCompositeDefinition capabilityStatementRestSecurityDefinition =
+					(BaseRuntimeElementCompositeDefinition)
+							capabilityStatementRestSecurityValueChild.getChildByName("security");
+			myCapabilityStatementRestSecurityServiceValueChild =
+					capabilityStatementRestSecurityDefinition.getChildByName("service");
 		}
 
 		// R4B+
 		if (getContext().getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.R4B)) {
-			BaseRuntimeElementCompositeDefinition codeableReferenceDef = (BaseRuntimeElementCompositeDefinition) 
getContext().getElementDefinition("CodeableReference"); + BaseRuntimeElementCompositeDefinition codeableReferenceDef = + (BaseRuntimeElementCompositeDefinition) getContext().getElementDefinition("CodeableReference"); myCodeableReferenceConcept = codeableReferenceDef.getChildByName("concept"); myCodeableReferenceReference = codeableReferenceDef.getChildByName("reference"); - } - } @SuppressWarnings("unchecked") @@ -1681,7 +2010,9 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor for (BundleEntryParts next : entries) { if (next.getResource() != null) { if (theUrl.startsWith("urn:uuid:")) { - if (theUrl.equals(next.getUrl()) || theUrl.equals(next.getResource().getIdElement().getValue())) { + if (theUrl.equals(next.getUrl()) + || theUrl.equals( + next.getResource().getIdElement().getValue())) { return (T) next.getResource(); } } else { @@ -1699,14 +2030,17 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor public interface IValueExtractor { List get() throws FHIRException; - } @FunctionalInterface private interface IExtractor { - void extract(SearchParamSet theParams, RuntimeSearchParam theSearchParam, IBase theValue, String thePath, boolean theWantLocalReferences); - + void extract( + SearchParamSet theParams, + RuntimeSearchParam theSearchParam, + IBase theValue, + String thePath, + boolean theWantLocalReferences); } /** @@ -1721,18 +2055,20 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor return tok.getTokenArray(); } - public static boolean tokenTextIndexingEnabledForSearchParam(StorageSettings theStorageSettings, RuntimeSearchParam theSearchParam) { - Optional noSuppressForSearchParam = theSearchParam.getExtensions(HapiExtensions.EXT_SEARCHPARAM_TOKEN_SUPPRESS_TEXT_INDEXING).stream() - .map(IBaseExtension::getValue) - .map(val -> (IPrimitiveType) val) - .map(IPrimitiveType::getValueAsString) - .map(Boolean::parseBoolean) - .findFirst(); + public static boolean tokenTextIndexingEnabledForSearchParam( + StorageSettings theStorageSettings, RuntimeSearchParam theSearchParam) { + Optional noSuppressForSearchParam = + theSearchParam.getExtensions(HapiExtensions.EXT_SEARCHPARAM_TOKEN_SUPPRESS_TEXT_INDEXING).stream() + .map(IBaseExtension::getValue) + .map(val -> (IPrimitiveType) val) + .map(IPrimitiveType::getValueAsString) + .map(Boolean::parseBoolean) + .findFirst(); - //if the SP doesn't care, use the system default. + // if the SP doesn't care, use the system default. if (!noSuppressForSearchParam.isPresent()) { return !theStorageSettings.isSuppressStringIndexingInTokens(); - //If the SP does care, use its value. + // If the SP does care, use its value. 
} else { boolean suppressForSearchParam = noSuppressForSearchParam.get(); ourLog.trace("Text indexing for SearchParameter {}: {}", theSearchParam.getName(), suppressForSearchParam); @@ -1749,44 +2085,42 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor protected static String extractValueAsString(BaseRuntimeChildDefinition theChildDefinition, IBase theElement) { return theChildDefinition - .getAccessor() - .>getFirstValueOrNull(theElement) - .map(IPrimitiveType::getValueAsString) - .orElse(null); + .getAccessor() + .>getFirstValueOrNull(theElement) + .map(IPrimitiveType::getValueAsString) + .orElse(null); } protected static Date extractValueAsDate(BaseRuntimeChildDefinition theChildDefinition, IBase theElement) { return theChildDefinition - .getAccessor() - .>getFirstValueOrNull(theElement) - .map(IPrimitiveType::getValue) - .orElse(null); + .getAccessor() + .>getFirstValueOrNull(theElement) + .map(IPrimitiveType::getValue) + .orElse(null); } - protected static BigDecimal extractValueAsBigDecimal(BaseRuntimeChildDefinition theChildDefinition, IBase theElement) { + protected static BigDecimal extractValueAsBigDecimal( + BaseRuntimeChildDefinition theChildDefinition, IBase theElement) { return theChildDefinition - .getAccessor() - .>getFirstValueOrNull(theElement) - .map(IPrimitiveType::getValue) - .orElse(null); + .getAccessor() + .>getFirstValueOrNull(theElement) + .map(IPrimitiveType::getValue) + .orElse(null); } @SuppressWarnings("unchecked") - protected static List> extractValuesAsFhirDates(BaseRuntimeChildDefinition theChildDefinition, IBase theElement) { - return (List) theChildDefinition - .getAccessor() - .getValues(theElement); + protected static List> extractValuesAsFhirDates( + BaseRuntimeChildDefinition theChildDefinition, IBase theElement) { + return (List) theChildDefinition.getAccessor().getValues(theElement); } - protected static List extractValuesAsStrings(BaseRuntimeChildDefinition theChildDefinition, IBase theValue) { - return theChildDefinition - .getAccessor() - .getValues(theValue) - .stream() - .map(t -> (IPrimitiveType) t) - .map(IPrimitiveType::getValueAsString) - .filter(StringUtils::isNotBlank) - .collect(Collectors.toList()); + protected static List extractValuesAsStrings( + BaseRuntimeChildDefinition theChildDefinition, IBase theValue) { + return theChildDefinition.getAccessor().getValues(theValue).stream() + .map(t -> (IPrimitiveType) t) + .map(IPrimitiveType::getValueAsString) + .filter(StringUtils::isNotBlank) + .collect(Collectors.toList()); } protected static > String extractSystem(IBaseEnumeration theBoundCode) { @@ -1801,7 +2135,12 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor private PathAndRef myPathAndRef = null; @Override - public void extract(SearchParamSet theParams, RuntimeSearchParam theSearchParam, IBase theValue, String thePath, boolean theWantLocalReferences) { + public void extract( + SearchParamSet theParams, + RuntimeSearchParam theSearchParam, + IBase theValue, + String thePath, + boolean theWantLocalReferences) { if (theValue instanceof IBaseResource) { myPathAndRef = new PathAndRef(theSearchParam.getName(), thePath, (IBaseResource) theValue); theParams.add(myPathAndRef); @@ -1814,7 +2153,8 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor case "canonical": String typeName = toTypeName(theValue); IPrimitiveType valuePrimitive = (IPrimitiveType) theValue; - IBaseReference fakeReference = (IBaseReference) 
myContext.getElementDefinition("Reference").newInstance(); + IBaseReference fakeReference = (IBaseReference) + myContext.getElementDefinition("Reference").newInstance(); fakeReference.setReference(valuePrimitive.getValueAsString()); // Canonical has a root type of "uri" @@ -1846,13 +2186,16 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor case "reference": case "Reference": IBaseReference valueRef = (IBaseReference) theValue; - extractResourceLinkFromReference(theParams, theSearchParam, thePath, theWantLocalReferences, valueRef); + extractResourceLinkFromReference( + theParams, theSearchParam, thePath, theWantLocalReferences, valueRef); break; case "CodeableReference": - Optional referenceOpt = myCodeableReferenceReference.getAccessor().getFirstValueOrNull(theValue); + Optional referenceOpt = + myCodeableReferenceReference.getAccessor().getFirstValueOrNull(theValue); if (referenceOpt.isPresent()) { IBaseReference value = (IBaseReference) referenceOpt.get(); - extractResourceLinkFromReference(theParams, theSearchParam, thePath, theWantLocalReferences, value); + extractResourceLinkFromReference( + theParams, theSearchParam, thePath, theWantLocalReferences, value); } break; default: @@ -1861,16 +2204,18 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor } } - private void extractResourceLinkFromReference(SearchParamSet theParams, RuntimeSearchParam theSearchParam, String thePath, boolean theWantLocalReferences, IBaseReference valueRef) { + private void extractResourceLinkFromReference( + SearchParamSet theParams, + RuntimeSearchParam theSearchParam, + String thePath, + boolean theWantLocalReferences, + IBaseReference valueRef) { IIdType nextId = valueRef.getReferenceElement(); if (nextId.isEmpty() && valueRef.getResource() != null) { nextId = valueRef.getResource().getIdElement(); } - if ( - nextId == null || - nextId.isEmpty() - ) { + if (nextId == null || nextId.isEmpty()) { // Ignore placeholder references that are blank } else if (!theWantLocalReferences && nextId.getValue().startsWith("#")) { // Ignore local refs unless we specifically want them @@ -1881,9 +2226,12 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor } public PathAndRef get(IBase theValue, String thePath) { - extract(new SearchParamSet<>(), - new RuntimeSearchParam(null, null, "Reference", null, null, null, null, null, null, null), - theValue, thePath, false); + extract( + new SearchParamSet<>(), + new RuntimeSearchParam(null, null, "Reference", null, null, null, null, null, null, null), + theValue, + thePath, + false); return myPathAndRef; } } @@ -1902,7 +2250,12 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor } @Override - public void extract(SearchParamSet theParams, RuntimeSearchParam theSearchParam, IBase theValue, String thePath, boolean theWantLocalReferences) { + public void extract( + SearchParamSet theParams, + RuntimeSearchParam theSearchParam, + IBase theValue, + String thePath, + boolean theWantLocalReferences) { String nextType = toRootTypeName(theValue); switch (nextType) { case "date": @@ -1928,11 +2281,14 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor default: addUnexpectedDatatypeWarning(theParams, theSearchParam, theValue, thePath); break; - } } - private void addDate_Period(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { + private void addDate_Period( + String theResourceType, + Set theParams, 
+ RuntimeSearchParam theSearchParam, + IBase theValue) { Date start = extractValueAsDate(myPeriodStartValueChild, theValue); String startAsString = extractValueAsString(myPeriodStartValueChild, theValue); Date end = extractValueAsDate(myPeriodEndValueChild, theValue); @@ -1942,19 +2298,32 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor if (start == null) { start = myStorageSettings.getPeriodIndexStartOfTime().getValue(); - startAsString = myStorageSettings.getPeriodIndexStartOfTime().getValueAsString(); + startAsString = + myStorageSettings.getPeriodIndexStartOfTime().getValueAsString(); } if (end == null) { end = myStorageSettings.getPeriodIndexEndOfTime().getValue(); endAsString = myStorageSettings.getPeriodIndexEndOfTime().getValueAsString(); } - myIndexedSearchParamDate = new ResourceIndexedSearchParamDate(myPartitionSettings, theResourceType, theSearchParam.getName(), start, startAsString, end, endAsString, startAsString); + myIndexedSearchParamDate = new ResourceIndexedSearchParamDate( + myPartitionSettings, + theResourceType, + theSearchParam.getName(), + start, + startAsString, + end, + endAsString, + startAsString); theParams.add(myIndexedSearchParamDate); } } - private void addDate_Timing(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { + private void addDate_Timing( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { List> values = extractValuesAsFhirDates(myTimingEventValueChild, theValue); TreeSet dates = new TreeSet<>(); @@ -1972,7 +2341,8 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor Optional repeat = myTimingRepeatValueChild.getAccessor().getFirstValueOrNull(theValue); if (repeat.isPresent()) { - Optional bounds = myTimingRepeatBoundsValueChild.getAccessor().getFirstValueOrNull(repeat.get()); + Optional bounds = + myTimingRepeatBoundsValueChild.getAccessor().getFirstValueOrNull(repeat.get()); if (bounds.isPresent()) { String boundsType = toRootTypeName(bounds.get()); if ("Period".equals(boundsType)) { @@ -1989,25 +2359,48 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor } if (!dates.isEmpty()) { - myIndexedSearchParamDate = new ResourceIndexedSearchParamDate(myPartitionSettings, theResourceType, theSearchParam.getName(), dates.first(), firstValue, dates.last(), finalValue, firstValue); + myIndexedSearchParamDate = new ResourceIndexedSearchParamDate( + myPartitionSettings, + theResourceType, + theSearchParam.getName(), + dates.first(), + firstValue, + dates.last(), + finalValue, + firstValue); theParams.add(myIndexedSearchParamDate); } } @SuppressWarnings("unchecked") - private void addDateTimeTypes(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { + private void addDateTimeTypes( + String theResourceType, + Set theParams, + RuntimeSearchParam theSearchParam, + IBase theValue) { IPrimitiveType nextBaseDateTime = (IPrimitiveType) theValue; if (nextBaseDateTime.getValue() != null) { - myIndexedSearchParamDate = new ResourceIndexedSearchParamDate(myPartitionSettings, theResourceType, theSearchParam.getName(), nextBaseDateTime.getValue(), nextBaseDateTime.getValueAsString(), nextBaseDateTime.getValue(), nextBaseDateTime.getValueAsString(), nextBaseDateTime.getValueAsString()); + myIndexedSearchParamDate = new ResourceIndexedSearchParamDate( + myPartitionSettings, + theResourceType, + theSearchParam.getName(), + nextBaseDateTime.getValue(), + 
nextBaseDateTime.getValueAsString(), + nextBaseDateTime.getValue(), + nextBaseDateTime.getValueAsString(), + nextBaseDateTime.getValueAsString()); ourLog.trace("DateExtractor - extracted {} for {}", nextBaseDateTime, theSearchParam.getName()); theParams.add(myIndexedSearchParamDate); } } public ResourceIndexedSearchParamDate get(IBase theValue, String thePath, boolean theWantLocalReferences) { - extract(new SearchParamSet<>(), - new RuntimeSearchParam(null, null, "date", null, null, null, null, null, null, null), - theValue, thePath, theWantLocalReferences); + extract( + new SearchParamSet<>(), + new RuntimeSearchParam(null, null, "date", null, null, null, null, null, null, null), + theValue, + thePath, + theWantLocalReferences); return myIndexedSearchParamDate; } } @@ -2022,14 +2415,20 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor } @Override - public void extract(SearchParamSet params, RuntimeSearchParam searchParam, IBase value, String path, boolean theWantLocalReferences) { + public void extract( + SearchParamSet params, + RuntimeSearchParam searchParam, + IBase value, + String path, + boolean theWantLocalReferences) { // DSTU3+ if (value instanceof IBaseEnumeration) { IBaseEnumeration obj = (IBaseEnumeration) value; String system = extractSystem(obj); String code = obj.getValueAsString(); - BaseSearchParamExtractor.this.createTokenIndexIfNotBlankAndAdd(myResourceTypeName, params, searchParam, system, code); + BaseSearchParamExtractor.this.createTokenIndexIfNotBlankAndAdd( + myResourceTypeName, params, searchParam, system, code); return; } @@ -2043,7 +2442,8 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor system = boundCode.getBinder().toSystemString(valueAsEnum); } String code = boundCode.getValueAsString(); - BaseSearchParamExtractor.this.createTokenIndexIfNotBlankAndAdd(myResourceTypeName, params, searchParam, system, code); + BaseSearchParamExtractor.this.createTokenIndexIfNotBlankAndAdd( + myResourceTypeName, params, searchParam, system, code); return; } @@ -2061,13 +2461,15 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor valueAsString = ((IIdType) value).getIdPart(); } - BaseSearchParamExtractor.this.createTokenIndexIfNotBlankAndAdd(myResourceTypeName, params, searchParam, systemAsString, valueAsString); + BaseSearchParamExtractor.this.createTokenIndexIfNotBlankAndAdd( + myResourceTypeName, params, searchParam, systemAsString, valueAsString); return; } switch (path) { case "Patient.communication": - BaseSearchParamExtractor.this.addToken_PatientCommunication(myResourceTypeName, params, searchParam, value); + BaseSearchParamExtractor.this.addToken_PatientCommunication( + myResourceTypeName, params, searchParam, value); return; case "Consent.source": // Consent#source-identifier has a path that isn't typed - This is a one-off to deal with that @@ -2080,7 +2482,8 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor ourLog.warn("StructureDefinition context indexing not currently supported"); return; case "CapabilityStatement.rest.security": - BaseSearchParamExtractor.this.addToken_CapabilityStatementRestSecurity(myResourceTypeName, params, searchParam, value); + BaseSearchParamExtractor.this.addToken_CapabilityStatementRestSecurity( + myResourceTypeName, params, searchParam, value); return; } @@ -2114,7 +2517,6 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor } } - /** * Extractor that delegates to two other 
extractors. * @@ -2131,10 +2533,14 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor } @Override - public void extract(SearchParamSet theParams, RuntimeSearchParam theSearchParam, IBase theValue, String thePath, boolean theWantLocalReferences) { + public void extract( + SearchParamSet theParams, + RuntimeSearchParam theSearchParam, + IBase theValue, + String thePath, + boolean theWantLocalReferences) { myExtractor0.extract(theParams, theSearchParam, theValue, thePath, theWantLocalReferences); myExtractor1.extract(theParams, theSearchParam, theValue, thePath, theWantLocalReferences); } } - } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/CrossPartitionReferenceDetails.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/CrossPartitionReferenceDetails.java index 5870f8443c1..e7f9c352a24 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/CrossPartitionReferenceDetails.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/CrossPartitionReferenceDetails.java @@ -29,19 +29,28 @@ public class CrossPartitionReferenceDetails { @Nonnull private final RequestPartitionId mySourceResourcePartitionId; + @Nonnull private final PathAndRef myPathAndRef; + @Nonnull private final RequestDetails myRequestDetails; + @Nonnull private final TransactionDetails myTransactionDetails; + @Nonnull private final String mySourceResourceName; /** * Constructor */ - public CrossPartitionReferenceDetails(@Nonnull RequestPartitionId theSourceResourcePartitionId, @Nonnull String theSourceResourceName, @Nonnull PathAndRef thePathAndRef, @Nonnull RequestDetails theRequestDetails, @Nonnull TransactionDetails theTransactionDetails) { + public CrossPartitionReferenceDetails( + @Nonnull RequestPartitionId theSourceResourcePartitionId, + @Nonnull String theSourceResourceName, + @Nonnull PathAndRef thePathAndRef, + @Nonnull RequestDetails theRequestDetails, + @Nonnull TransactionDetails theTransactionDetails) { mySourceResourcePartitionId = theSourceResourcePartitionId; mySourceResourceName = theSourceResourceName; myPathAndRef = thePathAndRef; @@ -73,5 +82,4 @@ public class CrossPartitionReferenceDetails { public PathAndRef getPathAndRef() { return myPathAndRef; } - } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/GeopointNormalizer.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/GeopointNormalizer.java index 9af38d021b7..4d691e07b56 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/GeopointNormalizer.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/GeopointNormalizer.java @@ -31,29 +31,26 @@ public class GeopointNormalizer { static int LATITUDE_DEGREE_MAX = LATITUDE_DEGREE_RANGE / 2; public static double normalizeLongitude(double longitude) { - if ( longitude == ( -LONGITUDE_DEGREE_RANGE / 2 ) ) { - return LONGITUDE_DEGREE_RANGE / 2 ; - } - else { - return normalizeLongitudeInclusive( longitude ); + if (longitude == (-LONGITUDE_DEGREE_RANGE / 2)) { + return LONGITUDE_DEGREE_RANGE / 2; + } else { + return normalizeLongitudeInclusive(longitude); } } public static double normalizeLongitudeInclusive(double longitude) { - if ( (longitude < -( LONGITUDE_DEGREE_RANGE / 2 ) ) || (longitude > ( LONGITUDE_DEGREE_RANGE / 2 ) ) ) { + if ((longitude < 
-(LONGITUDE_DEGREE_RANGE / 2)) || (longitude > (LONGITUDE_DEGREE_RANGE / 2))) {
 			double _longitude;
 			// shift 180 and normalize full circle turn
-			_longitude = ( ( longitude + ( LONGITUDE_DEGREE_RANGE / 2 ) ) % WHOLE_CIRCLE_DEGREE_RANGE );
+			_longitude = ((longitude + (LONGITUDE_DEGREE_RANGE / 2)) % WHOLE_CIRCLE_DEGREE_RANGE);
 			// as Java % is not a math modulus we may have negative numbers so the unshift is sign dependant
-			if ( _longitude < 0 ) {
-				_longitude = _longitude + ( LONGITUDE_DEGREE_RANGE / 2 );
-			}
-			else {
-				_longitude = _longitude - ( LONGITUDE_DEGREE_RANGE / 2 );
+			if (_longitude < 0) {
+				_longitude = _longitude + (LONGITUDE_DEGREE_RANGE / 2);
+			} else {
+				_longitude = _longitude - (LONGITUDE_DEGREE_RANGE / 2);
 			}
 			return _longitude;
-		}
-		else {
+		} else {
 			return longitude;
 		}
 	}
@@ -63,18 +60,17 @@ public class GeopointNormalizer {
 	 * @return latitude normalized in [-90;+90]
 	 */
 	public static double normalizeLatitude(double latitude) {
-		if ( latitude > LATITUDE_DEGREE_MAX || latitude < LATITUDE_DEGREE_MIN ) {
+		if (latitude > LATITUDE_DEGREE_MAX || latitude < LATITUDE_DEGREE_MIN) {
 			// shift 90, normalize full circle turn and 'symmetry' on the lat axis with abs
-			double _latitude = Math.abs( ( latitude + ( LATITUDE_DEGREE_RANGE / 2 ) ) % ( WHOLE_CIRCLE_DEGREE_RANGE ) );
+			double _latitude = Math.abs((latitude + (LATITUDE_DEGREE_RANGE / 2)) % (WHOLE_CIRCLE_DEGREE_RANGE));
 			// Push 2nd and 3rd quadran in 1st and 4th by 'symmetry'
-			if ( _latitude > LATITUDE_DEGREE_RANGE ) {
+			if (_latitude > LATITUDE_DEGREE_RANGE) {
 				_latitude = WHOLE_CIRCLE_DEGREE_RANGE - _latitude;
 			}
 			// unshift
-			_latitude = _latitude - ( LATITUDE_DEGREE_RANGE / 2 );
+			_latitude = _latitude - (LATITUDE_DEGREE_RANGE / 2);
 			return _latitude;
-		}
-		else {
+		} else {
 			return latitude;
 		}
 	}
diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/IResourceLinkResolver.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/IResourceLinkResolver.java
index 3c8a6aa90c6..dee9d778828 100644
--- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/IResourceLinkResolver.java
+++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/IResourceLinkResolver.java
@@ -43,7 +43,12 @@ public interface IResourceLinkResolver {
 	 * @param theRequest The incoming request, if any
 	 * @param theTransactionDetails The current TransactionDetails object
 	 */
-	IResourceLookup findTargetResource(@Nonnull RequestPartitionId theRequestPartitionId, String theSourceResourceName, PathAndRef thePathAndRef, RequestDetails theRequest, TransactionDetails theTransactionDetails);
+	IResourceLookup findTargetResource(
+			@Nonnull RequestPartitionId theRequestPartitionId,
+			String theSourceResourceName,
+			PathAndRef thePathAndRef,
+			RequestDetails theRequest,
+			TransactionDetails theTransactionDetails);
 
 	/**
 	 * This method resolves the target of a reference found within a resource that is being created/updated.
We do this @@ -59,10 +64,12 @@ public interface IResourceLinkResolver { * @param theTransactionDetails The current TransactionDetails object */ @Nullable - IBaseResource loadTargetResource(@Nonnull RequestPartitionId theRequestPartitionId, String theSourceResourceName, PathAndRef thePathAndRef, RequestDetails theRequest, TransactionDetails theTransactionDetails); - - + IBaseResource loadTargetResource( + @Nonnull RequestPartitionId theRequestPartitionId, + String theSourceResourceName, + PathAndRef thePathAndRef, + RequestDetails theRequest, + TransactionDetails theTransactionDetails); void validateTypeOrThrowException(Class theType); - } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/ISearchParamExtractor.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/ISearchParamExtractor.java index 8571def6a6d..fe137364848 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/ISearchParamExtractor.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/ISearchParamExtractor.java @@ -19,7 +19,6 @@ */ package ca.uhn.fhir.jpa.searchparam.extractor; - import ca.uhn.fhir.context.RuntimeSearchParam; import ca.uhn.fhir.jpa.model.entity.*; import org.hl7.fhir.instance.model.api.IBase; @@ -50,58 +49,71 @@ public interface ISearchParamExtractor { return extractSearchParamDates(theResource, ALL_PARAMS); } - SearchParamSet extractSearchParamDates(IBaseResource theResource, ISearchParamFilter theSearchParamFilter); + SearchParamSet extractSearchParamDates( + IBaseResource theResource, ISearchParamFilter theSearchParamFilter); default SearchParamSet extractSearchParamNumber(IBaseResource theResource) { return extractSearchParamNumber(theResource, ALL_PARAMS); } - SearchParamSet extractSearchParamNumber(IBaseResource theResource, ISearchParamFilter theSearchParamFilter); + SearchParamSet extractSearchParamNumber( + IBaseResource theResource, ISearchParamFilter theSearchParamFilter); default SearchParamSet extractSearchParamQuantity(IBaseResource theResource) { return extractSearchParamQuantity(theResource, ALL_PARAMS); } - SearchParamSet extractSearchParamQuantity(IBaseResource theResource, ISearchParamFilter theSearchParamFilter); + SearchParamSet extractSearchParamQuantity( + IBaseResource theResource, ISearchParamFilter theSearchParamFilter); - default SearchParamSet extractSearchParamQuantityNormalized(IBaseResource theResource) { + default SearchParamSet extractSearchParamQuantityNormalized( + IBaseResource theResource) { return extractSearchParamQuantityNormalized(theResource, ALL_PARAMS); } - SearchParamSet extractSearchParamQuantityNormalized(IBaseResource theResource, ISearchParamFilter theSearchParamFilter); + SearchParamSet extractSearchParamQuantityNormalized( + IBaseResource theResource, ISearchParamFilter theSearchParamFilter); default SearchParamSet extractSearchParamStrings(IBaseResource theResource) { return extractSearchParamStrings(theResource, ALL_PARAMS); } - SearchParamSet extractSearchParamStrings(IBaseResource theResource, ISearchParamFilter theSearchParamFilter); + SearchParamSet extractSearchParamStrings( + IBaseResource theResource, ISearchParamFilter theSearchParamFilter); - default SearchParamSet extractSearchParamComposites(IBaseResource theResource) { + default SearchParamSet extractSearchParamComposites( + IBaseResource theResource) { return extractSearchParamComposites(theResource, ALL_PARAMS); } - SearchParamSet 
extractSearchParamComposites(IBaseResource theResource, ISearchParamFilter theSearchParamFilter); + SearchParamSet extractSearchParamComposites( + IBaseResource theResource, ISearchParamFilter theSearchParamFilter); default SearchParamSet extractSearchParamTokens(IBaseResource theResource) { return extractSearchParamTokens(theResource, ALL_PARAMS); } - SearchParamSet extractSearchParamTokens(IBaseResource theResource, ISearchParamFilter theSearchParamFilter); + SearchParamSet extractSearchParamTokens( + IBaseResource theResource, ISearchParamFilter theSearchParamFilter); - SearchParamSet extractSearchParamTokens(IBaseResource theResource, RuntimeSearchParam theSearchParam); + SearchParamSet extractSearchParamTokens( + IBaseResource theResource, RuntimeSearchParam theSearchParam); - SearchParamSet extractSearchParamSpecial(IBaseResource theResource, ISearchParamFilter theSearchParamFilter); + SearchParamSet extractSearchParamSpecial( + IBaseResource theResource, ISearchParamFilter theSearchParamFilter); - SearchParamSet extractSearchParamComboUnique(String theResourceType, ResourceIndexedSearchParams theParams); + SearchParamSet extractSearchParamComboUnique( + String theResourceType, ResourceIndexedSearchParams theParams); - SearchParamSet extractSearchParamComboNonUnique(String theResourceType, ResourceIndexedSearchParams theParams); + SearchParamSet extractSearchParamComboNonUnique( + String theResourceType, ResourceIndexedSearchParams theParams); default SearchParamSet extractSearchParamUri(IBaseResource theResource) { return extractSearchParamUri(theResource, ALL_PARAMS); } - - SearchParamSet extractSearchParamUri(IBaseResource theResource, ISearchParamFilter theSearchParamFilter); + SearchParamSet extractSearchParamUri( + IBaseResource theResource, ISearchParamFilter theSearchParamFilter); SearchParamSet extractResourceLinks(IBaseResource theResource, boolean theWantLocalReferences); @@ -119,7 +131,8 @@ public interface ISearchParamExtractor { Date extractDateFromResource(IBase theValue, String thePath); - ResourceIndexedSearchParamToken createSearchParamForCoding(String theResourceType, RuntimeSearchParam theSearchParam, IBase theValue); + ResourceIndexedSearchParamToken createSearchParamForCoding( + String theResourceType, RuntimeSearchParam theSearchParam, IBase theValue); String getDisplayTextForCoding(IBase theValue); @@ -140,7 +153,6 @@ public interface ISearchParamExtractor { * desired, a new list must be created and returned. 
*/ Collection filterSearchParams(Collection theSearchParams); - } class SearchParamSet extends HashSet { @@ -160,8 +172,5 @@ public interface ISearchParamExtractor { } return myWarnings; } - } - - } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/LogicalReferenceHelper.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/LogicalReferenceHelper.java index 3f1a299b960..db6bf9bff5e 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/LogicalReferenceHelper.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/LogicalReferenceHelper.java @@ -43,7 +43,6 @@ public class LogicalReferenceHelper { } } } - } /* @@ -56,6 +55,4 @@ public class LogicalReferenceHelper { return false; } - - } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/ResourceIndexedSearchParamComposite.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/ResourceIndexedSearchParamComposite.java index da1bb8c9b4b..dfd41f27635 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/ResourceIndexedSearchParamComposite.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/ResourceIndexedSearchParamComposite.java @@ -74,11 +74,17 @@ public class ResourceIndexedSearchParamComposite { * @param theComponentSearchParam the component SP we are extracting * @param theExtractedParams index data extracted by the sub-extractor */ - public void addComponentIndexedSearchParams(RuntimeSearchParam theComponentSearchParam, ISearchParamExtractor.SearchParamSet theExtractedParams) { - addComponentIndexedSearchParams(theComponentSearchParam.getName(), theComponentSearchParam.getParamType(), theExtractedParams); + public void addComponentIndexedSearchParams( + RuntimeSearchParam theComponentSearchParam, + ISearchParamExtractor.SearchParamSet theExtractedParams) { + addComponentIndexedSearchParams( + theComponentSearchParam.getName(), theComponentSearchParam.getParamType(), theExtractedParams); } - public void addComponentIndexedSearchParams(String theComponentSearchParamName, RestSearchParameterTypeEnum theComponentSearchParamType, ISearchParamExtractor.SearchParamSet theExtractedParams) { + public void addComponentIndexedSearchParams( + String theComponentSearchParamName, + RestSearchParameterTypeEnum theComponentSearchParamType, + ISearchParamExtractor.SearchParamSet theExtractedParams) { myComponents.add(new Component(theComponentSearchParamName, theComponentSearchParamType, theExtractedParams)); } @@ -102,7 +108,10 @@ public class ResourceIndexedSearchParamComposite { */ private final ISearchParamExtractor.SearchParamSet myParamIndexValues; - private Component(String theComponentSearchParamName, RestSearchParameterTypeEnum theComponentSearchParamType, ISearchParamExtractor.SearchParamSet theParamIndexValues) { + private Component( + String theComponentSearchParamName, + RestSearchParameterTypeEnum theComponentSearchParamType, + ISearchParamExtractor.SearchParamSet theParamIndexValues) { mySearchParamName = theComponentSearchParamName; mySearchParameterType = theComponentSearchParamType; myParamIndexValues = theParamIndexValues; @@ -125,5 +134,4 @@ public class ResourceIndexedSearchParamComposite { return ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE); } } - } diff --git 
a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/ResourceIndexedSearchParams.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/ResourceIndexedSearchParams.java index 3ae88b2ca01..3af3d4c2986 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/ResourceIndexedSearchParams.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/ResourceIndexedSearchParams.java @@ -19,11 +19,10 @@ */ package ca.uhn.fhir.jpa.searchparam.extractor; - import ca.uhn.fhir.context.RuntimeSearchParam; import ca.uhn.fhir.jpa.model.config.PartitionSettings; -import ca.uhn.fhir.jpa.model.entity.*; import ca.uhn.fhir.jpa.model.entity.StorageSettings; +import ca.uhn.fhir.jpa.model.entity.*; import ca.uhn.fhir.jpa.model.util.UcumServiceUtil; import ca.uhn.fhir.jpa.searchparam.util.RuntimeSearchParamHelper; import ca.uhn.fhir.model.api.IQueryParameterType; @@ -34,7 +33,6 @@ import ca.uhn.fhir.rest.param.ReferenceParam; import ca.uhn.fhir.rest.server.util.ResourceSearchParams; import org.apache.commons.lang3.StringUtils; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -43,31 +41,32 @@ import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.function.Predicate; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.compare; import static org.apache.commons.lang3.StringUtils.isNotBlank; public final class ResourceIndexedSearchParams { - final public Collection myStringParams = new ArrayList<>(); - final public Collection myTokenParams = new HashSet<>(); - final public Collection myNumberParams = new ArrayList<>(); - final public Collection myQuantityParams = new ArrayList<>(); - final public Collection myQuantityNormalizedParams = new ArrayList<>(); - final public Collection myDateParams = new ArrayList<>(); - final public Collection myUriParams = new ArrayList<>(); - final public Collection myCoordsParams = new ArrayList<>(); + public final Collection myStringParams = new ArrayList<>(); + public final Collection myTokenParams = new HashSet<>(); + public final Collection myNumberParams = new ArrayList<>(); + public final Collection myQuantityParams = new ArrayList<>(); + public final Collection myQuantityNormalizedParams = + new ArrayList<>(); + public final Collection myDateParams = new ArrayList<>(); + public final Collection myUriParams = new ArrayList<>(); + public final Collection myCoordsParams = new ArrayList<>(); - final public Collection myComboStringUniques = new HashSet<>(); - final public Collection myComboTokenNonUnique = new HashSet<>(); - final public Collection myLinks = new HashSet<>(); - final public Set myPopulatedResourceLinkParameters = new HashSet<>(); - final public Collection mySearchParamPresentEntities = new HashSet<>(); - final public Collection myCompositeParams = new HashSet<>(); + public final Collection myComboStringUniques = new HashSet<>(); + public final Collection myComboTokenNonUnique = new HashSet<>(); + public final Collection myLinks = new HashSet<>(); + public final Set myPopulatedResourceLinkParameters = new HashSet<>(); + public final Collection mySearchParamPresentEntities = new HashSet<>(); + public final Collection myCompositeParams = new HashSet<>(); private static final Set myIgnoredParams = Set.of(Constants.PARAM_TEXT, Constants.PARAM_CONTENT); - public ResourceIndexedSearchParams() { - 
} + public ResourceIndexedSearchParams() {} public ResourceIndexedSearchParams(ResourceTable theEntity) { if (theEntity.isParamsStringPopulated()) { @@ -83,7 +82,7 @@ public final class ResourceIndexedSearchParams { myQuantityParams.addAll(theEntity.getParamsQuantity()); } if (theEntity.isParamsQuantityNormalizedPopulated()) { - myQuantityNormalizedParams.addAll(theEntity.getParamsQuantityNormalized()); + myQuantityNormalizedParams.addAll(theEntity.getParamsQuantityNormalized()); } if (theEntity.isParamsDatePopulated()) { myDateParams.addAll(theEntity.getParamsDate()); @@ -106,7 +105,6 @@ public final class ResourceIndexedSearchParams { } } - public Collection getResourceLinks() { return myLinks; } @@ -125,7 +123,6 @@ public final class ResourceIndexedSearchParams { theEntity.setHasLinks(myLinks.isEmpty() == false); } - public void populateResourceTableParamCollections(ResourceTable theEntity) { theEntity.setParamsString(myStringParams); theEntity.setParamsToken(myTokenParams); @@ -148,7 +145,7 @@ public final class ResourceIndexedSearchParams { updateSpnamePrefixForIndexOnUpliftedChain(theContainingType, myStringParams, theSpnamePrefix); updateSpnamePrefixForIndexOnUpliftedChain(theContainingType, myCoordsParams, theSpnamePrefix); } - + public void updateSpnamePrefixForLinksOnContainedResource(String theSpNamePrefix) { for (ResourceLink param : myLinks) { // The resource link already has the resource type of the contained resource at the head of the path. @@ -181,8 +178,11 @@ public final class ResourceIndexedSearchParams { } } - private void updateSpnamePrefixForIndexOnUpliftedChain(String theContainingType, Collection theParams, @Nonnull String theSpnamePrefix) { - + private void updateSpnamePrefixForIndexOnUpliftedChain( + String theContainingType, + Collection theParams, + @Nonnull String theSpnamePrefix) { + for (BaseResourceIndexedSearchParam param : theParams) { param.setResourceType(theContainingType); param.setParamName(theSpnamePrefix + "." 
+ param.getParamName()); @@ -191,13 +191,18 @@ public final class ResourceIndexedSearchParams { param.calculateHashes(); } } - + public Set getPopulatedResourceLinkParameters() { return myPopulatedResourceLinkParameters; } - public boolean matchParam(StorageSettings theStorageSettings, String theResourceName, String theParamName, RuntimeSearchParam theParamDef, IQueryParameterType theValue) { - + public boolean matchParam( + StorageSettings theStorageSettings, + String theResourceName, + String theParamName, + RuntimeSearchParam theParamDef, + IQueryParameterType theValue) { + if (theParamDef == null) { return false; } @@ -208,7 +213,9 @@ public final class ResourceIndexedSearchParams { resourceParams = myTokenParams; break; case QUANTITY: - if (theStorageSettings.getNormalizedQuantitySearchLevel().equals(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_SUPPORTED)) { + if (theStorageSettings + .getNormalizedQuantitySearchLevel() + .equals(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_SUPPORTED)) { QuantityParam quantity = QuantityParam.toQuantityParam(theValue); QuantityParam normalized = UcumServiceUtil.toCanonicalQuantityOrNull(quantity); if (normalized != null) { @@ -234,7 +241,12 @@ public final class ResourceIndexedSearchParams { resourceParams = myDateParams; break; case REFERENCE: - return matchResourceLinks(theStorageSettings, theResourceName, theParamName, value, theParamDef.getPathsSplitForResourceType(theResourceName)); + return matchResourceLinks( + theStorageSettings, + theResourceName, + theParamName, + value, + theParamDef.getPathsSplitForResourceType(theResourceName)); case COMPOSITE: case HAS: case SPECIAL: @@ -261,11 +273,17 @@ public final class ResourceIndexedSearchParams { */ // KHS This needs to be public as libraries outside of hapi call it directly @Deprecated - public boolean matchResourceLinks(String theResourceName, String theParamName, IQueryParameterType theParam, String theParamPath) { + public boolean matchResourceLinks( + String theResourceName, String theParamName, IQueryParameterType theParam, String theParamPath) { return matchResourceLinks(new StorageSettings(), theResourceName, theParamName, theParam, theParamPath); } - public boolean matchResourceLinks(StorageSettings theStorageSettings, String theResourceName, String theParamName, IQueryParameterType theParam, List theParamPaths) { + public boolean matchResourceLinks( + StorageSettings theStorageSettings, + String theResourceName, + String theParamName, + IQueryParameterType theParam, + List theParamPaths) { for (String nextPath : theParamPaths) { if (matchResourceLinks(theStorageSettings, theResourceName, theParamName, theParam, nextPath)) { return true; @@ -275,17 +293,23 @@ public final class ResourceIndexedSearchParams { } // KHS This needs to be public as libraries outside of hapi call it directly - public boolean matchResourceLinks(StorageSettings theStorageSettings, String theResourceName, String theParamName, IQueryParameterType theParam, String theParamPath) { + public boolean matchResourceLinks( + StorageSettings theStorageSettings, + String theResourceName, + String theParamName, + IQueryParameterType theParam, + String theParamPath) { ReferenceParam reference = (ReferenceParam) theParam; - Predicate namedParamPredicate = resourceLink -> - searchParameterPathMatches(theResourceName, resourceLink, theParamName, theParamPath) - && resourceIdMatches(theStorageSettings, resourceLink, reference); + Predicate namedParamPredicate = + resourceLink -> 
searchParameterPathMatches(theResourceName, resourceLink, theParamName, theParamPath) + && resourceIdMatches(theStorageSettings, resourceLink, reference); return myLinks.stream().anyMatch(namedParamPredicate); } - private boolean resourceIdMatches(StorageSettings theStorageSettings, ResourceLink theResourceLink, ReferenceParam theReference) { + private boolean resourceIdMatches( + StorageSettings theStorageSettings, ResourceLink theResourceLink, ReferenceParam theReference) { String baseUrl = theReference.getBaseUrl(); if (isNotBlank(baseUrl)) { if (!theStorageSettings.getTreatBaseUrlsAsLocal().contains(baseUrl)) { @@ -312,41 +336,92 @@ public final class ResourceIndexedSearchParams { return true; } - private boolean searchParameterPathMatches(String theResourceName, ResourceLink theResourceLink, String theParamName, String theParamPath) { + private boolean searchParameterPathMatches( + String theResourceName, ResourceLink theResourceLink, String theParamName, String theParamPath) { String sourcePath = theResourceLink.getSourcePath(); return sourcePath.equalsIgnoreCase(theParamPath); } @Override public String toString() { - return "ResourceIndexedSearchParams{" + - "stringParams=" + myStringParams + - ", tokenParams=" + myTokenParams + - ", numberParams=" + myNumberParams + - ", quantityParams=" + myQuantityParams + - ", quantityNormalizedParams=" + myQuantityNormalizedParams + - ", dateParams=" + myDateParams + - ", uriParams=" + myUriParams + - ", coordsParams=" + myCoordsParams + - ", comboStringUniques=" + myComboStringUniques + - ", comboTokenNonUniques=" + myComboTokenNonUnique + - ", links=" + myLinks + - '}'; + return "ResourceIndexedSearchParams{" + "stringParams=" + + myStringParams + ", tokenParams=" + + myTokenParams + ", numberParams=" + + myNumberParams + ", quantityParams=" + + myQuantityParams + ", quantityNormalizedParams=" + + myQuantityNormalizedParams + ", dateParams=" + + myDateParams + ", uriParams=" + + myUriParams + ", coordsParams=" + + myCoordsParams + ", comboStringUniques=" + + myComboStringUniques + ", comboTokenNonUniques=" + + myComboTokenNonUnique + ", links=" + + myLinks + '}'; } - public void findMissingSearchParams(PartitionSettings thePartitionSettings, StorageSettings theStorageSettings, ResourceTable theEntity, ResourceSearchParams theActiveSearchParams) { - findMissingSearchParams(thePartitionSettings, theStorageSettings, theEntity, theActiveSearchParams, RestSearchParameterTypeEnum.STRING, myStringParams); - findMissingSearchParams(thePartitionSettings, theStorageSettings, theEntity, theActiveSearchParams, RestSearchParameterTypeEnum.NUMBER, myNumberParams); - findMissingSearchParams(thePartitionSettings, theStorageSettings, theEntity, theActiveSearchParams, RestSearchParameterTypeEnum.QUANTITY, myQuantityParams); - findMissingSearchParams(thePartitionSettings, theStorageSettings, theEntity, theActiveSearchParams, RestSearchParameterTypeEnum.DATE, myDateParams); - findMissingSearchParams(thePartitionSettings, theStorageSettings, theEntity, theActiveSearchParams, RestSearchParameterTypeEnum.URI, myUriParams); - findMissingSearchParams(thePartitionSettings, theStorageSettings, theEntity, theActiveSearchParams, RestSearchParameterTypeEnum.TOKEN, myTokenParams); - findMissingSearchParams(thePartitionSettings, theStorageSettings, theEntity, theActiveSearchParams, RestSearchParameterTypeEnum.SPECIAL, myCoordsParams); + public void findMissingSearchParams( + PartitionSettings thePartitionSettings, + StorageSettings theStorageSettings, + ResourceTable 
theEntity, + ResourceSearchParams theActiveSearchParams) { + findMissingSearchParams( + thePartitionSettings, + theStorageSettings, + theEntity, + theActiveSearchParams, + RestSearchParameterTypeEnum.STRING, + myStringParams); + findMissingSearchParams( + thePartitionSettings, + theStorageSettings, + theEntity, + theActiveSearchParams, + RestSearchParameterTypeEnum.NUMBER, + myNumberParams); + findMissingSearchParams( + thePartitionSettings, + theStorageSettings, + theEntity, + theActiveSearchParams, + RestSearchParameterTypeEnum.QUANTITY, + myQuantityParams); + findMissingSearchParams( + thePartitionSettings, + theStorageSettings, + theEntity, + theActiveSearchParams, + RestSearchParameterTypeEnum.DATE, + myDateParams); + findMissingSearchParams( + thePartitionSettings, + theStorageSettings, + theEntity, + theActiveSearchParams, + RestSearchParameterTypeEnum.URI, + myUriParams); + findMissingSearchParams( + thePartitionSettings, + theStorageSettings, + theEntity, + theActiveSearchParams, + RestSearchParameterTypeEnum.TOKEN, + myTokenParams); + findMissingSearchParams( + thePartitionSettings, + theStorageSettings, + theEntity, + theActiveSearchParams, + RestSearchParameterTypeEnum.SPECIAL, + myCoordsParams); } @SuppressWarnings("unchecked") - private void findMissingSearchParams(PartitionSettings thePartitionSettings, StorageSettings theStorageSettings, ResourceTable theEntity, ResourceSearchParams activeSearchParams, RestSearchParameterTypeEnum type, - Collection paramCollection) { + private void findMissingSearchParams( + PartitionSettings thePartitionSettings, + StorageSettings theStorageSettings, + ResourceTable theEntity, + ResourceSearchParams activeSearchParams, + RestSearchParameterTypeEnum type, + Collection paramCollection) { for (String nextParamName : activeSearchParams.getSearchParamNames()) { if (nextParamName == null || myIgnoredParams.contains(nextParamName)) { continue; @@ -379,8 +454,7 @@ public final class ResourceIndexedSearchParams { param = new ResourceIndexedSearchParamQuantity(); break; case STRING: - param = new ResourceIndexedSearchParamString() - .setStorageSettings(theStorageSettings); + param = new ResourceIndexedSearchParamString().setStorageSettings(theStorageSettings); break; case TOKEN: param = new ResourceIndexedSearchParamToken(); @@ -435,7 +509,8 @@ public final class ResourceIndexedSearchParams { * @param theResourceType E.g. Patient * @param thePartsChoices E.g. 
[[gender=male], [name=SMITH, name=JOHN]] */ - public static Set extractCompositeStringUniquesValueChains(String theResourceType, List> thePartsChoices) { + public static Set extractCompositeStringUniquesValueChains( + String theResourceType, List> thePartsChoices) { for (List next : thePartsChoices) { next.removeIf(StringUtils::isBlank); @@ -469,13 +544,21 @@ public final class ResourceIndexedSearchParams { return queryStringsToPopulate; } - private static void extractCompositeStringUniquesValueChains(String theResourceType, List> thePartsChoices, List theValues, Set theQueryStringsToPopulate) { + private static void extractCompositeStringUniquesValueChains( + String theResourceType, + List> thePartsChoices, + List theValues, + Set theQueryStringsToPopulate) { if (thePartsChoices.size() > 0) { List nextList = thePartsChoices.get(0); Collections.sort(nextList); for (String nextChoice : nextList) { theValues.add(nextChoice); - extractCompositeStringUniquesValueChains(theResourceType, thePartsChoices.subList(1, thePartsChoices.size()), theValues, theQueryStringsToPopulate); + extractCompositeStringUniquesValueChains( + theResourceType, + thePartsChoices.subList(1, thePartsChoices.size()), + theValues, + theQueryStringsToPopulate); theValues.remove(theValues.size() - 1); } } else { @@ -492,6 +575,4 @@ public final class ResourceIndexedSearchParams { } } } - - } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorDstu2.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorDstu2.java index 6a953fe9beb..66d1bb1a047 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorDstu2.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorDstu2.java @@ -22,8 +22,8 @@ package ca.uhn.fhir.jpa.searchparam.extractor; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.entity.StorageSettings; -import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.model.dstu2.composite.ContactPointDt; +import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.util.FhirTerser; import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseDatatype; @@ -34,13 +34,16 @@ import java.util.List; public class SearchParamExtractorDstu2 extends BaseSearchParamExtractor implements ISearchParamExtractor { - public SearchParamExtractorDstu2() { - } + public SearchParamExtractorDstu2() {} /** * Constructor for unit tests */ - SearchParamExtractorDstu2(StorageSettings theStorageSettings, PartitionSettings thePartitionSettings, FhirContext theCtx, ISearchParamRegistry theSearchParamRegistry) { + SearchParamExtractorDstu2( + StorageSettings theStorageSettings, + PartitionSettings thePartitionSettings, + FhirContext theCtx, + ISearchParamRegistry theSearchParamRegistry) { super(theStorageSettings, thePartitionSettings, theCtx, theSearchParamRegistry); start(); } @@ -85,5 +88,4 @@ public class SearchParamExtractorDstu2 extends BaseSearchParamExtractor implemen return values; }; } - } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorDstu3.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorDstu3.java index 9b1ee2e0e65..a7e94c3ef19 100644 --- 
a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorDstu3.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorDstu3.java @@ -30,9 +30,9 @@ import org.hl7.fhir.dstu3.model.Base; import org.hl7.fhir.dstu3.utils.FHIRPathEngine; import org.hl7.fhir.instance.model.api.IBase; -import javax.annotation.PostConstruct; import java.util.ArrayList; import java.util.List; +import javax.annotation.PostConstruct; public class SearchParamExtractorDstu3 extends BaseSearchParamExtractor implements ISearchParamExtractor { @@ -47,7 +47,11 @@ public class SearchParamExtractorDstu3 extends BaseSearchParamExtractor implemen // This constructor is used by tests @VisibleForTesting - public SearchParamExtractorDstu3(StorageSettings theStorageSettings, PartitionSettings thePartitionSettings, FhirContext theCtx, ISearchParamRegistry theSearchParamRegistry) { + public SearchParamExtractorDstu3( + StorageSettings theStorageSettings, + PartitionSettings thePartitionSettings, + FhirContext theCtx, + ISearchParamRegistry theSearchParamRegistry) { super(theStorageSettings, thePartitionSettings, theCtx, theSearchParamRegistry); initFhirPathEngine(); start(); @@ -66,7 +70,6 @@ public class SearchParamExtractorDstu3 extends BaseSearchParamExtractor implemen }; } - @Override @PostConstruct public void start() { @@ -80,5 +83,4 @@ public class SearchParamExtractorDstu3 extends BaseSearchParamExtractor implemen IWorkerContext worker = new HapiWorkerContext(getContext(), getContext().getValidationSupport()); myFhirPathEngine = new FHIRPathEngine(worker); } - } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorR4.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorR4.java index d36a8c616f8..32a22686730 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorR4.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorR4.java @@ -31,7 +31,6 @@ import org.hl7.fhir.exceptions.PathEngineException; import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.r4.context.IWorkerContext; import org.hl7.fhir.r4.hapi.ctx.HapiWorkerContext; -import org.hl7.fhir.r4.utils.FHIRPathEngine; import org.hl7.fhir.r4.model.Base; import org.hl7.fhir.r4.model.ExpressionNode; import org.hl7.fhir.r4.model.IdType; @@ -41,12 +40,12 @@ import org.hl7.fhir.r4.model.TypeDetails; import org.hl7.fhir.r4.model.ValueSet; import org.hl7.fhir.r4.utils.FHIRPathEngine; -import javax.annotation.PostConstruct; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; +import javax.annotation.PostConstruct; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -64,7 +63,11 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements // This constructor is used by tests @VisibleForTesting - public SearchParamExtractorR4(StorageSettings theStorageSettings, PartitionSettings thePartitionSettings, FhirContext theCtx, ISearchParamRegistry theSearchParamRegistry) { + public SearchParamExtractorR4( + StorageSettings theStorageSettings, + PartitionSettings thePartitionSettings, + FhirContext theCtx, + ISearchParamRegistry theSearchParamRegistry) { super(theStorageSettings, thePartitionSettings, theCtx, 
theSearchParamRegistry); initFhirPath(); start(); @@ -74,11 +77,11 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements public IValueExtractor getPathValueExtractor(IBase theResource, String theSinglePath) { return () -> { ExpressionNode parsed = myParsedFhirPathCache.get(theSinglePath, path -> myFhirPathEngine.parse(path)); - return myFhirPathEngine.evaluate(theResource, (Base) theResource, (Base) theResource, (Base) theResource, parsed); + return myFhirPathEngine.evaluate( + theResource, (Base) theResource, (Base) theResource, (Base) theResource, parsed); }; } - @Override @PostConstruct public void start() { @@ -96,13 +99,13 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements myParsedFhirPathCache = CacheFactory.build(TimeUnit.MINUTES.toMillis(10)); } - private class SearchParamExtractorR4HostServices implements FHIRPathEngine.IEvaluationContext { private final Map myResourceTypeToStub = Collections.synchronizedMap(new HashMap<>()); @Override - public List resolveConstant(Object appContext, String name, boolean beforeContext) throws PathEngineException { + public List resolveConstant(Object appContext, String name, boolean beforeContext) + throws PathEngineException { return Collections.emptyList(); } @@ -122,16 +125,17 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements } @Override - public TypeDetails checkFunction(Object appContext, String functionName, List parameters) throws PathEngineException { + public TypeDetails checkFunction(Object appContext, String functionName, List parameters) + throws PathEngineException { return null; } @Override - public List executeFunction(Object appContext, List focus, String functionName, List> parameters) { + public List executeFunction( + Object appContext, List focus, String functionName, List> parameters) { return null; } - @Override public Base resolveReference(Object theAppContext, String theUrl, Base theRefContext) throws FHIRException { Base retVal = resolveResourceInBundleWithPlaceholderId(theAppContext, theUrl); @@ -173,7 +177,6 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements public String fhirType() { return url.getResourceType(); } - }; myResourceTypeToStub.put(url.getResourceType(), retVal); } @@ -191,5 +194,4 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements return null; } } - } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorR4B.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorR4B.java index 9fa6813c02d..749df636d95 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorR4B.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorR4B.java @@ -40,12 +40,12 @@ import org.hl7.fhir.r4b.model.TypeDetails; import org.hl7.fhir.r4b.model.ValueSet; import org.hl7.fhir.r4b.utils.FHIRPathEngine; -import javax.annotation.PostConstruct; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; +import javax.annotation.PostConstruct; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -63,7 +63,11 @@ public class SearchParamExtractorR4B extends BaseSearchParamExtractor implements // This constructor is used by tests @VisibleForTesting - public 
SearchParamExtractorR4B(StorageSettings theStorageSettings, PartitionSettings thePartitionSettings, FhirContext theCtx, ISearchParamRegistry theSearchParamRegistry) { + public SearchParamExtractorR4B( + StorageSettings theStorageSettings, + PartitionSettings thePartitionSettings, + FhirContext theCtx, + ISearchParamRegistry theSearchParamRegistry) { super(theStorageSettings, thePartitionSettings, theCtx, theSearchParamRegistry); initFhirPath(); start(); @@ -73,11 +77,11 @@ public class SearchParamExtractorR4B extends BaseSearchParamExtractor implements public IValueExtractor getPathValueExtractor(IBase theResource, String theSinglePath) { return () -> { ExpressionNode parsed = myParsedFhirPathCache.get(theSinglePath, path -> myFhirPathEngine.parse(path)); - return myFhirPathEngine.evaluate(theResource, (Base) theResource, (Base) theResource, (Base) theResource, parsed); + return myFhirPathEngine.evaluate( + theResource, (Base) theResource, (Base) theResource, (Base) theResource, parsed); }; } - @Override @PostConstruct public void start() { @@ -95,13 +99,13 @@ public class SearchParamExtractorR4B extends BaseSearchParamExtractor implements myParsedFhirPathCache = CacheFactory.build(TimeUnit.MINUTES.toMillis(10)); } - private class SearchParamExtractorR4BHostServices implements FHIRPathEngine.IEvaluationContext { private final Map myResourceTypeToStub = Collections.synchronizedMap(new HashMap<>()); @Override - public List resolveConstant(Object appContext, String name, boolean beforeContext) throws PathEngineException { + public List resolveConstant(Object appContext, String name, boolean beforeContext) + throws PathEngineException { return Collections.emptyList(); } @@ -121,16 +125,17 @@ public class SearchParamExtractorR4B extends BaseSearchParamExtractor implements } @Override - public TypeDetails checkFunction(Object appContext, String functionName, List parameters) throws PathEngineException { + public TypeDetails checkFunction(Object appContext, String functionName, List parameters) + throws PathEngineException { return null; } @Override - public List executeFunction(Object appContext, List focus, String functionName, List> parameters) { + public List executeFunction( + Object appContext, List focus, String functionName, List> parameters) { return null; } - @Override public Base resolveReference(Object theAppContext, String theUrl, Base refContext) throws FHIRException { Base retVal = resolveResourceInBundleWithPlaceholderId(theAppContext, theUrl); @@ -172,7 +177,6 @@ public class SearchParamExtractorR4B extends BaseSearchParamExtractor implements public String fhirType() { return url.getResourceType(); } - }; myResourceTypeToStub.put(url.getResourceType(), retVal); } @@ -190,5 +194,4 @@ public class SearchParamExtractorR4B extends BaseSearchParamExtractor implements return null; } } - } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorR5.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorR5.java index 8a8e6dba54a..142432415f1 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorR5.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorR5.java @@ -25,12 +25,9 @@ import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; import ca.uhn.fhir.sl.cache.Cache; import 
ca.uhn.fhir.sl.cache.CacheFactory; -import ca.uhn.fhir.util.BundleUtil; -import ca.uhn.fhir.util.bundle.BundleEntryParts; import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.exceptions.PathEngineException; import org.hl7.fhir.instance.model.api.IBase; -import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.r5.context.IWorkerContext; import org.hl7.fhir.r5.hapi.ctx.HapiWorkerContext; import org.hl7.fhir.r5.model.Base; @@ -42,12 +39,12 @@ import org.hl7.fhir.r5.model.TypeDetails; import org.hl7.fhir.r5.model.ValueSet; import org.hl7.fhir.r5.utils.FHIRPathEngine; -import javax.annotation.PostConstruct; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; +import javax.annotation.PostConstruct; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -63,7 +60,11 @@ public class SearchParamExtractorR5 extends BaseSearchParamExtractor implements /** * Constructor for unit tests */ - public SearchParamExtractorR5(StorageSettings theStorageSettings, PartitionSettings thePartitionSettings, FhirContext theCtx, ISearchParamRegistry theSearchParamRegistry) { + public SearchParamExtractorR5( + StorageSettings theStorageSettings, + PartitionSettings thePartitionSettings, + FhirContext theCtx, + ISearchParamRegistry theSearchParamRegistry) { super(theStorageSettings, thePartitionSettings, theCtx, theSearchParamRegistry); initFhirPath(); start(); @@ -82,7 +83,7 @@ public class SearchParamExtractorR5 extends BaseSearchParamExtractor implements IWorkerContext worker = new HapiWorkerContext(getContext(), getContext().getValidationSupport()); myFhirPathEngine = new FHIRPathEngine(worker); myFhirPathEngine.setHostServices(new SearchParamExtractorR5HostServices()); - + myParsedFhirPathCache = CacheFactory.build(TimeUnit.MINUTES.toMillis(10)); } @@ -90,17 +91,18 @@ public class SearchParamExtractorR5 extends BaseSearchParamExtractor implements public IValueExtractor getPathValueExtractor(IBase theResource, String theSinglePath) { return () -> { ExpressionNode parsed = myParsedFhirPathCache.get(theSinglePath, path -> myFhirPathEngine.parse(path)); - return myFhirPathEngine.evaluate(theResource, (Base) theResource, (Base) theResource, (Base) theResource, parsed); + return myFhirPathEngine.evaluate( + theResource, (Base) theResource, (Base) theResource, (Base) theResource, parsed); }; } - private class SearchParamExtractorR5HostServices implements FHIRPathEngine.IEvaluationContext { private final Map myResourceTypeToStub = Collections.synchronizedMap(new HashMap<>()); @Override - public List resolveConstant(Object appContext, String name, boolean beforeContext) throws PathEngineException { + public List resolveConstant(Object appContext, String name, boolean beforeContext) + throws PathEngineException { return Collections.emptyList(); } @@ -120,12 +122,14 @@ public class SearchParamExtractorR5 extends BaseSearchParamExtractor implements } @Override - public TypeDetails checkFunction(Object appContext, String functionName, List parameters) throws PathEngineException { + public TypeDetails checkFunction(Object appContext, String functionName, List parameters) + throws PathEngineException { return null; } @Override - public List executeFunction(Object appContext, List focus, String functionName, List> parameters) { + public List executeFunction( + Object appContext, List focus, String functionName, List> parameters) { return null; } @@ -170,7 +174,6 @@ public class SearchParamExtractorR5 
extends BaseSearchParamExtractor implements public String fhirType() { return url.getResourceType(); } - }; myResourceTypeToStub.put(url.getResourceType(), retVal); } @@ -187,7 +190,5 @@ public class SearchParamExtractorR5 extends BaseSearchParamExtractor implements public ValueSet resolveValueSet(Object theO, String theS) { return null; } - } - } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorService.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorService.java index 9a66cfb0e81..3b39ccc7df6 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorService.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorService.java @@ -66,8 +66,6 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Collection; import java.util.Date; @@ -78,24 +76,33 @@ import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; public class SearchParamExtractorService { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SearchParamExtractorService.class); + @Autowired private ISearchParamExtractor mySearchParamExtractor; + @Autowired private IInterceptorBroadcaster myInterceptorBroadcaster; + @Autowired private StorageSettings myStorageSettings; + @Autowired private FhirContext myContext; + @Autowired private ISearchParamRegistry mySearchParamRegistry; + @Autowired private PartitionSettings myPartitionSettings; + @Autowired(required = false) private IResourceLinkResolver myResourceLinkResolver; @@ -104,9 +111,24 @@ public class SearchParamExtractorService { mySearchParamExtractor = theSearchParamExtractor; } - - public void extractFromResource(RequestPartitionId theRequestPartitionId, RequestDetails theRequestDetails, ResourceIndexedSearchParams theParams, ResourceTable theEntity, IBaseResource theResource, TransactionDetails theTransactionDetails, boolean theFailOnInvalidReference) { - extractFromResource(theRequestPartitionId, theRequestDetails, theParams, new ResourceIndexedSearchParams(), theEntity, theResource, theTransactionDetails, theFailOnInvalidReference, ISearchParamExtractor.ALL_PARAMS); + public void extractFromResource( + RequestPartitionId theRequestPartitionId, + RequestDetails theRequestDetails, + ResourceIndexedSearchParams theParams, + ResourceTable theEntity, + IBaseResource theResource, + TransactionDetails theTransactionDetails, + boolean theFailOnInvalidReference) { + extractFromResource( + theRequestPartitionId, + theRequestDetails, + theParams, + new ResourceIndexedSearchParams(), + theEntity, + theResource, + theTransactionDetails, + theFailOnInvalidReference, + ISearchParamExtractor.ALL_PARAMS); } /** @@ -115,36 +137,70 @@ public class SearchParamExtractorService { * a given resource type, it extracts the associated indexes and populates * {@literal theParams}. 
*/ - public void extractFromResource(RequestPartitionId theRequestPartitionId, RequestDetails theRequestDetails, ResourceIndexedSearchParams theNewParams, ResourceIndexedSearchParams theExistingParams, ResourceTable theEntity, IBaseResource theResource, TransactionDetails theTransactionDetails, boolean theFailOnInvalidReference, @Nonnull ISearchParamExtractor.ISearchParamFilter theSearchParamFilter) { + public void extractFromResource( + RequestPartitionId theRequestPartitionId, + RequestDetails theRequestDetails, + ResourceIndexedSearchParams theNewParams, + ResourceIndexedSearchParams theExistingParams, + ResourceTable theEntity, + IBaseResource theResource, + TransactionDetails theTransactionDetails, + boolean theFailOnInvalidReference, + @Nonnull ISearchParamExtractor.ISearchParamFilter theSearchParamFilter) { // All search parameter types except Reference ResourceIndexedSearchParams normalParams = new ResourceIndexedSearchParams(); extractSearchIndexParameters(theRequestDetails, normalParams, theResource, theSearchParamFilter); mergeParams(normalParams, theNewParams); boolean indexOnContainedResources = myStorageSettings.isIndexOnContainedResources(); - ISearchParamExtractor.SearchParamSet indexedReferences = mySearchParamExtractor.extractResourceLinks(theResource, indexOnContainedResources); + ISearchParamExtractor.SearchParamSet indexedReferences = + mySearchParamExtractor.extractResourceLinks(theResource, indexOnContainedResources); SearchParamExtractorService.handleWarnings(theRequestDetails, myInterceptorBroadcaster, indexedReferences); if (indexOnContainedResources) { ResourceIndexedSearchParams containedParams = new ResourceIndexedSearchParams(); - extractSearchIndexParametersForContainedResources(theRequestDetails, containedParams, theResource, theEntity, indexedReferences); + extractSearchIndexParametersForContainedResources( + theRequestDetails, containedParams, theResource, theEntity, indexedReferences); mergeParams(containedParams, theNewParams); } if (myStorageSettings.isIndexOnUpliftedRefchains()) { ResourceIndexedSearchParams containedParams = new ResourceIndexedSearchParams(); - extractSearchIndexParametersForUpliftedRefchains(theRequestDetails, containedParams, theEntity, theRequestPartitionId, theTransactionDetails, indexedReferences); + extractSearchIndexParametersForUpliftedRefchains( + theRequestDetails, + containedParams, + theEntity, + theRequestPartitionId, + theTransactionDetails, + indexedReferences); mergeParams(containedParams, theNewParams); } - // Do this after, because we add to strings during both string and token processing, and contained resource if any + // Do this after, because we add to strings during both string and token processing, and contained resource if + // any populateResourceTables(theNewParams, theEntity); // Reference search parameters - extractResourceLinks(theRequestPartitionId, theExistingParams, theNewParams, theEntity, theResource, theTransactionDetails, theFailOnInvalidReference, theRequestDetails, indexedReferences); + extractResourceLinks( + theRequestPartitionId, + theExistingParams, + theNewParams, + theEntity, + theResource, + theTransactionDetails, + theFailOnInvalidReference, + theRequestDetails, + indexedReferences); if (indexOnContainedResources) { - extractResourceLinksForContainedResources(theRequestPartitionId, theNewParams, theEntity, theResource, theTransactionDetails, theFailOnInvalidReference, theRequestDetails); + extractResourceLinksForContainedResources( + theRequestPartitionId, + theNewParams, + theEntity, + 
theResource, + theTransactionDetails, + theFailOnInvalidReference, + theRequestDetails); } // Missing (:missing) Indexes - These are indexes to satisfy the :missing @@ -165,7 +221,8 @@ public class SearchParamExtractorService { }); // Everything else - ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams(theEntity.getResourceType()); + ResourceSearchParams activeSearchParams = + mySearchParamRegistry.getActiveSearchParams(theEntity.getResourceType()); theNewParams.findMissingSearchParams(myPartitionSettings, myStorageSettings, theEntity, activeSearchParams); } @@ -177,7 +234,8 @@ public class SearchParamExtractorService { } @Nonnull - private Map getReferenceSearchParamPresenceMap(ResourceTable entity, ResourceIndexedSearchParams newParams) { + private Map getReferenceSearchParamPresenceMap( + ResourceTable entity, ResourceIndexedSearchParams newParams) { Map retval = new HashMap<>(); for (String nextKey : newParams.getPopulatedResourceLinkParameters()) { @@ -185,13 +243,10 @@ public class SearchParamExtractorService { } ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams(entity.getResourceType()); - activeSearchParams - .getReferenceSearchParamNames() - .forEach(key -> retval.putIfAbsent(key, Boolean.FALSE)); + activeSearchParams.getReferenceSearchParamNames().forEach(key -> retval.putIfAbsent(key, Boolean.FALSE)); return retval; } - @VisibleForTesting public void setStorageSettings(StorageSettings theStorageSettings) { myStorageSettings = theStorageSettings; @@ -203,7 +258,12 @@ public class SearchParamExtractorService { * a String index on the Patient with paramName="organization.name" and * value="Org Name" */ - private void extractSearchIndexParametersForContainedResources(RequestDetails theRequestDetails, ResourceIndexedSearchParams theParams, IBaseResource theResource, ResourceTable theEntity, ISearchParamExtractor.SearchParamSet theIndexedReferences) { + private void extractSearchIndexParametersForContainedResources( + RequestDetails theRequestDetails, + ResourceIndexedSearchParams theParams, + IBaseResource theResource, + ResourceTable theEntity, + ISearchParamExtractor.SearchParamSet theIndexedReferences) { FhirTerser terser = myContext.newTerser(); @@ -231,7 +291,15 @@ public class SearchParamExtractorService { } }; boolean recurse = myStorageSettings.isIndexOnContainedResourcesRecursively(); - extractSearchIndexParametersForTargetResources(theRequestDetails, theParams, theEntity, new HashSet<>(), strategy, theIndexedReferences, recurse, true); + extractSearchIndexParametersForTargetResources( + theRequestDetails, + theParams, + theEntity, + new HashSet<>(), + strategy, + theIndexedReferences, + recurse, + true); } /** @@ -241,22 +309,28 @@ public class SearchParamExtractorService { * on the "name" SearchParameter, we might extract a String index * on the Patient with paramName="organization.name" and value="Org Name" */ - private void extractSearchIndexParametersForUpliftedRefchains(RequestDetails theRequestDetails, ResourceIndexedSearchParams theParams, ResourceTable theEntity, RequestPartitionId theRequestPartitionId, TransactionDetails theTransactionDetails, ISearchParamExtractor.SearchParamSet theIndexedReferences) { + private void extractSearchIndexParametersForUpliftedRefchains( + RequestDetails theRequestDetails, + ResourceIndexedSearchParams theParams, + ResourceTable theEntity, + RequestPartitionId theRequestPartitionId, + TransactionDetails theTransactionDetails, + ISearchParamExtractor.SearchParamSet 
theIndexedReferences) { IChainedSearchParameterExtractionStrategy strategy = new IChainedSearchParameterExtractionStrategy() { @Nonnull @Override public ISearchParamExtractor.ISearchParamFilter getSearchParamFilter(@Nonnull PathAndRef thePathAndRef) { String searchParamName = thePathAndRef.getSearchParamName(); - RuntimeSearchParam searchParam = mySearchParamRegistry.getActiveSearchParam(theEntity.getResourceType(), searchParamName); + RuntimeSearchParam searchParam = + mySearchParamRegistry.getActiveSearchParam(theEntity.getResourceType(), searchParamName); Set upliftRefchainCodes = searchParam.getUpliftRefchainCodes(); if (upliftRefchainCodes.isEmpty()) { return ISearchParamExtractor.NO_PARAMS; } - return sp -> sp - .stream() - .filter(t -> upliftRefchainCodes.contains(t.getName())) - .collect(Collectors.toList()); + return sp -> sp.stream() + .filter(t -> upliftRefchainCodes.contains(t.getName())) + .collect(Collectors.toList()); } @Override @@ -277,19 +351,28 @@ public class SearchParamExtractorService { // And the usual case is that the reference points to a resource // elsewhere in the repository, so we load it - if (resolvedResource == null && myResourceLinkResolver != null && !reference.getValue().startsWith("urn:uuid:")) { + if (resolvedResource == null + && myResourceLinkResolver != null + && !reference.getValue().startsWith("urn:uuid:")) { RequestPartitionId targetRequestPartitionId = determineResolverPartitionId(theRequestPartitionId); - resolvedResource = myResourceLinkResolver.loadTargetResource(targetRequestPartitionId, theEntity.getResourceType(), thePathAndRef, theRequestDetails, theTransactionDetails); + resolvedResource = myResourceLinkResolver.loadTargetResource( + targetRequestPartitionId, + theEntity.getResourceType(), + thePathAndRef, + theRequestDetails, + theTransactionDetails); if (resolvedResource != null) { ourLog.trace("Found target: {}", resolvedResource.getIdElement()); - theTransactionDetails.addResolvedResource(thePathAndRef.getRef().getReferenceElement(), resolvedResource); + theTransactionDetails.addResolvedResource( + thePathAndRef.getRef().getReferenceElement(), resolvedResource); } } return resolvedResource; } }; - extractSearchIndexParametersForTargetResources(theRequestDetails, theParams, theEntity, new HashSet<>(), strategy, theIndexedReferences, false, false); + extractSearchIndexParametersForTargetResources( + theRequestDetails, theParams, theEntity, new HashSet<>(), strategy, theIndexedReferences, false, false); } /** @@ -305,7 +388,15 @@ public class SearchParamExtractorService { * @see #extractSearchIndexParametersForContainedResources(RequestDetails, ResourceIndexedSearchParams, IBaseResource, ResourceTable, ISearchParamExtractor.SearchParamSet) * @see #extractSearchIndexParametersForUpliftedRefchains(RequestDetails, ResourceIndexedSearchParams, ResourceTable, RequestPartitionId, TransactionDetails, ISearchParamExtractor.SearchParamSet) */ - private void extractSearchIndexParametersForTargetResources(RequestDetails theRequestDetails, ResourceIndexedSearchParams theParams, ResourceTable theEntity, Collection theAlreadySeenResources, IChainedSearchParameterExtractionStrategy theTargetIndexingStrategy, ISearchParamExtractor.SearchParamSet theIndexedReferences, boolean theRecurse, boolean theIndexOnContainedResources) { + private void extractSearchIndexParametersForTargetResources( + RequestDetails theRequestDetails, + ResourceIndexedSearchParams theParams, + ResourceTable theEntity, + Collection theAlreadySeenResources, + 
IChainedSearchParameterExtractionStrategy theTargetIndexingStrategy, + ISearchParamExtractor.SearchParamSet theIndexedReferences, + boolean theRecurse, + boolean theIndexOnContainedResources) { // 2. Find referenced search parameters String spnamePrefix; @@ -319,17 +410,18 @@ public class SearchParamExtractorService { continue; // 3.1.2 check if this ref actually applies here - ISearchParamExtractor.ISearchParamFilter searchParamsToIndex = theTargetIndexingStrategy.getSearchParamFilter(nextPathAndRef); + ISearchParamExtractor.ISearchParamFilter searchParamsToIndex = + theTargetIndexingStrategy.getSearchParamFilter(nextPathAndRef); if (searchParamsToIndex == ISearchParamExtractor.NO_PARAMS) { continue; } // 3.2 find the target resource IBaseResource targetResource = theTargetIndexingStrategy.fetchResourceAtPath(nextPathAndRef); - if (targetResource == null) - continue; + if (targetResource == null) continue; - // 3.2.1 if we've already processed this resource upstream, do not process it again, to prevent infinite loops + // 3.2.1 if we've already processed this resource upstream, do not process it again, to prevent infinite + // loops if (theAlreadySeenResources.contains(targetResource)) { continue; } @@ -349,16 +441,27 @@ public class SearchParamExtractorService { HashSet nextAlreadySeenResources = new HashSet<>(theAlreadySeenResources); nextAlreadySeenResources.add(targetResource); - ISearchParamExtractor.SearchParamSet indexedReferences = mySearchParamExtractor.extractResourceLinks(targetResource, theIndexOnContainedResources); - SearchParamExtractorService.handleWarnings(theRequestDetails, myInterceptorBroadcaster, indexedReferences); + ISearchParamExtractor.SearchParamSet indexedReferences = + mySearchParamExtractor.extractResourceLinks(targetResource, theIndexOnContainedResources); + SearchParamExtractorService.handleWarnings( + theRequestDetails, myInterceptorBroadcaster, indexedReferences); - extractSearchIndexParametersForTargetResources(theRequestDetails, currParams, theEntity, nextAlreadySeenResources, theTargetIndexingStrategy, indexedReferences, true, theIndexOnContainedResources); + extractSearchIndexParametersForTargetResources( + theRequestDetails, + currParams, + theEntity, + nextAlreadySeenResources, + theTargetIndexingStrategy, + indexedReferences, + true, + theIndexOnContainedResources); } // 3.5 added reference name as a prefix for the contained resource if any // e.g. 
for Observation.subject contained reference // the SP_NAME = subject.family - currParams.updateSpnamePrefixForIndexOnUpliftedChain(theEntity.getResourceType(), nextPathAndRef.getSearchParamName()); + currParams.updateSpnamePrefixForIndexOnUpliftedChain( + theEntity.getResourceType(), nextPathAndRef.getSearchParamName()); // 3.6 merge to the mainParams // NOTE: the spname prefix is different @@ -368,8 +471,7 @@ public class SearchParamExtractorService { private IBaseResource findContainedResource(Collection resources, IBaseReference reference) { for (IBaseResource resource : resources) { - if (resource.getIdElement().equals(reference.getReferenceElement())) - return resource; + if (resource.getIdElement().equals(reference.getReferenceElement())) return resource; } return null; } @@ -387,42 +489,58 @@ public class SearchParamExtractorService { theTargetParams.myCompositeParams.addAll(theSrcParams.myCompositeParams); } - void extractSearchIndexParameters(RequestDetails theRequestDetails, ResourceIndexedSearchParams theParams, IBaseResource theResource, @Nonnull ISearchParamExtractor.ISearchParamFilter theSearchParamFilter) { + void extractSearchIndexParameters( + RequestDetails theRequestDetails, + ResourceIndexedSearchParams theParams, + IBaseResource theResource, + @Nonnull ISearchParamExtractor.ISearchParamFilter theSearchParamFilter) { // Strings - ISearchParamExtractor.SearchParamSet strings = extractSearchParamStrings(theResource, theSearchParamFilter); + ISearchParamExtractor.SearchParamSet strings = + extractSearchParamStrings(theResource, theSearchParamFilter); handleWarnings(theRequestDetails, myInterceptorBroadcaster, strings); theParams.myStringParams.addAll(strings); // Numbers - ISearchParamExtractor.SearchParamSet numbers = extractSearchParamNumber(theResource, theSearchParamFilter); + ISearchParamExtractor.SearchParamSet numbers = + extractSearchParamNumber(theResource, theSearchParamFilter); handleWarnings(theRequestDetails, myInterceptorBroadcaster, numbers); theParams.myNumberParams.addAll(numbers); // Quantities - ISearchParamExtractor.SearchParamSet quantities = extractSearchParamQuantity(theResource, theSearchParamFilter); + ISearchParamExtractor.SearchParamSet quantities = + extractSearchParamQuantity(theResource, theSearchParamFilter); handleWarnings(theRequestDetails, myInterceptorBroadcaster, quantities); theParams.myQuantityParams.addAll(quantities); - if (myStorageSettings.getNormalizedQuantitySearchLevel().equals(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_STORAGE_SUPPORTED) || myStorageSettings.getNormalizedQuantitySearchLevel().equals(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_SUPPORTED)) { - ISearchParamExtractor.SearchParamSet quantitiesNormalized = extractSearchParamQuantityNormalized(theResource, theSearchParamFilter); + if (myStorageSettings + .getNormalizedQuantitySearchLevel() + .equals(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_STORAGE_SUPPORTED) + || myStorageSettings + .getNormalizedQuantitySearchLevel() + .equals(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_SUPPORTED)) { + ISearchParamExtractor.SearchParamSet quantitiesNormalized = + extractSearchParamQuantityNormalized(theResource, theSearchParamFilter); handleWarnings(theRequestDetails, myInterceptorBroadcaster, quantitiesNormalized); theParams.myQuantityNormalizedParams.addAll(quantitiesNormalized); } // Dates - ISearchParamExtractor.SearchParamSet dates = extractSearchParamDates(theResource, theSearchParamFilter); + ISearchParamExtractor.SearchParamSet dates 
= + extractSearchParamDates(theResource, theSearchParamFilter); handleWarnings(theRequestDetails, myInterceptorBroadcaster, dates); theParams.myDateParams.addAll(dates); // URIs - ISearchParamExtractor.SearchParamSet uris = extractSearchParamUri(theResource, theSearchParamFilter); + ISearchParamExtractor.SearchParamSet uris = + extractSearchParamUri(theResource, theSearchParamFilter); handleWarnings(theRequestDetails, myInterceptorBroadcaster, uris); theParams.myUriParams.addAll(uris); // Tokens (can result in both Token and String, as we index the display name for // the types: Coding, CodeableConcept) - ISearchParamExtractor.SearchParamSet tokens = extractSearchParamTokens(theResource, theSearchParamFilter); + ISearchParamExtractor.SearchParamSet tokens = + extractSearchParamTokens(theResource, theSearchParamFilter); for (BaseResourceIndexedSearchParam next : tokens) { if (next instanceof ResourceIndexedSearchParamToken) { theParams.myTokenParams.add((ResourceIndexedSearchParamToken) next); @@ -436,19 +554,20 @@ public class SearchParamExtractorService { // Composites // dst2 composites use stuff like value[x] , and we don't support them. if (myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3)) { - ISearchParamExtractor.SearchParamSet composites = extractSearchParamComposites(theResource, theSearchParamFilter); + ISearchParamExtractor.SearchParamSet composites = + extractSearchParamComposites(theResource, theSearchParamFilter); handleWarnings(theRequestDetails, myInterceptorBroadcaster, composites); theParams.myCompositeParams.addAll(composites); } // Specials - ISearchParamExtractor.SearchParamSet specials = extractSearchParamSpecial(theResource, theSearchParamFilter); + ISearchParamExtractor.SearchParamSet specials = + extractSearchParamSpecial(theResource, theSearchParamFilter); for (BaseResourceIndexedSearchParam next : specials) { if (next instanceof ResourceIndexedSearchParamCoords) { theParams.myCoordsParams.add((ResourceIndexedSearchParamCoords) next); } } - } private void populateResourceTables(ResourceIndexedSearchParams theParams, ResourceTable theEntity) { @@ -468,11 +587,37 @@ public class SearchParamExtractorService { myContext = theContext; } - private void extractResourceLinks(RequestPartitionId theRequestPartitionId, ResourceIndexedSearchParams theParams, ResourceTable theEntity, IBaseResource theResource, TransactionDetails theTransactionDetails, boolean theFailOnInvalidReference, RequestDetails theRequest, ISearchParamExtractor.SearchParamSet theIndexedReferences) { - extractResourceLinks(theRequestPartitionId, new ResourceIndexedSearchParams(), theParams, theEntity, theResource, theTransactionDetails, theFailOnInvalidReference, theRequest, theIndexedReferences); + private void extractResourceLinks( + RequestPartitionId theRequestPartitionId, + ResourceIndexedSearchParams theParams, + ResourceTable theEntity, + IBaseResource theResource, + TransactionDetails theTransactionDetails, + boolean theFailOnInvalidReference, + RequestDetails theRequest, + ISearchParamExtractor.SearchParamSet theIndexedReferences) { + extractResourceLinks( + theRequestPartitionId, + new ResourceIndexedSearchParams(), + theParams, + theEntity, + theResource, + theTransactionDetails, + theFailOnInvalidReference, + theRequest, + theIndexedReferences); } - private void extractResourceLinks(RequestPartitionId theRequestPartitionId, ResourceIndexedSearchParams theExistingParams, ResourceIndexedSearchParams theNewParams, ResourceTable theEntity, IBaseResource theResource, 
TransactionDetails theTransactionDetails, boolean theFailOnInvalidReference, RequestDetails theRequest, ISearchParamExtractor.SearchParamSet theIndexedReferences) { + private void extractResourceLinks( + RequestPartitionId theRequestPartitionId, + ResourceIndexedSearchParams theExistingParams, + ResourceIndexedSearchParams theNewParams, + ResourceTable theEntity, + IBaseResource theResource, + TransactionDetails theTransactionDetails, + boolean theFailOnInvalidReference, + RequestDetails theRequest, + ISearchParamExtractor.SearchParamSet theIndexedReferences) { String sourceResourceName = myContext.getResourceType(theResource); for (PathAndRef nextPathAndRef : theIndexedReferences) { @@ -481,15 +626,36 @@ public class SearchParamExtractorService { continue; } - RuntimeSearchParam searchParam = mySearchParamRegistry.getActiveSearchParam(sourceResourceName, nextPathAndRef.getSearchParamName()); - extractResourceLinks(theRequestPartitionId, theExistingParams, theNewParams, theEntity, theTransactionDetails, sourceResourceName, searchParam, nextPathAndRef, theFailOnInvalidReference, theRequest); + RuntimeSearchParam searchParam = mySearchParamRegistry.getActiveSearchParam( + sourceResourceName, nextPathAndRef.getSearchParamName()); + extractResourceLinks( + theRequestPartitionId, + theExistingParams, + theNewParams, + theEntity, + theTransactionDetails, + sourceResourceName, + searchParam, + nextPathAndRef, + theFailOnInvalidReference, + theRequest); } } theEntity.setHasLinks(theNewParams.myLinks.size() > 0); } - private void extractResourceLinks(@Nonnull RequestPartitionId theRequestPartitionId, ResourceIndexedSearchParams theExistingParams, ResourceIndexedSearchParams theNewParams, ResourceTable theEntity, TransactionDetails theTransactionDetails, String theSourceResourceName, RuntimeSearchParam theRuntimeSearchParam, PathAndRef thePathAndRef, boolean theFailOnInvalidReference, RequestDetails theRequest) { + private void extractResourceLinks( + @Nonnull RequestPartitionId theRequestPartitionId, + ResourceIndexedSearchParams theExistingParams, + ResourceIndexedSearchParams theNewParams, + ResourceTable theEntity, + TransactionDetails theTransactionDetails, + String theSourceResourceName, + RuntimeSearchParam theRuntimeSearchParam, + PathAndRef thePathAndRef, + boolean theFailOnInvalidReference, + RequestDetails theRequest) { IBaseReference nextReference = thePathAndRef.getRef(); IIdType nextId = nextReference.getReferenceElement(); String path = thePathAndRef.getPath(); @@ -504,7 +670,12 @@ public class SearchParamExtractorService { nextId = nextReference.getResource().getIdElement(); } - if (myContext.getParserOptions().isStripVersionsFromReferences() && !myContext.getParserOptions().getDontStripVersionsFromReferencesAtPaths().contains(thePathAndRef.getPath()) && nextId.hasVersionIdPart()) { + if (myContext.getParserOptions().isStripVersionsFromReferences() + && !myContext + .getParserOptions() + .getDontStripVersionsFromReferencesAtPaths() + .contains(thePathAndRef.getPath()) + && nextId.hasVersionIdPart()) { nextId = nextId.toVersionless(); } @@ -513,18 +684,20 @@ public class SearchParamExtractorService { boolean canonical = thePathAndRef.isCanonical(); if (LogicalReferenceHelper.isLogicalReference(myStorageSettings, nextId) || canonical) { String value = nextId.getValue(); - ResourceLink resourceLink = ResourceLink.forLogicalReference(thePathAndRef.getPath(), theEntity, value, transactionDate); + ResourceLink resourceLink = + ResourceLink.forLogicalReference(thePathAndRef.getPath(), 
theEntity, value, transactionDate); if (theNewParams.myLinks.add(resourceLink)) { ourLog.debug("Indexing remote resource reference URL: {}", nextId); } return; } - final boolean hasNoIdentifier = ! nextReference.hasIdentifier(); + final boolean hasNoIdentifier = !nextReference.hasIdentifier(); final String baseUrl = hasNoIdentifier ? nextId.getBaseUrl() : null; String typeString = nextId.getResourceType(); if (isBlank(typeString)) { - String msg = "Invalid resource reference found at path[" + path + "] - Does not contain resource type - " + nextId.getValue(); + String msg = "Invalid resource reference found at path[" + path + "] - Does not contain resource type - " + + nextId.getValue(); if (theFailOnInvalidReference) { throw new InvalidRequestException(Msg.code(505) + msg); } else { @@ -536,7 +709,8 @@ public class SearchParamExtractorService { try { resourceDefinition = myContext.getResourceDefinition(typeString); } catch (DataFormatException e) { - String msg = "Invalid resource reference found at path[" + path + "] - Resource type is unknown or not supported on this server - " + nextId.getValue(); + String msg = "Invalid resource reference found at path[" + path + + "] - Resource type is unknown or not supported on this server - " + nextId.getValue(); if (theFailOnInvalidReference) { throw new InvalidRequestException(Msg.code(506) + msg); } else { @@ -552,11 +726,15 @@ public class SearchParamExtractorService { } if (isNotBlank(baseUrl)) { - if (!myStorageSettings.getTreatBaseUrlsAsLocal().contains(baseUrl) && !myStorageSettings.isAllowExternalReferences()) { - String msg = myContext.getLocalizer().getMessage(BaseSearchParamExtractor.class, "externalReferenceNotAllowed", nextId.getValue()); + if (!myStorageSettings.getTreatBaseUrlsAsLocal().contains(baseUrl) + && !myStorageSettings.isAllowExternalReferences()) { + String msg = myContext + .getLocalizer() + .getMessage(BaseSearchParamExtractor.class, "externalReferenceNotAllowed", nextId.getValue()); throw new InvalidRequestException(Msg.code(507) + msg); } else { - ResourceLink resourceLink = ResourceLink.forAbsoluteReference(thePathAndRef.getPath(), theEntity, nextId, transactionDate); + ResourceLink resourceLink = + ResourceLink.forAbsoluteReference(thePathAndRef.getPath(), theEntity, nextId, transactionDate); if (theNewParams.myLinks.add(resourceLink)) { ourLog.debug("Indexing remote resource reference URL: {}", nextId); } @@ -567,7 +745,8 @@ public class SearchParamExtractorService { Class type = resourceDefinition.getImplementingClass(); String targetId = nextId.getIdPart(); if (StringUtils.isBlank(targetId)) { - String msg = "Invalid resource reference found at path[" + path + "] - Does not contain resource ID - " + nextId.getValue(); + String msg = "Invalid resource reference found at path[" + path + "] - Does not contain resource ID - " + + nextId.getValue(); if (theFailOnInvalidReference) { throw new InvalidRequestException(Msg.code(508) + msg); } else { @@ -588,7 +767,14 @@ public class SearchParamExtractorService { * need to resolve it again */ myResourceLinkResolver.validateTypeOrThrowException(type); - resourceLink = ResourceLink.forLocalReference(thePathAndRef.getPath(), theEntity, typeString, resolvedTargetId.getId(), targetId, transactionDate, targetVersionId); + resourceLink = ResourceLink.forLocalReference( + thePathAndRef.getPath(), + theEntity, + typeString, + resolvedTargetId.getId(), + targetId, + transactionDate, + targetVersionId); } else if (theFailOnInvalidReference) { @@ -606,11 +792,20 @@ public class 
SearchParamExtractorService { * very expensive operation of creating a resourceLink that would end up being exactly the same * one we already have. */ - Optional optionalResourceLink = findMatchingResourceLink(thePathAndRef, theExistingParams.getResourceLinks()); + Optional optionalResourceLink = + findMatchingResourceLink(thePathAndRef, theExistingParams.getResourceLinks()); if (optionalResourceLink.isPresent()) { resourceLink = optionalResourceLink.get(); } else { - resourceLink = resolveTargetAndCreateResourceLinkOrReturnNull(theRequestPartitionId, theSourceResourceName, thePathAndRef, theEntity, transactionDate, nextId, theRequest, theTransactionDetails); + resourceLink = resolveTargetAndCreateResourceLinkOrReturnNull( + theRequestPartitionId, + theSourceResourceName, + thePathAndRef, + theEntity, + transactionDate, + nextId, + theRequest, + theTransactionDetails); } if (resourceLink == null) { @@ -630,50 +825,84 @@ public class SearchParamExtractorService { ResourceTable target; target = new ResourceTable(); target.setResourceType(typeString); - resourceLink = ResourceLink.forLocalReference(thePathAndRef.getPath(), theEntity, typeString, null, targetId, transactionDate, targetVersionId); - + resourceLink = ResourceLink.forLocalReference( + thePathAndRef.getPath(), theEntity, typeString, null, targetId, transactionDate, targetVersionId); } theNewParams.myLinks.add(resourceLink); } - private Optional findMatchingResourceLink(PathAndRef thePathAndRef, Collection theResourceLinks) { + private Optional findMatchingResourceLink( + PathAndRef thePathAndRef, Collection theResourceLinks) { IIdType referenceElement = thePathAndRef.getRef().getReferenceElement(); List resourceLinks = new ArrayList<>(theResourceLinks); for (ResourceLink resourceLink : resourceLinks) { // comparing the searchParam path ex: Group.member.entity - boolean hasMatchingSearchParamPath = StringUtils.equals(resourceLink.getSourcePath(), thePathAndRef.getPath()); + boolean hasMatchingSearchParamPath = + StringUtils.equals(resourceLink.getSourcePath(), thePathAndRef.getPath()); - boolean hasMatchingResourceType = StringUtils.equals(resourceLink.getTargetResourceType(), referenceElement.getResourceType()); + boolean hasMatchingResourceType = + StringUtils.equals(resourceLink.getTargetResourceType(), referenceElement.getResourceType()); - boolean hasMatchingResourceId = StringUtils.equals(resourceLink.getTargetResourceId(), referenceElement.getIdPart()); + boolean hasMatchingResourceId = + StringUtils.equals(resourceLink.getTargetResourceId(), referenceElement.getIdPart()); - boolean hasMatchingResourceVersion = myContext.getParserOptions().isStripVersionsFromReferences() || referenceElement.getVersionIdPartAsLong() == null || referenceElement.getVersionIdPartAsLong().equals(resourceLink.getTargetResourceVersion()); + boolean hasMatchingResourceVersion = myContext.getParserOptions().isStripVersionsFromReferences() + || referenceElement.getVersionIdPartAsLong() == null + || referenceElement.getVersionIdPartAsLong().equals(resourceLink.getTargetResourceVersion()); - if (hasMatchingSearchParamPath && hasMatchingResourceType && hasMatchingResourceId && hasMatchingResourceVersion) { + if (hasMatchingSearchParamPath + && hasMatchingResourceType + && hasMatchingResourceId + && hasMatchingResourceVersion) { return Optional.of(resourceLink); } } return Optional.empty(); - } - private void extractResourceLinksForContainedResources(RequestPartitionId theRequestPartitionId, ResourceIndexedSearchParams theParams, ResourceTable theEntity, 
IBaseResource theResource, TransactionDetails theTransactionDetails, boolean theFailOnInvalidReference, RequestDetails theRequest) { + private void extractResourceLinksForContainedResources( + RequestPartitionId theRequestPartitionId, + ResourceIndexedSearchParams theParams, + ResourceTable theEntity, + IBaseResource theResource, + TransactionDetails theTransactionDetails, + boolean theFailOnInvalidReference, + RequestDetails theRequest) { FhirTerser terser = myContext.newTerser(); // 1. get all contained resources Collection containedResources = terser.getAllEmbeddedResources(theResource, false); - extractResourceLinksForContainedResources(theRequestPartitionId, theParams, theEntity, theResource, theTransactionDetails, theFailOnInvalidReference, theRequest, containedResources, new HashSet<>()); + extractResourceLinksForContainedResources( + theRequestPartitionId, + theParams, + theEntity, + theResource, + theTransactionDetails, + theFailOnInvalidReference, + theRequest, + containedResources, + new HashSet<>()); } - private void extractResourceLinksForContainedResources(RequestPartitionId theRequestPartitionId, ResourceIndexedSearchParams theParams, ResourceTable theEntity, IBaseResource theResource, TransactionDetails theTransactionDetails, boolean theFailOnInvalidReference, RequestDetails theRequest, Collection theContainedResources, Collection theAlreadySeenResources) { + private void extractResourceLinksForContainedResources( + RequestPartitionId theRequestPartitionId, + ResourceIndexedSearchParams theParams, + ResourceTable theEntity, + IBaseResource theResource, + TransactionDetails theTransactionDetails, + boolean theFailOnInvalidReference, + RequestDetails theRequest, + Collection theContainedResources, + Collection theAlreadySeenResources) { // 2. 
Find referenced search parameters - ISearchParamExtractor.SearchParamSet referencedSearchParamSet = mySearchParamExtractor.extractResourceLinks(theResource, true); + ISearchParamExtractor.SearchParamSet referencedSearchParamSet = + mySearchParamExtractor.extractResourceLinks(theResource, true); String spNamePrefix; ResourceIndexedSearchParams currParams; @@ -683,15 +912,14 @@ public class SearchParamExtractorService { // 3.1 get the search parameter name as spname prefix spNamePrefix = nextPathAndRef.getSearchParamName(); - if (spNamePrefix == null || nextPathAndRef.getRef() == null) - continue; + if (spNamePrefix == null || nextPathAndRef.getRef() == null) continue; // 3.2 find the contained resource IBaseResource containedResource = findContainedResource(theContainedResources, nextPathAndRef.getRef()); - if (containedResource == null) - continue; + if (containedResource == null) continue; - // 3.2.1 if we've already processed this resource upstream, do not process it again, to prevent infinite loops + // 3.2.1 if we've already processed this resource upstream, do not process it again, to prevent infinite + // loops if (theAlreadySeenResources.contains(containedResource)) { continue; } @@ -699,14 +927,32 @@ public class SearchParamExtractorService { currParams = new ResourceIndexedSearchParams(); // 3.3 create indexes for the current contained resource - ISearchParamExtractor.SearchParamSet indexedReferences = mySearchParamExtractor.extractResourceLinks(containedResource, true); - extractResourceLinks(theRequestPartitionId, currParams, theEntity, containedResource, theTransactionDetails, theFailOnInvalidReference, theRequest, indexedReferences); + ISearchParamExtractor.SearchParamSet indexedReferences = + mySearchParamExtractor.extractResourceLinks(containedResource, true); + extractResourceLinks( + theRequestPartitionId, + currParams, + theEntity, + containedResource, + theTransactionDetails, + theFailOnInvalidReference, + theRequest, + indexedReferences); // 3.4 recurse to process any other contained resources referenced by this one if (myStorageSettings.isIndexOnContainedResourcesRecursively()) { HashSet nextAlreadySeenResources = new HashSet<>(theAlreadySeenResources); nextAlreadySeenResources.add(containedResource); - extractResourceLinksForContainedResources(theRequestPartitionId, currParams, theEntity, containedResource, theTransactionDetails, theFailOnInvalidReference, theRequest, theContainedResources, nextAlreadySeenResources); + extractResourceLinksForContainedResources( + theRequestPartitionId, + currParams, + theEntity, + containedResource, + theTransactionDetails, + theFailOnInvalidReference, + theRequest, + theContainedResources, + nextAlreadySeenResources); } // 3.4 added reference name as a prefix for the contained resource if any @@ -721,14 +967,29 @@ public class SearchParamExtractorService { } @SuppressWarnings("unchecked") - private ResourceLink resolveTargetAndCreateResourceLinkOrReturnNull(@Nonnull RequestPartitionId theRequestPartitionId, String theSourceResourceName, PathAndRef thePathAndRef, ResourceTable theEntity, Date theUpdateTime, IIdType theNextId, RequestDetails theRequest, TransactionDetails theTransactionDetails) { + private ResourceLink resolveTargetAndCreateResourceLinkOrReturnNull( + @Nonnull RequestPartitionId theRequestPartitionId, + String theSourceResourceName, + PathAndRef thePathAndRef, + ResourceTable theEntity, + Date theUpdateTime, + IIdType theNextId, + RequestDetails theRequest, + TransactionDetails theTransactionDetails) { JpaPid 
resolvedResourceId = (JpaPid) theTransactionDetails.getResolvedResourceId(theNextId); if (resolvedResourceId != null) { String targetResourceType = theNextId.getResourceType(); Long targetResourcePid = resolvedResourceId.getId(); String targetResourceIdPart = theNextId.getIdPart(); Long targetVersion = theNextId.getVersionIdPartAsLong(); - return ResourceLink.forLocalReference(thePathAndRef.getPath(), theEntity, targetResourceType, targetResourcePid, targetResourceIdPart, theUpdateTime, targetVersion); + return ResourceLink.forLocalReference( + thePathAndRef.getPath(), + theEntity, + targetResourceType, + targetResourcePid, + targetResourceIdPart, + theUpdateTime, + targetVersion); } /* @@ -740,25 +1001,43 @@ public class SearchParamExtractorService { IResourceLookup targetResource; if (myPartitionSettings.isPartitioningEnabled()) { - if (myPartitionSettings.getAllowReferencesAcrossPartitions() == PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED) { + if (myPartitionSettings.getAllowReferencesAcrossPartitions() + == PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED) { // Interceptor: Pointcut.JPA_CROSS_PARTITION_REFERENCE_DETECTED - if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE, myInterceptorBroadcaster, theRequest)) { - CrossPartitionReferenceDetails referenceDetails = new CrossPartitionReferenceDetails(theRequestPartitionId, theSourceResourceName, thePathAndRef, theRequest, theTransactionDetails); + if (CompositeInterceptorBroadcaster.hasHooks( + Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE, myInterceptorBroadcaster, theRequest)) { + CrossPartitionReferenceDetails referenceDetails = new CrossPartitionReferenceDetails( + theRequestPartitionId, + theSourceResourceName, + thePathAndRef, + theRequest, + theTransactionDetails); HookParams params = new HookParams(referenceDetails); - targetResource = (IResourceLookup) CompositeInterceptorBroadcaster.doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE, params); + targetResource = + (IResourceLookup) CompositeInterceptorBroadcaster.doCallHooksAndReturnObject( + myInterceptorBroadcaster, + theRequest, + Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE, + params); } else { - targetResource = myResourceLinkResolver.findTargetResource(RequestPartitionId.allPartitions(), theSourceResourceName, thePathAndRef, theRequest, theTransactionDetails); + targetResource = myResourceLinkResolver.findTargetResource( + RequestPartitionId.allPartitions(), + theSourceResourceName, + thePathAndRef, + theRequest, + theTransactionDetails); } } else { - targetResource = myResourceLinkResolver.findTargetResource(theRequestPartitionId, theSourceResourceName, thePathAndRef, theRequest, theTransactionDetails); + targetResource = myResourceLinkResolver.findTargetResource( + theRequestPartitionId, theSourceResourceName, thePathAndRef, theRequest, theTransactionDetails); } } else { - targetResource = myResourceLinkResolver.findTargetResource(theRequestPartitionId, theSourceResourceName, thePathAndRef, theRequest, theTransactionDetails); + targetResource = myResourceLinkResolver.findTargetResource( + theRequestPartitionId, theSourceResourceName, thePathAndRef, theRequest, theTransactionDetails); } - if (targetResource == null) { return null; } @@ -767,18 +1046,28 @@ public class SearchParamExtractorService { Long targetResourcePid = targetResource.getPersistentId().getId(); String targetResourceIdPart = theNextId.getIdPart(); Long 
targetVersion = theNextId.getVersionIdPartAsLong(); - return ResourceLink.forLocalReference(thePathAndRef.getPath(), theEntity, targetResourceType, targetResourcePid, targetResourceIdPart, theUpdateTime, targetVersion); + return ResourceLink.forLocalReference( + thePathAndRef.getPath(), + theEntity, + targetResourceType, + targetResourcePid, + targetResourceIdPart, + theUpdateTime, + targetVersion); } private RequestPartitionId determineResolverPartitionId(@Nonnull RequestPartitionId theRequestPartitionId) { RequestPartitionId targetRequestPartitionId = theRequestPartitionId; - if (myPartitionSettings.isPartitioningEnabled() && myPartitionSettings.getAllowReferencesAcrossPartitions() == PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED) { + if (myPartitionSettings.isPartitioningEnabled() + && myPartitionSettings.getAllowReferencesAcrossPartitions() + == PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED) { targetRequestPartitionId = RequestPartitionId.allPartitions(); } return targetRequestPartitionId; } - private void populateResourceTable(Collection theParams, ResourceTable theResourceTable) { + private void populateResourceTable( + Collection theParams, ResourceTable theResourceTable) { for (BaseResourceIndexedSearchParam next : theParams) { if (next.getResourcePid() == null) { next.setResource(theResourceTable); @@ -786,7 +1075,8 @@ public class SearchParamExtractorService { } } - private void populateResourceTableForComboParams(Collection theParams, ResourceTable theResourceTable) { + private void populateResourceTableForComboParams( + Collection theParams, ResourceTable theResourceTable) { for (IResourceIndexComboSearchParameter next : theParams) { if (next.getResource() == null) { next.setResource(theResourceTable); @@ -797,39 +1087,49 @@ public class SearchParamExtractorService { } } - private ISearchParamExtractor.SearchParamSet extractSearchParamDates(IBaseResource theResource, ISearchParamExtractor.ISearchParamFilter theSearchParamFilter) { + private ISearchParamExtractor.SearchParamSet extractSearchParamDates( + IBaseResource theResource, ISearchParamExtractor.ISearchParamFilter theSearchParamFilter) { return mySearchParamExtractor.extractSearchParamDates(theResource, theSearchParamFilter); } - private ISearchParamExtractor.SearchParamSet extractSearchParamNumber(IBaseResource theResource, ISearchParamExtractor.ISearchParamFilter theSearchParamFilter) { + private ISearchParamExtractor.SearchParamSet extractSearchParamNumber( + IBaseResource theResource, ISearchParamExtractor.ISearchParamFilter theSearchParamFilter) { return mySearchParamExtractor.extractSearchParamNumber(theResource, theSearchParamFilter); } - private ISearchParamExtractor.SearchParamSet extractSearchParamQuantity(IBaseResource theResource, ISearchParamExtractor.ISearchParamFilter theSearchParamFilter) { + private ISearchParamExtractor.SearchParamSet extractSearchParamQuantity( + IBaseResource theResource, ISearchParamExtractor.ISearchParamFilter theSearchParamFilter) { return mySearchParamExtractor.extractSearchParamQuantity(theResource, theSearchParamFilter); } - private ISearchParamExtractor.SearchParamSet extractSearchParamQuantityNormalized(IBaseResource theResource, ISearchParamExtractor.ISearchParamFilter theSearchParamFilter) { + private ISearchParamExtractor.SearchParamSet + extractSearchParamQuantityNormalized( + IBaseResource theResource, ISearchParamExtractor.ISearchParamFilter theSearchParamFilter) { return 
mySearchParamExtractor.extractSearchParamQuantityNormalized(theResource, theSearchParamFilter); } - private ISearchParamExtractor.SearchParamSet extractSearchParamStrings(IBaseResource theResource, ISearchParamExtractor.ISearchParamFilter theSearchParamFilter) { + private ISearchParamExtractor.SearchParamSet extractSearchParamStrings( + IBaseResource theResource, ISearchParamExtractor.ISearchParamFilter theSearchParamFilter) { return mySearchParamExtractor.extractSearchParamStrings(theResource, theSearchParamFilter); } - private ISearchParamExtractor.SearchParamSet extractSearchParamTokens(IBaseResource theResource, ISearchParamExtractor.ISearchParamFilter theSearchParamFilter) { + private ISearchParamExtractor.SearchParamSet extractSearchParamTokens( + IBaseResource theResource, ISearchParamExtractor.ISearchParamFilter theSearchParamFilter) { return mySearchParamExtractor.extractSearchParamTokens(theResource, theSearchParamFilter); } - private ISearchParamExtractor.SearchParamSet extractSearchParamSpecial(IBaseResource theResource, ISearchParamExtractor.ISearchParamFilter theSearchParamFilter) { + private ISearchParamExtractor.SearchParamSet extractSearchParamSpecial( + IBaseResource theResource, ISearchParamExtractor.ISearchParamFilter theSearchParamFilter) { return mySearchParamExtractor.extractSearchParamSpecial(theResource, theSearchParamFilter); } - private ISearchParamExtractor.SearchParamSet extractSearchParamUri(IBaseResource theResource, ISearchParamExtractor.ISearchParamFilter theSearchParamFilter) { + private ISearchParamExtractor.SearchParamSet extractSearchParamUri( + IBaseResource theResource, ISearchParamExtractor.ISearchParamFilter theSearchParamFilter) { return mySearchParamExtractor.extractSearchParamUri(theResource, theSearchParamFilter); } - private ISearchParamExtractor.SearchParamSet extractSearchParamComposites(IBaseResource theResource, ISearchParamExtractor.ISearchParamFilter theSearchParamFilter) { + private ISearchParamExtractor.SearchParamSet extractSearchParamComposites( + IBaseResource theResource, ISearchParamExtractor.ISearchParamFilter theSearchParamFilter) { return mySearchParamExtractor.extractSearchParamComposites(theResource, theSearchParamFilter); } @@ -839,20 +1139,23 @@ public class SearchParamExtractorService { } @Nonnull - public List extractParamValuesAsStrings(RuntimeSearchParam theActiveSearchParam, IBaseResource theResource) { + public List extractParamValuesAsStrings( + RuntimeSearchParam theActiveSearchParam, IBaseResource theResource) { return mySearchParamExtractor.extractParamValuesAsStrings(theActiveSearchParam, theResource); } public void extractSearchParamComboUnique(ResourceTable theEntity, ResourceIndexedSearchParams theParams) { String resourceType = theEntity.getResourceType(); - Set comboUniques = mySearchParamExtractor.extractSearchParamComboUnique(resourceType, theParams); + Set comboUniques = + mySearchParamExtractor.extractSearchParamComboUnique(resourceType, theParams); theParams.myComboStringUniques.addAll(comboUniques); populateResourceTableForComboParams(theParams.myComboStringUniques, theEntity); } public void extractSearchParamComboNonUnique(ResourceTable theEntity, ResourceIndexedSearchParams theParams) { String resourceType = theEntity.getResourceType(); - Set comboNonUniques = mySearchParamExtractor.extractSearchParamComboNonUnique(resourceType, theParams); + Set comboNonUniques = + mySearchParamExtractor.extractSearchParamComboNonUnique(resourceType, theParams); 
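    // Illustrative sketch only, not part of this change: each extractSearchParam* delegate above takes an
    // ISearchParamExtractor.ISearchParamFilter, which lets a caller restrict extraction to the search
    // parameters it actually needs. Assuming the filter receives the candidate RuntimeSearchParam
    // definitions and returns the subset to keep (the same shape the InMemoryResourceMatcher change later
    // in this patch relies on), a filter keeping only a hypothetical set of parameter names could look like:
    //
    //     Set<String> wantedParamNames = Set.of("code", "subject");
    //     ISearchParamExtractor.ISearchParamFilter onlyWanted = candidates -> candidates.stream()
    //             .filter(sp -> wantedParamNames.contains(sp.getName()))
    //             .collect(Collectors.toList());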
theParams.myComboTokenNonUnique.addAll(comboNonUniques); populateResourceTableForComboParams(theParams.myComboTokenNonUnique, theEntity); } @@ -881,26 +1184,29 @@ public class SearchParamExtractorService { */ @Nullable IBaseResource fetchResourceAtPath(@Nonnull PathAndRef thePathAndRef); - } - static void handleWarnings(RequestDetails theRequestDetails, IInterceptorBroadcaster theInterceptorBroadcaster, ISearchParamExtractor.SearchParamSet theSearchParamSet) { + static void handleWarnings( + RequestDetails theRequestDetails, + IInterceptorBroadcaster theInterceptorBroadcaster, + ISearchParamExtractor.SearchParamSet theSearchParamSet) { if (theSearchParamSet.getWarnings().isEmpty()) { return; } // If extraction generated any warnings, broadcast an error - if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_WARNING, theInterceptorBroadcaster, theRequestDetails)) { + if (CompositeInterceptorBroadcaster.hasHooks( + Pointcut.JPA_PERFTRACE_WARNING, theInterceptorBroadcaster, theRequestDetails)) { for (String next : theSearchParamSet.getWarnings()) { StorageProcessingMessage messageHolder = new StorageProcessingMessage(); messageHolder.setMessage(next); HookParams params = new HookParams() - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - .add(StorageProcessingMessage.class, messageHolder); - CompositeInterceptorBroadcaster.doCallHooks(theInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_WARNING, params); + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(StorageProcessingMessage.class, messageHolder); + CompositeInterceptorBroadcaster.doCallHooks( + theInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_WARNING, params); } } } } - diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryMatchResult.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryMatchResult.java index f36a2ee7bda..06cc6c57715 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryMatchResult.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryMatchResult.java @@ -74,7 +74,8 @@ public class InMemoryMatchResult { return new InMemoryMatchResult(null, theUnsupportedReason); } - public static InMemoryMatchResult unsupportedFromParameterAndReason(String theUnsupportedParameter, String theUnsupportedReason) { + public static InMemoryMatchResult unsupportedFromParameterAndReason( + String theUnsupportedParameter, String theUnsupportedReason) { return new InMemoryMatchResult(theUnsupportedParameter, theUnsupportedReason); } @@ -116,7 +117,9 @@ public class InMemoryMatchResult { return InMemoryMatchResult.fromBoolean(theLeft.matched() && theRight.matched()); } if (!theLeft.supported() && !theRight.supported()) { - return InMemoryMatchResult.unsupportedFromReason(List.of(theLeft.getUnsupportedReason(), theRight.getUnsupportedReason()).toString()); + return InMemoryMatchResult.unsupportedFromReason( + List.of(theLeft.getUnsupportedReason(), theRight.getUnsupportedReason()) + .toString()); } if (!theLeft.supported()) { return theLeft; @@ -135,12 +138,13 @@ public class InMemoryMatchResult { return InMemoryMatchResult.successfulMatch(); } if (!theLeft.supported() && !theRight.supported()) { - return 
InMemoryMatchResult.unsupportedFromReason(List.of(theLeft.getUnsupportedReason(), theRight.getUnsupportedReason()).toString()); + return InMemoryMatchResult.unsupportedFromReason( + List.of(theLeft.getUnsupportedReason(), theRight.getUnsupportedReason()) + .toString()); } if (!theLeft.supported()) { return theLeft; } return theRight; } - } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryResourceMatcher.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryResourceMatcher.java index d73cb43f9fc..a1ddbfc0085 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryResourceMatcher.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/matcher/InMemoryResourceMatcher.java @@ -63,12 +63,12 @@ import org.springframework.beans.BeansException; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -77,24 +77,33 @@ public class InMemoryResourceMatcher { public static final Set UNSUPPORTED_PARAMETER_NAMES = Sets.newHashSet(Constants.PARAM_HAS); private static final org.slf4j.Logger ourLog = LoggerFactory.getLogger(InMemoryResourceMatcher.class); + @Autowired ApplicationContext myApplicationContext; + @Autowired ISearchParamRegistry mySearchParamRegistry; + @Autowired StorageSettings myStorageSettings; + @Autowired FhirContext myFhirContext; + @Autowired SearchParamExtractorService mySearchParamExtractorService; + @Autowired IndexedSearchParamExtractor myIndexedSearchParamExtractor; + @Autowired private MatchUrlService myMatchUrlService; - private ValidationSupportInitializationState validationSupportState = ValidationSupportInitializationState.NOT_INITIALIZED; + + private ValidationSupportInitializationState validationSupportState = + ValidationSupportInitializationState.NOT_INITIALIZED; private IValidationSupport myValidationSupport = null; - public InMemoryResourceMatcher() { - } + + public InMemoryResourceMatcher() {} /** * Lazy loads a {@link IValidationSupport} implementation just-in-time. @@ -110,7 +119,9 @@ public class InMemoryResourceMatcher { validationSupportState = ValidationSupportInitializationState.INITIALIZED; } catch (BeansException | ConfigurationException ignore) { // We couldn't get a validation support bean, and we don't want to waste cycles trying again - ourLog.warn(Msg.code(2100) + "No bean satisfying IValidationSupport could be initialized. Qualifiers dependent on IValidationSupport will not be supported."); + ourLog.warn( + Msg.code(2100) + + "No bean satisfying IValidationSupport could be initialized. 
Qualifiers dependent on IValidationSupport will not be supported."); validationSupportState = ValidationSupportInitializationState.FAILED; } } @@ -121,31 +132,39 @@ public class InMemoryResourceMatcher { * @deprecated Use {@link #match(String, IBaseResource, ResourceIndexedSearchParams, RequestDetails)} */ @Deprecated - public InMemoryMatchResult match(String theCriteria, IBaseResource theResource, @Nullable ResourceIndexedSearchParams theIndexedSearchParams) { + public InMemoryMatchResult match( + String theCriteria, + IBaseResource theResource, + @Nullable ResourceIndexedSearchParams theIndexedSearchParams) { return match(theCriteria, theResource, theIndexedSearchParams, null); } - /** - * This method is called in two different scenarios. With a null theResource, it determines whether database matching might be required. - * Otherwise, it tries to perform the match in-memory, returning UNSUPPORTED if it's not possible. - *
<p>
    - * Note that there will be cases where it returns UNSUPPORTED with a null resource, but when a non-null resource it returns supported and no match. - * This is because an earlier parameter may be matchable in-memory in which case processing stops and we never get to the parameter - * that would have required a database call. - * - * @param theIndexedSearchParams If the search params have already been calculated for the given resource, - * they can be passed in. Passing in {@literal null} is also fine, in which - * case they will be calculated for the resource. It can be preferable to - * pass in {@literal null} unless you already actually had to calculate the - * indexes for another reason, since we can be efficient here and only calculate - * the params that are actually relevant for the given search expression. - */ - public InMemoryMatchResult match(String theCriteria, IBaseResource theResource, @Nullable ResourceIndexedSearchParams theIndexedSearchParams, RequestDetails theRequestDetails) { + * This method is called in two different scenarios. With a null theResource, it determines whether database matching might be required. + * Otherwise, it tries to perform the match in-memory, returning UNSUPPORTED if it's not possible. + *
<p>
    + * Note that there will be cases where it returns UNSUPPORTED with a null resource, but when a non-null resource it returns supported and no match. + * This is because an earlier parameter may be matchable in-memory in which case processing stops and we never get to the parameter + * that would have required a database call. + * + * @param theIndexedSearchParams If the search params have already been calculated for the given resource, + * they can be passed in. Passing in {@literal null} is also fine, in which + * case they will be calculated for the resource. It can be preferable to + * pass in {@literal null} unless you already actually had to calculate the + * indexes for another reason, since we can be efficient here and only calculate + * the params that are actually relevant for the given search expression. + */ + public InMemoryMatchResult match( + String theCriteria, + IBaseResource theResource, + @Nullable ResourceIndexedSearchParams theIndexedSearchParams, + RequestDetails theRequestDetails) { RuntimeResourceDefinition resourceDefinition; if (theResource == null) { - Validate.isTrue(!theCriteria.startsWith("?"), "Invalid match URL format (must match \"[resourceType]?[params]\")"); - Validate.isTrue(theCriteria.contains("?"), "Invalid match URL format (must match \"[resourceType]?[params]\")"); + Validate.isTrue( + !theCriteria.startsWith("?"), "Invalid match URL format (must match \"[resourceType]?[params]\")"); + Validate.isTrue( + theCriteria.contains("?"), "Invalid match URL format (must match \"[resourceType]?[params]\")"); resourceDefinition = UrlUtil.parseUrlResourceType(myFhirContext, theCriteria); } else { resourceDefinition = myFhirContext.getResourceDefinition(theResource); @@ -163,11 +182,11 @@ public class InMemoryResourceMatcher { relevantSearchParams = theIndexedSearchParams; } else if (theResource != null) { // Don't index search params we don't actully need for the given criteria - ISearchParamExtractor.ISearchParamFilter filter = theSearchParams -> theSearchParams - .stream() - .filter(t -> searchParameterMap.containsKey(t.getName())) - .collect(Collectors.toList()); - relevantSearchParams = myIndexedSearchParamExtractor.extractIndexedSearchParams(theResource, theRequestDetails, filter); + ISearchParamExtractor.ISearchParamFilter filter = theSearchParams -> theSearchParams.stream() + .filter(t -> searchParameterMap.containsKey(t.getName())) + .collect(Collectors.toList()); + relevantSearchParams = + myIndexedSearchParamExtractor.extractIndexedSearchParams(theResource, theRequestDetails, filter); } return match(searchParameterMap, theResource, resourceDefinition, relevantSearchParams); @@ -186,14 +205,20 @@ public class InMemoryResourceMatcher { * @param theResourceDefinition * @return result.supported() will be true if theSearchParameterMap can be evaluated in-memory */ - public InMemoryMatchResult canBeEvaluatedInMemory(SearchParameterMap theSearchParameterMap, RuntimeResourceDefinition theResourceDefinition) { + public InMemoryMatchResult canBeEvaluatedInMemory( + SearchParameterMap theSearchParameterMap, RuntimeResourceDefinition theResourceDefinition) { return match(theSearchParameterMap, null, theResourceDefinition, null); } @Nonnull - public InMemoryMatchResult match(SearchParameterMap theSearchParameterMap, IBaseResource theResource, RuntimeResourceDefinition theResourceDefinition, ResourceIndexedSearchParams theSearchParams) { + public InMemoryMatchResult match( + SearchParameterMap theSearchParameterMap, + IBaseResource theResource, + 
RuntimeResourceDefinition theResourceDefinition, + ResourceIndexedSearchParams theSearchParams) { if (theSearchParameterMap.getLastUpdated() != null) { - return InMemoryMatchResult.unsupportedFromParameterAndReason(Constants.PARAM_LASTUPDATED, InMemoryMatchResult.STANDARD_PARAMETER); + return InMemoryMatchResult.unsupportedFromParameterAndReason( + Constants.PARAM_LASTUPDATED, InMemoryMatchResult.STANDARD_PARAMETER); } if (theSearchParameterMap.containsKey(Location.SP_NEAR)) { return InMemoryMatchResult.unsupportedFromReason(InMemoryMatchResult.LOCATION_NEAR); @@ -202,7 +227,8 @@ public class InMemoryResourceMatcher { for (Map.Entry>> entry : theSearchParameterMap.entrySet()) { String theParamName = entry.getKey(); List> theAndOrParams = entry.getValue(); - InMemoryMatchResult result = matchIdsWithAndOr(theParamName, theAndOrParams, theResourceDefinition, theResource, theSearchParams); + InMemoryMatchResult result = matchIdsWithAndOr( + theParamName, theAndOrParams, theResourceDefinition, theResource, theSearchParams); if (!result.matched()) { return result; } @@ -211,14 +237,20 @@ public class InMemoryResourceMatcher { } // This method is modelled from SearchBuilder.searchForIdsWithAndOr() - private InMemoryMatchResult matchIdsWithAndOr(String theParamName, List> theAndOrParams, RuntimeResourceDefinition theResourceDefinition, IBaseResource theResource, ResourceIndexedSearchParams theSearchParams) { + private InMemoryMatchResult matchIdsWithAndOr( + String theParamName, + List> theAndOrParams, + RuntimeResourceDefinition theResourceDefinition, + IBaseResource theResource, + ResourceIndexedSearchParams theSearchParams) { if (theAndOrParams.isEmpty()) { return InMemoryMatchResult.successfulMatch(); } String resourceName = theResourceDefinition.getName(); RuntimeSearchParam paramDef = mySearchParamRegistry.getActiveSearchParam(resourceName, theParamName); - InMemoryMatchResult checkUnsupportedResult = checkForUnsupportedParameters(theParamName, paramDef, theAndOrParams); + InMemoryMatchResult checkUnsupportedResult = + checkForUnsupportedParameters(theParamName, paramDef, theAndOrParams); if (!checkUnsupportedResult.supported()) { return checkUnsupportedResult; } @@ -235,11 +267,13 @@ public class InMemoryResourceMatcher { case Constants.PARAM_PROFILE: return InMemoryMatchResult.fromBoolean(matchProfilesAndOr(theAndOrParams, theResource)); default: - return matchResourceParam(myStorageSettings, theParamName, theAndOrParams, theSearchParams, resourceName, paramDef); + return matchResourceParam( + myStorageSettings, theParamName, theAndOrParams, theSearchParams, resourceName, paramDef); } } - private InMemoryMatchResult checkForUnsupportedParameters(String theParamName, RuntimeSearchParam theParamDef, List> theAndOrParams) { + private InMemoryMatchResult checkForUnsupportedParameters( + String theParamName, RuntimeSearchParam theParamDef, List> theAndOrParams) { if (UNSUPPORTED_PARAMETER_NAMES.contains(theParamName)) { return InMemoryMatchResult.unsupportedFromParameterAndReason(theParamName, InMemoryMatchResult.PARAM); @@ -248,8 +282,10 @@ public class InMemoryResourceMatcher { for (List orParams : theAndOrParams) { // The list should never be empty, but better safe than sorry if (orParams.size() > 0) { - // The params in each OR list all share the same qualifier, prefix, etc., so we only need to check the first one - InMemoryMatchResult checkUnsupportedResult = checkOneParameterForUnsupportedModifiers(theParamName, theParamDef, orParams.get(0)); + // The params in each OR list all share the 
same qualifier, prefix, etc., so we only need to check the + // first one + InMemoryMatchResult checkUnsupportedResult = + checkOneParameterForUnsupportedModifiers(theParamName, theParamDef, orParams.get(0)); if (!checkUnsupportedResult.supported()) { return checkUnsupportedResult; } @@ -259,12 +295,14 @@ public class InMemoryResourceMatcher { return InMemoryMatchResult.successfulMatch(); } - private InMemoryMatchResult checkOneParameterForUnsupportedModifiers(String theParamName, RuntimeSearchParam theParamDef, IQueryParameterType theParam) { + private InMemoryMatchResult checkOneParameterForUnsupportedModifiers( + String theParamName, RuntimeSearchParam theParamDef, IQueryParameterType theParam) { // Assume we're ok until we find evidence we aren't InMemoryMatchResult checkUnsupportedResult = InMemoryMatchResult.successfulMatch(); if (hasChain(theParam)) { - checkUnsupportedResult = InMemoryMatchResult.unsupportedFromParameterAndReason(theParamName + "." + ((ReferenceParam) theParam).getChain(), InMemoryMatchResult.CHAIN); + checkUnsupportedResult = InMemoryMatchResult.unsupportedFromParameterAndReason( + theParamName + "." + ((ReferenceParam) theParam).getChain(), InMemoryMatchResult.CHAIN); } if (checkUnsupportedResult.supported()) { @@ -297,8 +335,8 @@ public class InMemoryResourceMatcher { return false; } else { return theResource.getMeta().getProfile().stream() - .map(IPrimitiveType::getValueAsString) - .anyMatch(profileValue -> profileValue != null && profileValue.equals(paramProfileValue)); + .map(IPrimitiveType::getValueAsString) + .anyMatch(profileValue -> profileValue != null && profileValue.equals(paramProfileValue)); } } @@ -326,14 +364,16 @@ public class InMemoryResourceMatcher { return matches; } - private boolean matchTagsOrSecurityAndOr(List> theAndOrParams, IBaseResource theResource, boolean theTag) { + private boolean matchTagsOrSecurityAndOr( + List> theAndOrParams, IBaseResource theResource, boolean theTag) { if (theResource == null) { return true; } return theAndOrParams.stream().allMatch(nextAnd -> matchTagsOrSecurityOr(nextAnd, theResource, theTag)); } - private boolean matchTagsOrSecurityOr(List theOrParams, IBaseResource theResource, boolean theTag) { + private boolean matchTagsOrSecurityOr( + List theOrParams, IBaseResource theResource, boolean theTag) { return theOrParams.stream().anyMatch(param -> matchTagOrSecurity(param, theResource, theTag)); } @@ -382,14 +422,22 @@ public class InMemoryResourceMatcher { } private boolean matchIdsOr(List theOrParams, IBaseResource theResource) { - return theOrParams.stream().anyMatch(param -> param instanceof StringParam && matchId(((StringParam) param).getValue(), theResource.getIdElement())); + return theOrParams.stream() + .anyMatch(param -> param instanceof StringParam + && matchId(((StringParam) param).getValue(), theResource.getIdElement())); } private boolean matchId(String theValue, IIdType theId) { return theValue.equals(theId.getValue()) || theValue.equals(theId.getIdPart()); } - private InMemoryMatchResult matchResourceParam(StorageSettings theStorageSettings, String theParamName, List> theAndOrParams, ResourceIndexedSearchParams theSearchParams, String theResourceName, RuntimeSearchParam theParamDef) { + private InMemoryMatchResult matchResourceParam( + StorageSettings theStorageSettings, + String theParamName, + List> theAndOrParams, + ResourceIndexedSearchParams theSearchParams, + String theResourceName, + RuntimeSearchParam theParamDef) { if (theParamDef != null) { switch (theParamDef.getParamType()) { case 
QUANTITY: @@ -402,31 +450,50 @@ public class InMemoryResourceMatcher { if (theSearchParams == null) { return InMemoryMatchResult.successfulMatch(); } else { - return InMemoryMatchResult.fromBoolean(theAndOrParams.stream().allMatch(nextAnd -> matchParams(theStorageSettings, theResourceName, theParamName, theParamDef, nextAnd, theSearchParams))); + return InMemoryMatchResult.fromBoolean(theAndOrParams.stream() + .allMatch(nextAnd -> matchParams( + theStorageSettings, + theResourceName, + theParamName, + theParamDef, + nextAnd, + theSearchParams))); } case COMPOSITE: case HAS: case SPECIAL: default: - return InMemoryMatchResult.unsupportedFromParameterAndReason(theParamName, InMemoryMatchResult.PARAM); + return InMemoryMatchResult.unsupportedFromParameterAndReason( + theParamName, InMemoryMatchResult.PARAM); } } else { if (Constants.PARAM_CONTENT.equals(theParamName) || Constants.PARAM_TEXT.equals(theParamName)) { return InMemoryMatchResult.unsupportedFromParameterAndReason(theParamName, InMemoryMatchResult.PARAM); } else { - throw new InvalidRequestException(Msg.code(509) + "Unknown search parameter " + theParamName + " for resource type " + theResourceName); + throw new InvalidRequestException(Msg.code(509) + "Unknown search parameter " + theParamName + + " for resource type " + theResourceName); } } } - private boolean matchParams(StorageSettings theStorageSettings, String theResourceName, String theParamName, RuntimeSearchParam theParamDef, List theOrList, ResourceIndexedSearchParams theSearchParams) { + private boolean matchParams( + StorageSettings theStorageSettings, + String theResourceName, + String theParamName, + RuntimeSearchParam theParamDef, + List theOrList, + ResourceIndexedSearchParams theSearchParams) { boolean isNegativeTest = isNegative(theParamDef, theOrList); // negative tests like :not and :not-in must not match any or-clause, so we invert the quantifier. 
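    // Worked example, illustrative only and not part of this change: for a query such as
    // Patient?gender:not=male,female the :not modifier means the parameter matches only when every
    // or-clause fails to match an indexed token, so the quantifier flips from anyMatch to allMatch
    // over the (already negated) per-token result:
    //
    //     gender=male,female      -> theOrList.stream().anyMatch(token -> matchParam(...))  // one matching clause suffices
    //     gender:not=male,female  -> theOrList.stream().allMatch(token -> matchParam(...))  // every clause must exclude the resource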
if (isNegativeTest) { - return theOrList.stream().allMatch(token -> matchParam(theStorageSettings, theResourceName, theParamName, theParamDef, theSearchParams, token)); + return theOrList.stream() + .allMatch(token -> matchParam( + theStorageSettings, theResourceName, theParamName, theParamDef, theSearchParams, token)); } else { - return theOrList.stream().anyMatch(token -> matchParam(theStorageSettings, theResourceName, theParamName, theParamDef, theSearchParams, token)); + return theOrList.stream() + .anyMatch(token -> matchParam( + theStorageSettings, theResourceName, theParamName, theParamDef, theSearchParams, token)); } } @@ -441,12 +508,19 @@ public class InMemoryResourceMatcher { } else { return false; } - } - private boolean matchParam(StorageSettings theStorageSettings, String theResourceName, String theParamName, RuntimeSearchParam theParamDef, ResourceIndexedSearchParams theSearchParams, IQueryParameterType theToken) { + private boolean matchParam( + StorageSettings theStorageSettings, + String theResourceName, + String theParamName, + RuntimeSearchParam theParamDef, + ResourceIndexedSearchParams theSearchParams, + IQueryParameterType theToken) { if (theParamDef.getParamType().equals(RestSearchParameterTypeEnum.TOKEN)) { - return matchTokenParam(theStorageSettings, theResourceName, theParamName, theParamDef, theSearchParams, (TokenParam) theToken); + return matchTokenParam( + theStorageSettings, theResourceName, theParamName, theParamDef, theSearchParams, (TokenParam) + theToken); } else { return theSearchParams.matchParam(theStorageSettings, theResourceName, theParamName, theParamDef, theToken); } @@ -466,24 +540,33 @@ public class InMemoryResourceMatcher { * @param theQueryParam the query parameter to compare with theSearchParams * @return true if theQueryParam matches the collection of theSearchParams, otherwise false */ - private boolean matchTokenParam(StorageSettings theStorageSettings, String theResourceName, String theParamName, RuntimeSearchParam theParamDef, ResourceIndexedSearchParams theSearchParams, TokenParam theQueryParam) { + private boolean matchTokenParam( + StorageSettings theStorageSettings, + String theResourceName, + String theParamName, + RuntimeSearchParam theParamDef, + ResourceIndexedSearchParams theSearchParams, + TokenParam theQueryParam) { if (theQueryParam.getModifier() != null) { switch (theQueryParam.getModifier()) { case IN: return theSearchParams.myTokenParams.stream() - .filter(t -> t.getParamName().equals(theParamName)) - .anyMatch(t -> systemContainsCode(theQueryParam, t)); + .filter(t -> t.getParamName().equals(theParamName)) + .anyMatch(t -> systemContainsCode(theQueryParam, t)); case NOT_IN: return theSearchParams.myTokenParams.stream() - .filter(t -> t.getParamName().equals(theParamName)) - .noneMatch(t -> systemContainsCode(theQueryParam, t)); + .filter(t -> t.getParamName().equals(theParamName)) + .noneMatch(t -> systemContainsCode(theQueryParam, t)); case NOT: - return !theSearchParams.matchParam(theStorageSettings, theResourceName, theParamName, theParamDef, theQueryParam); + return !theSearchParams.matchParam( + theStorageSettings, theResourceName, theParamName, theParamDef, theQueryParam); default: - return theSearchParams.matchParam(theStorageSettings, theResourceName, theParamName, theParamDef, theQueryParam); + return theSearchParams.matchParam( + theStorageSettings, theResourceName, theParamName, theParamDef, theQueryParam); } } else { - return theSearchParams.matchParam(theStorageSettings, theResourceName, theParamName, 
theParamDef, theQueryParam); + return theSearchParams.matchParam( + theStorageSettings, theResourceName, theParamName, theParamDef, theQueryParam); } } @@ -494,7 +577,13 @@ public class InMemoryResourceMatcher { return false; } - IValidationSupport.CodeValidationResult codeValidationResult = validationSupport.validateCode(new ValidationSupportContext(validationSupport), new ConceptValidationOptions(), theSearchParamToken.getSystem(), theSearchParamToken.getValue(), null, theQueryParam.getValue()); + IValidationSupport.CodeValidationResult codeValidationResult = validationSupport.validateCode( + new ValidationSupportContext(validationSupport), + new ConceptValidationOptions(), + theSearchParamToken.getSystem(), + theSearchParamToken.getValue(), + null, + theQueryParam.getValue()); if (codeValidationResult != null) { return codeValidationResult.isOk(); } else { @@ -510,12 +599,15 @@ public class InMemoryResourceMatcher { return theParam.getQueryParameterQualifier() != null; } - private InMemoryMatchResult checkUnsupportedPrefixes(String theParamName, RuntimeSearchParam theParamDef, IQueryParameterType theParam) { + private InMemoryMatchResult checkUnsupportedPrefixes( + String theParamName, RuntimeSearchParam theParamDef, IQueryParameterType theParam) { if (theParamDef != null && theParam instanceof BaseParamWithPrefix) { ParamPrefixEnum prefix = ((BaseParamWithPrefix) theParam).getPrefix(); RestSearchParameterTypeEnum paramType = theParamDef.getParamType(); if (!supportedPrefix(prefix, paramType)) { - return InMemoryMatchResult.unsupportedFromParameterAndReason(theParamName, String.format("The prefix %s is not supported for param type %s", prefix, paramType)); + return InMemoryMatchResult.unsupportedFromParameterAndReason( + theParamName, + String.format("The prefix %s is not supported for param type %s", prefix, paramType)); } } return InMemoryMatchResult.successfulMatch(); @@ -543,9 +635,11 @@ public class InMemoryResourceMatcher { return false; } - private InMemoryMatchResult checkUnsupportedQualifiers(String theParamName, RuntimeSearchParam theParamDef, IQueryParameterType theParam) { + private InMemoryMatchResult checkUnsupportedQualifiers( + String theParamName, RuntimeSearchParam theParamDef, IQueryParameterType theParam) { if (hasQualifiers(theParam) && !supportedQualifier(theParamDef, theParam)) { - return InMemoryMatchResult.unsupportedFromParameterAndReason(theParamName + theParam.getQueryParameterQualifier(), InMemoryMatchResult.QUALIFIER); + return InMemoryMatchResult.unsupportedFromParameterAndReason( + theParamName + theParam.getQueryParameterQualifier(), InMemoryMatchResult.QUALIFIER); } return InMemoryMatchResult.successfulMatch(); } @@ -560,7 +654,8 @@ public class InMemoryResourceMatcher { switch (tokenParam.getModifier()) { case IN: case NOT_IN: - // Support for these qualifiers is dependent on an implementation of IValidationSupport being available to delegate the check to + // Support for these qualifiers is dependent on an implementation of IValidationSupport being + // available to delegate the check to return getValidationSupportOrNull() != null; case NOT: return true; @@ -572,6 +667,9 @@ public class InMemoryResourceMatcher { } } - private enum ValidationSupportInitializationState {NOT_INITIALIZED, INITIALIZED, FAILED} - + private enum ValidationSupportInitializationState { + NOT_INITIALIZED, + INITIALIZED, + FAILED + } } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/matcher/IndexedSearchParamExtractor.java 
b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/matcher/IndexedSearchParamExtractor.java index 525f7148aae..3deafea8642 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/matcher/IndexedSearchParamExtractor.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/matcher/IndexedSearchParamExtractor.java @@ -34,22 +34,34 @@ import javax.annotation.Nonnull; public class IndexedSearchParamExtractor { @Autowired private FhirContext myContext; + @Autowired private SearchParamExtractorService mySearchParamExtractorService; @Nonnull - public ResourceIndexedSearchParams extractIndexedSearchParams(IBaseResource theResource, RequestDetails theRequest) { + public ResourceIndexedSearchParams extractIndexedSearchParams( + IBaseResource theResource, RequestDetails theRequest) { return extractIndexedSearchParams(theResource, theRequest, ISearchParamExtractor.ALL_PARAMS); } @Nonnull - public ResourceIndexedSearchParams extractIndexedSearchParams(IBaseResource theResource, RequestDetails theRequest, ISearchParamExtractor.ISearchParamFilter filter) { + public ResourceIndexedSearchParams extractIndexedSearchParams( + IBaseResource theResource, RequestDetails theRequest, ISearchParamExtractor.ISearchParamFilter filter) { ResourceTable entity = new ResourceTable(); TransactionDetails transactionDetails = new TransactionDetails(); String resourceType = myContext.getResourceType(theResource); entity.setResourceType(resourceType); ResourceIndexedSearchParams resourceIndexedSearchParams = new ResourceIndexedSearchParams(); - mySearchParamExtractorService.extractFromResource(null, theRequest, resourceIndexedSearchParams, new ResourceIndexedSearchParams(), entity, theResource, transactionDetails, false, filter); + mySearchParamExtractorService.extractFromResource( + null, + theRequest, + resourceIndexedSearchParams, + new ResourceIndexedSearchParams(), + entity, + theResource, + transactionDetails, + false, + filter); return resourceIndexedSearchParams; } } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/matcher/SearchParamMatcher.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/matcher/SearchParamMatcher.java index 7e5dbae7b02..75367cca4bc 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/matcher/SearchParamMatcher.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/matcher/SearchParamMatcher.java @@ -32,8 +32,10 @@ import org.springframework.stereotype.Service; public class SearchParamMatcher { @Autowired private FhirContext myFhirContext; + @Autowired private IndexedSearchParamExtractor myIndexedSearchParamExtractor; + @Autowired private InMemoryResourceMatcher myInMemoryResourceMatcher; @@ -45,8 +47,10 @@ public class SearchParamMatcher { if (theSearchParameterMap.isEmpty()) { return InMemoryMatchResult.successfulMatch(); } - ResourceIndexedSearchParams resourceIndexedSearchParams = myIndexedSearchParamExtractor.extractIndexedSearchParams(theResource, null); + ResourceIndexedSearchParams resourceIndexedSearchParams = + myIndexedSearchParamExtractor.extractIndexedSearchParams(theResource, null); RuntimeResourceDefinition resourceDefinition = myFhirContext.getResourceDefinition(theResource); - return myInMemoryResourceMatcher.match(theSearchParameterMap, theResource, resourceDefinition, resourceIndexedSearchParams); + return myInMemoryResourceMatcher.match( + theSearchParameterMap, 
theResource, resourceDefinition, resourceIndexedSearchParams); } } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/nickname/NicknameInterceptor.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/nickname/NicknameInterceptor.java index 16392bc57fb..3dcf5fec1e4 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/nickname/NicknameInterceptor.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/nickname/NicknameInterceptor.java @@ -68,16 +68,14 @@ public class NicknameInterceptor { if (stringParam.isNicknameExpand()) { ourLog.debug("Found a nickname parameter to expand: {} {}", theParamName, stringParam); toRemove.add(stringParam); - //First, attempt to expand as a formal name + // First, attempt to expand as a formal name String name = stringParam.getValue().toLowerCase(Locale.ROOT); Collection expansions = myNicknameSvc.getEquivalentNames(name); if (expansions == null) { continue; } ourLog.debug("Parameter has been expanded to: {} {}", theParamName, String.join(", ", expansions)); - expansions.stream() - .map(StringParam::new) - .forEach(toAdd::add); + expansions.stream().map(StringParam::new).forEach(toAdd::add); } } } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/provider/SearchableHashMapResourceProvider.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/provider/SearchableHashMapResourceProvider.java index 02473092945..41aed01c271 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/provider/SearchableHashMapResourceProvider.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/provider/SearchableHashMapResourceProvider.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.jpa.searchparam.provider; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult; import ca.uhn.fhir.jpa.searchparam.matcher.SearchParamMatcher; @@ -42,21 +42,25 @@ public class SearchableHashMapResourceProvider extends * @param theFhirContext The FHIR context * @param theResourceType The resource type to support */ - public SearchableHashMapResourceProvider(FhirContext theFhirContext, Class theResourceType, SearchParamMatcher theSearchParamMatcher) { + public SearchableHashMapResourceProvider( + FhirContext theFhirContext, Class theResourceType, SearchParamMatcher theSearchParamMatcher) { super(theFhirContext, theResourceType); mySearchParamMatcher = theSearchParamMatcher; } public List searchByCriteria(String theCriteria, RequestDetails theRequest) { return searchBy(resource -> mySearchParamMatcher.match(theCriteria, resource, theRequest), theRequest); - } public List searchByParams(SearchParameterMap theSearchParams, RequestDetails theRequest) { - return searchBy(resource -> mySearchParamMatcher.match(theSearchParams.toNormalizedQueryString(getFhirContext()), resource, theRequest), theRequest); + return searchBy( + resource -> mySearchParamMatcher.match( + theSearchParams.toNormalizedQueryString(getFhirContext()), resource, theRequest), + theRequest); } - private List searchBy(Function theMatcher, RequestDetails theRequest) { + private List searchBy( + Function theMatcher, RequestDetails theRequest) { mySearchCount.incrementAndGet(); List allEResources = getAllResources(); @@ -64,7 +68,8 @@ public 
class SearchableHashMapResourceProvider extends for (T resource : allEResources) { InMemoryMatchResult result = theMatcher.apply(resource); if (!result.supported()) { - throw new InvalidRequestException(Msg.code(502) + "Search not supported by in-memory matcher: "+result.getUnsupportedReason()); + throw new InvalidRequestException( + Msg.code(502) + "Search not supported by in-memory matcher: " + result.getUnsupportedReason()); } if (result.matched()) { matches.add(resource); diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/ISearchParamRegistryController.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/ISearchParamRegistryController.java index 21e9014ecd9..35c9d14c4c2 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/ISearchParamRegistryController.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/ISearchParamRegistryController.java @@ -16,7 +16,7 @@ * See the License for the specific language governing permissions and * limitations under the License. * #L% - */ + */ package ca.uhn.fhir.jpa.searchparam.registry; import ca.uhn.fhir.jpa.cache.ResourceChangeResult; @@ -24,5 +24,4 @@ import ca.uhn.fhir.jpa.cache.ResourceChangeResult; public interface ISearchParamRegistryController { ResourceChangeResult refreshCacheIfNecessary(); - } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/JpaSearchParamCache.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/JpaSearchParamCache.java index ab4b3d14dbe..d1866bfcb3a 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/JpaSearchParamCache.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/JpaSearchParamCache.java @@ -52,7 +52,8 @@ public class JpaSearchParamCache { private static final Logger ourLog = LoggerFactory.getLogger(JpaSearchParamCache.class); volatile Map> myActiveComboSearchParams = Collections.emptyMap(); - volatile Map, List>> myActiveParamNamesToComboSearchParams = Collections.emptyMap(); + volatile Map, List>> myActiveParamNamesToComboSearchParams = + Collections.emptyMap(); public List getActiveComboSearchParams(String theResourceName) { List retval = myActiveComboSearchParams.get(theResourceName); @@ -62,22 +63,22 @@ public class JpaSearchParamCache { return retval; } - public List getActiveComboSearchParams(String theResourceName, ComboSearchParamType theParamType) { - return getActiveComboSearchParams(theResourceName) - .stream() - .filter(param -> Objects.equals(theParamType, param.getComboSearchParamType())) - .collect(Collectors.toList()); + public List getActiveComboSearchParams( + String theResourceName, ComboSearchParamType theParamType) { + return getActiveComboSearchParams(theResourceName).stream() + .filter(param -> Objects.equals(theParamType, param.getComboSearchParamType())) + .collect(Collectors.toList()); } public Optional getActiveComboSearchParamById(String theResourceName, IIdType theId) { - return getActiveComboSearchParams(theResourceName) - .stream() - .filter((param) -> Objects.equals(theId, param.getId())) - .findFirst(); + return getActiveComboSearchParams(theResourceName).stream() + .filter((param) -> Objects.equals(theId, param.getId())) + .findFirst(); } public List getActiveComboSearchParams(String theResourceName, Set theParamNames) { - Map, List> paramNamesToParams 
= myActiveParamNamesToComboSearchParams.get(theResourceName); + Map, List> paramNamesToParams = + myActiveParamNamesToComboSearchParams.get(theResourceName); if (paramNamesToParams == null) { return Collections.emptyList(); } @@ -89,7 +90,10 @@ public class JpaSearchParamCache { return Collections.unmodifiableList(retVal); } - void populateActiveSearchParams(IInterceptorService theInterceptorBroadcaster, IPhoneticEncoder theDefaultPhoneticEncoder, RuntimeSearchParamCache theActiveSearchParams) { + void populateActiveSearchParams( + IInterceptorService theInterceptorBroadcaster, + IPhoneticEncoder theDefaultPhoneticEncoder, + RuntimeSearchParamCache theActiveSearchParams) { Map> resourceNameToComboSearchParams = new HashMap<>(); Map, List>> activeParamNamesToComboSearchParams = new HashMap<>(); @@ -101,17 +105,23 @@ public class JpaSearchParamCache { */ for (String theResourceName : theActiveSearchParams.getResourceNameKeys()) { ResourceSearchParams searchParams = theActiveSearchParams.getSearchParamMap(theResourceName); - List comboSearchParams = resourceNameToComboSearchParams.computeIfAbsent(theResourceName, k -> new ArrayList<>()); + List comboSearchParams = + resourceNameToComboSearchParams.computeIfAbsent(theResourceName, k -> new ArrayList<>()); Collection nextSearchParamsForResourceName = searchParams.values(); ourLog.trace("Resource {} has {} params", theResourceName, searchParams.size()); for (RuntimeSearchParam nextCandidate : nextSearchParamsForResourceName) { - ourLog.trace("Resource {} has parameter {} with ID {}", theResourceName, nextCandidate.getName(), nextCandidate.getId()); + ourLog.trace( + "Resource {} has parameter {} with ID {}", + theResourceName, + nextCandidate.getName(), + nextCandidate.getId()); if (nextCandidate.getId() != null) { - idToRuntimeSearchParam.put(nextCandidate.getId().toUnqualifiedVersionless().getValue(), nextCandidate); + idToRuntimeSearchParam.put( + nextCandidate.getId().toUnqualifiedVersionless().getValue(), nextCandidate); } if (isNotBlank(nextCandidate.getUri())) { idToRuntimeSearchParam.put(nextCandidate.getUri(), nextCandidate); @@ -124,14 +134,14 @@ public class JpaSearchParamCache { setPhoneticEncoder(theDefaultPhoneticEncoder, nextCandidate); } - } ourLog.trace("Have {} search params loaded", idToRuntimeSearchParam.size()); Set haveSeen = new HashSet<>(); for (RuntimeSearchParam next : jpaSearchParams) { - if (next.getId() != null && !haveSeen.add(next.getId().toUnqualifiedVersionless().getValue())) { + if (next.getId() != null + && !haveSeen.add(next.getId().toUnqualifiedVersionless().getValue())) { continue; } @@ -142,14 +152,15 @@ public class JpaSearchParamCache { if (componentTarget != null) { paramNames.add(componentTarget.getName()); } else { - String message = "Search parameter " + next + " refers to unknown component " + nextRef + ", ignoring this parameter"; + String message = "Search parameter " + next + " refers to unknown component " + nextRef + + ", ignoring this parameter"; ourLog.warn(message); // Interceptor broadcast: JPA_PERFTRACE_WARNING HookParams params = new HookParams() - .add(RequestDetails.class, null) - .add(ServletRequestDetails.class, null) - .add(StorageProcessingMessage.class, new StorageProcessingMessage().setMessage(message)); + .add(RequestDetails.class, null) + .add(ServletRequestDetails.class, null) + .add(StorageProcessingMessage.class, new StorageProcessingMessage().setMessage(message)); theInterceptorBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_WARNING, params); } } @@ -157,8 +168,13 @@ public 
class JpaSearchParamCache { if (next.getComboSearchParamType() != null) { for (String nextBase : next.getBase()) { activeParamNamesToComboSearchParams.computeIfAbsent(nextBase, v -> new HashMap<>()); - activeParamNamesToComboSearchParams.get(nextBase).computeIfAbsent(paramNames, t -> new ArrayList<>()); - activeParamNamesToComboSearchParams.get(nextBase).get(paramNames).add(next); + activeParamNamesToComboSearchParams + .get(nextBase) + .computeIfAbsent(paramNames, t -> new ArrayList<>()); + activeParamNamesToComboSearchParams + .get(nextBase) + .get(paramNames) + .add(next); } } } @@ -171,8 +187,11 @@ public class JpaSearchParamCache { void setPhoneticEncoder(IPhoneticEncoder theDefaultPhoneticEncoder, RuntimeSearchParam searchParam) { if ("phonetic".equals(searchParam.getName())) { - ourLog.debug("Setting search param {} on {} phonetic encoder to {}", - searchParam.getName(), searchParam.getPath(), theDefaultPhoneticEncoder == null ? "null" : theDefaultPhoneticEncoder.name()); + ourLog.debug( + "Setting search param {} on {} phonetic encoder to {}", + searchParam.getName(), + searchParam.getPath(), + theDefaultPhoneticEncoder == null ? "null" : theDefaultPhoneticEncoder.name()); searchParam.setPhoneticEncoder(theDefaultPhoneticEncoder); } } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/ReadOnlySearchParamCache.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/ReadOnlySearchParamCache.java index 3bbb2e08c05..a9c86f1fa8a 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/ReadOnlySearchParamCache.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/ReadOnlySearchParamCache.java @@ -30,8 +30,6 @@ import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.instance.model.api.IBaseResource; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -39,6 +37,8 @@ import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Stream; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; @@ -84,11 +84,15 @@ public class ReadOnlySearchParamCache { return myUrlToParam.get(theUrl); } - public static ReadOnlySearchParamCache fromFhirContext(@Nonnull FhirContext theFhirContext, @Nonnull SearchParameterCanonicalizer theCanonicalizer) { + public static ReadOnlySearchParamCache fromFhirContext( + @Nonnull FhirContext theFhirContext, @Nonnull SearchParameterCanonicalizer theCanonicalizer) { return fromFhirContext(theFhirContext, theCanonicalizer, null); } - public static ReadOnlySearchParamCache fromFhirContext(@Nonnull FhirContext theFhirContext, @Nonnull SearchParameterCanonicalizer theCanonicalizer, @Nullable Set theSearchParamPatternsToInclude) { + public static ReadOnlySearchParamCache fromFhirContext( + @Nonnull FhirContext theFhirContext, + @Nonnull SearchParameterCanonicalizer theCanonicalizer, + @Nullable Set theSearchParamPatternsToInclude) { assert theCanonicalizer != null; ReadOnlySearchParamCache retVal = new ReadOnlySearchParamCache(); @@ -104,10 +108,16 @@ public class ReadOnlySearchParamCache { */ List searchParams = null; if (theFhirContext.getVersion().getVersion() == FhirVersionEnum.R4) { - IBaseBundle allSearchParameterBundle = (IBaseBundle) 
theFhirContext.newJsonParser().parseResource(ClasspathUtil.loadResourceAsStream("org/hl7/fhir/r4/model/sp/search-parameters.json")); + IBaseBundle allSearchParameterBundle = (IBaseBundle) theFhirContext + .newJsonParser() + .parseResource( + ClasspathUtil.loadResourceAsStream("org/hl7/fhir/r4/model/sp/search-parameters.json")); searchParams = BundleUtil.toListOfResources(theFhirContext, allSearchParameterBundle); } else if (theFhirContext.getVersion().getVersion() == FhirVersionEnum.R4B) { - IBaseBundle allSearchParameterBundle = (IBaseBundle) theFhirContext.newXmlParser().parseResource(ClasspathUtil.loadResourceAsStream("org/hl7/fhir/r4b/model/sp/search-parameters.xml")); + IBaseBundle allSearchParameterBundle = (IBaseBundle) theFhirContext + .newXmlParser() + .parseResource( + ClasspathUtil.loadResourceAsStream("org/hl7/fhir/r4b/model/sp/search-parameters.xml")); searchParams = BundleUtil.toListOfResources(theFhirContext, allSearchParameterBundle); } else if (theFhirContext.getVersion().getVersion() == FhirVersionEnum.R5) { searchParams = FhirContext.forR5Cached().getValidationSupport().fetchAllSearchParameters(); @@ -123,18 +133,18 @@ public class ReadOnlySearchParamCache { // a status of DRAFT which means the server doesn't actually apply them. // At least this was the case as of 2021-12-24 - JA nextCanonical = new RuntimeSearchParam( - nextCanonical.getId(), - nextCanonical.getUri(), - nextCanonical.getName(), - nextCanonical.getDescription(), - nextCanonical.getPath(), - nextCanonical.getParamType(), - nextCanonical.getProvidesMembershipInCompartments(), - nextCanonical.getTargets(), - RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE, - nextCanonical.getComboSearchParamType(), - nextCanonical.getComponents(), - nextCanonical.getBase()); + nextCanonical.getId(), + nextCanonical.getUri(), + nextCanonical.getName(), + nextCanonical.getDescription(), + nextCanonical.getPath(), + nextCanonical.getParamType(), + nextCanonical.getProvidesMembershipInCompartments(), + nextCanonical.getTargets(), + RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE, + nextCanonical.getComboSearchParamType(), + nextCanonical.getComponents(), + nextCanonical.getBase()); Collection base = nextCanonical.getBase(); if (base.contains("Resource") || base.contains("DomainResource")) { @@ -143,9 +153,12 @@ public class ReadOnlySearchParamCache { // Add it to our return value if permitted by the pattern parameters for (String nextResourceName : base) { - ResourceSearchParams resourceSearchParams = retVal.myResourceNameToSpNameToSp.computeIfAbsent(nextResourceName, t -> new ResourceSearchParams(nextResourceName)); + ResourceSearchParams resourceSearchParams = retVal.myResourceNameToSpNameToSp.computeIfAbsent( + nextResourceName, t -> new ResourceSearchParams(nextResourceName)); String nextParamName = nextCanonical.getName(); - if (theSearchParamPatternsToInclude == null || searchParamMatchesAtLeastOnePattern(theSearchParamPatternsToInclude, nextResourceName, nextParamName)) { + if (theSearchParamPatternsToInclude == null + || searchParamMatchesAtLeastOnePattern( + theSearchParamPatternsToInclude, nextResourceName, nextParamName)) { resourceSearchParams.addSearchParamIfAbsent(nextParamName, nextCanonical); } } @@ -157,11 +170,14 @@ public class ReadOnlySearchParamCache { RuntimeResourceDefinition nextResDef = theFhirContext.getResourceDefinition(resourceName); String nextResourceName = nextResDef.getName(); - ResourceSearchParams resourceSearchParams = 
retVal.myResourceNameToSpNameToSp.computeIfAbsent(nextResourceName, t -> new ResourceSearchParams(nextResourceName)); + ResourceSearchParams resourceSearchParams = retVal.myResourceNameToSpNameToSp.computeIfAbsent( + nextResourceName, t -> new ResourceSearchParams(nextResourceName)); for (RuntimeSearchParam nextSp : nextResDef.getSearchParams()) { String nextParamName = nextSp.getName(); // Add it to our return value if permitted by the pattern parameters - if (theSearchParamPatternsToInclude == null || searchParamMatchesAtLeastOnePattern(theSearchParamPatternsToInclude, nextResourceName, nextParamName)) { + if (theSearchParamPatternsToInclude == null + || searchParamMatchesAtLeastOnePattern( + theSearchParamPatternsToInclude, nextResourceName, nextParamName)) { resourceSearchParams.addSearchParamIfAbsent(nextParamName, nextSp); } } @@ -169,7 +185,8 @@ public class ReadOnlySearchParamCache { return retVal; } - public static boolean searchParamMatchesAtLeastOnePattern(Set theSearchParamPatterns, String theResourceType, String theSearchParamName) { + public static boolean searchParamMatchesAtLeastOnePattern( + Set theSearchParamPatterns, String theResourceType, String theSearchParamName) { for (String nextPattern : theSearchParamPatterns) { if ("*".equals(nextPattern)) { return true; @@ -192,8 +209,8 @@ public class ReadOnlySearchParamCache { return false; } - public static ReadOnlySearchParamCache fromRuntimeSearchParamCache(RuntimeSearchParamCache theRuntimeSearchParamCache) { + public static ReadOnlySearchParamCache fromRuntimeSearchParamCache( + RuntimeSearchParamCache theRuntimeSearchParamCache) { return new ReadOnlySearchParamCache(theRuntimeSearchParamCache); } - } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/RuntimeSearchParamCache.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/RuntimeSearchParamCache.java index 732b07c851f..f0d4b49d379 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/RuntimeSearchParamCache.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/RuntimeSearchParamCache.java @@ -32,8 +32,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; public class RuntimeSearchParamCache extends ReadOnlySearchParamCache { private static final Logger ourLog = LoggerFactory.getLogger(RuntimeSearchParamCache.class); - protected RuntimeSearchParamCache() { - } + protected RuntimeSearchParamCache() {} public void add(String theResourceName, String theName, RuntimeSearchParam theSearchParam) { ResourceSearchParams resourceSearchParams = getSearchParamMap(theResourceName); @@ -65,7 +64,8 @@ public class RuntimeSearchParamCache extends ReadOnlySearchParamCache { } private void putAll(ReadOnlySearchParamCache theReadOnlySearchParamCache) { - Set> builtInSps = theReadOnlySearchParamCache.myResourceNameToSpNameToSp.entrySet(); + Set> builtInSps = + theReadOnlySearchParamCache.myResourceNameToSpNameToSp.entrySet(); for (Map.Entry nextBuiltInEntry : builtInSps) { for (RuntimeSearchParam nextParam : nextBuiltInEntry.getValue().values()) { String nextResourceName = nextBuiltInEntry.getKey(); @@ -73,7 +73,8 @@ public class RuntimeSearchParamCache extends ReadOnlySearchParamCache { add(nextResourceName, nextParamName, nextParam); } - ourLog.trace("Have {} built-in SPs for: {}", nextBuiltInEntry.getValue().size(), nextBuiltInEntry.getKey()); + ourLog.trace( + "Have {} built-in SPs for: {}", 
nextBuiltInEntry.getValue().size(), nextBuiltInEntry.getKey()); } } @@ -92,10 +93,12 @@ public class RuntimeSearchParamCache extends ReadOnlySearchParamCache { @Override protected ResourceSearchParams getSearchParamMap(String theResourceName) { - return myResourceNameToSpNameToSp.computeIfAbsent(theResourceName, k -> new ResourceSearchParams(theResourceName)); + return myResourceNameToSpNameToSp.computeIfAbsent( + theResourceName, k -> new ResourceSearchParams(theResourceName)); } - public static RuntimeSearchParamCache fromReadOnlySearchParamCache(ReadOnlySearchParamCache theBuiltInSearchParams) { + public static RuntimeSearchParamCache fromReadOnlySearchParamCache( + ReadOnlySearchParamCache theBuiltInSearchParams) { RuntimeSearchParamCache retVal = new RuntimeSearchParamCache(); retVal.putAll(theBuiltInSearchParams); return retVal; diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/SearchParamRegistryImpl.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/SearchParamRegistryImpl.java index 468623c1b4c..4313ac95d25 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/SearchParamRegistryImpl.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/SearchParamRegistryImpl.java @@ -48,10 +48,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import javax.annotation.PostConstruct; -import javax.annotation.PreDestroy; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -59,32 +55,40 @@ import java.util.Iterator; import java.util.List; import java.util.Optional; import java.util.Set; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import javax.annotation.PostConstruct; +import javax.annotation.PreDestroy; import static org.apache.commons.lang3.StringUtils.isBlank; -public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceChangeListener, ISearchParamRegistryController { +public class SearchParamRegistryImpl + implements ISearchParamRegistry, IResourceChangeListener, ISearchParamRegistryController { - public static final Set NON_DISABLEABLE_SEARCH_PARAMS = Collections.unmodifiableSet(Sets.newHashSet( - "*:url", - "Subscription:*", - "SearchParameter:*" - )); + public static final Set NON_DISABLEABLE_SEARCH_PARAMS = + Collections.unmodifiableSet(Sets.newHashSet("*:url", "Subscription:*", "SearchParameter:*")); private static final Logger ourLog = LoggerFactory.getLogger(SearchParamRegistryImpl.class); private static final int MAX_MANAGED_PARAM_COUNT = 10000; private static final long REFRESH_INTERVAL = DateUtils.MILLIS_PER_MINUTE; private final JpaSearchParamCache myJpaSearchParamCache = new JpaSearchParamCache(); + @Autowired private StorageSettings myStorageSettings; + @Autowired private ISearchParamProvider mySearchParamProvider; + @Autowired private FhirContext myFhirContext; + @Autowired private SearchParameterCanonicalizer mySearchParameterCanonicalizer; + @Autowired private IInterceptorService myInterceptorBroadcaster; + @Autowired private IResourceChangeListenerRegistry myResourceChangeListenerRegistry; @@ -131,7 +135,8 @@ public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceC } @Override - public List getActiveComboSearchParams(String theResourceName, ComboSearchParamType 
theParamType) { + public List getActiveComboSearchParams( + String theResourceName, ComboSearchParamType theParamType) { return myJpaSearchParamCache.getActiveComboSearchParams(theResourceName, theParamType); } @@ -150,7 +155,6 @@ public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceC } } - @Override public Optional getActiveComboSearchParamById(String theResourceName, IIdType theId) { return myJpaSearchParamCache.getActiveComboSearchParamById(theResourceName, theId); @@ -170,7 +174,9 @@ public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceC ourLog.trace("Loaded {} search params from the DB", allSearchParams.size()); if (size == null) { - ourLog.error("Only {} search parameters have been loaded, but there are more than that in the repository. Is offset search configured on this server?", allSearchParams.size()); + ourLog.error( + "Only {} search parameters have been loaded, but there are more than that in the repository. Is offset search configured on this server?", + allSearchParams.size()); } else if (size >= MAX_MANAGED_PARAM_COUNT) { ourLog.warn("Unable to support >" + MAX_MANAGED_PARAM_COUNT + " search params!"); } @@ -182,13 +188,15 @@ public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceC StopWatch sw = new StopWatch(); ReadOnlySearchParamCache builtInSearchParams = getBuiltInSearchParams(); - RuntimeSearchParamCache searchParams = RuntimeSearchParamCache.fromReadOnlySearchParamCache(builtInSearchParams); + RuntimeSearchParamCache searchParams = + RuntimeSearchParamCache.fromReadOnlySearchParamCache(builtInSearchParams); long overriddenCount = overrideBuiltinSearchParamsWithActiveJpaSearchParams(searchParams, theJpaSearchParams); ourLog.trace("Have overridden {} built-in search parameters", overriddenCount); removeInactiveSearchParams(searchParams); myActiveSearchParams = searchParams; - myJpaSearchParamCache.populateActiveSearchParams(myInterceptorBroadcaster, myPhoneticEncoder, myActiveSearchParams); + myJpaSearchParamCache.populateActiveSearchParams( + myInterceptorBroadcaster, myPhoneticEncoder, myActiveSearchParams); ourLog.debug("Refreshed search parameter cache in {}ms", sw.getMillis()); } @@ -200,10 +208,12 @@ public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceC private ReadOnlySearchParamCache getBuiltInSearchParams() { if (myBuiltInSearchParams == null) { if (myStorageSettings.isAutoSupportDefaultSearchParams()) { - myBuiltInSearchParams = ReadOnlySearchParamCache.fromFhirContext(myFhirContext, mySearchParameterCanonicalizer); + myBuiltInSearchParams = + ReadOnlySearchParamCache.fromFhirContext(myFhirContext, mySearchParameterCanonicalizer); } else { // Only the built-in search params that can not be disabled will be supported automatically - myBuiltInSearchParams = ReadOnlySearchParamCache.fromFhirContext(myFhirContext, mySearchParameterCanonicalizer, NON_DISABLEABLE_SEARCH_PARAMS); + myBuiltInSearchParams = ReadOnlySearchParamCache.fromFhirContext( + myFhirContext, mySearchParameterCanonicalizer, NON_DISABLEABLE_SEARCH_PARAMS); } } return myBuiltInSearchParams; @@ -221,7 +231,8 @@ public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceC myStorageSettings = theStorageSettings; } - private long overrideBuiltinSearchParamsWithActiveJpaSearchParams(RuntimeSearchParamCache theSearchParamCache, Collection theSearchParams) { + private long overrideBuiltinSearchParamsWithActiveJpaSearchParams( + RuntimeSearchParamCache theSearchParamCache, Collection 
theSearchParams) { if (!myStorageSettings.isDefaultSearchParamsCanBeOverridden() || theSearchParams == null) { return 0; } @@ -255,7 +266,10 @@ public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceC String name = runtimeSp.getName(); theSearchParams.add(nextBaseName, name, runtimeSp); - ourLog.debug("Adding search parameter {}.{} to SearchParamRegistry", nextBaseName, StringUtils.defaultString(name, "[composite]")); + ourLog.debug( + "Adding search parameter {}.{} to SearchParamRegistry", + nextBaseName, + StringUtils.defaultString(name, "[composite]")); retval++; } return retval; @@ -281,7 +295,6 @@ public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceC myResourceChangeListenerRegistry = theResourceChangeListenerRegistry; } - /** * There is a circular reference between this class and the ResourceChangeListenerRegistry: * SearchParamRegistryImpl -> ResourceChangeListenerRegistry -> InMemoryResourceMatcher -> SearchParamRegistryImpl. Since we only need this once on boot-up, we delay @@ -291,7 +304,8 @@ public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceC public void registerListener() { SearchParameterMap spMap = SearchParameterMap.newSynchronous(); spMap.setLoadSynchronousUpTo(MAX_MANAGED_PARAM_COUNT); - myResourceChangeListenerCache = myResourceChangeListenerRegistry.registerResourceResourceChangeListener("SearchParameter", spMap, this, REFRESH_INTERVAL); + myResourceChangeListenerCache = myResourceChangeListenerRegistry.registerResourceResourceChangeListener( + "SearchParameter", spMap, this, REFRESH_INTERVAL); } @PreDestroy @@ -319,7 +333,9 @@ public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceC if (myActiveSearchParams == null) { return; } - myActiveSearchParams.getSearchParamStream().forEach(searchParam -> myJpaSearchParamCache.setPhoneticEncoder(myPhoneticEncoder, searchParam)); + myActiveSearchParams + .getSearchParamStream() + .forEach(searchParam -> myJpaSearchParamCache.setPhoneticEncoder(myPhoneticEncoder, searchParam)); } @Override @@ -330,22 +346,31 @@ public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceC ResourceChangeResult result = ResourceChangeResult.fromResourceChangeEvent(theResourceChangeEvent); if (result.created > 0) { - ourLog.info("Adding {} search parameters to SearchParamRegistry: {}", result.created, unqualified(theResourceChangeEvent.getCreatedResourceIds())); + ourLog.info( + "Adding {} search parameters to SearchParamRegistry: {}", + result.created, + unqualified(theResourceChangeEvent.getCreatedResourceIds())); } if (result.updated > 0) { - ourLog.info("Updating {} search parameters in SearchParamRegistry: {}", result.updated, unqualified(theResourceChangeEvent.getUpdatedResourceIds())); + ourLog.info( + "Updating {} search parameters in SearchParamRegistry: {}", + result.updated, + unqualified(theResourceChangeEvent.getUpdatedResourceIds())); } if (result.deleted > 0) { - ourLog.info("Deleting {} search parameters from SearchParamRegistry: {}", result.deleted, unqualified(theResourceChangeEvent.getDeletedResourceIds())); + ourLog.info( + "Deleting {} search parameters from SearchParamRegistry: {}", + result.deleted, + unqualified(theResourceChangeEvent.getDeletedResourceIds())); } rebuildActiveSearchParams(); } private String unqualified(List theIds) { Iterator unqualifiedIds = theIds.stream() - .map(IIdType::toUnqualifiedVersionless) - .map(IIdType::getValue) - .iterator(); + .map(IIdType::toUnqualifiedVersionless) + 
.map(IIdType::getValue) + .iterator(); return StringUtils.join(unqualifiedIds, ", "); } @@ -371,7 +396,8 @@ public class SearchParamRegistryImpl implements ISearchParamRegistry, IResourceC } @VisibleForTesting - public void setSearchParameterCanonicalizerForUnitTest(SearchParameterCanonicalizer theSearchParameterCanonicalizerForUnitTest) { + public void setSearchParameterCanonicalizerForUnitTest( + SearchParameterCanonicalizer theSearchParameterCanonicalizerForUnitTest) { mySearchParameterCanonicalizer = theSearchParameterCanonicalizerForUnitTest; } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/SearchParameterCanonicalizer.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/SearchParameterCanonicalizer.java index e51c7f608a3..f8b086965ca 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/SearchParameterCanonicalizer.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/registry/SearchParameterCanonicalizer.java @@ -65,6 +65,7 @@ public class SearchParameterCanonicalizer { private final FhirContext myFhirContext; private final FhirTerser myTerser; + @Autowired public SearchParameterCanonicalizer(FhirContext theFhirContext) { myFhirContext = theFhirContext; @@ -85,10 +86,12 @@ public class SearchParameterCanonicalizer { RuntimeSearchParam retVal; switch (myFhirContext.getVersion().getVersion()) { case DSTU2: - retVal = canonicalizeSearchParameterDstu2((ca.uhn.fhir.model.dstu2.resource.SearchParameter) theSearchParameter); + retVal = canonicalizeSearchParameterDstu2( + (ca.uhn.fhir.model.dstu2.resource.SearchParameter) theSearchParameter); break; case DSTU3: - retVal = canonicalizeSearchParameterDstu3((org.hl7.fhir.dstu3.model.SearchParameter) theSearchParameter); + retVal = + canonicalizeSearchParameterDstu3((org.hl7.fhir.dstu3.model.SearchParameter) theSearchParameter); break; case R4: case R4B: @@ -99,7 +102,9 @@ public class SearchParameterCanonicalizer { case DSTU2_1: // Non-supported - these won't happen so just fall through default: - throw new InternalErrorException(Msg.code(510) + "SearchParameter canonicalization not supported for FHIR version" + myFhirContext.getVersion().getVersion()); + throw new InternalErrorException( + Msg.code(510) + "SearchParameter canonicalization not supported for FHIR version" + + myFhirContext.getVersion().getVersion()); } if (retVal != null) { @@ -109,15 +114,17 @@ public class SearchParameterCanonicalizer { return retVal; } - private RuntimeSearchParam canonicalizeSearchParameterDstu2(ca.uhn.fhir.model.dstu2.resource.SearchParameter theNextSp) { + private RuntimeSearchParam canonicalizeSearchParameterDstu2( + ca.uhn.fhir.model.dstu2.resource.SearchParameter theNextSp) { String name = theNextSp.getCode(); String description = theNextSp.getDescription(); String path = theNextSp.getXpath(); Collection baseResource = toStrings(Collections.singletonList(theNextSp.getBaseElement())); - List baseCustomResources = extractDstu2CustomResourcesFromExtensions(theNextSp, HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE); + List baseCustomResources = extractDstu2CustomResourcesFromExtensions( + theNextSp, HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE); - if(!baseCustomResources.isEmpty()){ + if (!baseCustomResources.isEmpty()) { baseResource = Collections.singleton(baseCustomResources.get(0)); } @@ -166,7 +173,8 @@ public class SearchParameterCanonicalizer { } Set targetResources = 
DatatypeUtil.toStringSet(theNextSp.getTarget()); - List targetCustomResources = extractDstu2CustomResourcesFromExtensions(theNextSp, HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_TARGET_RESOURCE); + List targetCustomResources = extractDstu2CustomResourcesFromExtensions( + theNextSp, HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_TARGET_RESOURCE); maybeAddCustomResourcesToResources(targetResources, targetCustomResources); @@ -193,7 +201,19 @@ public class SearchParameterCanonicalizer { } List components = Collections.emptyList(); - return new RuntimeSearchParam(id, uri, name, description, path, paramType, Collections.emptySet(), targetResources, status, unique, components, baseResource); + return new RuntimeSearchParam( + id, + uri, + name, + description, + path, + paramType, + Collections.emptySet(), + targetResources, + status, + unique, + components, + baseResource); } private RuntimeSearchParam canonicalizeSearchParameterDstu3(org.hl7.fhir.dstu3.model.SearchParameter theNextSp) { @@ -202,7 +222,8 @@ public class SearchParameterCanonicalizer { String path = theNextSp.getExpression(); List baseResources = new ArrayList<>(toStrings(theNextSp.getBase())); - List baseCustomResources = extractDstu3CustomResourcesFromExtensions(theNextSp, HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE); + List baseCustomResources = extractDstu3CustomResourcesFromExtensions( + theNextSp, HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE); maybeAddCustomResourcesToResources(baseResources, baseCustomResources); @@ -258,7 +279,8 @@ public class SearchParameterCanonicalizer { } Set targetResources = DatatypeUtil.toStringSet(theNextSp.getTarget()); - List targetCustomResources = extractDstu3CustomResourcesFromExtensions(theNextSp, HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_TARGET_RESOURCE); + List targetCustomResources = extractDstu3CustomResourcesFromExtensions( + theNextSp, HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_TARGET_RESOURCE); maybeAddCustomResourcesToResources(targetResources, targetCustomResources); @@ -286,10 +308,27 @@ public class SearchParameterCanonicalizer { List components = new ArrayList<>(); for (SearchParameter.SearchParameterComponentComponent next : theNextSp.getComponent()) { - components.add(new RuntimeSearchParam.Component(next.getExpression(), next.getDefinition().getReferenceElement().toUnqualifiedVersionless().getValue())); + components.add(new RuntimeSearchParam.Component( + next.getExpression(), + next.getDefinition() + .getReferenceElement() + .toUnqualifiedVersionless() + .getValue())); } - return new RuntimeSearchParam(id, uri, name, description, path, paramType, Collections.emptySet(), targetResources, status, unique, components, baseResources); + return new RuntimeSearchParam( + id, + uri, + name, + description, + path, + paramType, + Collections.emptySet(), + targetResources, + status, + unique, + components, + baseResources); } private RuntimeSearchParam canonicalizeSearchParameterR4Plus(IBaseResource theNextSp) { @@ -299,7 +338,8 @@ public class SearchParameterCanonicalizer { String path = myTerser.getSinglePrimitiveValueOrNull(theNextSp, "expression"); Set baseResources = extractR4PlusResources("base", theNextSp); - List baseCustomResources = extractR4PlusCustomResourcesFromExtensions(theNextSp, HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE); + List baseCustomResources = extractR4PlusCustomResourcesFromExtensions( + theNextSp, HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE); maybeAddCustomResourcesToResources(baseResources, 
baseCustomResources); @@ -350,7 +390,8 @@ public class SearchParameterCanonicalizer { } Set targetResources = extractR4PlusResources("target", theNextSp); - List targetCustomResources = extractR4PlusCustomResourcesFromExtensions(theNextSp, HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_TARGET_RESOURCE); + List targetCustomResources = extractR4PlusCustomResourcesFromExtensions( + theNextSp, HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_TARGET_RESOURCE); maybeAddCustomResourcesToResources(targetResources, targetCustomResources); @@ -366,14 +407,14 @@ public class SearchParameterCanonicalizer { String uri = myTerser.getSinglePrimitiveValueOrNull(theNextSp, "url"); ComboSearchParamType unique = null; - String value = ((IBaseHasExtensions) theNextSp).getExtension() - .stream() - .filter(e -> HapiExtensions.EXT_SP_UNIQUE.equals(e.getUrl())) - .filter(t -> t.getValue() instanceof IPrimitiveType) - .map(t -> (IPrimitiveType) t.getValue()) - .map(t -> t.getValueAsString()) - .findFirst() - .orElse(""); + String value = ((IBaseHasExtensions) theNextSp) + .getExtension().stream() + .filter(e -> HapiExtensions.EXT_SP_UNIQUE.equals(e.getUrl())) + .filter(t -> t.getValue() instanceof IPrimitiveType) + .map(t -> (IPrimitiveType) t.getValue()) + .map(t -> t.getValueAsString()) + .findFirst() + .orElse(""); if ("true".equalsIgnoreCase(value)) { unique = ComboSearchParamType.UNIQUE; } else if ("false".equalsIgnoreCase(value)) { @@ -391,15 +432,25 @@ public class SearchParameterCanonicalizer { components.add(new RuntimeSearchParam.Component(expression, definition)); } - return new RuntimeSearchParam(id, uri, name, description, path, paramType, Collections.emptySet(), targetResources, status, unique, components, baseResources); + return new RuntimeSearchParam( + id, + uri, + name, + description, + path, + paramType, + Collections.emptySet(), + targetResources, + status, + unique, + components, + baseResources); } private Set extractR4PlusResources(String thePath, IBaseResource theNextSp) { - return myTerser - .getValues(theNextSp, thePath, IPrimitiveType.class) - .stream() - .map(IPrimitiveType::getValueAsString) - .collect(Collectors.toSet()); + return myTerser.getValues(theNextSp, thePath, IPrimitiveType.class).stream() + .map(IPrimitiveType::getValueAsString) + .collect(Collectors.toSet()); } /** @@ -407,7 +458,9 @@ public class SearchParameterCanonicalizer { */ protected void extractExtensions(IBaseResource theSearchParamResource, RuntimeSearchParam theRuntimeSearchParam) { if (theSearchParamResource instanceof IBaseHasExtensions) { - List> extensions = (List>) ((IBaseHasExtensions) theSearchParamResource).getExtension(); + List> extensions = + (List>) + ((IBaseHasExtensions) theSearchParamResource).getExtension(); for (IBaseExtension next : extensions) { String nextUrl = next.getUrl(); if (isNotBlank(nextUrl)) { @@ -423,9 +476,12 @@ public class SearchParameterCanonicalizer { } @SuppressWarnings("unchecked") - private void addUpliftRefchain(RuntimeSearchParam theRuntimeSearchParam, IBaseExtension theExtension) { - String code = ExtensionUtil.extractChildPrimitiveExtensionValue(theExtension, HapiExtensions.EXTENSION_SEARCHPARAM_UPLIFT_REFCHAIN_PARAM_CODE); - String elementName = ExtensionUtil.extractChildPrimitiveExtensionValue(theExtension, HapiExtensions.EXTENSION_SEARCHPARAM_UPLIFT_REFCHAIN_ELEMENT_NAME); + private void addUpliftRefchain( + RuntimeSearchParam theRuntimeSearchParam, IBaseExtension theExtension) { + String code = ExtensionUtil.extractChildPrimitiveExtensionValue( + theExtension, 
HapiExtensions.EXTENSION_SEARCHPARAM_UPLIFT_REFCHAIN_PARAM_CODE); + String elementName = ExtensionUtil.extractChildPrimitiveExtensionValue( + theExtension, HapiExtensions.EXTENSION_SEARCHPARAM_UPLIFT_REFCHAIN_ELEMENT_NAME); if (isNotBlank(code)) { theRuntimeSearchParam.addUpliftRefchain(code, elementName); } @@ -447,62 +503,65 @@ public class SearchParameterCanonicalizer { } } - private List extractDstu2CustomResourcesFromExtensions(ca.uhn.fhir.model.dstu2.resource.SearchParameter theSearchParameter, String theExtensionUrl) { + private List extractDstu2CustomResourcesFromExtensions( + ca.uhn.fhir.model.dstu2.resource.SearchParameter theSearchParameter, String theExtensionUrl) { List customSpExtensionDt = theSearchParameter.getUndeclaredExtensionsByUrl(theExtensionUrl); return customSpExtensionDt.stream() - .map(theExtensionDt -> theExtensionDt.getValueAsPrimitive().getValueAsString()) - .filter(StringUtils::isNotBlank) - .collect(Collectors.toList()); + .map(theExtensionDt -> theExtensionDt.getValueAsPrimitive().getValueAsString()) + .filter(StringUtils::isNotBlank) + .collect(Collectors.toList()); } - private List extractDstu3CustomResourcesFromExtensions(org.hl7.fhir.dstu3.model.SearchParameter theSearchParameter, String theExtensionUrl) { + private List extractDstu3CustomResourcesFromExtensions( + org.hl7.fhir.dstu3.model.SearchParameter theSearchParameter, String theExtensionUrl) { List customSpExtensions = theSearchParameter.getExtensionsByUrl(theExtensionUrl); return customSpExtensions.stream() - .map(theExtension -> theExtension.getValueAsPrimitive().getValueAsString()) - .filter(StringUtils::isNotBlank) - .collect(Collectors.toList()); - + .map(theExtension -> theExtension.getValueAsPrimitive().getValueAsString()) + .filter(StringUtils::isNotBlank) + .collect(Collectors.toList()); } - private List extractR4PlusCustomResourcesFromExtensions(IBaseResource theSearchParameter, String theExtensionUrl) { + private List extractR4PlusCustomResourcesFromExtensions( + IBaseResource theSearchParameter, String theExtensionUrl) { List retVal = new ArrayList<>(); if (theSearchParameter instanceof IBaseHasExtensions) { ((IBaseHasExtensions) theSearchParameter) - .getExtension() - .stream() - .filter(t -> theExtensionUrl.equals(t.getUrl())) - .filter(t -> t.getValue() instanceof IPrimitiveType) - .map(t -> ((IPrimitiveType) t.getValue())) - .map(IPrimitiveType::getValueAsString) - .filter(StringUtils::isNotBlank) - .forEach(retVal::add); + .getExtension().stream() + .filter(t -> theExtensionUrl.equals(t.getUrl())) + .filter(t -> t.getValue() instanceof IPrimitiveType) + .map(t -> ((IPrimitiveType) t.getValue())) + .map(IPrimitiveType::getValueAsString) + .filter(StringUtils::isNotBlank) + .forEach(retVal::add); } return retVal; } - private > void maybeAddCustomResourcesToResources(T theResources, List theCustomResources) { + private > void maybeAddCustomResourcesToResources( + T theResources, List theCustomResources) { // SearchParameter base and target components require strict binding to ResourceType for dstu[2|3], R4, R4B // and to Version Independent Resource Types for R5. 
// - // To handle custom resources, we set a placeholder of type 'Resource' in the base or target component and define - // the custom resource by adding a corresponding extension with url HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE + // To handle custom resources, we set a placeholder of type 'Resource' in the base or target component and + // define + // the custom resource by adding a corresponding extension with url + // HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE // or HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_TARGET_RESOURCE with the name of the custom resource. // - // To provide a base/target list that contains both the resources and customResources, we need to remove the placeholders + // To provide a base/target list that contains both the resources and customResources, we need to remove the + // placeholders // from the theResources and add theCustomResources. - if (!theCustomResources.isEmpty()){ + if (!theCustomResources.isEmpty()) { theResources.removeAll(Collections.singleton("Resource")); theResources.addAll(theCustomResources); } - } - } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/retry/Retrier.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/retry/Retrier.java index 34e9f61575c..9e6d8b204e9 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/retry/Retrier.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/retry/Retrier.java @@ -54,7 +54,7 @@ public class Retrier { backOff.setMultiplier(2); myRetryTemplate.setBackOffPolicy(backOff); - SimpleRetryPolicy retryPolicy = new SimpleRetryPolicy(){ + SimpleRetryPolicy retryPolicy = new SimpleRetryPolicy() { private static final long serialVersionUID = -4522467251787518700L; @Override @@ -71,12 +71,21 @@ public class Retrier { RetryListener listener = new RetryListenerSupport() { @Override - public void onError(RetryContext context, RetryCallback callback, Throwable throwable) { + public void onError( + RetryContext context, RetryCallback callback, Throwable throwable) { super.onError(context, callback, throwable); - if (throwable instanceof NullPointerException || throwable instanceof UnsupportedOperationException || HapiSystemProperties.isUnitTestModeEnabled()) { - ourLog.error("Retry failure {}/{}: {}", context.getRetryCount(), theMaxRetries, throwable.getMessage(), throwable); + if (throwable instanceof NullPointerException + || throwable instanceof UnsupportedOperationException + || HapiSystemProperties.isUnitTestModeEnabled()) { + ourLog.error( + "Retry failure {}/{}: {}", + context.getRetryCount(), + theMaxRetries, + throwable.getMessage(), + throwable); } else { - ourLog.error("Retry failure {}/{}: {}", context.getRetryCount(), theMaxRetries, throwable.toString()); + ourLog.error( + "Retry failure {}/{}: {}", context.getRetryCount(), theMaxRetries, throwable.toString()); } } }; diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/util/Dstu3DistanceHelper.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/util/Dstu3DistanceHelper.java index 04485bb6d6c..a1041a1b738 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/util/Dstu3DistanceHelper.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/util/Dstu3DistanceHelper.java @@ -30,7 +30,6 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import java.util.Collection; 
import java.util.List; - /** * In DSTU3, the near-distance search parameter is separate from near. In this utility method, * we search for near-distance search parameters and if we find any, remove them from the list @@ -67,7 +66,8 @@ public class Dstu3DistanceHelper { ReferenceParam referenceParam = (ReferenceParam) param; if (Location.SP_NEAR_DISTANCE.equals(referenceParam.getChain())) { if (retval != null) { - throw new IllegalArgumentException(Msg.code(494) + "Only one " + Location.SP_NEAR_DISTANCE + " parameter may be present"); + throw new IllegalArgumentException(Msg.code(494) + "Only one " + Location.SP_NEAR_DISTANCE + + " parameter may be present"); } else { retval = referenceParam; orParamToRemove = param; @@ -94,12 +94,13 @@ public class Dstu3DistanceHelper { // No near-distance Param if (sum == 0) { return null; - // A single near-distance Param + // A single near-distance Param } else if (sum == 1) { return (QuantityParam) theParamAndList.get(0).get(0); - // Too many near-distance params + // Too many near-distance params } else { - throw new IllegalArgumentException(Msg.code(495) + "Only one " + Location.SP_NEAR_DISTANCE + " parameter may be present"); + throw new IllegalArgumentException( + Msg.code(495) + "Only one " + Location.SP_NEAR_DISTANCE + " parameter may be present"); } } } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/util/JpaParamUtil.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/util/JpaParamUtil.java index a1ca638a590..f7713bb924d 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/util/JpaParamUtil.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/util/JpaParamUtil.java @@ -19,10 +19,10 @@ */ package ca.uhn.fhir.jpa.searchparam.util; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.RuntimeSearchParam; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.IQueryParameterAnd; import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.rest.api.QualifiedParamList; @@ -50,63 +50,57 @@ import ca.uhn.fhir.rest.param.binder.QueryParameterAndBinder; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; +import javax.annotation.Nonnull; public enum JpaParamUtil { - ; /** * This is a utility method intended provided to help the JPA module. 
*/ - public static IQueryParameterAnd parseQueryParams(FhirContext theContext, RestSearchParameterTypeEnum paramType, - String theUnqualifiedParamName, List theParameters) { + public static IQueryParameterAnd parseQueryParams( + FhirContext theContext, + RestSearchParameterTypeEnum paramType, + String theUnqualifiedParamName, + List theParameters) { QueryParameterAndBinder binder; switch (paramType) { case COMPOSITE: throw new UnsupportedOperationException(Msg.code(496)); case DATE: - binder = new QueryParameterAndBinder(DateAndListParam.class, - Collections.emptyList()); + binder = new QueryParameterAndBinder(DateAndListParam.class, Collections.emptyList()); break; case NUMBER: - binder = new QueryParameterAndBinder(NumberAndListParam.class, - Collections.emptyList()); + binder = new QueryParameterAndBinder(NumberAndListParam.class, Collections.emptyList()); break; case QUANTITY: - binder = new QueryParameterAndBinder(QuantityAndListParam.class, - Collections.emptyList()); + binder = new QueryParameterAndBinder(QuantityAndListParam.class, Collections.emptyList()); break; case REFERENCE: - binder = new QueryParameterAndBinder(ReferenceAndListParam.class, - Collections.emptyList()); + binder = new QueryParameterAndBinder(ReferenceAndListParam.class, Collections.emptyList()); break; case STRING: - binder = new QueryParameterAndBinder(StringAndListParam.class, - Collections.emptyList()); + binder = new QueryParameterAndBinder(StringAndListParam.class, Collections.emptyList()); break; case TOKEN: - binder = new QueryParameterAndBinder(TokenAndListParam.class, - Collections.emptyList()); + binder = new QueryParameterAndBinder(TokenAndListParam.class, Collections.emptyList()); break; case URI: - binder = new QueryParameterAndBinder(UriAndListParam.class, - Collections.emptyList()); + binder = new QueryParameterAndBinder(UriAndListParam.class, Collections.emptyList()); break; case HAS: - binder = new QueryParameterAndBinder(HasAndListParam.class, - Collections.emptyList()); + binder = new QueryParameterAndBinder(HasAndListParam.class, Collections.emptyList()); break; case SPECIAL: - binder = new QueryParameterAndBinder(SpecialAndListParam.class, - Collections.emptyList()); + binder = new QueryParameterAndBinder(SpecialAndListParam.class, Collections.emptyList()); break; default: - throw new IllegalArgumentException(Msg.code(497) + "Parameter '" + theUnqualifiedParamName + "' has type " + paramType + " which is currently not supported."); + throw new IllegalArgumentException(Msg.code(497) + "Parameter '" + theUnqualifiedParamName + + "' has type " + paramType + " which is currently not supported."); } return binder.parse(theContext, theUnqualifiedParamName, theParameters); @@ -115,8 +109,12 @@ public enum JpaParamUtil { /** * This is a utility method intended provided to help the JPA module. 
*/ - public static IQueryParameterAnd parseQueryParams(ISearchParamRegistry theSearchParamRegistry, FhirContext theContext, RuntimeSearchParam theParamDef, - String theUnqualifiedParamName, List theParameters) { + public static IQueryParameterAnd parseQueryParams( + ISearchParamRegistry theSearchParamRegistry, + FhirContext theContext, + RuntimeSearchParam theParamDef, + String theUnqualifiedParamName, + List theParameters) { RestSearchParameterTypeEnum paramType = theParamDef.getParamType(); @@ -126,8 +124,8 @@ public enum JpaParamUtil { if (compositeList.size() != 2) { throw new ConfigurationException(Msg.code(498) + "Search parameter of type " + theUnqualifiedParamName - + " must have 2 composite types declared in parameter annotation, found " - + compositeList.size()); + + " must have 2 composite types declared in parameter annotation, found " + + compositeList.size()); } RuntimeSearchParam left = compositeList.get(0); @@ -135,8 +133,8 @@ public enum JpaParamUtil { @SuppressWarnings({"unchecked", "rawtypes"}) CompositeAndListParam cp = new CompositeAndListParam( - getCompositeBindingClass(left.getParamType(), left.getName()), - getCompositeBindingClass(right.getParamType(), right.getName())); + getCompositeBindingClass(left.getParamType(), left.getName()), + getCompositeBindingClass(right.getParamType(), right.getName())); cp.setValuesAsQueryTokens(theContext, theUnqualifiedParamName, theParameters); @@ -146,8 +144,10 @@ public enum JpaParamUtil { } } - public static List resolveComponentParameters(ISearchParamRegistry theSearchParamRegistry, RuntimeSearchParam theParamDef) { - List compositeList = resolveCompositeComponentsDeclaredOrder(theSearchParamRegistry, theParamDef); + public static List resolveComponentParameters( + ISearchParamRegistry theSearchParamRegistry, RuntimeSearchParam theParamDef) { + List compositeList = + resolveCompositeComponentsDeclaredOrder(theSearchParamRegistry, theParamDef); // todo mb why is this sorted? Is the param order flipped too during query-time? 
compositeList.sort((Comparator.comparing(RuntimeSearchParam::getName))); @@ -156,7 +156,8 @@ public enum JpaParamUtil { } @Nonnull - public static List resolveCompositeComponentsDeclaredOrder(ISearchParamRegistry theSearchParamRegistry, RuntimeSearchParam theParamDef) { + public static List resolveCompositeComponentsDeclaredOrder( + ISearchParamRegistry theSearchParamRegistry, RuntimeSearchParam theParamDef) { List compositeList = new ArrayList<>(); List components = theParamDef.getComponents(); for (RuntimeSearchParam.Component next : components) { @@ -170,8 +171,8 @@ public enum JpaParamUtil { return compositeList; } - private static Class getCompositeBindingClass(RestSearchParameterTypeEnum paramType, - String theUnqualifiedParamName) { + private static Class getCompositeBindingClass( + RestSearchParameterTypeEnum paramType, String theUnqualifiedParamName) { switch (paramType) { case DATE: @@ -195,8 +196,8 @@ public enum JpaParamUtil { case COMPOSITE: default: - throw new IllegalArgumentException(Msg.code(500) + "Parameter '" + theUnqualifiedParamName + "' has type " + paramType - + " which is currently not supported."); + throw new IllegalArgumentException(Msg.code(500) + "Parameter '" + theUnqualifiedParamName + + "' has type " + paramType + " which is currently not supported."); } } } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/util/LastNParameterHelper.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/util/LastNParameterHelper.java index 13bd137399c..caff223b6ea 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/util/LastNParameterHelper.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/util/LastNParameterHelper.java @@ -19,9 +19,9 @@ */ package ca.uhn.fhir.jpa.searchparam.util; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; public class LastNParameterHelper { @@ -57,21 +57,27 @@ public class LastNParameterHelper { } private static boolean isLastNParameterDstu3(String theParamName) { - return (theParamName.equals(org.hl7.fhir.dstu3.model.Observation.SP_SUBJECT) || theParamName.equals(org.hl7.fhir.dstu3.model.Observation.SP_PATIENT) - || theParamName.equals(org.hl7.fhir.dstu3.model.Observation.SP_CATEGORY) || theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_CODE)) - || theParamName.equals(org.hl7.fhir.dstu3.model.Observation.SP_DATE); + return (theParamName.equals(org.hl7.fhir.dstu3.model.Observation.SP_SUBJECT) + || theParamName.equals(org.hl7.fhir.dstu3.model.Observation.SP_PATIENT) + || theParamName.equals(org.hl7.fhir.dstu3.model.Observation.SP_CATEGORY) + || theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_CODE)) + || theParamName.equals(org.hl7.fhir.dstu3.model.Observation.SP_DATE); } private static boolean isLastNParameterR4(String theParamName) { - return (theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_SUBJECT) || theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_PATIENT) - || theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_CATEGORY) || theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_CODE)) - || theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_DATE); + return (theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_SUBJECT) + || theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_PATIENT) + || 
theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_CATEGORY) + || theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_CODE)) + || theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_DATE); } private static boolean isLastNParameterR5(String theParamName) { - return (theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_SUBJECT) || theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_PATIENT) - || theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_CATEGORY) || theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_CODE)) - || theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_DATE); + return (theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_SUBJECT) + || theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_PATIENT) + || theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_CATEGORY) + || theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_CODE)) + || theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_DATE); } public static String getSubjectParamName(FhirContext theContext) { @@ -82,7 +88,8 @@ public class LastNParameterHelper { } else if (theContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) { return org.hl7.fhir.dstu3.model.Observation.SP_SUBJECT; } else { - throw new InvalidRequestException(Msg.code(489) + "$lastn operation is not implemented for FHIR Version " + theContext.getVersion().getVersion().getFhirVersionString()); + throw new InvalidRequestException(Msg.code(489) + "$lastn operation is not implemented for FHIR Version " + + theContext.getVersion().getVersion().getFhirVersionString()); } } @@ -94,7 +101,8 @@ public class LastNParameterHelper { } else if (theContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) { return org.hl7.fhir.dstu3.model.Observation.SP_PATIENT; } else { - throw new InvalidRequestException(Msg.code(490) + "$lastn operation is not implemented for FHIR Version " + theContext.getVersion().getVersion().getFhirVersionString()); + throw new InvalidRequestException(Msg.code(490) + "$lastn operation is not implemented for FHIR Version " + + theContext.getVersion().getVersion().getFhirVersionString()); } } @@ -106,7 +114,8 @@ public class LastNParameterHelper { } else if (theContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) { return org.hl7.fhir.dstu3.model.Observation.SP_DATE; } else { - throw new InvalidRequestException(Msg.code(491) + "$lastn operation is not implemented for FHIR Version " + theContext.getVersion().getVersion().getFhirVersionString()); + throw new InvalidRequestException(Msg.code(491) + "$lastn operation is not implemented for FHIR Version " + + theContext.getVersion().getVersion().getFhirVersionString()); } } @@ -118,7 +127,8 @@ public class LastNParameterHelper { } else if (theContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) { return org.hl7.fhir.dstu3.model.Observation.SP_CATEGORY; } else { - throw new InvalidRequestException(Msg.code(492) + "$lastn operation is not implemented for FHIR Version " + theContext.getVersion().getVersion().getFhirVersionString()); + throw new InvalidRequestException(Msg.code(492) + "$lastn operation is not implemented for FHIR Version " + + theContext.getVersion().getVersion().getFhirVersionString()); } } @@ -130,9 +140,8 @@ public class LastNParameterHelper { } else if (theContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) { return org.hl7.fhir.dstu3.model.Observation.SP_CODE; } else { - throw new InvalidRequestException(Msg.code(493) + "$lastn operation is not implemented for FHIR Version " + 
theContext.getVersion().getVersion().getFhirVersionString()); + throw new InvalidRequestException(Msg.code(493) + "$lastn operation is not implemented for FHIR Version " + + theContext.getVersion().getVersion().getFhirVersionString()); } } - - } diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/util/SearchParameterHelper.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/util/SearchParameterHelper.java index 09fbe60b877..ea566709e54 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/util/SearchParameterHelper.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/util/SearchParameterHelper.java @@ -39,9 +39,9 @@ public class SearchParameterHelper { mySearchParameterCanonicalizer = theSearchParameterCanonicalizer; } - public Optional buildSearchParameterMapFromCanonical(IBaseResource theRuntimeSearchParam) { - RuntimeSearchParam canonicalSearchParam = mySearchParameterCanonicalizer.canonicalizeSearchParameter(theRuntimeSearchParam); + RuntimeSearchParam canonicalSearchParam = + mySearchParameterCanonicalizer.canonicalizeSearchParameter(theRuntimeSearchParam); if (canonicalSearchParam == null) { return Optional.empty(); } @@ -60,7 +60,6 @@ public class SearchParameterHelper { return Optional.of(retVal); } - private TokenAndListParam toTokenAndList(List theBases) { TokenAndListParam retVal = new TokenAndListParam(); @@ -78,6 +77,4 @@ public class SearchParameterHelper { return retVal.getValuesAsQueryTokens().isEmpty() ? null : retVal; } - - } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/channel/models/ProducingChannelParameters.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/channel/models/ProducingChannelParameters.java index 29fa6aa0f33..1c962069ea8 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/channel/models/ProducingChannelParameters.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/channel/models/ProducingChannelParameters.java @@ -31,5 +31,4 @@ public class ProducingChannelParameters extends BaseChannelParameters { public ProducingChannelParameters(String theChannelName) { super(theChannelName); } - } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/channel/subscription/SubscriptionChannelRegistry.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/channel/subscription/SubscriptionChannelRegistry.java index 1badedd18aa..7e320f41497 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/channel/subscription/SubscriptionChannelRegistry.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/channel/subscription/SubscriptionChannelRegistry.java @@ -45,13 +45,16 @@ public class SubscriptionChannelRegistry { private static final Logger ourLog = LoggerFactory.getLogger(SubscriptionRegistry.class); private final SubscriptionChannelCache myDeliveryReceiverChannels = new SubscriptionChannelCache(); - // This map is a reference count so we know to destroy the channel when there are no more active subscriptions using it + // This map is a reference count so we know to destroy the channel when there are no more active subscriptions using + // it // Key Channel Name, Value Subscription Id - private final Multimap myActiveSubscriptionByChannelName = 
MultimapBuilder.hashKeys().arrayListValues().build(); + private final Multimap myActiveSubscriptionByChannelName = + MultimapBuilder.hashKeys().arrayListValues().build(); private final Map myChannelNameToSender = new ConcurrentHashMap<>(); @Autowired private SubscriptionDeliveryHandlerFactory mySubscriptionDeliveryHandlerFactory; + @Autowired private SubscriptionChannelFactory mySubscriptionDeliveryChannelFactory; @@ -87,9 +90,11 @@ public class SubscriptionChannelRegistry { receivingParameters.setRetryConfiguration(retryConfigParameters); IChannelReceiver channelReceiver = newReceivingChannel(receivingParameters); - Optional deliveryHandler = mySubscriptionDeliveryHandlerFactory.createDeliveryHandler(theActiveSubscription.getChannelType()); + Optional deliveryHandler = + mySubscriptionDeliveryHandlerFactory.createDeliveryHandler(theActiveSubscription.getChannelType()); - SubscriptionChannelWithHandlers subscriptionChannelWithHandlers = new SubscriptionChannelWithHandlers(channelName, channelReceiver); + SubscriptionChannelWithHandlers subscriptionChannelWithHandlers = + new SubscriptionChannelWithHandlers(channelName, channelReceiver); deliveryHandler.ifPresent(subscriptionChannelWithHandlers::addHandler); myDeliveryReceiverChannels.put(channelName, subscriptionChannelWithHandlers); @@ -105,15 +110,14 @@ public class SubscriptionChannelRegistry { protected IChannelReceiver newReceivingChannel(ReceivingChannelParameters theParameters) { ChannelConsumerSettings settings = new ChannelConsumerSettings(); settings.setRetryConfiguration(theParameters.getRetryConfiguration()); - return mySubscriptionDeliveryChannelFactory.newDeliveryReceivingChannel(theParameters.getChannelName(), - settings); + return mySubscriptionDeliveryChannelFactory.newDeliveryReceivingChannel( + theParameters.getChannelName(), settings); } protected IChannelProducer newSendingChannel(ProducingChannelParameters theParameters) { ChannelProducerSettings settings = new ChannelProducerSettings(); settings.setRetryConfiguration(theParameters.getRetryConfiguration()); - return mySubscriptionDeliveryChannelFactory.newDeliverySendingChannel(theParameters.getChannelName(), - settings); + return mySubscriptionDeliveryChannelFactory.newDeliverySendingChannel(theParameters.getChannelName(), settings); } public synchronized void remove(ActiveSubscription theActiveSubscription) { @@ -134,7 +138,6 @@ public class SubscriptionChannelRegistry { myDeliveryReceiverChannels.closeAndRemove(channelName); myChannelNameToSender.remove(channelName); } - } public synchronized SubscriptionChannelWithHandlers getDeliveryReceiverChannel(String theChannelName) { diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/channel/subscription/SubscriptionChannelWithHandlers.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/channel/subscription/SubscriptionChannelWithHandlers.java index 028e56c4bb5..9d72e6ea733 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/channel/subscription/SubscriptionChannelWithHandlers.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/channel/subscription/SubscriptionChannelWithHandlers.java @@ -57,7 +57,11 @@ public class SubscriptionChannelWithHandlers implements Closeable { try { ((DisposableBean) theMessageHandler).destroy(); } catch (Exception e) { - ourLog.warn("Could not destroy {} handler for {}", theMessageHandler.getClass().getSimpleName(), myChannelName, e); + ourLog.warn( + "Could 
not destroy {} handler for {}", + theMessageHandler.getClass().getSimpleName(), + myChannelName, + e); } } } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/channel/subscription/SubscriptionDeliveryChannelNamer.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/channel/subscription/SubscriptionDeliveryChannelNamer.java index cd8c3e70b17..0f64e743e46 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/channel/subscription/SubscriptionDeliveryChannelNamer.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/channel/subscription/SubscriptionDeliveryChannelNamer.java @@ -20,16 +20,12 @@ package ca.uhn.fhir.jpa.subscription.channel.subscription; import ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription; -import org.springframework.stereotype.Service; public class SubscriptionDeliveryChannelNamer implements ISubscriptionDeliveryChannelNamer { @Override public String nameFromSubscription(CanonicalSubscription theCanonicalSubscription) { String channelType = theCanonicalSubscription.getChannelType().toCode().toLowerCase(); String subscriptionId = theCanonicalSubscription.getIdPart(); - return "subscription-delivery-" + - channelType + - "-" + - subscriptionId; + return "subscription-delivery-" + channelType + "-" + subscriptionId; } } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/channel/subscription/SubscriptionDeliveryHandlerFactory.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/channel/subscription/SubscriptionDeliveryHandlerFactory.java index 3a0c48f3b05..5a73313cf55 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/channel/subscription/SubscriptionDeliveryHandlerFactory.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/channel/subscription/SubscriptionDeliveryHandlerFactory.java @@ -40,7 +40,8 @@ public class SubscriptionDeliveryHandlerFactory { myEmailSender = theEmailSender; } - protected SubscriptionDeliveringEmailSubscriber newSubscriptionDeliveringEmailSubscriber(IEmailSender theEmailSender) { + protected SubscriptionDeliveringEmailSubscriber newSubscriptionDeliveringEmailSubscriber( + IEmailSender theEmailSender) { return myApplicationContext.getBean(SubscriptionDeliveringEmailSubscriber.class, theEmailSender); } @@ -63,5 +64,4 @@ public class SubscriptionDeliveryHandlerFactory { return Optional.empty(); } } - } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/config/SubscriptionProcessorConfig.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/config/SubscriptionProcessorConfig.java index 80e76f1d81b..e4738bfd336 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/config/SubscriptionProcessorConfig.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/config/SubscriptionProcessorConfig.java @@ -100,13 +100,18 @@ public class SubscriptionProcessorConfig { } @Bean - public SubscriptionDeliveryHandlerFactory subscriptionDeliveryHandlerFactory(ApplicationContext theApplicationContext, IEmailSender theEmailSender) { + public SubscriptionDeliveryHandlerFactory subscriptionDeliveryHandlerFactory( + ApplicationContext theApplicationContext, IEmailSender theEmailSender) { return new 
SubscriptionDeliveryHandlerFactory(theApplicationContext, theEmailSender); } @Bean - public SubscriptionMatchDeliverer subscriptionMatchDeliverer(FhirContext theFhirContext, IInterceptorBroadcaster theInterceptorBroadcaster, SubscriptionChannelRegistry theSubscriptionChannelRegistry) { - return new SubscriptionMatchDeliverer(theFhirContext, theInterceptorBroadcaster, theSubscriptionChannelRegistry); + public SubscriptionMatchDeliverer subscriptionMatchDeliverer( + FhirContext theFhirContext, + IInterceptorBroadcaster theInterceptorBroadcaster, + SubscriptionChannelRegistry theSubscriptionChannelRegistry) { + return new SubscriptionMatchDeliverer( + theFhirContext, theInterceptorBroadcaster, theSubscriptionChannelRegistry); } @Bean @@ -117,7 +122,8 @@ public class SubscriptionProcessorConfig { @Bean @Scope("prototype") - public SubscriptionDeliveringMessageSubscriber subscriptionDeliveringMessageSubscriber(IChannelFactory theChannelFactory) { + public SubscriptionDeliveringMessageSubscriber subscriptionDeliveringMessageSubscriber( + IChannelFactory theChannelFactory) { return new SubscriptionDeliveringMessageSubscriber(theChannelFactory); } @@ -139,14 +145,16 @@ public class SubscriptionProcessorConfig { @Bean @Primary - public ISubscriptionMatcher subscriptionMatcher(DaoSubscriptionMatcher theDaoSubscriptionMatcher, InMemorySubscriptionMatcher theInMemorySubscriptionMatcher) { + public ISubscriptionMatcher subscriptionMatcher( + DaoSubscriptionMatcher theDaoSubscriptionMatcher, + InMemorySubscriptionMatcher theInMemorySubscriptionMatcher) { return new CompositeInMemoryDaoSubscriptionMatcher(theDaoSubscriptionMatcher, theInMemorySubscriptionMatcher); } @Lazy @Bean SubscriptionTopicPayloadBuilder subscriptionTopicPayloadBuilder(FhirContext theFhirContext) { - switch(theFhirContext.getVersion().getVersion()) { + switch (theFhirContext.getVersion().getVersion()) { case R4: case R4B: case R5: @@ -158,8 +166,16 @@ public class SubscriptionProcessorConfig { @Lazy @Bean - SubscriptionTopicDispatcher subscriptionTopicDispatcher(FhirContext theFhirContext, SubscriptionRegistry theSubscriptionRegistry, SubscriptionMatchDeliverer theSubscriptionMatchDeliverer, SubscriptionTopicPayloadBuilder theSubscriptionTopicPayloadBuilder) { - return new SubscriptionTopicDispatcher(theFhirContext, theSubscriptionRegistry, theSubscriptionMatchDeliverer, theSubscriptionTopicPayloadBuilder); + SubscriptionTopicDispatcher subscriptionTopicDispatcher( + FhirContext theFhirContext, + SubscriptionRegistry theSubscriptionRegistry, + SubscriptionMatchDeliverer theSubscriptionMatchDeliverer, + SubscriptionTopicPayloadBuilder theSubscriptionTopicPayloadBuilder) { + return new SubscriptionTopicDispatcher( + theFhirContext, + theSubscriptionRegistry, + theSubscriptionMatchDeliverer, + theSubscriptionTopicPayloadBuilder); } @Bean diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/config/WebsocketDispatcherConfig.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/config/WebsocketDispatcherConfig.java index e7e05c090a4..b5580884e33 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/config/WebsocketDispatcherConfig.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/config/WebsocketDispatcherConfig.java @@ -38,7 +38,7 @@ import org.springframework.web.socket.handler.PerConnectionWebSocketHandler; public class WebsocketDispatcherConfig implements WebSocketConfigurer { 
@Autowired - StorageSettings myStorageSettings; + StorageSettings myStorageSettings; @Bean public WebsocketConnectionValidator websocketConnectionValidator() { @@ -47,7 +47,9 @@ public class WebsocketDispatcherConfig implements WebSocketConfigurer { @Override public void registerWebSocketHandlers(WebSocketHandlerRegistry theRegistry) { - theRegistry.addHandler(subscriptionWebSocketHandler(), myStorageSettings.getWebsocketContextPath()).setAllowedOrigins("*"); + theRegistry + .addHandler(subscriptionWebSocketHandler(), myStorageSettings.getWebsocketContextPath()) + .setAllowedOrigins("*"); } @Bean @@ -55,5 +57,4 @@ public class WebsocketDispatcherConfig implements WebSocketConfigurer { PerConnectionWebSocketHandler retVal = new PerConnectionWebSocketHandler(SubscriptionWebsocketHandler.class); return retVal; } - } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/BaseSubscriptionDeliverySubscriber.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/BaseSubscriptionDeliverySubscriber.java index c2891bf27dd..2e4dff6e7e8 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/BaseSubscriptionDeliverySubscriber.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/BaseSubscriptionDeliverySubscriber.java @@ -56,12 +56,16 @@ public abstract class BaseSubscriptionDeliverySubscriber implements MessageHandl @Autowired protected FhirContext myFhirContext; + @Autowired protected SubscriptionRegistry mySubscriptionRegistry; + @Autowired private IInterceptorBroadcaster myInterceptorBroadcaster; + @Autowired private DaoRegistry myDaoRegistry; + @Autowired private MatchUrlService myMatchUrlService; @@ -79,7 +83,8 @@ public abstract class BaseSubscriptionDeliverySubscriber implements MessageHandl return; } - ActiveSubscription updatedSubscription = mySubscriptionRegistry.get(msg.getSubscription().getIdElement(myFhirContext).getIdPart()); + ActiveSubscription updatedSubscription = mySubscriptionRegistry.get( + msg.getSubscription().getIdElement(myFhirContext).getIdPart()); if (updatedSubscription != null) { msg.setSubscription(updatedSubscription.getSubscription()); } @@ -88,8 +93,8 @@ public abstract class BaseSubscriptionDeliverySubscriber implements MessageHandl // Interceptor call: SUBSCRIPTION_BEFORE_DELIVERY HookParams params = new HookParams() - .add(ResourceDeliveryMessage.class, msg) - .add(CanonicalSubscription.class, msg.getSubscription()); + .add(ResourceDeliveryMessage.class, msg) + .add(CanonicalSubscription.class, msg.getSubscription()); if (!myInterceptorBroadcaster.callHooks(Pointcut.SUBSCRIPTION_BEFORE_DELIVERY, params)) { return; } @@ -105,9 +110,8 @@ public abstract class BaseSubscriptionDeliverySubscriber implements MessageHandl ourLog.error(errorMsg, e); // Interceptor call: SUBSCRIPTION_AFTER_DELIVERY - HookParams hookParams = new HookParams() - .add(ResourceDeliveryMessage.class, msg) - .add(Exception.class, e); + HookParams hookParams = + new HookParams().add(ResourceDeliveryMessage.class, msg).add(Exception.class, e); if (!myInterceptorBroadcaster.callHooks(Pointcut.SUBSCRIPTION_AFTER_DELIVERY_FAILED, hookParams)) { return; } @@ -118,19 +122,26 @@ public abstract class BaseSubscriptionDeliverySubscriber implements MessageHandl public abstract void handleMessage(ResourceDeliveryMessage theMessage) throws Exception; - protected IBaseBundle 
createDeliveryBundleForPayloadSearchCriteria(CanonicalSubscription theSubscription, IBaseResource thePayloadResource) { - String resType = theSubscription.getPayloadSearchCriteria().substring(0, theSubscription.getPayloadSearchCriteria().indexOf('?')); + protected IBaseBundle createDeliveryBundleForPayloadSearchCriteria( + CanonicalSubscription theSubscription, IBaseResource thePayloadResource) { + String resType = theSubscription + .getPayloadSearchCriteria() + .substring(0, theSubscription.getPayloadSearchCriteria().indexOf('?')); IFhirResourceDao dao = myDaoRegistry.getResourceDao(resType); RuntimeResourceDefinition resourceDefinition = myFhirContext.getResourceDefinition(resType); String payloadUrl = theSubscription.getPayloadSearchCriteria(); Map valueMap = new HashMap<>(1); - valueMap.put("matched_resource_id", thePayloadResource.getIdElement().toUnqualifiedVersionless().getValue()); + valueMap.put( + "matched_resource_id", + thePayloadResource.getIdElement().toUnqualifiedVersionless().getValue()); payloadUrl = new StringSubstitutor(valueMap).replace(payloadUrl); - SearchParameterMap payloadSearchMap = myMatchUrlService.translateMatchUrl(payloadUrl, resourceDefinition, MatchUrlService.processIncludes()); + SearchParameterMap payloadSearchMap = + myMatchUrlService.translateMatchUrl(payloadUrl, resourceDefinition, MatchUrlService.processIncludes()); payloadSearchMap.setLoadSynchronous(true); - IBundleProvider searchResults = dao.search(payloadSearchMap, createRequestDetailForPartitionedRequest(theSubscription)); + IBundleProvider searchResults = + dao.search(payloadSearchMap, createRequestDetailForPartitionedRequest(theSubscription)); BundleBuilder builder = new BundleBuilder(myFhirContext); for (IBaseResource next : searchResults.getAllResources()) { builder.addTransactionUpdateEntry(next); diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/email/EmailDetails.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/email/EmailDetails.java index e6c9fba1522..40fd11b611c 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/email/EmailDetails.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/email/EmailDetails.java @@ -31,9 +31,9 @@ import org.thymeleaf.spring5.dialect.SpringStandardDialect; import org.thymeleaf.templatemode.TemplateMode; import org.thymeleaf.templateresolver.StringTemplateResolver; -import javax.annotation.Nonnull; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class EmailDetails { private final SpringTemplateEngine myTemplateEngine; @@ -93,12 +93,12 @@ public class EmailDetails { public Email toEmail() { try { return EmailBuilder.startingBlank() - .from(getFrom()) - .to(getTo()) - .withSubject(getSubject()) - .withPlainText(getBody()) - .withHeader("X-FHIR-Subscription", getSubscriptionId()) - .buildEmail(); + .from(getFrom()) + .to(getTo()) + .withSubject(getSubject()) + .withPlainText(getBody()) + .withHeader("X-FHIR-Subscription", getSubscriptionId()) + .buildEmail(); } catch (IllegalArgumentException e) { throw new InternalErrorException(Msg.code(3) + "Failed to create email message", e); } @@ -119,5 +119,4 @@ public class EmailDetails { return springTemplateEngine; } - } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/email/EmailSenderImpl.java 
b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/email/EmailSenderImpl.java index a3c7cfc4e45..b2cc733ecd2 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/email/EmailSenderImpl.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/email/EmailSenderImpl.java @@ -43,18 +43,30 @@ public class EmailSenderImpl implements IEmailSender { public void send(EmailDetails theDetails) { StopWatch stopWatch = new StopWatch(); - ourLog.info("Sending email for subscription {} from [{}] to recipients: [{}]", theDetails.getSubscriptionId(), theDetails.getFrom(), theDetails.getTo()); + ourLog.info( + "Sending email for subscription {} from [{}] to recipients: [{}]", + theDetails.getSubscriptionId(), + theDetails.getFrom(), + theDetails.getTo()); Email email = theDetails.toEmail(); - myMailSvc.sendMail(email, - () -> ourLog.info("Done sending email for subscription {} from [{}] to recipients: [{}] (took {}ms)", - theDetails.getSubscriptionId(), theDetails.getFrom(), theDetails.getTo(), stopWatch.getMillis()), - (e) -> { - ourLog.error("Error sending email for subscription {} from [{}] to recipients: [{}] (took {}ms)", - theDetails.getSubscriptionId(), theDetails.getFrom(), theDetails.getTo(), stopWatch.getMillis()); - ourLog.error("Error sending email", e); - }); + myMailSvc.sendMail( + email, + () -> ourLog.info( + "Done sending email for subscription {} from [{}] to recipients: [{}] (took {}ms)", + theDetails.getSubscriptionId(), + theDetails.getFrom(), + theDetails.getTo(), + stopWatch.getMillis()), + (e) -> { + ourLog.error( + "Error sending email for subscription {} from [{}] to recipients: [{}] (took {}ms)", + theDetails.getSubscriptionId(), + theDetails.getFrom(), + theDetails.getTo(), + stopWatch.getMillis()); + ourLog.error("Error sending email", e); + }); } - } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/email/IEmailSender.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/email/IEmailSender.java index 26d33bbbd0e..79c3182ec76 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/email/IEmailSender.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/email/IEmailSender.java @@ -22,5 +22,4 @@ package ca.uhn.fhir.jpa.subscription.match.deliver.email; public interface IEmailSender { void send(EmailDetails theDetails); - } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/email/SubscriptionDeliveringEmailSubscriber.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/email/SubscriptionDeliveringEmailSubscriber.java index 49d581a5f47..01ccf19397a 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/email/SubscriptionDeliveringEmailSubscriber.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/email/SubscriptionDeliveringEmailSubscriber.java @@ -43,6 +43,7 @@ public class SubscriptionDeliveringEmailSubscriber extends BaseSubscriptionDeliv @Autowired private StorageSettings myStorageSettings; + @Autowired private FhirContext myCtx; @@ -76,8 +77,10 @@ public class SubscriptionDeliveringEmailSubscriber extends BaseSubscriptionDeliv } } - String from = 
processEmailAddressUri(defaultString(subscription.getEmailDetails().getFrom(), myStorageSettings.getEmailFromAddress())); - String subjectTemplate = defaultString(subscription.getEmailDetails().getSubjectTemplate(), provideDefaultSubjectTemplate()); + String from = processEmailAddressUri( + defaultString(subscription.getEmailDetails().getFrom(), myStorageSettings.getEmailFromAddress())); + String subjectTemplate = + defaultString(subscription.getEmailDetails().getSubjectTemplate(), provideDefaultSubjectTemplate()); EmailDetails details = new EmailDetails(); details.setTo(destinationAddresses); @@ -92,8 +95,8 @@ public class SubscriptionDeliveringEmailSubscriber extends BaseSubscriptionDeliv private String processEmailAddressUri(String next) { next = trim(defaultString(next)); if (next.startsWith("mailto:")) { - next = next.substring("mailto:".length()); - } + next = next.substring("mailto:".length()); + } return next; } @@ -106,7 +109,7 @@ public class SubscriptionDeliveringEmailSubscriber extends BaseSubscriptionDeliv } @VisibleForTesting - public IEmailSender getEmailSender(){ + public IEmailSender getEmailSender() { return myEmailSender; } } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/message/SubscriptionDeliveringMessageSubscriber.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/message/SubscriptionDeliveringMessageSubscriber.java index 97adad246ca..b801d0e8f95 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/message/SubscriptionDeliveringMessageSubscriber.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/message/SubscriptionDeliveringMessageSubscriber.java @@ -56,20 +56,36 @@ public class SubscriptionDeliveringMessageSubscriber extends BaseSubscriptionDel myChannelFactory = theChannelFactory; } - protected void doDelivery(ResourceDeliveryMessage theSourceMessage, CanonicalSubscription theSubscription, IChannelProducer theChannelProducer, ResourceModifiedJsonMessage theWrappedMessageToSend) { + protected void doDelivery( + ResourceDeliveryMessage theSourceMessage, + CanonicalSubscription theSubscription, + IChannelProducer theChannelProducer, + ResourceModifiedJsonMessage theWrappedMessageToSend) { String payloadId = theSourceMessage.getPayloadId(); if (isNotBlank(theSubscription.getPayloadSearchCriteria())) { - IBaseResource payloadResource = createDeliveryBundleForPayloadSearchCriteria(theSubscription, theWrappedMessageToSend.getPayload().getPayload(myFhirContext)); - ResourceModifiedJsonMessage newWrappedMessageToSend = convertDeliveryMessageToResourceModifiedMessage(theSourceMessage, payloadResource); + IBaseResource payloadResource = createDeliveryBundleForPayloadSearchCriteria( + theSubscription, theWrappedMessageToSend.getPayload().getPayload(myFhirContext)); + ResourceModifiedJsonMessage newWrappedMessageToSend = + convertDeliveryMessageToResourceModifiedMessage(theSourceMessage, payloadResource); theWrappedMessageToSend.setPayload(newWrappedMessageToSend.getPayload()); - payloadId = payloadResource.getIdElement().toUnqualifiedVersionless().getValue(); + payloadId = + payloadResource.getIdElement().toUnqualifiedVersionless().getValue(); } theChannelProducer.send(theWrappedMessageToSend); - ourLog.debug("Delivering {} message payload {} for {}", theSourceMessage.getOperationType(), payloadId, 
theSubscription.getIdElement(myFhirContext).toUnqualifiedVersionless().getValue()); + ourLog.debug( + "Delivering {} message payload {} for {}", + theSourceMessage.getOperationType(), + payloadId, + theSubscription + .getIdElement(myFhirContext) + .toUnqualifiedVersionless() + .getValue()); } - private ResourceModifiedJsonMessage convertDeliveryMessageToResourceModifiedMessage(ResourceDeliveryMessage theMsg, IBaseResource thePayloadResource) { - ResourceModifiedMessage payload = new ResourceModifiedMessage(myFhirContext, thePayloadResource, theMsg.getOperationType()); + private ResourceModifiedJsonMessage convertDeliveryMessageToResourceModifiedMessage( + ResourceDeliveryMessage theMsg, IBaseResource thePayloadResource) { + ResourceModifiedMessage payload = + new ResourceModifiedMessage(myFhirContext, thePayloadResource, theMsg.getOperationType()); payload.setMessageKey(theMsg.getMessageKeyOrDefault()); payload.setTransactionId(theMsg.getTransactionId()); payload.setPartitionId(theMsg.getRequestPartitionId()); @@ -80,13 +96,14 @@ public class SubscriptionDeliveringMessageSubscriber extends BaseSubscriptionDel public void handleMessage(ResourceDeliveryMessage theMessage) throws MessagingException, URISyntaxException { CanonicalSubscription subscription = theMessage.getSubscription(); IBaseResource payloadResource = theMessage.getPayload(myFhirContext); - ResourceModifiedJsonMessage messageWrapperToSend = convertDeliveryMessageToResourceModifiedMessage(theMessage, payloadResource); + ResourceModifiedJsonMessage messageWrapperToSend = + convertDeliveryMessageToResourceModifiedMessage(theMessage, payloadResource); // Interceptor call: SUBSCRIPTION_BEFORE_MESSAGE_DELIVERY HookParams params = new HookParams() - .add(CanonicalSubscription.class, subscription) - .add(ResourceDeliveryMessage.class, theMessage) - .add(ResourceModifiedJsonMessage.class, messageWrapperToSend); + .add(CanonicalSubscription.class, subscription) + .add(ResourceDeliveryMessage.class, theMessage) + .add(ResourceModifiedJsonMessage.class, messageWrapperToSend); if (!getInterceptorBroadcaster().callHooks(Pointcut.SUBSCRIPTION_BEFORE_MESSAGE_DELIVERY, params)) { return; } @@ -98,7 +115,8 @@ public class SubscriptionDeliveringMessageSubscriber extends BaseSubscriptionDel ChannelProducerSettings channelSettings = new ChannelProducerSettings(); channelSettings.setQualifyChannelName(false); - IChannelProducer channelProducer = myChannelFactory.getOrCreateProducer(queueName, ResourceModifiedJsonMessage.class, channelSettings); + IChannelProducer channelProducer = + myChannelFactory.getOrCreateProducer(queueName, ResourceModifiedJsonMessage.class, channelSettings); // Grab the payload type (encoding mimetype) from the subscription String payloadString = subscription.getPayloadString(); @@ -108,15 +126,16 @@ public class SubscriptionDeliveringMessageSubscriber extends BaseSubscriptionDel } if (payloadType != EncodingEnum.JSON) { - throw new UnsupportedOperationException(Msg.code(4) + "Only JSON payload type is currently supported for Message Subscriptions"); + throw new UnsupportedOperationException( + Msg.code(4) + "Only JSON payload type is currently supported for Message Subscriptions"); } doDelivery(theMessage, subscription, channelProducer, messageWrapperToSend); // Interceptor call: SUBSCRIPTION_AFTER_MESSAGE_DELIVERY params = new HookParams() - .add(CanonicalSubscription.class, subscription) - .add(ResourceDeliveryMessage.class, theMessage); + .add(CanonicalSubscription.class, subscription) + 
.add(ResourceDeliveryMessage.class, theMessage); if (!getInterceptorBroadcaster().callHooks(Pointcut.SUBSCRIPTION_AFTER_MESSAGE_DELIVERY, params)) { //noinspection UnnecessaryReturnStatement return; diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/resthook/SubscriptionDeliveringRestHookSubscriber.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/resthook/SubscriptionDeliveringRestHookSubscriber.java index 8b32daff4d0..6d4054a7d30 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/resthook/SubscriptionDeliveringRestHookSubscriber.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/resthook/SubscriptionDeliveringRestHookSubscriber.java @@ -43,9 +43,9 @@ import ca.uhn.fhir.rest.gclient.IClientExecutable; import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.rest.server.messaging.BaseResourceModifiedMessage; +import ca.uhn.fhir.util.BundleUtil; import ca.uhn.fhir.util.Logs; import ca.uhn.fhir.util.StopWatch; -import ca.uhn.fhir.util.BundleUtil; import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; @@ -55,13 +55,13 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Scope; import org.springframework.messaging.MessagingException; -import javax.annotation.Nullable; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -79,14 +79,23 @@ public class SubscriptionDeliveringRestHookSubscriber extends BaseSubscriptionDe super(); } - protected void deliverPayload(ResourceDeliveryMessage theMsg, CanonicalSubscription theSubscription, EncodingEnum thePayloadType, IGenericClient theClient) { + protected void deliverPayload( + ResourceDeliveryMessage theMsg, + CanonicalSubscription theSubscription, + EncodingEnum thePayloadType, + IGenericClient theClient) { IBaseResource payloadResource = getAndMassagePayload(theMsg, theSubscription); // Regardless of whether we have a payload, the rest-hook should be sent. 
doDelivery(theMsg, theSubscription, thePayloadType, theClient, payloadResource); } - protected void doDelivery(ResourceDeliveryMessage theMsg, CanonicalSubscription theSubscription, EncodingEnum thePayloadType, IGenericClient theClient, IBaseResource thePayloadResource) { + protected void doDelivery( + ResourceDeliveryMessage theMsg, + CanonicalSubscription theSubscription, + EncodingEnum thePayloadType, + IGenericClient theClient, + IBaseResource thePayloadResource) { IClientExecutable operation; if (theSubscription.isTopicSubscription()) { @@ -117,12 +126,22 @@ public class SubscriptionDeliveringRestHookSubscriber extends BaseSubscriptionDe throw e; } - Logs.getSubscriptionTroubleshootingLog().debug("Delivered {} rest-hook payload {} for {} in {}", theMsg.getOperationType(), payloadId, theSubscription.getIdElement(myFhirContext).toUnqualifiedVersionless().getValue(), sw); + Logs.getSubscriptionTroubleshootingLog() + .debug( + "Delivered {} rest-hook payload {} for {} in {}", + theMsg.getOperationType(), + payloadId, + theSubscription + .getIdElement(myFhirContext) + .toUnqualifiedVersionless() + .getValue(), + sw); } } @Nullable - private IClientExecutable createDeliveryRequestNormal(ResourceDeliveryMessage theMsg, IGenericClient theClient, IBaseResource thePayloadResource) { + private IClientExecutable createDeliveryRequestNormal( + ResourceDeliveryMessage theMsg, IGenericClient theClient, IBaseResource thePayloadResource) { IClientExecutable operation; switch (theMsg.getOperationType()) { case CREATE: @@ -140,7 +159,8 @@ public class SubscriptionDeliveringRestHookSubscriber extends BaseSubscriptionDe return operation; } - private IClientExecutable createDeliveryRequestTransaction(CanonicalSubscription theSubscription, IGenericClient theClient, IBaseResource thePayloadResource) { + private IClientExecutable createDeliveryRequestTransaction( + CanonicalSubscription theSubscription, IGenericClient theClient, IBaseResource thePayloadResource) { IBaseBundle bundle = createDeliveryBundleForPayloadSearchCriteria(theSubscription, thePayloadResource); return theClient.transaction().withBundle(bundle); } @@ -149,7 +169,8 @@ public class SubscriptionDeliveringRestHookSubscriber extends BaseSubscriptionDe return theClient.transaction().withBundle(theBundle); } - public IBaseResource getResource(IIdType thePayloadId, RequestPartitionId thePartitionId, boolean theDeletedOK) throws ResourceGoneException { + public IBaseResource getResource(IIdType thePayloadId, RequestPartitionId thePartitionId, boolean theDeletedOK) + throws ResourceGoneException { RuntimeResourceDefinition resourceDef = myFhirContext.getResourceDefinition(thePayloadId.getResourceType()); SystemRequestDetails systemRequestDetails = new SystemRequestDetails().setRequestPartitionId(thePartitionId); IFhirResourceDao dao = myDaoRegistry.getResourceDao(resourceDef.getImplementingClass()); @@ -163,7 +184,8 @@ public class SubscriptionDeliveringRestHookSubscriber extends BaseSubscriptionDe * @param theSubscription * @return */ - protected IBaseResource getAndMassagePayload(ResourceDeliveryMessage theMsg, CanonicalSubscription theSubscription) { + protected IBaseResource getAndMassagePayload( + ResourceDeliveryMessage theMsg, CanonicalSubscription theSubscription) { IBaseResource payloadResource = theMsg.getPayload(myFhirContext); if (payloadResource instanceof IBaseBundle) { @@ -173,7 +195,8 @@ public class SubscriptionDeliveringRestHookSubscriber extends BaseSubscriptionDe } } - private IBaseResource 
getAndMassageBundle(ResourceDeliveryMessage theMsg, IBaseBundle theBundle, CanonicalSubscription theSubscription) { + private IBaseResource getAndMassageBundle( + ResourceDeliveryMessage theMsg, IBaseBundle theBundle, CanonicalSubscription theSubscription) { BundleUtil.processEntries(myFhirContext, theBundle, entry -> { IBaseResource entryResource = entry.getResource(); if (entryResource != null) { @@ -188,7 +211,8 @@ public class SubscriptionDeliveringRestHookSubscriber extends BaseSubscriptionDe return theBundle; } - private IBaseResource getAndMassageResource(ResourceDeliveryMessage theMsg, IBaseResource thePayloadResource, CanonicalSubscription theSubscription) { + private IBaseResource getAndMassageResource( + ResourceDeliveryMessage theMsg, IBaseResource thePayloadResource, CanonicalSubscription theSubscription) { if (thePayloadResource == null || theSubscription.getRestHookDetails().isDeliverLatestVersion()) { IIdType payloadId = theMsg.getPayloadId(myFhirContext).toVersionless(); @@ -197,13 +221,17 @@ public class SubscriptionDeliveringRestHookSubscriber extends BaseSubscriptionDe } try { if (payloadId != null) { - boolean deletedOK = theMsg.getOperationType() == BaseResourceModifiedMessage.OperationTypeEnum.DELETE; + boolean deletedOK = + theMsg.getOperationType() == BaseResourceModifiedMessage.OperationTypeEnum.DELETE; thePayloadResource = getResource(payloadId, theMsg.getRequestPartitionId(), deletedOK); } else { return null; } } catch (ResourceGoneException e) { - ourLog.warn("Resource {} is deleted, not going to deliver for subscription {}", payloadId, theSubscription.getIdElement(myFhirContext)); + ourLog.warn( + "Resource {} is deleted, not going to deliver for subscription {}", + payloadId, + theSubscription.getIdElement(myFhirContext)); return null; } } @@ -223,8 +251,8 @@ public class SubscriptionDeliveringRestHookSubscriber extends BaseSubscriptionDe // Interceptor call: SUBSCRIPTION_BEFORE_REST_HOOK_DELIVERY HookParams params = new HookParams() - .add(CanonicalSubscription.class, subscription) - .add(ResourceDeliveryMessage.class, theMessage); + .add(CanonicalSubscription.class, subscription) + .add(ResourceDeliveryMessage.class, theMessage); if (!getInterceptorBroadcaster().callHooks(Pointcut.SUBSCRIPTION_BEFORE_REST_HOOK_DELIVERY, params)) { return; } @@ -258,13 +286,12 @@ public class SubscriptionDeliveringRestHookSubscriber extends BaseSubscriptionDe // Interceptor call: SUBSCRIPTION_AFTER_REST_HOOK_DELIVERY params = new HookParams() - .add(CanonicalSubscription.class, subscription) - .add(ResourceDeliveryMessage.class, theMessage); + .add(CanonicalSubscription.class, subscription) + .add(ResourceDeliveryMessage.class, theMessage); if (!getInterceptorBroadcaster().callHooks(Pointcut.SUBSCRIPTION_AFTER_REST_HOOK_DELIVERY, params)) { //noinspection UnnecessaryReturnStatement return; } - } /** @@ -276,14 +303,16 @@ public class SubscriptionDeliveringRestHookSubscriber extends BaseSubscriptionDe List

    headers = parseHeadersFromSubscription(subscription); StringBuilder url = new StringBuilder(subscription.getEndpointUrl()); - IHttpClient client = myFhirContext.getRestfulClientFactory().getHttpClient(url, params, "", RequestTypeEnum.POST, headers); + IHttpClient client = + myFhirContext.getRestfulClientFactory().getHttpClient(url, params, "", RequestTypeEnum.POST, headers); IHttpRequest request = client.createParamRequest(myFhirContext, params, null); try { IHttpResponse response = request.execute(); // close connection in order to return a possible cached connection to the connection pool response.close(); } catch (IOException e) { - ourLog.error("Error trying to reach {}: {}", theMsg.getSubscription().getEndpointUrl(), e.toString()); + ourLog.error( + "Error trying to reach {}: {}", theMsg.getSubscription().getEndpointUrl(), e.toString()); throw new ResourceNotFoundException(Msg.code(5) + e.getMessage()); } } @@ -314,5 +343,4 @@ public class SubscriptionDeliveringRestHookSubscriber extends BaseSubscriptionDe } return headers; } - } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/websocket/SubscriptionWebsocketHandler.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/websocket/SubscriptionWebsocketHandler.java index f67e4f62739..706d01ee16d 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/websocket/SubscriptionWebsocketHandler.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/websocket/SubscriptionWebsocketHandler.java @@ -19,7 +19,6 @@ */ package ca.uhn.fhir.jpa.subscription.match.deliver.websocket; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.subscription.channel.subscription.SubscriptionChannelRegistry; import ca.uhn.fhir.jpa.subscription.channel.subscription.SubscriptionChannelWithHandlers; @@ -39,14 +38,16 @@ import org.springframework.web.socket.WebSocketHandler; import org.springframework.web.socket.WebSocketSession; import org.springframework.web.socket.handler.TextWebSocketHandler; +import java.io.IOException; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; -import java.io.IOException; public class SubscriptionWebsocketHandler extends TextWebSocketHandler implements WebSocketHandler { private static Logger ourLog = LoggerFactory.getLogger(SubscriptionWebsocketHandler.class); + @Autowired protected WebsocketConnectionValidator myWebsocketConnectionValidator; + @Autowired SubscriptionChannelRegistry mySubscriptionChannelRegistry; @@ -101,13 +102,11 @@ public class SubscriptionWebsocketHandler extends TextWebSocketHandler implement } } - private interface IState { void closing(); void handleTextMessage(WebSocketSession theSession, TextMessage theMessage); - } private class BoundStaticSubscriptionState implements IState, MessageHandler { @@ -119,13 +118,15 @@ public class SubscriptionWebsocketHandler extends TextWebSocketHandler implement mySession = theSession; myActiveSubscription = theActiveSubscription; - SubscriptionChannelWithHandlers subscriptionChannelWithHandlers = mySubscriptionChannelRegistry.getDeliveryReceiverChannel(theActiveSubscription.getChannelName()); + SubscriptionChannelWithHandlers subscriptionChannelWithHandlers = + mySubscriptionChannelRegistry.getDeliveryReceiverChannel(theActiveSubscription.getChannelName()); subscriptionChannelWithHandlers.addHandler(this); } @Override public void 
closing() { - SubscriptionChannelWithHandlers subscriptionChannelWithHandlers = mySubscriptionChannelRegistry.getDeliveryReceiverChannel(myActiveSubscription.getChannelName()); + SubscriptionChannelWithHandlers subscriptionChannelWithHandlers = + mySubscriptionChannelRegistry.getDeliveryReceiverChannel(myActiveSubscription.getChannelName()); subscriptionChannelWithHandlers.removeHandler(this); } @@ -208,10 +209,7 @@ public class SubscriptionWebsocketHandler extends TextWebSocketHandler implement } catch (IOException e) { handleFailure(e); } - } } - } - } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/websocket/WebsocketConnectionValidator.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/websocket/WebsocketConnectionValidator.java index 35b6e172732..cf82917ba0f 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/websocket/WebsocketConnectionValidator.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/deliver/websocket/WebsocketConnectionValidator.java @@ -35,7 +35,6 @@ public class WebsocketConnectionValidator { @Autowired SubscriptionRegistry mySubscriptionRegistry; - /** * Constructor */ @@ -45,7 +44,8 @@ public class WebsocketConnectionValidator { public WebsocketValidationResponse validate(@Nonnull IdType id) { if (!id.hasIdPart() || !id.isIdPartValid()) { - return WebsocketValidationResponse.INVALID_RESPONSE("Invalid bind request - No ID included: " + id.getValue()); + return WebsocketValidationResponse.INVALID_RESPONSE( + "Invalid bind request - No ID included: " + id.getValue()); } if (!id.hasResourceType()) { @@ -55,11 +55,13 @@ public class WebsocketConnectionValidator { ActiveSubscription activeSubscription = mySubscriptionRegistry.get(id.getIdPart()); if (activeSubscription == null) { - return WebsocketValidationResponse.INVALID_RESPONSE("Invalid bind request - Unknown subscription: " + id.getValue()); + return WebsocketValidationResponse.INVALID_RESPONSE( + "Invalid bind request - Unknown subscription: " + id.getValue()); } if (activeSubscription.getSubscription().getChannelType() != CanonicalSubscriptionChannelType.WEBSOCKET) { - return WebsocketValidationResponse.INVALID_RESPONSE("Subscription " + id.getValue() + " is not a " + CanonicalSubscriptionChannelType.WEBSOCKET + " subscription"); + return WebsocketValidationResponse.INVALID_RESPONSE("Subscription " + id.getValue() + " is not a " + + CanonicalSubscriptionChannelType.WEBSOCKET + " subscription"); } return WebsocketValidationResponse.VALID_RESPONSE(activeSubscription); diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/CompositeInMemoryDaoSubscriptionMatcher.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/CompositeInMemoryDaoSubscriptionMatcher.java index 9aac55c2a27..65dbb9de342 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/CompositeInMemoryDaoSubscriptionMatcher.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/CompositeInMemoryDaoSubscriptionMatcher.java @@ -19,7 +19,6 @@ */ package ca.uhn.fhir.jpa.subscription.match.matcher.matching; -import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.model.entity.StorageSettings; import 
ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult; import ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription; @@ -33,10 +32,13 @@ public class CompositeInMemoryDaoSubscriptionMatcher implements ISubscriptionMat private final DaoSubscriptionMatcher myDaoSubscriptionMatcher; private final InMemorySubscriptionMatcher myInMemorySubscriptionMatcher; + @Autowired StorageSettings myStorageSettings; - public CompositeInMemoryDaoSubscriptionMatcher(DaoSubscriptionMatcher theDaoSubscriptionMatcher, InMemorySubscriptionMatcher theInMemorySubscriptionMatcher) { + public CompositeInMemoryDaoSubscriptionMatcher( + DaoSubscriptionMatcher theDaoSubscriptionMatcher, + InMemorySubscriptionMatcher theInMemorySubscriptionMatcher) { myDaoSubscriptionMatcher = theDaoSubscriptionMatcher; myInMemorySubscriptionMatcher = theInMemorySubscriptionMatcher; } @@ -49,7 +51,11 @@ public class CompositeInMemoryDaoSubscriptionMatcher implements ISubscriptionMat if (result.supported()) { result.setInMemory(true); } else { - ourLog.info("Criteria {} for Subscription {} not supported by InMemoryMatcher: {}. Reverting to DatabaseMatcher", theSubscription.getCriteriaString(), theSubscription.getIdElementString(), result.getUnsupportedReason()); + ourLog.info( + "Criteria {} for Subscription {} not supported by InMemoryMatcher: {}. Reverting to DatabaseMatcher", + theSubscription.getCriteriaString(), + theSubscription.getIdElementString(), + result.getUnsupportedReason()); result = myDaoSubscriptionMatcher.match(theSubscription, theMsg); } } else { diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/DaoSubscriptionMatcher.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/DaoSubscriptionMatcher.java index e39b34bf2d3..e61a81986a3 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/DaoSubscriptionMatcher.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/DaoSubscriptionMatcher.java @@ -53,7 +53,8 @@ public class DaoSubscriptionMatcher implements ISubscriptionMatcher { IIdType id = theMsg.getPayloadId(myCtx); String criteria = theSubscription.getCriteriaString(); - // Run the subscriptions query and look for matches, add the id as part of the criteria to avoid getting matches of previous resources rather than the recent resource + // Run the subscriptions query and look for matches, add the id as part of the criteria to avoid getting matches + // of previous resources rather than the recent resource criteria += "&_id=" + id.toUnqualifiedVersionless().getValue(); IBundleProvider results = performSearch(criteria, theSubscription); @@ -68,13 +69,15 @@ public class DaoSubscriptionMatcher implements ISubscriptionMatcher { */ private IBundleProvider performSearch(String theCriteria, CanonicalSubscription theSubscription) { IFhirResourceDao subscriptionDao = myDaoRegistry.getSubscriptionDao(); - RuntimeResourceDefinition responseResourceDef = subscriptionDao.validateCriteriaAndReturnResourceDefinition(theCriteria); + RuntimeResourceDefinition responseResourceDef = + subscriptionDao.validateCriteriaAndReturnResourceDefinition(theCriteria); SearchParameterMap responseCriteriaUrl = myMatchUrlService.translateMatchUrl(theCriteria, responseResourceDef); - IFhirResourceDao responseDao = myDaoRegistry.getResourceDao(responseResourceDef.getImplementingClass()); + IFhirResourceDao responseDao = + 
myDaoRegistry.getResourceDao(responseResourceDef.getImplementingClass()); responseCriteriaUrl.setLoadSynchronousUpTo(1); - return responseDao.search(responseCriteriaUrl, SubscriptionUtil.createRequestDetailForPartitionedRequest(theSubscription)); + return responseDao.search( + responseCriteriaUrl, SubscriptionUtil.createRequestDetailForPartitionedRequest(theSubscription)); } - } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/InMemorySubscriptionMatcher.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/InMemorySubscriptionMatcher.java index 0db1f7ae721..342b311d956 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/InMemorySubscriptionMatcher.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/InMemorySubscriptionMatcher.java @@ -35,17 +35,21 @@ public class InMemorySubscriptionMatcher implements ISubscriptionMatcher { @Autowired private FhirContext myContext; + @Autowired private SearchParamMatcher mySearchParamMatcher; @Override public InMemoryMatchResult match(CanonicalSubscription theSubscription, ResourceModifiedMessage theMsg) { try { - return mySearchParamMatcher.match(theSubscription.getCriteriaString(), theMsg.getNewPayload(myContext), null); + return mySearchParamMatcher.match( + theSubscription.getCriteriaString(), theMsg.getNewPayload(myContext), null); } catch (Exception e) { ourLog.error("Failure in in-memory matcher", e); - throw new InternalErrorException(Msg.code(1) + "Failure performing memory-match for resource ID[" + theMsg.getPayloadId(myContext) + "] for subscription ID[" + theSubscription.getIdElementString() + "]: " + e.getMessage(), e); + throw new InternalErrorException( + Msg.code(1) + "Failure performing memory-match for resource ID[" + theMsg.getPayloadId(myContext) + + "] for subscription ID[" + theSubscription.getIdElementString() + "]: " + e.getMessage(), + e); } } - } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/MatchingQueueSubscriberLoader.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/MatchingQueueSubscriberLoader.java index 2e0bdedffa5..b16b1874cd3 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/MatchingQueueSubscriberLoader.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/MatchingQueueSubscriberLoader.java @@ -41,20 +41,28 @@ import static ca.uhn.fhir.jpa.subscription.match.matcher.subscriber.Subscription public class MatchingQueueSubscriberLoader { protected IChannelReceiver myMatchingChannel; private static final Logger ourLog = LoggerFactory.getLogger(MatchingQueueSubscriberLoader.class); + @Autowired FhirContext myFhirContext; + @Autowired private SubscriptionMatchingSubscriber mySubscriptionMatchingSubscriber; + @Autowired(required = false) private SubscriptionTopicMatchingSubscriber mySubscriptionTopicMatchingSubscriber; + @Autowired private SubscriptionChannelFactory mySubscriptionChannelFactory; + @Autowired private SubscriptionRegisteringSubscriber mySubscriptionRegisteringSubscriber; + @Autowired(required = false) private SubscriptionTopicRegisteringSubscriber mySubscriptionTopicRegisteringSubscriber; + @Autowired private SubscriptionActivatingSubscriber 
mySubscriptionActivatingSubscriber; + @Autowired private StorageSettings myStorageSettings; @@ -62,13 +70,17 @@ public class MatchingQueueSubscriberLoader { @Order(IHapiBootOrder.SUBSCRIPTION_MATCHING_CHANNEL_HANDLER) public void subscribeToMatchingChannel() { if (myMatchingChannel == null) { - myMatchingChannel = mySubscriptionChannelFactory.newMatchingReceivingChannel(SUBSCRIPTION_MATCHING_CHANNEL_NAME, getChannelConsumerSettings()); + myMatchingChannel = mySubscriptionChannelFactory.newMatchingReceivingChannel( + SUBSCRIPTION_MATCHING_CHANNEL_NAME, getChannelConsumerSettings()); } if (myMatchingChannel != null) { myMatchingChannel.subscribe(mySubscriptionMatchingSubscriber); myMatchingChannel.subscribe(mySubscriptionActivatingSubscriber); myMatchingChannel.subscribe(mySubscriptionRegisteringSubscriber); - ourLog.info("Subscription Matching Subscriber subscribed to Matching Channel {} with name {}", myMatchingChannel.getClass().getName(), SUBSCRIPTION_MATCHING_CHANNEL_NAME); + ourLog.info( + "Subscription Matching Subscriber subscribed to Matching Channel {} with name {}", + myMatchingChannel.getClass().getName(), + SUBSCRIPTION_MATCHING_CHANNEL_NAME); if (mySubscriptionTopicMatchingSubscriber != null) { ourLog.info("Starting SubscriptionTopic Matching Subscriber"); myMatchingChannel.subscribe(mySubscriptionTopicMatchingSubscriber); @@ -89,12 +101,14 @@ public class MatchingQueueSubscriberLoader { @PreDestroy public void stop() throws Exception { if (myMatchingChannel != null) { - ourLog.info("Destroying matching Channel {} with name {}", myMatchingChannel.getClass().getName(), SUBSCRIPTION_MATCHING_CHANNEL_NAME); + ourLog.info( + "Destroying matching Channel {} with name {}", + myMatchingChannel.getClass().getName(), + SUBSCRIPTION_MATCHING_CHANNEL_NAME); myMatchingChannel.destroy(); myMatchingChannel.unsubscribe(mySubscriptionMatchingSubscriber); myMatchingChannel.unsubscribe(mySubscriptionActivatingSubscriber); myMatchingChannel.unsubscribe(mySubscriptionRegisteringSubscriber); } } - } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionActivatingSubscriber.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionActivatingSubscriber.java index 53f28bfcc18..55914efdde5 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionActivatingSubscriber.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionActivatingSubscriber.java @@ -51,12 +51,16 @@ import javax.annotation.Nonnull; */ public class SubscriptionActivatingSubscriber implements MessageHandler { private final Logger ourLog = LoggerFactory.getLogger(SubscriptionActivatingSubscriber.class); + @Autowired private FhirContext myFhirContext; + @Autowired private DaoRegistry myDaoRegistry; + @Autowired private SubscriptionCanonicalizer mySubscriptionCanonicalizer; + @Autowired private StorageSettings myStorageSettings; @@ -90,7 +94,6 @@ public class SubscriptionActivatingSubscriber implements MessageHandler { default: break; } - } /** @@ -102,7 +105,8 @@ public class SubscriptionActivatingSubscriber implements MessageHandler { public synchronized boolean activateSubscriptionIfRequired(final IBaseResource theSubscription) { // Grab the value for "Subscription.channel.type" so we can see if this // subscriber applies.. 
- CanonicalSubscriptionChannelType subscriptionChannelType = mySubscriptionCanonicalizer.getChannelType(theSubscription); + CanonicalSubscriptionChannelType subscriptionChannelType = + mySubscriptionCanonicalizer.getChannelType(theSubscription); // Only activate supported subscriptions if (subscriptionChannelType == null @@ -128,18 +132,21 @@ public class SubscriptionActivatingSubscriber implements MessageHandler { try { // read can throw ResourceGoneException // if this happens, we will treat this as a failure to activate - subscription = subscriptionDao.read(theSubscription.getIdElement(), SystemRequestDetails.forAllPartitions()); + subscription = + subscriptionDao.read(theSubscription.getIdElement(), SystemRequestDetails.forAllPartitions()); subscription.setId(subscription.getIdElement().toVersionless()); - ourLog.info("Activating subscription {} from status {} to {}", subscription.getIdElement().toUnqualified().getValue(), SubscriptionConstants.REQUESTED_STATUS, SubscriptionConstants.ACTIVE_STATUS); + ourLog.info( + "Activating subscription {} from status {} to {}", + subscription.getIdElement().toUnqualified().getValue(), + SubscriptionConstants.REQUESTED_STATUS, + SubscriptionConstants.ACTIVE_STATUS); SubscriptionUtil.setStatus(myFhirContext, subscription, SubscriptionConstants.ACTIVE_STATUS); subscriptionDao.update(subscription, srd); return true; } catch (final UnprocessableEntityException | ResourceGoneException e) { subscription = subscription != null ? subscription : theSubscription; - ourLog.error("Failed to activate subscription " - + subscription.getIdElement() - + " : " + e.getMessage()); + ourLog.error("Failed to activate subscription " + subscription.getIdElement() + " : " + e.getMessage()); ourLog.info("Changing status of {} to ERROR", subscription.getIdElement()); SubscriptionUtil.setStatus(myFhirContext, subscription, SubscriptionConstants.ERROR_STATUS); SubscriptionUtil.setReason(myFhirContext, subscription, e.getMessage()); @@ -149,8 +156,8 @@ public class SubscriptionActivatingSubscriber implements MessageHandler { } public boolean isChannelTypeSupported(IBaseResource theSubscription) { - Subscription.SubscriptionChannelType channelType = mySubscriptionCanonicalizer.getChannelType(theSubscription).toCanonical(); + Subscription.SubscriptionChannelType channelType = + mySubscriptionCanonicalizer.getChannelType(theSubscription).toCanonical(); return myStorageSettings.getSupportedSubscriptionTypes().contains(channelType); } - } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionCriteriaParser.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionCriteriaParser.java index 5f0902fbaef..af2f5ac4d3a 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionCriteriaParser.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionCriteriaParser.java @@ -26,10 +26,10 @@ import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; import org.apache.commons.text.StringTokenizer; -import javax.annotation.Nullable; import java.util.Collections; import java.util.List; import java.util.Set; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -54,7 +54,6 @@ 
public enum SubscriptionCriteriaParser { * All types */ STARTYPE_EXPRESSION - } public static class SubscriptionCriteria { @@ -131,6 +130,4 @@ public enum SubscriptionCriteriaParser { return null; } - - } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionDeliveryRequest.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionDeliveryRequest.java index 336847c3ede..0ad8112ca0e 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionDeliveryRequest.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionDeliveryRequest.java @@ -41,7 +41,10 @@ public class SubscriptionDeliveryRequest { private final RequestPartitionId myRequestPartitionId; private final String myTransactionId; - public SubscriptionDeliveryRequest(IBaseBundle theBundlePayload, ActiveSubscription theActiveSubscription, SubscriptionTopicDispatchRequest theSubscriptionTopicDispatchRequest) { + public SubscriptionDeliveryRequest( + IBaseBundle theBundlePayload, + ActiveSubscription theActiveSubscription, + SubscriptionTopicDispatchRequest theSubscriptionTopicDispatchRequest) { myPayload = theBundlePayload; myPayloadId = null; myActiveSubscription = theActiveSubscription; @@ -50,7 +53,10 @@ public class SubscriptionDeliveryRequest { myTransactionId = theSubscriptionTopicDispatchRequest.getTransactionId(); } - public SubscriptionDeliveryRequest(@Nonnull IBaseResource thePayload, @Nonnull ResourceModifiedMessage theMsg, @Nonnull ActiveSubscription theActiveSubscription) { + public SubscriptionDeliveryRequest( + @Nonnull IBaseResource thePayload, + @Nonnull ResourceModifiedMessage theMsg, + @Nonnull ActiveSubscription theActiveSubscription) { myPayload = thePayload; myPayloadId = null; myActiveSubscription = theActiveSubscription; @@ -59,7 +65,10 @@ public class SubscriptionDeliveryRequest { myTransactionId = theMsg.getTransactionId(); } - public SubscriptionDeliveryRequest(@Nonnull IIdType thePayloadId, @Nonnull ResourceModifiedMessage theMsg, @Nonnull ActiveSubscription theActiveSubscription) { + public SubscriptionDeliveryRequest( + @Nonnull IIdType thePayloadId, + @Nonnull ResourceModifiedMessage theMsg, + @Nonnull ActiveSubscription theActiveSubscription) { myPayload = null; myPayloadId = thePayloadId; myActiveSubscription = theActiveSubscription; @@ -68,8 +77,6 @@ public class SubscriptionDeliveryRequest { myTransactionId = theMsg.getTransactionId(); } - - public IBaseResource getPayload() { return myPayload; } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionMatchDeliverer.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionMatchDeliverer.java index 511678b2adb..488a961c89d 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionMatchDeliverer.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionMatchDeliverer.java @@ -48,18 +48,26 @@ public class SubscriptionMatchDeliverer { private final IInterceptorBroadcaster myInterceptorBroadcaster; private final SubscriptionChannelRegistry mySubscriptionChannelRegistry; - public SubscriptionMatchDeliverer(FhirContext theFhirContext, 
IInterceptorBroadcaster theInterceptorBroadcaster, SubscriptionChannelRegistry theSubscriptionChannelRegistry) { + public SubscriptionMatchDeliverer( + FhirContext theFhirContext, + IInterceptorBroadcaster theInterceptorBroadcaster, + SubscriptionChannelRegistry theSubscriptionChannelRegistry) { myFhirContext = theFhirContext; myInterceptorBroadcaster = theInterceptorBroadcaster; mySubscriptionChannelRegistry = theSubscriptionChannelRegistry; } - public boolean deliverPayload(@Nullable IBaseResource thePayload, @Nonnull ResourceModifiedMessage theMsg, @Nonnull ActiveSubscription theActiveSubscription, @Nullable InMemoryMatchResult theInMemoryMatchResult) { + public boolean deliverPayload( + @Nullable IBaseResource thePayload, + @Nonnull ResourceModifiedMessage theMsg, + @Nonnull ActiveSubscription theActiveSubscription, + @Nullable InMemoryMatchResult theInMemoryMatchResult) { SubscriptionDeliveryRequest subscriptionDeliveryRequest; if (thePayload != null) { subscriptionDeliveryRequest = new SubscriptionDeliveryRequest(thePayload, theMsg, theActiveSubscription); } else { - subscriptionDeliveryRequest = new SubscriptionDeliveryRequest(theMsg.getPayloadId(myFhirContext), theMsg, theActiveSubscription); + subscriptionDeliveryRequest = + new SubscriptionDeliveryRequest(theMsg.getPayloadId(myFhirContext), theMsg, theActiveSubscription); } ResourceDeliveryMessage deliveryMsg = buildResourceDeliveryMessage(subscriptionDeliveryRequest); deliveryMsg.copyAdditionalPropertiesFrom(theMsg); @@ -67,20 +75,27 @@ public class SubscriptionMatchDeliverer { return sendToDeliveryChannel(theActiveSubscription, theInMemoryMatchResult, deliveryMsg); } - public boolean deliverPayload(@Nonnull SubscriptionDeliveryRequest subscriptionDeliveryRequest, @Nullable InMemoryMatchResult theInMemoryMatchResult) { + public boolean deliverPayload( + @Nonnull SubscriptionDeliveryRequest subscriptionDeliveryRequest, + @Nullable InMemoryMatchResult theInMemoryMatchResult) { ResourceDeliveryMessage deliveryMsg = buildResourceDeliveryMessage(subscriptionDeliveryRequest); - return sendToDeliveryChannel(subscriptionDeliveryRequest.getActiveSubscription(), theInMemoryMatchResult, deliveryMsg); + return sendToDeliveryChannel( + subscriptionDeliveryRequest.getActiveSubscription(), theInMemoryMatchResult, deliveryMsg); } - private boolean sendToDeliveryChannel(@Nonnull ActiveSubscription theActiveSubscription, @Nullable InMemoryMatchResult theInMemoryMatchResult, @Nonnull ResourceDeliveryMessage deliveryMsg) { + private boolean sendToDeliveryChannel( + @Nonnull ActiveSubscription theActiveSubscription, + @Nullable InMemoryMatchResult theInMemoryMatchResult, + @Nonnull ResourceDeliveryMessage deliveryMsg) { if (!callHooks(theActiveSubscription, theInMemoryMatchResult, deliveryMsg)) { return false; } boolean retVal = false; ResourceDeliveryJsonMessage wrappedMsg = new ResourceDeliveryJsonMessage(deliveryMsg); - MessageChannel deliveryChannel = mySubscriptionChannelRegistry.getDeliverySenderChannel(theActiveSubscription.getChannelName()); + MessageChannel deliveryChannel = + mySubscriptionChannelRegistry.getDeliverySenderChannel(theActiveSubscription.getChannelName()); if (deliveryChannel != null) { retVal = true; trySendToDeliveryChannel(wrappedMsg, deliveryChannel); @@ -95,7 +110,9 @@ public class SubscriptionMatchDeliverer { CanonicalSubscription subscription = theRequest.getSubscription(); - if (subscription != null && subscription.getPayloadString() != null && !subscription.getPayloadString().isEmpty()) { + if (subscription != null + 
&& subscription.getPayloadString() != null + && !subscription.getPayloadString().isEmpty()) { encoding = EncodingEnum.forContentType(subscription.getPayloadString()); } encoding = defaultIfNull(encoding, EncodingEnum.JSON); @@ -114,20 +131,25 @@ public class SubscriptionMatchDeliverer { return deliveryMsg; } - private boolean callHooks(ActiveSubscription theActiveSubscription, InMemoryMatchResult theInMemoryMatchResult, ResourceDeliveryMessage deliveryMsg) { + private boolean callHooks( + ActiveSubscription theActiveSubscription, + InMemoryMatchResult theInMemoryMatchResult, + ResourceDeliveryMessage deliveryMsg) { // Interceptor call: SUBSCRIPTION_RESOURCE_MATCHED HookParams params = new HookParams() - .add(CanonicalSubscription.class, theActiveSubscription.getSubscription()) - .add(ResourceDeliveryMessage.class, deliveryMsg) - .add(InMemoryMatchResult.class, theInMemoryMatchResult); + .add(CanonicalSubscription.class, theActiveSubscription.getSubscription()) + .add(ResourceDeliveryMessage.class, deliveryMsg) + .add(InMemoryMatchResult.class, theInMemoryMatchResult); if (!myInterceptorBroadcaster.callHooks(Pointcut.SUBSCRIPTION_RESOURCE_MATCHED, params)) { - ourLog.info("Interceptor has decided to abort processing of subscription {}", theActiveSubscription.getId()); + ourLog.info( + "Interceptor has decided to abort processing of subscription {}", theActiveSubscription.getId()); return false; } return true; } - private void trySendToDeliveryChannel(ResourceDeliveryJsonMessage theWrappedMsg, MessageChannel theDeliveryChannel) { + private void trySendToDeliveryChannel( + ResourceDeliveryJsonMessage theWrappedMsg, MessageChannel theDeliveryChannel) { try { boolean success = theDeliveryChannel.send(theWrappedMsg); if (!success) { diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionMatchingSubscriber.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionMatchingSubscriber.java index e8c32f7cf8b..08623ae0221 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionMatchingSubscriber.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionMatchingSubscriber.java @@ -39,8 +39,8 @@ import org.springframework.messaging.Message; import org.springframework.messaging.MessageHandler; import org.springframework.messaging.MessagingException; -import javax.annotation.Nonnull; import java.util.Collection; +import javax.annotation.Nonnull; import static ca.uhn.fhir.rest.server.messaging.BaseResourceMessage.OperationTypeEnum.DELETE; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -51,12 +51,16 @@ public class SubscriptionMatchingSubscriber implements MessageHandler { @Autowired private ISubscriptionMatcher mySubscriptionMatcher; + @Autowired private FhirContext myFhirContext; + @Autowired private SubscriptionRegistry mySubscriptionRegistry; + @Autowired private IInterceptorBroadcaster myInterceptorBroadcaster; + @Autowired private SubscriptionMatchDeliverer mySubscriptionMatchDeliverer; @@ -94,8 +98,7 @@ public class SubscriptionMatchingSubscriber implements MessageHandler { } // Interceptor call: SUBSCRIPTION_BEFORE_PERSISTED_RESOURCE_CHECKED - HookParams params = new HookParams() - .add(ResourceModifiedMessage.class, theMsg); + HookParams params = new HookParams().add(ResourceModifiedMessage.class, theMsg); if 
(!myInterceptorBroadcaster.callHooks(Pointcut.SUBSCRIPTION_BEFORE_PERSISTED_RESOURCE_CHECKED, params)) { return; } @@ -122,8 +125,7 @@ public class SubscriptionMatchingSubscriber implements MessageHandler { if (!anySubscriptionsMatchedResource) { // Interceptor call: SUBSCRIPTION_RESOURCE_MATCHED - HookParams params = new HookParams() - .add(ResourceModifiedMessage.class, theMsg); + HookParams params = new HookParams().add(ResourceModifiedMessage.class, theMsg); myInterceptorBroadcaster.callHooks(Pointcut.SUBSCRIPTION_RESOURCE_DID_NOT_MATCH_ANY_SUBSCRIPTIONS, params); } } @@ -132,12 +134,15 @@ public class SubscriptionMatchingSubscriber implements MessageHandler { * Returns true if subscription matched, and processing completed successfully, and the message was sent to the delivery channel. False otherwise. * */ - private boolean processSubscription(ResourceModifiedMessage theMsg, IIdType theResourceId, ActiveSubscription theActiveSubscription) { + private boolean processSubscription( + ResourceModifiedMessage theMsg, IIdType theResourceId, ActiveSubscription theActiveSubscription) { // skip if the partitions don't match CanonicalSubscription subscription = theActiveSubscription.getSubscription(); - if (subscription != null && theMsg.getPartitionId() != null && - theMsg.getPartitionId().hasPartitionIds() && !subscription.getCrossPartitionEnabled() && - !theMsg.getPartitionId().hasPartitionId(subscription.getRequestPartitionId())) { + if (subscription != null + && theMsg.getPartitionId() != null + && theMsg.getPartitionId().hasPartitionIds() + && !subscription.getCrossPartitionEnabled() + && !theMsg.getPartitionId().hasPartitionId(subscription.getRequestPartitionId())) { return false; } String nextSubscriptionId = theActiveSubscription.getId(); @@ -145,7 +150,10 @@ public class SubscriptionMatchingSubscriber implements MessageHandler { if (isNotBlank(theMsg.getSubscriptionId())) { if (!theMsg.getSubscriptionId().equals(nextSubscriptionId)) { // TODO KHS we should use a hash to look it up instead of this full table scan - ourLog.debug("Ignoring subscription {} because it is not {}", nextSubscriptionId, theMsg.getSubscriptionId()); + ourLog.debug( + "Ignoring subscription {} because it is not {}", + nextSubscriptionId, + theMsg.getSubscriptionId()); return false; } } @@ -165,20 +173,23 @@ public class SubscriptionMatchingSubscriber implements MessageHandler { if (theActiveSubscription.getCriteria().getType() == SubscriptionCriteriaParser.TypeEnum.SEARCH_EXPRESSION) { matchResult = mySubscriptionMatcher.match(theActiveSubscription.getSubscription(), theMsg); if (!matchResult.matched()) { - ourLog.trace("Subscription {} was not matched by resource {} {}", + ourLog.trace( + "Subscription {} was not matched by resource {} {}", + theActiveSubscription.getId(), + theResourceId.toUnqualifiedVersionless().getValue(), + matchResult.isInMemory() ? "in-memory" : "by querying the repository"); + return false; + } + ourLog.debug( + "Subscription {} was matched by resource {} {}", theActiveSubscription.getId(), theResourceId.toUnqualifiedVersionless().getValue(), matchResult.isInMemory() ? "in-memory" : "by querying the repository"); - return false; - } - ourLog.debug("Subscription {} was matched by resource {} {}", - theActiveSubscription.getId(), - theResourceId.toUnqualifiedVersionless().getValue(), - matchResult.isInMemory() ? 
"in-memory" : "by querying the repository"); } else { - ourLog.trace("Subscription {} was not matched by resource {} - No search expression", - theActiveSubscription.getId(), - theResourceId.toUnqualifiedVersionless().getValue()); + ourLog.trace( + "Subscription {} was not matched by resource {} - No search expression", + theActiveSubscription.getId(), + theResourceId.toUnqualifiedVersionless().getValue()); matchResult = InMemoryMatchResult.successfulMatch(); matchResult.setInMemory(true); } @@ -187,8 +198,8 @@ public class SubscriptionMatchingSubscriber implements MessageHandler { return mySubscriptionMatchDeliverer.deliverPayload(payload, theMsg, theActiveSubscription, matchResult); } - - private boolean resourceTypeIsAppropriateForSubscription(ActiveSubscription theActiveSubscription, IIdType theResourceId) { + private boolean resourceTypeIsAppropriateForSubscription( + ActiveSubscription theActiveSubscription, IIdType theResourceId) { SubscriptionCriteriaParser.SubscriptionCriteria criteria = theActiveSubscription.getCriteria(); String subscriptionId = theActiveSubscription.getId(); String resourceType = theResourceId.getResourceType(); @@ -213,6 +224,5 @@ public class SubscriptionMatchingSubscriber implements MessageHandler { ourLog.trace("Subscription {} start resource type check: {}", subscriptionId, match); return match; } - } } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionRegisteringSubscriber.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionRegisteringSubscriber.java index e0d479f76d3..c2ca97b0ee4 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionRegisteringSubscriber.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/subscriber/SubscriptionRegisteringSubscriber.java @@ -48,12 +48,16 @@ import javax.annotation.Nonnull; */ public class SubscriptionRegisteringSubscriber implements MessageHandler { private static final Logger ourLog = LoggerFactory.getLogger(SubscriptionRegisteringSubscriber.class); + @Autowired private FhirContext myFhirContext; + @Autowired private SubscriptionRegistry mySubscriptionRegistry; + @Autowired private SubscriptionCanonicalizer mySubscriptionCanonicalizer; + @Autowired private DaoRegistry myDaoRegistry; @@ -111,7 +115,6 @@ public class SubscriptionRegisteringSubscriber implements MessageHandler { } else { mySubscriptionRegistry.unregisterSubscriptionIfRegistered(payloadId.getIdPart()); } - } /** @@ -123,11 +126,11 @@ public class SubscriptionRegisteringSubscriber implements MessageHandler { private RequestDetails getPartitionAwareRequestDetails(ResourceModifiedMessage payload) { RequestPartitionId payloadPartitionId = payload.getPartitionId(); if (payloadPartitionId == null || payloadPartitionId.isDefaultPartition()) { - // This may look redundant but the package installer STORE_AND_INSTALL Subscriptions when partitioning is enabled + // This may look redundant but the package installer STORE_AND_INSTALL Subscriptions when partitioning is + // enabled // creates a corrupt default partition. This resets it to a clean one. 
payloadPartitionId = RequestPartitionId.defaultPartition(); } return new SystemRequestDetails().setRequestPartitionId(payloadPartitionId); } - } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/ActiveSubscriptionCache.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/ActiveSubscriptionCache.java index 671b0e95b5f..ce5d616ee28 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/ActiveSubscriptionCache.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/ActiveSubscriptionCache.java @@ -93,14 +93,14 @@ class ActiveSubscriptionCache { public List getTopicSubscriptionsForTopic(String theTopic) { assert !isBlank(theTopic); return getAll().stream() - .filter(as -> as.getSubscription().isTopicSubscription()) - .filter(as -> theTopic.equals(as.getSubscription().getTopic())) - .collect(Collectors.toList()); + .filter(as -> as.getSubscription().isTopicSubscription()) + .filter(as -> theTopic.equals(as.getSubscription().getTopic())) + .collect(Collectors.toList()); } public List getAllNonTopicSubscriptions() { return getAll().stream() - .filter(as -> !as.getSubscription().isTopicSubscription()) - .collect(Collectors.toList()); + .filter(as -> !as.getSubscription().isTopicSubscription()) + .collect(Collectors.toList()); } } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionLoader.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionLoader.java index de32117bbdf..13e14e72dc2 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionLoader.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionLoader.java @@ -32,11 +32,10 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nonnull; import java.util.HashSet; import java.util.List; import java.util.Set; - +import javax.annotation.Nonnull; public class SubscriptionLoader extends BaseResourceCacheSynchronizer { private static final Logger ourLog = LoggerFactory.getLogger(SubscriptionLoader.class); @@ -67,9 +66,11 @@ public class SubscriptionLoader extends BaseResourceCacheSynchronizer { SearchParameterMap map = new SearchParameterMap(); if (mySearchParamRegistry.getActiveSearchParam("Subscription", "status") != null) { - map.add(Subscription.SP_STATUS, new TokenOrListParam() - .addOr(new TokenParam(null, Subscription.SubscriptionStatus.REQUESTED.toCode())) - .addOr(new TokenParam(null, Subscription.SubscriptionStatus.ACTIVE.toCode()))); + map.add( + Subscription.SP_STATUS, + new TokenOrListParam() + .addOr(new TokenParam(null, Subscription.SubscriptionStatus.REQUESTED.toCode())) + .addOr(new TokenParam(null, Subscription.SubscriptionStatus.ACTIVE.toCode()))); } map.setLoadSynchronousUpTo(SubscriptionConstants.MAX_SUBSCRIPTION_RESULTS); return map; @@ -106,7 +107,10 @@ public class SubscriptionLoader extends BaseResourceCacheSynchronizer { } mySubscriptionRegistry.unregisterAllSubscriptionsNotInCollection(allIds); - ourLog.debug("Finished sync subscriptions - activated {} and registered {}", theResourceList.size(), registeredCount); + ourLog.debug( + "Finished sync subscriptions - activated {} and registered {}", + 
theResourceList.size(), + registeredCount); return activatedCount; } @@ -117,7 +121,8 @@ public class SubscriptionLoader extends BaseResourceCacheSynchronizer { private boolean activateSubscriptionIfRequested(IBaseResource theSubscription) { boolean successfullyActivated = false; - if (SubscriptionConstants.REQUESTED_STATUS.equals(mySubscriptionCanonicalizer.getSubscriptionStatus(theSubscription))) { + if (SubscriptionConstants.REQUESTED_STATUS.equals( + mySubscriptionCanonicalizer.getSubscriptionStatus(theSubscription))) { if (mySubscriptionActivatingInterceptor.isChannelTypeSupported(theSubscription)) { // internally, subscriptions that cannot activate will be set to error if (mySubscriptionActivatingInterceptor.activateSubscriptionIfRequired(theSubscription)) { @@ -126,9 +131,10 @@ } else { logSubscriptionNotActivatedPlusErrorIfPossible(theSubscription); } } else { - ourLog.debug("Could not activate subscription {} because channel type {} is not supported.", - theSubscription.getIdElement(), - mySubscriptionCanonicalizer.getChannelType(theSubscription)); + ourLog.debug( + "Could not activate subscription {} because channel type {} is not supported.", + theSubscription.getIdElement(), + mySubscriptionCanonicalizer.getChannelType(theSubscription)); } } @@ -152,15 +158,13 @@ error = ""; } ourLog.error("Subscription " - + theSubscription.getIdElement().getIdPart() - + " could not be activated." - + " This will not prevent startup, but it could lead to undesirable outcomes! " - + (StringUtils.isBlank(error) ? "" : "Error: " + error) - ); + + theSubscription.getIdElement().getIdPart() + + " could not be activated." + + " This will not prevent startup, but it could lead to undesirable outcomes! " + + (StringUtils.isBlank(error) ? "" : "Error: " + error)); } public void syncSubscriptions() { super.syncDatabaseToCache(); } } - diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionRegistry.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionRegistry.java index d701b1ddb7f..e791afa51e8 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionRegistry.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionRegistry.java @@ -35,27 +35,30 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.PreDestroy; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Optional; +import javax.annotation.PreDestroy; /** * Cache of active subscriptions. When a new subscription is added to the cache, a new Spring Channel is created * and a new MessageHandler for that subscription is subscribed to that channel. These subscriptions, channels, and * handlers are all cached in this registry so they can be removed if the subscription is deleted. 
*/ - public class SubscriptionRegistry { private static final Logger ourLog = LoggerFactory.getLogger(SubscriptionRegistry.class); private final ActiveSubscriptionCache myActiveSubscriptionCache = new ActiveSubscriptionCache(); + @Autowired private SubscriptionCanonicalizer mySubscriptionCanonicalizer; + @Autowired private ISubscriptionDeliveryChannelNamer mySubscriptionDeliveryChannelNamer; + @Autowired private SubscriptionChannelRegistry mySubscriptionChannelRegistry; + @Autowired private IInterceptorBroadcaster myInterceptorBroadcaster; @@ -81,7 +84,8 @@ public class SubscriptionRegistry { private Optional hasSubscription(IIdType theId) { Validate.notNull(theId); Validate.notBlank(theId.getIdPart()); - Optional activeSubscription = Optional.ofNullable(myActiveSubscriptionCache.get(theId.getIdPart())); + Optional activeSubscription = + Optional.ofNullable(myActiveSubscriptionCache.get(theId.getIdPart())); return activeSubscription.map(ActiveSubscription::getSubscription); } @@ -91,7 +95,8 @@ public class SubscriptionRegistry { * Returns the configuration, or null, if no retry (or a bad retry value) * is specified. */ - private ChannelRetryConfiguration getRetryConfigurationFromSubscriptionExtensions(CanonicalSubscription theSubscription) { + private ChannelRetryConfiguration getRetryConfigurationFromSubscriptionExtensions( + CanonicalSubscription theSubscription) { ChannelRetryConfiguration configuration = new ChannelRetryConfiguration(); List retryCount = theSubscription.getChannelExtensions(HapiExtensions.EX_RETRY_COUNT); @@ -118,7 +123,8 @@ public class SubscriptionRegistry { String channelName = mySubscriptionDeliveryChannelNamer.nameFromSubscription(theCanonicalSubscription); // get the actual retry configuration - ChannelRetryConfiguration configuration = getRetryConfigurationFromSubscriptionExtensions(theCanonicalSubscription); + ChannelRetryConfiguration configuration = + getRetryConfigurationFromSubscriptionExtensions(theCanonicalSubscription); ActiveSubscription activeSubscription = new ActiveSubscription(theCanonicalSubscription, channelName); activeSubscription.setRetryConfiguration(configuration); @@ -127,11 +133,13 @@ public class SubscriptionRegistry { mySubscriptionChannelRegistry.add(activeSubscription); myActiveSubscriptionCache.put(subscriptionId, activeSubscription); - ourLog.info("Registered active subscription Subscription/{} - Have {} registered", subscriptionId, myActiveSubscriptionCache.size()); + ourLog.info( + "Registered active subscription Subscription/{} - Have {} registered", + subscriptionId, + myActiveSubscriptionCache.size()); // Interceptor call: SUBSCRIPTION_AFTER_ACTIVE_SUBSCRIPTION_REGISTERED - HookParams params = new HookParams() - .add(CanonicalSubscription.class, theCanonicalSubscription); + HookParams params = new HookParams().add(CanonicalSubscription.class, theCanonicalSubscription); myInterceptorBroadcaster.callHooks(Pointcut.SUBSCRIPTION_AFTER_ACTIVE_SUBSCRIPTION_REGISTERED, params); } @@ -141,7 +149,10 @@ public class SubscriptionRegistry { ActiveSubscription activeSubscription = myActiveSubscriptionCache.remove(theSubscriptionId); if (activeSubscription != null) { mySubscriptionChannelRegistry.remove(activeSubscription); - ourLog.info("Unregistered active subscription {} - Have {} registered", theSubscriptionId, myActiveSubscriptionCache.size()); + ourLog.info( + "Unregistered active subscription {} - Have {} registered", + theSubscriptionId, + myActiveSubscriptionCache.size()); // Interceptor call: 
SUBSCRIPTION_AFTER_ACTIVE_SUBSCRIPTION_UNREGISTERED HookParams params = new HookParams(); @@ -159,7 +170,8 @@ public class SubscriptionRegistry { synchronized void unregisterAllSubscriptionsNotInCollection(Collection theAllIds) { - List idsToDelete = myActiveSubscriptionCache.markAllSubscriptionsNotInCollectionForDeletionAndReturnIdsToDelete(theAllIds); + List idsToDelete = + myActiveSubscriptionCache.markAllSubscriptionsNotInCollectionForDeletionAndReturnIdsToDelete(theAllIds); for (String id : idsToDelete) { unregisterSubscriptionIfRegistered(id); } @@ -175,9 +187,12 @@ public class SubscriptionRegistry { // No changes return false; } - ourLog.info("Updating already-registered active subscription {}", theSubscription.getIdElement().toUnqualified().getValue()); + ourLog.info( + "Updating already-registered active subscription {}", + theSubscription.getIdElement().toUnqualified().getValue()); if (channelTypeSame(existingSubscription.get(), newSubscription)) { - ourLog.info("Channel type is same. Updating active subscription and re-using existing channel and handlers."); + ourLog.info( + "Channel type is same. Updating active subscription and re-using existing channel and handlers."); updateSubscription(theSubscription); return true; } @@ -201,12 +216,12 @@ public class SubscriptionRegistry { activeSubscription.setSubscription(canonicalized); // Interceptor call: SUBSCRIPTION_AFTER_ACTIVE_SUBSCRIPTION_REGISTERED - HookParams params = new HookParams() - .add(CanonicalSubscription.class, canonicalized); + HookParams params = new HookParams().add(CanonicalSubscription.class, canonicalized); myInterceptorBroadcaster.callHooks(Pointcut.SUBSCRIPTION_AFTER_ACTIVE_SUBSCRIPTION_REGISTERED, params); } - private boolean channelTypeSame(CanonicalSubscription theExistingSubscription, CanonicalSubscription theNewSubscription) { + private boolean channelTypeSame( + CanonicalSubscription theExistingSubscription, CanonicalSubscription theNewSubscription) { return theExistingSubscription.getChannelType().equals(theNewSubscription.getChannelType()); } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/package-info.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/package-info.java index 83318a2080f..784e3f78b4a 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/package-info.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/package-info.java @@ -28,4 +28,3 @@ * Activated by {@link ca.uhn.fhir.jpa.model.config.PartitionSettings#setPartitioningEnabled(boolean)} */ package ca.uhn.fhir.jpa.subscription; - diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/config/SubscriptionSubmitterConfig.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/config/SubscriptionSubmitterConfig.java index 412e9c8ecc5..0a842a8ee3f 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/config/SubscriptionSubmitterConfig.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/config/SubscriptionSubmitterConfig.java @@ -52,7 +52,8 @@ public class SubscriptionSubmitterConfig { } @Bean - public SubscriptionQueryValidator subscriptionQueryValidator(DaoRegistry theDaoRegistry, SubscriptionStrategyEvaluator theSubscriptionStrategyEvaluator) { + public SubscriptionQueryValidator subscriptionQueryValidator( + DaoRegistry theDaoRegistry, 
SubscriptionStrategyEvaluator theSubscriptionStrategyEvaluator) { return new SubscriptionQueryValidator(theDaoRegistry, theSubscriptionStrategyEvaluator); } @@ -66,6 +67,4 @@ public class SubscriptionSubmitterConfig { public ISubscriptionTriggeringSvc subscriptionTriggeringSvc() { return new SubscriptionTriggeringSvcImpl(); } - - } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/interceptor/SubscriptionMatcherInterceptor.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/interceptor/SubscriptionMatcherInterceptor.java index b8179282589..d051cca1240 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/interceptor/SubscriptionMatcherInterceptor.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/interceptor/SubscriptionMatcherInterceptor.java @@ -54,14 +54,19 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; @Interceptor public class SubscriptionMatcherInterceptor implements IResourceModifiedConsumer { private static final Logger ourLog = LoggerFactory.getLogger(SubscriptionMatcherInterceptor.class); + @Autowired private FhirContext myFhirContext; + @Autowired private IInterceptorBroadcaster myInterceptorBroadcaster; + @Autowired private SubscriptionChannelFactory mySubscriptionChannelFactory; + @Autowired private StorageSettings myStorageSettings; + @Autowired private IRequestPartitionHelperSvc myRequestPartitionHelperSvc; @@ -77,11 +82,14 @@ public class SubscriptionMatcherInterceptor implements IResourceModifiedConsumer @EventListener(classes = {ContextRefreshedEvent.class}) public void startIfNeeded() { if (myStorageSettings.getSupportedSubscriptionTypes().isEmpty()) { - ourLog.debug("Subscriptions are disabled on this server. Skipping {} channel creation.", SubscriptionMatchingSubscriber.SUBSCRIPTION_MATCHING_CHANNEL_NAME); + ourLog.debug( + "Subscriptions are disabled on this server. Skipping {} channel creation.", + SubscriptionMatchingSubscriber.SUBSCRIPTION_MATCHING_CHANNEL_NAME); return; } if (myMatchingChannel == null) { - myMatchingChannel = mySubscriptionChannelFactory.newMatchingSendingChannel(SubscriptionMatchingSubscriber.SUBSCRIPTION_MATCHING_CHANNEL_NAME, getChannelProducerSettings()); + myMatchingChannel = mySubscriptionChannelFactory.newMatchingSendingChannel( + SubscriptionMatchingSubscriber.SUBSCRIPTION_MATCHING_CHANNEL_NAME, getChannelProducerSettings()); } } @@ -117,15 +125,21 @@ public class SubscriptionMatcherInterceptor implements IResourceModifiedConsumer * This is an internal API - Use with caution! 
*/ @Override - public void submitResourceModified(IBaseResource theNewResource, ResourceModifiedMessage.OperationTypeEnum theOperationType, RequestDetails theRequest) { - // Even though the resource is being written, the subscription will be interacting with it by effectively "reading" it so we set the RequestPartitionId as a read request - RequestPartitionId requestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequestForRead(theRequest, theNewResource.getIdElement().getResourceType(), theNewResource.getIdElement()); - ResourceModifiedMessage msg = new ResourceModifiedMessage(myFhirContext, theNewResource, theOperationType, theRequest, requestPartitionId); + public void submitResourceModified( + IBaseResource theNewResource, + ResourceModifiedMessage.OperationTypeEnum theOperationType, + RequestDetails theRequest) { + // Even though the resource is being written, the subscription will be interacting with it by effectively + // "reading" it so we set the RequestPartitionId as a read request + RequestPartitionId requestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequestForRead( + theRequest, theNewResource.getIdElement().getResourceType(), theNewResource.getIdElement()); + ResourceModifiedMessage msg = new ResourceModifiedMessage( + myFhirContext, theNewResource, theOperationType, theRequest, requestPartitionId); // Interceptor call: SUBSCRIPTION_RESOURCE_MODIFIED - HookParams params = new HookParams() - .add(ResourceModifiedMessage.class, msg); - boolean outcome = CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.SUBSCRIPTION_RESOURCE_MODIFIED, params); + HookParams params = new HookParams().add(ResourceModifiedMessage.class, msg); + boolean outcome = CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, theRequest, Pointcut.SUBSCRIPTION_RESOURCE_MODIFIED, params); if (!outcome) { return; } @@ -162,7 +176,9 @@ public class SubscriptionMatcherInterceptor implements IResourceModifiedConsumer protected void sendToProcessingChannel(final ResourceModifiedMessage theMessage) { ourLog.trace("Sending resource modified message to processing channel"); - Validate.notNull(myMatchingChannel, "A SubscriptionMatcherInterceptor has been registered without calling start() on it."); + Validate.notNull( + myMatchingChannel, + "A SubscriptionMatcherInterceptor has been registered without calling start() on it."); myMatchingChannel.send(new ResourceModifiedJsonMessage(theMessage)); } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/interceptor/SubscriptionQueryValidator.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/interceptor/SubscriptionQueryValidator.java index cb78dd85ccb..a40f64908dd 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/interceptor/SubscriptionQueryValidator.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/interceptor/SubscriptionQueryValidator.java @@ -32,7 +32,8 @@ public class SubscriptionQueryValidator { private final DaoRegistry myDaoRegistry; private final SubscriptionStrategyEvaluator mySubscriptionStrategyEvaluator; - public SubscriptionQueryValidator(DaoRegistry theDaoRegistry, SubscriptionStrategyEvaluator theSubscriptionStrategyEvaluator) { + public SubscriptionQueryValidator( + DaoRegistry theDaoRegistry, SubscriptionStrategyEvaluator theSubscriptionStrategyEvaluator) { myDaoRegistry = 
theDaoRegistry; mySubscriptionStrategyEvaluator = theSubscriptionStrategyEvaluator; } @@ -53,7 +54,8 @@ public class SubscriptionQueryValidator { for (String next : parsedCriteria.getApplicableResourceTypes()) { if (!myDaoRegistry.isResourceTypeSupported(next)) { - throw new UnprocessableEntityException(Msg.code(13) + theFieldName + " contains invalid/unsupported resource type: " + next); + throw new UnprocessableEntityException( + Msg.code(13) + theFieldName + " contains invalid/unsupported resource type: " + next); } } @@ -63,12 +65,14 @@ public class SubscriptionQueryValidator { int sep = theCriteria.indexOf('?'); if (sep <= 1) { - throw new UnprocessableEntityException(Msg.code(14) + theFieldName + " must be in the form \"{Resource Type}?[params]\""); + throw new UnprocessableEntityException( + Msg.code(14) + theFieldName + " must be in the form \"{Resource Type}?[params]\""); } String resType = theCriteria.substring(0, sep); if (resType.contains("/")) { - throw new UnprocessableEntityException(Msg.code(15) + theFieldName + " must be in the form \"{Resource Type}?[params]\""); + throw new UnprocessableEntityException( + Msg.code(15) + theFieldName + " must be in the form \"{Resource Type}?[params]\""); } } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/interceptor/SubscriptionSubmitInterceptorLoader.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/interceptor/SubscriptionSubmitInterceptorLoader.java index bd285eabd71..2f72cb60509 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/interceptor/SubscriptionSubmitInterceptorLoader.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/interceptor/SubscriptionSubmitInterceptorLoader.java @@ -28,32 +28,39 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.PostConstruct; import java.util.Set; +import javax.annotation.PostConstruct; public class SubscriptionSubmitInterceptorLoader { private static final Logger ourLog = LoggerFactory.getLogger(SubscriptionSubmitInterceptorLoader.class); @Autowired private SubscriptionMatcherInterceptor mySubscriptionMatcherInterceptor; + @Autowired private SubscriptionValidatingInterceptor mySubscriptionValidatingInterceptor; + @Autowired(required = false) private SubscriptionTopicValidatingInterceptor mySubscriptionTopicValidatingInterceptor; + @Autowired private StorageSettings myStorageSettings; + @Autowired private IInterceptorService myInterceptorRegistry; + private boolean mySubscriptionValidatingInterceptorRegistered; private boolean mySubscriptionMatcherInterceptorRegistered; private boolean mySubscriptionTopicValidatingInterceptorRegistered; @PostConstruct public void start() { - Set supportedSubscriptionTypes = myStorageSettings.getSupportedSubscriptionTypes(); + Set supportedSubscriptionTypes = + myStorageSettings.getSupportedSubscriptionTypes(); if (supportedSubscriptionTypes.isEmpty()) { - ourLog.info("Subscriptions are disabled on this server. Subscriptions will not be activated and incoming resources will not be matched against subscriptions."); + ourLog.info( + "Subscriptions are disabled on this server. 
Subscriptions will not be activated and incoming resources will not be matched against subscriptions."); } else { if (!mySubscriptionMatcherInterceptorRegistered) { ourLog.info("Registering subscription matcher interceptor"); diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/interceptor/SubscriptionValidatingInterceptor.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/interceptor/SubscriptionValidatingInterceptor.java index 348d8fd2a10..ca6e683a758 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/interceptor/SubscriptionValidatingInterceptor.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/submit/interceptor/SubscriptionValidatingInterceptor.java @@ -58,32 +58,45 @@ import java.net.URISyntaxException; import java.util.Optional; import static org.apache.commons.lang3.StringUtils.isBlank; + @Interceptor public class SubscriptionValidatingInterceptor { @Autowired private SubscriptionCanonicalizer mySubscriptionCanonicalizer; + @Autowired private DaoRegistry myDaoRegistry; + @Autowired private StorageSettings myStorageSettings; + @Autowired private SubscriptionStrategyEvaluator mySubscriptionStrategyEvaluator; private FhirContext myFhirContext; + @Autowired private IRequestPartitionHelperSvc myRequestPartitionHelperSvc; + @Autowired private SubscriptionQueryValidator mySubscriptionQueryValidator; @Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED) - public void resourcePreCreate(IBaseResource theResource, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) { - validateSubmittedSubscription(theResource, theRequestDetails, theRequestPartitionId, Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED); + public void resourcePreCreate( + IBaseResource theResource, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) { + validateSubmittedSubscription( + theResource, theRequestDetails, theRequestPartitionId, Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED); } @Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED) - public void resourceUpdated(IBaseResource theOldResource, IBaseResource theResource, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) { - validateSubmittedSubscription(theResource, theRequestDetails, theRequestPartitionId, Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED); + public void resourceUpdated( + IBaseResource theOldResource, + IBaseResource theResource, + RequestDetails theRequestDetails, + RequestPartitionId theRequestPartitionId) { + validateSubmittedSubscription( + theResource, theRequestDetails, theRequestPartitionId, Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED); } @Autowired @@ -92,12 +105,16 @@ public class SubscriptionValidatingInterceptor { } @VisibleForTesting - void validateSubmittedSubscription(IBaseResource theSubscription, - RequestDetails theRequestDetails, - RequestPartitionId theRequestPartitionId, - Pointcut thePointcut) { - if (Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED != thePointcut && Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED != thePointcut) { - throw new UnprocessableEntityException(Msg.code(2267) + "Expected Pointcut to be either STORAGE_PRESTORAGE_RESOURCE_CREATED or STORAGE_PRESTORAGE_RESOURCE_UPDATED but was: " + thePointcut); + void validateSubmittedSubscription( + IBaseResource theSubscription, + RequestDetails theRequestDetails, + RequestPartitionId theRequestPartitionId, + Pointcut thePointcut) { + if 
(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED != thePointcut + && Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED != thePointcut) { + throw new UnprocessableEntityException(Msg.code(2267) + + "Expected Pointcut to be either STORAGE_PRESTORAGE_RESOURCE_CREATED or STORAGE_PRESTORAGE_RESOURCE_UPDATED but was: " + + thePointcut); } if (!"Subscription".equals(myFhirContext.getResourceType(theSubscription))) { @@ -112,7 +129,8 @@ public class SubscriptionValidatingInterceptor { } boolean finished = false; if (subscription.getStatus() == null) { - throw new UnprocessableEntityException(Msg.code(8) + "Can not process submitted Subscription - Subscription.status must be populated on this server"); + throw new UnprocessableEntityException(Msg.code(8) + + "Can not process submitted Subscription - Subscription.status must be populated on this server"); } switch (subscription.getStatus()) { @@ -133,17 +151,24 @@ public class SubscriptionValidatingInterceptor { if (!finished) { if (subscription.isTopicSubscription()) { - if (myFhirContext.getVersion().getVersion() != FhirVersionEnum.R4) { // In R4 topic subscriptions exist without a corresponding SubscriptionTopic resource + if (myFhirContext.getVersion().getVersion() + != FhirVersionEnum + .R4) { // In R4 topic subscriptions exist without a corresponding SubscriptionTopic + // resource Optional oTopic = findSubscriptionTopicByUrl(subscription.getTopic()); if (!oTopic.isPresent()) { - throw new UnprocessableEntityException(Msg.code(2322) + "No SubscriptionTopic exists with topic: " + subscription.getTopic()); + throw new UnprocessableEntityException( + Msg.code(2322) + "No SubscriptionTopic exists with topic: " + subscription.getTopic()); } } } else { validateQuery(subscription.getCriteriaString(), "Subscription.criteria"); if (subscription.getPayloadSearchCriteria() != null) { - validateQuery(subscription.getPayloadSearchCriteria(), "Subscription.extension(url='" + HapiExtensions.EXT_SUBSCRIPTION_PAYLOAD_SEARCH_CRITERIA + "')"); + validateQuery( + subscription.getPayloadSearchCriteria(), + "Subscription.extension(url='" + HapiExtensions.EXT_SUBSCRIPTION_PAYLOAD_SEARCH_CRITERIA + + "')"); } } @@ -151,33 +176,39 @@ public class SubscriptionValidatingInterceptor { try { SubscriptionMatchingStrategy strategy = mySubscriptionStrategyEvaluator.determineStrategy(subscription); - if (!(SubscriptionMatchingStrategy.IN_MEMORY == strategy) && myStorageSettings.isOnlyAllowInMemorySubscriptions()) { - throw new InvalidRequestException(Msg.code(2367)+ "This server is configured to only allow in-memory subscriptions. This subscription's criteria cannot be evaluated in-memory."); + if (!(SubscriptionMatchingStrategy.IN_MEMORY == strategy) + && myStorageSettings.isOnlyAllowInMemorySubscriptions()) { + throw new InvalidRequestException( + Msg.code(2367) + + "This server is configured to only allow in-memory subscriptions. 
This subscription's criteria cannot be evaluated in-memory."); } mySubscriptionCanonicalizer.setMatchingStrategyTag(theSubscription, strategy); } catch (InvalidRequestException | DataFormatException e) { - throw new UnprocessableEntityException(Msg.code(9) + "Invalid subscription criteria submitted: " + subscription.getCriteriaString() + " " + e.getMessage()); + throw new UnprocessableEntityException(Msg.code(9) + "Invalid subscription criteria submitted: " + + subscription.getCriteriaString() + " " + e.getMessage()); } if (subscription.getChannelType() == null) { - throw new UnprocessableEntityException(Msg.code(10) + "Subscription.channel.type must be populated on this server"); + throw new UnprocessableEntityException( + Msg.code(10) + "Subscription.channel.type must be populated on this server"); } else if (subscription.getChannelType() == CanonicalSubscriptionChannelType.MESSAGE) { validateMessageSubscriptionEndpoint(subscription.getEndpointUrl()); } - - } } - protected void validatePermissions(IBaseResource theSubscription, - CanonicalSubscription theCanonicalSubscription, - RequestDetails theRequestDetails, - RequestPartitionId theRequestPartitionId, - Pointcut thePointcut) { + protected void validatePermissions( + IBaseResource theSubscription, + CanonicalSubscription theCanonicalSubscription, + RequestDetails theRequestDetails, + RequestPartitionId theRequestPartitionId, + Pointcut thePointcut) { // If the subscription has the cross partition tag - if (SubscriptionUtil.isCrossPartition(theSubscription) && !(theRequestDetails instanceof SystemRequestDetails)) { - if (!myStorageSettings.isCrossPartitionSubscriptionEnabled()){ - throw new UnprocessableEntityException(Msg.code(2009) + "Cross partition subscription is not enabled on this server"); + if (SubscriptionUtil.isCrossPartition(theSubscription) + && !(theRequestDetails instanceof SystemRequestDetails)) { + if (!myStorageSettings.isCrossPartitionSubscriptionEnabled()) { + throw new UnprocessableEntityException( + Msg.code(2009) + "Cross partition subscription is not enabled on this server"); } if (theRequestPartitionId == null && Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED == thePointcut) { @@ -187,12 +218,13 @@ public class SubscriptionValidatingInterceptor { // if we have a partition id already, we'll use that // otherwise we might end up with READ and CREATE pointcuts // returning conflicting partitions (say, all vs default) - RequestPartitionId toCheckPartitionId = theRequestPartitionId != null ? - theRequestPartitionId : - determinePartition(theRequestDetails, theSubscription); + RequestPartitionId toCheckPartitionId = theRequestPartitionId != null + ? 
theRequestPartitionId + : determinePartition(theRequestDetails, theSubscription); if (!toCheckPartitionId.isDefaultPartition()) { - throw new UnprocessableEntityException(Msg.code(2010) + "Cross partition subscription must be created on the default partition"); + throw new UnprocessableEntityException( + Msg.code(2010) + "Cross partition subscription must be created on the default partition"); } } } @@ -200,9 +232,11 @@ public class SubscriptionValidatingInterceptor { private RequestPartitionId determinePartition(RequestDetails theRequestDetails, IBaseResource theResource) { switch (theRequestDetails.getRestOperationType()) { case CREATE: - return myRequestPartitionHelperSvc.determineCreatePartitionForRequest(theRequestDetails, theResource, "Subscription"); + return myRequestPartitionHelperSvc.determineCreatePartitionForRequest( + theRequestDetails, theResource, "Subscription"); case UPDATE: - return myRequestPartitionHelperSvc.determineReadPartitionForRequestForRead(theRequestDetails, "Subscription", theResource.getIdElement()); + return myRequestPartitionHelperSvc.determineReadPartitionForRequestForRead( + theRequestDetails, "Subscription", theResource.getIdElement()); default: return null; } @@ -230,14 +264,17 @@ public class SubscriptionValidatingInterceptor { URI uri = new URI(theEndpointUrl); if (!"channel".equals(uri.getScheme())) { - throw new UnprocessableEntityException(Msg.code(17) + "Only 'channel' protocol is supported for Subscriptions with channel type 'message'"); + throw new UnprocessableEntityException(Msg.code(17) + + "Only 'channel' protocol is supported for Subscriptions with channel type 'message'"); } String channelName = uri.getSchemeSpecificPart(); if (isBlank(channelName)) { - throw new UnprocessableEntityException(Msg.code(18) + "A channel name must appear after channel: in a message Subscription endpoint"); + throw new UnprocessableEntityException( + Msg.code(18) + "A channel name must appear after channel: in a message Subscription endpoint"); } } catch (URISyntaxException e) { - throw new UnprocessableEntityException(Msg.code(19) + "Invalid subscription endpoint uri " + theEndpointUrl, e); + throw new UnprocessableEntityException( + Msg.code(19) + "Invalid subscription endpoint uri " + theEndpointUrl, e); } } @@ -254,14 +291,17 @@ public class SubscriptionValidatingInterceptor { @SuppressWarnings("WeakerAccess") protected void validateChannelEndpoint(CanonicalSubscription theResource) { if (isBlank(theResource.getEndpointUrl())) { - throw new UnprocessableEntityException(Msg.code(21) + "Rest-hook subscriptions must have Subscription.channel.endpoint defined"); + throw new UnprocessableEntityException( + Msg.code(21) + "Rest-hook subscriptions must have Subscription.channel.endpoint defined"); } } @SuppressWarnings("WeakerAccess") protected void validateChannelPayload(CanonicalSubscription theResource) { - if (!isBlank(theResource.getPayloadString()) && EncodingEnum.forContentType(theResource.getPayloadString()) == null) { - throw new UnprocessableEntityException(Msg.code(1985) + "Invalid value for Subscription.channel.payload: " + theResource.getPayloadString()); + if (!isBlank(theResource.getPayloadString()) + && EncodingEnum.forContentType(theResource.getPayloadString()) == null) { + throw new UnprocessableEntityException(Msg.code(1985) + "Invalid value for Subscription.channel.payload: " + + theResource.getPayloadString()); } } @@ -287,12 +327,11 @@ public class SubscriptionValidatingInterceptor { myRequestPartitionHelperSvc = theRequestPartitionHelperSvc; 
} - @VisibleForTesting @SuppressWarnings("WeakerAccess") - public void setSubscriptionStrategyEvaluatorForUnitTest(SubscriptionStrategyEvaluator theSubscriptionStrategyEvaluator) { + public void setSubscriptionStrategyEvaluatorForUnitTest( + SubscriptionStrategyEvaluator theSubscriptionStrategyEvaluator) { mySubscriptionStrategyEvaluator = theSubscriptionStrategyEvaluator; mySubscriptionQueryValidator = new SubscriptionQueryValidator(myDaoRegistry, theSubscriptionStrategyEvaluator); } - } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/triggering/SubscriptionTriggeringSvcImpl.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/triggering/SubscriptionTriggeringSvcImpl.java index 7500d0470e8..2cbd1c0d291 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/triggering/SubscriptionTriggeringSvcImpl.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/triggering/SubscriptionTriggeringSvcImpl.java @@ -66,8 +66,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nullable; -import javax.annotation.PostConstruct; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -80,6 +78,8 @@ import java.util.concurrent.RejectedExecutionHandler; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; +import javax.annotation.Nullable; +import javax.annotation.PostConstruct; import static ca.uhn.fhir.rest.server.provider.ProviderConstants.SUBSCRIPTION_TRIGGERING_PARAM_RESOURCE_ID; import static java.util.Objects.isNull; @@ -93,30 +93,42 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc private static final Logger ourLog = LoggerFactory.getLogger(SubscriptionTriggeringSvcImpl.class); private static final int DEFAULT_MAX_SUBMIT = 10000; private final List myActiveJobs = new ArrayList<>(); + @Autowired private FhirContext myFhirContext; + @Autowired private DaoRegistry myDaoRegistry; + @Autowired private JpaStorageSettings myStorageSettings; + @Autowired private ISearchCoordinatorSvc mySearchCoordinatorSvc; + @Autowired private MatchUrlService myMatchUrlService; + @Autowired private IResourceModifiedConsumer myResourceModifiedConsumer; + @Autowired private HapiTransactionService myTransactionService; + private int myMaxSubmitPerPass = DEFAULT_MAX_SUBMIT; private ExecutorService myExecutorService; @Autowired private ISearchSvc mySearchService; + @Autowired private SearchBuilderFactory mySearchBuilderFactory; @Override - public IBaseParameters triggerSubscription(@Nullable List> theResourceIds, @Nullable List> theSearchUrls, @Nullable IIdType theSubscriptionId) { + public IBaseParameters triggerSubscription( + @Nullable List> theResourceIds, + @Nullable List> theSearchUrls, + @Nullable IIdType theSubscriptionId) { if (myStorageSettings.getSupportedSubscriptionTypes().isEmpty()) { throw new PreconditionFailedException(Msg.code(22) + "Subscription processing not active on this server"); @@ -143,21 +155,28 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc // Resource URLs must be complete for (IPrimitiveType next : resourceIds) { IdType resourceId = new IdType(next.getValue()); - ValidateUtil.isTrueOrThrowInvalidRequest(resourceId.hasResourceType(), SUBSCRIPTION_TRIGGERING_PARAM_RESOURCE_ID + " parameter must have resource 
type"); - ValidateUtil.isTrueOrThrowInvalidRequest(resourceId.hasIdPart(), SUBSCRIPTION_TRIGGERING_PARAM_RESOURCE_ID + " parameter must have resource ID part"); + ValidateUtil.isTrueOrThrowInvalidRequest( + resourceId.hasResourceType(), + SUBSCRIPTION_TRIGGERING_PARAM_RESOURCE_ID + " parameter must have resource type"); + ValidateUtil.isTrueOrThrowInvalidRequest( + resourceId.hasIdPart(), + SUBSCRIPTION_TRIGGERING_PARAM_RESOURCE_ID + " parameter must have resource ID part"); } // Search URLs must be valid for (IPrimitiveType next : searchUrls) { if (!next.getValue().contains("?")) { - throw new InvalidRequestException(Msg.code(24) + "Search URL is not valid (must be in the form \"[resource type]?[optional params]\")"); + throw new InvalidRequestException(Msg.code(24) + + "Search URL is not valid (must be in the form \"[resource type]?[optional params]\")"); } } SubscriptionTriggeringJobDetails jobDetails = new SubscriptionTriggeringJobDetails(); jobDetails.setJobId(UUID.randomUUID().toString()); - jobDetails.setRemainingResourceIds(resourceIds.stream().map(IPrimitiveType::getValue).collect(Collectors.toList())); - jobDetails.setRemainingSearchUrls(searchUrls.stream().map(IPrimitiveType::getValue).collect(Collectors.toList())); + jobDetails.setRemainingResourceIds( + resourceIds.stream().map(IPrimitiveType::getValue).collect(Collectors.toList())); + jobDetails.setRemainingSearchUrls( + searchUrls.stream().map(IPrimitiveType::getValue).collect(Collectors.toList())); if (theSubscriptionId != null) { jobDetails.setSubscriptionId(theSubscriptionId.getIdPart()); } @@ -165,12 +184,18 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc // Submit job for processing synchronized (myActiveJobs) { myActiveJobs.add(jobDetails); - ourLog.info("Subscription triggering requested for {} resource and {} search - Gave job ID: {} and have {} jobs", resourceIds.size(), searchUrls.size(), jobDetails.getJobId(), myActiveJobs.size()); + ourLog.info( + "Subscription triggering requested for {} resource and {} search - Gave job ID: {} and have {} jobs", + resourceIds.size(), + searchUrls.size(), + jobDetails.getJobId(), + myActiveJobs.size()); } // Create a parameters response IBaseParameters retVal = ParametersUtil.newInstance(myFhirContext); - IPrimitiveType value = (IPrimitiveType) myFhirContext.getElementDefinition("string").newInstance(); + IPrimitiveType value = + (IPrimitiveType) myFhirContext.getElementDefinition("string").newInstance(); value.setValueAsString("Subscription triggering job submitted as JOB ID: " + jobDetails.myJobId); ParametersUtil.addParameterToParameters(myFhirContext, retVal, "information", value); return retVal; @@ -180,12 +205,13 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc public void runDeliveryPass() { synchronized (myActiveJobs) { - if (myActiveJobs.isEmpty()) { return; } - String activeJobIds = myActiveJobs.stream().map(SubscriptionTriggeringJobDetails::getJobId).collect(Collectors.joining(", ")); + String activeJobIds = myActiveJobs.stream() + .map(SubscriptionTriggeringJobDetails::getJobId) + .collect(Collectors.joining(", ")); ourLog.info("Starting pass: currently have {} active job IDs: {}", myActiveJobs.size(), activeJobIds); SubscriptionTriggeringJobDetails activeJob = myActiveJobs.get(0); @@ -201,13 +227,12 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc if (myActiveJobs.size() > 0) { remainingJobsMsg = "(" + myActiveJobs.size() + " jobs remaining)"; } - 
ourLog.info("Subscription triggering job {} is complete{}", activeJob.getJobId(), remainingJobsMsg); + ourLog.info( + "Subscription triggering job {} is complete{}", activeJob.getJobId(), remainingJobsMsg); } } } - } - } private void runJob(SubscriptionTriggeringJobDetails theJobDetails) { @@ -233,7 +258,9 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc // This is the job initial step where we set ourselves up to do the actual re-submitting of resources // to the broker. Note that querying of resource can be done synchronously or asynchronously - if (isInitialStep(theJobDetails) && isNotEmpty(theJobDetails.getRemainingSearchUrls()) && totalSubmitted < myMaxSubmitPerPass) { + if (isInitialStep(theJobDetails) + && isNotEmpty(theJobDetails.getRemainingSearchUrls()) + && totalSubmitted < myMaxSubmitPerPass) { String nextSearchUrl = theJobDetails.getRemainingSearchUrls().remove(0); RuntimeResourceDefinition resourceDef = UrlUtil.parseUrlResourceType(myFhirContext, nextSearchUrl); @@ -245,7 +272,13 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc ourLog.info("Triggering job[{}] is starting a search for {}", theJobDetails.getJobId(), nextSearchUrl); - search = mySearchCoordinatorSvc.registerSearch(callingDao, params, resourceType, new CacheControlDirective(), null, RequestPartitionId.allPartitions()); + search = mySearchCoordinatorSvc.registerSearch( + callingDao, + params, + resourceType, + new CacheControlDirective(), + null, + RequestPartitionId.allPartitions()); if (isNull(search.getUuid())) { // we don't have a search uuid i.e. we're setting up for synchronous processing @@ -270,7 +303,11 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc String searchUrl = theJobDetails.getCurrentSearchUrl(); - ourLog.info("Triggered job [{}] - Starting synchronous processing at offset {} and index {}", theJobDetails.getJobId(), theJobDetails.getCurrentOffset(), fromIndex); + ourLog.info( + "Triggered job [{}] - Starting synchronous processing at offset {} and index {}", + theJobDetails.getJobId(), + theJobDetails.getCurrentOffset(), + fromIndex); int submittableCount = myMaxSubmitPerPass - totalSubmitted; int toIndex = fromIndex + submittableCount; @@ -293,16 +330,21 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc params.setOffset(offset); params.setCount(toIndex); - ourLog.info("Triggered job[{}] requesting {} resources from offset {}", theJobDetails.getJobId(), toIndex, offset); + ourLog.info( + "Triggered job[{}] requesting {} resources from offset {}", + theJobDetails.getJobId(), + toIndex, + offset); - search = mySearchService.executeQuery(resourceDef.getName(), params, RequestPartitionId.allPartitions()); + search = + mySearchService.executeQuery(resourceDef.getName(), params, RequestPartitionId.allPartitions()); allCurrentResources = search.getAllResources(); } - ourLog.info("Triggered job[{}] delivering {} resources", theJobDetails.getJobId(), allCurrentResources.size()); + ourLog.info( + "Triggered job[{}] delivering {} resources", theJobDetails.getJobId(), allCurrentResources.size()); int highestIndexSubmitted = theJobDetails.getCurrentSearchLastUploadedIndex(); - for (IBaseResource nextResource : allCurrentResources) { Future future = submitResource(theJobDetails.getSubscriptionId(), nextResource); futures.add(Pair.of(nextResource.getIdElement().getIdPart(), future)); @@ -316,23 +358,31 @@ public class SubscriptionTriggeringSvcImpl implements 
ISubscriptionTriggeringSvc theJobDetails.setCurrentSearchLastUploadedIndex(highestIndexSubmitted); - ourLog.info("Triggered job[{}] lastUploadedIndex is {}", theJobDetails.getJobId(), theJobDetails.getCurrentSearchLastUploadedIndex()); + ourLog.info( + "Triggered job[{}] lastUploadedIndex is {}", + theJobDetails.getJobId(), + theJobDetails.getCurrentSearchLastUploadedIndex()); - if (allCurrentResources.isEmpty() || nonNull(theJobDetails.getCurrentSearchCount()) && toIndex >= theJobDetails.getCurrentSearchCount()) { - ourLog.info("Triggered job[{}] for search URL {} has completed ", theJobDetails.getJobId(), theJobDetails.getCurrentSearchUrl()); + if (allCurrentResources.isEmpty() + || nonNull(theJobDetails.getCurrentSearchCount()) + && toIndex >= theJobDetails.getCurrentSearchCount()) { + ourLog.info( + "Triggered job[{}] for search URL {} has completed ", + theJobDetails.getJobId(), + theJobDetails.getCurrentSearchUrl()); theJobDetails.setCurrentSearchResourceType(null); theJobDetails.clearCurrentSearchUrl(); theJobDetails.setCurrentSearchLastUploadedIndex(-1); theJobDetails.setCurrentSearchCount(null); } - } // processing step for asynchronous processing mode if (isNotBlank(theJobDetails.getCurrentSearchUuid()) && totalSubmitted < myMaxSubmitPerPass) { int fromIndex = theJobDetails.getCurrentSearchLastUploadedIndex() + 1; - IFhirResourceDao resourceDao = myDaoRegistry.getResourceDao(theJobDetails.getCurrentSearchResourceType()); + IFhirResourceDao resourceDao = + myDaoRegistry.getResourceDao(theJobDetails.getCurrentSearchResourceType()); int maxQuerySize = myMaxSubmitPerPass - totalSubmitted; int toIndex; @@ -342,26 +392,32 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc toIndex = fromIndex + maxQuerySize; } - ourLog.info("Triggering job[{}] search {} requesting resources {} - {}", theJobDetails.getJobId(), theJobDetails.getCurrentSearchUuid(), fromIndex, toIndex); - + ourLog.info( + "Triggering job[{}] search {} requesting resources {} - {}", + theJobDetails.getJobId(), + theJobDetails.getCurrentSearchUuid(), + fromIndex, + toIndex); List> resourceIds; RequestPartitionId requestPartitionId = RequestPartitionId.allPartitions(); - resourceIds = mySearchCoordinatorSvc.getResources(theJobDetails.getCurrentSearchUuid(), fromIndex, toIndex, null, requestPartitionId); + resourceIds = mySearchCoordinatorSvc.getResources( + theJobDetails.getCurrentSearchUuid(), fromIndex, toIndex, null, requestPartitionId); ourLog.info("Triggering job[{}] delivering {} resources", theJobDetails.getJobId(), resourceIds.size()); int highestIndexSubmitted = theJobDetails.getCurrentSearchLastUploadedIndex(); String resourceType = myFhirContext.getResourceType(theJobDetails.getCurrentSearchResourceType()); - RuntimeResourceDefinition resourceDef = myFhirContext.getResourceDefinition(theJobDetails.getCurrentSearchResourceType()); - ISearchBuilder searchBuilder = mySearchBuilderFactory.newSearchBuilder(resourceDao, resourceType, resourceDef.getImplementingClass()); + RuntimeResourceDefinition resourceDef = + myFhirContext.getResourceDefinition(theJobDetails.getCurrentSearchResourceType()); + ISearchBuilder searchBuilder = mySearchBuilderFactory.newSearchBuilder( + resourceDao, resourceType, resourceDef.getImplementingClass()); List listToPopulate = new ArrayList<>(); - myTransactionService - .withSystemRequest() - .execute(() -> { - searchBuilder.loadResourcesByPid(resourceIds, Collections.emptyList(), listToPopulate, false, new SystemRequestDetails()); - }); + 
myTransactionService.withSystemRequest().execute(() -> { + searchBuilder.loadResourcesByPid( + resourceIds, Collections.emptyList(), listToPopulate, false, new SystemRequestDetails()); + }); for (IBaseResource nextResource : listToPopulate) { Future future = submitResource(theJobDetails.getSubscriptionId(), nextResource); @@ -376,8 +432,13 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc theJobDetails.setCurrentSearchLastUploadedIndex(highestIndexSubmitted); - if (resourceIds.size() == 0 || (theJobDetails.getCurrentSearchCount() != null && toIndex >= theJobDetails.getCurrentSearchCount())) { - ourLog.info("Triggering job[{}] search {} has completed ", theJobDetails.getJobId(), theJobDetails.getCurrentSearchUuid()); + if (resourceIds.size() == 0 + || (theJobDetails.getCurrentSearchCount() != null + && toIndex >= theJobDetails.getCurrentSearchCount())) { + ourLog.info( + "Triggering job[{}] search {} has completed ", + theJobDetails.getJobId(), + theJobDetails.getCurrentSearchUuid()); theJobDetails.setCurrentSearchResourceType(null); theJobDetails.setCurrentSearchUuid(null); theJobDetails.setCurrentSearchLastUploadedIndex(-1); @@ -385,7 +446,12 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc } } - ourLog.info("Subscription trigger job[{}] triggered {} resources in {}ms ({} res / second)", theJobDetails.getJobId(), totalSubmitted, sw.getMillis(), sw.getThroughput(totalSubmitted, TimeUnit.SECONDS)); + ourLog.info( + "Subscription trigger job[{}] triggered {} resources in {}ms ({} res / second)", + theJobDetails.getJobId(), + totalSubmitted, + sw.getMillis(), + sw.getThroughput(totalSubmitted, TimeUnit.SECONDS)); } private boolean isInitialStep(SubscriptionTriggeringJobDetails theJobDetails) { @@ -425,9 +491,13 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc private Future submitResource(String theSubscriptionId, IBaseResource theResourceToTrigger) { - ourLog.info("Submitting resource {} to subscription {}", theResourceToTrigger.getIdElement().toUnqualifiedVersionless().getValue(), theSubscriptionId); + ourLog.info( + "Submitting resource {} to subscription {}", + theResourceToTrigger.getIdElement().toUnqualifiedVersionless().getValue(), + theSubscriptionId); - ResourceModifiedMessage msg = new ResourceModifiedMessage(myFhirContext, theResourceToTrigger, ResourceModifiedMessage.OperationTypeEnum.UPDATE); + ResourceModifiedMessage msg = new ResourceModifiedMessage( + myFhirContext, theResourceToTrigger, ResourceModifiedMessage.OperationTypeEnum.UPDATE); msg.setSubscriptionId(theSubscriptionId); return myExecutorService.submit(() -> { @@ -440,14 +510,14 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc throw new InternalErrorException(Msg.code(25) + e); } - ourLog.warn("Exception while retriggering subscriptions (going to sleep and retry): {}", e.toString()); + ourLog.warn( + "Exception while retriggering subscriptions (going to sleep and retry): {}", e.toString()); Thread.sleep(1000); } } return null; }); - } public void cancelAll() { @@ -476,34 +546,30 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc private void createExecutorService() { LinkedBlockingQueue executorQueue = new LinkedBlockingQueue<>(1000); BasicThreadFactory threadFactory = new BasicThreadFactory.Builder() - .namingPattern("SubscriptionTriggering-%d") - .daemon(false) - .priority(Thread.NORM_PRIORITY) - .build(); + 
.namingPattern("SubscriptionTriggering-%d") + .daemon(false) + .priority(Thread.NORM_PRIORITY) + .build(); RejectedExecutionHandler rejectedExecutionHandler = new RejectedExecutionHandler() { @Override public void rejectedExecution(Runnable theRunnable, ThreadPoolExecutor theExecutor) { - ourLog.info("Note: Subscription triggering queue is full ({} elements), waiting for a slot to become available!", executorQueue.size()); + ourLog.info( + "Note: Subscription triggering queue is full ({} elements), waiting for a slot to become available!", + executorQueue.size()); StopWatch sw = new StopWatch(); try { executorQueue.put(theRunnable); } catch (InterruptedException theE) { // Restore interrupted state... Thread.currentThread().interrupt(); - throw new RejectedExecutionException(Msg.code(26) + "Task " + theRunnable.toString() + - " rejected from " + theE.toString()); + throw new RejectedExecutionException( + Msg.code(26) + "Task " + theRunnable.toString() + " rejected from " + theE.toString()); } ourLog.info("Slot become available after {}ms", sw.getMillis()); } }; myExecutorService = new ThreadPoolExecutor( - 10, - 10, - 0L, - TimeUnit.MILLISECONDS, - executorQueue, - threadFactory, - rejectedExecutionHandler); + 10, 10, 0L, TimeUnit.MILLISECONDS, executorQueue, threadFactory, rejectedExecutionHandler); } @Override @@ -627,5 +693,4 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc myCurrentOffset = ObjectUtils.defaultIfNull(theCurrentOffset, 0); } } - } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/util/SubscriptionDebugLogInterceptor.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/util/SubscriptionDebugLogInterceptor.java index ae4265aba23..ebbfd974d38 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/util/SubscriptionDebugLogInterceptor.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/util/SubscriptionDebugLogInterceptor.java @@ -57,7 +57,8 @@ import java.util.function.Function; @Interceptor public class SubscriptionDebugLogInterceptor { - private static final String SUBSCRIPTION_DEBUG_LOG_INTERCEPTOR_PRECHECK = "SubscriptionDebugLogInterceptor_precheck"; + private static final String SUBSCRIPTION_DEBUG_LOG_INTERCEPTOR_PRECHECK = + "SubscriptionDebugLogInterceptor_precheck"; private final Level myLevel; private final EnumMap myLoggers; @@ -88,7 +89,11 @@ public class SubscriptionDebugLogInterceptor { // Delete operations have no payload resourceId = theMessage.getId(); } - log(EventCodeEnum.SUBS1, "Resource {} was submitted to the processing pipeline (op={})", resourceId, theMessage.getOperationType()); + log( + EventCodeEnum.SUBS1, + "Resource {} was submitted to the processing pipeline (op={})", + resourceId, + theMessage.getOperationType()); } /* @@ -103,12 +108,21 @@ public class SubscriptionDebugLogInterceptor { @Hook(Pointcut.SUBSCRIPTION_BEFORE_PERSISTED_RESOURCE_CHECKED) public void step20_beforeChecked(ResourceModifiedMessage theMessage) { - log(EventCodeEnum.SUBS2, "Checking resource {} (op={}) for matching subscriptions", theMessage.getPayloadId(), theMessage.getOperationType()); + log( + EventCodeEnum.SUBS2, + "Checking resource {} (op={}) for matching subscriptions", + theMessage.getPayloadId(), + theMessage.getOperationType()); } @Hook(Pointcut.SUBSCRIPTION_RESOURCE_MATCHED) public void step30_subscriptionMatched(ResourceDeliveryMessage theMessage, InMemoryMatchResult theResult) { - 
log(EventCodeEnum.SUBS3, "Resource {} matched by subscription {} (memory match={})", theMessage.getPayloadId(), theMessage.getSubscription().getIdElementString(), theResult.isInMemory()); + log( + EventCodeEnum.SUBS3, + "Resource {} matched by subscription {} (memory match={})", + theMessage.getPayloadId(), + theMessage.getSubscription().getIdElementString(), + theResult.isInMemory()); } @Hook(Pointcut.SUBSCRIPTION_RESOURCE_DID_NOT_MATCH_ANY_SUBSCRIPTIONS) @@ -118,7 +132,13 @@ public class SubscriptionDebugLogInterceptor { @Hook(Pointcut.SUBSCRIPTION_BEFORE_DELIVERY) public void step40_beforeDelivery(ResourceDeliveryMessage theMessage) { - log(EventCodeEnum.SUBS5, "Delivering resource {} for subscription {} to channel of type {} to endpoint {}", theMessage.getPayloadId(), theMessage.getSubscription().getIdElementString(), theMessage.getSubscription().getChannelType(), theMessage.getSubscription().getEndpointUrl()); + log( + EventCodeEnum.SUBS5, + "Delivering resource {} for subscription {} to channel of type {} to endpoint {}", + theMessage.getPayloadId(), + theMessage.getSubscription().getIdElementString(), + theMessage.getSubscription().getChannelType(), + theMessage.getSubscription().getEndpointUrl()); } @Hook(Pointcut.SUBSCRIPTION_AFTER_DELIVERY_FAILED) @@ -137,19 +157,31 @@ public class SubscriptionDebugLogInterceptor { if (theFailure != null) { failureString = theFailure.toString(); } - log(EventCodeEnum.SUBS6, "Delivery of resource {} for subscription {} to channel of type {} - Failure: {}", payloadId, subscriptionId, channelType, failureString); + log( + EventCodeEnum.SUBS6, + "Delivery of resource {} for subscription {} to channel of type {} - Failure: {}", + payloadId, + subscriptionId, + channelType, + failureString); } @Hook(Pointcut.SUBSCRIPTION_AFTER_DELIVERY) public void step50_afterDelivery(ResourceDeliveryMessage theMessage) { String processingTime = theMessage - .getAttribute(SUBSCRIPTION_DEBUG_LOG_INTERCEPTOR_PRECHECK) - .map(Long::parseLong) - .map(Date::new) - .map(start -> new StopWatch(start).toString()) - .orElse("(unknown)"); + .getAttribute(SUBSCRIPTION_DEBUG_LOG_INTERCEPTOR_PRECHECK) + .map(Long::parseLong) + .map(Date::new) + .map(start -> new StopWatch(start).toString()) + .orElse("(unknown)"); - log(EventCodeEnum.SUBS7, "Finished delivery of resource {} for subscription {} to channel of type {} - Total processing time: {}", theMessage.getPayloadId(), theMessage.getSubscription().getIdElementString(), theMessage.getSubscription().getChannelType(), processingTime); + log( + EventCodeEnum.SUBS7, + "Finished delivery of resource {} for subscription {} to channel of type {} - Total processing time: {}", + theMessage.getPayloadId(), + theMessage.getSubscription().getIdElementString(), + theMessage.getSubscription().getChannelType(), + processingTime); } protected void log(EventCodeEnum theEventCode, String theMessage, Object... theArguments) { @@ -210,9 +242,7 @@ public class SubscriptionDebugLogInterceptor { SUBS7 } - private static Function defaultLogFactory() { return code -> LoggerFactory.getLogger(SubscriptionDebugLogInterceptor.class.getName() + "." 
+ code.name()); } - } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/util/SubscriptionUtil.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/util/SubscriptionUtil.java index 3263ff1d273..07a6e055cac 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/util/SubscriptionUtil.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/util/SubscriptionUtil.java @@ -21,9 +21,9 @@ package ca.uhn.fhir.jpa.subscription.util; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId; -import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription; import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.server.SystemRequestDetails; /** * Utilities for working with the subscription resource @@ -31,7 +31,8 @@ import ca.uhn.fhir.rest.api.server.RequestDetails; public class SubscriptionUtil { public static RequestDetails createRequestDetailForPartitionedRequest(CanonicalSubscription theSubscription) { - RequestPartitionId requestPartitionId = new PartitionablePartitionId(theSubscription.getRequestPartitionId(), null).toPartitionId(); + RequestPartitionId requestPartitionId = + new PartitionablePartitionId(theSubscription.getRequestPartitionId(), null).toPartitionId(); if (theSubscription.getCrossPartitionEnabled()) { requestPartitionId = RequestPartitionId.allPartitions(); diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/ActiveSubscriptionTopicCache.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/ActiveSubscriptionTopicCache.java index 1bda0e9ea48..ad7a9cb24ee 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/ActiveSubscriptionTopicCache.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/ActiveSubscriptionTopicCache.java @@ -64,7 +64,7 @@ public class ActiveSubscriptionTopicCache { return myCache.values(); } - public void remove(String theSubscriptionTopicId) { + public void remove(String theSubscriptionTopicId) { myCache.remove(theSubscriptionTopicId); - } + } } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicCanonicalizer.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicCanonicalizer.java index dcec70eb0bc..fae50cfcb82 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicCanonicalizer.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicCanonicalizer.java @@ -28,17 +28,19 @@ import org.hl7.fhir.r5.model.SubscriptionTopic; public final class SubscriptionTopicCanonicalizer { private static final FhirContext ourFhirContextR5 = FhirContext.forR5(); - private SubscriptionTopicCanonicalizer() { - } + private SubscriptionTopicCanonicalizer() {} public static SubscriptionTopic canonicalizeTopic(FhirContext theFhirContext, IBaseResource theSubscriptionTopic) { switch (theFhirContext.getVersion().getVersion()) { case R4B: - return (SubscriptionTopic) VersionConvertorFactory_43_50.convertResource((org.hl7.fhir.r4b.model.SubscriptionTopic) theSubscriptionTopic); + return (SubscriptionTopic) VersionConvertorFactory_43_50.convertResource( + (org.hl7.fhir.r4b.model.SubscriptionTopic) theSubscriptionTopic); case R5: return 
(SubscriptionTopic) theSubscriptionTopic; default: - throw new UnsupportedOperationException(Msg.code(2337) + "Subscription topics are not supported in FHIR version " + theFhirContext.getVersion().getVersion()); + throw new UnsupportedOperationException( + Msg.code(2337) + "Subscription topics are not supported in FHIR version " + + theFhirContext.getVersion().getVersion()); } } } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicConfig.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicConfig.java index eca243b2a9b..6e53878e4e4 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicConfig.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicConfig.java @@ -37,7 +37,8 @@ public class SubscriptionTopicConfig { } @Bean - SubscriptionTopicSupport subscriptionTopicSupport(FhirContext theFhirContext, DaoRegistry theDaoRegistry, SearchParamMatcher theSearchParamMatcher) { + SubscriptionTopicSupport subscriptionTopicSupport( + FhirContext theFhirContext, DaoRegistry theDaoRegistry, SearchParamMatcher theSearchParamMatcher) { return new SubscriptionTopicSupport(theFhirContext, theDaoRegistry, theSearchParamMatcher); } @@ -52,7 +53,8 @@ public class SubscriptionTopicConfig { } @Bean - SubscriptionTopicValidatingInterceptor subscriptionTopicValidatingInterceptor(FhirContext theFhirContext, SubscriptionQueryValidator theSubscriptionQueryValidator) { + SubscriptionTopicValidatingInterceptor subscriptionTopicValidatingInterceptor( + FhirContext theFhirContext, SubscriptionQueryValidator theSubscriptionQueryValidator) { return new SubscriptionTopicValidatingInterceptor(theFhirContext, theSubscriptionQueryValidator); } } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicDispatchRequest.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicDispatchRequest.java index 50dd1a0911d..4727829ff78 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicDispatchRequest.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicDispatchRequest.java @@ -25,75 +25,88 @@ import ca.uhn.fhir.jpa.topic.filter.ISubscriptionTopicFilterMatcher; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import org.hl7.fhir.instance.model.api.IBaseResource; +import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.List; public class SubscriptionTopicDispatchRequest { - @Nonnull - private final String myTopicUrl; - @Nonnull - private final List myResources; - @Nonnull - private final ISubscriptionTopicFilterMatcher mySubscriptionTopicFilterMatcher; - @Nonnull - private final RestOperationTypeEnum myRequestType; - @Nullable - private final InMemoryMatchResult myInMemoryMatchResult; - @Nullable - private final RequestPartitionId myRequestPartitionId; - @Nullable - private final String myTransactionId; + @Nonnull + private final String myTopicUrl; - /** - * @param theTopicUrl Deliver to subscriptions for this topic - * @param theResources The list of resources to deliver. The first resource will be the primary "focus" resource per the Subscription documentation. - * This list should _not_ include the SubscriptionStatus. The SubscriptionStatus will be added as the first element to - * the delivered bundle. 
The reason for this is that the SubscriptionStatus needs to reference the subscription ID, which is - * not known until the bundle is delivered. - * @param theSubscriptionTopicFilterMatcher is used to match the primary "focus" resource against the subscription filters - * @param theRequestType The type of request that led to this dispatch. This determines the request type of the bundle entries - * @param theInMemoryMatchResult Information about the match event that led to this dispatch that is sent to SUBSCRIPTION_RESOURCE_MATCHED - * @param theRequestPartitionId The request partitions of the request, if any. This is used by subscriptions that need to perform repository - * operations as a part of their delivery. Those repository operations will be performed on the supplied request partitions - * @param theTransactionId The transaction ID of the request, if any. This is used for logging. - * - */ - public SubscriptionTopicDispatchRequest(@Nonnull String theTopicUrl, @Nonnull List theResources, @Nonnull ISubscriptionTopicFilterMatcher theSubscriptionTopicFilterMatcher, @Nonnull RestOperationTypeEnum theRequestType, @Nullable InMemoryMatchResult theInMemoryMatchResult, @Nullable RequestPartitionId theRequestPartitionId, @Nullable String theTransactionId) { - myTopicUrl = theTopicUrl; - myResources = theResources; - mySubscriptionTopicFilterMatcher = theSubscriptionTopicFilterMatcher; - myRequestType = theRequestType; - myInMemoryMatchResult = theInMemoryMatchResult; - myRequestPartitionId = theRequestPartitionId; - myTransactionId = theTransactionId; - } + @Nonnull + private final List myResources; - public String getTopicUrl() { - return myTopicUrl; - } + @Nonnull + private final ISubscriptionTopicFilterMatcher mySubscriptionTopicFilterMatcher; - public List getResources() { - return myResources; - } + @Nonnull + private final RestOperationTypeEnum myRequestType; - public ISubscriptionTopicFilterMatcher getSubscriptionTopicFilterMatcher() { - return mySubscriptionTopicFilterMatcher; - } + @Nullable + private final InMemoryMatchResult myInMemoryMatchResult; - public RestOperationTypeEnum getRequestType() { - return myRequestType; - } + @Nullable + private final RequestPartitionId myRequestPartitionId; - public InMemoryMatchResult getInMemoryMatchResult() { - return myInMemoryMatchResult; - } + @Nullable + private final String myTransactionId; - public RequestPartitionId getRequestPartitionId() { - return myRequestPartitionId; - } + /** + * @param theTopicUrl Deliver to subscriptions for this topic + * @param theResources The list of resources to deliver. The first resource will be the primary "focus" resource per the Subscription documentation. + * This list should _not_ include the SubscriptionStatus. The SubscriptionStatus will be added as the first element to + * the delivered bundle. The reason for this is that the SubscriptionStatus needs to reference the subscription ID, which is + * not known until the bundle is delivered. + * @param theSubscriptionTopicFilterMatcher is used to match the primary "focus" resource against the subscription filters + * @param theRequestType The type of request that led to this dispatch. This determines the request type of the bundle entries + * @param theInMemoryMatchResult Information about the match event that led to this dispatch that is sent to SUBSCRIPTION_RESOURCE_MATCHED + * @param theRequestPartitionId The request partitions of the request, if any. 
This is used by subscriptions that need to perform repository + * operations as a part of their delivery. Those repository operations will be performed on the supplied request partitions + * @param theTransactionId The transaction ID of the request, if any. This is used for logging. + * + */ + public SubscriptionTopicDispatchRequest( + @Nonnull String theTopicUrl, + @Nonnull List theResources, + @Nonnull ISubscriptionTopicFilterMatcher theSubscriptionTopicFilterMatcher, + @Nonnull RestOperationTypeEnum theRequestType, + @Nullable InMemoryMatchResult theInMemoryMatchResult, + @Nullable RequestPartitionId theRequestPartitionId, + @Nullable String theTransactionId) { + myTopicUrl = theTopicUrl; + myResources = theResources; + mySubscriptionTopicFilterMatcher = theSubscriptionTopicFilterMatcher; + myRequestType = theRequestType; + myInMemoryMatchResult = theInMemoryMatchResult; + myRequestPartitionId = theRequestPartitionId; + myTransactionId = theTransactionId; + } - public String getTransactionId() { - return myTransactionId; - } + public String getTopicUrl() { + return myTopicUrl; + } + + public List getResources() { + return myResources; + } + + public ISubscriptionTopicFilterMatcher getSubscriptionTopicFilterMatcher() { + return mySubscriptionTopicFilterMatcher; + } + + public RestOperationTypeEnum getRequestType() { + return myRequestType; + } + + public InMemoryMatchResult getInMemoryMatchResult() { + return myInMemoryMatchResult; + } + + public RequestPartitionId getRequestPartitionId() { + return myRequestPartitionId; + } + + public String getTransactionId() { + return myTransactionId; + } } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicDispatcher.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicDispatcher.java index b4018b45be9..7f0abe36ee9 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicDispatcher.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicDispatcher.java @@ -54,7 +54,11 @@ public class SubscriptionTopicDispatcher { private final SubscriptionMatchDeliverer mySubscriptionMatchDeliverer; private final SubscriptionTopicPayloadBuilder mySubscriptionTopicPayloadBuilder; - public SubscriptionTopicDispatcher(FhirContext theFhirContext, SubscriptionRegistry theSubscriptionRegistry, SubscriptionMatchDeliverer theSubscriptionMatchDeliverer, SubscriptionTopicPayloadBuilder theSubscriptionTopicPayloadBuilder) { + public SubscriptionTopicDispatcher( + FhirContext theFhirContext, + SubscriptionRegistry theSubscriptionRegistry, + SubscriptionMatchDeliverer theSubscriptionMatchDeliverer, + SubscriptionTopicPayloadBuilder theSubscriptionTopicPayloadBuilder) { myFhirContext = theFhirContext; mySubscriptionRegistry = theSubscriptionRegistry; mySubscriptionMatchDeliverer = theSubscriptionMatchDeliverer; @@ -72,9 +76,15 @@ public class SubscriptionTopicDispatcher { * @param theRequestType The type of request that led to this dispatch. 
This determines the request type of the bundle entries * @return The number of subscription notifications that were successfully queued for delivery */ - public int dispatch(String theTopicUrl, List theResources, RestOperationTypeEnum theRequestType) { - SubscriptionTopicDispatchRequest subscriptionTopicDispatchRequest = new SubscriptionTopicDispatchRequest(theTopicUrl, theResources, (f, r) -> InMemoryMatchResult.successfulMatch(), theRequestType, null, null, null); + SubscriptionTopicDispatchRequest subscriptionTopicDispatchRequest = new SubscriptionTopicDispatchRequest( + theTopicUrl, + theResources, + (f, r) -> InMemoryMatchResult.successfulMatch(), + theRequestType, + null, + null, + null); return dispatch(subscriptionTopicDispatchRequest); } @@ -87,7 +97,8 @@ public class SubscriptionTopicDispatcher { public int dispatch(SubscriptionTopicDispatchRequest theSubscriptionTopicDispatchRequest) { int count = 0; - List topicSubscriptions = mySubscriptionRegistry.getTopicSubscriptionsByTopic(theSubscriptionTopicDispatchRequest.getTopicUrl()); + List topicSubscriptions = + mySubscriptionRegistry.getTopicSubscriptionsByTopic(theSubscriptionTopicDispatchRequest.getTopicUrl()); if (!topicSubscriptions.isEmpty()) { for (ActiveSubscription activeSubscription : topicSubscriptions) { boolean success = matchFiltersAndDeliver(theSubscriptionTopicDispatchRequest, activeSubscription); @@ -99,12 +110,14 @@ public class SubscriptionTopicDispatcher { return count; } - - private boolean matchFiltersAndDeliver(SubscriptionTopicDispatchRequest theSubscriptionTopicDispatchRequest, ActiveSubscription theActiveSubscription) { + private boolean matchFiltersAndDeliver( + SubscriptionTopicDispatchRequest theSubscriptionTopicDispatchRequest, + ActiveSubscription theActiveSubscription) { String topicUrl = theSubscriptionTopicDispatchRequest.getTopicUrl(); List resources = theSubscriptionTopicDispatchRequest.getResources(); - ISubscriptionTopicFilterMatcher subscriptionTopicFilterMatcher = theSubscriptionTopicDispatchRequest.getSubscriptionTopicFilterMatcher(); + ISubscriptionTopicFilterMatcher subscriptionTopicFilterMatcher = + theSubscriptionTopicDispatchRequest.getSubscriptionTopicFilterMatcher(); if (resources.size() > 0) { IBaseResource firstResource = resources.get(0); @@ -112,19 +125,28 @@ public class SubscriptionTopicDispatcher { CanonicalSubscription subscription = theActiveSubscription.getSubscription(); CanonicalTopicSubscription topicSubscription = subscription.getTopicSubscription(); if (topicSubscription.hasFilters()) { - ourLog.debug("Checking if resource {} matches {} subscription filters on {}", firstResource.getIdElement().toUnqualifiedVersionless().getValue(), - topicSubscription.getFilters().size(), - subscription.getIdElement(myFhirContext).toUnqualifiedVersionless().getValue()); + ourLog.debug( + "Checking if resource {} matches {} subscription filters on {}", + firstResource.getIdElement().toUnqualifiedVersionless().getValue(), + topicSubscription.getFilters().size(), + subscription + .getIdElement(myFhirContext) + .toUnqualifiedVersionless() + .getValue()); - if (!SubscriptionTopicFilterUtil.matchFilters(firstResource, resourceType, subscriptionTopicFilterMatcher, topicSubscription)) { + if (!SubscriptionTopicFilterUtil.matchFilters( + firstResource, resourceType, subscriptionTopicFilterMatcher, topicSubscription)) { return false; } } } theActiveSubscription.incrementDeliveriesCount(); - IBaseBundle bundlePayload = mySubscriptionTopicPayloadBuilder.buildPayload(resources, 
theActiveSubscription, topicUrl, theSubscriptionTopicDispatchRequest.getRequestType()); + IBaseBundle bundlePayload = mySubscriptionTopicPayloadBuilder.buildPayload( + resources, theActiveSubscription, topicUrl, theSubscriptionTopicDispatchRequest.getRequestType()); bundlePayload.setId(UUID.randomUUID().toString()); - SubscriptionDeliveryRequest subscriptionDeliveryRequest = new SubscriptionDeliveryRequest(bundlePayload, theActiveSubscription, theSubscriptionTopicDispatchRequest); - return mySubscriptionMatchDeliverer.deliverPayload(subscriptionDeliveryRequest, theSubscriptionTopicDispatchRequest.getInMemoryMatchResult()); + SubscriptionDeliveryRequest subscriptionDeliveryRequest = new SubscriptionDeliveryRequest( + bundlePayload, theActiveSubscription, theSubscriptionTopicDispatchRequest); + return mySubscriptionMatchDeliverer.deliverPayload( + subscriptionDeliveryRequest, theSubscriptionTopicDispatchRequest.getInMemoryMatchResult()); } } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicLoader.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicLoader.java index bd19de5b9b4..daaaa3bcc20 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicLoader.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicLoader.java @@ -33,17 +33,17 @@ import org.hl7.fhir.r5.model.SubscriptionTopic; import org.slf4j.Logger; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nonnull; import java.util.HashSet; import java.util.List; import java.util.Set; - +import javax.annotation.Nonnull; public class SubscriptionTopicLoader extends BaseResourceCacheSynchronizer { private static final Logger ourLog = Logs.getSubscriptionTopicLog(); @Autowired private FhirContext myFhirContext; + @Autowired private SubscriptionTopicRegistry mySubscriptionTopicRegistry; @@ -109,8 +109,8 @@ public class SubscriptionTopicLoader extends BaseResourceCacheSynchronizer { } else if (theResource instanceof org.hl7.fhir.r4b.model.SubscriptionTopic) { return SubscriptionTopicCanonicalizer.canonicalizeTopic(myFhirContext, theResource); } else { - throw new IllegalArgumentException(Msg.code(2332) + "Only R4B and R5 SubscriptionTopic is currently supported. Found " + theResource.getClass()); + throw new IllegalArgumentException(Msg.code(2332) + + "Only R4B and R5 SubscriptionTopic is currently supported. 
Found " + theResource.getClass()); } } } - diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicMatcher.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicMatcher.java index ffedc145564..08daa5b6d30 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicMatcher.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicMatcher.java @@ -42,7 +42,8 @@ public class SubscriptionTopicMatcher { List triggers = myTopic.getResourceTrigger(); for (SubscriptionTopic.SubscriptionTopicResourceTriggerComponent next : triggers) { if (resourceName.equals(next.getResource())) { - SubscriptionTriggerMatcher matcher = new SubscriptionTriggerMatcher(mySubscriptionTopicSupport, theMsg, next); + SubscriptionTriggerMatcher matcher = + new SubscriptionTriggerMatcher(mySubscriptionTopicSupport, theMsg, next); InMemoryMatchResult result = matcher.match(); if (result.matched()) { // as soon as one trigger matches, we're done diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicMatchingSubscriber.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicMatchingSubscriber.java index b7a1d7b32ba..f8a195ca174 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicMatchingSubscriber.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicMatchingSubscriber.java @@ -39,29 +39,37 @@ import org.springframework.messaging.Message; import org.springframework.messaging.MessageHandler; import org.springframework.messaging.MessagingException; -import javax.annotation.Nonnull; import java.util.Collection; import java.util.Collections; import java.util.List; +import javax.annotation.Nonnull; public class SubscriptionTopicMatchingSubscriber implements MessageHandler { private static final Logger ourLog = Logs.getSubscriptionTopicLog(); private final FhirContext myFhirContext; + @Autowired SubscriptionTopicSupport mySubscriptionTopicSupport; + @Autowired SubscriptionTopicRegistry mySubscriptionTopicRegistry; + @Autowired SubscriptionRegistry mySubscriptionRegistry; + @Autowired SubscriptionMatchDeliverer mySubscriptionMatchDeliverer; + @Autowired SubscriptionTopicPayloadBuilder mySubscriptionTopicPayloadBuilder; + @Autowired private IInterceptorBroadcaster myInterceptorBroadcaster; + @Autowired private SubscriptionTopicDispatcher mySubscriptionTopicDispatcher; + @Autowired private InMemoryTopicFilterMatcher myInMemoryTopicFilterMatcher; @@ -81,9 +89,9 @@ public class SubscriptionTopicMatchingSubscriber implements MessageHandler { ResourceModifiedMessage msg = ((ResourceModifiedJsonMessage) theMessage).getPayload(); // Interceptor call: SUBSCRIPTION_TOPIC_BEFORE_PERSISTED_RESOURCE_CHECKED - HookParams params = new HookParams() - .add(ResourceModifiedMessage.class, msg); - if (!myInterceptorBroadcaster.callHooks(Pointcut.SUBSCRIPTION_TOPIC_BEFORE_PERSISTED_RESOURCE_CHECKED, params)) { + HookParams params = new HookParams().add(ResourceModifiedMessage.class, msg); + if (!myInterceptorBroadcaster.callHooks( + Pointcut.SUBSCRIPTION_TOPIC_BEFORE_PERSISTED_RESOURCE_CHECKED, params)) { return; } try { @@ -102,17 +110,31 @@ public class SubscriptionTopicMatchingSubscriber implements MessageHandler { InMemoryMatchResult result = matcher.match(theMsg); if (result.matched()) { int deliveries = 
deliverToTopicSubscriptions(theMsg, topic, result); - ourLog.info("Matched topic {} to message {}. Notifications sent to {} subscriptions for delivery.", topic.getUrl(), theMsg, deliveries); + ourLog.info( + "Matched topic {} to message {}. Notifications sent to {} subscriptions for delivery.", + topic.getUrl(), + theMsg, + deliveries); } } } - private int deliverToTopicSubscriptions(ResourceModifiedMessage theMsg, SubscriptionTopic theSubscriptionTopic, InMemoryMatchResult theInMemoryMatchResult) { + private int deliverToTopicSubscriptions( + ResourceModifiedMessage theMsg, + SubscriptionTopic theSubscriptionTopic, + InMemoryMatchResult theInMemoryMatchResult) { String topicUrl = theSubscriptionTopic.getUrl(); IBaseResource matchedResource = theMsg.getNewPayload(myFhirContext); List matchedResourceList = Collections.singletonList(matchedResource); RestOperationTypeEnum restOperationType = theMsg.getOperationType().asRestOperationType(); - return mySubscriptionTopicDispatcher.dispatch(new SubscriptionTopicDispatchRequest(topicUrl, matchedResourceList, myInMemoryTopicFilterMatcher, restOperationType, theInMemoryMatchResult, theMsg.getPartitionId(), theMsg.getTransactionId())); + return mySubscriptionTopicDispatcher.dispatch(new SubscriptionTopicDispatchRequest( + topicUrl, + matchedResourceList, + myInMemoryTopicFilterMatcher, + restOperationType, + theInMemoryMatchResult, + theMsg.getPartitionId(), + theMsg.getTransactionId())); } } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicPayloadBuilder.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicPayloadBuilder.java index 1e7111260e9..2efab3bb355 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicPayloadBuilder.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicPayloadBuilder.java @@ -62,16 +62,22 @@ public class SubscriptionTopicPayloadBuilder { } } - public IBaseBundle buildPayload(List theResources, ActiveSubscription theActiveSubscription, String theTopicUrl, RestOperationTypeEnum theRestOperationType) { + public IBaseBundle buildPayload( + List theResources, + ActiveSubscription theActiveSubscription, + String theTopicUrl, + RestOperationTypeEnum theRestOperationType) { BundleBuilder bundleBuilder = new BundleBuilder(myFhirContext); - IBaseResource notificationStatus = myNotificationStatusBuilder.buildNotificationStatus(theResources, theActiveSubscription, theTopicUrl); + IBaseResource notificationStatus = + myNotificationStatusBuilder.buildNotificationStatus(theResources, theActiveSubscription, theTopicUrl); bundleBuilder.addCollectionEntry(notificationStatus); addResources(bundleBuilder, theResources, theRestOperationType); // WIP STR5 add support for notificationShape include, revinclude - // Note we need to set the bundle type after we add the resources since adding the resources automatically sets the bundle type + // Note we need to set the bundle type after we add the resources since adding the resources automatically sets + // the bundle type setBundleType(bundleBuilder); IBaseBundle retval = bundleBuilder.getBundle(); if (ourLog.isDebugEnabled()) { @@ -81,7 +87,8 @@ public class SubscriptionTopicPayloadBuilder { return retval; } - private static void addResources(BundleBuilder bundleBuilder, List theResources, RestOperationTypeEnum theRestOperationType) { + private static void addResources( + BundleBuilder bundleBuilder, List theResources, 
RestOperationTypeEnum theRestOperationType) { for (IBaseResource resource : theResources) { switch (theRestOperationType) { case CREATE: @@ -112,6 +119,7 @@ public class SubscriptionTopicPayloadBuilder { } private IllegalStateException unsupportedFhirVersionException() { - return new IllegalStateException(Msg.code(2331) + "SubscriptionTopic subscriptions are not supported on FHIR version: " + myFhirVersion); + return new IllegalStateException( + Msg.code(2331) + "SubscriptionTopic subscriptions are not supported on FHIR version: " + myFhirVersion); } } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicRegisteringSubscriber.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicRegisteringSubscriber.java index 126cde3fb65..c2c17a7e5f2 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicRegisteringSubscriber.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicRegisteringSubscriber.java @@ -52,8 +52,10 @@ public class SubscriptionTopicRegisteringSubscriber implements MessageHandler { @Autowired private FhirContext myFhirContext; + @Autowired private SubscriptionTopicRegistry mySubscriptionTopicRegistry; + @Autowired private DaoRegistry myDaoRegistry; @@ -107,7 +109,8 @@ public class SubscriptionTopicRegisteringSubscriber implements MessageHandler { return; } - SubscriptionTopic subscriptionTopic = SubscriptionTopicCanonicalizer.canonicalizeTopic(myFhirContext, payloadResource); + SubscriptionTopic subscriptionTopic = + SubscriptionTopicCanonicalizer.canonicalizeTopic(myFhirContext, payloadResource); if (subscriptionTopic.getStatus() == Enumerations.PublicationStatus.ACTIVE) { mySubscriptionTopicRegistry.register(subscriptionTopic); } else { @@ -124,12 +127,11 @@ public class SubscriptionTopicRegisteringSubscriber implements MessageHandler { private RequestDetails getPartitionAwareRequestDetails(ResourceModifiedMessage payload) { RequestPartitionId payloadPartitionId = payload.getPartitionId(); if (payloadPartitionId == null || payloadPartitionId.isDefaultPartition()) { - // This may look redundant but the package installer STORE_AND_INSTALL Subscriptions when partitioning is enabled + // This may look redundant but the package installer STORE_AND_INSTALL Subscriptions when partitioning is + // enabled // creates a corrupt default partition. This resets it to a clean one. 
payloadPartitionId = RequestPartitionId.defaultPartition(); } return new SystemRequestDetails().setRequestPartitionId(payloadPartitionId); } - - } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicRegistry.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicRegistry.java index f696abe6818..8d02eafb16a 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicRegistry.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicRegistry.java @@ -27,8 +27,7 @@ import java.util.Set; public class SubscriptionTopicRegistry { private final ActiveSubscriptionTopicCache myActiveSubscriptionTopicCache = new ActiveSubscriptionTopicCache(); - public SubscriptionTopicRegistry() { - } + public SubscriptionTopicRegistry() {} public int size() { return myActiveSubscriptionTopicCache.size(); diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicSupport.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicSupport.java index 8798dd1339f..4aa2fc01557 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicSupport.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicSupport.java @@ -28,7 +28,8 @@ public class SubscriptionTopicSupport { private final DaoRegistry myDaoRegistry; private final SearchParamMatcher mySearchParamMatcher; - public SubscriptionTopicSupport(FhirContext theFhirContext, DaoRegistry theDaoRegistry, SearchParamMatcher theSearchParamMatcher) { + public SubscriptionTopicSupport( + FhirContext theFhirContext, DaoRegistry theDaoRegistry, SearchParamMatcher theSearchParamMatcher) { myFhirContext = theFhirContext; myDaoRegistry = theDaoRegistry; mySearchParamMatcher = theSearchParamMatcher; diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicUtil.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicUtil.java index 0250f3d05eb..7d863fc077c 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicUtil.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicUtil.java @@ -26,15 +26,20 @@ import org.hl7.fhir.r5.model.SubscriptionTopic; import java.util.List; public class SubscriptionTopicUtil { - public static boolean matches(BaseResourceMessage.OperationTypeEnum theOperationType, List> theSupportedInteractions) { + public static boolean matches( + BaseResourceMessage.OperationTypeEnum theOperationType, + List> theSupportedInteractions) { for (Enumeration next : theSupportedInteractions) { - if (next.getValue() == SubscriptionTopic.InteractionTrigger.CREATE && theOperationType == BaseResourceMessage.OperationTypeEnum.CREATE) { + if (next.getValue() == SubscriptionTopic.InteractionTrigger.CREATE + && theOperationType == BaseResourceMessage.OperationTypeEnum.CREATE) { return true; } - if (next.getValue() == SubscriptionTopic.InteractionTrigger.UPDATE && theOperationType == BaseResourceMessage.OperationTypeEnum.UPDATE) { + if (next.getValue() == SubscriptionTopic.InteractionTrigger.UPDATE + && theOperationType == BaseResourceMessage.OperationTypeEnum.UPDATE) { return true; } - if (next.getValue() == SubscriptionTopic.InteractionTrigger.DELETE && theOperationType == 
BaseResourceMessage.OperationTypeEnum.DELETE) { + if (next.getValue() == SubscriptionTopic.InteractionTrigger.DELETE + && theOperationType == BaseResourceMessage.OperationTypeEnum.DELETE) { return true; } } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicValidatingInterceptor.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicValidatingInterceptor.java index d303bb00a00..1dde1de9a23 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicValidatingInterceptor.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTopicValidatingInterceptor.java @@ -42,39 +42,54 @@ public class SubscriptionTopicValidatingInterceptor { private final FhirContext myFhirContext; private final SubscriptionQueryValidator mySubscriptionQueryValidator; - public SubscriptionTopicValidatingInterceptor(FhirContext theFhirContext, SubscriptionQueryValidator theSubscriptionQueryValidator) { + public SubscriptionTopicValidatingInterceptor( + FhirContext theFhirContext, SubscriptionQueryValidator theSubscriptionQueryValidator) { myFhirContext = theFhirContext; mySubscriptionQueryValidator = theSubscriptionQueryValidator; } @Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED) - public void resourcePreCreate(IBaseResource theResource, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) { - validateSubmittedSubscriptionTopic(theResource, theRequestDetails, theRequestPartitionId, Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED); + public void resourcePreCreate( + IBaseResource theResource, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) { + validateSubmittedSubscriptionTopic( + theResource, theRequestDetails, theRequestPartitionId, Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED); } @Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED) - public void resourceUpdated(IBaseResource theOldResource, IBaseResource theResource, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) { - validateSubmittedSubscriptionTopic(theResource, theRequestDetails, theRequestPartitionId, Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED); + public void resourceUpdated( + IBaseResource theOldResource, + IBaseResource theResource, + RequestDetails theRequestDetails, + RequestPartitionId theRequestPartitionId) { + validateSubmittedSubscriptionTopic( + theResource, theRequestDetails, theRequestPartitionId, Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED); } @VisibleForTesting - void validateSubmittedSubscriptionTopic(IBaseResource theSubscription, - RequestDetails theRequestDetails, - RequestPartitionId theRequestPartitionId, - Pointcut thePointcut) { - if (Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED != thePointcut && Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED != thePointcut) { - throw new UnprocessableEntityException(Msg.code(2340) + "Expected Pointcut to be either STORAGE_PRESTORAGE_RESOURCE_CREATED or STORAGE_PRESTORAGE_RESOURCE_UPDATED but was: " + thePointcut); + void validateSubmittedSubscriptionTopic( + IBaseResource theSubscription, + RequestDetails theRequestDetails, + RequestPartitionId theRequestPartitionId, + Pointcut thePointcut) { + if (Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED != thePointcut + && Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED != thePointcut) { + throw new UnprocessableEntityException(Msg.code(2340) + + "Expected Pointcut to be either STORAGE_PRESTORAGE_RESOURCE_CREATED or 
STORAGE_PRESTORAGE_RESOURCE_UPDATED but was: " + + thePointcut); } if (!"SubscriptionTopic".equals(myFhirContext.getResourceType(theSubscription))) { return; } - SubscriptionTopic subscriptionTopic = SubscriptionTopicCanonicalizer.canonicalizeTopic(myFhirContext, theSubscription); + SubscriptionTopic subscriptionTopic = + SubscriptionTopicCanonicalizer.canonicalizeTopic(myFhirContext, theSubscription); boolean finished = false; if (subscriptionTopic.getStatus() == null) { - throw new UnprocessableEntityException(Msg.code(2338) + "Can not process submitted SubscriptionTopic - SubscriptionTopic.status must be populated on this server"); + throw new UnprocessableEntityException( + Msg.code(2338) + + "Can not process submitted SubscriptionTopic - SubscriptionTopic.status must be populated on this server"); } switch (subscriptionTopic.getStatus()) { @@ -91,14 +106,15 @@ public class SubscriptionTopicValidatingInterceptor { // strategy with an extension like Subscription? if (!finished) { - subscriptionTopic.getResourceTrigger().stream() - .forEach(t -> validateQueryCriteria(t.getQueryCriteria())); + subscriptionTopic.getResourceTrigger().stream().forEach(t -> validateQueryCriteria(t.getQueryCriteria())); } } - private void validateQueryCriteria(SubscriptionTopic.SubscriptionTopicResourceTriggerQueryCriteriaComponent theQueryCriteria) { + private void validateQueryCriteria( + SubscriptionTopic.SubscriptionTopicResourceTriggerQueryCriteriaComponent theQueryCriteria) { if (theQueryCriteria.getPrevious() != null) { - validateCriteria(theQueryCriteria.getPrevious(), "SubscriptionTopic.resourceTrigger.queryCriteria.previous"); + validateCriteria( + theQueryCriteria.getPrevious(), "SubscriptionTopic.resourceTrigger.queryCriteria.previous"); } if (theQueryCriteria.getCurrent() != null) { validateCriteria(theQueryCriteria.getCurrent(), "SubscriptionTopic.resourceTrigger.queryCriteria.current"); @@ -110,10 +126,12 @@ public class SubscriptionTopicValidatingInterceptor { mySubscriptionQueryValidator.validateCriteria(theCriteria, theFieldName); SubscriptionMatchingStrategy strategy = mySubscriptionQueryValidator.determineStrategy(theCriteria); if (strategy != SubscriptionMatchingStrategy.IN_MEMORY) { - ourLog.warn("Warning: Query Criteria '{}' in {} cannot be evaluated in-memory", theCriteria, theFieldName); + ourLog.warn( + "Warning: Query Criteria '{}' in {} cannot be evaluated in-memory", theCriteria, theFieldName); } } catch (InvalidRequestException | DataFormatException e) { - throw new UnprocessableEntityException(Msg.code(2339) + "Invalid SubscriptionTopic criteria '" + theCriteria + "' in " + theFieldName + ": " + e.getMessage()); + throw new UnprocessableEntityException(Msg.code(2339) + "Invalid SubscriptionTopic criteria '" + theCriteria + + "' in " + theFieldName + ": " + e.getMessage()); } } } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTriggerMatcher.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTriggerMatcher.java index 14d6b3dfc79..d5b39ee5761 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTriggerMatcher.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/SubscriptionTriggerMatcher.java @@ -46,7 +46,10 @@ public class SubscriptionTriggerMatcher { private final PreviousVersionReader myPreviousVersionReader; private final SystemRequestDetails mySrd; - public SubscriptionTriggerMatcher(SubscriptionTopicSupport 
theSubscriptionTopicSupport, ResourceModifiedMessage theMsg, SubscriptionTopic.SubscriptionTopicResourceTriggerComponent theTrigger) { + public SubscriptionTriggerMatcher( + SubscriptionTopicSupport theSubscriptionTopicSupport, + ResourceModifiedMessage theMsg, + SubscriptionTopic.SubscriptionTopicResourceTriggerComponent theTrigger) { mySubscriptionTopicSupport = theSubscriptionTopicSupport; myOperation = theMsg.getOperationType(); myResource = theMsg.getPayload(theSubscriptionTopicSupport.getFhirContext()); @@ -58,9 +61,11 @@ public class SubscriptionTriggerMatcher { } public InMemoryMatchResult match() { - List> supportedInteractions = myTrigger.getSupportedInteraction(); + List> supportedInteractions = + myTrigger.getSupportedInteraction(); if (SubscriptionTopicUtil.matches(myOperation, supportedInteractions)) { - SubscriptionTopic.SubscriptionTopicResourceTriggerQueryCriteriaComponent queryCriteria = myTrigger.getQueryCriteria(); + SubscriptionTopic.SubscriptionTopicResourceTriggerQueryCriteriaComponent queryCriteria = + myTrigger.getQueryCriteria(); InMemoryMatchResult result = match(queryCriteria); if (result.matched()) { return result; @@ -69,7 +74,8 @@ public class SubscriptionTriggerMatcher { return InMemoryMatchResult.noMatch(); } - private InMemoryMatchResult match(SubscriptionTopic.SubscriptionTopicResourceTriggerQueryCriteriaComponent theQueryCriteria) { + private InMemoryMatchResult match( + SubscriptionTopic.SubscriptionTopicResourceTriggerQueryCriteriaComponent theQueryCriteria) { String previousCriteria = theQueryCriteria.getPrevious(); String currentCriteria = theQueryCriteria.getCurrent(); InMemoryMatchResult previousMatches = InMemoryMatchResult.fromBoolean(previousCriteria == null); @@ -84,14 +90,16 @@ public class SubscriptionTriggerMatcher { } if (previousCriteria != null) { - if (myOperation == ResourceModifiedMessage.OperationTypeEnum.UPDATE || - myOperation == ResourceModifiedMessage.OperationTypeEnum.DELETE) { + if (myOperation == ResourceModifiedMessage.OperationTypeEnum.UPDATE + || myOperation == ResourceModifiedMessage.OperationTypeEnum.DELETE) { Optional oPreviousVersion = myPreviousVersionReader.readPreviousVersion(myResource); if (oPreviousVersion.isPresent()) { previousMatches = matchResource(oPreviousVersion.get(), previousCriteria); } else { - ourLog.warn("Resource {} has a version of 1, which should not be the case for a create or delete operation", myResource.getIdElement().toUnqualifiedVersionless()); + ourLog.warn( + "Resource {} has a version of 1, which should not be the case for a create or delete operation", + myResource.getIdElement().toUnqualifiedVersionless()); } } } @@ -104,9 +112,13 @@ public class SubscriptionTriggerMatcher { } private InMemoryMatchResult matchResource(IBaseResource theResource, String theCriteria) { - InMemoryMatchResult result = mySubscriptionTopicSupport.getSearchParamMatcher().match(theCriteria, theResource, mySrd); + InMemoryMatchResult result = + mySubscriptionTopicSupport.getSearchParamMatcher().match(theCriteria, theResource, mySrd); if (!result.supported()) { - ourLog.warn("Subscription topic {} has a query criteria that is not supported in-memory: {}", myTrigger.getId(), theCriteria); + ourLog.warn( + "Subscription topic {} has a query criteria that is not supported in-memory: {}", + myTrigger.getId(), + theCriteria); } return result; } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/filter/ISubscriptionTopicFilterMatcher.java 
b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/filter/ISubscriptionTopicFilterMatcher.java index b3af2b28a9a..519f28338d2 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/filter/ISubscriptionTopicFilterMatcher.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/filter/ISubscriptionTopicFilterMatcher.java @@ -30,5 +30,6 @@ public interface ISubscriptionTopicFilterMatcher { * @param theIBaseResource * @return */ - InMemoryMatchResult match(CanonicalTopicSubscriptionFilter theCanonicalTopicSubscriptionFilter, IBaseResource theIBaseResource); + InMemoryMatchResult match( + CanonicalTopicSubscriptionFilter theCanonicalTopicSubscriptionFilter, IBaseResource theIBaseResource); } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/filter/InMemoryTopicFilterMatcher.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/filter/InMemoryTopicFilterMatcher.java index 5eadd43b18f..198bb2ff319 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/filter/InMemoryTopicFilterMatcher.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/filter/InMemoryTopicFilterMatcher.java @@ -33,7 +33,9 @@ public class InMemoryTopicFilterMatcher implements ISubscriptionTopicFilterMatch } @Override - public InMemoryMatchResult match(CanonicalTopicSubscriptionFilter theCanonicalTopicSubscriptionFilter, IBaseResource theResource) { - return mySearchParamMatcher.match(theCanonicalTopicSubscriptionFilter.asCriteriaString(), theResource, new SystemRequestDetails()); + public InMemoryMatchResult match( + CanonicalTopicSubscriptionFilter theCanonicalTopicSubscriptionFilter, IBaseResource theResource) { + return mySearchParamMatcher.match( + theCanonicalTopicSubscriptionFilter.asCriteriaString(), theResource, new SystemRequestDetails()); } } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/filter/SubscriptionTopicFilterUtil.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/filter/SubscriptionTopicFilterUtil.java index 0731dfe2fdf..99558c07b7f 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/filter/SubscriptionTopicFilterUtil.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/filter/SubscriptionTopicFilterUtil.java @@ -29,21 +29,33 @@ import javax.annotation.Nonnull; public final class SubscriptionTopicFilterUtil { private static final Logger ourLog = Logs.getSubscriptionTopicLog(); - private SubscriptionTopicFilterUtil() { - } - public static boolean matchFilters(@Nonnull IBaseResource theResource, @Nonnull String theResourceType, @Nonnull ISubscriptionTopicFilterMatcher theSubscriptionTopicFilterMatcher, @Nonnull CanonicalTopicSubscription topicSubscription) { + private SubscriptionTopicFilterUtil() {} + + public static boolean matchFilters( + @Nonnull IBaseResource theResource, + @Nonnull String theResourceType, + @Nonnull ISubscriptionTopicFilterMatcher theSubscriptionTopicFilterMatcher, + @Nonnull CanonicalTopicSubscription topicSubscription) { boolean match = true; - for (CanonicalTopicSubscriptionFilter filter : topicSubscription.getFilters()) { - if (filter.getResourceType() == null || "Resource".equals(filter.getResourceType()) || !filter.getResourceType().equals(theResourceType)) { - continue; - } - if (!theSubscriptionTopicFilterMatcher.match(filter, theResource).matched()) { - match = false; - 
ourLog.debug("Resource {} did not match filter {}. Skipping remaining filters.", theResource.getIdElement().toUnqualifiedVersionless().getValue(), filter.asCriteriaString()); - break; - } - ourLog.debug("Resource {} matches filter {}", theResource.getIdElement().toUnqualifiedVersionless().getValue(), filter.asCriteriaString()); + for (CanonicalTopicSubscriptionFilter filter : topicSubscription.getFilters()) { + if (filter.getResourceType() == null + || "Resource".equals(filter.getResourceType()) + || !filter.getResourceType().equals(theResourceType)) { + continue; + } + if (!theSubscriptionTopicFilterMatcher.match(filter, theResource).matched()) { + match = false; + ourLog.debug( + "Resource {} did not match filter {}. Skipping remaining filters.", + theResource.getIdElement().toUnqualifiedVersionless().getValue(), + filter.asCriteriaString()); + break; + } + ourLog.debug( + "Resource {} matches filter {}", + theResource.getIdElement().toUnqualifiedVersionless().getValue(), + filter.asCriteriaString()); } return match; } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/status/INotificationStatusBuilder.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/status/INotificationStatusBuilder.java index 8bc5965c7b6..85de6a70def 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/status/INotificationStatusBuilder.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/status/INotificationStatusBuilder.java @@ -33,5 +33,6 @@ public interface INotificationStatusBuilder { * @param theTopicUrl The topic URL of the topic subscription * @return the notification status resource. The resource type varies depending on the FHIR version. */ - T buildNotificationStatus(List theResources, ActiveSubscription theActiveSubscription, String theTopicUrl); + T buildNotificationStatus( + List theResources, ActiveSubscription theActiveSubscription, String theTopicUrl); } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/status/R4BNotificationStatusBuilder.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/status/R4BNotificationStatusBuilder.java index 405a134d015..cbf3e95ac75 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/status/R4BNotificationStatusBuilder.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/status/R4BNotificationStatusBuilder.java @@ -35,8 +35,10 @@ public class R4BNotificationStatusBuilder implements INotificationStatusBuilder< } @Override - public SubscriptionStatus buildNotificationStatus(List theResources, ActiveSubscription theActiveSubscription, String theTopicUrl) { - org.hl7.fhir.r5.model.SubscriptionStatus subscriptionStatus = myR5NotificationStatusBuilder.buildNotificationStatus(theResources, theActiveSubscription, theTopicUrl); + public SubscriptionStatus buildNotificationStatus( + List theResources, ActiveSubscription theActiveSubscription, String theTopicUrl) { + org.hl7.fhir.r5.model.SubscriptionStatus subscriptionStatus = + myR5NotificationStatusBuilder.buildNotificationStatus(theResources, theActiveSubscription, theTopicUrl); return (SubscriptionStatus) VersionConvertorFactory_43_50.convertResource(subscriptionStatus); } } diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/status/R4NotificationStatusBuilder.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/status/R4NotificationStatusBuilder.java index 
02a9d2310be..68da40e3b99 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/status/R4NotificationStatusBuilder.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/status/R4NotificationStatusBuilder.java @@ -43,18 +43,23 @@ public class R4NotificationStatusBuilder implements INotificationStatusBuilder<Parameters> { } @Override - public Parameters buildNotificationStatus(List<IBaseResource> theResources, ActiveSubscription theActiveSubscription, String theTopicUrl) { + public Parameters buildNotificationStatus( + List<IBaseResource> theResources, ActiveSubscription theActiveSubscription, String theTopicUrl) { Long eventNumber = theActiveSubscription.getDeliveriesCount(); // See http://build.fhir.org/ig/HL7/fhir-subscription-backport-ig/Parameters-r4-notification-status.json.html - // and http://build.fhir.org/ig/HL7/fhir-subscription-backport-ig/StructureDefinition-backport-subscription-status-r4.html + // and + // http://build.fhir.org/ig/HL7/fhir-subscription-backport-ig/StructureDefinition-backport-subscription-status-r4.html Parameters parameters = new Parameters(); parameters.getMeta().addProfile(SubscriptionConstants.SUBSCRIPTION_TOPIC_STATUS); parameters.setId(UUID.randomUUID().toString()); - parameters.addParameter("subscription", new Reference(theActiveSubscription.getSubscription().getIdElement(myFhirContext))); + parameters.addParameter( + "subscription", + new Reference(theActiveSubscription.getSubscription().getIdElement(myFhirContext))); parameters.addParameter("topic", new CanonicalType(theTopicUrl)); parameters.addParameter("status", new CodeType(Subscription.SubscriptionStatus.ACTIVE.toCode())); - parameters.addParameter("type", new CodeType(SubscriptionStatus.SubscriptionNotificationType.EVENTNOTIFICATION.toCode())); + parameters.addParameter( + "type", new CodeType(SubscriptionStatus.SubscriptionNotificationType.EVENTNOTIFICATION.toCode())); // WIP STR5 events-since-subscription-start should be read from the database parameters.addParameter("events-since-subscription-start", eventNumber.toString()); Parameters.ParametersParameterComponent notificationEvent = parameters.addParameter(); @@ -63,7 +68,10 @@ public class R4NotificationStatusBuilder implements INotificationStatusBuilder<Parameters> { if (theResources.size() >
    0) { IBaseResource firstResource = theResources.get(0); - notificationEvent.addPart().setName("focus").setValue(new Reference(firstResource.getIdElement().toUnqualifiedVersionless())); + notificationEvent + .addPart() + .setName("focus") + .setValue(new Reference(firstResource.getIdElement().toUnqualifiedVersionless())); } return parameters; diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/status/R5NotificationStatusBuilder.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/status/R5NotificationStatusBuilder.java index eb4ad1d526a..c9128d9f638 100644 --- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/status/R5NotificationStatusBuilder.java +++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/topic/status/R5NotificationStatusBuilder.java @@ -37,7 +37,8 @@ public class R5NotificationStatusBuilder implements INotificationStatusBuilder theResources, ActiveSubscription theActiveSubscription, String theTopicUrl) { + public SubscriptionStatus buildNotificationStatus( + List theResources, ActiveSubscription theActiveSubscription, String theTopicUrl) { long eventNumber = theActiveSubscription.getDeliveriesCount(); SubscriptionStatus subscriptionStatus = new SubscriptionStatus(); @@ -46,12 +47,14 @@ public class R5NotificationStatusBuilder implements INotificationStatusBuilder 0) { event.setFocus(new Reference(theResources.get(0).getIdElement())); } - subscriptionStatus.setSubscription(new Reference(theActiveSubscription.getSubscription().getIdElement(myFhirContext))); + subscriptionStatus.setSubscription( + new Reference(theActiveSubscription.getSubscription().getIdElement(myFhirContext))); subscriptionStatus.setTopic(theTopicUrl); return subscriptionStatus; } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/dao/DaoTestUtils.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/dao/DaoTestUtils.java index bad1ac68d7b..59cec51e735 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/dao/DaoTestUtils.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/dao/DaoTestUtils.java @@ -36,18 +36,20 @@ public final class DaoTestUtils { private DaoTestUtils() {} - public static void assertConflictException(ResourceVersionConflictException e) { - assertThat(e.getMessage(), matchesPattern( - Msg.code(550) + Msg.code(515) + "Unable to delete [a-zA-Z]+/[0-9]+ because at least one resource has a reference to this resource. First reference found was resource [a-zA-Z]+/[0-9]+ in path [a-zA-Z]+.[a-zA-Z]+")); - } + public static void assertConflictException(ResourceVersionConflictException e) { + assertThat( + e.getMessage(), + matchesPattern( + Msg.code(550) + Msg.code(515) + + "Unable to delete [a-zA-Z]+/[0-9]+ because at least one resource has a reference to this resource. 
First reference found was resource [a-zA-Z]+/[0-9]+ in path [a-zA-Z]+.[a-zA-Z]+")); + } public static void logAllInterceptors(IInterceptorService theInterceptorRegistry) { List allInterceptors = theInterceptorRegistry.getAllRegisteredInterceptors(); - String interceptorList = allInterceptors - .stream() - .map(t -> t.getClass().toString()) - .sorted() - .collect(Collectors.joining("\n * ")); + String interceptorList = allInterceptors.stream() + .map(t -> t.getClass().toString()) + .sorted() + .collect(Collectors.joining("\n * ")); ourLog.info("Registered interceptors:\n * {}", interceptorList); } } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/dao/SimplePartitionTestHelper.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/dao/SimplePartitionTestHelper.java index 333a6e356c9..06e9169dfd7 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/dao/SimplePartitionTestHelper.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/dao/SimplePartitionTestHelper.java @@ -36,7 +36,10 @@ public class SimplePartitionTestHelper implements BeforeEachCallback, AfterEachC private final IInterceptorService myInterceptorRegistry; private final PartitionInterceptorReadAllPartitions myInterceptor = new PartitionInterceptorReadAllPartitions(); - public SimplePartitionTestHelper(PartitionSettings thePartitionSettings, IPartitionLookupSvc thePartitionConfigSvc, IInterceptorService theInterceptorRegistry) { + public SimplePartitionTestHelper( + PartitionSettings thePartitionSettings, + IPartitionLookupSvc thePartitionConfigSvc, + IInterceptorService theInterceptorRegistry) { myPartitionSettings = thePartitionSettings; myPartitionConfigSvc = thePartitionConfigSvc; myInterceptorRegistry = theInterceptorRegistry; @@ -45,7 +48,8 @@ public class SimplePartitionTestHelper implements BeforeEachCallback, AfterEachC @Override public void beforeEach(ExtensionContext context) throws Exception { myPartitionSettings.setPartitioningEnabled(true); - myPartitionConfigSvc.createPartition(new PartitionEntity().setId(TEST_PARTITION_ID).setName(TEST_PARTITION_NAME), null); + myPartitionConfigSvc.createPartition( + new PartitionEntity().setId(TEST_PARTITION_ID).setName(TEST_PARTITION_NAME), null); myInterceptorRegistry.registerInterceptor(myInterceptor); } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/dao/TestDaoSearch.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/dao/TestDaoSearch.java index 5b7ac13b849..471bc7baa1a 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/dao/TestDaoSearch.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/dao/TestDaoSearch.java @@ -22,12 +22,12 @@ package ca.uhn.fhir.jpa.dao; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; -import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.ResourceSearch; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.rest.api.SortSpec; import ca.uhn.fhir.rest.api.server.IBundleProvider; +import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import ca.uhn.fhir.rest.server.IPagingProvider; import ca.uhn.fhir.rest.server.IRestfulServerDefaults; import ca.uhn.fhir.rest.server.method.SortParameter; @@ -42,9 +42,9 @@ import org.springframework.context.annotation.Configuration; import 
org.springframework.web.util.UriComponents; import org.springframework.web.util.UriComponentsBuilder; -import javax.annotation.Nonnull; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import static org.hamcrest.Matchers.everyItem; import static org.hamcrest.Matchers.hasItems; @@ -63,12 +63,11 @@ public class TestDaoSearch { public static class Config { @Bean TestDaoSearch testDaoSearch( - @Autowired FhirContext theFhirContext, - @Autowired DaoRegistry theDaoRegistry, - @Autowired MatchUrlService theMatchUrlService, - @Autowired ISearchParamRegistry theSearchParamRegistry + @Autowired FhirContext theFhirContext, + @Autowired DaoRegistry theDaoRegistry, + @Autowired MatchUrlService theMatchUrlService, + @Autowired ISearchParamRegistry theSearchParamRegistry) { - ) { return new TestDaoSearch(theFhirContext, theDaoRegistry, theMatchUrlService, theSearchParamRegistry); } } @@ -81,7 +80,11 @@ public class TestDaoSearch { final MatchUrlService myMatchUrlService; final ISearchParamRegistry mySearchParamRegistry; - public TestDaoSearch(FhirContext theFhirCtx, DaoRegistry theDaoRegistry, MatchUrlService theMatchUrlService, ISearchParamRegistry theSearchParamRegistry) { + public TestDaoSearch( + FhirContext theFhirCtx, + DaoRegistry theDaoRegistry, + MatchUrlService theMatchUrlService, + ISearchParamRegistry theSearchParamRegistry) { myMatchUrlService = theMatchUrlService; myDaoRegistry = theDaoRegistry; myFhirCtx = theFhirCtx; @@ -98,7 +101,7 @@ public class TestDaoSearch { * @param theQueryUrl FHIR query - e.g. /Patient?name=kelly * @param theIds the resource ids to expect. */ - public void assertSearchFinds(String theReason, String theQueryUrl, String ...theIds) { + public void assertSearchFinds(String theReason, String theQueryUrl, String... theIds) { assertSearchResultIds(theQueryUrl, theReason, hasItems(theIds)); } @@ -108,7 +111,7 @@ public class TestDaoSearch { * @param theQueryUrl FHIR query - e.g. /Patient?name=kelly * @param theIds the id-part of the resource ids to expect. */ - public void assertSearchFinds(String theReason, String theQueryUrl, IIdType...theIds) { + public void assertSearchFinds(String theReason, String theQueryUrl, IIdType... theIds) { String[] bareIds = idTypeToIdParts(theIds); assertSearchResultIds(theQueryUrl, theReason, hasItems(bareIds)); @@ -126,7 +129,7 @@ public class TestDaoSearch { * @param theQueryUrl FHIR query - e.g. /Patient?name=kelly * @param theIds the id-part of the resource ids to not-expect. */ - public void assertSearchNotFound(String theReason, String theQueryUrl, IIdType ...theIds) { + public void assertSearchNotFound(String theReason, String theQueryUrl, IIdType... 
theIds) { List ids = searchForIds(theQueryUrl); MatcherAssert.assertThat(theReason, ids, everyItem(not(in(idTypeToIdParts(theIds))))); @@ -146,13 +149,14 @@ public class TestDaoSearch { return result.getAllResources(); } - public List searchForIds(String theQueryUrl) { + public List searchForIds(String theQueryUrl) { // fake out the server url parsing IBundleProvider result = searchForBundleProvider(theQueryUrl); // getAllResources is not safe as size is not always set - return result.getResources(0, Integer.MAX_VALUE) - .stream().map(resource -> resource.getIdElement().getIdPart()).collect(Collectors.toList()); + return result.getResources(0, Integer.MAX_VALUE).stream() + .map(resource -> resource.getIdElement().getIdPart()) + .collect(Collectors.toList()); } public IBundleProvider searchForBundleProvider(String theQueryUrl, boolean theSynchronousMode) { @@ -161,13 +165,16 @@ public class TestDaoSearch { SearchParameterMap map = search.getSearchParameterMap(); map.setLoadSynchronous(theSynchronousMode); - SortSpec sort = (SortSpec) new SortParameter(myFhirCtx).translateQueryParametersIntoServerArgument(fakeRequestDetailsFromUrl(theQueryUrl), null); + SortSpec sort = (SortSpec) new SortParameter(myFhirCtx) + .translateQueryParametersIntoServerArgument(fakeRequestDetailsFromUrl(theQueryUrl), null); if (sort != null) { map.setSort(sort); } // for asynchronous mode, we also need to make the request paginated ar synchronous is forced - SystemRequestDetails reqDetails = theSynchronousMode ? fakeRequestDetailsFromUrl(theQueryUrl) : fakePaginatedRequestDetailsFromUrl(theQueryUrl); + SystemRequestDetails reqDetails = theSynchronousMode + ? fakeRequestDetailsFromUrl(theQueryUrl) + : fakePaginatedRequestDetailsFromUrl(theQueryUrl); return dao.search(map, reqDetails); } @@ -180,7 +187,8 @@ public class TestDaoSearch { SearchParameterMap map = search.getSearchParameterMap(); map.setLoadSynchronous(true); - SortSpec sort = (SortSpec) new SortParameter(myFhirCtx).translateQueryParametersIntoServerArgument(fakeRequestDetailsFromUrl(theQueryUrl), null); + SortSpec sort = (SortSpec) new SortParameter(myFhirCtx) + .translateQueryParametersIntoServerArgument(fakeRequestDetailsFromUrl(theQueryUrl), null); if (sort != null) { map.setSort(sort); } @@ -190,18 +198,20 @@ public class TestDaoSearch { @Nonnull private SystemRequestDetails fakeRequestDetailsFromUrl(String theQueryUrl) { SystemRequestDetails request = new SystemRequestDetails(); - UriComponents uriComponents = UriComponentsBuilder.fromUriString(theQueryUrl).build(); - uriComponents.getQueryParams() - .forEach((key, value) -> request.addParameter(key, value.toArray(new String[0]))); + UriComponents uriComponents = + UriComponentsBuilder.fromUriString(theQueryUrl).build(); + uriComponents.getQueryParams().forEach((key, value) -> request.addParameter(key, value.toArray(new String[0]))); return request; } @Nonnull private SystemRequestDetails fakePaginatedRequestDetailsFromUrl(String theQueryUrl) { SystemRequestDetails spiedReqDetails = spy(SystemRequestDetails.class); - UriComponents uriComponents = UriComponentsBuilder.fromUriString(theQueryUrl).build(); - uriComponents.getQueryParams() - .forEach((key, value) -> spiedReqDetails.addParameter(key, value.toArray(new String[0]))); + UriComponents uriComponents = + UriComponentsBuilder.fromUriString(theQueryUrl).build(); + uriComponents + .getQueryParams() + .forEach((key, value) -> spiedReqDetails.addParameter(key, value.toArray(new String[0]))); IPagingProvider mockPagingProvider = 
mock(IPagingProvider.class); IRestfulServerDefaults mockServerDfts = mock(IRestfulServerDefaults.class); @@ -209,6 +219,4 @@ public class TestDaoSearch { doReturn(mockPagingProvider).when(mockServerDfts).getPagingProvider(); return spiedReqDetails; } - - } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/DatabaseInitializerHelper.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/DatabaseInitializerHelper.java index 27cf5270d9d..94329e140c1 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/DatabaseInitializerHelper.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/DatabaseInitializerHelper.java @@ -30,14 +30,17 @@ public class DatabaseInitializerHelper { private static final Logger ourLog = LoggerFactory.getLogger(DatabaseInitializerHelper.class); public void initializePersistenceSchema(JpaEmbeddedDatabase theDatabase) { - String fileName = String.format("migration/releases/%s/schema/%s.sql", HapiEmbeddedDatabasesExtension.FIRST_TESTED_VERSION, theDatabase.getDriverType()); + String fileName = String.format( + "migration/releases/%s/schema/%s.sql", + HapiEmbeddedDatabasesExtension.FIRST_TESTED_VERSION, theDatabase.getDriverType()); String sql = getSqlFromResourceFile(fileName); theDatabase.executeSqlAsBatch(sql); } - public void insertPersistenceTestData(JpaEmbeddedDatabase theDatabase) { - String fileName = String.format("migration/releases/%s/data/%s.sql", HapiEmbeddedDatabasesExtension.FIRST_TESTED_VERSION, theDatabase.getDriverType()); + String fileName = String.format( + "migration/releases/%s/data/%s.sql", + HapiEmbeddedDatabasesExtension.FIRST_TESTED_VERSION, theDatabase.getDriverType()); String sql = getSqlFromResourceFile(fileName); theDatabase.insertTestData(sql); } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/H2EmbeddedDatabase.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/H2EmbeddedDatabase.java index f244ad815e1..81291623e77 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/H2EmbeddedDatabase.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/H2EmbeddedDatabase.java @@ -54,7 +54,6 @@ public class H2EmbeddedDatabase extends JpaEmbeddedDatabase { deleteDatabaseDirectoryIfExists(); } - @Override public void disableConstraints() { getJdbcTemplate().execute("SET REFERENTIAL_INTEGRITY = FALSE"); @@ -84,7 +83,8 @@ public class H2EmbeddedDatabase extends JpaEmbeddedDatabase { private void dropTables() { List sql = new ArrayList<>(); - List> tableResult = query("SELECT TABLE_NAME FROM information_schema.tables WHERE TABLE_SCHEMA = 'PUBLIC'"); + List> tableResult = + query("SELECT TABLE_NAME FROM information_schema.tables WHERE TABLE_SCHEMA = 'PUBLIC'"); for (Map result : tableResult) { String tableName = result.get("TABLE_NAME").toString(); sql.add(String.format("DROP TABLE %s CASCADE", tableName)); @@ -94,7 +94,8 @@ public class H2EmbeddedDatabase extends JpaEmbeddedDatabase { private void dropSequences() { List sql = new ArrayList<>(); - List> sequenceResult = query("SELECT * FROM information_schema.sequences WHERE SEQUENCE_SCHEMA = 'PUBLIC'"); + List> sequenceResult = + query("SELECT * FROM information_schema.sequences WHERE SEQUENCE_SCHEMA = 'PUBLIC'"); for (Map sequence : sequenceResult) { String sequenceName = sequence.get("SEQUENCE_NAME").toString(); sql.add(String.format("DROP SEQUENCE %s", 
sequenceName)); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/HapiEmbeddedDatabasesExtension.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/HapiEmbeddedDatabasesExtension.java index 05f7a5cc88f..8ccfb9d6d9b 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/HapiEmbeddedDatabasesExtension.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/HapiEmbeddedDatabasesExtension.java @@ -30,7 +30,6 @@ import org.junit.jupiter.params.provider.ArgumentsProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.sql.DataSource; import java.net.URL; import java.nio.file.Files; import java.nio.file.Paths; @@ -39,6 +38,7 @@ import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.stream.Stream; +import javax.sql.DataSource; public class HapiEmbeddedDatabasesExtension implements AfterAllCallback { @@ -57,7 +57,8 @@ public class HapiEmbeddedDatabasesExtension implements AfterAllCallback { if (canUseOracle()) { myEmbeddedDatabases.add(new OracleEmbeddedDatabase()); } else { - String message = "Cannot add OracleEmbeddedDatabase. If you are using a Mac you must configure the TestContainers API to run using Colima (https://www.testcontainers.org/supported_docker_environment#using-colima)"; + String message = + "Cannot add OracleEmbeddedDatabase. If you are using a Mac you must configure the TestContainers API to run using Colima (https://www.testcontainers.org/supported_docker_environment#using-colima)"; ourLog.warn(message); } } @@ -70,11 +71,10 @@ public class HapiEmbeddedDatabasesExtension implements AfterAllCallback { } public JpaEmbeddedDatabase getEmbeddedDatabase(DriverTypeEnum theDriverType) { - return getAllEmbeddedDatabases() - .stream() - .filter(db -> theDriverType.equals(db.getDriverType())) - .findFirst() - .orElseThrow(); + return getAllEmbeddedDatabases().stream() + .filter(db -> theDriverType.equals(db.getDriverType())) + .findFirst() + .orElseThrow(); } public void clearDatabases() { @@ -138,7 +138,7 @@ public class HapiEmbeddedDatabasesExtension implements AfterAllCallback { private static boolean isColimaConfigured() { return StringUtils.isNotBlank(System.getenv("TESTCONTAINERS_DOCKER_SOCKET_OVERRIDE")) - && StringUtils.isNotBlank(System.getenv("DOCKER_HOST")) - && System.getenv("DOCKER_HOST").contains("colima"); + && StringUtils.isNotBlank(System.getenv("DOCKER_HOST")) + && System.getenv("DOCKER_HOST").contains("colima"); } } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/HapiForeignKeyIndexHelper.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/HapiForeignKeyIndexHelper.java index 16ae8342526..7c6816743f7 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/HapiForeignKeyIndexHelper.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/HapiForeignKeyIndexHelper.java @@ -25,12 +25,12 @@ import org.intellij.lang.annotations.Language; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.sql.DataSource; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.Collection; +import javax.sql.DataSource; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -55,15 +55,16 @@ import static org.junit.jupiter.api.Assertions.assertTrue; public class HapiForeignKeyIndexHelper { 
@Language("SQL") - private static final String FK_QUERY = """ + private static final String FK_QUERY = + """ SELECT c.conrelid::regclass AS "table", - /* list of key column names in order */ - string_agg(a.attname, ',' ORDER BY x.n) AS columns, - pg_catalog.pg_size_pretty( - pg_catalog.pg_relation_size(c.conrelid) - ) AS size, - c.conname AS constraint, - c.confrelid::regclass AS referenced_table + /* list of key column names in order */ + string_agg(a.attname, ',' ORDER BY x.n) AS columns, + pg_catalog.pg_size_pretty( + pg_catalog.pg_relation_size(c.conrelid) + ) AS size, + c.conname AS constraint, + c.confrelid::regclass AS referenced_table FROM pg_catalog.pg_constraint c /* enumerated key column numbers per foreign key */ CROSS JOIN LATERAL @@ -73,16 +74,16 @@ public class HapiForeignKeyIndexHelper { ON a.attnum = x.attnum AND a.attrelid = c.conrelid WHERE NOT EXISTS - /* is there a matching index for the constraint? */ - (SELECT 1 FROM pg_catalog.pg_index i + /* is there a matching index for the constraint? */ + (SELECT 1 FROM pg_catalog.pg_index i WHERE i.indrelid = c.conrelid - /* it must not be a partial index */ - AND i.indpred IS NULL - /* the first index columns must be the same as the - key columns, but order doesn't matter */ - AND (i.indkey::smallint[])[0:cardinality(c.conkey)-1] + /* it must not be a partial index */ + AND i.indpred IS NULL + /* the first index columns must be the same as the + key columns, but order doesn't matter */ + AND (i.indkey::smallint[])[0:cardinality(c.conkey)-1] OPERATOR(pg_catalog.@>) c.conkey) - AND c.contype = 'f' + AND c.contype = 'f' GROUP BY c.conrelid, c.conname, c.confrelid ORDER BY pg_catalog.pg_relation_size(c.conrelid) DESC; """; @@ -126,20 +127,23 @@ public class HapiForeignKeyIndexHelper { String tableName = results.getString("table").toUpperCase(); String columns = results.getString("columns").toUpperCase(); String constraint = results.getString("constraint").toUpperCase(); - String referenced_table = results.getString("referenced_table").toUpperCase(); + String referenced_table = + results.getString("referenced_table").toUpperCase(); - ourLog.warn(String.format("Table %s, Columns %s, Constraint %s, Referenced Table %s", tableName, columns, constraint, referenced_table)); + ourLog.warn(String.format( + "Table %s, Columns %s, Constraint %s, Referenced Table %s", + tableName, columns, constraint, referenced_table)); Collection whiteListColumns = ourTableToColumnsWhitelist.get(tableName); boolean isWhiteListed = whiteListColumns.contains(columns); - assertTrue(isWhiteListed, - String.format("Unindexed foreign key detected! Table.column: %s.%s. Constraint: %s", tableName, columns, constraint) - ); + assertTrue( + isWhiteListed, + String.format( + "Unindexed foreign key detected! Table.column: %s.%s. 
Constraint: %s", + tableName, columns, constraint)); } } } - } - } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/JpaEmbeddedDatabase.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/JpaEmbeddedDatabase.java index e1a67358e8d..ae26426bdee 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/JpaEmbeddedDatabase.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/JpaEmbeddedDatabase.java @@ -19,14 +19,12 @@ */ package ca.uhn.fhir.jpa.embedded; - import ca.uhn.fhir.jpa.migrate.DriverTypeEnum; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.jdbc.core.JdbcTemplate; -import javax.sql.DataSource; import java.sql.Connection; import java.sql.SQLException; import java.sql.Statement; @@ -34,6 +32,7 @@ import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.stream.Collectors; +import javax.sql.DataSource; /** * For testing purposes. diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/MsSqlEmbeddedDatabase.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/MsSqlEmbeddedDatabase.java index f864beca5e2..a912989a442 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/MsSqlEmbeddedDatabase.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/MsSqlEmbeddedDatabase.java @@ -43,10 +43,15 @@ public class MsSqlEmbeddedDatabase extends JpaEmbeddedDatabase { private final MSSQLServerContainer myContainer; public MsSqlEmbeddedDatabase() { - DockerImageName msSqlImage = DockerImageName.parse("mcr.microsoft.com/azure-sql-edge:latest").asCompatibleSubstituteFor("mcr.microsoft.com/mssql/server"); + DockerImageName msSqlImage = DockerImageName.parse("mcr.microsoft.com/azure-sql-edge:latest") + .asCompatibleSubstituteFor("mcr.microsoft.com/mssql/server"); myContainer = new MSSQLServerContainer(msSqlImage).acceptLicense(); myContainer.start(); - super.initialize(DriverTypeEnum.MSSQL_2012, myContainer.getJdbcUrl(), myContainer.getUsername(), myContainer.getPassword()); + super.initialize( + DriverTypeEnum.MSSQL_2012, + myContainer.getJdbcUrl(), + myContainer.getUsername(), + myContainer.getPassword()); } @Override @@ -82,7 +87,8 @@ public class MsSqlEmbeddedDatabase extends JpaEmbeddedDatabase { private void dropForeignKeys() { List sql = new ArrayList<>(); - List> queryResults = query("SELECT * FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS WHERE CONSTRAINT_TYPE = 'FOREIGN KEY'"); + List> queryResults = + query("SELECT * FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS WHERE CONSTRAINT_TYPE = 'FOREIGN KEY'"); for (Map row : queryResults) { String tableName = row.get("TABLE_NAME").toString(); String constraintName = row.get("CONSTRAINT_NAME").toString(); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/OracleEmbeddedDatabase.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/OracleEmbeddedDatabase.java index bb0b453146a..a68463f4f4b 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/OracleEmbeddedDatabase.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/OracleEmbeddedDatabase.java @@ -39,10 +39,13 @@ public class OracleEmbeddedDatabase extends JpaEmbeddedDatabase { private final OracleContainer myContainer; public 
OracleEmbeddedDatabase() { - myContainer = new OracleContainer("gvenzl/oracle-xe:21-slim-faststart") - .withPrivilegedMode(true); + myContainer = new OracleContainer("gvenzl/oracle-xe:21-slim-faststart").withPrivilegedMode(true); myContainer.start(); - super.initialize(DriverTypeEnum.ORACLE_12C, myContainer.getJdbcUrl(), myContainer.getUsername(), myContainer.getPassword()); + super.initialize( + DriverTypeEnum.ORACLE_12C, + myContainer.getJdbcUrl(), + myContainer.getUsername(), + myContainer.getPassword()); } @Override @@ -53,7 +56,8 @@ public class OracleEmbeddedDatabase extends JpaEmbeddedDatabase { @Override public void disableConstraints() { List sql = new ArrayList<>(); - List> queryResults = query("SELECT CONSTRAINT_NAME, TABLE_NAME FROM USER_CONSTRAINTS WHERE CONSTRAINT_TYPE != 'P'"); + List> queryResults = + query("SELECT CONSTRAINT_NAME, TABLE_NAME FROM USER_CONSTRAINTS WHERE CONSTRAINT_TYPE != 'P'"); for (Map row : queryResults) { String tableName = row.get("TABLE_NAME").toString(); String constraintName = row.get("CONSTRAINT_NAME").toString(); @@ -65,7 +69,8 @@ public class OracleEmbeddedDatabase extends JpaEmbeddedDatabase { @Override public void enableConstraints() { List sql = new ArrayList<>(); - List> queryResults = query("SELECT CONSTRAINT_NAME, TABLE_NAME FROM USER_CONSTRAINTS WHERE CONSTRAINT_TYPE != 'P'"); + List> queryResults = + query("SELECT CONSTRAINT_NAME, TABLE_NAME FROM USER_CONSTRAINTS WHERE CONSTRAINT_TYPE != 'P'"); for (Map row : queryResults) { String tableName = row.get("TABLE_NAME").toString(); String constraintName = row.get("CONSTRAINT_NAME").toString(); @@ -87,7 +92,8 @@ public class OracleEmbeddedDatabase extends JpaEmbeddedDatabase { private void dropTables() { List sql = new ArrayList<>(); - List> tableResult = query(String.format("SELECT object_name FROM all_objects WHERE object_type = 'TABLE' AND owner = '%s'", getOwner())); + List> tableResult = query(String.format( + "SELECT object_name FROM all_objects WHERE object_type = 'TABLE' AND owner = '%s'", getOwner())); for (Map result : tableResult) { String tableName = result.get("object_name").toString(); sql.add(String.format("DROP TABLE \"%s\" CASCADE CONSTRAINTS PURGE", tableName)); @@ -97,7 +103,8 @@ public class OracleEmbeddedDatabase extends JpaEmbeddedDatabase { private void dropSequences() { List sql = new ArrayList<>(); - List> tableResult = query(String.format("SELECT object_name FROM all_objects WHERE object_type = 'SEQUENCE' AND owner = '%s'", getOwner())); + List> tableResult = query(String.format( + "SELECT object_name FROM all_objects WHERE object_type = 'SEQUENCE' AND owner = '%s'", getOwner())); for (Map result : tableResult) { String sequenceName = result.get("object_name").toString(); sql.add(String.format("DROP SEQUENCE \"%s\"", sequenceName)); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/PostgresEmbeddedDatabase.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/PostgresEmbeddedDatabase.java index 400836d1adb..53a7cb74c26 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/PostgresEmbeddedDatabase.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/embedded/PostgresEmbeddedDatabase.java @@ -42,7 +42,11 @@ public class PostgresEmbeddedDatabase extends JpaEmbeddedDatabase { public PostgresEmbeddedDatabase() { myContainer = new PostgreSQLContainer(DockerImageName.parse("postgres:latest")); myContainer.start(); - 
super.initialize(DriverTypeEnum.POSTGRES_9_4, myContainer.getJdbcUrl(), myContainer.getUsername(), myContainer.getPassword()); + super.initialize( + DriverTypeEnum.POSTGRES_9_4, + myContainer.getJdbcUrl(), + myContainer.getUsername(), + myContainer.getPassword()); } @Override @@ -84,7 +88,9 @@ public class PostgresEmbeddedDatabase extends JpaEmbeddedDatabase { private void dropSequences() { List sql = new ArrayList<>(); - List> sequenceResult = getJdbcTemplate().queryForList("SELECT sequence_name FROM information_schema.sequences WHERE sequence_schema = 'public'"); + List> sequenceResult = getJdbcTemplate() + .queryForList( + "SELECT sequence_name FROM information_schema.sequences WHERE sequence_schema = 'public'"); for (Map sequence : sequenceResult) { String sequenceName = sequence.get("sequence_name").toString(); sql.add(String.format("DROP SEQUENCE \"%s\" CASCADE", sequenceName)); @@ -94,7 +100,8 @@ public class PostgresEmbeddedDatabase extends JpaEmbeddedDatabase { private List getAllTableNames() { List allTableNames = new ArrayList<>(); - List> queryResults = query("SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'"); + List> queryResults = + query("SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'"); for (Map row : queryResults) { String tableName = row.get("table_name").toString(); allTableNames.add(tableName); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/interceptor/ex/PartitionInterceptorReadAllPartitions.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/interceptor/ex/PartitionInterceptorReadAllPartitions.java index 1ca003b2afd..44f5e653f57 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/interceptor/ex/PartitionInterceptorReadAllPartitions.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/interceptor/ex/PartitionInterceptorReadAllPartitions.java @@ -32,5 +32,4 @@ public class PartitionInterceptorReadAllPartitions { public RequestPartitionId readPartition() { return RequestPartitionId.allPartitions(); } - } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/interceptor/ex/PartitionInterceptorReadPartitionsBasedOnScopes.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/interceptor/ex/PartitionInterceptorReadPartitionsBasedOnScopes.java index 10a3ca05d74..588ffa0cb2e 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/interceptor/ex/PartitionInterceptorReadPartitionsBasedOnScopes.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/interceptor/ex/PartitionInterceptorReadPartitionsBasedOnScopes.java @@ -26,8 +26,8 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; -import javax.servlet.http.HttpServletRequest; import java.util.Set; +import javax.servlet.http.HttpServletRequest; // This class is replicated in PartitionExamples.java -- Keep it up to date there too!! 
@Interceptor @@ -37,16 +37,14 @@ public class PartitionInterceptorReadPartitionsBasedOnScopes { public RequestPartitionId readPartition(ServletRequestDetails theRequest) { HttpServletRequest servletRequest = theRequest.getServletRequest(); - Set approvedScopes = (Set) servletRequest.getAttribute("ca.cdr.servletattribute.session.oidc.approved_scopes"); + Set approvedScopes = + (Set) servletRequest.getAttribute("ca.cdr.servletattribute.session.oidc.approved_scopes"); - String partition = approvedScopes - .stream() - .filter(t->t.startsWith("partition-")) - .map(t->t.substring("partition-".length())) - .findFirst() - .orElseThrow(()->new InvalidRequestException("No partition scopes found in request")); + String partition = approvedScopes.stream() + .filter(t -> t.startsWith("partition-")) + .map(t -> t.substring("partition-".length())) + .findFirst() + .orElseThrow(() -> new InvalidRequestException("No partition scopes found in request")); return RequestPartitionId.fromPartitionName(partition); - } - } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/packages/FakeNpmServlet.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/packages/FakeNpmServlet.java index b7ab0e1016d..7a0e28df80c 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/packages/FakeNpmServlet.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/packages/FakeNpmServlet.java @@ -24,12 +24,12 @@ import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.servlet.http.HttpServlet; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.util.HashMap; import java.util.Map; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; public class FakeNpmServlet extends HttpServlet { private static final Logger ourLog = LoggerFactory.getLogger(FakeNpmServlet.class); @@ -56,7 +56,6 @@ public class FakeNpmServlet extends HttpServlet { resp.sendError(404); } - } public Map getResponses() { diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/BaseResourceProviderR4Test.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/BaseResourceProviderR4Test.java index 67e5a243ded..df75c3611a2 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/BaseResourceProviderR4Test.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/BaseResourceProviderR4Test.java @@ -19,10 +19,10 @@ */ package ca.uhn.fhir.jpa.provider; +import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider; import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeProvider; import ca.uhn.fhir.batch2.jobs.reindex.ReindexProvider; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; -import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider; import ca.uhn.fhir.jpa.dao.data.IPartitionDao; import ca.uhn.fhir.jpa.graphql.GraphQLProvider; import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; @@ -65,74 +65,81 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test { @RegisterExtension protected static HttpClientExtension ourHttpClient = new HttpClientExtension(); + protected int myPort; protected String myServerBase; protected IGenericClient myClient; + @Autowired @RegisterExtension protected RestfulServerExtension myServer; @RegisterExtension 
protected RestfulServerConfigurerExtension myServerConfigurer = new RestfulServerConfigurerExtension(() -> myServer) - .withServerBeforeAll(s -> { - s.registerProviders(myResourceProviders.createProviders()); - s.setDefaultResponseEncoding(EncodingEnum.XML); - s.setDefaultPrettyPrint(false); + .withServerBeforeAll(s -> { + s.registerProviders(myResourceProviders.createProviders()); + s.setDefaultResponseEncoding(EncodingEnum.XML); + s.setDefaultPrettyPrint(false); - myFhirContext.setNarrativeGenerator(new DefaultThymeleafNarrativeGenerator()); + myFhirContext.setNarrativeGenerator(new DefaultThymeleafNarrativeGenerator()); - s.registerProvider(mySystemProvider); - s.registerProvider(myBinaryAccessProvider); - s.registerProvider(myAppCtx.getBean(BulkDataExportProvider.class)); - s.registerProvider(myAppCtx.getBean(DeleteExpungeProvider.class)); - s.registerProvider(myAppCtx.getBean(DiffProvider.class)); - s.registerProvider(myAppCtx.getBean(GraphQLProvider.class)); - s.registerProvider(myAppCtx.getBean(ProcessMessageProvider.class)); - s.registerProvider(myAppCtx.getBean(ReindexProvider.class)); - s.registerProvider(myAppCtx.getBean(SubscriptionTriggeringProvider.class)); - s.registerProvider(myAppCtx.getBean(TerminologyUploaderProvider.class)); - s.registerProvider(myAppCtx.getBean(ValueSetOperationProvider.class)); + s.registerProvider(mySystemProvider); + s.registerProvider(myBinaryAccessProvider); + s.registerProvider(myAppCtx.getBean(BulkDataExportProvider.class)); + s.registerProvider(myAppCtx.getBean(DeleteExpungeProvider.class)); + s.registerProvider(myAppCtx.getBean(DiffProvider.class)); + s.registerProvider(myAppCtx.getBean(GraphQLProvider.class)); + s.registerProvider(myAppCtx.getBean(ProcessMessageProvider.class)); + s.registerProvider(myAppCtx.getBean(ReindexProvider.class)); + s.registerProvider(myAppCtx.getBean(SubscriptionTriggeringProvider.class)); + s.registerProvider(myAppCtx.getBean(TerminologyUploaderProvider.class)); + s.registerProvider(myAppCtx.getBean(ValueSetOperationProvider.class)); - s.setPagingProvider(myAppCtx.getBean(DatabaseBackedPagingProvider.class)); + s.setPagingProvider(myAppCtx.getBean(DatabaseBackedPagingProvider.class)); - JpaCapabilityStatementProvider confProvider = new JpaCapabilityStatementProvider(s, mySystemDao, myStorageSettings, mySearchParamRegistry, myValidationSupport); - confProvider.setImplementationDescription("THIS IS THE DESC"); - s.setServerConformanceProvider(confProvider); + JpaCapabilityStatementProvider confProvider = new JpaCapabilityStatementProvider( + s, mySystemDao, myStorageSettings, mySearchParamRegistry, myValidationSupport); + confProvider.setImplementationDescription("THIS IS THE DESC"); + s.setServerConformanceProvider(confProvider); - // Register a CORS filter - CorsConfiguration config = new CorsConfiguration(); - CorsInterceptor corsInterceptor = new CorsInterceptor(config); - config.addAllowedHeader("Accept"); - config.addAllowedHeader("Access-Control-Request-Headers"); - config.addAllowedHeader("Access-Control-Request-Method"); - config.addAllowedHeader("Cache-Control"); - config.addAllowedHeader("Content-Type"); - config.addAllowedHeader("Origin"); - config.addAllowedHeader("Prefer"); - config.addAllowedHeader("x-fhir-starter"); - config.addAllowedHeader("X-Requested-With"); - config.addAllowedOrigin("*"); - config.addExposedHeader("Location"); - config.addExposedHeader("Content-Location"); - config.setAllowedMethods(Arrays.asList("GET", "POST", "PUT", "DELETE", "OPTIONS")); - 
s.registerInterceptor(corsInterceptor); + // Register a CORS filter + CorsConfiguration config = new CorsConfiguration(); + CorsInterceptor corsInterceptor = new CorsInterceptor(config); + config.addAllowedHeader("Accept"); + config.addAllowedHeader("Access-Control-Request-Headers"); + config.addAllowedHeader("Access-Control-Request-Method"); + config.addAllowedHeader("Cache-Control"); + config.addAllowedHeader("Content-Type"); + config.addAllowedHeader("Origin"); + config.addAllowedHeader("Prefer"); + config.addAllowedHeader("x-fhir-starter"); + config.addAllowedHeader("X-Requested-With"); + config.addAllowedOrigin("*"); + config.addExposedHeader("Location"); + config.addExposedHeader("Content-Location"); + config.setAllowedMethods(Arrays.asList("GET", "POST", "PUT", "DELETE", "OPTIONS")); + s.registerInterceptor(corsInterceptor); + }) + .withServerBeforeEach(s -> { + myPort = myServer.getPort(); + myServerBase = myServer.getBaseUrl(); + myClient = myServer.getFhirClient(); - }).withServerBeforeEach(s -> { - myPort = myServer.getPort(); - myServerBase = myServer.getBaseUrl(); - myClient = myServer.getFhirClient(); + myClient.getInterceptorService().unregisterInterceptorsIf(t -> t instanceof LoggingInterceptor); + if (shouldLogClient()) { + myClient.registerInterceptor(new LoggingInterceptor()); + } + }); - myClient.getInterceptorService().unregisterInterceptorsIf(t -> t instanceof LoggingInterceptor); - if (shouldLogClient()) { - myClient.registerInterceptor(new LoggingInterceptor()); - } - }); @Autowired protected SubscriptionLoader mySubscriptionLoader; + @Autowired protected DaoRegistry myDaoRegistry; + @Autowired protected IPartitionDao myPartitionDao; + @Autowired protected ResourceCountCache myResourceCountsCache; @@ -154,7 +161,10 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test { List names = new ArrayList<>(); for (BundleEntryComponent next : resp.getEntry()) { Patient nextPt = (Patient) next.getResource(); - String nextStr = nextPt.getName().size() > 0 ? nextPt.getName().get(0).getGivenAsSingleString() + " " + nextPt.getName().get(0).getFamily() : ""; + String nextStr = nextPt.getName().size() > 0 + ? 
nextPt.getName().get(0).getGivenAsSingleString() + " " + + nextPt.getName().get(0).getFamily() + : ""; if (isNotBlank(nextStr)) { names.add(nextStr); } @@ -195,7 +205,8 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test { return params; } - public static ParametersParameterComponent getPartByName(ParametersParameterComponent theParameter, String theName) { + public static ParametersParameterComponent getPartByName( + ParametersParameterComponent theParameter, String theName) { for (ParametersParameterComponent part : theParameter.getPart()) { if (part.getName().equals(theName)) { return part; @@ -224,10 +235,10 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test { ourLog.info(resp); Bundle bundle = myFhirContext.newXmlParser().parseResource(Bundle.class, resp); ids = toUnqualifiedVersionlessIdValues(bundle); - ourLog.debug("Observation: \n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(bundle)); + ourLog.debug("Observation: \n" + + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(bundle)); } return ids; } - } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/ServerConfiguration.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/ServerConfiguration.java index 72f26efedf8..aa9eb858e21 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/ServerConfiguration.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/ServerConfiguration.java @@ -35,11 +35,10 @@ public class ServerConfiguration { @Bean public RestfulServerExtension restfulServerExtension(FhirContext theFhirContext) { return new RestfulServerExtension(configureFhirContext(theFhirContext)) - .keepAliveBetweenTests() - .withValidationMode(ServerValidationModeEnum.NEVER) - .withContextPath("/fhir") - .withServletPath("/context/*") - .withSpringWebsocketSupport(BaseJpaTest.WEBSOCKET_CONTEXT, WebsocketDispatcherConfig.class); + .keepAliveBetweenTests() + .withValidationMode(ServerValidationModeEnum.NEVER) + .withContextPath("/fhir") + .withServletPath("/context/*") + .withSpringWebsocketSupport(BaseJpaTest.WEBSOCKET_CONTEXT, WebsocketDispatcherConfig.class); } - } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/r4/BaseResourceProviderR4Test.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/r4/BaseResourceProviderR4Test.java index 10bbdf44e3c..943fa6bbd81 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/r4/BaseResourceProviderR4Test.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/r4/BaseResourceProviderR4Test.java @@ -90,14 +90,19 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test { protected static DatabaseBackedPagingProvider ourPagingProvider; private static GenericWebApplicationContext ourWebApplicationContext; protected IGenericClient myClient; + @Autowired protected SubscriptionLoader mySubscriptionLoader; + @Autowired protected DaoRegistry myDaoRegistry; + @Autowired protected IPartitionDao myPartitionDao; + @Autowired private DeleteExpungeProvider myDeleteExpungeProvider; + @Autowired private ReindexProvider myReindexProvider; @@ -118,7 +123,7 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test { @BeforeEach public void before() throws Exception { super.before(); - + 
myFhirContext.getRestfulClientFactory().setServerValidationMode(ServerValidationModeEnum.NEVER); myFhirContext.getRestfulClientFactory().setSocketTimeout(1200 * 1000); myFhirContext.setParserErrorHandler(new StrictErrorHandler()); @@ -136,7 +141,8 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test { myTerminologyUploaderProvider = myAppCtx.getBean(TerminologyUploaderProvider.class); myDaoRegistry = myAppCtx.getBean(DaoRegistry.class); - ourRestServer.registerProviders(mySystemProvider, myTerminologyUploaderProvider, myDeleteExpungeProvider, myReindexProvider); + ourRestServer.registerProviders( + mySystemProvider, myTerminologyUploaderProvider, myDeleteExpungeProvider, myReindexProvider); ourRestServer.registerProvider(myAppCtx.getBean(GraphQLProvider.class)); ourRestServer.registerProvider(myAppCtx.getBean(DiffProvider.class)); ourRestServer.registerProvider(myAppCtx.getBean(ValueSetOperationProvider.class)); @@ -155,7 +161,10 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test { ourWebApplicationContext = new GenericWebApplicationContext(); ourWebApplicationContext.setParent(myAppCtx); ourWebApplicationContext.refresh(); - proxyHandler.getServletContext().setAttribute(WebApplicationContext.ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE, ourWebApplicationContext); + proxyHandler + .getServletContext() + .setAttribute( + WebApplicationContext.ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE, ourWebApplicationContext); DispatcherServlet dispatcherServlet = new DispatcherServlet(); // dispatcherServlet.setApplicationContext(webApplicationContext); @@ -163,8 +172,7 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test { ServletHolder subsServletHolder = new ServletHolder(); subsServletHolder.setServlet(dispatcherServlet); subsServletHolder.setInitParameter( - ContextLoader.CONFIG_LOCATION_PARAM, - WebsocketDispatcherConfig.class.getName()); + ContextLoader.CONFIG_LOCATION_PARAM, WebsocketDispatcherConfig.class.getName()); proxyHandler.addServlet(subsServletHolder, "/*"); // Register a CORS filter @@ -186,7 +194,8 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test { ourSearchParamRegistry = myAppCtx.getBean(SearchParamRegistryImpl.class); IValidationSupport validationSupport = myAppCtx.getBean(IValidationSupport.class); - ourCapabilityStatementProvider = new JpaCapabilityStatementProvider(ourRestServer, mySystemDao, myStorageSettings, ourSearchParamRegistry, validationSupport); + ourCapabilityStatementProvider = new JpaCapabilityStatementProvider( + ourRestServer, mySystemDao, myStorageSettings, ourSearchParamRegistry, validationSupport); ourCapabilityStatementProvider.setImplementationDescription("THIS IS THE DESC"); ourRestServer.setServerConformanceProvider(ourCapabilityStatementProvider); @@ -195,14 +204,16 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test { ourPort = JettyUtil.getPortForStartedServer(server); ourServerBase = "http://localhost:" + ourPort + "/fhir/context"; - WebApplicationContext wac = WebApplicationContextUtils.getWebApplicationContext(subsServletHolder.getServlet().getServletConfig().getServletContext()); + WebApplicationContext wac = WebApplicationContextUtils.getWebApplicationContext( + subsServletHolder.getServlet().getServletConfig().getServletContext()); myValidationSupport = wac.getBean(IValidationSupport.class); mySearchCoordinatorSvc = wac.getBean(ISearchCoordinatorSvc.class); 
myFhirContext.getRestfulClientFactory().setServerValidationMode(ServerValidationModeEnum.NEVER); myFhirContext.getRestfulClientFactory().setSocketTimeout(400000); - PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS); + PoolingHttpClientConnectionManager connectionManager = + new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS); connectionManager.setMaxTotal(10); connectionManager.setDefaultMaxPerRoute(10); HttpClientBuilder builder = HttpClientBuilder.create(); @@ -231,7 +242,10 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test { List names = new ArrayList<>(); for (BundleEntryComponent next : resp.getEntry()) { Patient nextPt = (Patient) next.getResource(); - String nextStr = nextPt.getName().size() > 0 ? nextPt.getName().get(0).getGivenAsSingleString() + " " + nextPt.getName().get(0).getFamily() : ""; + String nextStr = nextPt.getName().size() > 0 + ? nextPt.getName().get(0).getGivenAsSingleString() + " " + + nextPt.getName().get(0).getFamily() + : ""; if (isNotBlank(nextStr)) { names.add(nextStr); } @@ -239,7 +253,6 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test { return names; } - @AfterAll public static void afterClassClearContextBaseResourceProviderR4Test() throws Exception { JettyUtil.closeServer(ourServer); @@ -285,7 +298,8 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test { return params; } - public static ParametersParameterComponent getPartByName(ParametersParameterComponent theParameter, String theName) { + public static ParametersParameterComponent getPartByName( + ParametersParameterComponent theParameter, String theName) { for (ParametersParameterComponent part : theParameter.getPart()) { if (part.getName().equals(theName)) { return part; @@ -304,5 +318,4 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test { return false; } - } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/search/CompositeSearchParameterTestCases.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/search/CompositeSearchParameterTestCases.java index dc07b29c03d..444704c9a10 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/search/CompositeSearchParameterTestCases.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/search/CompositeSearchParameterTestCases.java @@ -59,7 +59,8 @@ public abstract class CompositeSearchParameterTestCases implements ITestDataBuil final ITestDataBuilder.Support myTestDataBuilder; final TestDaoSearch myTestDaoSearch; - protected CompositeSearchParameterTestCases(ITestDataBuilder.Support theTestDataBuilder, TestDaoSearch theTestDaoSearch) { + protected CompositeSearchParameterTestCases( + ITestDataBuilder.Support theTestDataBuilder, TestDaoSearch theTestDaoSearch) { myTestDataBuilder = theTestDataBuilder; myTestDaoSearch = theTestDaoSearch; } @@ -69,8 +70,6 @@ public abstract class CompositeSearchParameterTestCases implements ITestDataBuil return myTestDataBuilder; } - - /** * Should we run test cases that depend on engine support sub-element correlation? 
* @@ -85,89 +84,88 @@ public abstract class CompositeSearchParameterTestCases implements ITestDataBuil @Test void searchCodeQuantity_onSameComponent_found() { IIdType id1 = createObservation( - withObservationComponent( - withCodingAt("code.coding", SYSTEM_LOINC_ORG, CODE_8480_6, null), - withQuantityAtPath("valueQuantity", 60, null, "mmHg")), - withObservationComponent( - withCodingAt("code.coding", SYSTEM_LOINC_ORG, CODE_3421_5, null), - withQuantityAtPath("valueQuantity", 100, null, "mmHg")) - ); + withObservationComponent( + withCodingAt("code.coding", SYSTEM_LOINC_ORG, CODE_8480_6, null), + withQuantityAtPath("valueQuantity", 60, null, "mmHg")), + withObservationComponent( + withCodingAt("code.coding", SYSTEM_LOINC_ORG, CODE_3421_5, null), + withQuantityAtPath("valueQuantity", 100, null, "mmHg"))); - myTestDaoSearch.assertSearchFinds("search matches both sps in composite", - "Observation?component-code-value-quantity=8480-6$60", id1); + myTestDaoSearch.assertSearchFinds( + "search matches both sps in composite", "Observation?component-code-value-quantity=8480-6$60", id1); } @EnabledIf("isCorrelatedSupported") @Test void searchCodeQuantity_differentComponents_notFound() { createObservation( - withObservationCode(SYSTEM_LOINC_ORG, CODE_8480_6), - withObservationComponent( - withCodingAt("code.coding", SYSTEM_LOINC_ORG, CODE_8480_6), - withQuantityAtPath("valueQuantity", 60, null, "mmHg")), - withObservationComponent( - withCodingAt("code.coding", SYSTEM_LOINC_ORG, CODE_3421_5), - withQuantityAtPath("valueQuantity", 100, null, "mmHg")) - ); + withObservationCode(SYSTEM_LOINC_ORG, CODE_8480_6), + withObservationComponent( + withCodingAt("code.coding", SYSTEM_LOINC_ORG, CODE_8480_6), + withQuantityAtPath("valueQuantity", 60, null, "mmHg")), + withObservationComponent( + withCodingAt("code.coding", SYSTEM_LOINC_ORG, CODE_3421_5), + withQuantityAtPath("valueQuantity", 100, null, "mmHg"))); List ids = myTestDaoSearch.searchForIds("Observation?component-code-value-quantity=8480-6$100"); - assertThat("Search for the value from one component, but the code from the other, so it shouldn't match", ids, empty()); + assertThat( + "Search for the value from one component, but the code from the other, so it shouldn't match", + ids, + empty()); } - @Test void searchCodeCode_onSameComponent_found() { IIdType id1 = createObservation( - withObservationComponent( - withCodingAt("code.coding", SYSTEM_LOINC_ORG, CODE_8480_6, null), - withCodingAt("valueCodeableConcept.coding", SYSTEM_LOINC_ORG, "some-code")), - withObservationComponent( - withCodingAt("code.coding", SYSTEM_LOINC_ORG, CODE_3421_5, null), - withCodingAt("valueCodeableConcept.coding", SYSTEM_LOINC_ORG, "another-code")) - ); + withObservationComponent( + withCodingAt("code.coding", SYSTEM_LOINC_ORG, CODE_8480_6, null), + withCodingAt("valueCodeableConcept.coding", SYSTEM_LOINC_ORG, "some-code")), + withObservationComponent( + withCodingAt("code.coding", SYSTEM_LOINC_ORG, CODE_3421_5, null), + withCodingAt("valueCodeableConcept.coding", SYSTEM_LOINC_ORG, "another-code"))); - myTestDaoSearch.assertSearchFinds("search matches both sps in composite", - "Observation?component-code-value-concept=8480-6$some-code", id1); + myTestDaoSearch.assertSearchFinds( + "search matches both sps in composite", + "Observation?component-code-value-concept=8480-6$some-code", + id1); } @EnabledIf("isCorrelatedSupported") @Test void searchCodeCode_differentComponents_notFound() { createObservation( - withObservationCode(SYSTEM_LOINC_ORG, CODE_8480_6), - 
withObservationComponent( - withCodingAt("code.coding", SYSTEM_LOINC_ORG, CODE_8480_6, null), - withCodingAt("valueCodeableConcept.coding", SYSTEM_LOINC_ORG, "some-code")), - withObservationComponent( - withCodingAt("code.coding", SYSTEM_LOINC_ORG, CODE_3421_5, null), - withCodingAt("valueCodeableConcept.coding", SYSTEM_LOINC_ORG, "another-code")) - ); + withObservationCode(SYSTEM_LOINC_ORG, CODE_8480_6), + withObservationComponent( + withCodingAt("code.coding", SYSTEM_LOINC_ORG, CODE_8480_6, null), + withCodingAt("valueCodeableConcept.coding", SYSTEM_LOINC_ORG, "some-code")), + withObservationComponent( + withCodingAt("code.coding", SYSTEM_LOINC_ORG, CODE_3421_5, null), + withCodingAt("valueCodeableConcept.coding", SYSTEM_LOINC_ORG, "another-code"))); List ids = myTestDaoSearch.searchForIds("Observation?component-code-value-concept=8480-6$another-code"); - assertThat("Search for the value from one component, but the code from the other, so it shouldn't match", ids, empty()); + assertThat( + "Search for the value from one component, but the code from the other, so it shouldn't match", + ids, + empty()); } @Test void searchCodeDate_onSameResource_found() { IIdType id1 = createObservation( - withObservationCode( SYSTEM_LOINC_ORG, CODE_8480_6, null), - withDateTimeAt("valueDateTime", "2020-01-01T12:34:56") - ); + withObservationCode(SYSTEM_LOINC_ORG, CODE_8480_6, null), + withDateTimeAt("valueDateTime", "2020-01-01T12:34:56")); - myTestDaoSearch.assertSearchFinds("search matches both sps in composite", - "Observation?code-value-date=8480-6$lt2021", id1); + myTestDaoSearch.assertSearchFinds( + "search matches both sps in composite", "Observation?code-value-date=8480-6$lt2021", id1); } - @Test void searchCodeString_onSameResource_found() { IIdType id1 = createObservation( - withObservationCode( SYSTEM_LOINC_ORG, CODE_8480_6, null), - withDateTimeAt("valueString", "ABCDEF") - ); + withObservationCode(SYSTEM_LOINC_ORG, CODE_8480_6, null), withDateTimeAt("valueString", "ABCDEF")); - myTestDaoSearch.assertSearchFinds("token code + string prefix matches", - "Observation?code-value-string=8480-6$ABC", id1); + myTestDaoSearch.assertSearchFinds( + "token code + string prefix matches", "Observation?code-value-string=8480-6$ABC", id1); } /** @@ -181,8 +179,10 @@ public abstract class CompositeSearchParameterTestCases implements ITestDataBuil void searchUriNumber_onSameResource_found() { // Combine existing SPs to test uri + number SearchParameter searchParameter = createCompositeSearchParameter("uri-number-compound-test", "RiskAssessment"); - searchParameter.addComponent(componentFrom("http://hl7.org/fhir/SearchParameter/Resource-source", "meta.source")); - searchParameter.addComponent(componentFrom("http://hl7.org/fhir/SearchParameter/RiskAssessment-probability", "prediction.probability")); + searchParameter.addComponent( + componentFrom("http://hl7.org/fhir/SearchParameter/Resource-source", "meta.source")); + searchParameter.addComponent(componentFrom( + "http://hl7.org/fhir/SearchParameter/RiskAssessment-probability", "prediction.probability")); doCreateResource(searchParameter); // enable this sp. 
@@ -190,18 +190,30 @@ public abstract class CompositeSearchParameterTestCases implements ITestDataBuil RiskAssessment riskAssessment = new RiskAssessment(); riskAssessment.setMeta(new Meta().setSource("https://example.com/ourSource")); - riskAssessment.addPrediction(new RiskAssessment.RiskAssessmentPredictionComponent().setProbability(new DecimalType(0.02))); + riskAssessment.addPrediction( + new RiskAssessment.RiskAssessmentPredictionComponent().setProbability(new DecimalType(0.02))); IIdType raId = doCreateResource(riskAssessment); // verify config - myTestDaoSearch.assertSearchFinds("simple uri search works", "RiskAssessment?_source=https://example.com/ourSource", raId); + myTestDaoSearch.assertSearchFinds( + "simple uri search works", "RiskAssessment?_source=https://example.com/ourSource", raId); myTestDaoSearch.assertSearchFinds("simple number search works", "RiskAssessment?probability=0.02", raId); // verify composite queries - myTestDaoSearch.assertSearchFinds("composite uri + number", "RiskAssessment?uri-number-compound-test=https://example.com/ourSource$0.02", raId); - myTestDaoSearch.assertSearchNotFound("both params must match ", "RiskAssessment?uri-number-compound-test=https://example.com/ourSource$0.08", raId); - myTestDaoSearch.assertSearchNotFound("both params must match ", "RiskAssessment?uri-number-compound-test=https://example.com/otherUrI$0.02", raId); - //verify combo query - myTestDaoSearch.assertSearchFinds("combo uri + number", "RiskAssessment?_source=https://example.com/ourSource&probability=0.02", raId); + myTestDaoSearch.assertSearchFinds( + "composite uri + number", + "RiskAssessment?uri-number-compound-test=https://example.com/ourSource$0.02", + raId); + myTestDaoSearch.assertSearchNotFound( + "both params must match ", + "RiskAssessment?uri-number-compound-test=https://example.com/ourSource$0.08", + raId); + myTestDaoSearch.assertSearchNotFound( + "both params must match ", + "RiskAssessment?uri-number-compound-test=https://example.com/otherUrI$0.02", + raId); + // verify combo query + myTestDaoSearch.assertSearchFinds( + "combo uri + number", "RiskAssessment?_source=https://example.com/ourSource&probability=0.02", raId); } @ParameterizedTest @@ -209,8 +221,10 @@ public abstract class CompositeSearchParameterTestCases implements ITestDataBuil void testComboSearch_withTokenAndNumber_returnsMatchingResources(Extension theExtension) { // Combine existing SPs to test Token + number SearchParameter searchParameter = createCompositeSearchParameter("token-number-combo-test", "RiskAssessment"); - searchParameter.addComponent(componentFrom("http://hl7.org/fhir/SearchParameter/RiskAssessment-method", "RiskAssessment")); - searchParameter.addComponent(componentFrom("http://hl7.org/fhir/SearchParameter/RiskAssessment-probability", "RiskAssessment")); + searchParameter.addComponent( + componentFrom("http://hl7.org/fhir/SearchParameter/RiskAssessment-method", "RiskAssessment")); + searchParameter.addComponent( + componentFrom("http://hl7.org/fhir/SearchParameter/RiskAssessment-probability", "RiskAssessment")); searchParameter.setExtension(List.of(theExtension)); doCreateResource(searchParameter); @@ -219,24 +233,28 @@ public abstract class CompositeSearchParameterTestCases implements ITestDataBuil RiskAssessment riskAssessment = new RiskAssessment(); riskAssessment.setMethod(new CodeableConcept(new Coding(null, "BRCAPRO", null))); - riskAssessment.addPrediction(new RiskAssessment.RiskAssessmentPredictionComponent().setProbability(new DecimalType(0.02))); + 
riskAssessment.addPrediction( + new RiskAssessment.RiskAssessmentPredictionComponent().setProbability(new DecimalType(0.02))); IIdType raId = doCreateResource(riskAssessment); RiskAssessment riskAssessmentNonMatch = new RiskAssessment(); riskAssessmentNonMatch.setMethod(new CodeableConcept(new Coding(null, "NOT_FOUND_CODE", null))); - riskAssessmentNonMatch.addPrediction(new RiskAssessment.RiskAssessmentPredictionComponent().setProbability(new DecimalType(0.03))); + riskAssessmentNonMatch.addPrediction( + new RiskAssessment.RiskAssessmentPredictionComponent().setProbability(new DecimalType(0.03))); doCreateResource(riskAssessmentNonMatch); // verify combo query myTestDaoSearch.assertSearchFinds("combo uri + number", "RiskAssessment?method=BRCAPRO&probability=0.02", raId); - myTestDaoSearch.assertSearchNotFound("both params must match", "RiskAssessment?method=CODE&probability=0.02", raId); - myTestDaoSearch.assertSearchNotFound("both params must match", "RiskAssessment?method=BRCAPRO&probability=0.09", raId); + myTestDaoSearch.assertSearchNotFound( + "both params must match", "RiskAssessment?method=CODE&probability=0.02", raId); + myTestDaoSearch.assertSearchNotFound( + "both params must match", "RiskAssessment?method=BRCAPRO&probability=0.09", raId); } @ParameterizedTest @MethodSource("extensionProvider") void testComboSearch_withUriAndString_returnsMatchingResources(Extension theExtension) { - //Combine existing SPs to test URI + String + // Combine existing SPs to test URI + String SearchParameter searchParameter = createCompositeSearchParameter("uri-string-combo-test", "Device"); searchParameter.addComponent(componentFrom("http://hl7.org/fhir/SearchParameter/Device-url", "Device")); searchParameter.addComponent(componentFrom("http://hl7.org/fhir/SearchParameter/Device-model", "Device")); @@ -257,9 +275,12 @@ public abstract class CompositeSearchParameterTestCases implements ITestDataBuil deviceNonMatch.setModelNumber("someModelNumber"); // verify combo query - myTestDaoSearch.assertSearchFinds("combo uri + string", "Device?url=http://deviceUrl&model=modelNumber", deviceId); - myTestDaoSearch.assertSearchNotFound("both params must match", "Device?url=http://wrongUrl&model=modelNumber", deviceId); - myTestDaoSearch.assertSearchNotFound("both params must match", "Device?url=http://deviceUrl&model=wrongModel", deviceId); + myTestDaoSearch.assertSearchFinds( + "combo uri + string", "Device?url=http://deviceUrl&model=modelNumber", deviceId); + myTestDaoSearch.assertSearchNotFound( + "both params must match", "Device?url=http://wrongUrl&model=modelNumber", deviceId); + myTestDaoSearch.assertSearchNotFound( + "both params must match", "Device?url=http://deviceUrl&model=wrongModel", deviceId); } private static SearchParameter createCompositeSearchParameter(String theCodeValue, String theBase) { @@ -275,16 +296,21 @@ public abstract class CompositeSearchParameterTestCases implements ITestDataBuil return retVal; } - private SearchParameter.SearchParameterComponentComponent componentFrom(String theDefinition, String theExpression) { - return new SearchParameter.SearchParameterComponentComponent().setDefinition(theDefinition).setExpression(theExpression); + private SearchParameter.SearchParameterComponentComponent componentFrom( + String theDefinition, String theExpression) { + return new SearchParameter.SearchParameterComponentComponent() + .setDefinition(theDefinition) + .setExpression(theExpression); } static Stream extensionProvider() { return Stream.of( - Arguments.of( - new 
Extension(HapiExtensions.EXT_SP_UNIQUE, new BooleanType(false))), // composite SP of type combo with non-unique index - Arguments.of( - new Extension(HapiExtensions.EXT_SP_UNIQUE, new BooleanType(true))) // composite SP of type combo with non-unique index - ); + Arguments.of(new Extension( + HapiExtensions.EXT_SP_UNIQUE, + new BooleanType(false))), // composite SP of type combo with non-unique index + Arguments.of(new Extension( + HapiExtensions.EXT_SP_UNIQUE, + new BooleanType(true))) // composite SP of type combo with non-unique index + ); } } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/search/MockHapiTransactionService.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/search/MockHapiTransactionService.java index d3780313b2f..76a080e4ca1 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/search/MockHapiTransactionService.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/search/MockHapiTransactionService.java @@ -32,5 +32,4 @@ public class MockHapiTransactionService extends HapiTransactionService { protected T doExecute(ExecutionBuilder theExecutionBuilder, TransactionCallback theCallback) { return theCallback.doInTransaction(new SimpleTransactionStatus()); } - } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/search/QuantitySearchParameterTestCases.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/search/QuantitySearchParameterTestCases.java index f549671a0fc..fa35f019994 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/search/QuantitySearchParameterTestCases.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/search/QuantitySearchParameterTestCases.java @@ -54,7 +54,8 @@ public abstract class QuantitySearchParameterTestCases implements ITestDataBuild private IIdType myResourceId; - protected QuantitySearchParameterTestCases(Support theTestDataBuilder, TestDaoSearch theTestDaoSearch, JpaStorageSettings theStorageSettings) { + protected QuantitySearchParameterTestCases( + Support theTestDataBuilder, TestDaoSearch theTestDaoSearch, JpaStorageSettings theStorageSettings) { myTestDataBuilder = theTestDataBuilder; myTestDaoSearch = theTestDaoSearch; myStorageSettings = theStorageSettings; @@ -77,8 +78,9 @@ public abstract class QuantitySearchParameterTestCases implements ITestDataBuild @Test public void noQuantityThrows() { String invalidQtyParam = "|http://another.org"; - DataFormatException thrown = assertThrows(DataFormatException.class, - () -> myTestDaoSearch.searchForIds("/Observation?value-quantity=" + invalidQtyParam)); + DataFormatException thrown = assertThrows( + DataFormatException.class, + () -> myTestDaoSearch.searchForIds("/Observation?value-quantity=" + invalidQtyParam)); assertTrue(thrown.getMessage().startsWith("HAPI-1940: Invalid")); assertTrue(thrown.getMessage().contains(invalidQtyParam)); @@ -86,8 +88,9 @@ public abstract class QuantitySearchParameterTestCases implements ITestDataBuild @Test public void invalidPrefixThrows() { - DataFormatException thrown = assertThrows(DataFormatException.class, - () -> myTestDaoSearch.searchForIds("/Observation?value-quantity=st5.35")); + DataFormatException thrown = assertThrows( + DataFormatException.class, + () -> myTestDaoSearch.searchForIds("/Observation?value-quantity=st5.35")); assertEquals("HAPI-1941: Invalid prefix: \"st\"", thrown.getMessage()); } @@ -133,7 +136,6 @@ public abstract class 
QuantitySearchParameterTestCases implements ITestDataBuild assertFind("when gt", "/Observation?value-quantity=gt0.5"); assertNotFind("when eq", "/Observation?value-quantity=gt0.6"); assertNotFind("when lt", "/Observation?value-quantity=gt0.7"); - } @Test @@ -193,23 +195,34 @@ public abstract class QuantitySearchParameterTestCases implements ITestDataBuild void ltAndOrClauses() { withObservationWithValueQuantity(0.6); - assertFind("when lt0.7 and eq (0.5 or 0.6)", "/Observation?value-quantity=lt0.7&value-quantity=0.5,0.6"); + assertFind( + "when lt0.7 and eq (0.5 or 0.6)", + "/Observation?value-quantity=lt0.7&value-quantity=0.5,0.6"); // make sure it doesn't find everything when using or clauses - assertNotFind("when lt0.4 and eq (0.5 or 0.6)", "/Observation?value-quantity=lt0.4&value-quantity=0.5,0.6"); - assertNotFind("when lt0.7 and eq (0.4 or 0.5)", "/Observation?value-quantity=lt0.7&value-quantity=0.4,0.5"); + assertNotFind( + "when lt0.4 and eq (0.5 or 0.6)", + "/Observation?value-quantity=lt0.4&value-quantity=0.5,0.6"); + assertNotFind( + "when lt0.7 and eq (0.4 or 0.5)", + "/Observation?value-quantity=lt0.7&value-quantity=0.4,0.5"); } @Test void gtAndOrClauses() { withObservationWithValueQuantity(0.6); - assertFind("when gt0.4 and eq (0.5 or 0.6)", "/Observation?value-quantity=gt0.4&value-quantity=0.5,0.6"); - assertNotFind("when gt0.7 and eq (0.5 or 0.7)", "/Observation?value-quantity=gt0.7&value-quantity=0.5,0.7"); - assertNotFind("when gt0.3 and eq (0.4 or 0.5)", "/Observation?value-quantity=gt0.3&value-quantity=0.4,0.5"); + assertFind( + "when gt0.4 and eq (0.5 or 0.6)", + "/Observation?value-quantity=gt0.4&value-quantity=0.5,0.6"); + assertNotFind( + "when gt0.7 and eq (0.5 or 0.7)", + "/Observation?value-quantity=gt0.7&value-quantity=0.5,0.7"); + assertNotFind( + "when gt0.3 and eq (0.4 or 0.5)", + "/Observation?value-quantity=gt0.3&value-quantity=0.4,0.5"); } } - @Nested public class QualifiedOrClauses { @@ -277,36 +290,34 @@ public abstract class QuantitySearchParameterTestCases implements ITestDataBuild assertFind("when le0.5 or le0.6", "/Observation?value-quantity=le0.5,le0.6"); assertNotFind("when le0.5 or le0.59", "/Observation?value-quantity=le0.5,le0.59"); } - } @Test void testMultipleComponentsHandlesAndOr() { IIdType obs1Id = createObservation( - withObservationComponent( - withCodingAt("code.coding", "http://loinc.org", "8480-6"), - withQuantityAtPath("valueQuantity", 107, "http://unitsofmeasure.org", "mm[Hg]")), - withObservationComponent( - withCodingAt("code.coding", "http://loinc.org", "8462-4"), - withQuantityAtPath("valueQuantity", 60, "http://unitsofmeasure.org", "mm[Hg]")) - ); - + withObservationComponent( + withCodingAt("code.coding", "http://loinc.org", "8480-6"), + withQuantityAtPath("valueQuantity", 107, "http://unitsofmeasure.org", "mm[Hg]")), + withObservationComponent( + withCodingAt("code.coding", "http://loinc.org", "8462-4"), + withQuantityAtPath("valueQuantity", 60, "http://unitsofmeasure.org", "mm[Hg]"))); IIdType obs2Id = createObservation( - withObservationComponent( - withCodingAt("code.coding", "http://loinc.org", "8480-6"), - withQuantityAtPath("valueQuantity", 307, "http://unitsofmeasure.org", "mm[Hg]")), - withObservationComponent( - withCodingAt("code.coding", "http://loinc.org", "8462-4"), - withQuantityAtPath("valueQuantity", 260, "http://unitsofmeasure.org", "mm[Hg]")) - ); - + withObservationComponent( + withCodingAt("code.coding", "http://loinc.org", "8480-6"), + withQuantityAtPath("valueQuantity", 307, "http://unitsofmeasure.org", 
"mm[Hg]")), + withObservationComponent( + withCodingAt("code.coding", "http://loinc.org", "8462-4"), + withQuantityAtPath("valueQuantity", 260, "http://unitsofmeasure.org", "mm[Hg]"))); // andClauses { String theUrl = "/Observation?component-value-quantity=107&component-value-quantity=60"; List resourceIds = myTestDaoSearch.searchForIds(theUrl); - assertThat("when same component with qtys 107 and 60", resourceIds, hasItem(equalTo(obs1Id.getIdPart()))); + assertThat( + "when same component with qtys 107 and 60", + resourceIds, + hasItem(equalTo(obs1Id.getIdPart()))); } { String theUrl = "/Observation?component-value-quantity=107&component-value-quantity=260"; @@ -314,11 +325,14 @@ public abstract class QuantitySearchParameterTestCases implements ITestDataBuild assertThat("when same component with qtys 107 and 260", resourceIds, empty()); } - //andAndOrClauses + // andAndOrClauses { String theUrl = "/Observation?component-value-quantity=107&component-value-quantity=gt50,lt70"; List resourceIds = myTestDaoSearch.searchForIds(theUrl); - assertThat("when same component with qtys 107 and lt70,gt80", resourceIds, hasItem(equalTo(obs1Id.getIdPart()))); + assertThat( + "when same component with qtys 107 and lt70,gt80", + resourceIds, + hasItem(equalTo(obs1Id.getIdPart()))); } { String theUrl = "/Observation?component-value-quantity=50,70&component-value-quantity=260"; @@ -330,7 +344,10 @@ public abstract class QuantitySearchParameterTestCases implements ITestDataBuild { String theUrl = "/Observation?component-value-quantity=50,60&component-value-quantity=105,107"; List resourceIds = myTestDaoSearch.searchForIds(theUrl); - assertThat("when same component with qtys 50,60 and 105,107", resourceIds, hasItem(equalTo(obs1Id.getIdPart()))); + assertThat( + "when same component with qtys 50,60 and 105,107", + resourceIds, + hasItem(equalTo(obs1Id.getIdPart()))); } { String theUrl = "/Observation?component-value-quantity=50,60&component-value-quantity=250,260"; @@ -359,14 +376,14 @@ public abstract class QuantitySearchParameterTestCases implements ITestDataBuild @Test public void unitsMustMatch() { myResourceId = createObservation( - withObservationComponent( - withQuantityAtPath("valueQuantity", 42, null, "cats")), - withObservationComponent( - withQuantityAtPath("valueQuantity", 18, null, "dogs"))); + withObservationComponent(withQuantityAtPath("valueQuantity", 42, null, "cats")), + withObservationComponent(withQuantityAtPath("valueQuantity", 18, null, "dogs"))); assertFind("no units matches value", "/Observation?component-value-quantity=42"); assertFind("correct units matches value", "/Observation?component-value-quantity=42||cats"); - assertNotFind("mixed unit from other element in same resource", "/Observation?component-value-quantity=42||dogs"); + assertNotFind( + "mixed unit from other element in same resource", + "/Observation?component-value-quantity=42||dogs"); } } @@ -403,7 +420,6 @@ public abstract class QuantitySearchParameterTestCases implements ITestDataBuild // GET [base]/Observation?value-quantity=ap5.4 :: 5.4(+/- 10%) :: [4.86 ... 
5.94] assertFindIds("when le", Set.of(id2, id3), "/Observation?value-quantity=ap5.4"); } - } } @@ -416,7 +432,7 @@ public abstract class QuantitySearchParameterTestCases implements ITestDataBuild void setUp() { mySavedNomalizedSetting = myStorageSettings.getNormalizedQuantitySearchLevel(); myStorageSettings.setNormalizedQuantitySearchLevel( - NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_SUPPORTED); + NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_SUPPORTED); } @AfterEach @@ -429,7 +445,7 @@ public abstract class QuantitySearchParameterTestCases implements ITestDataBuild @Test public void ne() { - withObservationWithQuantity(0.06, UCUM_CODESYSTEM_URL, "10*6/L" ); + withObservationWithQuantity(0.06, UCUM_CODESYSTEM_URL, "10*6/L"); assertFind("when lt UCUM", "/Observation?value-quantity=ne70|" + UCUM_CODESYSTEM_URL + "|10*3/L"); assertFind("when gt UCUM", "/Observation?value-quantity=ne50|" + UCUM_CODESYSTEM_URL + "|10*3/L"); @@ -438,34 +454,45 @@ public abstract class QuantitySearchParameterTestCases implements ITestDataBuild @Test public void eq() { - withObservationWithQuantity(0.06, UCUM_CODESYSTEM_URL, "10*6/L" ); + withObservationWithQuantity(0.06, UCUM_CODESYSTEM_URL, "10*6/L"); assertFind("when eq UCUM 10*3/L ", "/Observation?value-quantity=60|" + UCUM_CODESYSTEM_URL + "|10*3/L"); - assertFind("when eq UCUM 10*9/L", "/Observation?value-quantity=0.000060|" + UCUM_CODESYSTEM_URL + "|10*9/L"); + assertFind( + "when eq UCUM 10*9/L", + "/Observation?value-quantity=0.000060|" + UCUM_CODESYSTEM_URL + "|10*9/L"); - assertNotFind("when ne UCUM 10*3/L", "/Observation?value-quantity=80|" + UCUM_CODESYSTEM_URL + "|10*3/L"); - assertNotFind("when gt UCUM 10*3/L", "/Observation?value-quantity=50|" + UCUM_CODESYSTEM_URL + "|10*3/L"); - assertNotFind("when lt UCUM 10*3/L", "/Observation?value-quantity=70|" + UCUM_CODESYSTEM_URL + "|10*3/L"); + assertNotFind( + "when ne UCUM 10*3/L", "/Observation?value-quantity=80|" + UCUM_CODESYSTEM_URL + "|10*3/L"); + assertNotFind( + "when gt UCUM 10*3/L", "/Observation?value-quantity=50|" + UCUM_CODESYSTEM_URL + "|10*3/L"); + assertNotFind( + "when lt UCUM 10*3/L", "/Observation?value-quantity=70|" + UCUM_CODESYSTEM_URL + "|10*3/L"); - assertFind("Units required to match and do", "/Observation?value-quantity=60000|" + UCUM_CODESYSTEM_URL + "|/L"); + assertFind( + "Units required to match and do", + "/Observation?value-quantity=60000|" + UCUM_CODESYSTEM_URL + "|/L"); // request generates a quantity which value matches the "value-norm", but not the "code-norm" - assertNotFind("Units required to match and don't", "/Observation?value-quantity=6000000000|" + UCUM_CODESYSTEM_URL + "|cm"); + assertNotFind( + "Units required to match and don't", + "/Observation?value-quantity=6000000000|" + UCUM_CODESYSTEM_URL + "|cm"); } @Test public void ap() { - withObservationWithQuantity(0.06, UCUM_CODESYSTEM_URL, "10*6/L" ); + withObservationWithQuantity(0.06, UCUM_CODESYSTEM_URL, "10*6/L"); assertNotFind("when gt UCUM", "/Observation?value-quantity=ap50|" + UCUM_CODESYSTEM_URL + "|10*3/L"); - assertFind("when little gt UCUM", "/Observation?value-quantity=ap58|" + UCUM_CODESYSTEM_URL + "|10*3/L"); + assertFind( + "when little gt UCUM", "/Observation?value-quantity=ap58|" + UCUM_CODESYSTEM_URL + "|10*3/L"); assertFind("when eq UCUM", "/Observation?value-quantity=ap60|" + UCUM_CODESYSTEM_URL + "|10*3/L"); - assertFind("when a little lt UCUM", "/Observation?value-quantity=ap63|" + UCUM_CODESYSTEM_URL + "|10*3/L"); + assertFind( + "when a little lt UCUM", 
"/Observation?value-quantity=ap63|" + UCUM_CODESYSTEM_URL + "|10*3/L"); assertNotFind("when lt UCUM", "/Observation?value-quantity=ap71|" + UCUM_CODESYSTEM_URL + "|10*3/L"); } @Test public void gt() { - withObservationWithQuantity(0.06, UCUM_CODESYSTEM_URL, "10*6/L" ); + withObservationWithQuantity(0.06, UCUM_CODESYSTEM_URL, "10*6/L"); assertFind("when gt UCUM", "/Observation?value-quantity=gt50|" + UCUM_CODESYSTEM_URL + "|10*3/L"); assertNotFind("when eq UCUM", "/Observation?value-quantity=gt60|" + UCUM_CODESYSTEM_URL + "|10*3/L"); @@ -474,7 +501,7 @@ public abstract class QuantitySearchParameterTestCases implements ITestDataBuild @Test public void ge() { - withObservationWithQuantity(0.06, UCUM_CODESYSTEM_URL, "10*6/L" ); + withObservationWithQuantity(0.06, UCUM_CODESYSTEM_URL, "10*6/L"); assertFind("when gt UCUM", "/Observation?value-quantity=ge50|" + UCUM_CODESYSTEM_URL + "|10*3/L"); assertFind("when eq UCUM", "/Observation?value-quantity=ge60|" + UCUM_CODESYSTEM_URL + "|10*3/L"); @@ -483,7 +510,7 @@ public abstract class QuantitySearchParameterTestCases implements ITestDataBuild @Test public void lt() { - withObservationWithQuantity(0.06, UCUM_CODESYSTEM_URL, "10*6/L" ); + withObservationWithQuantity(0.06, UCUM_CODESYSTEM_URL, "10*6/L"); assertNotFind("when gt", "/Observation?value-quantity=lt50|" + UCUM_CODESYSTEM_URL + "|10*3/L"); assertNotFind("when eq", "/Observation?value-quantity=lt60|" + UCUM_CODESYSTEM_URL + "|10*3/L"); @@ -492,14 +519,13 @@ public abstract class QuantitySearchParameterTestCases implements ITestDataBuild @Test public void le() { - withObservationWithQuantity(0.06, UCUM_CODESYSTEM_URL, "10*6/L" ); + withObservationWithQuantity(0.06, UCUM_CODESYSTEM_URL, "10*6/L"); assertNotFind("when gt", "/Observation?value-quantity=le50|" + UCUM_CODESYSTEM_URL + "|10*3/L"); assertFind("when eq", "/Observation?value-quantity=le60|" + UCUM_CODESYSTEM_URL + "|10*3/L"); assertFind("when lt", "/Observation?value-quantity=le70|" + UCUM_CODESYSTEM_URL + "|10*3/L"); } - /** * "value-quantity" data is stored in a nested object, so if not queried properly * it could return false positives. 
For instance: two Observations for following @@ -511,11 +537,12 @@ public abstract class QuantitySearchParameterTestCases implements ITestDataBuild * */ @Test void nestedMustCorrelate() { - withObservationWithQuantity(0.06, UCUM_CODESYSTEM_URL, "10*6/L" ); - withObservationWithQuantity(0.02, UCUM_CODESYSTEM_URL, "10*3/L" ); + withObservationWithQuantity(0.06, UCUM_CODESYSTEM_URL, "10*6/L"); + withObservationWithQuantity(0.02, UCUM_CODESYSTEM_URL, "10*3/L"); - assertNotFind("when one predicate matches each object", "/Observation" + - "?value-quantity=0.06|" + UCUM_CODESYSTEM_URL + "|10*3/L"); + assertNotFind( + "when one predicate matches each object", + "/Observation" + "?value-quantity=0.06|" + UCUM_CODESYSTEM_URL + "|10*3/L"); } @Nested @@ -523,57 +550,77 @@ public abstract class QuantitySearchParameterTestCases implements ITestDataBuild @Test public void storeCelsiusSearchFahrenheit() { - withObservationWithQuantity(37.5, UCUM_CODESYSTEM_URL, "Cel" ); + withObservationWithQuantity(37.5, UCUM_CODESYSTEM_URL, "Cel"); - assertFind( "when eq UCUM 99.5 degF", "/Observation?value-quantity=99.5|" + UCUM_CODESYSTEM_URL + "|[degF]"); - assertNotFind( "when eq UCUM 101.1 degF", "/Observation?value-quantity=101.1|" + UCUM_CODESYSTEM_URL + "|[degF]"); - assertNotFind( "when eq UCUM 97.8 degF", "/Observation?value-quantity=97.8|" + UCUM_CODESYSTEM_URL + "|[degF]"); + assertFind( + "when eq UCUM 99.5 degF", + "/Observation?value-quantity=99.5|" + UCUM_CODESYSTEM_URL + "|[degF]"); + assertNotFind( + "when eq UCUM 101.1 degF", + "/Observation?value-quantity=101.1|" + UCUM_CODESYSTEM_URL + "|[degF]"); + assertNotFind( + "when eq UCUM 97.8 degF", + "/Observation?value-quantity=97.8|" + UCUM_CODESYSTEM_URL + "|[degF]"); } @Test public void storeFahrenheitSearchCelsius() { - withObservationWithQuantity(99.5, UCUM_CODESYSTEM_URL, "[degF]" ); + withObservationWithQuantity(99.5, UCUM_CODESYSTEM_URL, "[degF]"); - assertFind( "when eq UCUM 37.5 Cel", "/Observation?value-quantity=37.5|" + UCUM_CODESYSTEM_URL + "|Cel"); - assertNotFind( "when eq UCUM 37.3 Cel", "/Observation?value-quantity=37.3|" + UCUM_CODESYSTEM_URL + "|Cel"); - assertNotFind( "when eq UCUM 37.7 Cel", "/Observation?value-quantity=37.7|" + UCUM_CODESYSTEM_URL + "|Cel"); + assertFind( + "when eq UCUM 37.5 Cel", + "/Observation?value-quantity=37.5|" + UCUM_CODESYSTEM_URL + "|Cel"); + assertNotFind( + "when eq UCUM 37.3 Cel", + "/Observation?value-quantity=37.3|" + UCUM_CODESYSTEM_URL + "|Cel"); + assertNotFind( + "when eq UCUM 37.7 Cel", + "/Observation?value-quantity=37.7|" + UCUM_CODESYSTEM_URL + "|Cel"); } } - } - @Nested public class CombinedQueries { @Test void gtAndLt() { - withObservationWithQuantity(0.06, UCUM_CODESYSTEM_URL, "10*6/L" ); + withObservationWithQuantity(0.06, UCUM_CODESYSTEM_URL, "10*6/L"); - assertFind("when gt 50 and lt 70", "/Observation" + - "?value-quantity=gt50|" + UCUM_CODESYSTEM_URL + "|10*3/L" + - "&value-quantity=lt70|" + UCUM_CODESYSTEM_URL + "|10*3/L"); + assertFind( + "when gt 50 and lt 70", + "/Observation" + "?value-quantity=gt50|" + + UCUM_CODESYSTEM_URL + "|10*3/L" + "&value-quantity=lt70|" + + UCUM_CODESYSTEM_URL + "|10*3/L"); - assertNotFind("when gt50 and lt60", "/Observation" + - "?value-quantity=gt50|" + UCUM_CODESYSTEM_URL + "|10*3/L" + - "&value-quantity=lt60|" + UCUM_CODESYSTEM_URL + "|10*3/L"); + assertNotFind( + "when gt50 and lt60", + "/Observation" + "?value-quantity=gt50|" + + UCUM_CODESYSTEM_URL + "|10*3/L" + "&value-quantity=lt60|" + + UCUM_CODESYSTEM_URL + "|10*3/L"); - assertNotFind("when 
gt65 and lt70", "/Observation" + - "?value-quantity=gt65|" + UCUM_CODESYSTEM_URL + "|10*3/L" + - "&value-quantity=lt70|" + UCUM_CODESYSTEM_URL + "|10*3/L"); + assertNotFind( + "when gt65 and lt70", + "/Observation" + "?value-quantity=gt65|" + + UCUM_CODESYSTEM_URL + "|10*3/L" + "&value-quantity=lt70|" + + UCUM_CODESYSTEM_URL + "|10*3/L"); - assertNotFind("when gt 70 and lt 50", "/Observation" + - "?value-quantity=gt70|" + UCUM_CODESYSTEM_URL + "|10*3/L" + - "&value-quantity=lt50|" + UCUM_CODESYSTEM_URL + "|10*3/L"); + assertNotFind( + "when gt 70 and lt 50", + "/Observation" + "?value-quantity=gt70|" + + UCUM_CODESYSTEM_URL + "|10*3/L" + "&value-quantity=lt50|" + + UCUM_CODESYSTEM_URL + "|10*3/L"); } @Test void gtAndLtWithMixedUnits() { - withObservationWithQuantity(0.06, UCUM_CODESYSTEM_URL, "10*6/L" ); + withObservationWithQuantity(0.06, UCUM_CODESYSTEM_URL, "10*6/L"); - assertFind("when gt 50|10*3/L and lt 70|10*9/L", "/Observation" + - "?value-quantity=gt50|" + UCUM_CODESYSTEM_URL + "|10*3/L" + - "&value-quantity=lt0.000070|" + UCUM_CODESYSTEM_URL + "|10*9/L"); + assertFind( + "when gt 50|10*3/L and lt 70|10*9/L", + "/Observation" + "?value-quantity=gt50|" + + UCUM_CODESYSTEM_URL + "|10*3/L" + "&value-quantity=lt0.000070|" + + UCUM_CODESYSTEM_URL + "|10*9/L"); } @Test @@ -583,17 +630,20 @@ public abstract class QuantitySearchParameterTestCases implements ITestDataBuild // this configuration must generate a combo-value-quantity entry with both quantity objects myResourceId = createObservation(List.of( - withQuantityAtPath("valueQuantity", 0.02, UCUM_CODESYSTEM_URL, "10*6/L"), - withQuantityAtPath("component.valueQuantity", 0.06, UCUM_CODESYSTEM_URL, "10*6/L") - )); + withQuantityAtPath("valueQuantity", 0.02, UCUM_CODESYSTEM_URL, "10*6/L"), + withQuantityAtPath("component.valueQuantity", 0.06, UCUM_CODESYSTEM_URL, "10*6/L"))); // myLogbackLevelOverrideExtension.resetLevel(DaoTestDataBuilder.class); assertFind("by value", "Observation?value-quantity=0.02|" + UCUM_CODESYSTEM_URL + "|10*6/L"); - assertFind("by component value", "Observation?component-value-quantity=0.06|" + UCUM_CODESYSTEM_URL + "|10*6/L"); + assertFind( + "by component value", + "Observation?component-value-quantity=0.06|" + UCUM_CODESYSTEM_URL + "|10*6/L"); assertNotFind("by value", "Observation?value-quantity=0.06|" + UCUM_CODESYSTEM_URL + "|10*6/L"); - assertNotFind("by component value", "Observation?component-value-quantity=0.02|" + UCUM_CODESYSTEM_URL + "|10*6/L"); + assertNotFind( + "by component value", + "Observation?component-value-quantity=0.02|" + UCUM_CODESYSTEM_URL + "|10*6/L"); } } @@ -605,16 +655,18 @@ public abstract class QuantitySearchParameterTestCases implements ITestDataBuild @Test public void sortByNumeric() { - String idAlpha1 = withObservationWithQuantity(0.06, UCUM_CODESYSTEM_URL, "10*6/L" ).getIdPart(); // 60,000 - String idAlpha2 = withObservationWithQuantity(50, UCUM_CODESYSTEM_URL, "10*3/L" ).getIdPart(); // 50,000 - String idAlpha3 = withObservationWithQuantity(0.000070, UCUM_CODESYSTEM_URL, "10*9/L" ).getIdPart(); // 70_000 + String idAlpha1 = withObservationWithQuantity(0.06, UCUM_CODESYSTEM_URL, "10*6/L") + .getIdPart(); // 60,000 + String idAlpha2 = withObservationWithQuantity(50, UCUM_CODESYSTEM_URL, "10*3/L") + .getIdPart(); // 50,000 + String idAlpha3 = withObservationWithQuantity(0.000070, UCUM_CODESYSTEM_URL, "10*9/L") + .getIdPart(); // 70_000 // this search is not freetext because there is no freetext-known parameter name List allIds = 
myTestDaoSearch.searchForIds("/Observation?_sort=value-quantity"); assertThat(allIds, contains(idAlpha2, idAlpha1, idAlpha3)); } } - } private void assertFind(String theMessage, String theUrl) { @@ -628,18 +680,16 @@ public abstract class QuantitySearchParameterTestCases implements ITestDataBuild } private IIdType withObservationWithQuantity(double theValue, String theSystem, String theCode) { - myResourceId = createObservation( - withQuantityAtPath("valueQuantity", theValue, theSystem, theCode) - ); + myResourceId = createObservation(withQuantityAtPath("valueQuantity", theValue, theSystem, theCode)); return myResourceId; } private IIdType withObservationWithValueQuantity(double theValue) { - myResourceId = createObservation(List.of(withElementAt("valueQuantity", - withPrimitiveAttribute("value", theValue), - withPrimitiveAttribute("system", UCUM_CODESYSTEM_URL), - withPrimitiveAttribute("code", "mm[Hg]") - ))); + myResourceId = createObservation(List.of(withElementAt( + "valueQuantity", + withPrimitiveAttribute("value", theValue), + withPrimitiveAttribute("system", UCUM_CODESYSTEM_URL), + withPrimitiveAttribute("code", "mm[Hg]")))); return myResourceId; } @@ -647,5 +697,4 @@ public abstract class QuantitySearchParameterTestCases implements ITestDataBuild List resourceIds = myTestDaoSearch.searchForIds(theUrl); assertEquals(theResourceIds, new HashSet<>(resourceIds), theMessage); } - } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/subscription/CountingInterceptor.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/subscription/CountingInterceptor.java index b5be691e96d..06a9a9c642b 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/subscription/CountingInterceptor.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/subscription/CountingInterceptor.java @@ -34,11 +34,13 @@ public class CountingInterceptor implements ChannelInterceptor { private List mySent = new ArrayList<>(); public int getSentCount(String theContainingKeyword) { - return (int) mySent.stream().filter(t -> t.contains(theContainingKeyword)).count(); + return (int) + mySent.stream().filter(t -> t.contains(theContainingKeyword)).count(); } @Override - public void afterSendCompletion(Message theMessage, MessageChannel theChannel, boolean theSent, Exception theException) { + public void afterSendCompletion( + Message theMessage, MessageChannel theChannel, boolean theSent, Exception theException) { ourLog.info("Send complete for message: {}", theMessage); if (theSent) { mySent.add(theMessage.toString()); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/subscription/NotificationServlet.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/subscription/NotificationServlet.java index 62f9af9d61d..825c0edff25 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/subscription/NotificationServlet.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/subscription/NotificationServlet.java @@ -19,15 +19,15 @@ */ package ca.uhn.fhir.jpa.subscription; -import javax.servlet.ServletException; -import javax.servlet.http.HttpServlet; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.atomic.AtomicLong; +import javax.servlet.ServletException; +import 
javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; /** * Receives subscription notification without payloads. @@ -45,7 +45,6 @@ public class NotificationServlet extends HttpServlet { receivedAuthorizationHeaders.add(req.getHeader("Authorization")); } - @Override protected void doPost(HttpServletRequest req, HttpServletResponse resp) { receivedNotificationCount.incrementAndGet(); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemDeleteJobSvcWithUniTestFailures.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemDeleteJobSvcWithUniTestFailures.java index 8dce738410c..34ef92adf77 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemDeleteJobSvcWithUniTestFailures.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemDeleteJobSvcWithUniTestFailures.java @@ -26,8 +26,8 @@ import com.google.common.annotations.VisibleForTesting; import java.util.concurrent.atomic.AtomicBoolean; -public class TermCodeSystemDeleteJobSvcWithUniTestFailures extends TermCodeSystemDeleteJobSvc implements ITermCodeSystemDeleteJobSvc { - +public class TermCodeSystemDeleteJobSvcWithUniTestFailures extends TermCodeSystemDeleteJobSvc + implements ITermCodeSystemDeleteJobSvc { private static final AtomicBoolean ourFailNextDeleteCodeSystemVersion = new AtomicBoolean(false); @@ -39,7 +39,6 @@ public class TermCodeSystemDeleteJobSvcWithUniTestFailures extends TermCodeSyste ourFailNextDeleteCodeSystemVersion.set(theFailNextDeleteCodeSystemVersion); } - @Override public void deleteCodeSystemVersion(long theVersionPid) { // Force a failure for unit tests @@ -49,6 +48,4 @@ public class TermCodeSystemDeleteJobSvcWithUniTestFailures extends TermCodeSyste super.deleteCodeSystemVersion(theVersionPid); } - - - } +} diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/term/TermTestUtil.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/term/TermTestUtil.java index 8318d23ab6a..09ed21bffc8 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/term/TermTestUtil.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/term/TermTestUtil.java @@ -55,10 +55,10 @@ import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_XML_FIL import static org.junit.jupiter.api.Assertions.assertEquals; public final class TermTestUtil { - public static final String URL_MY_CODE_SYSTEM = "http://example.com/my_code_system"; - public static final String URL_MY_VALUE_SET = "http://example.com/my_value_set"; + public static final String URL_MY_CODE_SYSTEM = "http://example.com/my_code_system"; + public static final String URL_MY_VALUE_SET = "http://example.com/my_value_set"; - private TermTestUtil() {} + private TermTestUtil() {} public static void addLoincMandatoryFilesAndSinglePartLinkToZip(ZipCollectionBuilder theFiles) throws IOException { addBaseLoincMandatoryFilesToZip(theFiles, true); @@ -66,14 +66,16 @@ public final class TermTestUtil { theFiles.addFileZip("/loinc/", LOINC_PART_LINK_FILE_DEFAULT.getCode()); } - public static void addLoincMandatoryFilesAndConsumerNameAndLinguisticVariants(ZipCollectionBuilder theFiles) throws IOException { + public static void addLoincMandatoryFilesAndConsumerNameAndLinguisticVariants(ZipCollectionBuilder theFiles) + throws IOException { addBaseLoincMandatoryFilesToZip(theFiles, 
true); theFiles.addFileZip("/loinc/", "loincupload_singlepartlink.properties"); theFiles.addFileZip("/loinc/", LOINC_PART_LINK_FILE_DEFAULT.getCode()); theFiles.addFileZip("/loinc/", LOINC_CONSUMER_NAME_FILE_DEFAULT.getCode()); theFiles.addFileZip("/loinc/", LOINC_LINGUISTIC_VARIANTS_FILE_DEFAULT.getCode()); theFiles.addFileZip("/loinc/", LOINC_LINGUISTIC_VARIANTS_PATH_DEFAULT.getCode() + "zhCN5LinguisticVariant.csv"); - theFiles.addFileZip("/loinc/", LOINC_LINGUISTIC_VARIANTS_PATH_DEFAULT.getCode() + "deAT24LinguisticVariant.csv"); + theFiles.addFileZip( + "/loinc/", LOINC_LINGUISTIC_VARIANTS_PATH_DEFAULT.getCode() + "deAT24LinguisticVariant.csv"); theFiles.addFileZip("/loinc/", LOINC_LINGUISTIC_VARIANTS_PATH_DEFAULT.getCode() + "frCA8LinguisticVariant.csv"); } @@ -91,7 +93,8 @@ public final class TermTestUtil { theFiles.addFileZip("/loinc/", LOINC_PART_LINK_FILE_SUPPLEMENTARY_DEFAULT.getCode()); } - public static void addLoincMandatoryFilesWithPropertiesFileToZip(ZipCollectionBuilder theFiles, String thePropertiesFile) throws IOException { + public static void addLoincMandatoryFilesWithPropertiesFileToZip( + ZipCollectionBuilder theFiles, String thePropertiesFile) throws IOException { if (thePropertiesFile != null) { theFiles.addFileZip("/loinc/", thePropertiesFile); } @@ -100,7 +103,8 @@ public final class TermTestUtil { addBaseLoincMandatoryFilesToZip(theFiles, true); } - static void addBaseLoincMandatoryFilesToZip(ZipCollectionBuilder theFiles, Boolean theIncludeTop2000) throws IOException{ + static void addBaseLoincMandatoryFilesToZip(ZipCollectionBuilder theFiles, Boolean theIncludeTop2000) + throws IOException { theFiles.addFileZip("/loinc/", LOINC_XML_FILE.getCode()); theFiles.addFileZip("/loinc/", LOINC_GROUP_FILE_DEFAULT.getCode()); theFiles.addFileZip("/loinc/", LOINC_GROUP_TERMS_FILE_DEFAULT.getCode()); @@ -123,24 +127,33 @@ public final class TermTestUtil { theFiles.addFileZip("/loinc/", LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE_DEFAULT.getCode()); theFiles.addFileZip("/loinc/", LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE_DEFAULT.getCode()); } - } static void verifyConsumerName(Collection designationList, String theConsumerName) { - TermConceptDesignation consumerNameDesignation = null; - for (TermConceptDesignation designation : designationList) { - if ("ConsumerName".equals(designation.getUseDisplay() )) { - consumerNameDesignation = designation; - } - } - assertEquals(theConsumerName, consumerNameDesignation.getValue()); + TermConceptDesignation consumerNameDesignation = null; + for (TermConceptDesignation designation : designationList) { + if ("ConsumerName".equals(designation.getUseDisplay())) { + consumerNameDesignation = designation; + } + } + assertEquals(theConsumerName, consumerNameDesignation.getValue()); } - static void verifyLinguisticVariant(Collection designationList, String theLanguage, - String theComponent, String theProperty, String theTimeAspct, String theSystem, String theScaleTyp, - String methodType, String theClass, String theShortName, String theLongCommonName, String theRelatedName2, - String theLinguisticVariantDisplayName) { + static void verifyLinguisticVariant( + Collection designationList, + String theLanguage, + String theComponent, + String theProperty, + String theTimeAspct, + String theSystem, + String theScaleTyp, + String methodType, + String theClass, + String theShortName, + String theLongCommonName, + String theRelatedName2, + String theLinguisticVariantDisplayName) { TermConceptDesignation formalNameDes = null; TermConceptDesignation 
shortNameDes = null; @@ -150,27 +163,33 @@ public final class TermTestUtil { for (TermConceptDesignation designation : designationList) { if (theLanguage.equals(designation.getLanguage())) { - if ("FullySpecifiedName".equals(designation.getUseDisplay())) - formalNameDes = designation; + if ("FullySpecifiedName".equals(designation.getUseDisplay())) formalNameDes = designation; - if ("SHORTNAME".equals(designation.getUseDisplay())) - shortNameDes = designation; - if ("LONG_COMMON_NAME".equals(designation.getUseDisplay())) - longCommonNameDes = designation; + if ("SHORTNAME".equals(designation.getUseDisplay())) shortNameDes = designation; + if ("LONG_COMMON_NAME".equals(designation.getUseDisplay())) longCommonNameDes = designation; if ("LinguisticVariantDisplayName".equals(designation.getUseDisplay())) linguisticVariantDisplayNameDes = designation; } } - verifyDesignation(formalNameDes, ITermLoaderSvc.LOINC_URI, "FullySpecifiedName", theComponent+":"+theProperty+":"+theTimeAspct+":"+theSystem+":"+theScaleTyp+":"+methodType); + verifyDesignation( + formalNameDes, + ITermLoaderSvc.LOINC_URI, + "FullySpecifiedName", + theComponent + ":" + theProperty + ":" + theTimeAspct + ":" + theSystem + ":" + theScaleTyp + ":" + + methodType); verifyDesignation(shortNameDes, ITermLoaderSvc.LOINC_URI, "SHORTNAME", theShortName); verifyDesignation(longCommonNameDes, ITermLoaderSvc.LOINC_URI, "LONG_COMMON_NAME", theLongCommonName); - verifyDesignation(linguisticVariantDisplayNameDes, ITermLoaderSvc.LOINC_URI, "LinguisticVariantDisplayName", theLinguisticVariantDisplayName); + verifyDesignation( + linguisticVariantDisplayNameDes, + ITermLoaderSvc.LOINC_URI, + "LinguisticVariantDisplayName", + theLinguisticVariantDisplayName); } - private static void verifyDesignation(TermConceptDesignation theDesignation, String theUseSystem, String theUseCode, String theValue) { - if (theDesignation == null) - return; + private static void verifyDesignation( + TermConceptDesignation theDesignation, String theUseSystem, String theUseCode, String theValue) { + if (theDesignation == null) return; assertEquals(theUseSystem, theDesignation.getUseSystem()); assertEquals(theUseCode, theDesignation.getUseCode()); assertEquals(theValue, theDesignation.getValue()); diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/term/ZipCollectionBuilder.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/term/ZipCollectionBuilder.java index 5931a78f5cb..ce1800a993c 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/term/ZipCollectionBuilder.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/term/ZipCollectionBuilder.java @@ -40,7 +40,6 @@ public class ZipCollectionBuilder { public static final String ZIP_ENTRY_PREFIX = "SnomedCT_Release_INT_20160131_Full/Terminology/"; - private static final Logger ourLog = LoggerFactory.getLogger(ZipCollectionBuilder.class); private final ArrayList myFiles; @@ -76,7 +75,8 @@ public class ZipCollectionBuilder { addFileZip(theClasspathPrefix, theClasspathFileName, theClasspathFileName); } - public void addFileZip(String theClasspathPrefix, String theClasspathFileName, String theOutputFilename) throws IOException { + public void addFileZip(String theClasspathPrefix, String theClasspathFileName, String theOutputFilename) + throws IOException { ByteArrayOutputStream bos; bos = new ByteArrayOutputStream(); ZipOutputStream zos = new ZipOutputStream(bos); diff --git 
a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/ForceSynchronousSearchInterceptor.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/ForceSynchronousSearchInterceptor.java index 3a63d588010..b2bcdf3ae47 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/ForceSynchronousSearchInterceptor.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/ForceSynchronousSearchInterceptor.java @@ -29,5 +29,4 @@ public class ForceSynchronousSearchInterceptor { public void storagePreSearchRegistered(SearchParameterMap theMap) { theMap.setLoadSynchronous(true); } - } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/ValueSetTestUtil.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/ValueSetTestUtil.java index ef9b2da7fff..c5d72f13825 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/ValueSetTestUtil.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/ValueSetTestUtil.java @@ -25,9 +25,9 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.r4.model.Extension; import org.hl7.fhir.r4.model.ValueSet; -import javax.annotation.Nonnull; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import static ca.uhn.fhir.util.HapiExtensions.EXT_VALUESET_EXPANSION_MESSAGE; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -51,6 +51,8 @@ public class ValueSetTestUtil { @Nonnull public List toCodes(IBaseResource theExpandedValueSet) { ValueSet outcome = myCanonicalizer.valueSetToCanonical(theExpandedValueSet); - return outcome.getExpansion().getContains().stream().map(t -> t.getCode()).collect(Collectors.toList()); + return outcome.getExpansion().getContains().stream() + .map(t -> t.getCode()) + .collect(Collectors.toList()); } } diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/WebsocketSubscriptionClient.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/WebsocketSubscriptionClient.java index 813a97ac5a5..62480239d6a 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/WebsocketSubscriptionClient.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/util/WebsocketSubscriptionClient.java @@ -48,7 +48,8 @@ public class WebsocketSubscriptionClient implements AfterEachCallback { /** * Constructor */ - public WebsocketSubscriptionClient(Supplier theServerSupplier, Supplier theStorageSettings) { + public WebsocketSubscriptionClient( + Supplier theServerSupplier, Supplier theStorageSettings) { assert theServerSupplier != null; assert theStorageSettings != null; @@ -68,7 +69,8 @@ public class WebsocketSubscriptionClient implements AfterEachCallback { try { myWebSocketClient.start(); - URI echoUri = new URI("ws://localhost:" + server.getPort() + server.getWebsocketContextPath() + myStorageSettings.get().getWebsocketContextPath()); + URI echoUri = new URI("ws://localhost:" + server.getPort() + server.getWebsocketContextPath() + + myStorageSettings.get().getWebsocketContextPath()); ClientUpgradeRequest request = new ClientUpgradeRequest(); ourLog.info("Connecting to : {}", echoUri); diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/FhirContextFactory.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/FhirContextFactory.java index b4a7da3d8e3..6f5c28912ec 100644 --- 
a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/FhirContextFactory.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/FhirContextFactory.java @@ -1,10 +1,9 @@ package ca.uhn.fhirtest; +import ca.uhn.fhir.context.FhirContext; import org.springframework.beans.factory.FactoryBean; import org.springframework.beans.factory.InitializingBean; -import ca.uhn.fhir.context.FhirContext; - public class FhirContextFactory implements FactoryBean, InitializingBean { private int myConnectionRequestTimeout = 5000; @@ -37,8 +36,7 @@ public class FhirContextFactory implements FactoryBean, Initializin private FhirContext myCtx; - public FhirContextFactory() { - } + public FhirContextFactory() {} @Override public FhirContext getObject() throws Exception { @@ -62,5 +60,4 @@ public class FhirContextFactory implements FactoryBean, Initializin myCtx.getRestfulClientFactory().setSocketTimeout(mySocketTimeout); myCtx.getRestfulClientFactory().setConnectionRequestTimeout(myConnectionRequestTimeout); } - } diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/RequestInterceptor.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/RequestInterceptor.java index 1ec92508f2d..b3de18fdce0 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/RequestInterceptor.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/RequestInterceptor.java @@ -1,12 +1,11 @@ package ca.uhn.fhirtest; +import ca.uhn.fhir.jpa.provider.BaseJpaProvider; import org.apache.commons.lang3.StringUtils; import org.springframework.ui.ModelMap; import org.springframework.web.context.request.WebRequest; import org.springframework.web.context.request.WebRequestInterceptor; -import ca.uhn.fhir.jpa.provider.BaseJpaProvider; - public class RequestInterceptor implements WebRequestInterceptor { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(RequestInterceptor.class); @@ -43,7 +42,5 @@ public class RequestInterceptor implements WebRequestInterceptor { org.slf4j.MDC.put(BaseJpaProvider.REMOTE_UA, userAgent); ourLog.trace("User agent is: {}", userAgent); - } - } diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/ScheduledSubscriptionDeleter.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/ScheduledSubscriptionDeleter.java index 4ad99263670..361fc6c5330 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/ScheduledSubscriptionDeleter.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/ScheduledSubscriptionDeleter.java @@ -61,14 +61,13 @@ public class ScheduledSubscriptionDeleter { Date cutoff = DateUtils.addDays(new Date(), -1); IFhirResourceDao subscriptionDao = myDaoRegistry.getResourceDao("Subscription"); - SearchParameterMap params = SearchParameterMap - .newSynchronous() - .setCount(count); + SearchParameterMap params = SearchParameterMap.newSynchronous().setCount(count); IBundleProvider subscriptions = subscriptionDao.search(params, new SystemRequestDetails()); for (IBaseResource next : subscriptions.getResources(0, count)) { if (next.getMeta().getLastUpdated().before(cutoff)) { ourLog.info("Auto deleting old subscription: {}", next.getIdElement()); - subscriptionDao.delete(next.getIdElement().toUnqualifiedVersionless(), new SystemRequestDetails()); + subscriptionDao.delete( + next.getIdElement().toUnqualifiedVersionless(), new SystemRequestDetails()); } } } diff --git 
a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java index 60127c26465..47ba8f8ec7f 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/TestRestfulServer.java @@ -1,5 +1,6 @@ package ca.uhn.fhirtest; +import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider; import ca.uhn.fhir.batch2.jobs.reindex.ReindexProvider; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.support.IValidationSupport; @@ -8,17 +9,15 @@ import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; -import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider; import ca.uhn.fhir.jpa.delete.ThreadSafeResourceDeleterSvc; import ca.uhn.fhir.jpa.graphql.GraphQLProvider; import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor; -import ca.uhn.fhir.storage.interceptor.balp.BalpAuditCaptureInterceptor; import ca.uhn.fhir.jpa.ips.provider.IpsOperationProvider; import ca.uhn.fhir.jpa.provider.DiffProvider; +import ca.uhn.fhir.jpa.provider.InstanceReindexProvider; import ca.uhn.fhir.jpa.provider.JpaCapabilityStatementProvider; import ca.uhn.fhir.jpa.provider.JpaConformanceProviderDstu2; import ca.uhn.fhir.jpa.provider.JpaSystemProvider; -import ca.uhn.fhir.jpa.provider.InstanceReindexProvider; import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider; import ca.uhn.fhir.jpa.provider.ValueSetOperationProvider; import ca.uhn.fhir.jpa.provider.dstu3.JpaConformanceProviderDstu3; @@ -38,6 +37,7 @@ import ca.uhn.fhir.rest.server.interceptor.LoggingInterceptor; import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor; import ca.uhn.fhir.rest.server.provider.ResourceProviderFactory; import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; +import ca.uhn.fhir.storage.interceptor.balp.BalpAuditCaptureInterceptor; import ca.uhn.fhirtest.config.SqlCaptureInterceptor; import ca.uhn.fhirtest.config.TestAuditConfig; import ca.uhn.fhirtest.config.TestDstu2Config; @@ -52,11 +52,11 @@ import org.springframework.web.context.ContextLoaderListener; import org.springframework.web.context.WebApplicationContext; import org.springframework.web.context.support.AnnotationConfigWebApplicationContext; +import java.util.ArrayList; +import java.util.List; import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; -import java.util.ArrayList; -import java.util.List; public class TestRestfulServer extends RestfulServer { @@ -95,7 +95,8 @@ public class TestRestfulServer extends RestfulServer { Validate.notNull(fhirVersionParam); setImplementationDescription("HAPI FHIR Test/Demo Server " + fhirVersionParam + " Endpoint"); - setCopyright("This server is **Open Source Software**, licensed under the terms of the [Apache Software License 2.0](https://www.apache.org/licenses/LICENSE-2.0)."); + setCopyright( + "This server is **Open Source Software**, licensed under the terms of the [Apache Software License 2.0](https://www.apache.org/licenses/LICENSE-2.0)."); // Depending on the version this server is supporing, we will // retrieve all the appropriate resource providers and the @@ -120,7 +121,8 @@ public class TestRestfulServer extends RestfulServer { beans = 
myAppCtx.getBean("myResourceProvidersDstu2", ResourceProviderFactory.class); systemDao = myAppCtx.getBean("mySystemDaoDstu2", IFhirSystemDao.class); etagSupport = ETagSupportEnum.ENABLED; - JpaConformanceProviderDstu2 confProvider = new JpaConformanceProviderDstu2(this, systemDao, myAppCtx.getBean(JpaStorageSettings.class)); + JpaConformanceProviderDstu2 confProvider = + new JpaConformanceProviderDstu2(this, systemDao, myAppCtx.getBean(JpaStorageSettings.class)); setServerConformanceProvider(confProvider); registerInterceptor(myAppCtx.getBean(BalpAuditCaptureInterceptor.class)); break; @@ -136,7 +138,11 @@ public class TestRestfulServer extends RestfulServer { beans = myAppCtx.getBean("myResourceProvidersDstu3", ResourceProviderFactory.class); systemDao = myAppCtx.getBean("mySystemDaoDstu3", IFhirSystemDao.class); etagSupport = ETagSupportEnum.ENABLED; - JpaConformanceProviderDstu3 confProvider = new JpaConformanceProviderDstu3(this, systemDao, myAppCtx.getBean(JpaStorageSettings.class), myAppCtx.getBean(ISearchParamRegistry.class)); + JpaConformanceProviderDstu3 confProvider = new JpaConformanceProviderDstu3( + this, + systemDao, + myAppCtx.getBean(JpaStorageSettings.class), + myAppCtx.getBean(ISearchParamRegistry.class)); setServerConformanceProvider(confProvider); providers.add(myAppCtx.getBean(TerminologyUploaderProvider.class)); providers.add(myAppCtx.getBean(GraphQLProvider.class)); @@ -155,7 +161,12 @@ public class TestRestfulServer extends RestfulServer { systemDao = myAppCtx.getBean("mySystemDaoR4", IFhirSystemDao.class); etagSupport = ETagSupportEnum.ENABLED; IValidationSupport validationSupport = myAppCtx.getBean(IValidationSupport.class); - JpaCapabilityStatementProvider confProvider = new JpaCapabilityStatementProvider(this, systemDao, myAppCtx.getBean(JpaStorageSettings.class), myAppCtx.getBean(ISearchParamRegistry.class), validationSupport); + JpaCapabilityStatementProvider confProvider = new JpaCapabilityStatementProvider( + this, + systemDao, + myAppCtx.getBean(JpaStorageSettings.class), + myAppCtx.getBean(ISearchParamRegistry.class), + validationSupport); setServerConformanceProvider(confProvider); providers.add(myAppCtx.getBean(TerminologyUploaderProvider.class)); providers.add(myAppCtx.getBean(GraphQLProvider.class)); @@ -175,7 +186,12 @@ public class TestRestfulServer extends RestfulServer { systemDao = myAppCtx.getBean("mySystemDaoR4B", IFhirSystemDao.class); etagSupport = ETagSupportEnum.ENABLED; IValidationSupport validationSupport = myAppCtx.getBean(IValidationSupport.class); - JpaCapabilityStatementProvider confProvider = new JpaCapabilityStatementProvider(this, systemDao, myAppCtx.getBean(JpaStorageSettings.class), myAppCtx.getBean(ISearchParamRegistry.class), validationSupport); + JpaCapabilityStatementProvider confProvider = new JpaCapabilityStatementProvider( + this, + systemDao, + myAppCtx.getBean(JpaStorageSettings.class), + myAppCtx.getBean(ISearchParamRegistry.class), + validationSupport); setServerConformanceProvider(confProvider); providers.add(myAppCtx.getBean(TerminologyUploaderProvider.class)); providers.add(myAppCtx.getBean(GraphQLProvider.class)); @@ -194,7 +210,12 @@ public class TestRestfulServer extends RestfulServer { systemDao = myAppCtx.getBean("mySystemDaoR5", IFhirSystemDao.class); etagSupport = ETagSupportEnum.ENABLED; IValidationSupport validationSupport = myAppCtx.getBean(IValidationSupport.class); - JpaCapabilityStatementProvider confProvider = new JpaCapabilityStatementProvider(this, systemDao, 
myAppCtx.getBean(JpaStorageSettings.class), myAppCtx.getBean(ISearchParamRegistry.class), validationSupport); + JpaCapabilityStatementProvider confProvider = new JpaCapabilityStatementProvider( + this, + systemDao, + myAppCtx.getBean(JpaStorageSettings.class), + myAppCtx.getBean(ISearchParamRegistry.class), + validationSupport); setServerConformanceProvider(confProvider); providers.add(myAppCtx.getBean(TerminologyUploaderProvider.class)); providers.add(myAppCtx.getBean(GraphQLProvider.class)); @@ -213,12 +234,18 @@ public class TestRestfulServer extends RestfulServer { systemDao = myAppCtx.getBean("mySystemDaoR4", IFhirSystemDao.class); etagSupport = ETagSupportEnum.ENABLED; IValidationSupport validationSupport = myAppCtx.getBean(IValidationSupport.class); - JpaCapabilityStatementProvider confProvider = new JpaCapabilityStatementProvider(this, systemDao, myAppCtx.getBean(JpaStorageSettings.class), myAppCtx.getBean(ISearchParamRegistry.class), validationSupport); + JpaCapabilityStatementProvider confProvider = new JpaCapabilityStatementProvider( + this, + systemDao, + myAppCtx.getBean(JpaStorageSettings.class), + myAppCtx.getBean(ISearchParamRegistry.class), + validationSupport); setServerConformanceProvider(confProvider); break; } default: - throw new ServletException(Msg.code(1975) + "Unknown FHIR version specified in init-param[FhirVersion]: " + fhirVersionParam); + throw new ServletException(Msg.code(1975) + + "Unknown FHIR version specified in init-param[FhirVersion]: " + fhirVersionParam); } providers.add(myAppCtx.getBean(JpaSystemProvider.class)); @@ -306,8 +333,10 @@ public class TestRestfulServer extends RestfulServer { */ DaoRegistry daoRegistry = myAppCtx.getBean(DaoRegistry.class); IInterceptorBroadcaster interceptorBroadcaster = myAppCtx.getBean(IInterceptorBroadcaster.class); - ThreadSafeResourceDeleterSvc threadSafeResourceDeleterSvc = myAppCtx.getBean(ThreadSafeResourceDeleterSvc.class); - CascadingDeleteInterceptor cascadingDeleteInterceptor = new CascadingDeleteInterceptor(ctx, daoRegistry, interceptorBroadcaster, threadSafeResourceDeleterSvc); + ThreadSafeResourceDeleterSvc threadSafeResourceDeleterSvc = + myAppCtx.getBean(ThreadSafeResourceDeleterSvc.class); + CascadingDeleteInterceptor cascadingDeleteInterceptor = + new CascadingDeleteInterceptor(ctx, daoRegistry, interceptorBroadcaster, threadSafeResourceDeleterSvc); registerInterceptor(cascadingDeleteInterceptor); /* @@ -325,7 +354,8 @@ public class TestRestfulServer extends RestfulServer { // Logging for request type LoggingInterceptor loggingInterceptor = new LoggingInterceptor(); - loggingInterceptor.setMessageFormat("${operationType} Content-Type: ${requestHeader.content-type} - Accept: ${responseEncodingNoDefault} \"${requestHeader.accept}\" - Agent: ${requestHeader.user-agent}"); + loggingInterceptor.setMessageFormat( + "${operationType} Content-Type: ${requestHeader.content-type} - Accept: ${responseEncodingNoDefault} \"${requestHeader.accept}\" - Agent: ${requestHeader.user-agent}"); registerInterceptor(loggingInterceptor); // SQL Capturing @@ -356,8 +386,5 @@ public class TestRestfulServer extends RestfulServer { } return retVal; } - } - - } diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/CommonConfig.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/CommonConfig.java index 2e68066f00f..0db46aa8638 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/CommonConfig.java +++ 
b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/CommonConfig.java @@ -6,10 +6,6 @@ import ca.uhn.fhir.interceptor.api.IInterceptorService; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.config.ThreadPoolFactoryConfig; import ca.uhn.fhir.jpa.batch2.JpaBatch2Config; -import ca.uhn.fhir.storage.interceptor.balp.AsyncMemoryQueueBackedFhirClientBalpSink; -import ca.uhn.fhir.storage.interceptor.balp.BalpAuditCaptureInterceptor; -import ca.uhn.fhir.storage.interceptor.balp.IBalpAuditContextServices; -import ca.uhn.fhir.storage.interceptor.balp.IBalpAuditEventSink; import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.subscription.channel.config.SubscriptionChannelConfig; import ca.uhn.fhir.jpa.subscription.match.config.SubscriptionProcessorConfig; @@ -19,6 +15,10 @@ import ca.uhn.fhir.jpa.subscription.submit.config.SubscriptionSubmitterConfig; import ca.uhn.fhir.jpa.util.LoggingEmailSender; import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor; import ca.uhn.fhir.rest.server.interceptor.LoggingInterceptor; +import ca.uhn.fhir.storage.interceptor.balp.AsyncMemoryQueueBackedFhirClientBalpSink; +import ca.uhn.fhir.storage.interceptor.balp.BalpAuditCaptureInterceptor; +import ca.uhn.fhir.storage.interceptor.balp.IBalpAuditContextServices; +import ca.uhn.fhir.storage.interceptor.balp.IBalpAuditEventSink; import ca.uhn.fhirtest.ScheduledSubscriptionDeleter; import ca.uhn.fhirtest.interceptor.AnalyticsInterceptor; import ca.uhn.fhirtest.joke.HolyFooCowInterceptor; @@ -48,7 +48,7 @@ public class CommonConfig { LoggingInterceptor retVal = new LoggingInterceptor(); retVal.setLoggerName("fhirtest.access"); retVal.setMessageFormat( - "Path[${servletPath}] Source[${requestHeader.x-forwarded-for}] Operation[${operationType} ${operationName} ${idOrResourceName}] UA[${requestHeader.user-agent}] Params[${requestParameters}] ResponseEncoding[${responseEncodingNoDefault}]"); + "Path[${servletPath}] Source[${requestHeader.x-forwarded-for}] Operation[${operationType} ${operationName} ${idOrResourceName}] UA[${requestHeader.user-agent}] Params[${requestParameters}] ResponseEncoding[${responseEncodingNoDefault}]"); retVal.setLogExceptions(true); retVal.setErrorMessageFormat("ERROR - ${requestVerb} ${requestUrl}"); return retVal; @@ -109,17 +109,20 @@ public class CommonConfig { } @Bean - public CommonJpaStorageSettingsConfigurer commonJpaStorageSettingsConfigurer(JpaStorageSettings theStorageSettings) { + public CommonJpaStorageSettingsConfigurer commonJpaStorageSettingsConfigurer( + JpaStorageSettings theStorageSettings) { return new CommonJpaStorageSettingsConfigurer(theStorageSettings); } @Bean public IBalpAuditEventSink balpAuditEventSink() { - return new AsyncMemoryQueueBackedFhirClientBalpSink(FhirContext.forR4Cached(), "http://localhost:8000/baseAudit"); + return new AsyncMemoryQueueBackedFhirClientBalpSink( + FhirContext.forR4Cached(), "http://localhost:8000/baseAudit"); } @Bean - public BalpAuditCaptureInterceptor balpAuditCaptureInterceptor(IBalpAuditEventSink theAuditSink, IBalpAuditContextServices theAuditContextServices) { + public BalpAuditCaptureInterceptor balpAuditCaptureInterceptor( + IBalpAuditEventSink theAuditSink, IBalpAuditContextServices theAuditContextServices) { return new BalpAuditCaptureInterceptor(theAuditSink, theAuditContextServices); } @@ -131,6 +134,4 @@ public class CommonConfig { public static boolean isLocalTestMode() { return "true".equalsIgnoreCase(System.getProperty("testmode.local")); } - - } 
diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/CommonJpaStorageSettingsConfigurer.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/CommonJpaStorageSettingsConfigurer.java index 0c5353bee68..229f400c1ed 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/CommonJpaStorageSettingsConfigurer.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/CommonJpaStorageSettingsConfigurer.java @@ -6,6 +6,5 @@ public class CommonJpaStorageSettingsConfigurer { public CommonJpaStorageSettingsConfigurer(JpaStorageSettings theStorageSettings) { theStorageSettings.setIndexOnUpliftedRefchains(true); theStorageSettings.setMarkResourcesForReindexingUponSearchParameterChange(false); - } } diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/DbServerConfig.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/DbServerConfig.java index b670e9a2cd7..583e941817b 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/DbServerConfig.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/DbServerConfig.java @@ -3,6 +3,4 @@ package ca.uhn.fhirtest.config; import org.springframework.context.annotation.Configuration; @Configuration -public class DbServerConfig { - -} +public class DbServerConfig {} diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/FhirTestBalpAuditContextServices.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/FhirTestBalpAuditContextServices.java index 2771812f995..394e35eb70d 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/FhirTestBalpAuditContextServices.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/FhirTestBalpAuditContextServices.java @@ -1,8 +1,8 @@ package ca.uhn.fhirtest.config; -import ca.uhn.fhir.storage.interceptor.balp.IBalpAuditContextServices; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; +import ca.uhn.fhir.storage.interceptor.balp.IBalpAuditContextServices; import joptsimple.internal.Strings; import org.hl7.fhir.r4.model.Reference; diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/FhirTesterConfig.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/FhirTesterConfig.java index a4934adf479..b41af3fa429 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/FhirTesterConfig.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/FhirTesterConfig.java @@ -40,83 +40,78 @@ public class FhirTesterConfig { @Bean public TesterConfig testerConfig() { TesterConfig retVal = new TesterConfig(); - retVal - .addServer() - .withId("home_r4") - .withFhirVersion(FhirVersionEnum.R4) - .withBaseUrl("http://hapi.fhir.org/baseR4") - .withName("HAPI Test Server (R4 FHIR)") - .withSearchResultRowOperation(EXTOP_VALIDATE, id -> true) - .withSearchResultRowOperation("$diff", id -> id.isVersionIdPartValidLong() && id.getVersionIdPartAsLong() > 1) - .withSearchResultRowOperation("$everything", id -> "Patient".equals(id.getResourceType())) - .withSearchResultRowOperation("$summary", id -> "Patient".equals(id.getResourceType())) + retVal.addServer() + .withId("home_r4") + .withFhirVersion(FhirVersionEnum.R4) + .withBaseUrl("http://hapi.fhir.org/baseR4") + .withName("HAPI Test Server (R4 FHIR)") + 
.withSearchResultRowOperation(EXTOP_VALIDATE, id -> true) + .withSearchResultRowOperation( + "$diff", id -> id.isVersionIdPartValidLong() && id.getVersionIdPartAsLong() > 1) + .withSearchResultRowOperation("$everything", id -> "Patient".equals(id.getResourceType())) + .withSearchResultRowOperation("$summary", id -> "Patient".equals(id.getResourceType())) + .addServer() + .withId("home_r5") + .withFhirVersion(FhirVersionEnum.R5) + .withBaseUrl("http://hapi.fhir.org/baseR5") + .withName("HAPI Test Server (R5 FHIR)") + .withSearchResultRowOperation(EXTOP_VALIDATE, id -> true) + .withSearchResultRowOperation( + "$diff", id -> id.isVersionIdPartValidLong() && id.getVersionIdPartAsLong() > 1) + .withSearchResultRowOperation("$everything", id -> "Patient".equals(id.getResourceType())) + .addServer() + .withId("home_audit") + .withFhirVersion(FhirVersionEnum.R4) + .withBaseUrl("http://hapi.fhir.org/baseAudit") + .withName("HAPI Test Server (R4 Audit)") + .addServer() + .withId("home_r4b") + .withFhirVersion(FhirVersionEnum.R4B) + .withBaseUrl("http://hapi.fhir.org/baseR4B") + .withName("HAPI Test Server (R4B FHIR)") + .withSearchResultRowOperation(EXTOP_VALIDATE, id -> true) + .withSearchResultRowOperation( + "$diff", id -> id.isVersionIdPartValidLong() && id.getVersionIdPartAsLong() > 1) + .withSearchResultRowOperation("$everything", id -> "Patient".equals(id.getResourceType())) + .addServer() + .withId("home_21") + .withFhirVersion(FhirVersionEnum.DSTU3) + .withBaseUrl("http://hapi.fhir.org/baseDstu3") + .withName("HAPI Test Server (STU3 FHIR)") + .withSearchResultRowOperation(EXTOP_VALIDATE, id -> true) + .withSearchResultRowOperation( + "$diff", id -> id.isVersionIdPartValidLong() && id.getVersionIdPartAsLong() > 1) + .withSearchResultRowOperation("$everything", id -> "Patient".equals(id.getResourceType())) + .addServer() + .withId("hapi_dev") + .withFhirVersion(FhirVersionEnum.DSTU2) + .withBaseUrl("http://hapi.fhir.org/baseDstu2") + .withName("HAPI Test Server (DSTU2 FHIR)") + .withSearchResultRowOperation(EXTOP_VALIDATE, id -> true) + .withSearchResultRowOperation("$everything", id -> "Patient".equals(id.getResourceType())) - .addServer() - .withId("home_r5") - .withFhirVersion(FhirVersionEnum.R5) - .withBaseUrl("http://hapi.fhir.org/baseR5") - .withName("HAPI Test Server (R5 FHIR)") - .withSearchResultRowOperation(EXTOP_VALIDATE, id -> true) - .withSearchResultRowOperation("$diff", id -> id.isVersionIdPartValidLong() && id.getVersionIdPartAsLong() > 1) - .withSearchResultRowOperation("$everything", id -> "Patient".equals(id.getResourceType())) + // Non-HAPI servers follow - .addServer() - .withId("home_audit") - .withFhirVersion(FhirVersionEnum.R4) - .withBaseUrl("http://hapi.fhir.org/baseAudit") - .withName("HAPI Test Server (R4 Audit)") - - .addServer() - .withId("home_r4b") - .withFhirVersion(FhirVersionEnum.R4B) - .withBaseUrl("http://hapi.fhir.org/baseR4B") - .withName("HAPI Test Server (R4B FHIR)") - .withSearchResultRowOperation(EXTOP_VALIDATE, id -> true) - .withSearchResultRowOperation("$diff", id -> id.isVersionIdPartValidLong() && id.getVersionIdPartAsLong() > 1) - .withSearchResultRowOperation("$everything", id -> "Patient".equals(id.getResourceType())) - - .addServer() - .withId("home_21") - .withFhirVersion(FhirVersionEnum.DSTU3) - .withBaseUrl("http://hapi.fhir.org/baseDstu3") - .withName("HAPI Test Server (STU3 FHIR)") - .withSearchResultRowOperation(EXTOP_VALIDATE, id -> true) - .withSearchResultRowOperation("$diff", id -> id.isVersionIdPartValidLong() && 
id.getVersionIdPartAsLong() > 1) - .withSearchResultRowOperation("$everything", id -> "Patient".equals(id.getResourceType())) - - .addServer() - .withId("hapi_dev") - .withFhirVersion(FhirVersionEnum.DSTU2) - .withBaseUrl("http://hapi.fhir.org/baseDstu2") - .withName("HAPI Test Server (DSTU2 FHIR)") - .withSearchResultRowOperation(EXTOP_VALIDATE, id -> true) - .withSearchResultRowOperation("$everything", id -> "Patient".equals(id.getResourceType())) - - // Non-HAPI servers follow - - .addServer() - .withId("hi4") - .withFhirVersion(FhirVersionEnum.DSTU3) - .withBaseUrl("http://test.fhir.org/r4") - .withName("Health Intersections (R4 FHIR)") - - .addServer() - .withId("hi3") - .withFhirVersion(FhirVersionEnum.DSTU3) - .withBaseUrl("http://test.fhir.org/r3") - .withName("Health Intersections (STU3 FHIR)") - - .addServer() - .withId("hi2") - .withFhirVersion(FhirVersionEnum.DSTU2) - .withBaseUrl("http://test.fhir.org/r2") - .withName("Health Intersections (DSTU2 FHIR)") - - .addServer() - .withId("spark2") - .withFhirVersion(FhirVersionEnum.DSTU3) - .withBaseUrl("http://vonk.fire.ly/") - .withName("Vonk - Firely (STU3 FHIR)"); + .addServer() + .withId("hi4") + .withFhirVersion(FhirVersionEnum.DSTU3) + .withBaseUrl("http://test.fhir.org/r4") + .withName("Health Intersections (R4 FHIR)") + .addServer() + .withId("hi3") + .withFhirVersion(FhirVersionEnum.DSTU3) + .withBaseUrl("http://test.fhir.org/r3") + .withName("Health Intersections (STU3 FHIR)") + .addServer() + .withId("hi2") + .withFhirVersion(FhirVersionEnum.DSTU2) + .withBaseUrl("http://test.fhir.org/r2") + .withName("Health Intersections (DSTU2 FHIR)") + .addServer() + .withId("spark2") + .withFhirVersion(FhirVersionEnum.DSTU3) + .withBaseUrl("http://vonk.fire.ly/") + .withName("Vonk - Firely (STU3 FHIR)"); return retVal; } @@ -125,5 +120,4 @@ public class FhirTesterConfig { public SubscriptionPlaygroundController subscriptionPlaygroundController() { return new SubscriptionPlaygroundController(); } - } diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/OldAuditEventPurgeService.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/OldAuditEventPurgeService.java index 83e6a401a66..5a0e7f97a60 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/OldAuditEventPurgeService.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/OldAuditEventPurgeService.java @@ -23,8 +23,10 @@ public class OldAuditEventPurgeService { @Autowired private ISchedulerService mySchedulerSvc; + @Autowired private IDeleteExpungeJobSubmitter myDeleteExpungeSubmitter; + @Autowired private JpaStorageSettings myStorageSettings; @@ -60,5 +62,4 @@ public class OldAuditEventPurgeService { mySvc.doPass(); } } - } diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/SqlCaptureInterceptor.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/SqlCaptureInterceptor.java index ed897baaf4d..2a20263d695 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/SqlCaptureInterceptor.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/SqlCaptureInterceptor.java @@ -27,11 +27,13 @@ public class SqlCaptureInterceptor { sql = UrlUtil.sanitizeUrlPart(sql); theRequestDetails.getResponse().addHeader("X-Executed-SQL", sql); - theRequestDetails.getResponse().addHeader("X-Executed-SQL-Outcome", "Returned " + nextQuery.getSize() + " in " + 
StopWatch.formatMillis(nextQuery.getElapsedTime())); - + theRequestDetails + .getResponse() + .addHeader( + "X-Executed-SQL-Outcome", + "Returned " + nextQuery.getSize() + " in " + + StopWatch.formatMillis(nextQuery.getElapsedTime())); } } - } - } diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestAuditConfig.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestAuditConfig.java index 973bf616e86..bd6c6690f16 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestAuditConfig.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestAuditConfig.java @@ -26,11 +26,11 @@ import org.springframework.orm.jpa.JpaTransactionManager; import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; import org.springframework.transaction.annotation.EnableTransactionManagement; +import java.util.Properties; +import java.util.concurrent.TimeUnit; import javax.annotation.PostConstruct; import javax.persistence.EntityManagerFactory; import javax.sql.DataSource; -import java.util.Properties; -import java.util.concurrent.TimeUnit; @Configuration @Import({CommonConfig.class, JpaR4Config.class, HapiJpaConfig.class}) @@ -69,7 +69,6 @@ public class TestAuditConfig { return retVal; } - @Bean(name = "myPersistenceDataSourceR4") public DataSource dataSource() { BasicDataSource retVal = new BasicDataSource(); @@ -83,13 +82,12 @@ public class TestAuditConfig { retVal.setPassword(myDbPassword); TestR5Config.applyCommonDatasourceParams(retVal); - DataSource dataSource = ProxyDataSourceBuilder - .create(retVal) -// .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL") - .logSlowQueryBySlf4j(10000, TimeUnit.MILLISECONDS) - .afterQuery(new CurrentThreadCaptureQueriesListener()) - .countQuery() - .build(); + DataSource dataSource = ProxyDataSourceBuilder.create(retVal) + // .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL") + .logSlowQueryBySlf4j(10000, TimeUnit.MILLISECONDS) + .afterQuery(new CurrentThreadCaptureQueriesListener()) + .countQuery() + .build(); return dataSource; } @@ -103,8 +101,10 @@ public class TestAuditConfig { } @Bean - public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory, FhirContext theFhirContext) { - LocalContainerEntityManagerFactoryBean retVal = HapiEntityManagerFactoryUtil.newEntityManagerFactory(theConfigurableListableBeanFactory, theFhirContext); + public LocalContainerEntityManagerFactoryBean entityManagerFactory( + ConfigurableListableBeanFactory theConfigurableListableBeanFactory, FhirContext theFhirContext) { + LocalContainerEntityManagerFactoryBean retVal = HapiEntityManagerFactoryUtil.newEntityManagerFactory( + theConfigurableListableBeanFactory, theFhirContext); retVal.setPersistenceUnitName("PU_HapiFhirJpaAudit"); retVal.setDataSource(dataSource()); retVal.setJpaProperties(jpaProperties()); @@ -152,7 +152,6 @@ public class TestAuditConfig { return new PropertySourcesPlaceholderConfigurer(); } - @Bean public OldAuditEventPurgeService oldEventPurgeService() { return new OldAuditEventPurgeService(); @@ -172,7 +171,5 @@ public class TestAuditConfig { public void start() { myDaoRegistry.setSupportedResourceTypes("AuditEvent", "SearchParameter", "Subscription"); } - } - } diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestDstu2Config.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestDstu2Config.java index 9cc7b540f9a..7d957240c90 100644 
--- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestDstu2Config.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestDstu2Config.java @@ -32,10 +32,10 @@ import org.springframework.orm.jpa.JpaTransactionManager; import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; import org.springframework.transaction.annotation.EnableTransactionManagement; -import javax.persistence.EntityManagerFactory; -import javax.sql.DataSource; import java.util.Properties; import java.util.concurrent.TimeUnit; +import javax.persistence.EntityManagerFactory; +import javax.sql.DataSource; @Configuration @Import({CommonConfig.class, JpaDstu2Config.class, HapiJpaConfig.class}) @@ -76,7 +76,7 @@ public class TestDstu2Config { retVal.setDefaultSearchParamsCanBeOverridden(false); retVal.setIndexOnContainedResources(true); retVal.setIndexIdentifierOfType(true); -// retVal.setHistoryCountMode(HistoryCountModeEnum.COUNT_ACCURATE); + // retVal.setHistoryCountMode(HistoryCountModeEnum.COUNT_ACCURATE); return retVal; } @@ -87,7 +87,6 @@ public class TestDstu2Config { return retVal; } - @Bean(name = "myPersistenceDataSourceDstu1") public DataSource dataSource() { BasicDataSource retVal = new BasicDataSource(); @@ -101,13 +100,12 @@ public class TestDstu2Config { retVal.setPassword(myDbPassword); TestR5Config.applyCommonDatasourceParams(retVal); - DataSource dataSource = ProxyDataSourceBuilder - .create(retVal) -// .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL") - .logSlowQueryBySlf4j(10000, TimeUnit.MILLISECONDS) - .afterQuery(new CurrentThreadCaptureQueriesListener()) - .countQuery() - .build(); + DataSource dataSource = ProxyDataSourceBuilder.create(retVal) + // .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL") + .logSlowQueryBySlf4j(10000, TimeUnit.MILLISECONDS) + .afterQuery(new CurrentThreadCaptureQueriesListener()) + .countQuery() + .build(); return dataSource; } @@ -121,8 +119,10 @@ public class TestDstu2Config { } @Bean - public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory, FhirContext theFhirContext) { - LocalContainerEntityManagerFactoryBean retVal = HapiEntityManagerFactoryUtil.newEntityManagerFactory(theConfigurableListableBeanFactory, theFhirContext); + public LocalContainerEntityManagerFactoryBean entityManagerFactory( + ConfigurableListableBeanFactory theConfigurableListableBeanFactory, FhirContext theFhirContext) { + LocalContainerEntityManagerFactoryBean retVal = HapiEntityManagerFactoryUtil.newEntityManagerFactory( + theConfigurableListableBeanFactory, theFhirContext); retVal.setPersistenceUnitName("PU_HapiFhirJpaDstu2"); retVal.setDataSource(dataSource()); retVal.setJpaProperties(jpaProperties()); @@ -146,8 +146,9 @@ public class TestDstu2Config { extraProperties.put("hibernate.cache.use_minimal_puts", "false"); extraProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "lucene"); - extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), - HapiHSearchAnalysisConfigurers.HapiLuceneAnalysisConfigurer.class.getName()); + extraProperties.put( + BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), + HapiHSearchAnalysisConfigurers.HapiLuceneAnalysisConfigurer.class.getName()); extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE), "local-filesystem"); extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_ROOT), myFhirLuceneLocation); 
extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION), "LUCENE_CURRENT"); @@ -181,9 +182,9 @@ public class TestDstu2Config { return new PropertySourcesPlaceholderConfigurer(); } -// @Bean -// public IServerInterceptor subscriptionSecurityInterceptor() { -// return new SubscriptionsRequireManualActivationInterceptorDstu2(); -// } + // @Bean + // public IServerInterceptor subscriptionSecurityInterceptor() { + // return new SubscriptionsRequireManualActivationInterceptorDstu2(); + // } } diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestDstu3Config.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestDstu3Config.java index 115f1df3790..da6b07ac91b 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestDstu3Config.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestDstu3Config.java @@ -33,10 +33,10 @@ import org.springframework.orm.jpa.JpaTransactionManager; import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; import org.springframework.transaction.annotation.EnableTransactionManagement; -import javax.persistence.EntityManagerFactory; -import javax.sql.DataSource; import java.util.Properties; import java.util.concurrent.TimeUnit; +import javax.persistence.EntityManagerFactory; +import javax.sql.DataSource; @Configuration @Import({CommonConfig.class, JpaDstu3Config.class, HapiJpaConfig.class}) @@ -90,7 +90,6 @@ public class TestDstu3Config { return retVal; } - @Bean public PublicSecurityInterceptor securityInterceptor() { return new PublicSecurityInterceptor(); @@ -109,20 +108,21 @@ public class TestDstu3Config { retVal.setPassword(myDbPassword); TestR5Config.applyCommonDatasourceParams(retVal); - DataSource dataSource = ProxyDataSourceBuilder - .create(retVal) -// .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL") - .logSlowQueryBySlf4j(10000, TimeUnit.MILLISECONDS) - .afterQuery(new CurrentThreadCaptureQueriesListener()) - .countQuery() - .build(); + DataSource dataSource = ProxyDataSourceBuilder.create(retVal) + // .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL") + .logSlowQueryBySlf4j(10000, TimeUnit.MILLISECONDS) + .afterQuery(new CurrentThreadCaptureQueriesListener()) + .countQuery() + .build(); return dataSource; } @Bean - public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory, FhirContext theFhirContext) { - LocalContainerEntityManagerFactoryBean retVal = HapiEntityManagerFactoryUtil.newEntityManagerFactory(theConfigurableListableBeanFactory, theFhirContext); + public LocalContainerEntityManagerFactoryBean entityManagerFactory( + ConfigurableListableBeanFactory theConfigurableListableBeanFactory, FhirContext theFhirContext) { + LocalContainerEntityManagerFactoryBean retVal = HapiEntityManagerFactoryUtil.newEntityManagerFactory( + theConfigurableListableBeanFactory, theFhirContext); retVal.setPersistenceUnitName("PU_HapiFhirJpaDstu3"); retVal.setDataSource(dataSource()); retVal.setJpaProperties(jpaProperties()); @@ -146,8 +146,9 @@ public class TestDstu3Config { extraProperties.put("hibernate.cache.use_minimal_puts", "false"); extraProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "lucene"); - extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), - HapiHSearchAnalysisConfigurers.HapiLuceneAnalysisConfigurer.class.getName()); + extraProperties.put( + 
BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), + HapiHSearchAnalysisConfigurers.HapiLuceneAnalysisConfigurer.class.getName()); extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE), "local-filesystem"); extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_ROOT), myFhirLuceneLocation); extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION), "LUCENE_CURRENT"); @@ -162,7 +163,8 @@ public class TestDstu3Config { */ @Bean @Lazy - public RequestValidatingInterceptor requestValidatingInterceptor(IInstanceValidatorModule theFhirInstanceValidator) { + public RequestValidatingInterceptor requestValidatingInterceptor( + IInstanceValidatorModule theFhirInstanceValidator) { RequestValidatingInterceptor requestValidator = new RequestValidatingInterceptor(); requestValidator.setFailOnSeverity(null); requestValidator.setAddResponseHeaderOnSeverity(null); @@ -173,10 +175,10 @@ public class TestDstu3Config { return requestValidator; } -// @Bean -// public IServerInterceptor subscriptionSecurityInterceptor() { -// return new SubscriptionsRequireManualActivationInterceptorDstu3(); -// } + // @Bean + // public IServerInterceptor subscriptionSecurityInterceptor() { + // return new SubscriptionsRequireManualActivationInterceptorDstu3(); + // } @Bean @Primary diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestR4BConfig.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestR4BConfig.java index c79bcfe0989..454faeab90a 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestR4BConfig.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestR4BConfig.java @@ -34,10 +34,10 @@ import org.springframework.orm.jpa.JpaTransactionManager; import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; import org.springframework.transaction.annotation.EnableTransactionManagement; -import javax.persistence.EntityManagerFactory; -import javax.sql.DataSource; import java.util.Properties; import java.util.concurrent.TimeUnit; +import javax.persistence.EntityManagerFactory; +import javax.sql.DataSource; @Configuration @Import({CommonConfig.class, JpaR4BConfig.class, HapiJpaConfig.class}) @@ -97,13 +97,12 @@ public class TestR4BConfig { retVal.setPassword(myDbPassword); TestR5Config.applyCommonDatasourceParams(retVal); - DataSource dataSource = ProxyDataSourceBuilder - .create(retVal) -// .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL") - .logSlowQueryBySlf4j(10000, TimeUnit.MILLISECONDS) - .afterQuery(new CurrentThreadCaptureQueriesListener()) - .countQuery() - .build(); + DataSource dataSource = ProxyDataSourceBuilder.create(retVal) + // .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL") + .logSlowQueryBySlf4j(10000, TimeUnit.MILLISECONDS) + .afterQuery(new CurrentThreadCaptureQueriesListener()) + .countQuery() + .build(); return dataSource; } @@ -118,8 +117,10 @@ public class TestR4BConfig { } @Bean - public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory, FhirContext theFhirContext) { - LocalContainerEntityManagerFactoryBean retVal = HapiEntityManagerFactoryUtil.newEntityManagerFactory(theConfigurableListableBeanFactory, theFhirContext); + public LocalContainerEntityManagerFactoryBean entityManagerFactory( + ConfigurableListableBeanFactory theConfigurableListableBeanFactory, FhirContext theFhirContext) { + 
LocalContainerEntityManagerFactoryBean retVal = HapiEntityManagerFactoryUtil.newEntityManagerFactory( + theConfigurableListableBeanFactory, theFhirContext); retVal.setPersistenceUnitName("PU_HapiFhirJpaR4B"); retVal.setDataSource(dataSource()); retVal.setJpaProperties(jpaProperties()); @@ -143,8 +144,9 @@ public class TestR4BConfig { extraProperties.put("hibernate.cache.use_minimal_puts", "false"); extraProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "lucene"); - extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), - HapiHSearchAnalysisConfigurers.HapiLuceneAnalysisConfigurer.class.getName()); + extraProperties.put( + BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), + HapiHSearchAnalysisConfigurers.HapiLuceneAnalysisConfigurer.class.getName()); extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE), "local-filesystem"); extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_ROOT), myFhirLuceneLocation); extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION), "LUCENE_CURRENT"); @@ -159,7 +161,8 @@ public class TestR4BConfig { */ @Bean @Lazy - public RequestValidatingInterceptor requestValidatingInterceptor(IInstanceValidatorModule theFhirInstanceValidator) { + public RequestValidatingInterceptor requestValidatingInterceptor( + IInstanceValidatorModule theFhirInstanceValidator) { RequestValidatingInterceptor requestValidator = new RequestValidatingInterceptor(); requestValidator.setFailOnSeverity(null); requestValidator.setAddResponseHeaderOnSeverity(null); @@ -190,6 +193,4 @@ public class TestR4BConfig { public static PropertySourcesPlaceholderConfigurer propertySourcesPlaceholderConfigurer() { return new PropertySourcesPlaceholderConfigurer(); } - - } diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestR4Config.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestR4Config.java index 68e666b6c12..f95782ccc96 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestR4Config.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestR4Config.java @@ -39,10 +39,10 @@ import org.springframework.orm.jpa.JpaTransactionManager; import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; import org.springframework.transaction.annotation.EnableTransactionManagement; -import javax.persistence.EntityManagerFactory; -import javax.sql.DataSource; import java.util.Properties; import java.util.concurrent.TimeUnit; +import javax.persistence.EntityManagerFactory; +import javax.sql.DataSource; @Configuration @Import({CommonConfig.class, JpaR4Config.class, HapiJpaConfig.class}) @@ -88,7 +88,6 @@ public class TestR4Config { return retVal; } - @Bean(name = "myPersistenceDataSourceR4") public DataSource dataSource() { BasicDataSource retVal = new BasicDataSource(); @@ -102,13 +101,12 @@ public class TestR4Config { retVal.setPassword(myDbPassword); TestR5Config.applyCommonDatasourceParams(retVal); - DataSource dataSource = ProxyDataSourceBuilder - .create(retVal) -// .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL") - .logSlowQueryBySlf4j(10000, TimeUnit.MILLISECONDS) - .afterQuery(new CurrentThreadCaptureQueriesListener()) - .countQuery() - .build(); + DataSource dataSource = ProxyDataSourceBuilder.create(retVal) + // .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL") + .logSlowQueryBySlf4j(10000, TimeUnit.MILLISECONDS) + .afterQuery(new 
CurrentThreadCaptureQueriesListener()) + .countQuery() + .build(); return dataSource; } @@ -122,8 +120,10 @@ public class TestR4Config { } @Bean - public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory, FhirContext theFhirContext) { - LocalContainerEntityManagerFactoryBean retVal = HapiEntityManagerFactoryUtil.newEntityManagerFactory(theConfigurableListableBeanFactory, theFhirContext); + public LocalContainerEntityManagerFactoryBean entityManagerFactory( + ConfigurableListableBeanFactory theConfigurableListableBeanFactory, FhirContext theFhirContext) { + LocalContainerEntityManagerFactoryBean retVal = HapiEntityManagerFactoryUtil.newEntityManagerFactory( + theConfigurableListableBeanFactory, theFhirContext); retVal.setPersistenceUnitName("PU_HapiFhirJpaR4"); retVal.setDataSource(dataSource()); retVal.setJpaProperties(jpaProperties()); @@ -147,8 +147,9 @@ public class TestR4Config { extraProperties.put("hibernate.cache.use_minimal_puts", "false"); extraProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "lucene"); - extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), - HapiHSearchAnalysisConfigurers.HapiLuceneAnalysisConfigurer.class.getName()); + extraProperties.put( + BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), + HapiHSearchAnalysisConfigurers.HapiLuceneAnalysisConfigurer.class.getName()); extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE), "local-filesystem"); extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_ROOT), myFhirLuceneLocation); extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION), "LUCENE_CURRENT"); @@ -162,7 +163,8 @@ public class TestR4Config { */ @Bean @Lazy - public RequestValidatingInterceptor requestValidatingInterceptor(IInstanceValidatorModule theFhirInstanceValidator) { + public RequestValidatingInterceptor requestValidatingInterceptor( + IInstanceValidatorModule theFhirInstanceValidator) { RequestValidatingInterceptor requestValidator = new RequestValidatingInterceptor(); requestValidator.setFailOnSeverity(null); requestValidator.setAddResponseHeaderOnSeverity(null); @@ -200,7 +202,8 @@ public class TestR4Config { } @Bean - public IIpsGeneratorSvc ipsGeneratorSvc(FhirContext theFhirContext, IIpsGenerationStrategy theGenerationStrategy, DaoRegistry theDaoRegistry) { + public IIpsGeneratorSvc ipsGeneratorSvc( + FhirContext theFhirContext, IIpsGenerationStrategy theGenerationStrategy, DaoRegistry theDaoRegistry) { return new IpsGeneratorSvcImpl(theFhirContext, theGenerationStrategy, theDaoRegistry); } @@ -208,5 +211,4 @@ public class TestR4Config { public IpsOperationProvider ipsOperationProvider(IIpsGeneratorSvc theIpsGeneratorSvc) { return new IpsOperationProvider(theIpsGeneratorSvc); } - } diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestR5Config.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestR5Config.java index a2beaf096f6..2870a925c32 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestR5Config.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestR5Config.java @@ -35,10 +35,10 @@ import org.springframework.orm.jpa.JpaTransactionManager; import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; import org.springframework.transaction.annotation.EnableTransactionManagement; 
-import javax.persistence.EntityManagerFactory; -import javax.sql.DataSource; import java.util.Properties; import java.util.concurrent.TimeUnit; +import javax.persistence.EntityManagerFactory; +import javax.sql.DataSource; @Configuration @Import({CommonConfig.class, JpaR5Config.class, HapiJpaConfig.class}) @@ -102,13 +102,12 @@ public class TestR5Config { retVal.setPassword(myDbPassword); applyCommonDatasourceParams(retVal); - DataSource dataSource = ProxyDataSourceBuilder - .create(retVal) -// .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL") - .logSlowQueryBySlf4j(10000, TimeUnit.MILLISECONDS) - .afterQuery(new CurrentThreadCaptureQueriesListener()) - .countQuery() - .build(); + DataSource dataSource = ProxyDataSourceBuilder.create(retVal) + // .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL") + .logSlowQueryBySlf4j(10000, TimeUnit.MILLISECONDS) + .afterQuery(new CurrentThreadCaptureQueriesListener()) + .countQuery() + .build(); return dataSource; } @@ -130,8 +129,10 @@ public class TestR5Config { } @Bean - public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory, FhirContext theFhirContext) { - LocalContainerEntityManagerFactoryBean retVal = HapiEntityManagerFactoryUtil.newEntityManagerFactory(theConfigurableListableBeanFactory, theFhirContext); + public LocalContainerEntityManagerFactoryBean entityManagerFactory( + ConfigurableListableBeanFactory theConfigurableListableBeanFactory, FhirContext theFhirContext) { + LocalContainerEntityManagerFactoryBean retVal = HapiEntityManagerFactoryUtil.newEntityManagerFactory( + theConfigurableListableBeanFactory, theFhirContext); retVal.setPersistenceUnitName("PU_HapiFhirJpaR5"); retVal.setDataSource(dataSource()); retVal.setJpaProperties(jpaProperties()); @@ -155,8 +156,9 @@ public class TestR5Config { extraProperties.put("hibernate.cache.use_minimal_puts", "false"); extraProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "lucene"); - extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), - HapiHSearchAnalysisConfigurers.HapiLuceneAnalysisConfigurer.class.getName()); + extraProperties.put( + BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), + HapiHSearchAnalysisConfigurers.HapiLuceneAnalysisConfigurer.class.getName()); extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE), "local-filesystem"); extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_ROOT), myFhirLuceneLocation); extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION), "LUCENE_CURRENT"); @@ -171,7 +173,8 @@ public class TestR5Config { */ @Bean @Lazy - public RequestValidatingInterceptor requestValidatingInterceptor(IInstanceValidatorModule theFhirInstanceValidator) { + public RequestValidatingInterceptor requestValidatingInterceptor( + IInstanceValidatorModule theFhirInstanceValidator) { RequestValidatingInterceptor requestValidator = new RequestValidatingInterceptor(); requestValidator.setFailOnSeverity(null); requestValidator.setAddResponseHeaderOnSeverity(null); @@ -202,6 +205,4 @@ public class TestR5Config { public static PropertySourcesPlaceholderConfigurer propertySourcesPlaceholderConfigurer() { return new PropertySourcesPlaceholderConfigurer(); } - - } diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/interceptor/AnalyticsInterceptor.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/interceptor/AnalyticsInterceptor.java 
index 44857fda6d8..65043a50226 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/interceptor/AnalyticsInterceptor.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/interceptor/AnalyticsInterceptor.java @@ -19,8 +19,6 @@ import org.apache.http.impl.client.CloseableHttpClient; import org.quartz.JobExecutionContext; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.PostConstruct; -import javax.annotation.PreDestroy; import java.io.IOException; import java.net.InetAddress; import java.net.UnknownHostException; @@ -28,6 +26,7 @@ import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.UUID; +import javax.annotation.PreDestroy; import static org.apache.commons.lang3.StringUtils.defaultIfBlank; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -91,7 +90,10 @@ public class AnalyticsInterceptor extends InterceptorAdapter implements IHasSche b.append("&tid=").append(myAnalyticsTid); b.append("&t=event"); - b.append("&an=").append(UrlUtil.escapeUrlParam(myHostname)).append('+').append(UrlUtil.escapeUrlParam(next.getApplicationName())); + b.append("&an=") + .append(UrlUtil.escapeUrlParam(myHostname)) + .append('+') + .append(UrlUtil.escapeUrlParam(next.getApplicationName())); b.append("&ec=").append(next.getResourceName()); b.append("&ea=").append(next.getRestOperation()); @@ -106,11 +108,14 @@ public class AnalyticsInterceptor extends InterceptorAdapter implements IHasSche post.setEntity(new StringEntity(contents, ContentType.APPLICATION_FORM_URLENCODED)); try (CloseableHttpResponse response = (CloseableHttpResponse) myHttpClient.execute(post)) { ourLog.trace("Analytics response: {}", response); - ourLog.info("Flushed {} analytics events and got HTTP {} {}", eventsToFlush.size(), response.getStatusLine().getStatusCode(), response.getStatusLine().getReasonPhrase()); + ourLog.info( + "Flushed {} analytics events and got HTTP {} {}", + eventsToFlush.size(), + response.getStatusLine().getStatusCode(), + response.getStatusLine().getReasonPhrase()); } catch (Exception e) { ourLog.error("Failed to submit analytics:", e); } - } private synchronized void flush() { @@ -162,7 +167,8 @@ public class AnalyticsInterceptor extends InterceptorAdapter implements IHasSche synchronized (myEventBuffer) { if (myEventBuffer.size() > myCollectThreshold) { - ourLog.warn("Not collecting analytics on request! Event buffer has {} items in it", myEventBuffer.size()); + ourLog.warn( + "Not collecting analytics on request! 
Event buffer has {} items in it", myEventBuffer.size()); } myEventBuffer.add(event); } @@ -238,6 +244,5 @@ public class AnalyticsInterceptor extends InterceptorAdapter implements IHasSche void setUserAgent(String theUserAgent) { myUserAgent = theUserAgent; } - } } diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/interceptor/PublicSecurityInterceptor.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/interceptor/PublicSecurityInterceptor.java index 1ee270691cf..7964580b7dd 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/interceptor/PublicSecurityInterceptor.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/interceptor/PublicSecurityInterceptor.java @@ -1,8 +1,8 @@ package ca.uhn.fhirtest.interceptor; import ca.uhn.fhir.i18n.Msg; -import ca.uhn.fhir.jpa.provider.BaseJpaSystemProvider; import ca.uhn.fhir.jpa.model.util.JpaConstants; +import ca.uhn.fhir.jpa.provider.BaseJpaSystemProvider; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException; import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor; @@ -25,42 +25,75 @@ public class PublicSecurityInterceptor extends AuthorizationInterceptor { String passwordsString = System.getProperty("fhir.tdlpass"); String[] passwords = passwordsString.split(","); myTokens = new HashSet<>(Arrays.asList(passwords)); - + ourLog.info("We have {} valid security tokens", myTokens.size()); } - + @Override public List buildRuleList(RequestDetails theRequestDetails) { String authHeader = theRequestDetails.getHeader("Authorization"); if (isBlank(authHeader)) { return new RuleBuilder() - .deny().operation().named(BaseJpaSystemProvider.MARK_ALL_RESOURCES_FOR_REINDEXING).onServer().andAllowAllResponses().andThen() - .deny().operation().named(JpaConstants.OPERATION_UPLOAD_EXTERNAL_CODE_SYSTEM).onServer().andAllowAllResponses().andThen() - .deny().operation().named(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD).atAnyLevel().andAllowAllResponses().andThen() - .deny().operation().named(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_REMOVE).atAnyLevel().andAllowAllResponses().andThen() - .deny().operation().named(ProviderConstants.OPERATION_EXPUNGE).onServer().andAllowAllResponses().andThen() - .deny().operation().named(ProviderConstants.OPERATION_EXPUNGE).onAnyType().andAllowAllResponses().andThen() - .deny().operation().named(ProviderConstants.OPERATION_EXPUNGE).onAnyInstance().andAllowAllResponses().andThen() - .allowAll() - .build(); + .deny() + .operation() + .named(BaseJpaSystemProvider.MARK_ALL_RESOURCES_FOR_REINDEXING) + .onServer() + .andAllowAllResponses() + .andThen() + .deny() + .operation() + .named(JpaConstants.OPERATION_UPLOAD_EXTERNAL_CODE_SYSTEM) + .onServer() + .andAllowAllResponses() + .andThen() + .deny() + .operation() + .named(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_ADD) + .atAnyLevel() + .andAllowAllResponses() + .andThen() + .deny() + .operation() + .named(JpaConstants.OPERATION_APPLY_CODESYSTEM_DELTA_REMOVE) + .atAnyLevel() + .andAllowAllResponses() + .andThen() + .deny() + .operation() + .named(ProviderConstants.OPERATION_EXPUNGE) + .onServer() + .andAllowAllResponses() + .andThen() + .deny() + .operation() + .named(ProviderConstants.OPERATION_EXPUNGE) + .onAnyType() + .andAllowAllResponses() + .andThen() + .deny() + .operation() + .named(ProviderConstants.OPERATION_EXPUNGE) + .onAnyInstance() + .andAllowAllResponses() + .andThen() + .allowAll() + .build(); } if 
(!authHeader.startsWith("Bearer ")) { - throw new ForbiddenOperationException(Msg.code(1978) + "Invalid bearer token, must be in the form \"Authorization: Bearer [token]\""); + throw new ForbiddenOperationException( + Msg.code(1978) + "Invalid bearer token, must be in the form \"Authorization: Bearer [token]\""); } - + String token = authHeader.substring("Bearer ".length()).trim(); if (!myTokens.contains(token)) { ourLog.error("Invalid token '{}' - Valid are: {}", token, myTokens); throw new ForbiddenOperationException(Msg.code(1979) + "Unknown/expired bearer token"); } - + ourLog.info("User logged in with bearer token: " + token.substring(0, 4) + "..."); - return new RuleBuilder() - .allowAll() - .build(); + return new RuleBuilder().allowAll().build(); } - } diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/interceptor/TdlSecurityInterceptor.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/interceptor/TdlSecurityInterceptor.java index 01e58ae54ee..ee3d6250815 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/interceptor/TdlSecurityInterceptor.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/interceptor/TdlSecurityInterceptor.java @@ -1,54 +1,58 @@ package ca.uhn.fhirtest.interceptor; import ca.uhn.fhir.i18n.Msg; -import static org.apache.commons.lang3.StringUtils.isBlank; - -import java.util.*; - import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException; import ca.uhn.fhir.rest.server.interceptor.auth.*; +import java.util.*; + +import static org.apache.commons.lang3.StringUtils.isBlank; + public class TdlSecurityInterceptor extends AuthorizationInterceptor { private HashSet myTokens; private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TdlSecurityInterceptor.class); - + public TdlSecurityInterceptor() { String passwordsString = System.getProperty("fhir.tdlpass"); String[] passwords = passwordsString.split(","); myTokens = new HashSet(Arrays.asList(passwords)); - + ourLog.info("We have {} valid security tokens", myTokens.size()); } - + @Override public List buildRuleList(RequestDetails theRequestDetails) { String authHeader = theRequestDetails.getHeader("Authorization"); - + if (isBlank(authHeader)) { return new RuleBuilder() - .allow().read().allResources().withAnyId().andThen() - .allow().metadata().andThen() + .allow() + .read() + .allResources() + .withAnyId() + .andThen() + .allow() + .metadata() + .andThen() .denyAll("Anonymous write access denied on this server") .build(); } - + if (!authHeader.startsWith("Bearer ")) { - throw new ForbiddenOperationException(Msg.code(1980) + "Invalid bearer token, must be in the form \"Authorization: Bearer [token]\""); + throw new ForbiddenOperationException( + Msg.code(1980) + "Invalid bearer token, must be in the form \"Authorization: Bearer [token]\""); } - + String token = authHeader.substring("Bearer ".length()).trim(); if (!myTokens.contains(token)) { ourLog.error("Invalid token '{}' - Valid are: {}", token, myTokens); throw new ForbiddenOperationException(Msg.code(1981) + "Unknown/expired bearer token"); } - + ourLog.info("User logged in with bearer token: " + token.substring(0, 4) + "..."); - return new RuleBuilder() - .allowAll() - .build(); + return new RuleBuilder().allowAll().build(); } - } diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/joke/HolyFooCowInterceptor.java 
b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/joke/HolyFooCowInterceptor.java index e9f38700dc0..2405d91b39a 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/joke/HolyFooCowInterceptor.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/joke/HolyFooCowInterceptor.java @@ -1,23 +1,25 @@ package ca.uhn.fhirtest.joke; import ca.uhn.fhir.i18n.Msg; -import static org.apache.commons.lang3.StringUtils.isNotBlank; +import ca.uhn.fhir.rest.server.exceptions.UnclassifiedServerFailureException; +import ca.uhn.fhir.rest.server.interceptor.InterceptorAdapter; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import ca.uhn.fhir.rest.server.exceptions.UnclassifiedServerFailureException; -import ca.uhn.fhir.rest.server.interceptor.InterceptorAdapter; +import static org.apache.commons.lang3.StringUtils.isNotBlank; public class HolyFooCowInterceptor extends InterceptorAdapter { @Override public boolean incomingRequestPreProcessed(HttpServletRequest theRequest, HttpServletResponse theResponse) { if (isNotBlank(theRequest.getParameter("holyfoocow"))) { - throw new UnclassifiedServerFailureException(418, Msg.code(1977) + "HTTP 418 IM A TEAPOT - Jenni, please do not hack with the server, it's very fragile today."); + throw new UnclassifiedServerFailureException( + 418, + Msg.code(1977) + + "HTTP 418 IM A TEAPOT - Jenni, please do not hack with the server, it's very fragile today."); } - + return true; } - } diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/mvc/SubscriptionPlaygroundController.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/mvc/SubscriptionPlaygroundController.java index aa3677d8aac..f8dc38ef043 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/mvc/SubscriptionPlaygroundController.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/mvc/SubscriptionPlaygroundController.java @@ -1,26 +1,26 @@ package ca.uhn.fhirtest.mvc; -import java.util.ArrayList; -import java.util.List; - -import javax.servlet.http.HttpServletRequest; - +import ca.uhn.fhir.rest.client.impl.GenericClient; +import ca.uhn.fhir.to.BaseController; +import ca.uhn.fhir.to.model.HomeRequest; import org.hl7.fhir.dstu3.model.Bundle; import org.hl7.fhir.dstu3.model.Subscription; import org.springframework.ui.ModelMap; import org.springframework.web.bind.annotation.RequestMapping; -import ca.uhn.fhir.rest.client.impl.GenericClient; -import ca.uhn.fhir.to.BaseController; -import ca.uhn.fhir.to.model.HomeRequest; +import java.util.ArrayList; +import java.util.List; +import javax.servlet.http.HttpServletRequest; @org.springframework.stereotype.Controller() public class SubscriptionPlaygroundController extends BaseController { - private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SubscriptionPlaygroundController.class); + private static final org.slf4j.Logger ourLog = + org.slf4j.LoggerFactory.getLogger(SubscriptionPlaygroundController.class); @SuppressWarnings("unchecked") - @RequestMapping(value = { "/subscriptions" }) - public String subscriptionsHome(final HttpServletRequest theServletRequest, HomeRequest theRequest, final ModelMap theModel) { + @RequestMapping(value = {"/subscriptions"}) + public String subscriptionsHome( + final HttpServletRequest theServletRequest, HomeRequest theRequest, final ModelMap theModel) { addCommonParams(theServletRequest, theRequest, theModel); theModel.put("notHome", true); @@ 
-31,26 +31,26 @@ public class SubscriptionPlaygroundController extends BaseController { CaptureInterceptor interceptor = new CaptureInterceptor(); GenericClient client = theRequest.newClient(theServletRequest, getContext(theRequest), myConfig, interceptor); - Bundle resp = (Bundle) client - .search() - .forResource(Subscription.class) -// .where(Subscription.TYPE.exactly().code(SubscriptionChannelTypeEnum.WEBSOCKET.getCode())) -// .and(Subscription.STATUS.exactly().code(SubscriptionStatusEnum.ACTIVE.getCode())) - .sort().descending(Subscription.TYPE) - .sort().ascending(Subscription.STATUS) - .returnBundle(Bundle.class) - .execute(); - + Bundle resp = (Bundle) client.search() + .forResource(Subscription.class) + // .where(Subscription.TYPE.exactly().code(SubscriptionChannelTypeEnum.WEBSOCKET.getCode())) + // .and(Subscription.STATUS.exactly().code(SubscriptionStatusEnum.ACTIVE.getCode())) + .sort() + .descending(Subscription.TYPE) + .sort() + .ascending(Subscription.STATUS) + .returnBundle(Bundle.class) + .execute(); + List subscriptions = new ArrayList(); for (Bundle.BundleEntryComponent next : resp.getEntry()) { if (next.getResource() instanceof Subscription) { subscriptions.add((Subscription) next.getResource()); } } - + theModel.put("subscriptions", subscriptions); - + return "subscriptions"; } - } diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/rp/FhirtestBaseResourceProviderDstu2.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/rp/FhirtestBaseResourceProviderDstu2.java index 72f5daf9ea1..50a7be13d49 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/rp/FhirtestBaseResourceProviderDstu2.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/rp/FhirtestBaseResourceProviderDstu2.java @@ -3,6 +3,4 @@ package ca.uhn.fhirtest.rp; import ca.uhn.fhir.jpa.provider.BaseJpaResourceProvider; import ca.uhn.fhir.model.api.IResource; -public class FhirtestBaseResourceProviderDstu2 extends BaseJpaResourceProvider { - -} +public class FhirtestBaseResourceProviderDstu2 extends BaseJpaResourceProvider {} diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IGoldenResourceMergerSvc.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IGoldenResourceMergerSvc.java index 568e3a05d76..f474428efdd 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IGoldenResourceMergerSvc.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IGoldenResourceMergerSvc.java @@ -32,5 +32,9 @@ public interface IGoldenResourceMergerSvc { * @param theToGoldenResource the golden resource we are merging to * @return updated theToGoldenResource with the merged fields and links. 
*/ - IAnyResource mergeGoldenResources(IAnyResource theFromGoldenResource, IAnyResource theManuallyMergedResource, IAnyResource theToGoldenResource, MdmTransactionContext theMdmTransactionContext); + IAnyResource mergeGoldenResources( + IAnyResource theFromGoldenResource, + IAnyResource theManuallyMergedResource, + IAnyResource theToGoldenResource, + MdmTransactionContext theMdmTransactionContext); } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmControllerSvc.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmControllerSvc.java index 2df3a2b9060..dd4574c0559 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmControllerSvc.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmControllerSvc.java @@ -28,39 +28,88 @@ import org.hl7.fhir.instance.model.api.IBaseParameters; import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.springframework.data.domain.Page; -import javax.annotation.Nullable; import java.math.BigDecimal; import java.util.List; +import javax.annotation.Nullable; public interface IMdmControllerSvc { @Deprecated - Page queryLinks(@Nullable String theGoldenResourceId, @Nullable String theSourceResourceId, @Nullable String theMatchResult, @Nullable String theLinkSource, MdmTransactionContext theMdmTransactionContext, MdmPageRequest thePageRequest); + Page queryLinks( + @Nullable String theGoldenResourceId, + @Nullable String theSourceResourceId, + @Nullable String theMatchResult, + @Nullable String theLinkSource, + MdmTransactionContext theMdmTransactionContext, + MdmPageRequest thePageRequest); @Deprecated - Page queryLinks(@Nullable String theGoldenResourceId, @Nullable String theSourceResourceId, @Nullable String theMatchResult, @Nullable String theLinkSource, MdmTransactionContext theMdmTransactionContext, MdmPageRequest thePageRequest, RequestDetails theRequestDetails); + Page queryLinks( + @Nullable String theGoldenResourceId, + @Nullable String theSourceResourceId, + @Nullable String theMatchResult, + @Nullable String theLinkSource, + MdmTransactionContext theMdmTransactionContext, + MdmPageRequest thePageRequest, + RequestDetails theRequestDetails); - Page queryLinks(MdmQuerySearchParameters theMdmQuerySearchParameters, MdmTransactionContext theMdmTransactionContext, RequestDetails theRequestDetails); + Page queryLinks( + MdmQuerySearchParameters theMdmQuerySearchParameters, + MdmTransactionContext theMdmTransactionContext, + RequestDetails theRequestDetails); @Deprecated - Page queryLinksFromPartitionList(@Nullable String theGoldenResourceId, @Nullable String theSourceResourceId, @Nullable String theMatchResult, @Nullable String theLinkSource, MdmTransactionContext theMdmTransactionContext, MdmPageRequest thePageRequest, List thePartitionIds); + Page queryLinksFromPartitionList( + @Nullable String theGoldenResourceId, + @Nullable String theSourceResourceId, + @Nullable String theMatchResult, + @Nullable String theLinkSource, + MdmTransactionContext theMdmTransactionContext, + MdmPageRequest thePageRequest, + List thePartitionIds); - Page queryLinksFromPartitionList(MdmQuerySearchParameters theMdmQuerySearchParameters, MdmTransactionContext theMdmTransactionContext); + Page queryLinksFromPartitionList( + MdmQuerySearchParameters theMdmQuerySearchParameters, MdmTransactionContext theMdmTransactionContext); - List queryLinkHistory(MdmHistorySearchParameters theMdmHistorySearchParameters, RequestDetails theRequestDetails); + List queryLinkHistory( + MdmHistorySearchParameters 
theMdmHistorySearchParameters, RequestDetails theRequestDetails); - Page getDuplicateGoldenResources(MdmTransactionContext theMdmTransactionContext, MdmPageRequest thePageRequest); + Page getDuplicateGoldenResources( + MdmTransactionContext theMdmTransactionContext, MdmPageRequest thePageRequest); - Page getDuplicateGoldenResources(MdmTransactionContext theMdmTransactionContext, MdmPageRequest thePageRequest, RequestDetails theRequestDetails, String theRequestResourceType); + Page getDuplicateGoldenResources( + MdmTransactionContext theMdmTransactionContext, + MdmPageRequest thePageRequest, + RequestDetails theRequestDetails, + String theRequestResourceType); - void notDuplicateGoldenResource(String theGoldenResourceId, String theTargetGoldenResourceId, MdmTransactionContext theMdmTransactionContext); + void notDuplicateGoldenResource( + String theGoldenResourceId, + String theTargetGoldenResourceId, + MdmTransactionContext theMdmTransactionContext); - IAnyResource mergeGoldenResources(String theFromGoldenResourceId, String theToGoldenResourceId, IAnyResource theManuallyMergedGoldenResource, MdmTransactionContext theMdmTransactionContext); + IAnyResource mergeGoldenResources( + String theFromGoldenResourceId, + String theToGoldenResourceId, + IAnyResource theManuallyMergedGoldenResource, + MdmTransactionContext theMdmTransactionContext); - IAnyResource updateLink(String theGoldenResourceId, String theSourceResourceId, String theMatchResult, MdmTransactionContext theMdmTransactionContext); + IAnyResource updateLink( + String theGoldenResourceId, + String theSourceResourceId, + String theMatchResult, + MdmTransactionContext theMdmTransactionContext); - IAnyResource createLink(String theGoldenResourceId, String theSourceResourceId, @Nullable String theMatchResult, MdmTransactionContext theMdmTransactionContext); + IAnyResource createLink( + String theGoldenResourceId, + String theSourceResourceId, + @Nullable String theMatchResult, + MdmTransactionContext theMdmTransactionContext); - IBaseParameters submitMdmClearJob(List theResourceNames, IPrimitiveType theBatchSize, ServletRequestDetails theRequestDetails); + IBaseParameters submitMdmClearJob( + List theResourceNames, + IPrimitiveType theBatchSize, + ServletRequestDetails theRequestDetails); - IBaseParameters submitMdmSubmitJob(List theUrls, IPrimitiveType theBatchSize, ServletRequestDetails theRequestDetails); + IBaseParameters submitMdmSubmitJob( + List theUrls, IPrimitiveType theBatchSize, ServletRequestDetails theRequestDetails); } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmLink.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmLink.java index efc45688b64..aaf89ec33a0 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmLink.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmLink.java @@ -65,17 +65,17 @@ public interface IMdmLink { IMdmLink setLinkSource(MdmLinkSourceEnum theLinkSource); - default boolean isAuto() { - return getLinkSource() == MdmLinkSourceEnum.AUTO; - } + default boolean isAuto() { + return getLinkSource() == MdmLinkSourceEnum.AUTO; + } - default boolean isManual() { - return getLinkSource() == MdmLinkSourceEnum.MANUAL; - } + default boolean isManual() { + return getLinkSource() == MdmLinkSourceEnum.MANUAL; + } - Date getCreated(); + Date getCreated(); - IMdmLink setCreated(Date theCreated); + IMdmLink setCreated(Date theCreated); Date getUpdated(); diff --git 
a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmLinkCreateSvc.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmLinkCreateSvc.java index 03ba6bb2991..e8825d58bb3 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmLinkCreateSvc.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmLinkCreateSvc.java @@ -22,8 +22,10 @@ package ca.uhn.fhir.mdm.api; import ca.uhn.fhir.mdm.model.MdmTransactionContext; import org.hl7.fhir.instance.model.api.IAnyResource; -import javax.annotation.Nullable; - public interface IMdmLinkCreateSvc { - IAnyResource createLink(IAnyResource theGoldenResource, IAnyResource theSourceResource, MdmMatchResultEnum theMatchResult, MdmTransactionContext theMdmContext); + IAnyResource createLink( + IAnyResource theGoldenResource, + IAnyResource theSourceResource, + MdmMatchResultEnum theMatchResult, + MdmTransactionContext theMdmContext); } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmLinkQuerySvc.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmLinkQuerySvc.java index 582b43cf1a7..482afd19e59 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmLinkQuerySvc.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmLinkQuerySvc.java @@ -31,12 +31,34 @@ import java.util.List; */ public interface IMdmLinkQuerySvc { @Deprecated - Page queryLinks(IIdType theGoldenResourceId, IIdType theSourceResourceId, MdmMatchResultEnum theMatchResult, MdmLinkSourceEnum theLinkSource, MdmTransactionContext theMdmContext, MdmPageRequest thePageRequest); - @Deprecated - Page queryLinks(IIdType theGoldenResourceId, IIdType theSourceResourceId, MdmMatchResultEnum theMatchResult, MdmLinkSourceEnum theLinkSource, MdmTransactionContext theMdmContext, MdmPageRequest thePageRequest, List thePartitionId); - Page queryLinks(MdmQuerySearchParameters theMdmQuerySearchParameters, MdmTransactionContext theMdmContext); - Page getDuplicateGoldenResources(MdmTransactionContext theMdmContext, MdmPageRequest thePageRequest); - Page getDuplicateGoldenResources(MdmTransactionContext theMdmContext, MdmPageRequest thePageRequest, List thePartitionId, String theRequestResourceType); + Page queryLinks( + IIdType theGoldenResourceId, + IIdType theSourceResourceId, + MdmMatchResultEnum theMatchResult, + MdmLinkSourceEnum theLinkSource, + MdmTransactionContext theMdmContext, + MdmPageRequest thePageRequest); - List queryLinkHistory(MdmHistorySearchParameters theMdmHistorySearchParameters); + @Deprecated + Page queryLinks( + IIdType theGoldenResourceId, + IIdType theSourceResourceId, + MdmMatchResultEnum theMatchResult, + MdmLinkSourceEnum theLinkSource, + MdmTransactionContext theMdmContext, + MdmPageRequest thePageRequest, + List thePartitionId); + + Page queryLinks( + MdmQuerySearchParameters theMdmQuerySearchParameters, MdmTransactionContext theMdmContext); + + Page getDuplicateGoldenResources(MdmTransactionContext theMdmContext, MdmPageRequest thePageRequest); + + Page getDuplicateGoldenResources( + MdmTransactionContext theMdmContext, + MdmPageRequest thePageRequest, + List thePartitionId, + String theRequestResourceType); + + List queryLinkHistory(MdmHistorySearchParameters theMdmHistorySearchParameters); } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmLinkSvc.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmLinkSvc.java index d4eee131fd2..95b56c8538b 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmLinkSvc.java +++ 
b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmLinkSvc.java @@ -37,7 +37,12 @@ public interface IMdmLinkSvc { * @param theLinkSource MANUAL or AUTO: what caused the link. * @param theMdmTransactionContext */ - void updateLink(IAnyResource theGoldenResource, IAnyResource theSourceResource, MdmMatchOutcome theMatchResult, MdmLinkSourceEnum theLinkSource, MdmTransactionContext theMdmTransactionContext); + void updateLink( + IAnyResource theGoldenResource, + IAnyResource theSourceResource, + MdmMatchOutcome theMatchResult, + MdmLinkSourceEnum theLinkSource, + MdmTransactionContext theMdmTransactionContext); /** * Delete a link between given Golden Resource and the corresponding source resource @@ -46,7 +51,10 @@ public interface IMdmLinkSvc { * @param theSourceResource * @param theMdmTransactionContext */ - void deleteLink(IAnyResource theExistingGoldenResource, IAnyResource theSourceResource, MdmTransactionContext theMdmTransactionContext); + void deleteLink( + IAnyResource theExistingGoldenResource, + IAnyResource theSourceResource, + MdmTransactionContext theMdmTransactionContext); /** * Delete all link records whose source or target points to the provided pids. diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmLinkUpdaterSvc.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmLinkUpdaterSvc.java index f13e17cad18..7f94cabc51d 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmLinkUpdaterSvc.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmLinkUpdaterSvc.java @@ -23,6 +23,12 @@ import ca.uhn.fhir.mdm.model.MdmTransactionContext; import org.hl7.fhir.instance.model.api.IAnyResource; public interface IMdmLinkUpdaterSvc { - IAnyResource updateLink(IAnyResource theGoldenResource, IAnyResource theSourceResource, MdmMatchResultEnum theMatchResult, MdmTransactionContext theMdmContext); - void notDuplicateGoldenResource(IAnyResource theGoldenResource, IAnyResource theTargetGoldenResource, MdmTransactionContext theMdmContext); + IAnyResource updateLink( + IAnyResource theGoldenResource, + IAnyResource theSourceResource, + MdmMatchResultEnum theMatchResult, + MdmTransactionContext theMdmContext); + + void notDuplicateGoldenResource( + IAnyResource theGoldenResource, IAnyResource theTargetGoldenResource, MdmTransactionContext theMdmContext); } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmMatchFinderSvc.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmMatchFinderSvc.java index cff2f655120..9d94270d192 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmMatchFinderSvc.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmMatchFinderSvc.java @@ -22,11 +22,11 @@ package ca.uhn.fhir.mdm.api; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import org.hl7.fhir.instance.model.api.IAnyResource; -import javax.annotation.Nonnull; import java.util.List; +import javax.annotation.Nonnull; public interface IMdmMatchFinderSvc { - + /** * Retrieve a list of possible target candidates for matching, based on the given {@link IAnyResource} * Internally, performs all MDM matching rules on the type of the resource. @@ -36,5 +36,6 @@ public interface IMdmMatchFinderSvc { * @return a List of {@link MatchedTarget} representing POSSIBLE_MATCH and MATCH outcomes. 
*/ @Nonnull - List getMatchedTargets(String theResourceType, IAnyResource theResource, RequestPartitionId theRequestPartitionId); + List getMatchedTargets( + String theResourceType, IAnyResource theResource, RequestPartitionId theRequestPartitionId); } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmSubmitSvc.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmSubmitSvc.java index 15efea42952..bf57cf6d50d 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmSubmitSvc.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmSubmitSvc.java @@ -46,7 +46,8 @@ public interface IMdmSubmitSvc { * @param theCriteria The FHIR search critieria for filtering the resources to be submitted for MDM processing.. * @return the number of resources submitted for MDM processing. */ - long submitSourceResourceTypeToMdm(String theSourceResourceType, String theCriteria, RequestDetails theRequestDetails); + long submitSourceResourceTypeToMdm( + String theSourceResourceType, String theCriteria, RequestDetails theRequestDetails); /** * Convenience method that calls {@link #submitSourceResourceTypeToMdm(String, String)} with the type pre-populated. @@ -85,5 +86,4 @@ public interface IMdmSubmitSvc { * @param theBufferSize */ public void setBufferSize(int theBufferSize); - } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmSurvivorshipService.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmSurvivorshipService.java index 16032e99498..a0e23fd68b8 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmSurvivorshipService.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/IMdmSurvivorshipService.java @@ -58,5 +58,6 @@ public interface IMdmSurvivorshipService { * @param theMdmTransactionContext Current transaction context * @param Resource type to apply the survivorship rules to */ - void applySurvivorshipRulesToGoldenResource(T theTargetResource, T theGoldenResource, MdmTransactionContext theMdmTransactionContext); + void applySurvivorshipRulesToGoldenResource( + T theTargetResource, T theGoldenResource, MdmTransactionContext theMdmTransactionContext); } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmConstants.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmConstants.java index 1b072d6ceb2..7d35442c202 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmConstants.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmConstants.java @@ -24,21 +24,25 @@ public class MdmConstants { /** * TAG system for Golden Resources which are managed by HAPI MDM. 
*/ - public static final String SYSTEM_MDM_MANAGED = "https://hapifhir.org/NamingSystem/managing-mdm-system"; + public static final String CODE_HAPI_MDM_MANAGED = "HAPI-MDM"; - public static final String DISPLAY_HAPI_MDM_MANAGED = "This Golden Resource can only be modified by HAPI MDM system."; + public static final String DISPLAY_HAPI_MDM_MANAGED = + "This Golden Resource can only be modified by HAPI MDM system."; public static final String CODE_NO_MDM_MANAGED = "NO-MDM"; - public static final String HAPI_ENTERPRISE_IDENTIFIER_SYSTEM = "http://hapifhir.io/fhir/NamingSystem/mdm-golden-resource-enterprise-id"; + public static final String HAPI_ENTERPRISE_IDENTIFIER_SYSTEM = + "http://hapifhir.io/fhir/NamingSystem/mdm-golden-resource-enterprise-id"; public static final String ALL_RESOURCE_SEARCH_PARAM_TYPE = "*"; - public static final String FIHR_STRUCTURE_DEF_MATCH_GRADE_URL_NAMESPACE = "http://hl7.org/fhir/StructureDefinition/match-grade"; + public static final String FIHR_STRUCTURE_DEF_MATCH_GRADE_URL_NAMESPACE = + "http://hl7.org/fhir/StructureDefinition/match-grade"; public static final String SYSTEM_GOLDEN_RECORD_STATUS = "http://hapifhir.io/fhir/NamingSystem/mdm-record-status"; public static final String CODE_GOLDEN_RECORD = "GOLDEN_RECORD"; public static final String CODE_GOLDEN_RECORD_REDIRECTED = "REDIRECTED"; public static final String DISPLAY_GOLDEN_RECORD = "Golden Record"; - public static final String DISPLAY_GOLDEN_REDIRECT = "This resource was found to be a duplicate and has been redirected."; + public static final String DISPLAY_GOLDEN_REDIRECT = + "This resource was found to be a duplicate and has been redirected."; public static final String UNKNOWN_MDM_TYPES = "Unknown Resource Types"; } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmHistorySearchParameters.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmHistorySearchParameters.java index c4a9e230a9c..af1f8949779 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmHistorySearchParameters.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmHistorySearchParameters.java @@ -25,12 +25,12 @@ import ca.uhn.fhir.rest.server.provider.ProviderConstants; import org.apache.commons.lang3.builder.ToStringBuilder; import org.hl7.fhir.instance.model.api.IIdType; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class MdmHistorySearchParameters { private List myGoldenResourceIds = new ArrayList<>(); @@ -61,7 +61,8 @@ public class MdmHistorySearchParameters { if (this == theO) return true; if (theO == null || getClass() != theO.getClass()) return false; final MdmHistorySearchParameters that = (MdmHistorySearchParameters) theO; - return Objects.equals(myGoldenResourceIds, that.myGoldenResourceIds) && Objects.equals(mySourceIds, that.mySourceIds); + return Objects.equals(myGoldenResourceIds, that.myGoldenResourceIds) + && Objects.equals(mySourceIds, that.mySourceIds); } @Override @@ -72,20 +73,21 @@ public class MdmHistorySearchParameters { @Override public String toString() { return new ToStringBuilder(this) - .append("myMdmGoldenResourceIds", myGoldenResourceIds) - .append("myMdmTargetResourceIds", mySourceIds) - .toString(); + .append("myMdmGoldenResourceIds", myGoldenResourceIds) + .append("myMdmTargetResourceIds", mySourceIds) + .toString(); } @Nonnull 
private static List extractId(List theTheGoldenResourceIds) { return theTheGoldenResourceIds.stream() - .map(MdmHistorySearchParameters::extractId) - .collect(Collectors.toUnmodifiableList()); + .map(MdmHistorySearchParameters::extractId) + .collect(Collectors.toUnmodifiableList()); } @Nullable private static IIdType extractId(String theTheGoldenResourceId) { - return MdmControllerUtil.extractGoldenResourceIdDtOrNull(ProviderConstants.MDM_QUERY_LINKS_GOLDEN_RESOURCE_ID, theTheGoldenResourceId); + return MdmControllerUtil.extractGoldenResourceIdDtOrNull( + ProviderConstants.MDM_QUERY_LINKS_GOLDEN_RESOURCE_ID, theTheGoldenResourceId); } } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmLinkEvent.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmLinkEvent.java index e7d6132429b..45951af446b 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmLinkEvent.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmLinkEvent.java @@ -20,14 +20,9 @@ package ca.uhn.fhir.mdm.api; import ca.uhn.fhir.model.api.IModelJson; -import com.fasterxml.jackson.annotation.JsonProperty; -import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.instance.model.api.IIdType; import java.util.ArrayList; -import java.util.HashSet; import java.util.List; -import java.util.Set; public class MdmLinkEvent implements IModelJson { @@ -48,8 +43,6 @@ public class MdmLinkEvent implements IModelJson { @Override public String toString() { - return "MdmLinkEvent{" + - "myMdmLinks=" + myMdmLinks + - '}'; + return "MdmLinkEvent{" + "myMdmLinks=" + myMdmLinks + '}'; } } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmLinkJson.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmLinkJson.java index b6cdf45843d..9f02f6763bf 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmLinkJson.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmLinkJson.java @@ -181,51 +181,51 @@ public class MdmLinkJson implements IModelJson { if (this == theO) return true; if (theO == null || getClass() != theO.getClass()) return false; final MdmLinkJson that = (MdmLinkJson) theO; - return Objects.equals(myGoldenResourceId, that.myGoldenResourceId) && - Objects.equals(mySourceId, that.mySourceId) && - myMatchResult == that.myMatchResult && - myLinkSource == that.myLinkSource && - Objects.equals(myCreated, that.myCreated) && - Objects.equals(myUpdated, that.myUpdated) && - Objects.equals(myVersion, that.myVersion) && - Objects.equals(myEidMatch, that.myEidMatch) && - Objects.equals(myLinkCreatedNewResource, that.myLinkCreatedNewResource) && - Objects.equals(myVector, that.myVector) && - Objects.equals(myScore, that.myScore) && - Objects.equals(myRuleCount, that.myRuleCount); + return Objects.equals(myGoldenResourceId, that.myGoldenResourceId) + && Objects.equals(mySourceId, that.mySourceId) + && myMatchResult == that.myMatchResult + && myLinkSource == that.myLinkSource + && Objects.equals(myCreated, that.myCreated) + && Objects.equals(myUpdated, that.myUpdated) + && Objects.equals(myVersion, that.myVersion) + && Objects.equals(myEidMatch, that.myEidMatch) + && Objects.equals(myLinkCreatedNewResource, that.myLinkCreatedNewResource) + && Objects.equals(myVector, that.myVector) + && Objects.equals(myScore, that.myScore) + && Objects.equals(myRuleCount, that.myRuleCount); } @Override public int hashCode() { - return Objects.hash(myGoldenResourceId, - mySourceId, - myMatchResult, - myLinkSource, - myCreated, - 
myUpdated, - myVersion, - myEidMatch, - myLinkCreatedNewResource, - myVector, - myScore, - myRuleCount); + return Objects.hash( + myGoldenResourceId, + mySourceId, + myMatchResult, + myLinkSource, + myCreated, + myUpdated, + myVersion, + myEidMatch, + myLinkCreatedNewResource, + myVector, + myScore, + myRuleCount); } @Override public String toString() { - return "MdmLinkJson{" + - "myGoldenResourceId='" + myGoldenResourceId + '\'' + - ", mySourceId='" + mySourceId + '\'' + - ", myMatchResult=" + myMatchResult + - ", myLinkSource=" + myLinkSource + - ", myCreated=" + myCreated + - ", myUpdated=" + myUpdated + - ", myVersion='" + myVersion + '\'' + - ", myEidMatch=" + myEidMatch + - ", myLinkCreatedNewResource=" + myLinkCreatedNewResource + - ", myVector=" + myVector + - ", myScore=" + myScore + - ", myRuleCount=" + myRuleCount + - '}'; + return "MdmLinkJson{" + "myGoldenResourceId='" + + myGoldenResourceId + '\'' + ", mySourceId='" + + mySourceId + '\'' + ", myMatchResult=" + + myMatchResult + ", myLinkSource=" + + myLinkSource + ", myCreated=" + + myCreated + ", myUpdated=" + + myUpdated + ", myVersion='" + + myVersion + '\'' + ", myEidMatch=" + + myEidMatch + ", myLinkCreatedNewResource=" + + myLinkCreatedNewResource + ", myVector=" + + myVector + ", myScore=" + + myScore + ", myRuleCount=" + + myRuleCount + '}'; } } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmLinkWithRevision.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmLinkWithRevision.java index 7dfea941703..a4a9d6ce403 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmLinkWithRevision.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmLinkWithRevision.java @@ -58,9 +58,8 @@ public class MdmLinkWithRevision> { @Override public String toString() { return new ToStringBuilder(this) - .append("myMdmLink", myMdmLink) - .append("myEnversRevision", myEnversRevision) - .toString(); + .append("myMdmLink", myMdmLink) + .append("myEnversRevision", myEnversRevision) + .toString(); } - } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmLinkWithRevisionJson.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmLinkWithRevisionJson.java index 6303acdb876..81e5b506b04 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmLinkWithRevisionJson.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmLinkWithRevisionJson.java @@ -60,7 +60,9 @@ public class MdmLinkWithRevisionJson implements IModelJson { if (this == theO) return true; if (theO == null || getClass() != theO.getClass()) return false; final MdmLinkWithRevisionJson that = (MdmLinkWithRevisionJson) theO; - return myMdmLink.equals(that.myMdmLink) && myRevisionNumber.equals(that.myRevisionNumber) && myRevisionTimestamp.equals(that.myRevisionTimestamp); + return myMdmLink.equals(that.myMdmLink) + && myRevisionNumber.equals(that.myRevisionNumber) + && myRevisionTimestamp.equals(that.myRevisionTimestamp); } @Override @@ -71,9 +73,9 @@ public class MdmLinkWithRevisionJson implements IModelJson { @Override public String toString() { return new ToStringBuilder(this) - .append("myMdmLink", myMdmLink) - .append("myRevisionNumber", myRevisionNumber) - .append("myRevisionTimestamp", myRevisionTimestamp) - .toString(); + .append("myMdmLink", myMdmLink) + .append("myRevisionNumber", myRevisionNumber) + .append("myRevisionTimestamp", myRevisionTimestamp) + .toString(); } } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmMatchOutcome.java 
b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmMatchOutcome.java index 7bb5c0642a6..d3eec4f0e56 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmMatchOutcome.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmMatchOutcome.java @@ -26,11 +26,18 @@ import org.apache.commons.lang3.builder.ToStringBuilder; */ public final class MdmMatchOutcome { - public static final MdmMatchOutcome POSSIBLE_DUPLICATE = new MdmMatchOutcome(null, null).setMatchResultEnum(MdmMatchResultEnum.POSSIBLE_DUPLICATE); - public static final MdmMatchOutcome NO_MATCH = new MdmMatchOutcome(null, null).setMatchResultEnum(MdmMatchResultEnum.NO_MATCH); - public static final MdmMatchOutcome NEW_GOLDEN_RESOURCE_MATCH = new MdmMatchOutcome(null, 1.0).setMatchResultEnum(MdmMatchResultEnum.MATCH).setCreatedNewResource(true); - public static final MdmMatchOutcome EID_MATCH = new MdmMatchOutcome(null, 1.0).setMatchResultEnum(MdmMatchResultEnum.MATCH).setEidMatch(true); - public static final MdmMatchOutcome POSSIBLE_MATCH = new MdmMatchOutcome(null, null).setMatchResultEnum(MdmMatchResultEnum.POSSIBLE_MATCH); + public static final MdmMatchOutcome POSSIBLE_DUPLICATE = + new MdmMatchOutcome(null, null).setMatchResultEnum(MdmMatchResultEnum.POSSIBLE_DUPLICATE); + public static final MdmMatchOutcome NO_MATCH = + new MdmMatchOutcome(null, null).setMatchResultEnum(MdmMatchResultEnum.NO_MATCH); + public static final MdmMatchOutcome NEW_GOLDEN_RESOURCE_MATCH = new MdmMatchOutcome(null, 1.0) + .setMatchResultEnum(MdmMatchResultEnum.MATCH) + .setCreatedNewResource(true); + public static final MdmMatchOutcome EID_MATCH = new MdmMatchOutcome(null, 1.0) + .setMatchResultEnum(MdmMatchResultEnum.MATCH) + .setEidMatch(true); + public static final MdmMatchOutcome POSSIBLE_MATCH = + new MdmMatchOutcome(null, null).setMatchResultEnum(MdmMatchResultEnum.POSSIBLE_MATCH); /** * A bitmap that indicates which rules matched @@ -73,7 +80,7 @@ public final class MdmMatchOutcome { } public boolean isPossibleMatch() { - return myMatchResultEnum == MdmMatchResultEnum.POSSIBLE_MATCH; + return myMatchResultEnum == MdmMatchResultEnum.POSSIBLE_MATCH; } public boolean isPossibleDuplicate() { @@ -132,9 +139,13 @@ public final class MdmMatchOutcome { return this; } - public Double getScore() { return score; } + public Double getScore() { + return score; + } - public Long getVector() { return vector; } + public Long getVector() { + return vector; + } /** * Gets normalized score that is in the range from zero to one @@ -152,11 +163,11 @@ public final class MdmMatchOutcome { @Override public String toString() { return new ToStringBuilder(this) - .append("vector", vector) - .append("score", score) - .append("myCreatedNewResource", myCreatedNewResource) - .append("myEidMatch", myEidMatch) - .append("myMatchResultEnum", myMatchResultEnum) - .toString(); + .append("vector", vector) + .append("score", score) + .append("myCreatedNewResource", myCreatedNewResource) + .append("myEidMatch", myEidMatch) + .append("myMatchResultEnum", myMatchResultEnum) + .toString(); } } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmQuerySearchParameters.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmQuerySearchParameters.java index 44075debd46..719c9ff3eb7 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmQuerySearchParameters.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/MdmQuerySearchParameters.java @@ -29,10 +29,10 @@ import 
ca.uhn.fhir.rest.server.provider.ProviderConstants; import org.apache.commons.lang3.builder.ToStringBuilder; import org.hl7.fhir.instance.model.api.IIdType; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.List; import java.util.Set; +import javax.annotation.Nullable; import static org.hibernate.internal.util.StringHelper.isBlank; @@ -50,9 +50,16 @@ public class MdmQuerySearchParameters { public static final String SCORE_NAME = "myScore"; public static final Set ourValidSortParameters = Set.of( - GOLDEN_RESOURCE_PID_NAME, SOURCE_PID_NAME, MATCH_RESULT_NAME, LINK_SOURCE_NAME, PARTITION_ID_NAME, GOLDEN_RESOURCE_NAME, - RESOURCE_TYPE_NAME, CREATED_NAME, UPDATED_NAME, SCORE_NAME - ); + GOLDEN_RESOURCE_PID_NAME, + SOURCE_PID_NAME, + MATCH_RESULT_NAME, + LINK_SOURCE_NAME, + PARTITION_ID_NAME, + GOLDEN_RESOURCE_NAME, + RESOURCE_TYPE_NAME, + CREATED_NAME, + UPDATED_NAME, + SCORE_NAME); private IIdType myGoldenResourceId; private IIdType mySourceId; @@ -71,7 +78,14 @@ public class MdmQuerySearchParameters { } @Deprecated(since = "2023.02.R01", forRemoval = true) - public MdmQuerySearchParameters(@Nullable String theGoldenResourceId, @Nullable String theSourceId, @Nullable String theMatchResult, @Nullable String theLinkSource, MdmPageRequest thePageRequest, @Nullable List thePartitionIds, @Nullable String theResourceType) { + public MdmQuerySearchParameters( + @Nullable String theGoldenResourceId, + @Nullable String theSourceId, + @Nullable String theMatchResult, + @Nullable String theLinkSource, + MdmPageRequest thePageRequest, + @Nullable List thePartitionIds, + @Nullable String theResourceType) { setGoldenResourceId(theGoldenResourceId); setSourceId(theSourceId); setLinkSource(theLinkSource); @@ -82,7 +96,14 @@ public class MdmQuerySearchParameters { } @Deprecated(since = "2023.02.R01", forRemoval = true) - public MdmQuerySearchParameters(@Nullable IIdType theGoldenResourceId, @Nullable IIdType theSourceId, @Nullable MdmMatchResultEnum theMatchResult, @Nullable MdmLinkSourceEnum theLinkSource, MdmPageRequest thePageRequest, @Nullable List thePartitionIds, @Nullable String theResourceType) { + public MdmQuerySearchParameters( + @Nullable IIdType theGoldenResourceId, + @Nullable IIdType theSourceId, + @Nullable MdmMatchResultEnum theMatchResult, + @Nullable MdmLinkSourceEnum theLinkSource, + MdmPageRequest thePageRequest, + @Nullable List thePartitionIds, + @Nullable String theResourceType) { setGoldenResourceId(theGoldenResourceId); setSourceId(theSourceId); setLinkSource(theLinkSource); @@ -102,7 +123,8 @@ public class MdmQuerySearchParameters { } public MdmQuerySearchParameters setGoldenResourceId(String theGoldenResourceId) { - myGoldenResourceId = MdmControllerUtil.extractGoldenResourceIdDtOrNull(ProviderConstants.MDM_QUERY_LINKS_GOLDEN_RESOURCE_ID, theGoldenResourceId); + myGoldenResourceId = MdmControllerUtil.extractGoldenResourceIdDtOrNull( + ProviderConstants.MDM_QUERY_LINKS_GOLDEN_RESOURCE_ID, theGoldenResourceId); return this; } @@ -116,7 +138,8 @@ public class MdmQuerySearchParameters { } public MdmQuerySearchParameters setSourceId(String theSourceId) { - mySourceId = MdmControllerUtil.extractSourceIdDtOrNull(ProviderConstants.MDM_QUERY_LINKS_RESOURCE_ID, theSourceId); + mySourceId = + MdmControllerUtil.extractSourceIdDtOrNull(ProviderConstants.MDM_QUERY_LINKS_RESOURCE_ID, theSourceId); return this; } @@ -185,12 +208,13 @@ public class MdmQuerySearchParameters { for (String param : theSortString.split(",")) { String p = (param.startsWith("-") ? 
param.substring(1) : param).trim(); - if ( ! MdmQuerySearchParameters.ourValidSortParameters.contains(p)) { - throw new InvalidRequestException(Msg.code(2233) + "Unrecognized sort parameter: " + p + ". Valid parameters are: " + - String.join(", ", MdmQuerySearchParameters.ourValidSortParameters)); + if (!MdmQuerySearchParameters.ourValidSortParameters.contains(p)) { + throw new InvalidRequestException( + Msg.code(2233) + "Unrecognized sort parameter: " + p + ". Valid parameters are: " + + String.join(", ", MdmQuerySearchParameters.ourValidSortParameters)); } SortOrderEnum order = param.startsWith("-") ? SortOrderEnum.DESC : SortOrderEnum.ASC; - mySort.add( new SortSpec(p, order) ); + mySort.add(new SortSpec(p, order)); } return this; } @@ -198,14 +222,13 @@ public class MdmQuerySearchParameters { @Override public String toString() { return new ToStringBuilder(this) - .append(GOLDEN_RESOURCE_PID_NAME, myGoldenResourceId) - .append(SOURCE_PID_NAME, mySourceId) - .append(MATCH_RESULT_NAME, myMatchResult) - .append(LINK_SOURCE_NAME, myLinkSource) - .append(PARTITION_ID_NAME, myPartitionIds) - .append(RESOURCE_TYPE_NAME, myResourceType) - .append("myPageRequest", myPageRequest) - .toString(); + .append(GOLDEN_RESOURCE_PID_NAME, myGoldenResourceId) + .append(SOURCE_PID_NAME, mySourceId) + .append(MATCH_RESULT_NAME, myMatchResult) + .append(LINK_SOURCE_NAME, myLinkSource) + .append(PARTITION_ID_NAME, myPartitionIds) + .append(RESOURCE_TYPE_NAME, myResourceType) + .append("myPageRequest", myPageRequest) + .toString(); } - } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/paging/MdmPageLinkBuilder.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/paging/MdmPageLinkBuilder.java index d9b73b5b21d..0987b08904e 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/paging/MdmPageLinkBuilder.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/paging/MdmPageLinkBuilder.java @@ -43,22 +43,31 @@ public final class MdmPageLinkBuilder { * * @return the {@link MdmPageLinkTuple} */ - public static MdmPageLinkTuple buildMdmPageLinks(ServletRequestDetails theServletRequestDetails, Page theCurrentPage, MdmPageRequest thePageRequest) { - String urlWithoutPaging = RestfulServerUtils.createLinkSelfWithoutGivenParameters(theServletRequestDetails.getFhirServerBase(), theServletRequestDetails, Arrays.asList(PARAM_OFFSET, PARAM_COUNT)); + public static MdmPageLinkTuple buildMdmPageLinks( + ServletRequestDetails theServletRequestDetails, + Page theCurrentPage, + MdmPageRequest thePageRequest) { + String urlWithoutPaging = RestfulServerUtils.createLinkSelfWithoutGivenParameters( + theServletRequestDetails.getFhirServerBase(), + theServletRequestDetails, + Arrays.asList(PARAM_OFFSET, PARAM_COUNT)); return buildMdmPageLinks(urlWithoutPaging, theCurrentPage, thePageRequest); } - public static MdmPageLinkTuple buildMdmPageLinks(String theUrlWithoutPaging, Page theCurrentPage, MdmPageRequest thePageRequest) { + public static MdmPageLinkTuple buildMdmPageLinks( + String theUrlWithoutPaging, Page theCurrentPage, MdmPageRequest thePageRequest) { MdmPageLinkTuple tuple = new MdmPageLinkTuple(); - tuple.setSelfLink(buildLinkWithOffsetAndCount(theUrlWithoutPaging, thePageRequest.getCount(), thePageRequest.getOffset())); + tuple.setSelfLink(buildLinkWithOffsetAndCount( + theUrlWithoutPaging, thePageRequest.getCount(), thePageRequest.getOffset())); if (theCurrentPage.hasNext()) { - tuple.setNextLink(buildLinkWithOffsetAndCount(theUrlWithoutPaging,thePageRequest.getCount(), 
thePageRequest.getNextOffset())); + tuple.setNextLink(buildLinkWithOffsetAndCount( + theUrlWithoutPaging, thePageRequest.getCount(), thePageRequest.getNextOffset())); } if (theCurrentPage.hasPrevious()) { - tuple.setPreviousLink(buildLinkWithOffsetAndCount(theUrlWithoutPaging,thePageRequest.getCount(), thePageRequest.getPreviousOffset())); + tuple.setPreviousLink(buildLinkWithOffsetAndCount( + theUrlWithoutPaging, thePageRequest.getCount(), thePageRequest.getPreviousOffset())); } return tuple; - } public static String buildLinkWithOffsetAndCount(String theBaseUrl, int theCount, int theOffset) { diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/paging/MdmPageRequest.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/paging/MdmPageRequest.java index 7700ee743ae..64c5667c3d9 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/paging/MdmPageRequest.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/api/paging/MdmPageRequest.java @@ -20,19 +20,15 @@ package ca.uhn.fhir.mdm.api.paging; import ca.uhn.fhir.i18n.Msg; -import ca.uhn.fhir.rest.server.IPagingProvider; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import org.apache.commons.lang3.StringUtils; -import org.hl7.fhir.dstu3.model.UnsignedIntType; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import org.slf4j.Logger; import org.springframework.data.domain.PageRequest; import javax.annotation.Nullable; import static ca.uhn.fhir.rest.api.Constants.PARAM_COUNT; import static ca.uhn.fhir.rest.api.Constants.PARAM_OFFSET; -import static org.slf4j.LoggerFactory.getLogger; /** * This class is essentially just a data clump of offset + count, as well as the ability to convert itself into a standard @@ -45,7 +41,11 @@ public class MdmPageRequest { private final int myOffset; private final int myCount; - public MdmPageRequest(@Nullable IPrimitiveType theOffset, @Nullable IPrimitiveType theCount, int theDefaultPageSize, int theMaximumPageSize) { + public MdmPageRequest( + @Nullable IPrimitiveType theOffset, + @Nullable IPrimitiveType theCount, + int theDefaultPageSize, + int theMaximumPageSize) { myOffset = theOffset == null ? 0 : theOffset.getValue(); myCount = theCount == null ? theDefaultPageSize : Math.min(theCount.getValue(), theMaximumPageSize); validatePagingParameters(myOffset, myCount); @@ -53,7 +53,8 @@ public class MdmPageRequest { this.myPage = myOffset / myCount; } - public MdmPageRequest(@Nullable Integer theOffset, @Nullable Integer theCount, int theDefaultPageSize, int theMaximumPageSize) { + public MdmPageRequest( + @Nullable Integer theOffset, @Nullable Integer theCount, int theDefaultPageSize, int theMaximumPageSize) { myOffset = theOffset == null ? 0 : theOffset; myCount = theCount == null ? theDefaultPageSize : Math.min(theCount, theMaximumPageSize); validatePagingParameters(myOffset, myCount); @@ -67,7 +68,7 @@ public class MdmPageRequest { if (theOffset < 0) { errorMessage += PARAM_OFFSET + " must be greater than or equal to 0. 
"; } - if (theCount <= 0 ) { + if (theCount <= 0) { errorMessage += PARAM_COUNT + " must be greater than 0."; } if (StringUtils.isNotEmpty(errorMessage)) { diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/dao/IMdmLinkDao.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/dao/IMdmLinkDao.java index 0eaec7d02bf..cdd69b50b95 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/dao/IMdmLinkDao.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/dao/IMdmLinkDao.java @@ -35,7 +35,6 @@ import org.springframework.data.domain.Example; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.data.history.Revisions; -import org.springframework.data.history.Revision; import java.util.Date; import java.util.List; @@ -46,15 +45,18 @@ public interface IMdmLinkDao

<P extends IResourcePersistentId, M extends IMdmLink<P>> {
-	List<MdmPidTuple<P>> expandPidsFromGroupPidGivenMatchResult(P theGroupPid, MdmMatchResultEnum theMdmMatchResultEnum);
+	List<MdmPidTuple<P>> expandPidsFromGroupPidGivenMatchResult(
+			P theGroupPid, MdmMatchResultEnum theMdmMatchResultEnum);
 
 	List<MdmPidTuple<P>> expandPidsBySourcePidAndMatchResult(P theSourcePid, MdmMatchResultEnum theMdmMatchResultEnum);
 
-	List<MdmPidTuple<P>> expandPidsByGoldenResourcePidAndMatchResult(P theSourcePid, MdmMatchResultEnum theMdmMatchResultEnum);
+	List<MdmPidTuple<P>> expandPidsByGoldenResourcePidAndMatchResult(
+			P theSourcePid, MdmMatchResultEnum theMdmMatchResultEnum);
 
 	List<P> findPidByResourceNameAndThreshold(String theResourceName, Date theHighThreshold, Pageable thePageable);
 
-	List<P> findPidByResourceNameAndThresholdAndPartitionId(String theResourceName, Date theHighThreshold, List<Integer> thePartitionIds, Pageable thePageable);
+	List<P> findPidByResourceNameAndThresholdAndPartitionId(
+			String theResourceName, Date theHighThreshold, List<Integer> thePartitionIds, Pageable thePageable);
 
 	List<M> findAllById(List<P> thePids);
@@ -80,9 +82,16 @@ public interface IMdmLinkDao<P extends IResourcePersistentId, M extends IMdmLink<P>> {
-	Page<M> search(IIdType theGoldenResourceId, IIdType theSourceId, MdmMatchResultEnum theMatchResult, MdmLinkSourceEnum theLinkSource, MdmPageRequest thePageRequest, List<Integer> thePartitionId);
+	Page<M> search(
+			IIdType theGoldenResourceId,
+			IIdType theSourceId,
+			MdmMatchResultEnum theMatchResult,
+			MdmLinkSourceEnum theLinkSource,
+			MdmPageRequest thePageRequest,
+			List<Integer> thePartitionId);
 
 	Page<M> search(MdmQuerySearchParameters theMdmQuerySearchParameters);
 
+	Optional<M> findBySourcePidAndMatchResult(P theSourcePid, MdmMatchResultEnum theMatch);
 
 	void deleteLinksWithAnyReferenceToPids(List<P>
    theResourcePersistentIds); diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/interceptor/IMdmStorageInterceptor.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/interceptor/IMdmStorageInterceptor.java index 6fbf5ddd67e..474ebbe86f5 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/interceptor/IMdmStorageInterceptor.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/interceptor/IMdmStorageInterceptor.java @@ -19,5 +19,4 @@ */ package ca.uhn.fhir.mdm.interceptor; -public interface IMdmStorageInterceptor { -} +public interface IMdmStorageInterceptor {} diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/interceptor/MdmSearchExpandingInterceptor.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/interceptor/MdmSearchExpandingInterceptor.java index 49ab460b48c..81e780b4300 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/interceptor/MdmSearchExpandingInterceptor.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/interceptor/MdmSearchExpandingInterceptor.java @@ -23,8 +23,8 @@ import ca.uhn.fhir.interceptor.api.Hook; import ca.uhn.fhir.interceptor.api.Interceptor; import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; -import ca.uhn.fhir.mdm.api.IMdmLinkExpandSvc; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; +import ca.uhn.fhir.mdm.api.IMdmLinkExpandSvc; import ca.uhn.fhir.mdm.log.Logs; import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.model.primitive.IdDt; @@ -84,22 +84,24 @@ public class MdmSearchExpandingInterceptor { ReferenceParam refParam = (ReferenceParam) iQueryParameterType; if (refParam.isMdmExpand()) { ourLog.debug("Found a reference parameter to expand: {}", refParam); - //First, attempt to expand as a source resource. - Set expandedResourceIds = myMdmLinkExpandSvc.expandMdmBySourceResourceId(new IdDt(refParam.getValue())); + // First, attempt to expand as a source resource. + Set expandedResourceIds = + myMdmLinkExpandSvc.expandMdmBySourceResourceId(new IdDt(refParam.getValue())); // If we failed, attempt to expand as a golden resource if (expandedResourceIds.isEmpty()) { - expandedResourceIds = myMdmLinkExpandSvc.expandMdmByGoldenResourceId(new IdDt(refParam.getValue())); + expandedResourceIds = + myMdmLinkExpandSvc.expandMdmByGoldenResourceId(new IdDt(refParam.getValue())); } - //Rebuild the search param list. + // Rebuild the search param list. 
if (!expandedResourceIds.isEmpty()) { ourLog.debug("Parameter has been expanded to: {}", String.join(", ", expandedResourceIds)); toRemove.add(refParam); expandedResourceIds.stream() - .map(resourceId -> addResourceTypeIfNecessary(refParam.getResourceType(), resourceId)) - .map(ReferenceParam::new) - .forEach(toAdd::add); + .map(resourceId -> addResourceTypeIfNecessary(refParam.getResourceType(), resourceId)) + .map(ReferenceParam::new) + .forEach(toAdd::add); } } } else if (theParamName.equalsIgnoreCase("_id")) { @@ -127,9 +129,10 @@ public class MdmSearchExpandingInterceptor { * @param theAddList * @param theRemoveList */ - private void expandIdParameter(IQueryParameterType theIdParameter, - List theAddList, - List theRemoveList) { + private void expandIdParameter( + IQueryParameterType theIdParameter, + List theAddList, + List theRemoveList) { // id parameters can either be StringParam (for $everything operation) // or TokenParam (for searches) // either case, we want to expand it out and grab all related resources @@ -148,8 +151,9 @@ public class MdmSearchExpandingInterceptor { if (id == null) { // in case the _id paramter type is different from the above - ourLog.warn("_id parameter of incorrect type. Expected StringParam or TokenParam, but got {}. No expansion will be done!", - theIdParameter.getClass().getSimpleName()); + ourLog.warn( + "_id parameter of incorrect type. Expected StringParam or TokenParam, but got {}. No expansion will be done!", + theIdParameter.getClass().getSimpleName()); } else if (mdmExpand) { ourLog.debug("_id parameter must be expanded out from: {}", id.getValue()); @@ -159,7 +163,7 @@ public class MdmSearchExpandingInterceptor { expandedResourceIds = myMdmLinkExpandSvc.expandMdmByGoldenResourceId((IdDt) id); } - //Rebuild + // Rebuild if (!expandedResourceIds.isEmpty()) { ourLog.debug("_id parameter has been expanded to: {}", String.join(", ", expandedResourceIds)); @@ -167,9 +171,7 @@ public class MdmSearchExpandingInterceptor { theRemoveList.add(theIdParameter); // add in all the linked values - expandedResourceIds.stream() - .map(creator::create) - .forEach(theAddList::add); + expandedResourceIds.stream().map(creator::create).forEach(theAddList::add); } } // else - no expansion required diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/interceptor/MdmStorageInterceptor.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/interceptor/MdmStorageInterceptor.java index ba129b916b3..9157a3be021 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/interceptor/MdmStorageInterceptor.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/interceptor/MdmStorageInterceptor.java @@ -55,25 +55,35 @@ public class MdmStorageInterceptor implements IMdmStorageInterceptor { @Autowired private IExpungeEverythingService myExpungeEverythingService; + @Autowired private MdmLinkDeleteSvc myMdmLinkDeleteSvc; + @Autowired private FhirContext myFhirContext; + @Autowired private EIDHelper myEIDHelper; + @Autowired private IMdmSettings myMdmSettings; - @Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED) - public void blockManualResourceManipulationOnCreate(IBaseResource theBaseResource, RequestDetails theRequestDetails, ServletRequestDetails theServletRequestDetails) { - ourLog.debug("Starting pre-storage resource created hook for {}, {}, {}", theBaseResource, theRequestDetails, theServletRequestDetails); + public void blockManualResourceManipulationOnCreate( + IBaseResource theBaseResource, + RequestDetails theRequestDetails, + 
ServletRequestDetails theServletRequestDetails) { + ourLog.debug( + "Starting pre-storage resource created hook for {}, {}, {}", + theBaseResource, + theRequestDetails, + theServletRequestDetails); if (theBaseResource == null) { ourLog.warn("Attempting to block golden resource manipulation on a null resource"); return; } - //If running in single EID mode, forbid multiple eids. + // If running in single EID mode, forbid multiple eids. if (myMdmSettings.isPreventMultipleEids()) { ourLog.debug("Forbidding multiple EIDs on ", theBaseResource); forbidIfHasMultipleEids(theBaseResource); @@ -89,22 +99,33 @@ public class MdmStorageInterceptor implements IMdmStorageInterceptor { } @Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED) - public void blockManualGoldenResourceManipulationOnUpdate(IBaseResource theOldResource, IBaseResource theUpdatedResource, RequestDetails theRequestDetails, ServletRequestDetails theServletRequestDetails) { - ourLog.debug("Starting pre-storage resource updated hook for {}, {}, {}, {}", theOldResource, theUpdatedResource, theRequestDetails, theServletRequestDetails); + public void blockManualGoldenResourceManipulationOnUpdate( + IBaseResource theOldResource, + IBaseResource theUpdatedResource, + RequestDetails theRequestDetails, + ServletRequestDetails theServletRequestDetails) { + ourLog.debug( + "Starting pre-storage resource updated hook for {}, {}, {}, {}", + theOldResource, + theUpdatedResource, + theRequestDetails, + theServletRequestDetails); if (theUpdatedResource == null) { ourLog.warn("Attempting to block golden resource manipulation on a null resource"); return; } - //If running in single EID mode, forbid multiple eids. + // If running in single EID mode, forbid multiple eids. if (myMdmSettings.isPreventMultipleEids()) { ourLog.debug("Forbidding multiple EIDs on ", theUpdatedResource); forbidIfHasMultipleEids(theUpdatedResource); } if (MdmResourceUtil.isGoldenRecordRedirected(theUpdatedResource)) { - ourLog.debug("Deleting MDM links to deactivated Golden resource {}", theUpdatedResource.getIdElement().toUnqualifiedVersionless()); + ourLog.debug( + "Deleting MDM links to deactivated Golden resource {}", + theUpdatedResource.getIdElement().toUnqualifiedVersionless()); int deleted = myMdmLinkDeleteSvc.deleteNonRedirectWithAnyReferenceTo(theUpdatedResource); if (deleted > 0) { ourLog.debug("Deleted {} MDM links", deleted); @@ -157,7 +178,8 @@ public class MdmStorageInterceptor implements IMdmStorageInterceptor { } private void throwBlockEidChange() { - throw new ForbiddenOperationException(Msg.code(763) + "While running with EID updates disabled, EIDs may not be updated on source resources"); + throw new ForbiddenOperationException( + Msg.code(763) + "While running with EID updates disabled, EIDs may not be updated on source resources"); } /* @@ -200,15 +222,19 @@ public class MdmStorageInterceptor implements IMdmStorageInterceptor { } private void throwBlockMdmManagedTagChange() { - throw new ForbiddenOperationException(Msg.code(764) + "The " + MdmConstants.CODE_HAPI_MDM_MANAGED + " tag on a resource may not be changed once created."); + throw new ForbiddenOperationException(Msg.code(764) + "The " + MdmConstants.CODE_HAPI_MDM_MANAGED + + " tag on a resource may not be changed once created."); } private void throwModificationBlockedByMdm() { - throw new ForbiddenOperationException(Msg.code(765) + "Cannot create or modify Resources that are managed by MDM. 
This resource contains a tag with one of these systems: " + MdmConstants.SYSTEM_GOLDEN_RECORD_STATUS + " or " + MdmConstants.SYSTEM_MDM_MANAGED); + throw new ForbiddenOperationException(Msg.code(765) + + "Cannot create or modify Resources that are managed by MDM. This resource contains a tag with one of these systems: " + + MdmConstants.SYSTEM_GOLDEN_RECORD_STATUS + " or " + MdmConstants.SYSTEM_MDM_MANAGED); } private void throwBlockMultipleEids() { - throw new ForbiddenOperationException(Msg.code(766) + "While running with multiple EIDs disabled, source resources may have at most one EID."); + throw new ForbiddenOperationException(Msg.code(766) + + "While running with multiple EIDs disabled, source resources may have at most one EID."); } private String extractResourceType(IBaseResource theResource) { @@ -234,5 +260,4 @@ public class MdmStorageInterceptor implements IMdmStorageInterceptor { public static void resetLinksDeletedBeforehand() { ourLinksDeletedBeforehand.remove(); } - } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/model/CanonicalEID.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/model/CanonicalEID.java index e076ab7c214..07f7858bc3a 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/model/CanonicalEID.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/model/CanonicalEID.java @@ -36,7 +36,7 @@ public class CanonicalEID { private String myUse; private String myValue; - public CanonicalEID(String theSystem, String theValue, String theUse){ + public CanonicalEID(String theSystem, String theValue, String theUse) { mySystem = theSystem; myUse = theUse; myValue = theValue; @@ -63,36 +63,34 @@ public class CanonicalEID { * Patient.identifier.where(system='test-system').value * */ - private static String buildEidFhirPath(FhirContext theFhirContext, String theEidSystem, IBaseResource theBaseResource) { - return theFhirContext.getResourceType(theBaseResource) - + ".identifier.where(system='" - + theEidSystem - + "')"; + private static String buildEidFhirPath( + FhirContext theFhirContext, String theEidSystem, IBaseResource theBaseResource) { + return theFhirContext.getResourceType(theBaseResource) + ".identifier.where(system='" + theEidSystem + "')"; } public Identifier toR4() { return new Identifier() - .setUse(Identifier.IdentifierUse.fromCode(myUse)) - .setSystem(mySystem) - .setValue(myValue); + .setUse(Identifier.IdentifierUse.fromCode(myUse)) + .setSystem(mySystem) + .setValue(myValue); } - public org.hl7.fhir.dstu3.model.Identifier toDSTU3(){ + public org.hl7.fhir.dstu3.model.Identifier toDSTU3() { return new org.hl7.fhir.dstu3.model.Identifier() - .setUse(org.hl7.fhir.dstu3.model.Identifier.IdentifierUse.fromCode(myUse)) - .setSystem(mySystem) - .setValue(myValue); + .setUse(org.hl7.fhir.dstu3.model.Identifier.IdentifierUse.fromCode(myUse)) + .setSystem(mySystem) + .setValue(myValue); } - public String getSystem() { + public String getSystem() { return mySystem; } - public String getUse() { + public String getUse() { return myUse; } - public String getValue() { + public String getValue() { return myValue; } @@ -113,7 +111,6 @@ public class CanonicalEID { return mySystem + '|' + myValue; } - /** * A Factory method to generate a {@link CanonicalEID} object from an incoming resource. * @@ -123,14 +120,13 @@ public class CanonicalEID { * * @return an optional {@link CanonicalEID} object, representing a resource identifier that matched the given eidSystem. 
*/ - public static List extractFromResource(FhirContext theFhirContext, String theEidSystem, IBaseResource theBaseResource) { + public static List extractFromResource( + FhirContext theFhirContext, String theEidSystem, IBaseResource theBaseResource) { IFhirPath fhirPath = theFhirContext.newFhirPath(); String eidPath = buildEidFhirPath(theFhirContext, theEidSystem, theBaseResource); List evaluate = fhirPath.evaluate(theBaseResource, eidPath, IBase.class); - return evaluate.stream() - .map(ibase -> new CanonicalEID(fhirPath, ibase)) - .collect(Collectors.toList()); + return evaluate.stream().map(ibase -> new CanonicalEID(fhirPath, ibase)).collect(Collectors.toList()); } @Override @@ -138,10 +134,10 @@ public class CanonicalEID { if (!(o instanceof CanonicalEID)) { return false; } - CanonicalEID otherEid = (CanonicalEID)o; + CanonicalEID otherEid = (CanonicalEID) o; return Objects.equals(otherEid.getSystem(), this.getSystem()) - && Objects.equals(otherEid.getValue(), this.getValue()) - && Objects.equals(otherEid.getUse(), this.getUse()); + && Objects.equals(otherEid.getValue(), this.getValue()) + && Objects.equals(otherEid.getUse(), this.getUse()); } @Override diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/model/MdmPidTuple.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/model/MdmPidTuple.java index 904dd102077..eadbc00c85e 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/model/MdmPidTuple.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/model/MdmPidTuple.java @@ -34,11 +34,11 @@ public class MdmPidTuple { return new MdmPidTuple<>(theGoldenPid, theSourcePid); } - public T getGoldenPid(){ + public T getGoldenPid() { return myGoldenPid; } - public T getSourcePid(){ + public T getSourcePid() { return mySourcePid; } } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/model/MdmTransactionContext.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/model/MdmTransactionContext.java index 7bc6e2321f1..a2ca1b94930 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/model/MdmTransactionContext.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/model/MdmTransactionContext.java @@ -20,7 +20,6 @@ package ca.uhn.fhir.mdm.model; import ca.uhn.fhir.mdm.api.IMdmLink; -import ca.uhn.fhir.mdm.api.MdmLinkEvent; import ca.uhn.fhir.rest.server.TransactionLogMessages; import java.util.ArrayList; @@ -56,8 +55,7 @@ public class MdmTransactionContext { return myTransactionLogMessages; } - public MdmTransactionContext() { - } + public MdmTransactionContext() {} public MdmTransactionContext(OperationType theRestOperation) { myRestOperation = theRestOperation; @@ -68,7 +66,8 @@ public class MdmTransactionContext { myTransactionLogMessages = theTransactionLogMessages; } - public MdmTransactionContext(TransactionLogMessages theTransactionLogMessages, OperationType theRestOperation, String theResourceType) { + public MdmTransactionContext( + TransactionLogMessages theTransactionLogMessages, OperationType theRestOperation, String theResourceType) { this(theTransactionLogMessages, theRestOperation); setResourceType(theResourceType); } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/BaseMdmProvider.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/BaseMdmProvider.java index b878cf8ad69..9bcb4193148 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/BaseMdmProvider.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/BaseMdmProvider.java @@ -39,15 
+39,14 @@ import org.hl7.fhir.instance.model.api.IBaseParameters; import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.springframework.data.domain.Page; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.List; -import java.util.Collection; import java.util.Objects; import java.util.stream.Collectors; - +import javax.annotation.Nonnull; +import javax.annotation.Nullable; public abstract class BaseMdmProvider { @@ -57,11 +56,13 @@ public abstract class BaseMdmProvider { myFhirContext = theFhirContext; } - protected void validateMergeParameters(IPrimitiveType theFromGoldenResourceId, IPrimitiveType theToGoldenResourceId) { + protected void validateMergeParameters( + IPrimitiveType theFromGoldenResourceId, IPrimitiveType theToGoldenResourceId) { validateNotNull(ProviderConstants.MDM_MERGE_GR_FROM_GOLDEN_RESOURCE_ID, theFromGoldenResourceId); validateNotNull(ProviderConstants.MDM_MERGE_GR_TO_GOLDEN_RESOURCE_ID, theToGoldenResourceId); if (theFromGoldenResourceId.getValue().equals(theToGoldenResourceId.getValue())) { - throw new InvalidRequestException(Msg.code(1493) + "fromGoldenResourceId must be different from toGoldenResourceId"); + throw new InvalidRequestException( + Msg.code(1493) + "fromGoldenResourceId must be different from toGoldenResourceId"); } } @@ -71,17 +72,30 @@ public abstract class BaseMdmProvider { } } - protected void validateMdmLinkHistoryParameters(List> theGoldenResourceIds, List> theSourceIds) { - validateBothCannotBeNullOrEmpty(ProviderConstants.MDM_QUERY_LINKS_GOLDEN_RESOURCE_ID, theGoldenResourceIds, ProviderConstants.MDM_QUERY_LINKS_RESOURCE_ID, theSourceIds); + protected void validateMdmLinkHistoryParameters( + List> theGoldenResourceIds, List> theSourceIds) { + validateBothCannotBeNullOrEmpty( + ProviderConstants.MDM_QUERY_LINKS_GOLDEN_RESOURCE_ID, + theGoldenResourceIds, + ProviderConstants.MDM_QUERY_LINKS_RESOURCE_ID, + theSourceIds); } - private void validateBothCannotBeNullOrEmpty(String theFirstName, List> theFirstList, String theSecondName, List> theSecondList) { + private void validateBothCannotBeNullOrEmpty( + String theFirstName, + List> theFirstList, + String theSecondName, + List> theSecondList) { if ((theFirstList == null || theFirstList.isEmpty()) && (theSecondList == null || theSecondList.isEmpty())) { - throw new InvalidRequestException(Msg.code(2292) + "Please include either ["+theFirstName+"]s, ["+theSecondName+"]s, or both in your search inputs."); + throw new InvalidRequestException(Msg.code(2292) + "Please include either [" + theFirstName + "]s, [" + + theSecondName + "]s, or both in your search inputs."); } } - protected void validateUpdateLinkParameters(IPrimitiveType theGoldenResourceId, IPrimitiveType theResourceId, IPrimitiveType theMatchResult) { + protected void validateUpdateLinkParameters( + IPrimitiveType theGoldenResourceId, + IPrimitiveType theResourceId, + IPrimitiveType theMatchResult) { validateNotNull(ProviderConstants.MDM_UPDATE_LINK_GOLDEN_RESOURCE_ID, theGoldenResourceId); validateNotNull(ProviderConstants.MDM_UPDATE_LINK_RESOURCE_ID, theResourceId); validateNotNull(ProviderConstants.MDM_UPDATE_LINK_MATCH_RESULT, theMatchResult); @@ -91,17 +105,22 @@ public abstract class BaseMdmProvider { case MATCH: break; default: - throw new InvalidRequestException(Msg.code(1495) + ProviderConstants.MDM_UPDATE_LINK + " illegal " + ProviderConstants.MDM_UPDATE_LINK_MATCH_RESULT + - " value '" + matchResult + 
"'. Must be " + MdmMatchResultEnum.NO_MATCH + " or " + MdmMatchResultEnum.MATCH); + throw new InvalidRequestException(Msg.code(1495) + ProviderConstants.MDM_UPDATE_LINK + " illegal " + + ProviderConstants.MDM_UPDATE_LINK_MATCH_RESULT + " value '" + matchResult + "'. Must be " + + MdmMatchResultEnum.NO_MATCH + " or " + MdmMatchResultEnum.MATCH); } } - protected void validateNotDuplicateParameters(IPrimitiveType theGoldenResourceId, IPrimitiveType theResourceId) { + protected void validateNotDuplicateParameters( + IPrimitiveType theGoldenResourceId, IPrimitiveType theResourceId) { validateNotNull(ProviderConstants.MDM_UPDATE_LINK_GOLDEN_RESOURCE_ID, theGoldenResourceId); validateNotNull(ProviderConstants.MDM_UPDATE_LINK_RESOURCE_ID, theResourceId); } - protected void validateCreateLinkParameters(IPrimitiveType theGoldenResourceId, IPrimitiveType theResourceId, @Nullable IPrimitiveType theMatchResult) { + protected void validateCreateLinkParameters( + IPrimitiveType theGoldenResourceId, + IPrimitiveType theResourceId, + @Nullable IPrimitiveType theMatchResult) { validateNotNull(ProviderConstants.MDM_CREATE_LINK_GOLDEN_RESOURCE_ID, theGoldenResourceId); validateNotNull(ProviderConstants.MDM_CREATE_LINK_RESOURCE_ID, theResourceId); if (theMatchResult != null) { @@ -112,31 +131,39 @@ public abstract class BaseMdmProvider { case MATCH: break; default: - throw new InvalidRequestException(Msg.code(1496) + ProviderConstants.MDM_CREATE_LINK + " illegal " + ProviderConstants.MDM_CREATE_LINK_MATCH_RESULT + - " value '" + matchResult + "'. Must be " + MdmMatchResultEnum.NO_MATCH + ", " + MdmMatchResultEnum.MATCH + " or " + MdmMatchResultEnum.POSSIBLE_MATCH); + throw new InvalidRequestException(Msg.code(1496) + ProviderConstants.MDM_CREATE_LINK + " illegal " + + ProviderConstants.MDM_CREATE_LINK_MATCH_RESULT + " value '" + matchResult + "'. 
Must be " + + MdmMatchResultEnum.NO_MATCH + ", " + MdmMatchResultEnum.MATCH + " or " + + MdmMatchResultEnum.POSSIBLE_MATCH); } } } - protected MdmTransactionContext createMdmContext(RequestDetails theRequestDetails, MdmTransactionContext.OperationType theOperationType, String theResourceType) { - TransactionLogMessages transactionLogMessages = TransactionLogMessages.createFromTransactionGuid(theRequestDetails.getTransactionGuid()); - MdmTransactionContext mdmTransactionContext = new MdmTransactionContext(transactionLogMessages, theOperationType); + protected MdmTransactionContext createMdmContext( + RequestDetails theRequestDetails, + MdmTransactionContext.OperationType theOperationType, + String theResourceType) { + TransactionLogMessages transactionLogMessages = + TransactionLogMessages.createFromTransactionGuid(theRequestDetails.getTransactionGuid()); + MdmTransactionContext mdmTransactionContext = + new MdmTransactionContext(transactionLogMessages, theOperationType); mdmTransactionContext.setResourceType(theResourceType); return mdmTransactionContext; } @Nonnull - protected List convertToStringsIncludingCommaDelimitedIfNotNull(List> thePrimitiveTypeStrings) { + protected List convertToStringsIncludingCommaDelimitedIfNotNull( + List> thePrimitiveTypeStrings) { if (thePrimitiveTypeStrings == null) { return Collections.emptyList(); } return thePrimitiveTypeStrings.stream() - .map(this::extractStringOrNull) - .filter(Objects::nonNull) - .map(input -> Arrays.asList(input.split(","))) - .flatMap(Collection::stream) - .collect(Collectors.toUnmodifiableList()); + .map(this::extractStringOrNull) + .filter(Objects::nonNull) + .map(input -> Arrays.asList(input.split(","))) + .flatMap(Collection::stream) + .collect(Collectors.toUnmodifiableList()); } protected String extractStringOrNull(IPrimitiveType theString) { @@ -146,8 +173,11 @@ public abstract class BaseMdmProvider { return theString.getValue(); } - protected IBaseParameters parametersFromMdmLinks(Page theMdmLinkStream, boolean includeResultAndSource, - ServletRequestDetails theServletRequestDetails, MdmPageRequest thePageRequest) { + protected IBaseParameters parametersFromMdmLinks( + Page theMdmLinkStream, + boolean includeResultAndSource, + ServletRequestDetails theServletRequestDetails, + MdmPageRequest thePageRequest) { IBaseParameters retval = ParametersUtil.newInstance(myFhirContext); addPagingParameters(retval, theMdmLinkStream, theServletRequestDetails, thePageRequest); theMdmLinkStream.getContent().forEach(mdmLink -> { @@ -156,22 +186,37 @@ public abstract class BaseMdmProvider { ParametersUtil.addPartString(myFhirContext, resultPart, "sourceResourceId", mdmLink.getSourceId()); if (includeResultAndSource) { - ParametersUtil.addPartString(myFhirContext, resultPart, "matchResult", mdmLink.getMatchResult().name()); - ParametersUtil.addPartString(myFhirContext, resultPart, "linkSource", mdmLink.getLinkSource().name()); + ParametersUtil.addPartString( + myFhirContext, + resultPart, + "matchResult", + mdmLink.getMatchResult().name()); + ParametersUtil.addPartString( + myFhirContext, + resultPart, + "linkSource", + mdmLink.getLinkSource().name()); ParametersUtil.addPartBoolean(myFhirContext, resultPart, "eidMatch", mdmLink.getEidMatch()); - ParametersUtil.addPartBoolean(myFhirContext, resultPart, "hadToCreateNewResource", mdmLink.getLinkCreatedNewResource()); + ParametersUtil.addPartBoolean( + myFhirContext, resultPart, "hadToCreateNewResource", mdmLink.getLinkCreatedNewResource()); ParametersUtil.addPartDecimal(myFhirContext, 
resultPart, "score", mdmLink.getScore()); - ParametersUtil.addPartDecimal(myFhirContext, resultPart, "linkCreated", (double) mdmLink.getCreated().getTime()); - ParametersUtil.addPartDecimal(myFhirContext, resultPart, "linkUpdated", (double) mdmLink.getUpdated().getTime()); + ParametersUtil.addPartDecimal(myFhirContext, resultPart, "linkCreated", (double) + mdmLink.getCreated().getTime()); + ParametersUtil.addPartDecimal(myFhirContext, resultPart, "linkUpdated", (double) + mdmLink.getUpdated().getTime()); } }); return retval; } - protected void parametersFromMdmLinkRevisions(IBaseParameters theRetVal, List theMdmLinkRevisions) { - if (theMdmLinkRevisions.isEmpty()) { - final IBase resultPart = ParametersUtil.addParameterToParameters(myFhirContext, theRetVal, "historical links not found for query parameters"); - ParametersUtil.addPartString(myFhirContext, resultPart, "theResults", "historical links not found for query parameters"); + protected void parametersFromMdmLinkRevisions( + IBaseParameters theRetVal, List theMdmLinkRevisions) { + if (theMdmLinkRevisions.isEmpty()) { + final IBase resultPart = ParametersUtil.addParameterToParameters( + myFhirContext, theRetVal, "historical links not found for query parameters"); + + ParametersUtil.addPartString( + myFhirContext, resultPart, "theResults", "historical links not found for query parameters"); } theMdmLinkRevisions.forEach(mdmLinkRevision -> parametersFromMdmLinkRevision(theRetVal, mdmLinkRevision)); @@ -182,29 +227,55 @@ public abstract class BaseMdmProvider { final MdmLinkJson mdmLink = mdmLinkRevision.getMdmLink(); ParametersUtil.addPartString(myFhirContext, resultPart, "goldenResourceId", mdmLink.getGoldenResourceId()); - ParametersUtil.addPartString(myFhirContext, resultPart, "revisionTimestamp", mdmLinkRevision.getRevisionTimestamp().toString()); + ParametersUtil.addPartString( + myFhirContext, + resultPart, + "revisionTimestamp", + mdmLinkRevision.getRevisionTimestamp().toString()); ParametersUtil.addPartString(myFhirContext, resultPart, "sourceResourceId", mdmLink.getSourceId()); - ParametersUtil.addPartString(myFhirContext, resultPart, "matchResult", mdmLink.getMatchResult().name()); + ParametersUtil.addPartString( + myFhirContext, + resultPart, + "matchResult", + mdmLink.getMatchResult().name()); ParametersUtil.addPartDecimal(myFhirContext, resultPart, "score", mdmLink.getScore()); - ParametersUtil.addPartString(myFhirContext, resultPart, "linkSource", mdmLink.getLinkSource().name()); + ParametersUtil.addPartString( + myFhirContext, resultPart, "linkSource", mdmLink.getLinkSource().name()); ParametersUtil.addPartBoolean(myFhirContext, resultPart, "eidMatch", mdmLink.getEidMatch()); - ParametersUtil.addPartBoolean(myFhirContext, resultPart, "hadToCreateNewResource", mdmLink.getLinkCreatedNewResource()); + ParametersUtil.addPartBoolean( + myFhirContext, resultPart, "hadToCreateNewResource", mdmLink.getLinkCreatedNewResource()); ParametersUtil.addPartDecimal(myFhirContext, resultPart, "score", mdmLink.getScore()); - ParametersUtil.addPartDecimal(myFhirContext, resultPart, "linkCreated", (double) mdmLink.getCreated().getTime()); - ParametersUtil.addPartDecimal(myFhirContext, resultPart, "linkUpdated", (double) mdmLink.getUpdated().getTime()); + ParametersUtil.addPartDecimal(myFhirContext, resultPart, "linkCreated", (double) + mdmLink.getCreated().getTime()); + ParametersUtil.addPartDecimal(myFhirContext, resultPart, "linkUpdated", (double) + mdmLink.getUpdated().getTime()); } - protected void addPagingParameters(IBaseParameters 
theParameters, Page theCurrentPage, ServletRequestDetails theServletRequestDetails, MdmPageRequest thePageRequest) { - MdmPageLinkTuple mdmPageLinkTuple = MdmPageLinkBuilder.buildMdmPageLinks(theServletRequestDetails, theCurrentPage, thePageRequest); + protected void addPagingParameters( + IBaseParameters theParameters, + Page theCurrentPage, + ServletRequestDetails theServletRequestDetails, + MdmPageRequest thePageRequest) { + MdmPageLinkTuple mdmPageLinkTuple = + MdmPageLinkBuilder.buildMdmPageLinks(theServletRequestDetails, theCurrentPage, thePageRequest); if (mdmPageLinkTuple.getPreviousLink().isPresent()) { - ParametersUtil.addParameterToParametersUri(myFhirContext, theParameters, "prev", mdmPageLinkTuple.getPreviousLink().get()); + ParametersUtil.addParameterToParametersUri( + myFhirContext, + theParameters, + "prev", + mdmPageLinkTuple.getPreviousLink().get()); } - ParametersUtil.addParameterToParametersUri(myFhirContext, theParameters, "self", mdmPageLinkTuple.getSelfLink()); + ParametersUtil.addParameterToParametersUri( + myFhirContext, theParameters, "self", mdmPageLinkTuple.getSelfLink()); if (mdmPageLinkTuple.getNextLink().isPresent()) { - ParametersUtil.addParameterToParametersUri(myFhirContext, theParameters, "next", mdmPageLinkTuple.getNextLink().get()); + ParametersUtil.addParameterToParametersUri( + myFhirContext, + theParameters, + "next", + mdmPageLinkTuple.getNextLink().get()); } } } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmControllerHelper.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmControllerHelper.java index 2849ae94408..03c67a66f50 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmControllerHelper.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmControllerHelper.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.mdm.provider; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; @@ -48,12 +48,12 @@ import org.hl7.fhir.instance.model.api.IIdType; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; -import javax.annotation.Nonnull; import java.math.BigDecimal; import java.util.Comparator; import java.util.Date; import java.util.List; import java.util.UUID; +import javax.annotation.Nonnull; @Service public class MdmControllerHelper { @@ -66,12 +66,13 @@ public class MdmControllerHelper { private final IRequestPartitionHelperSvc myRequestPartitionHelperSvc; @Autowired - public MdmControllerHelper(FhirContext theFhirContext, - IResourceLoader theResourceLoader, - IMdmMatchFinderSvc theMdmMatchFinderSvc, - IMdmSettings theMdmSettings, - MessageHelper theMessageHelper, - IRequestPartitionHelperSvc theRequestPartitionHelperSvc) { + public MdmControllerHelper( + FhirContext theFhirContext, + IResourceLoader theResourceLoader, + IMdmMatchFinderSvc theMdmMatchFinderSvc, + IMdmSettings theMdmSettings, + MessageHelper theMessageHelper, + IRequestPartitionHelperSvc theRequestPartitionHelperSvc) { myFhirContext = theFhirContext; myResourceLoader = theResourceLoader; myMdmSettings = theMdmSettings; @@ -83,7 +84,8 @@ public class MdmControllerHelper { public void validateSameVersion(IAnyResource theResource, String theResourceId) { String storedId = theResource.getIdElement().getValue(); if 
(hasVersionIdPart(theResourceId) && !storedId.equals(theResourceId)) { - throw new ResourceVersionConflictException(Msg.code(1501) + "Requested resource " + theResourceId + " is not the latest version. Latest version is " + storedId); + throw new ResourceVersionConflictException(Msg.code(1501) + "Requested resource " + theResourceId + + " is not the latest version. Latest version is " + storedId); } } @@ -97,18 +99,20 @@ public class MdmControllerHelper { if (MdmResourceUtil.isGoldenRecord(iAnyResource)) { return iAnyResource; } else { - throw new InvalidRequestException(Msg.code(1502) + myMessageHelper.getMessageForFailedGoldenResourceLoad(theParamName, theGoldenResourceId)); + throw new InvalidRequestException(Msg.code(1502) + + myMessageHelper.getMessageForFailedGoldenResourceLoad(theParamName, theGoldenResourceId)); } } - public IAnyResource getLatestSourceFromIdOrThrowException(String theParamName, String theSourceId) { IIdType sourceId = MdmControllerUtil.getSourceIdDtOrThrowException(theParamName, theSourceId); return loadResource(sourceId.toUnqualifiedVersionless()); } protected IAnyResource loadResource(IIdType theResourceId) { - Class resourceClass = myFhirContext.getResourceDefinition(theResourceId.getResourceType()).getImplementingClass(); + Class resourceClass = myFhirContext + .getResourceDefinition(theResourceId.getResourceType()) + .getImplementingClass(); return (IAnyResource) myResourceLoader.load(resourceClass, theResourceId); } @@ -124,8 +128,8 @@ public class MdmControllerHelper { public void validateIsMdmManaged(String theName, IAnyResource theResource) { String resourceType = myFhirContext.getResourceType(theResource); if (!myMdmSettings.isSupportedMdmType(resourceType)) { - throw new InvalidRequestException(Msg.code(1503) + myMessageHelper.getMessageForUnsupportedResource(theName, resourceType) - ); + throw new InvalidRequestException( + Msg.code(1503) + myMessageHelper.getMessageForUnsupportedResource(theName, resourceType)); } if (!MdmResourceUtil.isMdmManaged(theResource)) { @@ -136,16 +140,22 @@ public class MdmControllerHelper { /** * Helper method which will return a bundle of all Matches and Possible Matches. 
*/ - public IBaseBundle getMatchesAndPossibleMatchesForResource(IAnyResource theResource, String theResourceType, RequestDetails theRequestDetails) { + public IBaseBundle getMatchesAndPossibleMatchesForResource( + IAnyResource theResource, String theResourceType, RequestDetails theRequestDetails) { RequestPartitionId requestPartitionId; - ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forSearchType(theResourceType, null, null); + ReadPartitionIdRequestDetails details = + ReadPartitionIdRequestDetails.forSearchType(theResourceType, null, null); if (myMdmSettings.getSearchAllPartitionForMatch()) { requestPartitionId = RequestPartitionId.allPartitions(); } else { - requestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, details); + requestPartitionId = + myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, details); } - List matches = myMdmMatchFinderSvc.getMatchedTargets(theResourceType, theResource, requestPartitionId); - matches.sort(Comparator.comparing((MatchedTarget m) -> m.getMatchResult().getNormalizedScore()).reversed()); + List matches = + myMdmMatchFinderSvc.getMatchedTargets(theResourceType, theResource, requestPartitionId); + matches.sort( + Comparator.comparing((MatchedTarget m) -> m.getMatchResult().getNormalizedScore()) + .reversed()); BundleBuilder builder = new BundleBuilder(myFhirContext); builder.setBundleField("type", "searchset"); @@ -168,14 +178,15 @@ public class MdmControllerHelper { return retVal; } - public IBaseBackboneElement toBundleEntrySearchComponent(BundleBuilder theBuilder, IBaseBackboneElement theSearch, MatchedTarget theMatchedTarget) { + public IBaseBackboneElement toBundleEntrySearchComponent( + BundleBuilder theBuilder, IBaseBackboneElement theSearch, MatchedTarget theMatchedTarget) { theBuilder.setSearchField(theSearch, "mode", "match"); double score = theMatchedTarget.getMatchResult().getNormalizedScore(); - theBuilder.setSearchField(theSearch, "score", - theBuilder.newPrimitive("decimal", BigDecimal.valueOf(score))); + theBuilder.setSearchField(theSearch, "score", theBuilder.newPrimitive("decimal", BigDecimal.valueOf(score))); String matchGrade = getMatchGrade(theMatchedTarget); - IBaseDatatype codeType = (IBaseDatatype) myFhirContext.getElementDefinition("code").newInstance(matchGrade); + IBaseDatatype codeType = + (IBaseDatatype) myFhirContext.getElementDefinition("code").newInstance(matchGrade); IBaseExtension searchExtension = theSearch.addExtension(); searchExtension.setUrl(MdmConstants.FIHR_STRUCTURE_DEF_MATCH_GRADE_URL_NAMESPACE); searchExtension.setValue(codeType); diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmControllerUtil.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmControllerUtil.java index 42141c6b20d..eae45d6ed6c 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmControllerUtil.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmControllerUtil.java @@ -58,7 +58,8 @@ public class MdmControllerUtil { static IdDt getGoldenIdDtOrThrowException(String theParamName, String theId) { IdDt goldenResourceId = new IdDt(theId); if (goldenResourceId.getIdPart() == null) { - throw new InvalidRequestException(Msg.code(1505) + theParamName + " is '" + theId + "'. must have form / where is the id of the resource"); + throw new InvalidRequestException(Msg.code(1505) + theParamName + " is '" + theId + + "'. 
must have form / where is the id of the resource"); } return goldenResourceId; } @@ -66,7 +67,9 @@ public class MdmControllerUtil { public static IIdType getSourceIdDtOrThrowException(String theParamName, String theSourceId) { IdDt sourceId = new IdDt(theSourceId); if (sourceId.getIdPart() == null) { - throw new InvalidRequestException(Msg.code(1506) + theParamName + " is '" + theSourceId + "'. must have form / where is the id of the resource and is the type of the resource"); + throw new InvalidRequestException( + Msg.code(1506) + theParamName + " is '" + theSourceId + + "'. must have form / where is the id of the resource and is the type of the resource"); } return sourceId; } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmLinkHistoryProviderDstu3Plus.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmLinkHistoryProviderDstu3Plus.java index d8121176a55..ecaaa40421e 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmLinkHistoryProviderDstu3Plus.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmLinkHistoryProviderDstu3Plus.java @@ -48,21 +48,34 @@ public class MdmLinkHistoryProviderDstu3Plus extends BaseMdmProvider { } @Operation(name = ProviderConstants.MDM_LINK_HISTORY, idempotent = true) - public IBaseParameters historyLinks(@OperationParam(name = ProviderConstants.MDM_QUERY_LINKS_GOLDEN_RESOURCE_ID, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> theMdmGoldenResourceIds, - @OperationParam(name = ProviderConstants.MDM_QUERY_LINKS_RESOURCE_ID, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> theResourceIds, - ServletRequestDetails theRequestDetails) { + public IBaseParameters historyLinks( + @OperationParam( + name = ProviderConstants.MDM_QUERY_LINKS_GOLDEN_RESOURCE_ID, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "string") + List> theMdmGoldenResourceIds, + @OperationParam( + name = ProviderConstants.MDM_QUERY_LINKS_RESOURCE_ID, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "string") + List> theResourceIds, + ServletRequestDetails theRequestDetails) { validateMdmLinkHistoryParameters(theMdmGoldenResourceIds, theResourceIds); - final List goldenResourceIdsToUse = convertToStringsIncludingCommaDelimitedIfNotNull(theMdmGoldenResourceIds); + final List goldenResourceIdsToUse = + convertToStringsIncludingCommaDelimitedIfNotNull(theMdmGoldenResourceIds); final List resourceIdsToUse = convertToStringsIncludingCommaDelimitedIfNotNull(theResourceIds); final IBaseParameters retVal = ParametersUtil.newInstance(myFhirContext); final MdmHistorySearchParameters mdmHistorySearchParameters = new MdmHistorySearchParameters() - .setGoldenResourceIds(goldenResourceIdsToUse) - .setSourceIds(resourceIdsToUse); + .setGoldenResourceIds(goldenResourceIdsToUse) + .setSourceIds(resourceIdsToUse); - final List mdmLinkRevisionsFromSvc = myMdmControllerSvc.queryLinkHistory(mdmHistorySearchParameters, theRequestDetails); + final List mdmLinkRevisionsFromSvc = + myMdmControllerSvc.queryLinkHistory(mdmHistorySearchParameters, theRequestDetails); parametersFromMdmLinkRevisions(retVal, mdmLinkRevisionsFromSvc); diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmProviderDstu3Plus.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmProviderDstu3Plus.java index 46c8f7d522d..b75e130ed2a 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmProviderDstu3Plus.java +++ 
b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmProviderDstu3Plus.java @@ -25,9 +25,7 @@ import ca.uhn.fhir.mdm.api.IMdmControllerSvc; import ca.uhn.fhir.mdm.api.IMdmSettings; import ca.uhn.fhir.mdm.api.IMdmSubmitSvc; import ca.uhn.fhir.mdm.api.MdmConstants; -import ca.uhn.fhir.mdm.api.MdmHistorySearchParameters; import ca.uhn.fhir.mdm.api.MdmLinkJson; -import ca.uhn.fhir.mdm.api.MdmLinkWithRevisionJson; import ca.uhn.fhir.mdm.api.MdmQuerySearchParameters; import ca.uhn.fhir.mdm.api.paging.MdmPageRequest; import ca.uhn.fhir.mdm.model.MdmTransactionContext; @@ -38,10 +36,8 @@ import ca.uhn.fhir.rest.annotation.OperationParam; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; -import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException; import ca.uhn.fhir.rest.server.provider.ProviderConstants; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; -import ca.uhn.fhir.system.HapiSystemProperties; import ca.uhn.fhir.util.ParametersUtil; import org.apache.commons.lang3.StringUtils; import org.hl7.fhir.instance.model.api.IAnyResource; @@ -53,12 +49,12 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.slf4j.Logger; import org.springframework.data.domain.Page; -import javax.annotation.Nonnull; import java.math.BigDecimal; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import static ca.uhn.fhir.rest.api.Constants.PARAM_OFFSET; import static org.slf4j.LoggerFactory.getLogger; @@ -80,12 +76,12 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider { * Note that this is not a spring bean. Any necessary injections should * happen in the constructor */ - public MdmProviderDstu3Plus(FhirContext theFhirContext, - IMdmControllerSvc theMdmControllerSvc, - MdmControllerHelper theMdmHelper, - IMdmSubmitSvc theMdmSubmitSvc, - IMdmSettings theIMdmSettings - ) { + public MdmProviderDstu3Plus( + FhirContext theFhirContext, + IMdmControllerSvc theMdmControllerSvc, + MdmControllerHelper theMdmHelper, + IMdmSubmitSvc theMdmSubmitSvc, + IMdmSettings theIMdmSettings) { super(theFhirContext); myMdmControllerSvc = theMdmControllerSvc; myMdmControllerHelper = theMdmHelper; @@ -94,8 +90,10 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider { } @Operation(name = ProviderConstants.EMPI_MATCH, typeName = "Patient") - public IBaseBundle match(@OperationParam(name = ProviderConstants.MDM_MATCH_RESOURCE, min = 1, max = 1, typeName = "Patient") IAnyResource thePatient, - RequestDetails theRequestDetails) { + public IBaseBundle match( + @OperationParam(name = ProviderConstants.MDM_MATCH_RESOURCE, min = 1, max = 1, typeName = "Patient") + IAnyResource thePatient, + RequestDetails theRequestDetails) { if (thePatient == null) { throw new InvalidRequestException(Msg.code(1498) + "resource may not be null"); } @@ -103,65 +101,111 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider { } @Operation(name = ProviderConstants.MDM_MATCH) - public IBaseBundle serverMatch(@OperationParam(name = ProviderConstants.MDM_MATCH_RESOURCE, min = 1, max = 1) IAnyResource theResource, - @OperationParam(name = ProviderConstants.MDM_RESOURCE_TYPE, min = 1, max = 1, typeName = "string") IPrimitiveType theResourceType, - RequestDetails theRequestDetails - ) { + public IBaseBundle serverMatch( + @OperationParam(name = ProviderConstants.MDM_MATCH_RESOURCE, min = 1, max = 1) 
IAnyResource theResource,
+            @OperationParam(name = ProviderConstants.MDM_RESOURCE_TYPE, min = 1, max = 1, typeName = "string")
+                    IPrimitiveType theResourceType,
+            RequestDetails theRequestDetails) {
         if (theResource == null) {
             throw new InvalidRequestException(Msg.code(1499) + "resource may not be null");
         }
-        return myMdmControllerHelper.getMatchesAndPossibleMatchesForResource(theResource, theResourceType.getValueAsString(), theRequestDetails);
+        return myMdmControllerHelper.getMatchesAndPossibleMatchesForResource(
+                theResource, theResourceType.getValueAsString(), theRequestDetails);
     }
     @Operation(name = ProviderConstants.MDM_MERGE_GOLDEN_RESOURCES)
-    public IBaseResource mergeGoldenResources(@OperationParam(name = ProviderConstants.MDM_MERGE_GR_FROM_GOLDEN_RESOURCE_ID, min = 1, max = 1, typeName = "string") IPrimitiveType theFromGoldenResourceId,
-            @OperationParam(name = ProviderConstants.MDM_MERGE_GR_TO_GOLDEN_RESOURCE_ID, min = 1, max = 1, typeName = "string") IPrimitiveType theToGoldenResourceId,
-            @OperationParam(name = ProviderConstants.MDM_MERGE_RESOURCE, max = 1) IAnyResource theMergedResource,
-            RequestDetails theRequestDetails) {
+    public IBaseResource mergeGoldenResources(
+            @OperationParam(
+                            name = ProviderConstants.MDM_MERGE_GR_FROM_GOLDEN_RESOURCE_ID,
+                            min = 1,
+                            max = 1,
+                            typeName = "string")
+                    IPrimitiveType theFromGoldenResourceId,
+            @OperationParam(
+                            name = ProviderConstants.MDM_MERGE_GR_TO_GOLDEN_RESOURCE_ID,
+                            min = 1,
+                            max = 1,
+                            typeName = "string")
+                    IPrimitiveType theToGoldenResourceId,
+            @OperationParam(name = ProviderConstants.MDM_MERGE_RESOURCE, max = 1) IAnyResource theMergedResource,
+            RequestDetails theRequestDetails) {
         validateMergeParameters(theFromGoldenResourceId, theToGoldenResourceId);
-        MdmTransactionContext.OperationType operationType = (theMergedResource == null) ?
-            MdmTransactionContext.OperationType.MERGE_GOLDEN_RESOURCES : MdmTransactionContext.OperationType.MANUAL_MERGE_GOLDEN_RESOURCES;
-        MdmTransactionContext txContext = createMdmContext(theRequestDetails, operationType,
-            getResourceType(ProviderConstants.MDM_MERGE_GR_FROM_GOLDEN_RESOURCE_ID, theFromGoldenResourceId));
-        return myMdmControllerSvc.mergeGoldenResources(theFromGoldenResourceId.getValueAsString(), theToGoldenResourceId.getValueAsString(), theMergedResource, txContext);
+        MdmTransactionContext.OperationType operationType = (theMergedResource == null)
+                ? MdmTransactionContext.OperationType.MERGE_GOLDEN_RESOURCES
+                : MdmTransactionContext.OperationType.MANUAL_MERGE_GOLDEN_RESOURCES;
+        MdmTransactionContext txContext = createMdmContext(
+                theRequestDetails,
+                operationType,
+                getResourceType(ProviderConstants.MDM_MERGE_GR_FROM_GOLDEN_RESOURCE_ID, theFromGoldenResourceId));
+        return myMdmControllerSvc.mergeGoldenResources(
+                theFromGoldenResourceId.getValueAsString(),
+                theToGoldenResourceId.getValueAsString(),
+                theMergedResource,
+                txContext);
     }
     @Operation(name = ProviderConstants.MDM_UPDATE_LINK)
-    public IBaseResource updateLink(@OperationParam(name = ProviderConstants.MDM_UPDATE_LINK_GOLDEN_RESOURCE_ID, min = 1, max = 1) IPrimitiveType theGoldenResourceId,
-            @OperationParam(name = ProviderConstants.MDM_UPDATE_LINK_RESOURCE_ID, min = 1, max = 1) IPrimitiveType theResourceId,
-            @OperationParam(name = ProviderConstants.MDM_UPDATE_LINK_MATCH_RESULT, min = 1, max = 1) IPrimitiveType theMatchResult,
-            ServletRequestDetails theRequestDetails) {
+    public IBaseResource updateLink(
+            @OperationParam(name = ProviderConstants.MDM_UPDATE_LINK_GOLDEN_RESOURCE_ID, min = 1, max = 1)
+                    IPrimitiveType theGoldenResourceId,
+            @OperationParam(name = ProviderConstants.MDM_UPDATE_LINK_RESOURCE_ID, min = 1, max = 1)
+                    IPrimitiveType theResourceId,
+            @OperationParam(name = ProviderConstants.MDM_UPDATE_LINK_MATCH_RESULT, min = 1, max = 1)
+                    IPrimitiveType theMatchResult,
+            ServletRequestDetails theRequestDetails) {
         validateUpdateLinkParameters(theGoldenResourceId, theResourceId, theMatchResult);
-        return myMdmControllerSvc.updateLink(theGoldenResourceId.getValueAsString(), theResourceId.getValue(),
-            theMatchResult.getValue(), createMdmContext(theRequestDetails, MdmTransactionContext.OperationType.UPDATE_LINK,
-                getResourceType(ProviderConstants.MDM_UPDATE_LINK_GOLDEN_RESOURCE_ID, theGoldenResourceId))
-        );
+        return myMdmControllerSvc.updateLink(
+                theGoldenResourceId.getValueAsString(),
+                theResourceId.getValue(),
+                theMatchResult.getValue(),
+                createMdmContext(
+                        theRequestDetails,
+                        MdmTransactionContext.OperationType.UPDATE_LINK,
+                        getResourceType(ProviderConstants.MDM_UPDATE_LINK_GOLDEN_RESOURCE_ID, theGoldenResourceId)));
     }
     @Operation(name = ProviderConstants.MDM_CREATE_LINK)
-    public IBaseResource createLink(@OperationParam(name = ProviderConstants.MDM_CREATE_LINK_GOLDEN_RESOURCE_ID, min = 1, max = 1) IPrimitiveType theGoldenResourceId,
-            @OperationParam(name = ProviderConstants.MDM_CREATE_LINK_RESOURCE_ID, min = 1, max = 1) IPrimitiveType theResourceId,
-            @OperationParam(name = ProviderConstants.MDM_CREATE_LINK_MATCH_RESULT, min = 0, max = 1) IPrimitiveType theMatchResult,
-            ServletRequestDetails theRequestDetails) {
+    public IBaseResource createLink(
+            @OperationParam(name = ProviderConstants.MDM_CREATE_LINK_GOLDEN_RESOURCE_ID, min = 1, max = 1)
+                    IPrimitiveType theGoldenResourceId,
+            @OperationParam(name = ProviderConstants.MDM_CREATE_LINK_RESOURCE_ID, min = 1, max = 1)
+                    IPrimitiveType theResourceId,
+            @OperationParam(name = ProviderConstants.MDM_CREATE_LINK_MATCH_RESULT, min = 0, max = 1)
+                    IPrimitiveType theMatchResult,
+            ServletRequestDetails theRequestDetails) {
         validateCreateLinkParameters(theGoldenResourceId, theResourceId, theMatchResult);
-        return myMdmControllerSvc.createLink(theGoldenResourceId.getValueAsString(), theResourceId.getValue(), extractStringOrNull(theMatchResult),
-            createMdmContext(theRequestDetails, MdmTransactionContext.OperationType.CREATE_LINK,
-                getResourceType(ProviderConstants.MDM_CREATE_LINK_GOLDEN_RESOURCE_ID, theGoldenResourceId))
-        );
+        return myMdmControllerSvc.createLink(
+                theGoldenResourceId.getValueAsString(),
+                theResourceId.getValue(),
+                extractStringOrNull(theMatchResult),
+                createMdmContext(
+                        theRequestDetails,
+                        MdmTransactionContext.OperationType.CREATE_LINK,
+                        getResourceType(ProviderConstants.MDM_CREATE_LINK_GOLDEN_RESOURCE_ID, theGoldenResourceId)));
     }
-    @Operation(name = ProviderConstants.OPERATION_MDM_CLEAR, returnParameters = {
-        @OperationParam(name = ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, typeName = "decimal")
-    })
-    public IBaseParameters clearMdmLinks(@OperationParam(name = ProviderConstants.OPERATION_MDM_CLEAR_RESOURCE_NAME, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> theResourceNames,
-            @OperationParam(name = ProviderConstants.OPERATION_MDM_BATCH_SIZE, typeName = "decimal", min = 0, max = 1) IPrimitiveType theBatchSize,
-            ServletRequestDetails theRequestDetails) {
+    @Operation(
+            name = ProviderConstants.OPERATION_MDM_CLEAR,
+            returnParameters = {
+                @OperationParam(name = ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, typeName = "decimal")
+            })
+    public IBaseParameters clearMdmLinks(
+            @OperationParam(
+                            name = ProviderConstants.OPERATION_MDM_CLEAR_RESOURCE_NAME,
+                            min = 0,
+                            max = OperationParam.MAX_UNLIMITED,
+                            typeName = "string")
+                    List> theResourceNames,
+            @OperationParam(name = ProviderConstants.OPERATION_MDM_BATCH_SIZE, typeName = "decimal", min = 0, max = 1)
+                    IPrimitiveType theBatchSize,
+            ServletRequestDetails theRequestDetails) {
         List resourceNames = new ArrayList<>();
         if (theResourceNames != null) {
-            resourceNames.addAll(theResourceNames.stream().map(IPrimitiveType::getValue).collect(Collectors.toList()));
+            resourceNames.addAll(
+                    theResourceNames.stream().map(IPrimitiveType::getValue).collect(Collectors.toList()));
             validateResourceNames(resourceNames);
         } else {
             resourceNames.addAll(myMdmSettings.getMdmRules().getMdmTypes());
         }
@@ -173,36 +217,52 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider {
     private void validateResourceNames(List theResourceNames) {
         for (String resourceName : theResourceNames) {
             if (!myMdmSettings.isSupportedMdmType(resourceName)) {
-                throw new InvalidRequestException(Msg.code(1500) + ProviderConstants.OPERATION_MDM_CLEAR + " does not support resource type: " + resourceName);
+                throw new InvalidRequestException(Msg.code(1500) + ProviderConstants.OPERATION_MDM_CLEAR
+                        + " does not support resource type: " + resourceName);
             }
         }
     }
     // Is a set of the OR sufficient ot the contenxt she's investigating?
     @Operation(name = ProviderConstants.MDM_QUERY_LINKS, idempotent = true)
-    public IBaseParameters queryLinks(@OperationParam(name = ProviderConstants.MDM_QUERY_LINKS_GOLDEN_RESOURCE_ID, min = 0, max = 1, typeName = "string") IPrimitiveType theGoldenResourceId,
-            @OperationParam(name = ProviderConstants.MDM_QUERY_LINKS_RESOURCE_ID, min = 0, max = 1, typeName = "string") IPrimitiveType theResourceId,
-            @OperationParam(name = ProviderConstants.MDM_QUERY_LINKS_MATCH_RESULT, min = 0, max = 1, typeName = "string") IPrimitiveType theMatchResult,
-            @OperationParam(name = ProviderConstants.MDM_QUERY_LINKS_LINK_SOURCE, min = 0, max = 1, typeName = "string")
-                IPrimitiveType theLinkSource,
-
-            @Description(value = "Results from this method are returned across multiple pages. This parameter controls the offset when fetching a page.")
-            @OperationParam(name = PARAM_OFFSET, min = 0, max = 1, typeName = "integer")
-                IPrimitiveType theOffset,
-
-            @Description(value = "Results from this method are returned across multiple pages. This parameter controls the size of those pages.")
-            @OperationParam(name = Constants.PARAM_COUNT, min = 0, max = 1, typeName = "integer")
-                IPrimitiveType theCount,
-
-            @OperationParam(name = Constants.PARAM_SORT, min = 0, max = 1, typeName = "string")
-                IPrimitiveType theSort,
-
-            ServletRequestDetails theRequestDetails,
-            @OperationParam(name = ProviderConstants.MDM_RESOURCE_TYPE, min = 0, max = 1, typeName = "string") IPrimitiveType theResourceType
-    ) {
+    public IBaseParameters queryLinks(
+            @OperationParam(
+                            name = ProviderConstants.MDM_QUERY_LINKS_GOLDEN_RESOURCE_ID,
+                            min = 0,
+                            max = 1,
+                            typeName = "string")
+                    IPrimitiveType theGoldenResourceId,
+            @OperationParam(name = ProviderConstants.MDM_QUERY_LINKS_RESOURCE_ID, min = 0, max = 1, typeName = "string")
+                    IPrimitiveType theResourceId,
+            @OperationParam(
+                            name = ProviderConstants.MDM_QUERY_LINKS_MATCH_RESULT,
+                            min = 0,
+                            max = 1,
+                            typeName = "string")
+                    IPrimitiveType theMatchResult,
+            @OperationParam(name = ProviderConstants.MDM_QUERY_LINKS_LINK_SOURCE, min = 0, max = 1, typeName = "string")
+                    IPrimitiveType theLinkSource,
+            @Description(
+                            value =
+                                    "Results from this method are returned across multiple pages. This parameter controls the offset when fetching a page.")
+                    @OperationParam(name = PARAM_OFFSET, min = 0, max = 1, typeName = "integer")
+                    IPrimitiveType theOffset,
+            @Description(
+                            value =
+                                    "Results from this method are returned across multiple pages. This parameter controls the size of those pages.")
+                    @OperationParam(name = Constants.PARAM_COUNT, min = 0, max = 1, typeName = "integer")
+                    IPrimitiveType theCount,
+            @OperationParam(name = Constants.PARAM_SORT, min = 0, max = 1, typeName = "string")
+                    IPrimitiveType theSort,
+            ServletRequestDetails theRequestDetails,
+            @OperationParam(name = ProviderConstants.MDM_RESOURCE_TYPE, min = 0, max = 1, typeName = "string")
+                    IPrimitiveType theResourceType) {
         MdmPageRequest mdmPageRequest = new MdmPageRequest(theOffset, theCount, DEFAULT_PAGE_SIZE, MAX_PAGE_SIZE);
-        MdmTransactionContext mdmContext = createMdmContext(theRequestDetails, MdmTransactionContext.OperationType.QUERY_LINKS,
-            getResourceType(ProviderConstants.MDM_QUERY_LINKS_GOLDEN_RESOURCE_ID, theGoldenResourceId, theResourceType));
+        MdmTransactionContext mdmContext = createMdmContext(
+                theRequestDetails,
+                MdmTransactionContext.OperationType.QUERY_LINKS,
+                getResourceType(
+                        ProviderConstants.MDM_QUERY_LINKS_GOLDEN_RESOURCE_ID, theGoldenResourceId, theResourceType));
         MdmQuerySearchParameters mdmQuerySearchParameters = new MdmQuerySearchParameters(mdmPageRequest)
                 .setGoldenResourceId(extractStringOrNull(theGoldenResourceId))
                 .setSourceId(extractStringOrNull(theResourceId))
@@ -211,55 +271,80 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider {
                 .setResourceType(extractStringOrNull(theResourceType))
                 .setSort(extractStringOrNull(theSort));
-        Page mdmLinkJson = myMdmControllerSvc.queryLinks(mdmQuerySearchParameters, mdmContext, theRequestDetails);
+        Page mdmLinkJson =
+                myMdmControllerSvc.queryLinks(mdmQuerySearchParameters, mdmContext, theRequestDetails);
         return parametersFromMdmLinks(mdmLinkJson, true, theRequestDetails, mdmPageRequest);
     }
-
     @Operation(name = ProviderConstants.MDM_DUPLICATE_GOLDEN_RESOURCES, idempotent = true)
     public IBaseParameters
getDuplicateGoldenResources( - @Description(formalDefinition="Results from this method are returned across multiple pages. This parameter controls the offset when fetching a page.") - @OperationParam(name = PARAM_OFFSET, min = 0, max = 1, typeName = "integer") - IPrimitiveType theOffset, - @Description(formalDefinition = "Results from this method are returned across multiple pages. This parameter controls the size of those pages.") - @OperationParam(name = Constants.PARAM_COUNT, min = 0, max = 1, typeName = "integer") - IPrimitiveType theCount, + @Description( + formalDefinition = + "Results from this method are returned across multiple pages. This parameter controls the offset when fetching a page.") + @OperationParam(name = PARAM_OFFSET, min = 0, max = 1, typeName = "integer") + IPrimitiveType theOffset, + @Description( + formalDefinition = + "Results from this method are returned across multiple pages. This parameter controls the size of those pages.") + @OperationParam(name = Constants.PARAM_COUNT, min = 0, max = 1, typeName = "integer") + IPrimitiveType theCount, ServletRequestDetails theRequestDetails, - @Description(formalDefinition = "This parameter controls the returned resource type.") - @OperationParam(name = ProviderConstants.MDM_RESOURCE_TYPE, min = 0, max = 1, typeName = "string") - IPrimitiveType theResourceType) { + @Description(formalDefinition = "This parameter controls the returned resource type.") + @OperationParam(name = ProviderConstants.MDM_RESOURCE_TYPE, min = 0, max = 1, typeName = "string") + IPrimitiveType theResourceType) { MdmPageRequest mdmPageRequest = new MdmPageRequest(theOffset, theCount, DEFAULT_PAGE_SIZE, MAX_PAGE_SIZE); - Page possibleDuplicates = myMdmControllerSvc.getDuplicateGoldenResources(createMdmContext(theRequestDetails, MdmTransactionContext.OperationType.DUPLICATE_GOLDEN_RESOURCES, null), mdmPageRequest, theRequestDetails, extractStringOrNull(theResourceType)); + Page possibleDuplicates = myMdmControllerSvc.getDuplicateGoldenResources( + createMdmContext( + theRequestDetails, MdmTransactionContext.OperationType.DUPLICATE_GOLDEN_RESOURCES, null), + mdmPageRequest, + theRequestDetails, + extractStringOrNull(theResourceType)); return parametersFromMdmLinks(possibleDuplicates, false, theRequestDetails, mdmPageRequest); } @Operation(name = ProviderConstants.MDM_NOT_DUPLICATE) - public IBaseParameters notDuplicate(@OperationParam(name = ProviderConstants.MDM_QUERY_LINKS_GOLDEN_RESOURCE_ID, min = 1, max = 1, typeName = "string") IPrimitiveType theGoldenResourceId, - @OperationParam(name = ProviderConstants.MDM_QUERY_LINKS_RESOURCE_ID, min = 1, max = 1, typeName = "string") IPrimitiveType theResourceId, - ServletRequestDetails theRequestDetails) { + public IBaseParameters notDuplicate( + @OperationParam( + name = ProviderConstants.MDM_QUERY_LINKS_GOLDEN_RESOURCE_ID, + min = 1, + max = 1, + typeName = "string") + IPrimitiveType theGoldenResourceId, + @OperationParam(name = ProviderConstants.MDM_QUERY_LINKS_RESOURCE_ID, min = 1, max = 1, typeName = "string") + IPrimitiveType theResourceId, + ServletRequestDetails theRequestDetails) { validateNotDuplicateParameters(theGoldenResourceId, theResourceId); - myMdmControllerSvc.notDuplicateGoldenResource(theGoldenResourceId.getValue(), theResourceId.getValue(), - createMdmContext(theRequestDetails, MdmTransactionContext.OperationType.NOT_DUPLICATE, - getResourceType(ProviderConstants.MDM_QUERY_LINKS_GOLDEN_RESOURCE_ID, theGoldenResourceId)) - ); + myMdmControllerSvc.notDuplicateGoldenResource( + 
theGoldenResourceId.getValue(), + theResourceId.getValue(), + createMdmContext( + theRequestDetails, + MdmTransactionContext.OperationType.NOT_DUPLICATE, + getResourceType(ProviderConstants.MDM_QUERY_LINKS_GOLDEN_RESOURCE_ID, theGoldenResourceId))); IBaseParameters retval = ParametersUtil.newInstance(myFhirContext); ParametersUtil.addParameterToParametersBoolean(myFhirContext, retval, "success", true); return retval; } - @Operation(name = ProviderConstants.OPERATION_MDM_SUBMIT, idempotent = false, returnParameters = { - @OperationParam(name = ProviderConstants.OPERATION_MDM_SUBMIT_OUT_PARAM_SUBMITTED, typeName = "integer") - }) + @Operation( + name = ProviderConstants.OPERATION_MDM_SUBMIT, + idempotent = false, + returnParameters = { + @OperationParam(name = ProviderConstants.OPERATION_MDM_SUBMIT_OUT_PARAM_SUBMITTED, typeName = "integer") + }) public IBaseParameters mdmBatchOnAllSourceResources( - @OperationParam(name = ProviderConstants.MDM_BATCH_RUN_RESOURCE_TYPE, min = 0, max = 1, typeName = "string") IPrimitiveType theResourceType, - @OperationParam(name = ProviderConstants.MDM_BATCH_RUN_CRITERIA, min = 0, max = 1, typeName = "string") IPrimitiveType theCriteria, - @OperationParam(name = ProviderConstants.OPERATION_MDM_BATCH_SIZE, typeName = "decimal", min = 0, max = 1) IPrimitiveType theBatchSize, - ServletRequestDetails theRequestDetails) { + @OperationParam(name = ProviderConstants.MDM_BATCH_RUN_RESOURCE_TYPE, min = 0, max = 1, typeName = "string") + IPrimitiveType theResourceType, + @OperationParam(name = ProviderConstants.MDM_BATCH_RUN_CRITERIA, min = 0, max = 1, typeName = "string") + IPrimitiveType theCriteria, + @OperationParam(name = ProviderConstants.OPERATION_MDM_BATCH_SIZE, typeName = "decimal", min = 0, max = 1) + IPrimitiveType theBatchSize, + ServletRequestDetails theRequestDetails) { String criteria = convertStringTypeToString(theCriteria); String resourceType = convertStringTypeToString(theResourceType); long submittedCount; @@ -268,13 +353,13 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider { return myMdmControllerSvc.submitMdmSubmitJob(urls, theBatchSize, theRequestDetails); } else { if (StringUtils.isNotBlank(resourceType)) { - submittedCount = myMdmSubmitSvc.submitSourceResourceTypeToMdm(resourceType, criteria, theRequestDetails); + submittedCount = + myMdmSubmitSvc.submitSourceResourceTypeToMdm(resourceType, criteria, theRequestDetails); } else { submittedCount = myMdmSubmitSvc.submitAllSourceTypesToMdm(criteria, theRequestDetails); } return buildMdmOutParametersWithCount(submittedCount); } - } @Nonnull @@ -284,37 +369,42 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider { String theUrl = resourceType + "?" + criteria; urls.add(theUrl); } else { - myMdmSettings.getMdmRules().getMdmTypes() - .stream() - .map(type -> type + "?" + criteria) - .forEach(urls::add); + myMdmSettings.getMdmRules().getMdmTypes().stream() + .map(type -> type + "?" + criteria) + .forEach(urls::add); } return urls; } - private String convertStringTypeToString(IPrimitiveType theCriteria) { return theCriteria == null ? 
"" : theCriteria.getValueAsString(); } - - @Operation(name = ProviderConstants.OPERATION_MDM_SUBMIT, idempotent = false, typeName = "Patient", returnParameters = { - @OperationParam(name = ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, typeName = "integer") - }) - public IBaseParameters mdmBatchPatientInstance( - @IdParam IIdType theIdParam, - RequestDetails theRequest) { + @Operation( + name = ProviderConstants.OPERATION_MDM_SUBMIT, + idempotent = false, + typeName = "Patient", + returnParameters = { + @OperationParam(name = ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, typeName = "integer") + }) + public IBaseParameters mdmBatchPatientInstance(@IdParam IIdType theIdParam, RequestDetails theRequest) { long submittedCount = myMdmSubmitSvc.submitSourceResourceToMdm(theIdParam, theRequest); return buildMdmOutParametersWithCount(submittedCount); } - @Operation(name = ProviderConstants.OPERATION_MDM_SUBMIT, idempotent = false, typeName = "Patient", returnParameters = { - @OperationParam(name = ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, typeName = "integer") - }) + @Operation( + name = ProviderConstants.OPERATION_MDM_SUBMIT, + idempotent = false, + typeName = "Patient", + returnParameters = { + @OperationParam(name = ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, typeName = "integer") + }) public IBaseParameters mdmBatchPatientType( - @OperationParam(name = ProviderConstants.MDM_BATCH_RUN_CRITERIA, typeName = "string") IPrimitiveType theCriteria, - @OperationParam(name = ProviderConstants.OPERATION_MDM_BATCH_SIZE, typeName = "decimal", min = 0, max = 1) IPrimitiveType theBatchSize, - ServletRequestDetails theRequest) { + @OperationParam(name = ProviderConstants.MDM_BATCH_RUN_CRITERIA, typeName = "string") + IPrimitiveType theCriteria, + @OperationParam(name = ProviderConstants.OPERATION_MDM_BATCH_SIZE, typeName = "decimal", min = 0, max = 1) + IPrimitiveType theBatchSize, + ServletRequestDetails theRequest) { if (theRequest.isPreferRespondAsync()) { String theUrl = "Patient?"; return myMdmControllerSvc.submitMdmSubmitJob(Collections.singletonList(theUrl), theBatchSize, theRequest); @@ -325,26 +415,34 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider { } } - @Operation(name = ProviderConstants.OPERATION_MDM_SUBMIT, idempotent = false, typeName = "Practitioner", returnParameters = { - @OperationParam(name = ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, typeName = "integer") - }) - public IBaseParameters mdmBatchPractitionerInstance( - @IdParam IIdType theIdParam, - RequestDetails theRequest) { + @Operation( + name = ProviderConstants.OPERATION_MDM_SUBMIT, + idempotent = false, + typeName = "Practitioner", + returnParameters = { + @OperationParam(name = ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, typeName = "integer") + }) + public IBaseParameters mdmBatchPractitionerInstance(@IdParam IIdType theIdParam, RequestDetails theRequest) { long submittedCount = myMdmSubmitSvc.submitSourceResourceToMdm(theIdParam, theRequest); return buildMdmOutParametersWithCount(submittedCount); } - @Operation(name = ProviderConstants.OPERATION_MDM_SUBMIT, idempotent = false, typeName = "Practitioner", returnParameters = { - @OperationParam(name = ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, typeName = "integer") - }) + @Operation( + name = ProviderConstants.OPERATION_MDM_SUBMIT, + idempotent = false, + typeName = "Practitioner", + returnParameters = { + @OperationParam(name = ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, typeName = "integer") + }) public 
IBaseParameters mdmBatchPractitionerType( - @OperationParam(name = ProviderConstants.MDM_BATCH_RUN_CRITERIA, typeName = "string") IPrimitiveType theCriteria, - @OperationParam(name = ProviderConstants.OPERATION_MDM_BATCH_SIZE, typeName = "decimal", min = 0, max = 1) IPrimitiveType theBatchSize, - ServletRequestDetails theRequest) { + @OperationParam(name = ProviderConstants.MDM_BATCH_RUN_CRITERIA, typeName = "string") + IPrimitiveType theCriteria, + @OperationParam(name = ProviderConstants.OPERATION_MDM_BATCH_SIZE, typeName = "decimal", min = 0, max = 1) + IPrimitiveType theBatchSize, + ServletRequestDetails theRequest) { if (theRequest.isPreferRespondAsync()) { String theUrl = "Practitioner?"; - return myMdmControllerSvc.submitMdmSubmitJob(Collections.singletonList(theUrl), theBatchSize, theRequest); + return myMdmControllerSvc.submitMdmSubmitJob(Collections.singletonList(theUrl), theBatchSize, theRequest); } else { String criteria = convertStringTypeToString(theCriteria); long submittedCount = myMdmSubmitSvc.submitPractitionerTypeToMdm(criteria, theRequest); @@ -357,7 +455,8 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider { */ public IBaseParameters buildMdmOutParametersWithCount(long theCount) { IBaseParameters retval = ParametersUtil.newInstance(myFhirContext); - ParametersUtil.addParameterToParametersLong(myFhirContext, retval, ProviderConstants.OPERATION_MDM_SUBMIT_OUT_PARAM_SUBMITTED, theCount); + ParametersUtil.addParameterToParametersLong( + myFhirContext, retval, ProviderConstants.OPERATION_MDM_SUBMIT_OUT_PARAM_SUBMITTED, theCount); return retval; } @@ -369,8 +468,11 @@ public class MdmProviderDstu3Plus extends BaseMdmProvider { } } - private String getResourceType(String theParamName, IPrimitiveType theResourceId, IPrimitiveType theResourceType) { - return theResourceType != null ? theResourceType.getValueAsString() : getResourceType(theParamName, theResourceId); + private String getResourceType( + String theParamName, IPrimitiveType theResourceId, IPrimitiveType theResourceType) { + return theResourceType != null + ? 
theResourceType.getValueAsString()
+                : getResourceType(theParamName, theResourceId);
     }
     private String getResourceType(String theParamName, String theResourceId) {
diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmProviderLoader.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmProviderLoader.java
index 4ca2a8d1ab6..4de8067b3bd 100644
--- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmProviderLoader.java
+++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmProviderLoader.java
@@ -36,16 +36,22 @@ import javax.annotation.PreDestroy;
 public class MdmProviderLoader {
     @Autowired
     private FhirContext myFhirContext;
+
     @Autowired
     private ResourceProviderFactory myResourceProviderFactory;
+
     @Autowired
     private MdmControllerHelper myMdmControllerHelper;
+
     @Autowired
     private IMdmControllerSvc myMdmControllerSvc;
+
     @Autowired
     private IMdmSubmitSvc myMdmSubmitSvc;
+
     @Autowired
     private IMdmSettings myMdmSettings;
+
     @Autowired
     private JpaStorageSettings myStorageSettings;
@@ -55,18 +61,16 @@ public class MdmProviderLoader {
         switch (myFhirContext.getVersion().getVersion()) {
             case DSTU3:
             case R4:
-                myResourceProviderFactory.addSupplier(() -> new MdmProviderDstu3Plus(myFhirContext,
-                    myMdmControllerSvc,
-                    myMdmControllerHelper,
-                    myMdmSubmitSvc,
-                    myMdmSettings
-                ));
+                myResourceProviderFactory.addSupplier(() -> new MdmProviderDstu3Plus(
+                        myFhirContext, myMdmControllerSvc, myMdmControllerHelper, myMdmSubmitSvc, myMdmSettings));
                 if (myStorageSettings.isNonResourceDbHistoryEnabled()) {
-                    myResourceProviderFactory.addSupplier(() -> new MdmLinkHistoryProviderDstu3Plus(myFhirContext, myMdmControllerSvc));
+                    myResourceProviderFactory.addSupplier(
+                            () -> new MdmLinkHistoryProviderDstu3Plus(myFhirContext, myMdmControllerSvc));
                 }
                 break;
             default:
-                throw new ConfigurationException(Msg.code(1497) + "MDM not supported for FHIR version " + myFhirContext.getVersion().getVersion());
+                throw new ConfigurationException(Msg.code(1497) + "MDM not supported for FHIR version "
+                        + myFhirContext.getVersion().getVersion());
         }
     }
@@ -75,4 +79,3 @@ public class MdmProviderLoader {
         myResourceProviderFactory.removeSupplier(() -> myMdmProvider);
     }
 }
-
diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/config/MdmRuleValidator.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/config/MdmRuleValidator.java
index 919f9627a76..3189d149783 100644
--- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/config/MdmRuleValidator.java
+++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/config/MdmRuleValidator.java
@@ -80,13 +80,15 @@ public class MdmRuleValidator implements IMdmRuleValidator {
     }
     private void validateEidSystemsMatchMdmTypes(MdmRulesJson theMdmRules) {
-        theMdmRules.getEnterpriseEIDSystems().keySet()
-            .forEach(key -> {
-                //Ensure each key is either * or a valid resource type.
-                if (!key.equalsIgnoreCase("*") && !theMdmRules.getMdmTypes().contains(key)) {
-                    throw new ConfigurationException(Msg.code(1507) + String.format("There is an eidSystem set for [%s] but that is not one of the mdmTypes. Valid options are [%s].", key, buildValidEidKeysMessage(theMdmRules)));
-                }
-            });
+        theMdmRules.getEnterpriseEIDSystems().keySet().forEach(key -> {
+            // Ensure each key is either * or a valid resource type.
+ if (!key.equalsIgnoreCase("*") && !theMdmRules.getMdmTypes().contains(key)) { + throw new ConfigurationException(Msg.code(1507) + + String.format( + "There is an eidSystem set for [%s] but that is not one of the mdmTypes. Valid options are [%s].", + key, buildValidEidKeysMessage(theMdmRules))); + } + }); } private String buildValidEidKeysMessage(MdmRulesJson theMdmRulesJson) { @@ -96,19 +98,21 @@ public class MdmRuleValidator implements IMdmRuleValidator { } private void validateSystemsAreUris(MdmRulesJson theMdmRules) { - theMdmRules.getEnterpriseEIDSystems().entrySet() - .forEach(entry -> { - String resourceType = entry.getKey(); - String uri = entry.getValue(); - if (!resourceType.equals("*")) { - try { - myFhirContext.getResourceType(resourceType); - }catch (DataFormatException e) { - throw new ConfigurationException(Msg.code(1508) + String.format("%s is not a valid resource type, but is set in the eidSystems field.", resourceType)); - } + theMdmRules.getEnterpriseEIDSystems().entrySet().forEach(entry -> { + String resourceType = entry.getKey(); + String uri = entry.getValue(); + if (!resourceType.equals("*")) { + try { + myFhirContext.getResourceType(resourceType); + } catch (DataFormatException e) { + throw new ConfigurationException(Msg.code(1508) + + String.format( + "%s is not a valid resource type, but is set in the eidSystems field.", + resourceType)); } - validateIsUri(uri); - }); + } + validateIsUri(uri); + }); } public void validateMdmTypes(MdmRulesJson theMdmRulesJson) { @@ -117,14 +121,16 @@ public class MdmRuleValidator implements IMdmRuleValidator { if (theMdmRulesJson.getMdmTypes() == null) { throw new ConfigurationException(Msg.code(1509) + "mdmTypes must be set to a list of resource types."); } - for (String resourceType: theMdmRulesJson.getMdmTypes()) { + for (String resourceType : theMdmRulesJson.getMdmTypes()) { validateTypeHasIdentifier(resourceType); } } public void validateTypeHasIdentifier(String theResourceType) { if (mySearchParamRetriever.getActiveSearchParam(theResourceType, "identifier") == null) { - throw new ConfigurationException(Msg.code(1510) + "Resource Type " + theResourceType + " is not supported, as it does not have an 'identifier' field, which is necessary for MDM workflow."); + throw new ConfigurationException( + Msg.code(1510) + "Resource Type " + theResourceType + + " is not supported, as it does not have an 'identifier' field, which is necessary for MDM workflow."); } } @@ -132,8 +138,10 @@ public class MdmRuleValidator implements IMdmRuleValidator { ourLog.info("Validating search parameters {}", theMdmRulesJson.getCandidateSearchParams()); for (MdmResourceSearchParamJson searchParams : theMdmRulesJson.getCandidateSearchParams()) { - searchParams.iterator().forEachRemaining( - searchParam -> validateSearchParam("candidateSearchParams", searchParams.getResourceType(), searchParam)); + searchParams + .iterator() + .forEachRemaining(searchParam -> + validateSearchParam("candidateSearchParams", searchParams.getResourceType(), searchParam)); } for (MdmFilterSearchParamJson filter : theMdmRulesJson.getCandidateFilterSearchParams()) { validateSearchParam("candidateFilterSearchParams", filter.getResourceType(), filter.getSearchParam()); @@ -152,7 +160,8 @@ public class MdmRuleValidator implements IMdmRuleValidator { private void validateResourceSearchParam(String theFieldName, String theResourceType, String theSearchParam) { String searchParam = SearchParameterUtil.stripModifier(theSearchParam); if 
(mySearchParamRetriever.getActiveSearchParam(theResourceType, searchParam) == null) { - throw new ConfigurationException(Msg.code(1511) + "Error in " + theFieldName + ": " + theResourceType + " does not have a search parameter called '" + theSearchParam + "'"); + throw new ConfigurationException(Msg.code(1511) + "Error in " + theFieldName + ": " + theResourceType + + " does not have a search parameter called '" + theSearchParam + "'"); } } @@ -162,13 +171,15 @@ public class MdmRuleValidator implements IMdmRuleValidator { Set names = new HashSet<>(); for (MdmFieldMatchJson fieldMatch : theMdmRulesJson.getMatchFields()) { if (names.contains(fieldMatch.getName())) { - throw new ConfigurationException(Msg.code(1512) + "Two MatchFields have the same name '" + fieldMatch.getName() + "'"); + throw new ConfigurationException( + Msg.code(1512) + "Two MatchFields have the same name '" + fieldMatch.getName() + "'"); } names.add(fieldMatch.getName()); if (fieldMatch.getSimilarity() != null) { validateSimilarity(fieldMatch); } else if (fieldMatch.getMatcher() == null) { - throw new ConfigurationException(Msg.code(1513) + "MatchField " + fieldMatch.getName() + " has neither a similarity nor a matcher. At least one must be present."); + throw new ConfigurationException(Msg.code(1513) + "MatchField " + fieldMatch.getName() + + " has neither a similarity nor a matcher. At least one must be present."); } validatePath(theMdmRulesJson.getMdmTypes(), fieldMatch); } @@ -177,7 +188,8 @@ public class MdmRuleValidator implements IMdmRuleValidator { private void validateSimilarity(MdmFieldMatchJson theFieldMatch) { MdmSimilarityJson similarity = theFieldMatch.getSimilarity(); if (similarity.getMatchThreshold() == null) { - throw new ConfigurationException(Msg.code(1514) + "MatchField " + theFieldMatch.getName() + " similarity " + similarity.getAlgorithm() + " requires a matchThreshold"); + throw new ConfigurationException(Msg.code(1514) + "MatchField " + theFieldMatch.getName() + " similarity " + + similarity.getAlgorithm() + " requires a matchThreshold"); } } @@ -193,7 +205,7 @@ public class MdmRuleValidator implements IMdmRuleValidator { private void validateFieldPathForAllTypes(List theMdmResourceTypes, MdmFieldMatchJson theFieldMatch) { - for (String resourceType: theMdmResourceTypes) { + for (String resourceType : theMdmResourceTypes) { validateFieldPathForType(resourceType, theFieldMatch); } } @@ -202,36 +214,34 @@ public class MdmRuleValidator implements IMdmRuleValidator { ourLog.debug("Validating resource {} for {} ", theResourceType, theFieldMatch.getResourcePath()); if (theFieldMatch.getFhirPath() != null && theFieldMatch.getResourcePath() != null) { - throw new ConfigurationException(Msg.code(1515) + "MatchField [" + - theFieldMatch.getName() + - "] resourceType [" + - theFieldMatch.getResourceType() + - "] has defined both a resourcePath and a fhirPath. You must define one of the two."); + throw new ConfigurationException(Msg.code(1515) + "MatchField [" + theFieldMatch.getName() + + "] resourceType [" + + theFieldMatch.getResourceType() + + "] has defined both a resourcePath and a fhirPath. You must define one of the two."); } if (theFieldMatch.getResourcePath() == null && theFieldMatch.getFhirPath() == null) { - throw new ConfigurationException(Msg.code(1516) + "MatchField [" + - theFieldMatch.getName() + - "] resourceType [" + - theFieldMatch.getResourceType() + - "] has defined neither a resourcePath or a fhirPath. 
You must define one of the two."); + throw new ConfigurationException(Msg.code(1516) + "MatchField [" + theFieldMatch.getName() + + "] resourceType [" + + theFieldMatch.getResourceType() + + "] has defined neither a resourcePath or a fhirPath. You must define one of the two."); } if (theFieldMatch.getResourcePath() != null) { - try { //Try to validate the struture definition path + try { // Try to validate the struture definition path RuntimeResourceDefinition resourceDefinition = myFhirContext.getResourceDefinition(theResourceType); Class implementingClass = resourceDefinition.getImplementingClass(); String path = theResourceType + "." + theFieldMatch.getResourcePath(); myTerser.getDefinition(implementingClass, path); } catch (DataFormatException | ConfigurationException | ClassCastException e) { - //Fallback to attempting to FHIRPath evaluate it. - throw new ConfigurationException(Msg.code(1517) + "MatchField " + - theFieldMatch.getName() + - " resourceType " + - theFieldMatch.getResourceType() + - " has invalid path '" + theFieldMatch.getResourcePath() + "'. " + e.getMessage()); + // Fallback to attempting to FHIRPath evaluate it. + throw new ConfigurationException(Msg.code(1517) + "MatchField " + theFieldMatch.getName() + + " resourceType " + + theFieldMatch.getResourceType() + + " has invalid path '" + + theFieldMatch.getResourcePath() + "'. " + e.getMessage()); } - } else { //Try to validate the FHIRPath + } else { // Try to validate the FHIRPath try { if (myFhirPath != null) { myFhirPath.parse(theResourceType + "." + theFieldMatch.getFhirPath()); @@ -239,7 +249,9 @@ public class MdmRuleValidator implements IMdmRuleValidator { ourLog.debug("Can't validate FHIRPath expression due to a lack of IFhirPath object."); } } catch (Exception e) { - throw new ConfigurationException(Msg.code(1518) + "MatchField [" + theFieldMatch.getName() + "] resourceType [" + theFieldMatch.getResourceType() + "] has failed FHIRPath evaluation. " + e.getMessage()); + throw new ConfigurationException(Msg.code(1518) + "MatchField [" + theFieldMatch.getName() + + "] resourceType [" + theFieldMatch.getResourceType() + "] has failed FHIRPath evaluation. 
" + + e.getMessage()); } } } @@ -253,7 +265,8 @@ public class MdmRuleValidator implements IMdmRuleValidator { try { new URI(theUri); } catch (URISyntaxException e) { - throw new ConfigurationException(Msg.code(1519) + "Enterprise Identifier System (eidSystem) must be a valid URI"); + throw new ConfigurationException( + Msg.code(1519) + "Enterprise Identifier System (eidSystem) must be a valid URI"); } } } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/json/MdmFilterSearchParamJson.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/json/MdmFilterSearchParamJson.java index 72603d604f6..8e6fb037a3b 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/json/MdmFilterSearchParamJson.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/json/MdmFilterSearchParamJson.java @@ -31,10 +31,13 @@ import com.fasterxml.jackson.annotation.JsonProperty; public class MdmFilterSearchParamJson implements IModelJson { @JsonProperty(value = "resourceType", required = true) String myResourceType; + @JsonProperty(value = "searchParam", required = true) String mySearchParam; + @JsonProperty(value = "qualifier", required = true) TokenParamModifier myTokenParamModifier; + @JsonProperty(value = "fixedValue", required = true) String myFixedValue; @@ -56,7 +59,6 @@ public class MdmFilterSearchParamJson implements IModelJson { return this; } - public TokenParamModifier getTokenParamModifier() { return myTokenParamModifier; } @@ -75,7 +77,7 @@ public class MdmFilterSearchParamJson implements IModelJson { return this; } - public String getTokenParamModifierAsString() { + public String getTokenParamModifierAsString() { return myTokenParamModifier == null ? "" : myTokenParamModifier.getValue(); - } + } } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/json/MdmResourceSearchParamJson.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/json/MdmResourceSearchParamJson.java index e800bfa4431..f19758a50bc 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/json/MdmResourceSearchParamJson.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/json/MdmResourceSearchParamJson.java @@ -32,6 +32,7 @@ import java.util.List; public class MdmResourceSearchParamJson implements IModelJson, Iterable { @JsonProperty(value = "resourceType", required = true) String myResourceType; + @JsonProperty(value = "searchParams", required = true) List mySearchParams; diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/json/MdmRulesJson.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/json/MdmRulesJson.java index 6d13abd1b54..dcc600e9fab 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/json/MdmRulesJson.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/json/MdmRulesJson.java @@ -41,12 +41,16 @@ public class MdmRulesJson implements IModelJson { @JsonProperty(value = "version", required = true) String myVersion; + @JsonProperty(value = "candidateSearchParams", required = true) List myCandidateSearchParams = new ArrayList<>(); + @JsonProperty(value = "candidateFilterSearchParams", required = true) List myCandidateFilterSearchParams = new ArrayList<>(); + @JsonProperty(value = "matchFields", required = true) List myMatchFieldJsonList = new ArrayList<>(); + @JsonProperty(value = "matchResultMap", required = true) Map myMatchResultMap = new HashMap<>(); @@ -59,6 +63,7 @@ public class MdmRulesJson implements IModelJson { @JsonProperty(value = "eidSystems") Map 
myEnterpriseEidSystems = new HashMap<>(); + @JsonProperty(value = "mdmTypes") List myMdmTypes; @@ -148,15 +153,15 @@ public class MdmRulesJson implements IModelJson { } public Map getEnterpriseEIDSystems() { - //First try the new property. + // First try the new property. if (myEnterpriseEidSystems != null && !myEnterpriseEidSystems.isEmpty()) { return myEnterpriseEidSystems; - //If that fails, fall back to our deprecated property. + // If that fails, fall back to our deprecated property. } else if (!StringUtils.isBlank(myEnterpriseEIDSystem)) { - HashMap retVal = new HashMap<>(); + HashMap retVal = new HashMap<>(); retVal.put(ALL_RESOURCE_SEARCH_PARAM_TYPE, myEnterpriseEIDSystem); return retVal; - //Otherwise, return an empty map. + // Otherwise, return an empty map. } else { return Collections.emptyMap(); } @@ -185,18 +190,17 @@ public class MdmRulesJson implements IModelJson { Map enterpriseEIDSystems = getEnterpriseEIDSystems(); - //If we have a * eid system, there should only be one. + // If we have a * eid system, there should only be one. if (enterpriseEIDSystems.containsKey(ALL_RESOURCE_SEARCH_PARAM_TYPE)) { Validate.isTrue(enterpriseEIDSystems.size() == 1); } - } public String getSummary() { - return myCandidateSearchParams.size() + " Candidate Search Params, " + - myCandidateFilterSearchParams.size() + " Filter Search Params, " + - myMatchFieldJsonList.size() + " Match Fields, " + - myMatchResultMap.size() + " Match Result Entries"; + return myCandidateSearchParams.size() + " Candidate Search Params, " + myCandidateFilterSearchParams.size() + + " Filter Search Params, " + myMatchFieldJsonList.size() + + " Match Fields, " + myMatchResultMap.size() + + " Match Result Entries"; } public String getFieldMatchNamesForVector(long theVector) { @@ -212,7 +216,7 @@ public class MdmRulesJson implements IModelJson { fieldMatchResult.add(myMatchFieldJsonList.get(i).getName() + ": YES"); } } - return String.join("\n" ,fieldMatchResult); + return String.join("\n", fieldMatchResult); } @VisibleForTesting @@ -228,8 +232,7 @@ public class MdmRulesJson implements IModelJson { /** * This empty constructor is required by Jackson */ - public MdmRulesJsonConverter() { - } + public MdmRulesJsonConverter() {} @Override public MdmRulesJson convert(MdmRulesJson theMdmRulesJson) { @@ -246,5 +249,4 @@ public class MdmRulesJson implements IModelJson { public void setMdmTypes(List theMdmTypes) { myMdmTypes = theMdmTypes; } - } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/json/VectorMatchResultMap.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/json/VectorMatchResultMap.java index 5af083ba3af..93bc658c8d7 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/json/VectorMatchResultMap.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/json/VectorMatchResultMap.java @@ -19,15 +19,15 @@ */ package ca.uhn.fhir.mdm.rules.json; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.ConfigurationException; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.mdm.api.MdmMatchResultEnum; -import javax.annotation.Nonnull; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; +import javax.annotation.Nonnull; public class VectorMatchResultMap { private final MdmRulesJson myMdmRulesJson; @@ -43,7 +43,8 @@ public class VectorMatchResultMap { } private void initMap() { - for (Map.Entry entry : myMdmRulesJson.getMatchResultMap().entrySet()) { + for (Map.Entry entry : + 
myMdmRulesJson.getMatchResultMap().entrySet()) { put(entry.getKey(), entry.getValue()); } } @@ -100,7 +101,7 @@ public class VectorMatchResultMap { return -1; } - public String getFieldMatchNames(long theVector) { + public String getFieldMatchNames(long theVector) { return myVectorToFieldMatchNamesMap.get(theVector); - } + } } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/MdmMatcherFactory.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/MdmMatcherFactory.java index 0ff64a8b387..96461155324 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/MdmMatcherFactory.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/MdmMatcherFactory.java @@ -47,11 +47,7 @@ public class MdmMatcherFactory implements IMatcherFactory { private final INicknameSvc myNicknameSvc; - public MdmMatcherFactory( - FhirContext theFhirContext, - IMdmSettings theSettings, - INicknameSvc theNicknameSvc - ) { + public MdmMatcherFactory(FhirContext theFhirContext, IMdmSettings theSettings, INicknameSvc theNicknameSvc) { myFhirContext = theFhirContext; myMdmSettings = theSettings; myNicknameSvc = theNicknameSvc; diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/DateTimeWrapper.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/DateTimeWrapper.java index 7b1c413bbad..966a0e2ebd2 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/DateTimeWrapper.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/DateTimeWrapper.java @@ -24,7 +24,6 @@ import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.TemporalPrecisionEnum; import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.r4.model.DateTimeType; -import org.hl7.fhir.r4.model.DateType; /** * A wrapper class for datetimes of ambiguous fhir version @@ -52,7 +51,8 @@ public class DateTimeWrapper { myValueAsString = r4Date.getValueAsString(); } else { // we should consider changing this error so we don't need the fhir context at all - throw new UnsupportedOperationException(Msg.code(1520) + "Version not supported: " + theFhirContext.getVersion().getVersion()); + throw new UnsupportedOperationException(Msg.code(1520) + "Version not supported: " + + theFhirContext.getVersion().getVersion()); } } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/EmptyFieldMatcher.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/EmptyFieldMatcher.java index 02618e097d3..2fafd12f794 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/EmptyFieldMatcher.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/EmptyFieldMatcher.java @@ -25,8 +25,7 @@ import org.hl7.fhir.instance.model.api.IBase; public class EmptyFieldMatcher implements IMdmFieldMatcher { - public EmptyFieldMatcher() { - } + public EmptyFieldMatcher() {} @Override public boolean matches(IBase theLeftBase, IBase theRightBase, MdmMatcherJson theParams) { diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/HapiStringMatcher.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/HapiStringMatcher.java index 4168051754b..3bfec0dc79f 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/HapiStringMatcher.java +++ 
b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/HapiStringMatcher.java @@ -33,8 +33,10 @@ public class HapiStringMatcher implements IMdmFieldMatcher { @Override public boolean matches(IBase theLeftBase, IBase theRightBase, MdmMatcherJson theExtraMatchParams) { if (theLeftBase instanceof IPrimitiveType && theRightBase instanceof IPrimitiveType) { - String leftString = StringMatcherUtils.extractString((IPrimitiveType) theLeftBase, theExtraMatchParams.getExact()); - String rightString = StringMatcherUtils.extractString((IPrimitiveType) theRightBase, theExtraMatchParams.getExact()); + String leftString = + StringMatcherUtils.extractString((IPrimitiveType) theLeftBase, theExtraMatchParams.getExact()); + String rightString = + StringMatcherUtils.extractString((IPrimitiveType) theRightBase, theExtraMatchParams.getExact()); return leftString.equals(rightString); } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/NameMatcher.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/NameMatcher.java index 15cafbdabbc..186e2323930 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/NameMatcher.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/NameMatcher.java @@ -60,8 +60,12 @@ public class NameMatcher implements IMdmFieldMatcher { if (!theParams.getExact()) { leftFamilyName = StringUtil.normalizeStringForSearchIndexing(leftFamilyName); rightFamilyName = StringUtil.normalizeStringForSearchIndexing(rightFamilyName); - leftGivenNames = leftGivenNames.stream().map(StringUtil::normalizeStringForSearchIndexing).collect(Collectors.toList()); - rightGivenNames = rightGivenNames.stream().map(StringUtil::normalizeStringForSearchIndexing).collect(Collectors.toList()); + leftGivenNames = leftGivenNames.stream() + .map(StringUtil::normalizeStringForSearchIndexing) + .collect(Collectors.toList()); + rightGivenNames = rightGivenNames.stream() + .map(StringUtil::normalizeStringForSearchIndexing) + .collect(Collectors.toList()); } for (String leftGivenName : leftGivenNames) { diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/NicknameMatcher.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/NicknameMatcher.java index 6a3386c167e..2ca6aeaad1b 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/NicknameMatcher.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/NicknameMatcher.java @@ -53,7 +53,8 @@ public class NicknameMatcher implements IMdmFieldMatcher { public boolean matches(IBase theLeftBase, IBase theRightBase, MdmMatcherJson theParams) { if (theLeftBase instanceof IPrimitiveType && theRightBase instanceof IPrimitiveType) { String leftString = StringMatcherUtils.extractString((IPrimitiveType) theLeftBase, theParams.getExact()); - String rightString = StringMatcherUtils.extractString((IPrimitiveType) theRightBase, theParams.getExact()); + String rightString = + StringMatcherUtils.extractString((IPrimitiveType) theRightBase, theParams.getExact()); return matches(leftString, rightString); } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/NumericMatcher.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/NumericMatcher.java index c9a3777f6dd..d1ab7edbe27 100644 --- 
a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/NumericMatcher.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/NumericMatcher.java @@ -27,15 +27,18 @@ import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IPrimitiveType; // Useful for numerical identifiers like phone numbers, address parts etc. -// This should not be used where decimals are important. A new "quantity matcher" should be added to handle cases like that. +// This should not be used where decimals are important. A new "quantity matcher" should be added to handle cases like +// that. public class NumericMatcher implements IMdmFieldMatcher { private final NumericEncoder encoder = new NumericEncoder(); @Override public boolean matches(IBase theLeftBase, IBase theRightBase, MdmMatcherJson theParams) { if (theLeftBase instanceof IPrimitiveType && theRightBase instanceof IPrimitiveType) { - String left = encoder.encode(StringMatcherUtils.extractString((IPrimitiveType) theLeftBase, theParams.getExact())); - String right = encoder.encode(StringMatcherUtils.extractString((IPrimitiveType) theRightBase, theParams.getExact())); + String left = encoder.encode( + StringMatcherUtils.extractString((IPrimitiveType) theLeftBase, theParams.getExact())); + String right = encoder.encode( + StringMatcherUtils.extractString((IPrimitiveType) theRightBase, theParams.getExact())); return left.equals(right); } return false; diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/PhoneticEncoderMatcher.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/PhoneticEncoderMatcher.java index efbb3372e1f..988dfdd1089 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/PhoneticEncoderMatcher.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/fieldmatchers/PhoneticEncoderMatcher.java @@ -50,5 +50,4 @@ public class PhoneticEncoderMatcher implements IMdmFieldMatcher { return matches(leftString, rightString); } - } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/models/MatchTypeEnum.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/models/MatchTypeEnum.java index 7f721c20822..21e4dc2479c 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/models/MatchTypeEnum.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/matcher/models/MatchTypeEnum.java @@ -25,7 +25,6 @@ package ca.uhn.fhir.mdm.rules.matcher.models; * https://commons.apache.org/proper/commons-codec/userguide.html */ public enum MatchTypeEnum { - CAVERPHONE1, CAVERPHONE2, COLOGNE, @@ -49,5 +48,4 @@ public enum MatchTypeEnum { EMPTY_FIELD, EXTENSION_ANY_ORDER, NUMERIC; - } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/similarity/MdmSimilarityEnum.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/similarity/MdmSimilarityEnum.java index 66285dfa66a..c62bd4ed355 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/similarity/MdmSimilarityEnum.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/similarity/MdmSimilarityEnum.java @@ -31,7 +31,6 @@ import org.hl7.fhir.instance.model.api.IBase; import javax.annotation.Nullable; public enum MdmSimilarityEnum { - JARO_WINKLER(new HapiStringSimilarity(new JaroWinkler())), COSINE(new HapiStringSimilarity(new Cosine())), JACCARD(new HapiStringSimilarity(new Jaccard())), @@ 
-44,11 +43,23 @@ public enum MdmSimilarityEnum { myMdmFieldSimilarity = theMdmFieldSimilarity; } - public MdmMatchEvaluation match(FhirContext theFhirContext, IBase theLeftBase, IBase theRightBase, boolean theExact, @Nullable Double theThreshold) { - return matchBySimilarity(myMdmFieldSimilarity, theFhirContext, theLeftBase, theRightBase, theExact, theThreshold); + public MdmMatchEvaluation match( + FhirContext theFhirContext, + IBase theLeftBase, + IBase theRightBase, + boolean theExact, + @Nullable Double theThreshold) { + return matchBySimilarity( + myMdmFieldSimilarity, theFhirContext, theLeftBase, theRightBase, theExact, theThreshold); } - private MdmMatchEvaluation matchBySimilarity(IMdmFieldSimilarity theSimilarity, FhirContext theFhirContext, IBase theLeftBase, IBase theRightBase, boolean theExact, Double theThreshold) { + private MdmMatchEvaluation matchBySimilarity( + IMdmFieldSimilarity theSimilarity, + FhirContext theFhirContext, + IBase theLeftBase, + IBase theRightBase, + boolean theExact, + Double theThreshold) { double similarityResult = theSimilarity.similarity(theFhirContext, theLeftBase, theRightBase, theExact); return new MdmMatchEvaluation(similarityResult >= theThreshold, similarityResult); } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/svc/MdmResourceFieldMatcher.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/svc/MdmResourceFieldMatcher.java index 9dd3fda09ec..a3e2e5f9f5a 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/svc/MdmResourceFieldMatcher.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/svc/MdmResourceFieldMatcher.java @@ -58,11 +58,10 @@ public class MdmResourceFieldMatcher { private final IMatcherFactory myIMatcherFactory; public MdmResourceFieldMatcher( - FhirContext theFhirContext, - IMatcherFactory theIMatcherFactory, - MdmFieldMatchJson theMdmFieldMatchJson, - MdmRulesJson theMdmRulesJson - ) { + FhirContext theFhirContext, + IMatcherFactory theIMatcherFactory, + MdmFieldMatchJson theMdmFieldMatchJson, + MdmRulesJson theMdmRulesJson) { myIMatcherFactory = theIMatcherFactory; myFhirContext = theFhirContext; @@ -136,7 +135,8 @@ public class MdmResourceFieldMatcher { return similarity.match(myFhirContext, theLeftValue, theRightValue); } - throw new InternalErrorException(Msg.code(1522) + "Field Match " + myName + " has neither a matcher nor a similarity."); + throw new InternalErrorException( + Msg.code(1522) + "Field Match " + myName + " has neither a matcher nor a similarity."); } private void validate(IBaseResource theResource) { @@ -144,10 +144,19 @@ public class MdmResourceFieldMatcher { Validate.notNull(resourceType, "Resource type may not be null"); if (ALL_RESOURCE_SEARCH_PARAM_TYPE.equals(myResourceType)) { - boolean isMdmType = myMdmRulesJson.getMdmTypes().stream().anyMatch(mdmType -> mdmType.equalsIgnoreCase(resourceType)); - Validate.isTrue(isMdmType, "Expecting resource type %s, got resource type %s", myMdmRulesJson.getMdmTypes().stream().collect(Collectors.joining(",")), resourceType); + boolean isMdmType = + myMdmRulesJson.getMdmTypes().stream().anyMatch(mdmType -> mdmType.equalsIgnoreCase(resourceType)); + Validate.isTrue( + isMdmType, + "Expecting resource type %s, got resource type %s", + myMdmRulesJson.getMdmTypes().stream().collect(Collectors.joining(",")), + resourceType); } else { - Validate.isTrue(myResourceType.equals(resourceType), "Expecting resource type %s got resource type %s", myResourceType, resourceType); + Validate.isTrue( + 
myResourceType.equals(resourceType), + "Expecting resource type %s got resource type %s", + myResourceType, + resourceType); } } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/svc/MdmResourceMatcherSvc.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/svc/MdmResourceMatcherSvc.java index 418849cb363..2389ab1fb22 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/svc/MdmResourceMatcherSvc.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/rules/svc/MdmResourceMatcherSvc.java @@ -44,7 +44,6 @@ import java.util.List; * It does so by calling individual comparators, and returning a vector based on the combination of * field comparators that matched. */ - @Service public class MdmResourceMatcherSvc { private static final Logger ourLog = Logs.getMdmTroubleshootingLog(); @@ -56,10 +55,7 @@ public class MdmResourceMatcherSvc { private MdmRulesJson myMdmRulesJson; public MdmResourceMatcherSvc( - FhirContext theFhirContext, - IMatcherFactory theIMatcherFactory, - IMdmSettings theMdmSettings - ) { + FhirContext theFhirContext, IMatcherFactory theIMatcherFactory, IMdmSettings theMdmSettings) { myFhirContext = theFhirContext; myMatcherFactory = theIMatcherFactory; myMdmRulesJson = theMdmSettings.getMdmRules(); @@ -69,11 +65,13 @@ public class MdmResourceMatcherSvc { private void addFieldMatchers() { if (myMdmRulesJson == null) { - throw new ConfigurationException(Msg.code(1521) + "Failed to load MDM Rules. If MDM is enabled, then MDM rules must be available in context."); + throw new ConfigurationException(Msg.code(1521) + + "Failed to load MDM Rules. If MDM is enabled, then MDM rules must be available in context."); } myFieldMatchers.clear(); for (MdmFieldMatchJson matchFieldJson : myMdmRulesJson.getMatchFields()) { - myFieldMatchers.add(new MdmResourceFieldMatcher(myFhirContext, myMatcherFactory, matchFieldJson, myMdmRulesJson)); + myFieldMatchers.add( + new MdmResourceFieldMatcher(myFhirContext, myMatcherFactory, matchFieldJson, myMdmRulesJson)); } } @@ -94,9 +92,15 @@ public class MdmResourceMatcherSvc { MdmMatchResultEnum matchResultEnum = myMdmRulesJson.getMatchResult(matchResult.getVector()); matchResult.setMatchResultEnum(matchResultEnum); if (ourLog.isDebugEnabled()) { - ourLog.debug("{} {}: {}", matchResult.getMatchResultEnum(), theRightResource.getIdElement().toUnqualifiedVersionless(), matchResult); + ourLog.debug( + "{} {}: {}", + matchResult.getMatchResultEnum(), + theRightResource.getIdElement().toUnqualifiedVersionless(), + matchResult); if (ourLog.isTraceEnabled()) { - ourLog.trace("Field matcher results:\n{}", myMdmRulesJson.getDetailedFieldMatchResultWithSuccessInformation(matchResult.getVector())); + ourLog.trace( + "Field matcher results:\n{}", + myMdmRulesJson.getDetailedFieldMatchResultWithSuccessInformation(matchResult.getVector())); } } return matchResult; @@ -122,23 +126,36 @@ public class MdmResourceMatcherSvc { double score = 0.0; int appliedRuleCount = 0; - //TODO GGG MDM: This grabs ALL comparators, not just the ones we care about (e.g. the ones for Medication) + // TODO GGG MDM: This grabs ALL comparators, not just the ones we care about (e.g. the ones for Medication) String resourceType = myFhirContext.getResourceType(theLeftResource); for (int i = 0; i < myFieldMatchers.size(); ++i) { - //any that are not for the resourceType in question. + // any that are not for the resourceType in question. 
MdmResourceFieldMatcher fieldComparator = myFieldMatchers.get(i); if (!isValidResourceType(resourceType, fieldComparator.getResourceType())) { - ourLog.debug("Matcher {} is not valid for resource type: {}. Skipping it.", fieldComparator.getName(), resourceType); + ourLog.debug( + "Matcher {} is not valid for resource type: {}. Skipping it.", + fieldComparator.getName(), + resourceType); continue; } - ourLog.trace("Matcher {} is valid for resource type: {}. Evaluating match.", fieldComparator.getName(), resourceType); + ourLog.trace( + "Matcher {} is valid for resource type: {}. Evaluating match.", + fieldComparator.getName(), + resourceType); MdmMatchEvaluation matchEvaluation = fieldComparator.match(theLeftResource, theRightResource); if (matchEvaluation.match) { vector |= (1L << i); - ourLog.trace("Match: Successfully matched matcher {} with score {}. New vector: {}", fieldComparator.getName(), matchEvaluation.score, vector); + ourLog.trace( + "Match: Successfully matched matcher {} with score {}. New vector: {}", + fieldComparator.getName(), + matchEvaluation.score, + vector); } else { - ourLog.trace("No match: Matcher {} did not match (score: {}).", fieldComparator.getName(), matchEvaluation.score); + ourLog.trace( + "No match: Matcher {} did not match (score: {}).", + fieldComparator.getName(), + matchEvaluation.score); } score += matchEvaluation.score; appliedRuleCount += 1; @@ -150,10 +167,8 @@ public class MdmResourceMatcherSvc { } private boolean isValidResourceType(String theResourceType, String theFieldComparatorType) { - return ( - theFieldComparatorType.equalsIgnoreCase(MdmConstants.ALL_RESOURCE_SEARCH_PARAM_TYPE) - || theFieldComparatorType.equalsIgnoreCase(theResourceType) - ); + return (theFieldComparatorType.equalsIgnoreCase(MdmConstants.ALL_RESOURCE_SEARCH_PARAM_TYPE) + || theFieldComparatorType.equalsIgnoreCase(theResourceType)); } @VisibleForTesting diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmChannelSubmitterSvcImpl.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmChannelSubmitterSvcImpl.java index 57e43b33e18..719b0abfdf3 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmChannelSubmitterSvcImpl.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmChannelSubmitterSvcImpl.java @@ -51,7 +51,12 @@ public class MdmChannelSubmitterSvcImpl implements IMdmChannelSubmitterSvc { @Override public void submitResourceToMdmChannel(IBaseResource theResource) { ResourceModifiedJsonMessage resourceModifiedJsonMessage = new ResourceModifiedJsonMessage(); - ResourceModifiedMessage resourceModifiedMessage = new ResourceModifiedMessage(myFhirContext, theResource, ResourceModifiedMessage.OperationTypeEnum.MANUALLY_TRIGGERED, null, (RequestPartitionId) theResource.getUserData(Constants.RESOURCE_PARTITION_ID)); + ResourceModifiedMessage resourceModifiedMessage = new ResourceModifiedMessage( + myFhirContext, + theResource, + ResourceModifiedMessage.OperationTypeEnum.MANUALLY_TRIGGERED, + null, + (RequestPartitionId) theResource.getUserData(Constants.RESOURCE_PARTITION_ID)); resourceModifiedMessage.setOperationType(ResourceModifiedMessage.OperationTypeEnum.MANUALLY_TRIGGERED); resourceModifiedJsonMessage.setPayload(resourceModifiedMessage); boolean success = getMdmChannelProducer().send(resourceModifiedJsonMessage); @@ -68,7 +73,8 @@ public class MdmChannelSubmitterSvcImpl implements IMdmChannelSubmitterSvc { private void init() { ChannelProducerSettings channelSettings = new ChannelProducerSettings(); - 
myMdmChannelProducer = myChannelFactory.getOrCreateProducer(EMPI_CHANNEL_NAME, ResourceModifiedJsonMessage.class, channelSettings); + myMdmChannelProducer = myChannelFactory.getOrCreateProducer( + EMPI_CHANNEL_NAME, ResourceModifiedJsonMessage.class, channelSettings); } private MessageChannel getMdmChannelProducer() { diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmLinkDeleteSvc.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmLinkDeleteSvc.java index a6697ff0afa..4df3503371f 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmLinkDeleteSvc.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmLinkDeleteSvc.java @@ -36,6 +36,7 @@ public class MdmLinkDeleteSvc { @Autowired private IMdmLinkDao myMdmLinkDao; + @Autowired private IIdHelperService myIdHelperService; @@ -46,19 +47,27 @@ public class MdmLinkDeleteSvc { * @return the number of records deleted */ public int deleteWithAnyReferenceTo(IBaseResource theResource) { - IResourcePersistentId pid = myIdHelperService.getPidOrThrowException(RequestPartitionId.allPartitions(), theResource.getIdElement()); + IResourcePersistentId pid = myIdHelperService.getPidOrThrowException( + RequestPartitionId.allPartitions(), theResource.getIdElement()); int removed = myMdmLinkDao.deleteWithAnyReferenceToPid(pid); if (removed > 0) { - ourLog.info("Removed {} MDM links with references to {}", removed, theResource.getIdElement().toVersionless()); + ourLog.info( + "Removed {} MDM links with references to {}", + removed, + theResource.getIdElement().toVersionless()); } return removed; } public int deleteNonRedirectWithAnyReferenceTo(IBaseResource theResource) { - IResourcePersistentId pid = myIdHelperService.getPidOrThrowException(RequestPartitionId.allPartitions(), theResource.getIdElement()); + IResourcePersistentId pid = myIdHelperService.getPidOrThrowException( + RequestPartitionId.allPartitions(), theResource.getIdElement()); int removed = myMdmLinkDao.deleteWithAnyReferenceToPidAndMatchResultNot(pid, MdmMatchResultEnum.REDIRECT); if (removed > 0) { - ourLog.info("Removed {} non-redirect MDM links with references to {}", removed, theResource.getIdElement().toVersionless()); + ourLog.info( + "Removed {} non-redirect MDM links with references to {}", + removed, + theResource.getIdElement().toVersionless()); } return removed; } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmLinkExpandSvc.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmLinkExpandSvc.java index 389d5e20985..0b2c4294669 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmLinkExpandSvc.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmLinkExpandSvc.java @@ -35,10 +35,10 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; -import javax.annotation.Nonnull; import java.util.HashSet; import java.util.List; import java.util.Set; +import javax.annotation.Nonnull; @Service @Transactional @@ -47,11 +47,11 @@ public class MdmLinkExpandSvc implements IMdmLinkExpandSvc { @Autowired private IMdmLinkDao myMdmLinkDao; + @Autowired private IIdHelperService myIdHelperService; - public MdmLinkExpandSvc() { - } + public MdmLinkExpandSvc() {} /** * Given a source resource, perform MDM expansion and return all the resource IDs of all resources that are @@ -76,7 +76,8 @@ public class MdmLinkExpandSvc implements IMdmLinkExpandSvc { @Override 
public Set expandMdmBySourceResourceId(IIdType theId) { ourLog.debug("About to expand source resource with resource id {}", theId); - return expandMdmBySourceResourcePid(myIdHelperService.getPidOrThrowException(RequestPartitionId.allPartitions(), theId)); + return expandMdmBySourceResourcePid( + myIdHelperService.getPidOrThrowException(RequestPartitionId.allPartitions(), theId)); } /** @@ -89,7 +90,8 @@ public class MdmLinkExpandSvc implements IMdmLinkExpandSvc { @Override public Set expandMdmBySourceResourcePid(IResourcePersistentId theSourceResourcePid) { ourLog.debug("About to expand source resource with PID {}", theSourceResourcePid); - List goldenPidSourcePidTuples = myMdmLinkDao.expandPidsBySourcePidAndMatchResult(theSourceResourcePid, MdmMatchResultEnum.MATCH); + List goldenPidSourcePidTuples = + myMdmLinkDao.expandPidsBySourcePidAndMatchResult(theSourceResourcePid, MdmMatchResultEnum.MATCH); return flattenPidTuplesToSet(theSourceResourcePid, goldenPidSourcePidTuples); } @@ -103,11 +105,11 @@ public class MdmLinkExpandSvc implements IMdmLinkExpandSvc { @Override public Set expandMdmByGoldenResourceId(IResourcePersistentId theGoldenResourcePid) { ourLog.debug("About to expand golden resource with PID {}", theGoldenResourcePid); - List goldenPidSourcePidTuples = myMdmLinkDao.expandPidsByGoldenResourcePidAndMatchResult(theGoldenResourcePid, MdmMatchResultEnum.MATCH); + List goldenPidSourcePidTuples = myMdmLinkDao.expandPidsByGoldenResourcePidAndMatchResult( + theGoldenResourcePid, MdmMatchResultEnum.MATCH); return flattenPidTuplesToSet(theGoldenResourcePid, goldenPidSourcePidTuples); } - /** * Given a resource ID of a golden resource, perform MDM expansion and return all the resource IDs of all resources that are * MDM-Matched to this golden resource. 
@@ -118,19 +120,22 @@ public class MdmLinkExpandSvc implements IMdmLinkExpandSvc { @Override public Set expandMdmByGoldenResourcePid(IResourcePersistentId theGoldenResourcePid) { ourLog.debug("About to expand golden resource with PID {}", theGoldenResourcePid); - List goldenPidSourcePidTuples = myMdmLinkDao.expandPidsByGoldenResourcePidAndMatchResult(theGoldenResourcePid, MdmMatchResultEnum.MATCH); + List goldenPidSourcePidTuples = myMdmLinkDao.expandPidsByGoldenResourcePidAndMatchResult( + theGoldenResourcePid, MdmMatchResultEnum.MATCH); return flattenPidTuplesToSet(theGoldenResourcePid, goldenPidSourcePidTuples); } @Override public Set expandMdmByGoldenResourceId(IdDt theId) { ourLog.debug("About to expand golden resource with golden resource id {}", theId); - IResourcePersistentId pidOrThrowException = myIdHelperService.getPidOrThrowException(RequestPartitionId.allPartitions(), theId); + IResourcePersistentId pidOrThrowException = + myIdHelperService.getPidOrThrowException(RequestPartitionId.allPartitions(), theId); return expandMdmByGoldenResourcePid(pidOrThrowException); } @Nonnull - public Set flattenPidTuplesToSet(IResourcePersistentId initialPid, List goldenPidSourcePidTuples) { + public Set flattenPidTuplesToSet( + IResourcePersistentId initialPid, List goldenPidSourcePidTuples) { Set flattenedPids = new HashSet<>(); goldenPidSourcePidTuples.forEach(tuple -> { flattenedPids.add(tuple.getSourcePid()); diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmSearchParamSvc.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmSearchParamSvc.java index bafd5e54a95..39bbdd3ac98 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmSearchParamSvc.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmSearchParamSvc.java @@ -36,21 +36,26 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; -import javax.annotation.Nullable; import java.util.List; +import javax.annotation.Nullable; @Service public class MdmSearchParamSvc { @Autowired FhirContext myFhirContext; + @Autowired private MatchUrlService myMatchUrlService; + @Autowired private ISearchParamRegistry mySearchParamRegistry; + @Autowired private SearchParamExtractorService mySearchParamExtractorService; + @Autowired private SearchBuilderFactory mySearchBuilderFactory; + @Autowired private DaoRegistry myDaoRegistry; diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmSubmitSvcImpl.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmSubmitSvcImpl.java index a7fb6f94473..9c6150c5d24 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmSubmitSvcImpl.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmSubmitSvcImpl.java @@ -43,14 +43,14 @@ import org.slf4j.Logger; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.annotation.Transactional; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.UUID; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class MdmSubmitSvcImpl implements IMdmSubmitSvc { @@ -75,22 +75,22 @@ public class MdmSubmitSvcImpl implements IMdmSubmitSvc { private int myBufferSize = DEFAULT_BUFFER_SIZE; - public MdmSubmitSvcImpl() { - } + public 
MdmSubmitSvcImpl() {} @Override @Transactional public long submitAllSourceTypesToMdm(@Nullable String theCriteria, @Nonnull RequestDetails theRequestDetails) { long submittedCount = myMdmSettings.getMdmRules().getMdmTypes().stream() - .mapToLong(type -> submitSourceResourceTypeToMdm(type, theCriteria, theRequestDetails)) - .sum(); + .mapToLong(type -> submitSourceResourceTypeToMdm(type, theCriteria, theRequestDetails)) + .sum(); return submittedCount; } @Override @Transactional - public long submitSourceResourceTypeToMdm(String theSourceResourceType, @Nullable String theCriteria, @Nonnull RequestDetails theRequestDetails) { + public long submitSourceResourceTypeToMdm( + String theSourceResourceType, @Nullable String theCriteria, @Nonnull RequestDetails theRequestDetails) { if (theCriteria == null) { ourLog.info("Submitting all resources of type {} to MDM", theSourceResourceType); } else { @@ -98,26 +98,35 @@ public class MdmSubmitSvcImpl implements IMdmSubmitSvc { } validateSourceType(theSourceResourceType); - SearchParameterMap spMap = myMdmSearchParamSvc.getSearchParameterMapFromCriteria(theSourceResourceType, theCriteria); + SearchParameterMap spMap = + myMdmSearchParamSvc.getSearchParameterMapFromCriteria(theSourceResourceType, theCriteria); spMap.setLoadSynchronous(true); spMap.setCount(myBufferSize); ISearchBuilder searchBuilder = myMdmSearchParamSvc.generateSearchBuilderForType(theSourceResourceType); - RequestPartitionId requestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequestDetails, theSourceResourceType, spMap, null); + RequestPartitionId requestPartitionId = + myRequestPartitionHelperSvc.determineReadPartitionForRequestForSearchType( + theRequestDetails, theSourceResourceType, spMap, null); return submitAllMatchingResourcesToMdmChannel(spMap, searchBuilder, requestPartitionId); } - private long submitAllMatchingResourcesToMdmChannel(SearchParameterMap theSpMap, ISearchBuilder theSearchBuilder, RequestPartitionId theRequestPartitionId) { - SearchRuntimeDetails searchRuntimeDetails = new SearchRuntimeDetails(null, UUID.randomUUID().toString()); + private long submitAllMatchingResourcesToMdmChannel( + SearchParameterMap theSpMap, ISearchBuilder theSearchBuilder, RequestPartitionId theRequestPartitionId) { + SearchRuntimeDetails searchRuntimeDetails = + new SearchRuntimeDetails(null, UUID.randomUUID().toString()); long total = 0; - try (IResultIterator query = theSearchBuilder.createQuery(theSpMap, searchRuntimeDetails, null, theRequestPartitionId)) { + try (IResultIterator query = + theSearchBuilder.createQuery(theSpMap, searchRuntimeDetails, null, theRequestPartitionId)) { Collection pidBatch; do { pidBatch = query.getNextResultBatch(myBufferSize); total += loadPidsAndSubmitToMdmChannel(theSearchBuilder, pidBatch); } while (query.hasNext()); } catch (IOException theE) { - throw new InternalErrorException(Msg.code(749) + "Failure while attempting to query resources for " + ProviderConstants.OPERATION_MDM_SUBMIT, theE); + throw new InternalErrorException( + Msg.code(749) + "Failure while attempting to query resources for " + + ProviderConstants.OPERATION_MDM_SUBMIT, + theE); } ourLog.info("MDM Submit complete. Submitted a total of {} resources.", total); return total; @@ -132,12 +141,12 @@ public class MdmSubmitSvcImpl implements IMdmSubmitSvc { * * @return The total count of submitted resources. 
*/ - private long loadPidsAndSubmitToMdmChannel(ISearchBuilder theSearchBuilder, Collection thePidsToSubmit) { + private long loadPidsAndSubmitToMdmChannel( + ISearchBuilder theSearchBuilder, Collection thePidsToSubmit) { List resourcesToSubmit = new ArrayList<>(); theSearchBuilder.loadResourcesByPid(thePidsToSubmit, Collections.emptyList(), resourcesToSubmit, false, null); ourLog.info("Submitting {} resources to MDM", resourcesToSubmit.size()); - resourcesToSubmit - .forEach(resource -> myMdmChannelSubmitterSvc.submitResourceToMdmChannel(resource)); + resourcesToSubmit.forEach(resource -> myMdmChannelSubmitterSvc.submitResourceToMdmChannel(resource)); return resourcesToSubmit.size(); } @@ -169,8 +178,9 @@ public class MdmSubmitSvcImpl implements IMdmSubmitSvc { } private void validateSourceType(String theResourceType) { - if(!myMdmSettings.getMdmRules().getMdmTypes().contains(theResourceType)) { - throw new InvalidRequestException(Msg.code(750) + ProviderConstants.OPERATION_MDM_SUBMIT + " does not support resource type: " + theResourceType); + if (!myMdmSettings.getMdmRules().getMdmTypes().contains(theResourceType)) { + throw new InvalidRequestException(Msg.code(750) + ProviderConstants.OPERATION_MDM_SUBMIT + + " does not support resource type: " + theResourceType); } } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/EIDHelper.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/EIDHelper.java index 4aa1a63c84d..0e8a7e7c0c8 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/EIDHelper.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/EIDHelper.java @@ -20,8 +20,8 @@ package ca.uhn.fhir.mdm.util; import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.mdm.api.MdmConstants; import ca.uhn.fhir.mdm.api.IMdmSettings; +import ca.uhn.fhir.mdm.api.MdmConstants; import ca.uhn.fhir.mdm.model.CanonicalEID; import org.hl7.fhir.instance.model.api.IAnyResource; import org.hl7.fhir.instance.model.api.IBaseResource; @@ -47,10 +47,9 @@ public final class EIDHelper { public CanonicalEID createHapiEid() { return new CanonicalEID( - MdmConstants.HAPI_ENTERPRISE_IDENTIFIER_SYSTEM, - UUID.randomUUID().toString(), - null - ); + MdmConstants.HAPI_ENTERPRISE_IDENTIFIER_SYSTEM, + UUID.randomUUID().toString(), + null); } /** @@ -63,7 +62,10 @@ public final class EIDHelper { */ public List getExternalEid(IBaseResource theResource) { String resourceType = myFhirContext.getResourceType(theResource); - return CanonicalEID.extractFromResource(myFhirContext, myMdmSettings.getMdmRules().getEnterpriseEIDSystemForResourceType(resourceType), theResource); + return CanonicalEID.extractFromResource( + myFhirContext, + myMdmSettings.getMdmRules().getEnterpriseEIDSystemForResourceType(resourceType), + theResource); } /** @@ -75,7 +77,8 @@ public final class EIDHelper { * @return An optional {@link CanonicalEID} representing the internal EID. Absent if the EID is not present. */ public List getHapiEid(IAnyResource theResource) { - return CanonicalEID.extractFromResource(myFhirContext, MdmConstants.HAPI_ENTERPRISE_IDENTIFIER_SYSTEM, theResource); + return CanonicalEID.extractFromResource( + myFhirContext, MdmConstants.HAPI_ENTERPRISE_IDENTIFIER_SYSTEM, theResource); } /** @@ -88,12 +91,11 @@ public final class EIDHelper { * @return a boolean indicating whether there is a match between these two identifier sets. 
*/ public boolean eidMatchExists(List theFirstResourceEids, List theSecondResourceEids) { - List collect = theFirstResourceEids.stream().map(CanonicalEID::getValue).collect(Collectors.toList()); - List collect1 = theSecondResourceEids.stream().map(CanonicalEID::getValue).collect(Collectors.toList()); - return !Collections.disjoint( - collect, - collect1 - ); + List collect = + theFirstResourceEids.stream().map(CanonicalEID::getValue).collect(Collectors.toList()); + List collect1 = + theSecondResourceEids.stream().map(CanonicalEID::getValue).collect(Collectors.toList()); + return !Collections.disjoint(collect, collect1); } /** diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/GoldenResourceHelper.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/GoldenResourceHelper.java index 1b55f543e91..3c228ba7830 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/GoldenResourceHelper.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/GoldenResourceHelper.java @@ -41,12 +41,12 @@ import org.slf4j.Logger; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import static ca.uhn.fhir.context.FhirVersionEnum.DSTU3; import static ca.uhn.fhir.context.FhirVersionEnum.R4; @@ -60,10 +60,13 @@ public class GoldenResourceHelper { @Autowired private IMdmSettings myMdmSettings; + @Autowired private EIDHelper myEIDHelper; + @Autowired private IMdmSurvivorshipService myMdmSurvivorshipService; + @Autowired private MdmPartitionHelper myMdmPartitionHelper; @@ -83,14 +86,16 @@ public class GoldenResourceHelper { * @param theMdmTransactionContext */ @Nonnull - public T createGoldenResourceFromMdmSourceResource(T theIncomingResource, MdmTransactionContext theMdmTransactionContext) { + public T createGoldenResourceFromMdmSourceResource( + T theIncomingResource, MdmTransactionContext theMdmTransactionContext) { validateContextSupported(); // get a ref to the actual ID Field RuntimeResourceDefinition resourceDefinition = myFhirContext.getResourceDefinition(theIncomingResource); IBaseResource newGoldenResource = resourceDefinition.newInstance(); - myMdmSurvivorshipService.applySurvivorshipRulesToGoldenResource(theIncomingResource, newGoldenResource, theMdmTransactionContext); + myMdmSurvivorshipService.applySurvivorshipRulesToGoldenResource( + theIncomingResource, newGoldenResource, theMdmTransactionContext); // hapi has 2 metamodels: for children and types BaseRuntimeChildDefinition goldenResourceIdentifier = resourceDefinition.getChildByName(FIELD_NAME_IDENTIFIER); @@ -103,7 +108,9 @@ public class GoldenResourceHelper { MdmResourceUtil.setGoldenResource(newGoldenResource); // add the partition id to the new resource - newGoldenResource.setUserData(Constants.RESOURCE_PARTITION_ID, myMdmPartitionHelper.getRequestPartitionIdForNewGoldenResources(theIncomingResource)); + newGoldenResource.setUserData( + Constants.RESOURCE_PARTITION_ID, + myMdmPartitionHelper.getRequestPartitionIdForNewGoldenResources(theIncomingResource)); return (T) newGoldenResource; } @@ -111,9 +118,11 @@ public class GoldenResourceHelper { /** * If there are no external EIDs on the incoming resource, create a new HAPI EID on the new Golden Resource. 
*/ - //TODO GGG ask james if there is any way we can convert this canonical EID into a generic STU-agnostic IBase. + // TODO GGG ask james if there is any way we can convert this canonical EID into a generic STU-agnostic IBase. private void addHapiEidIfNoExternalEidIsPresent( - IBaseResource theNewGoldenResource, BaseRuntimeChildDefinition theGoldenResourceIdentifier, IAnyResource theSourceResource) { + IBaseResource theNewGoldenResource, + BaseRuntimeChildDefinition theGoldenResourceIdentifier, + IAnyResource theSourceResource) { List eidsToApply = myEIDHelper.getExternalEid(theNewGoldenResource); if (!eidsToApply.isEmpty()) { @@ -121,14 +130,18 @@ public class GoldenResourceHelper { } CanonicalEID hapiEid = myEIDHelper.createHapiEid(); - theGoldenResourceIdentifier.getMutator().addValue(theNewGoldenResource, IdentifierUtil.toId(myFhirContext, hapiEid)); + theGoldenResourceIdentifier + .getMutator() + .addValue(theNewGoldenResource, IdentifierUtil.toId(myFhirContext, hapiEid)); // set identifier on the source resource cloneEidIntoResource(myFhirContext, theSourceResource, hapiEid); } - private void cloneMDMEidsIntoNewGoldenResource(BaseRuntimeChildDefinition theGoldenResourceIdentifier, - IAnyResource theIncomingResource, IBase theNewGoldenResource) { + private void cloneMDMEidsIntoNewGoldenResource( + BaseRuntimeChildDefinition theGoldenResourceIdentifier, + IAnyResource theIncomingResource, + IBase theNewGoldenResource) { String incomingResourceType = myFhirContext.getResourceType(theIncomingResource); String mdmEIDSystem = myMdmSettings.getMdmRules().getEnterpriseEIDSystemForResourceType(incomingResourceType); @@ -138,17 +151,29 @@ public class GoldenResourceHelper { // FHIR choice types - fields within fhir where we have a choice of ids IFhirPath fhirPath = myFhirContext.newFhirPath(); - List incomingResourceIdentifiers = theGoldenResourceIdentifier.getAccessor().getValues(theIncomingResource); + List incomingResourceIdentifiers = + theGoldenResourceIdentifier.getAccessor().getValues(theIncomingResource); for (IBase incomingResourceIdentifier : incomingResourceIdentifiers) { - Optional incomingIdentifierSystem = fhirPath.evaluateFirst(incomingResourceIdentifier, "system", IPrimitiveType.class); + Optional incomingIdentifierSystem = + fhirPath.evaluateFirst(incomingResourceIdentifier, "system", IPrimitiveType.class); if (incomingIdentifierSystem.isPresent()) { - String incomingIdentifierSystemString = incomingIdentifierSystem.get().getValueAsString(); + String incomingIdentifierSystemString = + incomingIdentifierSystem.get().getValueAsString(); if (Objects.equals(incomingIdentifierSystemString, mdmEIDSystem)) { - ourLog.debug("Incoming resource EID System {} matches EID system in the MDM rules. Copying to Golden Resource.", incomingIdentifierSystemString); - ca.uhn.fhir.util.TerserUtil.cloneEidIntoResource(myFhirContext, theGoldenResourceIdentifier, incomingResourceIdentifier, theNewGoldenResource); + ourLog.debug( + "Incoming resource EID System {} matches EID system in the MDM rules. Copying to Golden Resource.", + incomingIdentifierSystemString); + ca.uhn.fhir.util.TerserUtil.cloneEidIntoResource( + myFhirContext, + theGoldenResourceIdentifier, + incomingResourceIdentifier, + theNewGoldenResource); } else { - ourLog.debug("Incoming resource EID System {} differs from EID system in the MDM rules {}. Not copying to Golden Resource.", incomingIdentifierSystemString, mdmEIDSystem); + ourLog.debug( + "Incoming resource EID System {} differs from EID system in the MDM rules {}. 
Not copying to Golden Resource.", + incomingIdentifierSystemString, + mdmEIDSystem); } } else { ourLog.debug("No EID System in incoming resource."); @@ -161,7 +186,8 @@ public class GoldenResourceHelper { if (fhirVersion == R4 || fhirVersion == DSTU3) { return; } - throw new UnsupportedOperationException(Msg.code(1489) + "Version not supported: " + myFhirContext.getVersion().getVersion()); + throw new UnsupportedOperationException(Msg.code(1489) + "Version not supported: " + + myFhirContext.getVersion().getVersion()); } /** @@ -174,9 +200,12 @@ public class GoldenResourceHelper { * @param theSourceResource The source we will retrieve the external EID from. * @return the modified {@link IBaseResource} representing the Golden Resource. */ - public IAnyResource updateGoldenResourceExternalEidFromSourceResource(IAnyResource theGoldenResource, IAnyResource - theSourceResource, MdmTransactionContext theMdmTransactionContext) { - //This handles overwriting an automatically assigned EID if a patient that links is coming in with an official EID. + public IAnyResource updateGoldenResourceExternalEidFromSourceResource( + IAnyResource theGoldenResource, + IAnyResource theSourceResource, + MdmTransactionContext theMdmTransactionContext) { + // This handles overwriting an automatically assigned EID if a patient that links is coming in with an official + // EID. List incomingSourceEid = myEIDHelper.getExternalEid(theSourceResource); List goldenResourceOfficialEid = myEIDHelper.getExternalEid(theGoldenResource); @@ -186,14 +215,29 @@ public class GoldenResourceHelper { if (goldenResourceOfficialEid.isEmpty() || !myMdmSettings.isPreventMultipleEids()) { if (addCanonicalEidsToGoldenResourceIfAbsent(theGoldenResource, incomingSourceEid)) { - log(theMdmTransactionContext, "Incoming resource:" + theSourceResource.getIdElement().toUnqualifiedVersionless() + " + with EID " + incomingSourceEid.stream().map(CanonicalEID::toString).collect(Collectors.joining(",")) - + " is applying this EID to its related Golden Resource, as this Golden Resource does not yet have an external EID"); + log( + theMdmTransactionContext, + "Incoming resource:" + theSourceResource.getIdElement().toUnqualifiedVersionless() + + " + with EID " + + incomingSourceEid.stream() + .map(CanonicalEID::toString) + .collect(Collectors.joining(",")) + + " is applying this EID to its related Golden Resource, as this Golden Resource does not yet have an external EID"); } - } else if (!goldenResourceOfficialEid.isEmpty() && myEIDHelper.eidMatchExists(goldenResourceOfficialEid, incomingSourceEid)) { - log(theMdmTransactionContext, "Incoming resource:" + theSourceResource.getIdElement().toVersionless() + " with EIDs " + incomingSourceEid.stream().map(CanonicalEID::toString).collect(Collectors.joining(",")) + " does not need to overwrite the EID in the Golden Resource, as this EID is already present in the Golden Resource"); + } else if (!goldenResourceOfficialEid.isEmpty() + && myEIDHelper.eidMatchExists(goldenResourceOfficialEid, incomingSourceEid)) { + log( + theMdmTransactionContext, + "Incoming resource:" + theSourceResource.getIdElement().toVersionless() + " with EIDs " + + incomingSourceEid.stream() + .map(CanonicalEID::toString) + .collect(Collectors.joining(",")) + + " does not need to overwrite the EID in the Golden Resource, as this EID is already present in the Golden Resource"); } else { - throw new IllegalArgumentException(Msg.code(1490) + String.format("Incoming resource EID %s would create a duplicate Golden Resource, as Golden Resource EID 
%s already exists!", - incomingSourceEid.toString(), goldenResourceOfficialEid.toString())); + throw new IllegalArgumentException(Msg.code(1490) + + String.format( + "Incoming resource EID %s would create a duplicate Golden Resource, as Golden Resource EID %s already exists!", + incomingSourceEid.toString(), goldenResourceOfficialEid.toString())); } return theGoldenResource; } @@ -204,9 +248,11 @@ public class GoldenResourceHelper { return theGoldenResource; } - private void clearExternalEidsFromTheGoldenResource(BaseRuntimeChildDefinition theGoldenResourceIdentifier, IBaseResource theGoldenResource) { + private void clearExternalEidsFromTheGoldenResource( + BaseRuntimeChildDefinition theGoldenResourceIdentifier, IBaseResource theGoldenResource) { IFhirPath fhirPath = myFhirContext.newFhirPath(); - List goldenResourceIdentifiers = theGoldenResourceIdentifier.getAccessor().getValues(theGoldenResource); + List goldenResourceIdentifiers = + theGoldenResourceIdentifier.getAccessor().getValues(theGoldenResource); List clonedIdentifiers = new ArrayList<>(); FhirTerser terser = myFhirContext.newTerser(); @@ -217,13 +263,15 @@ public class GoldenResourceHelper { String mdmSystem = myMdmSettings.getMdmRules().getEnterpriseEIDSystemForResourceType(resourceType); String baseSystem = system.get().getValueAsString(); if (Objects.equals(baseSystem, mdmSystem)) { - ourLog.debug("Found EID confirming to MDM rules {}. It does not need to be copied, skipping", baseSystem); + ourLog.debug( + "Found EID confirming to MDM rules {}. It does not need to be copied, skipping", + baseSystem); continue; } } BaseRuntimeElementCompositeDefinition childIdentifier = (BaseRuntimeElementCompositeDefinition) - theGoldenResourceIdentifier.getChildByName(FIELD_NAME_IDENTIFIER); + theGoldenResourceIdentifier.getChildByName(FIELD_NAME_IDENTIFIER); IBase goldenResourceNewIdentifier = childIdentifier.newInstance(); terser.cloneInto(base, goldenResourceNewIdentifier, true); @@ -248,7 +296,8 @@ public class GoldenResourceHelper { * Given a list of incoming External EIDs, and a Golden Resource, apply all the EIDs to this resource, which did not already exist on it. 
* @return true if an EID was added */ - private boolean addCanonicalEidsToGoldenResourceIfAbsent(IBaseResource theGoldenResource, List theIncomingSourceExternalEids) { + private boolean addCanonicalEidsToGoldenResourceIfAbsent( + IBaseResource theGoldenResource, List theIncomingSourceExternalEids) { List goldenResourceExternalEids = myEIDHelper.getExternalEid(theGoldenResource); boolean addedEid = false; for (CanonicalEID incomingExternalEid : theIncomingSourceExternalEids) { @@ -265,23 +314,33 @@ public class GoldenResourceHelper { return ca.uhn.fhir.util.TerserUtil.hasValues(myFhirContext, theResource, FIELD_NAME_IDENTIFIER); } - public void mergeIndentifierFields(IBaseResource theFromGoldenResource, IBaseResource theToGoldenResource, MdmTransactionContext theMdmTransactionContext) { - ca.uhn.fhir.util.TerserUtil.cloneCompositeField(myFhirContext, theFromGoldenResource, theToGoldenResource, - FIELD_NAME_IDENTIFIER); + public void mergeIndentifierFields( + IBaseResource theFromGoldenResource, + IBaseResource theToGoldenResource, + MdmTransactionContext theMdmTransactionContext) { + ca.uhn.fhir.util.TerserUtil.cloneCompositeField( + myFhirContext, theFromGoldenResource, theToGoldenResource, FIELD_NAME_IDENTIFIER); } - public void mergeNonIdentiferFields(IBaseResource theFromGoldenResource, IBaseResource theToGoldenResource, MdmTransactionContext theMdmTransactionContext) { - myMdmSurvivorshipService.applySurvivorshipRulesToGoldenResource(theFromGoldenResource, theToGoldenResource, theMdmTransactionContext); + public void mergeNonIdentiferFields( + IBaseResource theFromGoldenResource, + IBaseResource theToGoldenResource, + MdmTransactionContext theMdmTransactionContext) { + myMdmSurvivorshipService.applySurvivorshipRulesToGoldenResource( + theFromGoldenResource, theToGoldenResource, theMdmTransactionContext); } /** * An incoming resource is a potential duplicate if it matches a source that has a golden resource with an official * EID, but the incoming resource also has an EID that does not match. 
*/ - public boolean isPotentialDuplicate(IAnyResource theExistingGoldenResource, IAnyResource theComparingGoldenResource) { + public boolean isPotentialDuplicate( + IAnyResource theExistingGoldenResource, IAnyResource theComparingGoldenResource) { List externalEidsGoldenResource = myEIDHelper.getExternalEid(theExistingGoldenResource); List externalEidsResource = myEIDHelper.getExternalEid(theComparingGoldenResource); - return !externalEidsGoldenResource.isEmpty() && !externalEidsResource.isEmpty() && !myEIDHelper.eidMatchExists(externalEidsResource, externalEidsGoldenResource); + return !externalEidsGoldenResource.isEmpty() + && !externalEidsResource.isEmpty() + && !myEIDHelper.eidMatchExists(externalEidsResource, externalEidsGoldenResource); } private void log(MdmTransactionContext theMdmTransactionContext, String theMessage) { @@ -289,11 +348,14 @@ public class GoldenResourceHelper { ourLog.debug(theMessage); } - public void handleExternalEidAddition(IAnyResource theGoldenResource, IAnyResource theSourceResource, MdmTransactionContext - theMdmTransactionContext) { + public void handleExternalEidAddition( + IAnyResource theGoldenResource, + IAnyResource theSourceResource, + MdmTransactionContext theMdmTransactionContext) { List eidFromResource = myEIDHelper.getExternalEid(theSourceResource); if (!eidFromResource.isEmpty()) { - updateGoldenResourceExternalEidFromSourceResource(theGoldenResource, theSourceResource, theMdmTransactionContext); + updateGoldenResourceExternalEidFromSourceResource( + theGoldenResource, theSourceResource, theMdmTransactionContext); } } @@ -304,12 +366,16 @@ public class GoldenResourceHelper { * @param theResourceToCloneInto Resource to set the EID on * @param theEid EID to be set */ - public void cloneEidIntoResource(FhirContext theFhirContext, IBaseResource theResourceToCloneInto, CanonicalEID theEid) { + public void cloneEidIntoResource( + FhirContext theFhirContext, IBaseResource theResourceToCloneInto, CanonicalEID theEid) { // get a ref to the actual ID Field RuntimeResourceDefinition resourceDefinition = theFhirContext.getResourceDefinition(theResourceToCloneInto); // hapi has 2 metamodels: for children and types BaseRuntimeChildDefinition resourceIdentifier = resourceDefinition.getChildByName(FIELD_NAME_IDENTIFIER); - ca.uhn.fhir.util.TerserUtil.cloneEidIntoResource(theFhirContext, resourceIdentifier, - IdentifierUtil.toId(theFhirContext, theEid), theResourceToCloneInto); + ca.uhn.fhir.util.TerserUtil.cloneEidIntoResource( + theFhirContext, + resourceIdentifier, + IdentifierUtil.toId(theFhirContext, theEid), + theResourceToCloneInto); } } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/IdentifierUtil.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/IdentifierUtil.java index 4433b82a19c..403ddd2c4ff 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/IdentifierUtil.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/IdentifierUtil.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.mdm.util; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.mdm.model.CanonicalEID; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.util.CanonicalIdentifier; @@ -28,8 +28,7 @@ import org.hl7.fhir.instance.model.api.IBase; public final class IdentifierUtil { - private IdentifierUtil() { - } + private IdentifierUtil() {} public static CanonicalIdentifier identifierDtFromIdentifier(IBase theIdentifier) { 
CanonicalIdentifier retval = new CanonicalIdentifier(); @@ -45,13 +44,12 @@ public final class IdentifierUtil { org.hl7.fhir.r5.model.Identifier ident = (org.hl7.fhir.r5.model.Identifier) theIdentifier; retval.setSystem(ident.getSystem()).setValue(ident.getValue()); } else { - throw new InternalErrorException(Msg.code(1486) + "Expected 'Identifier' type but was '" + theIdentifier.getClass().getName() + "'"); + throw new InternalErrorException(Msg.code(1486) + "Expected 'Identifier' type but was '" + + theIdentifier.getClass().getName() + "'"); } return retval; } - - /** * Retrieves appropriate FHIR Identifier model instance based on the context version * @@ -67,6 +65,7 @@ public final class IdentifierUtil { case DSTU3: return (T) eid.toDSTU3(); } - throw new IllegalStateException(Msg.code(1487) + "Unsupported FHIR version " + theFhirContext.getVersion().getVersion()); + throw new IllegalStateException(Msg.code(1487) + "Unsupported FHIR version " + + theFhirContext.getVersion().getVersion()); } } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/MdmPartitionHelper.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/MdmPartitionHelper.java index aa035cb25c8..31b73320160 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/MdmPartitionHelper.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/MdmPartitionHelper.java @@ -33,7 +33,7 @@ public class MdmPartitionHelper { private final MessageHelper myMessageHelper; private final IMdmSettings myMdmSettings; - public MdmPartitionHelper(MessageHelper theMessageHelper, IMdmSettings theMdmSettings){ + public MdmPartitionHelper(MessageHelper theMessageHelper, IMdmSettings theMdmSettings) { myMessageHelper = theMessageHelper; myMdmSettings = theMdmSettings; } @@ -46,13 +46,19 @@ public class MdmPartitionHelper { * @param theFromResource * @param theToResource */ - public void validateMdmResourcesPartitionMatches(IAnyResource theFromResource, IAnyResource theToResource){ - if (!myMdmSettings.getSearchAllPartitionForMatch()){ - RequestPartitionId fromGoldenResourcePartitionId = (RequestPartitionId) theFromResource.getUserData(Constants.RESOURCE_PARTITION_ID); - RequestPartitionId toGoldenPartitionId = (RequestPartitionId) theToResource.getUserData(Constants.RESOURCE_PARTITION_ID); - if (fromGoldenResourcePartitionId != null && toGoldenPartitionId != null && fromGoldenResourcePartitionId.hasPartitionIds() && toGoldenPartitionId.hasPartitionIds() && - !fromGoldenResourcePartitionId.hasPartitionId(toGoldenPartitionId.getFirstPartitionIdOrNull())) { - throw new InvalidRequestException(Msg.code(2075) + myMessageHelper.getMessageForMismatchPartition(theFromResource, theToResource)); + public void validateMdmResourcesPartitionMatches(IAnyResource theFromResource, IAnyResource theToResource) { + if (!myMdmSettings.getSearchAllPartitionForMatch()) { + RequestPartitionId fromGoldenResourcePartitionId = + (RequestPartitionId) theFromResource.getUserData(Constants.RESOURCE_PARTITION_ID); + RequestPartitionId toGoldenPartitionId = + (RequestPartitionId) theToResource.getUserData(Constants.RESOURCE_PARTITION_ID); + if (fromGoldenResourcePartitionId != null + && toGoldenPartitionId != null + && fromGoldenResourcePartitionId.hasPartitionIds() + && toGoldenPartitionId.hasPartitionIds() + && !fromGoldenResourcePartitionId.hasPartitionId(toGoldenPartitionId.getFirstPartitionIdOrNull())) { + throw new InvalidRequestException(Msg.code(2075) + + myMessageHelper.getMessageForMismatchPartition(theFromResource, 
theToResource)); } } } @@ -64,20 +70,18 @@ public class MdmPartitionHelper { * @param theResource * @return The RequestPartitionId that should be used for the candidate search for the given resource */ - public RequestPartitionId getRequestPartitionIdFromResourceForSearch(IAnyResource theResource){ - if (myMdmSettings.getSearchAllPartitionForMatch()){ + public RequestPartitionId getRequestPartitionIdFromResourceForSearch(IAnyResource theResource) { + if (myMdmSettings.getSearchAllPartitionForMatch()) { return RequestPartitionId.allPartitions(); - } - else { + } else { return (RequestPartitionId) theResource.getUserData(Constants.RESOURCE_PARTITION_ID); } } - public RequestPartitionId getRequestPartitionIdForNewGoldenResources(IAnyResource theSourceResource){ - if (StringUtils.isBlank(myMdmSettings.getGoldenResourcePartitionName())){ + public RequestPartitionId getRequestPartitionIdForNewGoldenResources(IAnyResource theSourceResource) { + if (StringUtils.isBlank(myMdmSettings.getGoldenResourcePartitionName())) { return (RequestPartitionId) theSourceResource.getUserData(Constants.RESOURCE_PARTITION_ID); - } - else { + } else { return RequestPartitionId.fromPartitionName(myMdmSettings.getGoldenResourcePartitionName()); } } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/MdmResourceUtil.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/MdmResourceUtil.java index ad2b5e877ba..99931f58e57 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/MdmResourceUtil.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/MdmResourceUtil.java @@ -23,13 +23,12 @@ import ca.uhn.fhir.mdm.api.MdmConstants; import org.hl7.fhir.instance.model.api.IBaseCoding; import org.hl7.fhir.instance.model.api.IBaseResource; -import javax.annotation.Nonnull; import java.util.Optional; +import javax.annotation.Nonnull; public final class MdmResourceUtil { - private MdmResourceUtil() { - } + private MdmResourceUtil() {} /** * If the resource is tagged as not managed by MDM, return false. Otherwise true. @@ -38,7 +37,8 @@ public final class MdmResourceUtil { * @return A boolean indicating whether MDM can manage this resource. 
*/ public static boolean isMdmAllowed(IBaseResource theBaseResource) { - return theBaseResource.getMeta().getTag(MdmConstants.SYSTEM_MDM_MANAGED, MdmConstants.CODE_NO_MDM_MANAGED) == null; + return theBaseResource.getMeta().getTag(MdmConstants.SYSTEM_MDM_MANAGED, MdmConstants.CODE_NO_MDM_MANAGED) + == null; } /** @@ -53,7 +53,8 @@ public final class MdmResourceUtil { } public static boolean isGoldenRecord(IBaseResource theBaseResource) { - return resourceHasTag(theBaseResource, MdmConstants.SYSTEM_GOLDEN_RECORD_STATUS, MdmConstants.CODE_GOLDEN_RECORD); + return resourceHasTag( + theBaseResource, MdmConstants.SYSTEM_GOLDEN_RECORD_STATUS, MdmConstants.CODE_GOLDEN_RECORD); } public static boolean hasGoldenRecordSystemTag(IBaseResource theIBaseResource) { @@ -65,7 +66,8 @@ public final class MdmResourceUtil { } public static boolean isGoldenRecordRedirected(IBaseResource theBaseResource) { - return resourceHasTag(theBaseResource, MdmConstants.SYSTEM_GOLDEN_RECORD_STATUS, MdmConstants.CODE_GOLDEN_RECORD_REDIRECTED); + return resourceHasTag( + theBaseResource, MdmConstants.SYSTEM_GOLDEN_RECORD_STATUS, MdmConstants.CODE_GOLDEN_RECORD_REDIRECTED); } private static boolean resourceHasTag(IBaseResource theBaseResource, String theSystem, String theCode) { @@ -82,8 +84,11 @@ public final class MdmResourceUtil { return theBaseResource.getMeta().getTag().stream().anyMatch(tag -> theSystem.equalsIgnoreCase(tag.getSystem())); } - private static Optional getTagWithSystem(IBaseResource theResource, @Nonnull String theSystem) { - return theResource.getMeta().getTag().stream().filter(tag -> theSystem.equalsIgnoreCase(tag.getSystem())).findFirst(); + private static Optional getTagWithSystem( + IBaseResource theResource, @Nonnull String theSystem) { + return theResource.getMeta().getTag().stream() + .filter(tag -> theSystem.equalsIgnoreCase(tag.getSystem())) + .findFirst(); } public static void removeTagWithSystem(IBaseResource theResource, @Nonnull String theSystem) { @@ -98,15 +103,27 @@ public final class MdmResourceUtil { * @return Returns resource with the tag set. */ public static IBaseResource setMdmManaged(IBaseResource theBaseResource) { - return setTagOnResource(theBaseResource, MdmConstants.SYSTEM_MDM_MANAGED, MdmConstants.CODE_HAPI_MDM_MANAGED, MdmConstants.DISPLAY_HAPI_MDM_MANAGED); + return setTagOnResource( + theBaseResource, + MdmConstants.SYSTEM_MDM_MANAGED, + MdmConstants.CODE_HAPI_MDM_MANAGED, + MdmConstants.DISPLAY_HAPI_MDM_MANAGED); } public static IBaseResource setGoldenResource(IBaseResource theBaseResource) { - return setTagOnResource(theBaseResource, MdmConstants.SYSTEM_GOLDEN_RECORD_STATUS, MdmConstants.CODE_GOLDEN_RECORD, MdmConstants.DISPLAY_GOLDEN_RECORD); + return setTagOnResource( + theBaseResource, + MdmConstants.SYSTEM_GOLDEN_RECORD_STATUS, + MdmConstants.CODE_GOLDEN_RECORD, + MdmConstants.DISPLAY_GOLDEN_RECORD); } public static IBaseResource setGoldenResourceRedirected(IBaseResource theBaseResource) { - return setTagOnResource(theBaseResource, MdmConstants.SYSTEM_GOLDEN_RECORD_STATUS, MdmConstants.CODE_GOLDEN_RECORD_REDIRECTED, MdmConstants.DISPLAY_GOLDEN_REDIRECT); + return setTagOnResource( + theBaseResource, + MdmConstants.SYSTEM_GOLDEN_RECORD_STATUS, + MdmConstants.CODE_GOLDEN_RECORD_REDIRECTED, + MdmConstants.DISPLAY_GOLDEN_REDIRECT); } /** @@ -116,7 +133,8 @@ public final class MdmResourceUtil { * a reference to a tag, to make sure it isn't double-added. 
*/ @Nonnull - private static IBaseResource setTagOnResource(IBaseResource theGoldenResource, String theSystem, String theCode, String theDisplay) { + private static IBaseResource setTagOnResource( + IBaseResource theGoldenResource, String theSystem, String theCode, String theDisplay) { Optional tagWithSystem = getTagWithSystem(theGoldenResource, theSystem); if (tagWithSystem.isPresent()) { tagWithSystem.get().setCode(theCode); diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/MessageHelper.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/MessageHelper.java index 7f81b6dab55..3a25c931850 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/MessageHelper.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/MessageHelper.java @@ -44,8 +44,8 @@ public class MessageHelper { public String getMessageForUnmanagedResource() { return String.format( - "Only MDM managed resources can be merged. MDM managed resources must have the %s tag.", - MdmConstants.CODE_HAPI_MDM_MANAGED); + "Only MDM managed resources can be merged. MDM managed resources must have the %s tag.", + MdmConstants.CODE_HAPI_MDM_MANAGED); } public String getMessageForUnsupportedResource(String theName, IAnyResource theResource) { @@ -53,8 +53,9 @@ public class MessageHelper { } public String getMessageForUnsupportedResource(String theName, String theResourceType) { - return String.format("Only %s resources can be merged. The %s points to a %s", - myMdmSettings.getSupportedMdmTypes(), theName, theResourceType); + return String.format( + "Only %s resources can be merged. The %s points to a %s", + myMdmSettings.getSupportedMdmTypes(), theName, theResourceType); } public String getMessageForUnsupportedMatchResult() { @@ -63,27 +64,28 @@ public class MessageHelper { public String getMessageForUnsupportedFirstArgumentTypeInUpdate(String goldenRecordType) { return "First argument to " + ProviderConstants.MDM_UPDATE_LINK + " must be a " - + myMdmSettings.getSupportedMdmTypes() + ". Was " + goldenRecordType; + + myMdmSettings.getSupportedMdmTypes() + ". Was " + goldenRecordType; } public String getMessageForUnsupportedSecondArgumentTypeInUpdate(String theGoldenRecordType) { return "First argument to " + ProviderConstants.MDM_UPDATE_LINK + " must be a " - + myMdmSettings.getSupportedMdmTypes() + ". Was " + theGoldenRecordType; + + myMdmSettings.getSupportedMdmTypes() + ". Was " + theGoldenRecordType; } public String getMessageForArgumentTypeMismatchInUpdate(String theGoldenRecordType, String theSourceResourceType) { - return "Arguments to " + ProviderConstants.MDM_UPDATE_LINK + " must be of the same type. Were " + - theGoldenRecordType + " and " + theSourceResourceType; + return "Arguments to " + ProviderConstants.MDM_UPDATE_LINK + " must be of the same type. 
Were " + + theGoldenRecordType + " and " + theSourceResourceType; } public String getMessageForUnsupportedSourceResource() { return "The source resource is marked with the " + MdmConstants.CODE_NO_MDM_MANAGED - + " tag which means it may not be MDM linked."; + + " tag which means it may not be MDM linked."; } public String getMessageForNoLink(IAnyResource theGoldenRecord, IAnyResource theSourceResource) { - return getMessageForNoLink(theGoldenRecord.getIdElement().toVersionless().toString(), - theSourceResource.getIdElement().toVersionless().toString()); + return getMessageForNoLink( + theGoldenRecord.getIdElement().toVersionless().toString(), + theSourceResource.getIdElement().toVersionless().toString()); } public String getMessageForNoLink(String theGoldenRecord, String theSourceResource) { @@ -91,8 +93,9 @@ public class MessageHelper { } public String getMessageForAlreadyAcceptedLink(IAnyResource theGoldenRecord, IAnyResource theSourceResource) { - return getMessageForAlreadyAcceptedLink(theGoldenRecord.getIdElement().toVersionless().toString(), - theSourceResource.getIdElement().toVersionless().toString()); + return getMessageForAlreadyAcceptedLink( + theGoldenRecord.getIdElement().toVersionless().toString(), + theSourceResource.getIdElement().toVersionless().toString()); } public String getMessageForAlreadyAcceptedLink(String theGoldenId, String theSourceId) { @@ -100,16 +103,19 @@ public class MessageHelper { } public String getMessageForPresentLink(IAnyResource theGoldenRecord, IAnyResource theSourceResource) { - return getMessageForPresentLink(theGoldenRecord.getIdElement().toVersionless().toString(), - theSourceResource.getIdElement().toVersionless().toString()); + return getMessageForPresentLink( + theGoldenRecord.getIdElement().toVersionless().toString(), + theSourceResource.getIdElement().toVersionless().toString()); } public String getMessageForPresentLink(String theGoldenRecord, String theSourceResource) { - return "Link already exists between " + theGoldenRecord + " and " + theSourceResource + ". Use $mdm-update-link instead."; + return "Link already exists between " + theGoldenRecord + " and " + theSourceResource + + ". 
Use $mdm-update-link instead."; } public String getMessageForMultipleGoldenRecords(IAnyResource theSourceResource) { - return getMessageForMultipleGoldenRecords(theSourceResource.getIdElement().toVersionless().toString()); + return getMessageForMultipleGoldenRecords( + theSourceResource.getIdElement().toVersionless().toString()); } public String getMessageForMultipleGoldenRecords(String theSourceResource) { @@ -117,15 +123,18 @@ public class MessageHelper { } public String getMessageForFailedGoldenResourceLoad(String theParamName, String theGoldenResourceId) { - return theGoldenResourceId + " used as parameter [" + theParamName + "] could not be loaded as a golden resource, as it appears to be lacking the golden resource meta tags."; + return theGoldenResourceId + " used as parameter [" + theParamName + + "] could not be loaded as a golden resource, as it appears to be lacking the golden resource meta tags."; } public String getMessageForMismatchPartition(IAnyResource theGoldenRecord, IAnyResource theSourceResource) { - return getMessageForMismatchPartition(theGoldenRecord.getIdElement().toVersionless().toString(), - theSourceResource.getIdElement().toVersionless().toString()); + return getMessageForMismatchPartition( + theGoldenRecord.getIdElement().toVersionless().toString(), + theSourceResource.getIdElement().toVersionless().toString()); } public String getMessageForMismatchPartition(String theGoldenRecord, String theSourceResource) { - return theGoldenRecord + " and " + theSourceResource + " are stored in different partitions. This operation is only available for resources on the same partition."; + return theGoldenRecord + " and " + theSourceResource + + " are stored in different partitions. This operation is only available for resources on the same partition."; } } diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/NameUtil.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/NameUtil.java index 78a1b5406b3..a349f8ed5c1 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/NameUtil.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/util/NameUtil.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.mdm.util; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import org.apache.commons.lang3.StringUtils; import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.r4.model.HumanName; @@ -31,34 +31,39 @@ import java.util.stream.Collectors; public final class NameUtil { - private NameUtil() { - } + private NameUtil() {} public static List extractGivenNames(FhirContext theFhirContext, IBase theBase) { - switch(theFhirContext.getVersion().getVersion()) { + switch (theFhirContext.getVersion().getVersion()) { case R4: - HumanName humanNameR4 = (HumanName)theBase; - return humanNameR4.getGiven().stream().map(PrimitiveType::getValueAsString).filter(s -> !StringUtils.isEmpty(s)).collect(Collectors.toList()); + HumanName humanNameR4 = (HumanName) theBase; + return humanNameR4.getGiven().stream() + .map(PrimitiveType::getValueAsString) + .filter(s -> !StringUtils.isEmpty(s)) + .collect(Collectors.toList()); case DSTU3: org.hl7.fhir.dstu3.model.HumanName humanNameDSTU3 = (org.hl7.fhir.dstu3.model.HumanName) theBase; - return humanNameDSTU3.getGiven().stream().map(given -> given.toString()).filter(s -> !StringUtils.isEmpty(s)).collect(Collectors.toList()); + return humanNameDSTU3.getGiven().stream() + .map(given -> given.toString()) + .filter(s -> !StringUtils.isEmpty(s)) + 
.collect(Collectors.toList()); default: - throw new UnsupportedOperationException(Msg.code(1491) + "Version not supported: " + theFhirContext.getVersion().getVersion()); - + throw new UnsupportedOperationException(Msg.code(1491) + "Version not supported: " + + theFhirContext.getVersion().getVersion()); } } public static String extractFamilyName(FhirContext theFhirContext, IBase theBase) { - switch(theFhirContext.getVersion().getVersion()) { + switch (theFhirContext.getVersion().getVersion()) { case R4: - HumanName humanNameR4 = (HumanName)theBase; + HumanName humanNameR4 = (HumanName) theBase; return humanNameR4.getFamily(); case DSTU3: - org.hl7.fhir.dstu3.model.HumanName humanNameDSTU3 = (org.hl7.fhir.dstu3.model.HumanName)theBase; + org.hl7.fhir.dstu3.model.HumanName humanNameDSTU3 = (org.hl7.fhir.dstu3.model.HumanName) theBase; return humanNameDSTU3.getFamily(); default: - throw new UnsupportedOperationException(Msg.code(1492) + "Version not supported: " + theFhirContext.getVersion().getVersion()); - + throw new UnsupportedOperationException(Msg.code(1492) + "Version not supported: " + + theFhirContext.getVersion().getVersion()); } } } diff --git a/hapi-fhir-server-openapi/src/main/java/ca/uhn/fhir/rest/openapi/OpenApiInterceptor.java b/hapi-fhir-server-openapi/src/main/java/ca/uhn/fhir/rest/openapi/OpenApiInterceptor.java index bd09d784de8..66c6bbc990c 100644 --- a/hapi-fhir-server-openapi/src/main/java/ca/uhn/fhir/rest/openapi/OpenApiInterceptor.java +++ b/hapi-fhir-server-openapi/src/main/java/ca/uhn/fhir/rest/openapi/OpenApiInterceptor.java @@ -71,13 +71,13 @@ import org.hl7.fhir.r4.model.DateType; import org.hl7.fhir.r4.model.Extension; import org.hl7.fhir.r4.model.IdType; import org.hl7.fhir.r4.model.OperationDefinition; +import org.hl7.fhir.r4.model.OperationDefinition.OperationDefinitionParameterComponent; +import org.hl7.fhir.r4.model.OperationDefinition.OperationParameterUse; import org.hl7.fhir.r4.model.Parameters; import org.hl7.fhir.r4.model.Reference; import org.hl7.fhir.r4.model.Resource; import org.hl7.fhir.r4.model.StringType; import org.hl7.fhir.r4.model.Type; -import org.hl7.fhir.r4.model.OperationDefinition.OperationDefinitionParameterComponent; -import org.hl7.fhir.r4.model.OperationDefinition.OperationParameterUse; import org.hl7.fhir.r4.model.codesystems.DataTypes; import org.thymeleaf.IEngineConfiguration; import org.thymeleaf.TemplateEngine; @@ -92,9 +92,6 @@ import org.thymeleaf.templateresolver.ITemplateResolver; import org.thymeleaf.templateresolver.TemplateResolution; import org.thymeleaf.templateresource.ClassLoaderTemplateResource; -import javax.servlet.ServletContext; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.InputStream; import java.math.BigDecimal; @@ -110,6 +107,9 @@ import java.util.Properties; import java.util.Set; import java.util.function.Supplier; import java.util.stream.Collectors; +import javax.servlet.ServletContext; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; import static org.apache.commons.lang3.StringUtils.defaultString; @@ -186,18 +186,22 @@ public class OpenApiInterceptor { } @Hook(Pointcut.SERVER_INCOMING_REQUEST_PRE_HANDLER_SELECTED) - public boolean serveSwaggerUi(HttpServletRequest theRequest, HttpServletResponse theResponse, ServletRequestDetails theRequestDetails) throws IOException { + public boolean serveSwaggerUi( + 
HttpServletRequest theRequest, HttpServletResponse theResponse, ServletRequestDetails theRequestDetails) + throws IOException { String requestPath = theRequest.getPathInfo(); String queryString = theRequest.getQueryString(); if (isBlank(requestPath) || requestPath.equals("/")) { if (isBlank(queryString)) { - Set highestRankedAcceptValues = RestfulServerUtils.parseAcceptHeaderAndReturnHighestRankedOptions(theRequest); + Set highestRankedAcceptValues = + RestfulServerUtils.parseAcceptHeaderAndReturnHighestRankedOptions(theRequest); if (highestRankedAcceptValues.contains(Constants.CT_HTML)) { String serverBase = "."; if (theRequestDetails.getServletRequest() != null) { - IServerAddressStrategy addressStrategy = theRequestDetails.getServer().getServerAddressStrategy(); + IServerAddressStrategy addressStrategy = + theRequestDetails.getServer().getServerAddressStrategy(); serverBase = addressStrategy.determineServerBase(theRequest.getServletContext(), theRequest); } String redirectUrl = theResponse.encodeRedirectURL(serverBase + "/swagger-ui/"); @@ -224,13 +228,14 @@ public class OpenApiInterceptor { theResponse.getWriter().write(response); theResponse.getWriter().close(); return false; - } return true; } - protected boolean handleResourceRequest(HttpServletResponse theResponse, ServletRequestDetails theRequestDetails, String requestPath) throws IOException { + protected boolean handleResourceRequest( + HttpServletResponse theResponse, ServletRequestDetails theRequestDetails, String requestPath) + throws IOException { if (requestPath.equals("/swagger-ui/") || requestPath.equals("/swagger-ui/index.html")) { serveSwaggerUiHtml(theRequestDetails, theResponse); return true; @@ -251,7 +256,6 @@ public class OpenApiInterceptor { return true; } - String resourcePath = requestPath.substring("/swagger-ui/".length()); if (resourcePath.equals("swagger-ui-custom.css") && isNotBlank(myCssText)) { @@ -262,7 +266,8 @@ public class OpenApiInterceptor { return true; } - try (InputStream resource = ClasspathUtil.loadResourceAsStream("/META-INF/resources/webjars/swagger-ui/" + mySwaggerUiVersion + "/" + resourcePath)) { + try (InputStream resource = ClasspathUtil.loadResourceAsStream( + "/META-INF/resources/webjars/swagger-ui/" + mySwaggerUiVersion + "/" + resourcePath)) { if (resourcePath.endsWith(".js") || resourcePath.endsWith(".map")) { theResponse.setContentType("application/javascript"); @@ -321,7 +326,8 @@ public class OpenApiInterceptor { } @SuppressWarnings("unchecked") - private void serveSwaggerUiHtml(ServletRequestDetails theRequestDetails, HttpServletResponse theResponse) throws IOException { + private void serveSwaggerUiHtml(ServletRequestDetails theRequestDetails, HttpServletResponse theResponse) + throws IOException { CapabilityStatement cs = getCapabilityStatement(theRequestDetails); String baseUrl = removeTrailingSlash(cs.getImplementation().getUrl()); theResponse.setStatus(200); @@ -340,7 +346,9 @@ public class OpenApiInterceptor { context.setVariable("FHIR_VERSION", cs.getFhirVersion().toCode()); context.setVariable("ADDITIONAL_CSS_TEXT", getCssText()); context.setVariable("USE_RESOURCE_PAGES", isUseResourcePages()); - context.setVariable("FHIR_VERSION_CODENAME", FhirVersionEnum.forVersionString(cs.getFhirVersion().toCode()).name()); + context.setVariable( + "FHIR_VERSION_CODENAME", + FhirVersionEnum.forVersionString(cs.getFhirVersion().toCode()).name()); String copyright = cs.getCopyright(); if (isNotBlank(copyright)) { @@ -355,7 +363,8 @@ public class OpenApiInterceptor { pageNames.add(type); 
Extension countExtension = t.getExtensionByUrl(ExtensionConstants.CONF_RESOURCE_COUNT); if (countExtension != null) { - IPrimitiveType countExtensionValue = (IPrimitiveType) countExtension.getValueAsPrimitive(); + IPrimitiveType countExtensionValue = + (IPrimitiveType) countExtension.getValueAsPrimitive(); if (countExtensionValue != null && countExtensionValue.hasValue()) { resourceToCount.put(type, countExtensionValue.getValue().intValue()); } @@ -426,7 +435,6 @@ public class OpenApiInterceptor { capabilitiesProvider = (IServerConformanceProvider) restfulServer.getServerConformanceProvider(); } - OpenAPI openApi = new OpenAPI(); openApi.setInfo(new Info()); @@ -435,7 +443,9 @@ public class OpenApiInterceptor { openApi.getInfo().setVersion(cs.getSoftware().getVersion()); openApi.getInfo().setContact(new Contact()); openApi.getInfo().getContact().setName(cs.getContactFirstRep().getName()); - openApi.getInfo().getContact().setEmail(cs.getContactFirstRep().getTelecomFirstRep().getValue()); + openApi.getInfo() + .getContact() + .setEmail(cs.getContactFirstRep().getTelecomFirstRep().getValue()); Server server = new Server(); openApi.addServersItem(server); @@ -445,7 +455,6 @@ public class OpenApiInterceptor { Paths paths = new Paths(); openApi.setPaths(paths); - if (page == null || page.equals(PAGE_SYSTEM) || page.equals(PAGE_ALL)) { Tag serverTag = new Tag(); serverTag.setName(PAGE_SYSTEM); @@ -457,9 +466,13 @@ public class OpenApiInterceptor { capabilitiesOperation.setSummary("server-capabilities: Fetch the server FHIR CapabilityStatement"); addFhirResourceResponse(ctx, openApi, capabilitiesOperation, "CapabilityStatement"); - Set systemInteractions = cs.getRestFirstRep().getInteraction().stream().map(t -> t.getCode()).collect(Collectors.toSet()); + Set systemInteractions = + cs.getRestFirstRep().getInteraction().stream() + .map(t -> t.getCode()) + .collect(Collectors.toSet()); // Transaction Operation - if (systemInteractions.contains(CapabilityStatement.SystemRestfulInteraction.TRANSACTION) || systemInteractions.contains(CapabilityStatement.SystemRestfulInteraction.BATCH)) { + if (systemInteractions.contains(CapabilityStatement.SystemRestfulInteraction.TRANSACTION) + || systemInteractions.contains(CapabilityStatement.SystemRestfulInteraction.BATCH)) { Operation transaction = getPathItem(paths, "/", PathItem.HttpMethod.POST); transaction.addTagsItem(PAGE_SYSTEM); transaction.setSummary("server-transaction: Execute a FHIR Transaction (or FHIR Batch) Bundle"); @@ -471,25 +484,30 @@ public class OpenApiInterceptor { if (systemInteractions.contains(CapabilityStatement.SystemRestfulInteraction.HISTORYSYSTEM)) { Operation systemHistory = getPathItem(paths, "/_history", PathItem.HttpMethod.GET); systemHistory.addTagsItem(PAGE_SYSTEM); - systemHistory.setSummary("server-history: Fetch the resource change history across all resource types on the server"); + systemHistory.setSummary( + "server-history: Fetch the resource change history across all resource types on the server"); addFhirResourceResponse(ctx, openApi, systemHistory, null); } // System-level Operations - for (CapabilityStatement.CapabilityStatementRestResourceOperationComponent nextOperation : cs.getRestFirstRep().getOperation()) { + for (CapabilityStatement.CapabilityStatementRestResourceOperationComponent nextOperation : + cs.getRestFirstRep().getOperation()) { addFhirOperation(ctx, openApi, theRequestDetails, capabilitiesProvider, paths, null, nextOperation); } - } - for (CapabilityStatement.CapabilityStatementRestResourceComponent 
nextResource : cs.getRestFirstRep().getResource()) { + for (CapabilityStatement.CapabilityStatementRestResourceComponent nextResource : + cs.getRestFirstRep().getResource()) { String resourceType = nextResource.getType(); if (page != null && !page.equals(resourceType) && !page.equals(PAGE_ALL)) { continue; } - Set typeRestfulInteractions = nextResource.getInteraction().stream().map(t -> t.getCodeElement().getValue()).collect(Collectors.toSet()); + Set typeRestfulInteractions = + nextResource.getInteraction().stream() + .map(t -> t.getCodeElement().getValue()) + .collect(Collectors.toSet()); Tag resourceTag = new Tag(); resourceTag.setName(resourceType); @@ -507,7 +525,8 @@ public class OpenApiInterceptor { // Instance VRead if (typeRestfulInteractions.contains(CapabilityStatement.TypeRestfulInteraction.VREAD)) { - Operation operation = getPathItem(paths, "/" + resourceType + "/{id}/_history/{version_id}", PathItem.HttpMethod.GET); + Operation operation = + getPathItem(paths, "/" + resourceType + "/{id}/_history/{version_id}", PathItem.HttpMethod.GET); operation.addTagsItem(resourceType); operation.setSummary("vread-instance: Read " + resourceType + " instance with specific version"); addResourceIdParameter(operation); @@ -528,7 +547,8 @@ public class OpenApiInterceptor { if (typeRestfulInteractions.contains(CapabilityStatement.TypeRestfulInteraction.UPDATE)) { Operation operation = getPathItem(paths, "/" + resourceType + "/{id}", PathItem.HttpMethod.PUT); operation.addTagsItem(resourceType); - operation.setSummary("update-instance: Update an existing " + resourceType + " instance, or create using a client-assigned ID"); + operation.setSummary("update-instance: Update an existing " + resourceType + + " instance, or create using a client-assigned ID"); addResourceIdParameter(operation); addFhirResourceRequestBody(openApi, operation, ctx, genericExampleSupplier(ctx, resourceType)); addFhirResourceResponse(ctx, openApi, operation, null); @@ -538,15 +558,18 @@ public class OpenApiInterceptor { if (typeRestfulInteractions.contains(CapabilityStatement.TypeRestfulInteraction.HISTORYTYPE)) { Operation operation = getPathItem(paths, "/" + resourceType + "/_history", PathItem.HttpMethod.GET); operation.addTagsItem(resourceType); - operation.setSummary("type-history: Fetch the resource change history for all resources of type " + resourceType); + operation.setSummary( + "type-history: Fetch the resource change history for all resources of type " + resourceType); addFhirResourceResponse(ctx, openApi, operation, null); } // Instance history if (typeRestfulInteractions.contains(CapabilityStatement.TypeRestfulInteraction.HISTORYTYPE)) { - Operation operation = getPathItem(paths, "/" + resourceType + "/{id}/_history", PathItem.HttpMethod.GET); + Operation operation = + getPathItem(paths, "/" + resourceType + "/{id}/_history", PathItem.HttpMethod.GET); operation.addTagsItem(resourceType); - operation.setSummary("instance-history: Fetch the resource change history for all resources of type " + resourceType); + operation.setSummary("instance-history: Fetch the resource change history for all resources of type " + + resourceType); addResourceIdParameter(operation); addFhirResourceResponse(ctx, openApi, operation, null); } @@ -572,28 +595,44 @@ public class OpenApiInterceptor { // Search if (typeRestfulInteractions.contains(CapabilityStatement.TypeRestfulInteraction.SEARCHTYPE)) { - addSearchOperation(openApi, getPathItem(paths, "/" + resourceType, PathItem.HttpMethod.GET), ctx, resourceType, nextResource); - 
addSearchOperation(openApi, getPathItem(paths, "/" + resourceType + "/_search", PathItem.HttpMethod.GET), ctx, resourceType, nextResource); + addSearchOperation( + openApi, + getPathItem(paths, "/" + resourceType, PathItem.HttpMethod.GET), + ctx, + resourceType, + nextResource); + addSearchOperation( + openApi, + getPathItem(paths, "/" + resourceType + "/_search", PathItem.HttpMethod.GET), + ctx, + resourceType, + nextResource); } // Resource-level Operations - for (CapabilityStatement.CapabilityStatementRestResourceOperationComponent nextOperation : nextResource.getOperation()) { - addFhirOperation(ctx, openApi, theRequestDetails, capabilitiesProvider, paths, resourceType, nextOperation); + for (CapabilityStatement.CapabilityStatementRestResourceOperationComponent nextOperation : + nextResource.getOperation()) { + addFhirOperation( + ctx, openApi, theRequestDetails, capabilitiesProvider, paths, resourceType, nextOperation); } - } return openApi; } - protected void addSearchOperation(final OpenAPI openApi, final Operation operation, final FhirContext ctx, - final String resourceType, final CapabilityStatement.CapabilityStatementRestResourceComponent nextResource) { + protected void addSearchOperation( + final OpenAPI openApi, + final Operation operation, + final FhirContext ctx, + final String resourceType, + final CapabilityStatement.CapabilityStatementRestResourceComponent nextResource) { operation.addTagsItem(resourceType); operation.setDescription("This is a search type"); operation.setSummary("search-type: Search for " + resourceType + " instances"); addFhirResourceResponse(ctx, openApi, operation, null); - for (final CapabilityStatement.CapabilityStatementRestResourceSearchParamComponent nextSearchParam : nextResource.getSearchParam()) { + for (final CapabilityStatement.CapabilityStatementRestResourceSearchParamComponent nextSearchParam : + nextResource.getSearchParam()) { final Parameter parametersItem = new Parameter(); operation.addParametersItem(parametersItem); @@ -607,9 +646,8 @@ public class OpenApiInterceptor { private Supplier patchExampleSupplier() { return () -> { Parameters example = new Parameters(); - Parameters.ParametersParameterComponent operation = example - .addParameter() - .setName("operation"); + Parameters.ParametersParameterComponent operation = + example.addParameter().setName("operation"); operation.addPart().setName("type").setValue(new StringType("add")); operation.addPart().setName("path").setValue(new StringType("Patient")); operation.addPart().setName("name").setValue(new StringType("birthDate")); @@ -645,14 +683,23 @@ public class OpenApiInterceptor { private CapabilityStatement getCapabilityStatement(ServletRequestDetails theRequestDetails) { RestfulServer restfulServer = theRequestDetails.getServer(); - IBaseConformance versionIndependentCapabilityStatement = restfulServer.getCapabilityStatement(theRequestDetails); + IBaseConformance versionIndependentCapabilityStatement = + restfulServer.getCapabilityStatement(theRequestDetails); return toCanonicalVersion(versionIndependentCapabilityStatement); } - private void addFhirOperation(FhirContext theFhirContext, OpenAPI theOpenApi, ServletRequestDetails theRequestDetails, IServerConformanceProvider theCapabilitiesProvider, Paths thePaths, String theResourceType, CapabilityStatement.CapabilityStatementRestResourceOperationComponent theOperation) { + private void addFhirOperation( + FhirContext theFhirContext, + OpenAPI theOpenApi, + ServletRequestDetails theRequestDetails, + IServerConformanceProvider 
theCapabilitiesProvider, + Paths thePaths, + String theResourceType, + CapabilityStatement.CapabilityStatementRestResourceOperationComponent theOperation) { if (theCapabilitiesProvider != null) { IdType definitionId = new IdType(theOperation.getDefinition()); - IBaseResource operationDefinitionNonCanonical = theCapabilitiesProvider.readOperationDefinition(definitionId, theRequestDetails); + IBaseResource operationDefinitionNonCanonical = + theCapabilitiesProvider.readOperationDefinition(definitionId, theRequestDetails); if (operationDefinitionNonCanonical == null) { return; } @@ -660,52 +707,68 @@ public class OpenApiInterceptor { OperationDefinition operationDefinition = toCanonicalVersion(operationDefinitionNonCanonical); final boolean postOnly = operationDefinition.getAffectsState() || operationDefinition.getParameter().stream() - .filter(p -> p.getUse().equals(OperationParameterUse.IN)) - .anyMatch(p -> { - final boolean required = p.getMin() > 0; - return required && !isPrimitive(p); - }); + .filter(p -> p.getUse().equals(OperationParameterUse.IN)) + .anyMatch(p -> { + final boolean required = p.getMin() > 0; + return required && !isPrimitive(p); + }); if (!postOnly) { // GET form for non-state-affecting operations if (theResourceType != null) { if (operationDefinition.getType()) { - Operation operation = getPathItem(thePaths, "/" + theResourceType + "/$" + operationDefinition.getCode(), PathItem.HttpMethod.GET); - populateOperation(theFhirContext, theOpenApi, theResourceType, operationDefinition, operation, true); + Operation operation = getPathItem( + thePaths, + "/" + theResourceType + "/$" + operationDefinition.getCode(), + PathItem.HttpMethod.GET); + populateOperation( + theFhirContext, theOpenApi, theResourceType, operationDefinition, operation, true); } if (operationDefinition.getInstance()) { - Operation operation = getPathItem(thePaths, "/" + theResourceType + "/{id}/$" + operationDefinition.getCode(), PathItem.HttpMethod.GET); + Operation operation = getPathItem( + thePaths, + "/" + theResourceType + "/{id}/$" + operationDefinition.getCode(), + PathItem.HttpMethod.GET); addResourceIdParameter(operation); - populateOperation(theFhirContext, theOpenApi, theResourceType, operationDefinition, operation, true); + populateOperation( + theFhirContext, theOpenApi, theResourceType, operationDefinition, operation, true); } } else { if (operationDefinition.getSystem()) { - Operation operation = getPathItem(thePaths, "/$" + operationDefinition.getCode(), PathItem.HttpMethod.GET); + Operation operation = + getPathItem(thePaths, "/$" + operationDefinition.getCode(), PathItem.HttpMethod.GET); populateOperation(theFhirContext, theOpenApi, null, operationDefinition, operation, true); } } - } // POST form for all operations if (theResourceType != null) { if (operationDefinition.getType()) { - Operation operation = getPathItem(thePaths, "/" + theResourceType + "/$" + operationDefinition.getCode(), PathItem.HttpMethod.POST); - populateOperation(theFhirContext, theOpenApi, theResourceType, operationDefinition, operation, false); + Operation operation = getPathItem( + thePaths, + "/" + theResourceType + "/$" + operationDefinition.getCode(), + PathItem.HttpMethod.POST); + populateOperation( + theFhirContext, theOpenApi, theResourceType, operationDefinition, operation, false); } if (operationDefinition.getInstance()) { - Operation operation = getPathItem(thePaths, "/" + theResourceType + "/{id}/$" + operationDefinition.getCode(), PathItem.HttpMethod.POST); + Operation operation = getPathItem( + 
thePaths, + "/" + theResourceType + "/{id}/$" + operationDefinition.getCode(), + PathItem.HttpMethod.POST); addResourceIdParameter(operation); - populateOperation(theFhirContext, theOpenApi, theResourceType, operationDefinition, operation, false); + populateOperation( + theFhirContext, theOpenApi, theResourceType, operationDefinition, operation, false); } } else { if (operationDefinition.getSystem()) { - Operation operation = getPathItem(thePaths, "/$" + operationDefinition.getCode(), PathItem.HttpMethod.POST); + Operation operation = + getPathItem(thePaths, "/$" + operationDefinition.getCode(), PathItem.HttpMethod.POST); populateOperation(theFhirContext, theOpenApi, null, operationDefinition, operation, false); } } - } } @@ -730,14 +793,19 @@ public class OpenApiInterceptor { DataTypes.MARKDOWN.toCode(), DataTypes.UNSIGNEDINT.toCode(), DataTypes.POSITIVEINT.toCode(), - DataTypes.UUID.toCode() - ); + DataTypes.UUID.toCode()); private static boolean isPrimitive(OperationDefinitionParameterComponent parameter) { return primitiveTypes.contains(parameter.getType()); } - private void populateOperation(FhirContext theFhirContext, OpenAPI theOpenApi, String theResourceType, OperationDefinition theOperationDefinition, Operation theOperation, boolean theGet) { + private void populateOperation( + FhirContext theFhirContext, + OpenAPI theOpenApi, + String theResourceType, + OperationDefinition theOperationDefinition, + Operation theOperation, + boolean theGet) { if (theResourceType == null) { theOperation.addTagsItem(PAGE_SYSTEM); } else { @@ -748,8 +816,10 @@ public class OpenApiInterceptor { addFhirResourceResponse(theFhirContext, theOpenApi, theOperation, null); if (theGet) { - for (OperationDefinition.OperationDefinitionParameterComponent nextParameter : theOperationDefinition.getParameter()) { - if ("0".equals(nextParameter.getMax()) || !nextParameter.getUse().equals(OperationParameterUse.IN)) { + for (OperationDefinition.OperationDefinitionParameterComponent nextParameter : + theOperationDefinition.getParameter()) { + if ("0".equals(nextParameter.getMax()) + || !nextParameter.getUse().equals(OperationParameterUse.IN)) { continue; } if (!isPrimitive(nextParameter) && nextParameter.getMin() == 0) { @@ -764,23 +834,26 @@ public class OpenApiInterceptor { parametersItem.setStyle(Parameter.StyleEnum.SIMPLE); parametersItem.setRequired(nextParameter.getMin() > 0); - List exampleExtensions = nextParameter.getExtensionsByUrl(HapiExtensions.EXT_OP_PARAMETER_EXAMPLE_VALUE); + List exampleExtensions = + nextParameter.getExtensionsByUrl(HapiExtensions.EXT_OP_PARAMETER_EXAMPLE_VALUE); if (exampleExtensions.size() == 1) { - parametersItem.setExample(exampleExtensions.get(0).getValueAsPrimitive().getValueAsString()); + parametersItem.setExample( + exampleExtensions.get(0).getValueAsPrimitive().getValueAsString()); } else if (exampleExtensions.size() > 1) { for (Extension next : exampleExtensions) { String nextExample = next.getValueAsPrimitive().getValueAsString(); parametersItem.addExample(nextExample, new Example().value(nextExample)); } } - } } else { Parameters exampleRequestBody = new Parameters(); - for (OperationDefinition.OperationDefinitionParameterComponent nextSearchParam : theOperationDefinition.getParameter()) { - if ("0".equals(nextSearchParam.getMax()) || !nextSearchParam.getUse().equals(OperationParameterUse.IN)) { + for (OperationDefinition.OperationDefinitionParameterComponent nextSearchParam : + theOperationDefinition.getParameter()) { + if ("0".equals(nextSearchParam.getMax()) + || 
!nextSearchParam.getUse().equals(OperationParameterUse.IN)) { continue; } Parameters.ParametersParameterComponent param = exampleRequestBody.addParameter(); @@ -791,19 +864,25 @@ public class OpenApiInterceptor { case "url": case "code": case "string": { - IPrimitiveType type = (IPrimitiveType) FHIR_CONTEXT_CANONICAL.getElementDefinition(paramType).newInstance(); + IPrimitiveType type = (IPrimitiveType) FHIR_CONTEXT_CANONICAL + .getElementDefinition(paramType) + .newInstance(); type.setValueAsString("example"); param.setValue((Type) type); break; } case "integer": { - IPrimitiveType type = (IPrimitiveType) FHIR_CONTEXT_CANONICAL.getElementDefinition(paramType).newInstance(); + IPrimitiveType type = (IPrimitiveType) FHIR_CONTEXT_CANONICAL + .getElementDefinition(paramType) + .newInstance(); type.setValueAsString("0"); param.setValue((Type) type); break; } case "boolean": { - IPrimitiveType type = (IPrimitiveType) FHIR_CONTEXT_CANONICAL.getElementDefinition(paramType).newInstance(); + IPrimitiveType type = (IPrimitiveType) FHIR_CONTEXT_CANONICAL + .getElementDefinition(paramType) + .newInstance(); type.setValueAsString("false"); param.setValue((Type) type); break; @@ -828,24 +907,26 @@ public class OpenApiInterceptor { break; case "Resource": if (theResourceType != null) { - IBaseResource resource = FHIR_CONTEXT_CANONICAL.getResourceDefinition(theResourceType).newInstance(); + IBaseResource resource = FHIR_CONTEXT_CANONICAL + .getResourceDefinition(theResourceType) + .newInstance(); resource.setId("1"); param.setResource((Resource) resource); } break; } - } - String exampleRequestBodyString = FHIR_CONTEXT_CANONICAL.newJsonParser().setPrettyPrint(true).encodeResourceToString(exampleRequestBody); + String exampleRequestBodyString = FHIR_CONTEXT_CANONICAL + .newJsonParser() + .setPrettyPrint(true) + .encodeResourceToString(exampleRequestBody); theOperation.setRequestBody(new RequestBody()); theOperation.getRequestBody().setContent(new Content()); MediaType mediaType = new MediaType(); mediaType.setExample(exampleRequestBodyString); mediaType.setSchema(new Schema().type("object").title("FHIR Resource")); theOperation.getRequestBody().getContent().addMediaType(Constants.CT_FHIR_JSON_NEW, mediaType); - - } } @@ -883,7 +964,11 @@ public class OpenApiInterceptor { } } - private void addFhirResourceRequestBody(OpenAPI theOpenApi, Operation theOperation, FhirContext theExampleFhirContext, Supplier theExampleSupplier) { + private void addFhirResourceRequestBody( + OpenAPI theOpenApi, + Operation theOperation, + FhirContext theExampleFhirContext, + Supplier theExampleSupplier) { RequestBody requestBody = new RequestBody(); requestBody.setContent(provideContentFhirResource(theOpenApi, theExampleFhirContext, theExampleSupplier)); theOperation.setRequestBody(requestBody); @@ -900,11 +985,13 @@ public class OpenApiInterceptor { theOperation.addParametersItem(parameter); } - private void addFhirResourceResponse(FhirContext theFhirContext, OpenAPI theOpenApi, Operation theOperation, String theResourceType) { + private void addFhirResourceResponse( + FhirContext theFhirContext, OpenAPI theOpenApi, Operation theOperation, String theResourceType) { theOperation.setResponses(new ApiResponses()); ApiResponse response200 = new ApiResponse(); response200.setDescription("Success"); - response200.setContent(provideContentFhirResource(theOpenApi, theFhirContext, genericExampleSupplier(theFhirContext, theResourceType))); + response200.setContent(provideContentFhirResource( + theOpenApi, theFhirContext, 
genericExampleSupplier(theFhirContext, theResourceType))); theOperation.getResponses().addApiResponse("200", response200); } @@ -921,19 +1008,28 @@ public class OpenApiInterceptor { }; } - private Content provideContentFhirResource(OpenAPI theOpenApi, FhirContext theExampleFhirContext, Supplier theExampleSupplier) { + private Content provideContentFhirResource( + OpenAPI theOpenApi, FhirContext theExampleFhirContext, Supplier theExampleSupplier) { addSchemaFhirResource(theOpenApi); Content retVal = new Content(); - MediaType jsonSchema = new MediaType().schema(new ObjectSchema().$ref("#/components/schemas/" + FHIR_JSON_RESOURCE)); + MediaType jsonSchema = + new MediaType().schema(new ObjectSchema().$ref("#/components/schemas/" + FHIR_JSON_RESOURCE)); if (theExampleSupplier != null) { - jsonSchema.setExample(theExampleFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(theExampleSupplier.get())); + jsonSchema.setExample(theExampleFhirContext + .newJsonParser() + .setPrettyPrint(true) + .encodeResourceToString(theExampleSupplier.get())); } retVal.addMediaType(Constants.CT_FHIR_JSON_NEW, jsonSchema); - MediaType xmlSchema = new MediaType().schema(new ObjectSchema().$ref("#/components/schemas/" + FHIR_XML_RESOURCE)); + MediaType xmlSchema = + new MediaType().schema(new ObjectSchema().$ref("#/components/schemas/" + FHIR_XML_RESOURCE)); if (theExampleSupplier != null) { - xmlSchema.setExample(theExampleFhirContext.newXmlParser().setPrettyPrint(true).encodeResourceToString(theExampleSupplier.get())); + xmlSchema.setExample(theExampleFhirContext + .newXmlParser() + .setPrettyPrint(true) + .encodeResourceToString(theExampleSupplier.get())); } retVal.addMediaType(Constants.CT_FHIR_XML_NEW, xmlSchema); return retVal; @@ -951,7 +1047,8 @@ public class OpenApiInterceptor { } protected ClassLoaderTemplateResource getIndexTemplate() { - return new ClassLoaderTemplateResource(myResourcePathToClasspath.get("/swagger-ui/index.html"), StandardCharsets.UTF_8.name()); + return new ClassLoaderTemplateResource( + myResourcePathToClasspath.get("/swagger-ui/index.html"), StandardCharsets.UTF_8.name()); } public String getBannerImage() { @@ -975,11 +1072,13 @@ public class OpenApiInterceptor { private static T toCanonicalVersion(IBaseResource theNonCanonical) { IBaseResource canonical; if (theNonCanonical instanceof org.hl7.fhir.dstu3.model.Resource) { - canonical = VersionConvertorFactory_30_40.convertResource((org.hl7.fhir.dstu3.model.Resource) theNonCanonical); + canonical = + VersionConvertorFactory_30_40.convertResource((org.hl7.fhir.dstu3.model.Resource) theNonCanonical); } else if (theNonCanonical instanceof org.hl7.fhir.r5.model.Resource) { canonical = VersionConvertorFactory_40_50.convertResource((org.hl7.fhir.r5.model.Resource) theNonCanonical); } else if (theNonCanonical instanceof org.hl7.fhir.r4b.model.Resource) { - org.hl7.fhir.r5.model.Resource r5 = VersionConvertorFactory_43_50.convertResource((org.hl7.fhir.r4b.model.Resource) theNonCanonical); + org.hl7.fhir.r5.model.Resource r5 = + VersionConvertorFactory_43_50.convertResource((org.hl7.fhir.r4b.model.Resource) theNonCanonical); canonical = VersionConvertorFactory_40_50.convertResource(r5); } else { canonical = theNonCanonical; @@ -999,7 +1098,11 @@ public class OpenApiInterceptor { } @Override - public TemplateResolution resolveTemplate(IEngineConfiguration configuration, String ownerTemplate, String template, Map templateResolutionAttributes) { + public TemplateResolution resolveTemplate( + IEngineConfiguration configuration, 
+ String ownerTemplate, + String template, + Map templateResolutionAttributes) { ClassLoaderTemplateResource resource = getIndexTemplate(); ICacheEntryValidity cacheValidity = new AlwaysValidCacheEntryValidity(); return new TemplateResolution(resource, TemplateMode.HTML, cacheValidity); @@ -1009,23 +1112,29 @@ public class OpenApiInterceptor { private static class TemplateLinkBuilder extends AbstractLinkBuilder { @Override - public String buildLink(IExpressionContext theExpressionContext, String theBase, Map theParameters) { + public String buildLink( + IExpressionContext theExpressionContext, String theBase, Map theParameters) { - ServletRequestDetails requestDetails = (ServletRequestDetails) theExpressionContext.getVariable(REQUEST_DETAILS); + ServletRequestDetails requestDetails = + (ServletRequestDetails) theExpressionContext.getVariable(REQUEST_DETAILS); IServerAddressStrategy addressStrategy = requestDetails.getServer().getServerAddressStrategy(); - String baseUrl = addressStrategy.determineServerBase(requestDetails.getServletRequest().getServletContext(), requestDetails.getServletRequest()); + String baseUrl = addressStrategy.determineServerBase( + requestDetails.getServletRequest().getServletContext(), requestDetails.getServletRequest()); StringBuilder builder = new StringBuilder(); builder.append(baseUrl); builder.append(theBase); if (!theParameters.isEmpty()) { builder.append("?"); - for (Iterator> iter = theParameters.entrySet().iterator(); iter.hasNext(); ) { + for (Iterator> iter = + theParameters.entrySet().iterator(); + iter.hasNext(); ) { Map.Entry nextEntry = iter.next(); builder.append(UrlUtil.escapeUrlParam(nextEntry.getKey())); builder.append("="); - builder.append(UrlUtil.escapeUrlParam(defaultIfNull(nextEntry.getValue(), "").toString())); + builder.append(UrlUtil.escapeUrlParam( + defaultIfNull(nextEntry.getValue(), "").toString())); if (iter.hasNext()) { builder.append("&"); } @@ -1035,6 +1144,4 @@ public class OpenApiInterceptor { return builder.toString(); } } - - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/BaseParseAction.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/BaseParseAction.java index 31734694338..a3518d942d3 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/BaseParseAction.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/BaseParseAction.java @@ -39,5 +39,4 @@ public abstract class BaseParseAction { } public abstract void execute(IParser parser, Writer writer) throws IOException; - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IBundleProvider.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IBundleProvider.java index be2af7c03f8..829453dc08b 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IBundleProvider.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IBundleProvider.java @@ -25,13 +25,12 @@ import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.stream.Collectors; - +import javax.annotation.Nonnull; +import javax.annotation.Nullable; public interface IBundleProvider { @@ -107,7 +106,6 @@ public interface IBundleProvider { return null; } - /** * Returns the instant as of which this result was created. 
The * result of this value is used to populate the lastUpdated @@ -147,7 +145,9 @@ public interface IBundleProvider { Integer size = size(); if (size == null) { - throw new ConfigurationException(Msg.code(464) + "Attempt to request all resources from an asynchronous search result. The SearchParameterMap for this search probably should have been synchronous."); + throw new ConfigurationException( + Msg.code(464) + + "Attempt to request all resources from an asynchronous search result. The SearchParameterMap for this search probably should have been synchronous."); } if (size > 0) { retval.addAll(getResources(0, size)); @@ -219,6 +219,8 @@ public interface IBundleProvider { * @return the list of ids of all resources in the bundle */ default List getAllResourceIds() { - return getAllResources().stream().map(resource -> resource.getIdElement().getIdPart()).collect(Collectors.toList()); + return getAllResources().stream() + .map(resource -> resource.getIdElement().getIdPart()) + .collect(Collectors.toList()); } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IFhirVersionServer.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IFhirVersionServer.java index a3a8b1a739a..2eb88c206e8 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IFhirVersionServer.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IFhirVersionServer.java @@ -19,18 +19,15 @@ */ package ca.uhn.fhir.rest.api.server; -import org.hl7.fhir.instance.model.api.IBaseResource; - import ca.uhn.fhir.model.api.IFhirVersion; -import ca.uhn.fhir.rest.server.IResourceProvider; import ca.uhn.fhir.rest.server.IServerConformanceProvider; import ca.uhn.fhir.rest.server.RestfulServer; +import org.hl7.fhir.instance.model.api.IBaseResource; /** * This class is the server specific equivalent to {@link IFhirVersion} */ public interface IFhirVersionServer { - IServerConformanceProvider createServerConformanceProvider(RestfulServer theRestfulServer); - + IServerConformanceProvider createServerConformanceProvider(RestfulServer theRestfulServer); } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IPreResourceAccessDetails.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IPreResourceAccessDetails.java index fdbd25fe075..b1190a268df 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IPreResourceAccessDetails.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IPreResourceAccessDetails.java @@ -34,5 +34,4 @@ public interface IPreResourceAccessDetails { IBaseResource getResource(int theIndex); void setDontReturnResourceAtIndex(int theIndex); - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IPreResourceShowDetails.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IPreResourceShowDetails.java index d5d41a75504..5e691392e5b 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IPreResourceShowDetails.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IPreResourceShowDetails.java @@ -19,7 +19,6 @@ */ package ca.uhn.fhir.rest.api.server; -import org.apache.commons.collections4.IteratorUtils; import org.hl7.fhir.instance.model.api.IBaseResource; import java.util.List; diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IRestfulResponse.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IRestfulResponse.java index 2433f1111a4..936fc86801d 100644 --- 
a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IRestfulResponse.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IRestfulResponse.java @@ -19,14 +19,14 @@ */ package ca.uhn.fhir.rest.api.server; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.io.Closeable; import java.io.IOException; import java.io.OutputStream; import java.io.Writer; import java.util.List; import java.util.Map; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Implementations of this interface represent a response back to the client from the server. It is @@ -59,7 +59,8 @@ public interface IRestfulResponse { * @return Returns a {@link Writer} that can accept the response body. */ @Nonnull - Writer getResponseWriter(int theStatusCode, String theContentType, String theCharset, boolean theRespondGzip) throws IOException; + Writer getResponseWriter(int theStatusCode, String theContentType, String theCharset, boolean theRespondGzip) + throws IOException; /** * Initiate a new binary response. The OutputStream returned by this method must be finalized by @@ -79,7 +80,8 @@ public interface IRestfulResponse { * @return Returns an {@link OutputStream} that can accept the response body. */ @Nonnull - OutputStream getResponseOutputStream(int theStatusCode, String theContentType, @Nullable Integer theContentLength) throws IOException; + OutputStream getResponseOutputStream(int theStatusCode, String theContentType, @Nullable Integer theContentLength) + throws IOException; /** * Finalizes the response streaming using the writer that was returned by calling either @@ -105,10 +107,8 @@ public interface IRestfulResponse { */ void addHeader(String headerKey, String headerValue); - /** * Returns the headers added to this response */ Map> getHeaders(); - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IServerMethodBinding.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IServerMethodBinding.java index 5c37791425a..eb28619c8a3 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IServerMethodBinding.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IServerMethodBinding.java @@ -19,6 +19,4 @@ */ package ca.uhn.fhir.rest.api.server; -public interface IServerMethodBinding { - -} +public interface IServerMethodBinding {} diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/RequestDetails.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/RequestDetails.java index 80d62cc7e35..a8a0ae4af20 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/RequestDetails.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/RequestDetails.java @@ -26,16 +26,12 @@ import ca.uhn.fhir.rest.api.RequestTypeEnum; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.server.IRestfulServerDefaults; import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor; -import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import ca.uhn.fhir.util.StopWatch; import ca.uhn.fhir.util.UrlUtil; import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; -import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.InputStream; import java.io.Reader; @@ -47,6 +43,8 @@ import java.util.HashMap; import 
java.util.List; import java.util.Map; import java.util.stream.Collectors; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -311,10 +309,10 @@ public abstract class RequestDetails { } } if (needsSanitization) { - myParameters = myParameters - .entrySet() - .stream() - .collect(Collectors.toMap(t -> UrlUtil.sanitizeUrlPart((String) ((Map.Entry) t).getKey()), t -> (String[]) ((Map.Entry) t).getValue())); + myParameters = myParameters.entrySet().stream() + .collect( + Collectors.toMap(t -> UrlUtil.sanitizeUrlPart((String) ((Map.Entry) t).getKey()), t -> + (String[]) ((Map.Entry) t).getValue())); } } @@ -434,7 +432,8 @@ public abstract class RequestDetails { myUnqualifiedToQualifiedNames = new HashMap<>(); } String unqualified = next.substring(0, i); - List list = myUnqualifiedToQualifiedNames.computeIfAbsent(unqualified, k -> new ArrayList<>(4)); + List list = + myUnqualifiedToQualifiedNames.computeIfAbsent(unqualified, k -> new ArrayList<>(4)); list.add(next); break; } @@ -548,7 +547,6 @@ public abstract class RequestDetails { myTransactionGuid = theTransactionGuid; } - public boolean isRewriteHistory() { return myRewriteHistory; } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/ResponseDetails.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/ResponseDetails.java index e04e23d5dc9..7355ca891b5 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/ResponseDetails.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/ResponseDetails.java @@ -64,5 +64,4 @@ public class ResponseDetails { public void setResponseResource(IBaseResource theResponseResource) { myResponseResource = theResponseResource; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/SimplePreResourceShowDetails.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/SimplePreResourceShowDetails.java index e23d9c217fc..a71dd1965a6 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/SimplePreResourceShowDetails.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/SimplePreResourceShowDetails.java @@ -30,7 +30,6 @@ import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Objects; -import java.util.stream.Collectors; public class SimplePreResourceShowDetails implements IPreResourceShowDetails { @@ -67,24 +66,29 @@ public class SimplePreResourceShowDetails implements IPreResourceShowDetails { @Override public void setResource(int theIndex, IBaseResource theResource) { Validate.isTrue(theIndex >= 0, "Invalid index %d - theIndex must not be < 0", theIndex); - Validate.isTrue(theIndex < myResources.length, "Invalid index {} - theIndex must be < %d", theIndex, myResources.length); + Validate.isTrue( + theIndex < myResources.length, + "Invalid index {} - theIndex must be < %d", + theIndex, + myResources.length); myResources[theIndex] = theResource; } @Override public void markResourceAtIndexAsSubset(int theIndex) { Validate.isTrue(theIndex >= 0, "Invalid index %d - theIndex must not be < 0", theIndex); - Validate.isTrue(theIndex < myResources.length, "Invalid index {} - theIndex must be < %d", theIndex, myResources.length); + Validate.isTrue( + theIndex < myResources.length, + "Invalid index {} - theIndex must be < %d", + theIndex, + myResources.length); myResourceMarkedAsSubset[theIndex] = true; } @Override public List getAllResources() { ArrayList 
retVal = new ArrayList<>(myResources.length); - Arrays - .stream(myResources) - .filter(Objects::nonNull) - .forEach(retVal::add); + Arrays.stream(myResources).filter(Objects::nonNull).forEach(retVal::add); return Collections.unmodifiableList(retVal); } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/SystemRequestDetails.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/SystemRequestDetails.java index 225cf7298a4..0dd4f0667ba 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/SystemRequestDetails.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/SystemRequestDetails.java @@ -70,7 +70,9 @@ public class SystemRequestDetails extends RequestDetails { public SystemRequestDetails(RequestDetails theDetails) { super(theDetails); - if (nonNull(theDetails.getServer())) { myServer = theDetails.getServer(); } + if (nonNull(theDetails.getServer())) { + myServer = theDetails.getServer(); + } } public RequestPartitionId getRequestPartitionId() { @@ -133,9 +135,7 @@ public class SystemRequestDetails extends RequestDetails { } @Override - public void setAttribute(String theAttributeName, Object theAttributeValue) { - - } + public void setAttribute(String theAttributeName, Object theAttributeValue) {} @Override public InputStream getInputStream() throws IOException { @@ -232,5 +232,4 @@ public class SystemRequestDetails extends RequestDetails { systemRequestDetails.setRequestPartitionId(RequestPartitionId.allPartitions()); return systemRequestDetails; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/bulk/BulkExportJobParameters.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/bulk/BulkExportJobParameters.java index bde8e3b2efe..6aab0880815 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/bulk/BulkExportJobParameters.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/bulk/BulkExportJobParameters.java @@ -20,9 +20,9 @@ package ca.uhn.fhir.rest.api.server.bulk; import ca.uhn.fhir.interceptor.model.RequestPartitionId; +import ca.uhn.fhir.model.api.IModelJson; import ca.uhn.fhir.rest.server.util.JsonDateDeserializer; import ca.uhn.fhir.rest.server.util.JsonDateSerializer; -import ca.uhn.fhir.model.api.IModelJson; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonSerialize; @@ -110,8 +110,10 @@ public class BulkExportJobParameters implements IModelJson { */ @JsonProperty("partitionId") private RequestPartitionId myPartitionId; + @JsonProperty("binarySecurityContextIdentifierSystem") private String myBinarySecurityContextIdentifierSystem; + @JsonProperty("binarySecurityContextIdentifierValue") private String myBinarySecurityContextIdentifierValue; @@ -268,6 +270,8 @@ public class BulkExportJobParameters implements IModelJson { } public enum ExportStyle { - PATIENT, GROUP, SYSTEM + PATIENT, + GROUP, + SYSTEM } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/BaseResourcePersistentId.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/BaseResourcePersistentId.java index 8fd916b7007..54390bb6776 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/BaseResourcePersistentId.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/BaseResourcePersistentId.java @@ -32,7 +32,6 @@ public abstract class BaseResourcePersistentId 
implements IResourcePersistent private final String myResourceType; private IIdType myAssociatedResourceId; - protected BaseResourcePersistentId(String theResourceType) { myResourceType = theResourceType; } @@ -66,7 +65,6 @@ public abstract class BaseResourcePersistentId implements IResourcePersistent return Objects.hash(myVersion); } - @Override public Long getVersion() { return myVersion; diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/IDeleteExpungeJobSubmitter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/IDeleteExpungeJobSubmitter.java index 9f4e4c9db42..41cdb789cea 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/IDeleteExpungeJobSubmitter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/IDeleteExpungeJobSubmitter.java @@ -30,5 +30,10 @@ public interface IDeleteExpungeJobSubmitter { * @param theUrlsToProcess A list of strings of the form "/Patient?active=true" * @return The Batch2 JobId that was started to run this batch job */ - String submitJob(Integer theBatchSize, List theUrlsToProcess, boolean theCascade, Integer theCascadeMaxRounds, RequestDetails theRequest); + String submitJob( + Integer theBatchSize, + List theUrlsToProcess, + boolean theCascade, + Integer theCascadeMaxRounds, + RequestDetails theRequest); } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/IResourcePersistentId.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/IResourcePersistentId.java index 0e9caf6ce0c..a2885997701 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/IResourcePersistentId.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/IResourcePersistentId.java @@ -35,7 +35,6 @@ public interface IResourcePersistentId { * @param theVersion This should only be populated if a specific version is needed. 
If you want the current version, * leave this as null */ - void setVersion(Long theVersion); /** diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/NotFoundPid.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/NotFoundPid.java index 53536ee98b2..eef1a458083 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/NotFoundPid.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/NotFoundPid.java @@ -20,9 +20,9 @@ package ca.uhn.fhir.rest.api.server.storage; public class NotFoundPid extends BaseResourcePersistentId { - public NotFoundPid() { - super(null); - } + public NotFoundPid() { + super(null); + } @Override public Long getId() { diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/TransactionDetails.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/TransactionDetails.java index 36c661735f4..4762a79e99c 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/TransactionDetails.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/storage/TransactionDetails.java @@ -31,10 +31,10 @@ import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; +import java.util.function.Supplier; +import java.util.*; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.*; -import java.util.function.Supplier; /** * This object contains runtime information that is gathered and relevant to a single database transaction. @@ -185,7 +185,6 @@ public class TransactionDetails { return false; } - /** * A Resolved Resource ID is a mapping between a resource ID (e.g. "Patient/ABC" or * "Observation/123") and a storage ID for that resource. Resources should only be placed within @@ -233,14 +232,17 @@ public class TransactionDetails { * "Observation/123") and a storage ID for that resource. Resources should only be placed within * the TransactionDetails if they are known to exist and be valid targets for other resources to link to. */ - public void addResolvedMatchUrl(FhirContext theFhirContext, String theConditionalUrl, @Nonnull IResourcePersistentId thePersistentId) { + public void addResolvedMatchUrl( + FhirContext theFhirContext, String theConditionalUrl, @Nonnull IResourcePersistentId thePersistentId) { Validate.notBlank(theConditionalUrl); Validate.notNull(thePersistentId); if (myResolvedMatchUrls.isEmpty()) { myResolvedMatchUrls = new HashMap<>(); } else if (matchUrlWithDiffIdExists(theConditionalUrl, thePersistentId)) { - String msg = theFhirContext.getLocalizer().getMessage(TransactionDetails.class, "invalidMatchUrlMultipleMatches", theConditionalUrl); + String msg = theFhirContext + .getLocalizer() + .getMessage(TransactionDetails.class, "invalidMatchUrlMultipleMatches", theConditionalUrl); throw new PreconditionFailedException(Msg.code(2207) + msg); } myResolvedMatchUrls.put(theConditionalUrl, thePersistentId); @@ -254,10 +256,10 @@ public class TransactionDetails { myResolvedMatchUrls.remove(theMatchUrl); } - private boolean matchUrlWithDiffIdExists(String theConditionalUrl, @Nonnull IResourcePersistentId thePersistentId) { - if (myResolvedMatchUrls.containsKey(theConditionalUrl) && myResolvedMatchUrls.get(theConditionalUrl) != NOT_FOUND) { - return ! 
myResolvedMatchUrls.get(theConditionalUrl).getId().equals(thePersistentId.getId());
+ if (myResolvedMatchUrls.containsKey(theConditionalUrl)
+ && myResolvedMatchUrls.get(theConditionalUrl) != NOT_FOUND) {
+ return !myResolvedMatchUrls.get(theConditionalUrl).getId().equals(thePersistentId.getId());
 }
 return false;
 }
@@ -379,8 +381,7 @@ public class TransactionDetails {
 return hookParams == null ? InterceptorInvocationTimingEnum.ACTIVE : InterceptorInvocationTimingEnum.DEFERRED;
 }
- public void deferredBroadcastProcessingFinished() {
- }
+ public void deferredBroadcastProcessingFinished() {}
 public void clearResolvedItems() {
 myResolvedResourceIds.clear();
@@ -398,6 +399,4 @@ public class TransactionDetails {
 public void setFhirTransaction(boolean theFhirTransaction) {
 myFhirTransaction = theFhirTransaction;
 }
-
 }
-
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/ApacheProxyAddressStrategy.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/ApacheProxyAddressStrategy.java
index 426a926ea96..561bc7dfb08 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/ApacheProxyAddressStrategy.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/ApacheProxyAddressStrategy.java
@@ -19,8 +19,6 @@ */
 package ca.uhn.fhir.rest.server;
-import java.util.Optional;
-
 import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -29,14 +27,12 @@ import org.springframework.http.server.ServletServerHttpRequest;
 import org.springframework.web.util.UriComponents;
 import org.springframework.web.util.UriComponentsBuilder;
+import java.util.Optional;
 import javax.servlet.ServletContext;
 import javax.servlet.http.HttpServletRequest;
-
-
 import static java.util.Optional.ofNullable;
-
 /**
 * Works like the normal
 * {@link ca.uhn.fhir.rest.server.IncomingRequestAddressStrategy} unless there's
@@ -78,9 +74,7 @@ public class ApacheProxyAddressStrategy extends IncomingRequestAddressStrategy {
 private static final String X_FORWARDED_PROTO = "x-forwarded-proto";
 private static final String X_FORWARDED_HOST = "x-forwarded-host";
-
- private static final Logger LOG = LoggerFactory
- .getLogger(ApacheProxyAddressStrategy.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ApacheProxyAddressStrategy.class);
 private final boolean useHttps;
@@ -94,11 +88,9 @@ public class ApacheProxyAddressStrategy extends IncomingRequestAddressStrategy {
 }
 @Override
- public String determineServerBase(ServletContext servletContext,
- HttpServletRequest request) {
+ public String determineServerBase(ServletContext servletContext, HttpServletRequest request) {
 String serverBase = super.determineServerBase(servletContext, request);
- ServletServerHttpRequest requestWrapper = new ServletServerHttpRequest(
- request);
+ ServletServerHttpRequest requestWrapper = new ServletServerHttpRequest(request);
 UriComponentsBuilder uriBuilder = UriComponentsBuilder.fromHttpRequest(requestWrapper);
 uriBuilder.replaceQuery(null);
 HttpHeaders headers = requestWrapper.getHeaders();
@@ -108,26 +100,23 @@ public class ApacheProxyAddressStrategy extends IncomingRequestAddressStrategy {
 /**
 * If forward host is defined, but no forward protocol, use the configured default.
- * + * * @param uriBuilder * @param headers */ - private void adjustSchemeWithDefault(UriComponentsBuilder uriBuilder, - HttpHeaders headers) { - if (headers.getFirst(X_FORWARDED_HOST) != null - && headers.getFirst(X_FORWARDED_PROTO) == null) { + private void adjustSchemeWithDefault(UriComponentsBuilder uriBuilder, HttpHeaders headers) { + if (headers.getFirst(X_FORWARDED_HOST) != null && headers.getFirst(X_FORWARDED_PROTO) == null) { uriBuilder.scheme(useHttps ? "https" : "http"); } } - private String forwardedServerBase(String originalServerBase, - HttpHeaders headers, UriComponentsBuilder uriBuilder) { + private String forwardedServerBase( + String originalServerBase, HttpHeaders headers, UriComponentsBuilder uriBuilder) { Optional forwardedPrefix = getForwardedPrefix(headers); LOG.debug("serverBase: {}, forwardedPrefix: {}", originalServerBase, forwardedPrefix); LOG.debug("request header: {}", headers); - String path = forwardedPrefix - .orElseGet(() -> pathFrom(originalServerBase)); + String path = forwardedPrefix.orElseGet(() -> pathFrom(originalServerBase)); uriBuilder.replacePath(path); return uriBuilder.build().toUriString(); } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/BasePagingProvider.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/BasePagingProvider.java index 9d8b27b2bb7..fcf6871583b 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/BasePagingProvider.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/BasePagingProvider.java @@ -54,5 +54,4 @@ public abstract class BasePagingProvider implements IPagingProvider { myMaximumPageSize = theMaximumPageSize; return this; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/BaseRestfulResponse.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/BaseRestfulResponse.java index a24f5d7197a..35f1818f9bf 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/BaseRestfulResponse.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/BaseRestfulResponse.java @@ -67,5 +67,4 @@ public abstract class BaseRestfulResponse implements I public void setRequestDetails(T requestDetails) { this.myRequestDetails = requestDetails; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/Bindings.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/Bindings.java index 1a724538ba4..a3776eee981 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/Bindings.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/Bindings.java @@ -32,7 +32,11 @@ public class Bindings { private final HashMap> myOperationIdToBindings; private final IdentityHashMap myOperationBindingToId; - public Bindings(IdentityHashMap theNamedSearchMethodBindingToName, HashMap> theSearchNameToBindings, HashMap> theOperationIdToBindings, IdentityHashMap theOperationBindingToName) { + public Bindings( + IdentityHashMap theNamedSearchMethodBindingToName, + HashMap> theSearchNameToBindings, + HashMap> theOperationIdToBindings, + IdentityHashMap theOperationBindingToName) { myNamedSearchMethodBindingToName = theNamedSearchMethodBindingToName; mySearchNameToBindings = theSearchNameToBindings; myOperationIdToBindings = theOperationIdToBindings; diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/BundleProviderWithNamedPages.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/BundleProviderWithNamedPages.java index a213d97cc27..f313a8a0d5b 100644 --- 
a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/BundleProviderWithNamedPages.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/BundleProviderWithNamedPages.java @@ -22,8 +22,8 @@ package ca.uhn.fhir.rest.server; import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBaseResource; -import javax.annotation.Nonnull; import java.util.List; +import javax.annotation.Nonnull; /** * Bundle provider that uses named pages instead of counts @@ -44,7 +44,8 @@ public class BundleProviderWithNamedPages extends SimpleBundleProvider { * @see #setNextPageId(String) * @see #setPreviousPageId(String) */ - public BundleProviderWithNamedPages(List theResultsInThisPage, String theSearchId, String thePageId, Integer theTotalResults) { + public BundleProviderWithNamedPages( + List theResultsInThisPage, String theSearchId, String thePageId, Integer theTotalResults) { super(theResultsInThisPage, theSearchId); Validate.notNull(theResultsInThisPage, "theResultsInThisPage must not be null"); @@ -95,5 +96,4 @@ public class BundleProviderWithNamedPages extends SimpleBundleProvider { super.setSize(theSize); return this; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/BundleProviders.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/BundleProviders.java index 5828e42a108..afceb9c2497 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/BundleProviders.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/BundleProviders.java @@ -19,15 +19,13 @@ */ package ca.uhn.fhir.rest.server; -import java.util.Collections; -import java.util.List; - -import org.hl7.fhir.instance.model.api.IBaseResource; - import ca.uhn.fhir.model.primitive.InstantDt; import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.util.CoverageIgnore; +import org.hl7.fhir.instance.model.api.IBaseResource; +import java.util.Collections; +import java.util.List; import javax.annotation.Nonnull; /** @@ -38,7 +36,7 @@ public class BundleProviders { /** Non instantiable */ @CoverageIgnore private BundleProviders() { - //nothing + // nothing } /** diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/CommonResourceSupertypeScanner.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/CommonResourceSupertypeScanner.java index 6ea76d83fc9..fda30277416 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/CommonResourceSupertypeScanner.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/CommonResourceSupertypeScanner.java @@ -20,11 +20,12 @@ package ca.uhn.fhir.rest.server; import ca.uhn.fhir.model.api.annotation.ResourceDef; +import org.hl7.fhir.instance.model.api.IBaseResource; + import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.Optional; -import org.hl7.fhir.instance.model.api.IBaseResource; /** *

    @@ -38,47 +39,48 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
      * MyPatient4 extends MyPatient3
      * this class will find the common ancestor sequence "IBaseResource -> Patient -> MyPatient". MyPatient is the lowest common superclass in this hierarchy.
      * 
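The javadoc above describes a prefix intersection over each registered resource's superclass chain: every hierarchy is walked from its root down, and only the leading run shared by all registered classes is kept. A minimal standalone sketch of that idea, using plain java.lang.Class and invented names rather than the HAPI types (no @ResourceDef filtering):

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;
    import java.util.Optional;

    // Minimal sketch: keep the longest shared root-to-leaf prefix across
    // every registered class hierarchy.
    class SupertypePrefixScanner {

        private List<Class<?>> sharedAncestorsDescending; // root first, most specific last
        private boolean initialized;

        void register(Class<?> theClass) {
            List<Class<?>> hierarchy = new ArrayList<>();
            for (Class<?> c = theClass; c != null && c != Object.class; c = c.getSuperclass()) {
                hierarchy.add(c);
            }
            Collections.reverse(hierarchy); // root first
            if (!initialized) {
                sharedAncestorsDescending = hierarchy;
                initialized = true;
                return;
            }
            int limit = Math.min(hierarchy.size(), sharedAncestorsDescending.size());
            int keep = limit;
            for (int i = 0; i < limit; i++) {
                if (sharedAncestorsDescending.get(i) != hierarchy.get(i)) {
                    keep = i;
                    break;
                }
            }
            sharedAncestorsDescending = new ArrayList<>(sharedAncestorsDescending.subList(0, keep));
        }

        Optional<Class<?>> getLowestCommonSuperclass() {
            if (!initialized || sharedAncestorsDescending.isEmpty()) {
                return Optional.empty();
            }
            return Optional.of(sharedAncestorsDescending.get(sharedAncestorsDescending.size() - 1));
        }
    }

The last element of the surviving prefix is the lowest common superclass; an empty prefix means the registered classes share no common ancestor below Object.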
    - * + * */ public class CommonResourceSupertypeScanner { - private List> greatestSharedAncestorsDescending; - private boolean initialized; + private List> greatestSharedAncestorsDescending; + private boolean initialized; - /** - * Recomputes the lowest common superclass by adding a new resource definition to the hierarchy. - * @param resourceClass The resource class to add. - */ - public void register(Class resourceClass) { - List> resourceClassesInHierarchy = new LinkedList<>(); - Class currentClass = resourceClass; - while (IBaseResource.class.isAssignableFrom(currentClass) - && currentClass.getAnnotation(ResourceDef.class) != null) { - resourceClassesInHierarchy.add((Class)currentClass); - currentClass = currentClass.getSuperclass(); - } - Collections.reverse(resourceClassesInHierarchy); - if (initialized) { - for (int i = 0; i < Math.min(resourceClassesInHierarchy.size(), greatestSharedAncestorsDescending.size()); i++) { - if (greatestSharedAncestorsDescending.get(i) != resourceClassesInHierarchy.get(i)) { - greatestSharedAncestorsDescending = greatestSharedAncestorsDescending.subList(0, i); - break; - } - } - } else { - greatestSharedAncestorsDescending = resourceClassesInHierarchy; - initialized = true; - } - } - - /** - * @return The lowest common superclass of currently registered resources. - */ - public Optional> getLowestCommonSuperclass() { - if (!initialized || greatestSharedAncestorsDescending.isEmpty()) { - return Optional.empty(); - } - return Optional.ofNullable(greatestSharedAncestorsDescending.get(greatestSharedAncestorsDescending.size() - 1)); - } + /** + * Recomputes the lowest common superclass by adding a new resource definition to the hierarchy. + * @param resourceClass The resource class to add. + */ + public void register(Class resourceClass) { + List> resourceClassesInHierarchy = new LinkedList<>(); + Class currentClass = resourceClass; + while (IBaseResource.class.isAssignableFrom(currentClass) + && currentClass.getAnnotation(ResourceDef.class) != null) { + resourceClassesInHierarchy.add((Class) currentClass); + currentClass = currentClass.getSuperclass(); + } + Collections.reverse(resourceClassesInHierarchy); + if (initialized) { + for (int i = 0; + i < Math.min(resourceClassesInHierarchy.size(), greatestSharedAncestorsDescending.size()); + i++) { + if (greatestSharedAncestorsDescending.get(i) != resourceClassesInHierarchy.get(i)) { + greatestSharedAncestorsDescending = greatestSharedAncestorsDescending.subList(0, i); + break; + } + } + } else { + greatestSharedAncestorsDescending = resourceClassesInHierarchy; + initialized = true; + } + } + /** + * @return The lowest common superclass of currently registered resources. 
+ */ + public Optional> getLowestCommonSuperclass() { + if (!initialized || greatestSharedAncestorsDescending.isEmpty()) { + return Optional.empty(); + } + return Optional.ofNullable(greatestSharedAncestorsDescending.get(greatestSharedAncestorsDescending.size() - 1)); + } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/ETagSupportEnum.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/ETagSupportEnum.java index 3fb8c826958..b9480430ba9 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/ETagSupportEnum.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/ETagSupportEnum.java @@ -21,7 +21,7 @@ package ca.uhn.fhir.rest.server; /** * RESTful server behaviour for automatically adding profile tags - * + * * @see RestfulServer#setETagSupport(ETagSupportEnum) */ public enum ETagSupportEnum { @@ -29,7 +29,7 @@ public enum ETagSupportEnum { * Send ETag headers */ ENABLED, - + /** * Do not send ETag headers */ diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/ElementsSupportEnum.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/ElementsSupportEnum.java index 65d215c2588..d5fcb999ba0 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/ElementsSupportEnum.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/ElementsSupportEnum.java @@ -37,5 +37,4 @@ public enum ElementsSupportEnum { * exclusion. */ EXTENDED - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/FifoMemoryPagingProvider.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/FifoMemoryPagingProvider.java index 36f7a816213..81f8dbdb694 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/FifoMemoryPagingProvider.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/FifoMemoryPagingProvider.java @@ -53,5 +53,4 @@ public class FifoMemoryPagingProvider extends BasePagingProvider { myBundleProviders.put(key, theList); return key; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/HardcodedServerAddressStrategy.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/HardcodedServerAddressStrategy.java index 60f9bba0be3..5782290bdd2 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/HardcodedServerAddressStrategy.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/HardcodedServerAddressStrategy.java @@ -19,11 +19,11 @@ */ package ca.uhn.fhir.rest.server; +import org.apache.commons.lang3.Validate; + import javax.servlet.ServletContext; import javax.servlet.http.HttpServletRequest; -import org.apache.commons.lang3.Validate; - /** * Server address strategy which simply returns a hardcoded URL */ @@ -53,5 +53,4 @@ public class HardcodedServerAddressStrategy implements IServerAddressStrategy { public String determineServerBase(ServletContext theServletContext, HttpServletRequest theRequest) { return myValue; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IDynamicSearchResourceProvider.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IDynamicSearchResourceProvider.java index ba3f7510108..67e94b4625e 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IDynamicSearchResourceProvider.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IDynamicSearchResourceProvider.java @@ -19,10 +19,10 @@ */ package ca.uhn.fhir.rest.server; -import java.util.List; - import ca.uhn.fhir.context.RuntimeSearchParam; +import java.util.List; + /** * This is still an 
experimental API - It isn't meant for public consumption yet. Get in touch if you'd like to use it * and maybe we can help work out a good design together. @@ -30,5 +30,4 @@ import ca.uhn.fhir.context.RuntimeSearchParam; public interface IDynamicSearchResourceProvider extends IResourceProvider { List getSearchParameters(); - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IPagingProvider.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IPagingProvider.java index ea388a8cf32..58300d2be11 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IPagingProvider.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IPagingProvider.java @@ -60,7 +60,8 @@ public interface IPagingProvider { * method in HAPI FHIR 4.0.0. Existing implementations may choose to * add this parameter and not use it if needed. */ - default IBundleProvider retrieveResultList(@Nullable RequestDetails theRequestDetails, @Nonnull String theSearchId, String thePageId) { + default IBundleProvider retrieveResultList( + @Nullable RequestDetails theRequestDetails, @Nonnull String theSearchId, String thePageId) { return null; } @@ -70,5 +71,4 @@ public interface IPagingProvider { * @param theRequestDetails The server request being made (may be null) */ String storeResultList(@Nullable RequestDetails theRequestDetails, IBundleProvider theList); - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IResourceProvider.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IResourceProvider.java index 7fccf76d078..0fb8ca1a3a6 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IResourceProvider.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IResourceProvider.java @@ -25,9 +25,8 @@ public interface IResourceProvider { /** * Returns the type of resource returned by this provider - * + * * @return Returns the type of resource returned by this provider */ Class getResourceType(); - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IRestfulServerUtil.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IRestfulServerUtil.java index 991ce5338f6..a75fe2ff7ff 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IRestfulServerUtil.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IRestfulServerUtil.java @@ -19,25 +19,28 @@ */ package ca.uhn.fhir.rest.server; -import org.hl7.fhir.instance.model.api.IBaseResource; - import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.method.BaseMethodBinding; import ca.uhn.fhir.rest.server.method.ResourceParameter.Mode; import ca.uhn.fhir.rest.server.method.TransactionParameter.ParamStyle; +import org.hl7.fhir.instance.model.api.IBaseResource; public interface IRestfulServerUtil { - Object getResourceParameter( - RequestDetails requestDetails, - Mode myMode, - BaseMethodBinding theMethodBinding, - Class myResourceType); + Object getResourceParameter( + RequestDetails requestDetails, + Mode myMode, + BaseMethodBinding theMethodBinding, + Class myResourceType); - Object getRequestResource(RequestDetails theRequest, ParamStyle myParamStyle, Class myResourceBundleType); + Object getRequestResource( + RequestDetails theRequest, ParamStyle myParamStyle, Class myResourceBundleType); - T loadResourceFromRequest(RequestDetails theRequest, BaseMethodBinding theMethodBinding, Class theResourceType); - - IBaseResource parseResourceFromRequest(RequestDetails theRequest, BaseMethodBinding theMethodBinding, Class 
theResourceType); + T loadResourceFromRequest( + RequestDetails theRequest, BaseMethodBinding theMethodBinding, Class theResourceType); + IBaseResource parseResourceFromRequest( + RequestDetails theRequest, + BaseMethodBinding theMethodBinding, + Class theResourceType); } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IServerAddressStrategy.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IServerAddressStrategy.java index 2ebf244c393..96d682e1a0e 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IServerAddressStrategy.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IServerAddressStrategy.java @@ -32,5 +32,4 @@ public interface IServerAddressStrategy { * Determine the server base for a given request */ String determineServerBase(ServletContext theServletContext, HttpServletRequest theRequest); - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IServerConformanceProvider.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IServerConformanceProvider.java index 3325acfb302..f2b5eb31b9d 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IServerConformanceProvider.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IServerConformanceProvider.java @@ -19,19 +19,19 @@ */ package ca.uhn.fhir.rest.server; -import javax.servlet.http.HttpServletRequest; - import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.Read; import ca.uhn.fhir.rest.api.server.RequestDetails; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; +import javax.servlet.http.HttpServletRequest; + public interface IServerConformanceProvider { /** * Actually create and return the conformance statement - * + * * See the class documentation for an important note if you are extending this class */ T getServerConformance(HttpServletRequest theRequest, RequestDetails theRequestDetails); @@ -45,7 +45,7 @@ public interface IServerConformanceProvider { * This setter is needed in implementation classes (along with * a no-arg constructor) to avoid reference cycles in the * Spring wiring of a RestfulServer instance. 
- * + * * @param theRestfulServer */ void setRestfulServer(RestfulServer theRestfulServer); diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IncomingRequestAddressStrategy.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IncomingRequestAddressStrategy.java index 1bec0b5b6d5..4653bdbca74 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IncomingRequestAddressStrategy.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/IncomingRequestAddressStrategy.java @@ -19,11 +19,11 @@ */ package ca.uhn.fhir.rest.server; +import org.apache.commons.lang3.StringUtils; + import javax.servlet.ServletContext; import javax.servlet.http.HttpServletRequest; -import org.apache.commons.lang3.StringUtils; - /** * Determines the server's base using the incoming request */ @@ -71,7 +71,7 @@ public class IncomingRequestAddressStrategy implements IServerAddressStrategy { contextIndex = requestUrl.indexOf(requestPath, startOfPath); } } else { - //servletContextPath can start with servletPath + // servletContextPath can start with servletPath contextIndex = requestUrl.indexOf(servletPath + "/", startOfPath); if (contextIndex == -1) { contextIndex = requestUrl.indexOf(servletPath, startOfPath); @@ -108,22 +108,24 @@ public class IncomingRequestAddressStrategy implements IServerAddressStrategy { /** * Determines the servlet's context path. - * + * * This is here to try and deal with the wide variation in servers and what they return. - * + * * getServletContext().getContextPath() is supposed to return the path to the specific servlet we are deployed as but it's not available everywhere. On some servers getServletContext() can return * null (old Jetty seems to suffer from this, see hapi-fhir-base-test-mindeps-server) and on other old servers (Servlet 2.4) getServletContext().getContextPath() doesn't even exist. - * + * * theRequest.getContextPath() returns the context for the specific incoming request. It should be available everywhere, but it's likely to be less predicable if there are multiple servlet mappings * pointing to the same servlet, so we don't favour it. This is possibly not the best strategy (maybe we should just always use theRequest.getContextPath()?) but so far people seem happy with this * behavour across a wide variety of platforms. - * + * * If you are having troubles on a given platform/configuration and want to suggest a change or even report incompatibility here, we'd love to hear about it. 
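The comment above boils down to a version check: on a Servlet 2.5 or newer container the servlet context can report its own context path, otherwise the per-request context path is used, and the result is normalized so it is never null. A rough standalone illustration of that decision (class and method names invented for the example):

    import javax.servlet.ServletContext;
    import javax.servlet.http.HttpServletRequest;
    import org.apache.commons.lang3.StringUtils;

    final class ContextPathExample {

        private ContextPathExample() {}

        // Prefer the container-reported context path on Servlet 2.5 or newer,
        // fall back to the per-request context path otherwise, never return null.
        static String contextPathOf(ServletContext theContext, HttpServletRequest theRequest) {
            String retVal;
            if (theContext != null
                    && (theContext.getMajorVersion() >= 3
                            || (theContext.getMajorVersion() == 2 && theContext.getMinorVersion() >= 5))) {
                retVal = theContext.getContextPath();
            } else {
                retVal = theRequest.getContextPath();
            }
            return StringUtils.defaultString(retVal);
        }
    }

The defaultString call mirrors the null-to-empty normalization that the real method applies before returning.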
*/ public static String determineServletContextPath(HttpServletRequest theRequest, RestfulServer server) { String retVal; if (server.getServletContext() != null) { - if (server.getServletContext().getMajorVersion() >= 3 || (server.getServletContext().getMajorVersion() > 2 && server.getServletContext().getMinorVersion() >= 5)) { + if (server.getServletContext().getMajorVersion() >= 3 + || (server.getServletContext().getMajorVersion() > 2 + && server.getServletContext().getMinorVersion() >= 5)) { retVal = server.getServletContext().getContextPath(); } else { retVal = theRequest.getContextPath(); @@ -134,5 +136,4 @@ public class IncomingRequestAddressStrategy implements IServerAddressStrategy { retVal = StringUtils.defaultString(retVal); return retVal; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/PageProvider.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/PageProvider.java index 3390e782056..c8a98e58aff 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/PageProvider.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/PageProvider.java @@ -24,11 +24,10 @@ import ca.uhn.fhir.rest.annotation.GetPage; import ca.uhn.fhir.util.CoverageIgnore; public class PageProvider { - + @GetPage() @CoverageIgnore public IResource getPage() { return null; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/ResourceBinding.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/ResourceBinding.java index 9c0da650ded..febb8a729c9 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/ResourceBinding.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/ResourceBinding.java @@ -21,7 +21,6 @@ package ca.uhn.fhir.rest.server; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.method.BaseMethodBinding; -import ca.uhn.fhir.rest.server.method.BaseMethodBinding; import ca.uhn.fhir.rest.server.method.MethodMatchEnum; import java.util.LinkedList; @@ -93,8 +92,7 @@ public class ResourceBinding { @Override public boolean equals(Object o) { - if (!(o instanceof ResourceBinding)) - return false; + if (!(o instanceof ResourceBinding)) return false; return resourceName.equals(((ResourceBinding) o).getResourceName()); } @@ -102,5 +100,4 @@ public class ResourceBinding { public int hashCode() { return 0; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServer.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServer.java index 6dbfbad7ccf..02dbd0d5ccd 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServer.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServer.java @@ -59,7 +59,6 @@ import ca.uhn.fhir.rest.server.method.MethodMatchEnum; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import ca.uhn.fhir.rest.server.tenant.ITenantIdentificationStrategy; import ca.uhn.fhir.util.CoverageIgnore; -import ca.uhn.fhir.util.IoUtil; import ca.uhn.fhir.util.OperationOutcomeUtil; import ca.uhn.fhir.util.ReflectionUtil; import ca.uhn.fhir.util.UrlPathTokenizer; @@ -76,15 +75,6 @@ import org.hl7.fhir.instance.model.api.IIdType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import javax.servlet.ServletException; -import javax.servlet.UnavailableException; -import javax.servlet.http.HttpServlet; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; -import 
java.io.Closeable; -import java.io.EOFException; import java.io.IOException; import java.io.InputStream; import java.io.Writer; @@ -107,6 +97,13 @@ import java.util.Set; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import java.util.jar.Manifest; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import javax.servlet.ServletException; +import javax.servlet.UnavailableException; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; import static ca.uhn.fhir.util.StringUtil.toUtf8String; import static java.util.stream.Collectors.toList; @@ -140,6 +137,7 @@ public class RestfulServer extends HttpServlet implements IRestfulServer createPoweredByAttributes() { - return Lists.newArrayList("FHIR Server", "FHIR " + myFhirContext.getVersion().getVersion().getFhirVersionString() + "/" + myFhirContext.getVersion().getVersion().name()); + return Lists.newArrayList( + "FHIR Server", + "FHIR " + myFhirContext.getVersion().getVersion().getFhirVersionString() + "/" + + myFhirContext.getVersion().getVersion().name()); } /** @@ -354,18 +362,11 @@ public class RestfulServer extends HttpServlet implements IRestfulServer t.getMethodBindings().stream()) - .forEach(t -> t.close()); - myGlobalBinding - .getMethodBindings() - .forEach(t -> t.close()); - myServerBinding - .getMethodBindings() - .forEach(t -> t.close()); - + myResourceNameToBinding.values().stream() + .flatMap(t -> t.getMethodBindings().stream()) + .forEach(t -> t.close()); + myGlobalBinding.getMethodBindings().forEach(t -> t.close()); + myServerBinding.getMethodBindings().forEach(t -> t.close()); } /** @@ -399,7 +400,8 @@ public class RestfulServer extends HttpServlet implements IRestfulServer getInterceptors_() { - List retVal = getInterceptorService() - .getAllRegisteredInterceptors() - .stream() - .filter(t -> t instanceof IServerInterceptor) - .map(t -> (IServerInterceptor) t) - .collect(toList()); + List retVal = getInterceptorService().getAllRegisteredInterceptors().stream() + .filter(t -> t instanceof IServerInterceptor) + .map(t -> (IServerInterceptor) t) + .collect(toList()); return Collections.unmodifiableList(retVal); } @@ -758,7 +770,6 @@ public class RestfulServer extends HttpServlet implements IRestfulServer resourceProvider = getResourceProviders(); // 'true' tells registerProviders() that @@ -1376,7 +1402,8 @@ public class RestfulServer extends HttpServlet implements IRestfulServer resourceType = rsrcProvider.getResourceType(); if (resourceType == null) { - throw new NullPointerException(Msg.code(301) + "getResourceType() on class '" + rsrcProvider.getClass().getCanonicalName() + "' returned null"); + throw new NullPointerException(Msg.code(301) + "getResourceType() on class '" + + rsrcProvider.getClass().getCanonicalName() + "' returned null"); } if (!inInit) { myResourceProviders.add(rsrcProvider); @@ -1730,10 +1762,12 @@ public class RestfulServer extends HttpServlet implements IRestfulServer it = resourceBinding.getMethodBindings().iterator(); it.hasNext(); ) { + for (Iterator it = + resourceBinding.getMethodBindings().iterator(); + it.hasNext(); ) { BaseMethodBinding binding = it.next(); if (theProvider.equals(binding.getProvider())) { it.remove(); @@ -1799,7 +1835,8 @@ public class RestfulServer extends HttpServlet implements IRestfulServer[] interfaces, Collection resourceNames) { + private void removeResourceMethodsOnInterfaces( + Object theProvider, Class[] interfaces, Collection 
resourceNames) { for (Class anInterface : interfaces) { removeResourceMethods(theProvider, anInterface, resourceNames); removeResourceMethodsOnInterfaces(theProvider, anInterface.getInterfaces(), resourceNames); @@ -1809,7 +1846,8 @@ public class RestfulServer extends HttpServlet implements IRestfulServer clazz, Collection resourceNames) throws ConfigurationException { + private void removeResourceMethods(Object theProvider, Class clazz, Collection resourceNames) + throws ConfigurationException { for (Method m : ReflectionUtil.getDeclaredMethods(clazz)) { BaseMethodBinding foundMethodBinding = BaseMethodBinding.bindMethod(m, getFhirContext(), theProvider); if (foundMethodBinding == null) { @@ -1826,7 +1864,14 @@ public class RestfulServer extends HttpServlet implements IRestfulServer outcome, int operationStatus, boolean allowPrefer, MethodOutcome response, String resourceName) throws IOException { + public Object returnResponse( + ServletRequestDetails theRequest, + BaseParseAction outcome, + int operationStatus, + boolean allowPrefer, + MethodOutcome response, + String resourceName) + throws IOException { HttpServletResponse servletResponse = theRequest.getServletResponse(); servletResponse.setStatus(operationStatus); servletResponse.setCharacterEncoding(Constants.CHARSET_NAME_UTF8); @@ -1850,7 +1895,8 @@ public class RestfulServer extends HttpServlet implements IRestfulServer knownDistinctAndSortedResourceTypes = myResourceProviders.stream() - .map(t -> t.getResourceType().getSimpleName()) - .distinct() - .sorted() - .collect(toList()); - throw new ResourceNotFoundException(Msg.code(302) + "Unknown resource type '" + theResourceName + "' - Server knows how to handle: " + knownDistinctAndSortedResourceTypes); + .map(t -> t.getResourceType().getSimpleName()) + .distinct() + .sorted() + .collect(toList()); + throw new ResourceNotFoundException(Msg.code(302) + "Unknown resource type '" + theResourceName + + "' - Server knows how to handle: " + knownDistinctAndSortedResourceTypes); } /** @@ -1980,12 +2028,13 @@ public class RestfulServer extends HttpServlet implements IRestfulServer> nextEntry : theException.getResponseHeaders().entrySet()) { + for (Entry> nextEntry : + theException.getResponseHeaders().entrySet()) { for (String nextValue : nextEntry.getValue()) { if (isNotBlank(nextValue)) { theResponse.addHeader(nextEntry.getKey(), nextValue); @@ -2056,8 +2105,19 @@ public class RestfulServer extends HttpServlet implements IRestfulServer 0 && (nextString.charAt(0) == '_' || nextString.charAt(0) == '$' || nextString.equals(Constants.URL_TOKEN_METADATA)); + return nextString.length() > 0 + && (nextString.charAt(0) == '_' + || nextString.charAt(0) == '$' + || nextString.equals(Constants.URL_TOKEN_METADATA)); } -// /** -// * Returns the read method binding for the given resource type, or -// * returns null if not -// * @param theResourceType The resource type, e.g. "Patient" -// * @return The read method binding, or null -// */ -// public ReadMethodBinding findReadMethodBinding(String theResourceType) { -// ReadMethodBinding retVal = null; -// -// ResourceBinding type = myResourceNameToBinding.get(theResourceType); -// if (type != null) { -// for (BaseMethodBinding next : type.getMethodBindings()) { -// if (next instanceof ReadMethodBinding) { -// retVal = (ReadMethodBinding) next; -// } -// } -// } -// -// return retVal; -// } + // /** + // * Returns the read method binding for the given resource type, or + // * returns null if not + // * @param theResourceType The resource type, e.g. 
"Patient" + // * @return The read method binding, or null + // */ + // public ReadMethodBinding findReadMethodBinding(String theResourceType) { + // ReadMethodBinding retVal = null; + // + // ResourceBinding type = myResourceNameToBinding.get(theResourceType); + // if (type != null) { + // for (BaseMethodBinding next : type.getMethodBindings()) { + // if (next instanceof ReadMethodBinding) { + // retVal = (ReadMethodBinding) next; + // } + // } + // } + // + // return retVal; + // } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServerConfiguration.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServerConfiguration.java index f7549f2f76c..55050b6fbc3 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServerConfiguration.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServerConfiguration.java @@ -41,8 +41,6 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -57,6 +55,8 @@ import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -121,7 +121,8 @@ public class RestfulServerConfiguration implements ISearchParamRegistry { return myResourceNameToSharedSupertype; } - public RestfulServerConfiguration setNameToSharedSupertype(Map> resourceNameToSharedSupertype) { + public RestfulServerConfiguration setNameToSharedSupertype( + Map> resourceNameToSharedSupertype) { this.myResourceNameToSharedSupertype = resourceNameToSharedSupertype; return this; } @@ -247,10 +248,8 @@ public class RestfulServerConfiguration implements ISearchParamRegistry { HashMap> operationIdToBindings = new HashMap<>(); Map> resourceToMethods = collectMethodBindings(); - List methodBindings = resourceToMethods - .values() - .stream().flatMap(t -> t.stream()) - .collect(Collectors.toList()); + List methodBindings = + resourceToMethods.values().stream().flatMap(t -> t.stream()).collect(Collectors.toList()); if (myGlobalBindings != null) { methodBindings.addAll(myGlobalBindings); } @@ -327,7 +326,7 @@ public class RestfulServerConfiguration implements ISearchParamRegistry { String operationId = operationIdBuilder.toString(); operationIdToBindings.put(operationId, nextMethodBindings); - nextMethodBindings.forEach(t->operationBindingToId.put(t, operationId)); + nextMethodBindings.forEach(t -> operationBindingToId.put(t, operationId)); } for (BaseMethodBinding nextMethodBinding : methodBindings) { @@ -348,7 +347,8 @@ public class RestfulServerConfiguration implements ISearchParamRegistry { } } - return new Bindings(namedSearchMethodBindingToName, searchNameToBindings, operationIdToBindings, operationBindingToId); + return new Bindings( + namedSearchMethodBindingToName, searchNameToBindings, operationIdToBindings, operationBindingToId); } public Map> collectMethodBindings() { @@ -391,21 +391,21 @@ public class RestfulServerConfiguration implements ISearchParamRegistry { public void computeSharedSupertypeForResourcePerName(Collection providers) { Map resourceNameToScanner = new HashMap<>(); - List> providedResourceClasses = providers.stream() - .map(provider -> provider.getResourceType()) - .collect(Collectors.toList()); - 
providedResourceClasses.stream() - .forEach(resourceClass -> { - RuntimeResourceDefinition baseDefinition = getFhirContext().getResourceDefinition(resourceClass).getBaseDefinition(); - CommonResourceSupertypeScanner scanner = resourceNameToScanner.computeIfAbsent(baseDefinition.getName(), key -> new CommonResourceSupertypeScanner()); - scanner.register(resourceClass); - }); + List> providedResourceClasses = + providers.stream().map(provider -> provider.getResourceType()).collect(Collectors.toList()); + providedResourceClasses.stream().forEach(resourceClass -> { + RuntimeResourceDefinition baseDefinition = + getFhirContext().getResourceDefinition(resourceClass).getBaseDefinition(); + CommonResourceSupertypeScanner scanner = resourceNameToScanner.computeIfAbsent( + baseDefinition.getName(), key -> new CommonResourceSupertypeScanner()); + scanner.register(resourceClass); + }); myResourceNameToSharedSupertype = resourceNameToScanner.entrySet().stream() - .filter(entry -> entry.getValue().getLowestCommonSuperclass().isPresent()) - .collect(Collectors.toMap( - entry -> entry.getKey(), - entry -> entry.getValue().getLowestCommonSuperclass().get())); + .filter(entry -> entry.getValue().getLowestCommonSuperclass().isPresent()) + .collect(Collectors.toMap( + entry -> entry.getKey(), + entry -> entry.getValue().getLowestCommonSuperclass().get())); } private String createNamedQueryName(SearchMethodBinding searchMethodBinding) { @@ -430,14 +430,12 @@ public class RestfulServerConfiguration implements ISearchParamRegistry { ResourceSearchParams retval = new ResourceSearchParams(theResourceName); - collectMethodBindings() - .getOrDefault(theResourceName, Collections.emptyList()) - .stream() - .filter(t -> theResourceName.equals(t.getResourceName())) - .filter(t -> t instanceof SearchMethodBinding) - .map(t -> (SearchMethodBinding) t) - .filter(t -> t.getQueryName() == null) - .forEach(t -> createRuntimeBinding(retval, t)); + collectMethodBindings().getOrDefault(theResourceName, Collections.emptyList()).stream() + .filter(t -> theResourceName.equals(t.getResourceName())) + .filter(t -> t instanceof SearchMethodBinding) + .map(t -> (SearchMethodBinding) t) + .filter(t -> t.getQueryName() == null) + .forEach(t -> createRuntimeBinding(retval, t)); return retval; } @@ -448,15 +446,14 @@ public class RestfulServerConfiguration implements ISearchParamRegistry { throw new UnsupportedOperationException(Msg.code(286)); } - private void createRuntimeBinding(ResourceSearchParams theMapToPopulate, SearchMethodBinding theSearchMethodBinding) { + private void createRuntimeBinding( + ResourceSearchParams theMapToPopulate, SearchMethodBinding theSearchMethodBinding) { - List parameters = theSearchMethodBinding - .getParameters() - .stream() - .filter(t -> t instanceof SearchParameter) - .map(t -> (SearchParameter) t) - .sorted(SearchParameterComparator.INSTANCE) - .collect(Collectors.toList()); + List parameters = theSearchMethodBinding.getParameters().stream() + .filter(t -> t instanceof SearchParameter) + .map(t -> (SearchParameter) t) + .sorted(SearchParameterComparator.INSTANCE) + .collect(Collectors.toList()); for (SearchParameter nextParameter : parameters) { @@ -473,7 +470,8 @@ public class RestfulServerConfiguration implements ISearchParamRegistry { * If the parameter has no description, default to the one from the resource */ if (StringUtils.isBlank(nextParamDescription)) { - RuntimeResourceDefinition def = getFhirContext().getResourceDefinition(theSearchMethodBinding.getResourceName()); + 
RuntimeResourceDefinition def = + getFhirContext().getResourceDefinition(theSearchMethodBinding.getResourceName()); RuntimeSearchParam paramDef = def.getSearchParam(nextParamUnchainedName); if (paramDef != null) { nextParamDescription = paramDef.getDescription(); @@ -484,20 +482,34 @@ public class RestfulServerConfiguration implements ISearchParamRegistry { continue; } - IIdType id = getFhirContext().getVersion().newIdType().setValue("SearchParameter/" + theSearchMethodBinding.getResourceName() + "-" + nextParamName); + IIdType id = getFhirContext() + .getVersion() + .newIdType() + .setValue("SearchParameter/" + theSearchMethodBinding.getResourceName() + "-" + nextParamName); String uri = null; String description = nextParamDescription; String path = null; RestSearchParameterTypeEnum type = nextParameter.getParamType(); Set providesMembershipInCompartments = Collections.emptySet(); Set targets = Collections.emptySet(); - RuntimeSearchParam.RuntimeSearchParamStatusEnum status = RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE; + RuntimeSearchParam.RuntimeSearchParamStatusEnum status = + RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE; Collection base = Collections.singletonList(theSearchMethodBinding.getResourceName()); - RuntimeSearchParam param = new RuntimeSearchParam(id, uri, nextParamName, description, path, type, providesMembershipInCompartments, targets, status, null, null, base); + RuntimeSearchParam param = new RuntimeSearchParam( + id, + uri, + nextParamName, + description, + path, + type, + providesMembershipInCompartments, + targets, + status, + null, + null, + base); theMapToPopulate.put(nextParamName, param); - } - } private static class SearchParameterComparator implements Comparator { diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServerUtils.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServerUtils.java index c689a10dfcc..c10b35463be 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServerUtils.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/RestfulServerUtils.java @@ -29,10 +29,10 @@ import ca.uhn.fhir.model.api.Include; import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum; import ca.uhn.fhir.model.primitive.InstantDt; import ca.uhn.fhir.parser.IParser; -import ca.uhn.fhir.rest.api.*; import ca.uhn.fhir.rest.api.server.IRestfulResponse; import ca.uhn.fhir.rest.api.server.IRestfulServer; import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.*; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.method.ElementsParameter; @@ -46,27 +46,30 @@ import com.google.common.collect.Sets; import org.apache.commons.lang3.math.NumberUtils; import org.hl7.fhir.instance.model.api.*; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import javax.servlet.http.HttpServletRequest; import java.io.IOException; import java.io.OutputStream; import java.io.Writer; -import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; +import java.util.*; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import javax.servlet.http.HttpServletRequest; import static org.apache.commons.lang3.StringUtils.*; public class RestfulServerUtils { - static final Pattern ACCEPT_HEADER_PATTERN = 
Pattern.compile("\\s*([a-zA-Z0-9+.*/-]+)\\s*(;\\s*([a-zA-Z]+)\\s*=\\s*([a-zA-Z0-9.]+)\\s*)?(,?)"); + static final Pattern ACCEPT_HEADER_PATTERN = + Pattern.compile("\\s*([a-zA-Z0-9+.*/-]+)\\s*(;\\s*([a-zA-Z]+)\\s*=\\s*([a-zA-Z0-9.]+)\\s*)?(,?)"); private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(RestfulServerUtils.class); - private static final HashSet TEXT_ENCODE_ELEMENTS = new HashSet<>(Arrays.asList("*.text", "*.id", "*.meta", "*.(mandatory)")); + private static final HashSet TEXT_ENCODE_ELEMENTS = + new HashSet<>(Arrays.asList("*.text", "*.id", "*.meta", "*.(mandatory)")); private static Map myFhirContextMap = Collections.synchronizedMap(new HashMap<>()); - private static EnumSet ourOperationsWhichAllowPreferHeader = EnumSet.of(RestOperationTypeEnum.CREATE, RestOperationTypeEnum.UPDATE, RestOperationTypeEnum.PATCH); + private static EnumSet ourOperationsWhichAllowPreferHeader = + EnumSet.of(RestOperationTypeEnum.CREATE, RestOperationTypeEnum.UPDATE, RestOperationTypeEnum.PATCH); @SuppressWarnings("EnumSwitchStatementWhichMissesCases") public static void configureResponseParser(RequestDetails theRequestDetails, IParser parser) { @@ -82,7 +85,8 @@ public class RestfulServerUtils { // _elements Set elements = ElementsParameter.getElementsValueOrNull(theRequestDetails, false); if (elements != null && !summaryMode.equals(Collections.singleton(SummaryEnum.FALSE))) { - throw new InvalidRequestException(Msg.code(304) + "Cannot combine the " + Constants.PARAM_SUMMARY + " and " + Constants.PARAM_ELEMENTS + " parameters"); + throw new InvalidRequestException(Msg.code(304) + "Cannot combine the " + Constants.PARAM_SUMMARY + " and " + + Constants.PARAM_ELEMENTS + " parameters"); } // _elements:exclude @@ -170,19 +174,24 @@ public class RestfulServerUtils { /** * This function will create a self link but omit any parameters passed in via the excludedParameterNames list. 
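The method that follows strings the self link together from the server base, the request path (minus any tenant prefix) and, for GET requests only, the escaped query parameters. A compact sketch of the same idea with the exclusion applied, using only JDK classes and invented names:

    import java.io.UnsupportedEncodingException;
    import java.net.URLEncoder;
    import java.util.Collection;
    import java.util.Map;

    final class SelfLinkExample {

        private SelfLinkExample() {}

        // Rebuild the request URL from the server base, the request path and the
        // GET parameters, skipping every parameter named in the exclusion list.
        static String selfLinkWithout(
                String theServerBase,
                String theRequestPath,
                Map<String, String[]> theParameters,
                Collection<String> theExcludedNames)
                throws UnsupportedEncodingException {
            StringBuilder b = new StringBuilder(theServerBase);
            if (theRequestPath != null && !theRequestPath.isEmpty()) {
                b.append('/').append(theRequestPath);
            }
            boolean first = true;
            for (Map.Entry<String, String[]> next : theParameters.entrySet()) {
                if (theExcludedNames.contains(next.getKey())) {
                    continue; // omitted from the generated self link
                }
                for (String value : next.getValue()) {
                    b.append(first ? '?' : '&');
                    first = false;
                    b.append(URLEncoder.encode(next.getKey(), "UTF-8"));
                    b.append('=');
                    b.append(URLEncoder.encode(value, "UTF-8"));
                }
            }
            return b.toString();
        }
    }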
*/ - public static String createLinkSelfWithoutGivenParameters(String theServerBase, RequestDetails theRequest, List excludedParameterNames) { + public static String createLinkSelfWithoutGivenParameters( + String theServerBase, RequestDetails theRequest, List excludedParameterNames) { StringBuilder b = new StringBuilder(); b.append(theServerBase); if (isNotBlank(theRequest.getRequestPath())) { b.append('/'); - if (isNotBlank(theRequest.getTenantId()) && theRequest.getRequestPath().startsWith(theRequest.getTenantId() + "/")) { - b.append(theRequest.getRequestPath().substring(theRequest.getTenantId().length() + 1)); + if (isNotBlank(theRequest.getTenantId()) + && theRequest.getRequestPath().startsWith(theRequest.getTenantId() + "/")) { + b.append(theRequest + .getRequestPath() + .substring(theRequest.getTenantId().length() + 1)); } else { b.append(theRequest.getRequestPath()); } } - // For POST the URL parameters get jumbled with the post body parameters so don't include them, they might be huge + // For POST the URL parameters get jumbled with the post body parameters so don't include them, they might be + // huge if (theRequest.getRequestType() == RequestTypeEnum.GET) { boolean first = true; Map parameters = theRequest.getParameters(); @@ -204,10 +213,15 @@ public class RestfulServerUtils { } return b.toString(); - } - public static String createOffsetPagingLink(BundleLinks theBundleLinks, String requestPath, String tenantId, Integer theOffset, Integer theCount, Map theRequestParameters) { + public static String createOffsetPagingLink( + BundleLinks theBundleLinks, + String requestPath, + String tenantId, + Integer theOffset, + Integer theCount, + Map theRequestParameters) { StringBuilder b = new StringBuilder(); b.append(theBundleLinks.serverBase); @@ -221,8 +235,8 @@ public class RestfulServerUtils { } Map params = Maps.newLinkedHashMap(theRequestParameters); - params.put(Constants.PARAM_OFFSET, new String[]{String.valueOf(theOffset)}); - params.put(Constants.PARAM_COUNT, new String[]{String.valueOf(theCount)}); + params.put(Constants.PARAM_OFFSET, new String[] {String.valueOf(theOffset)}); + params.put(Constants.PARAM_COUNT, new String[] {String.valueOf(theCount)}); boolean first = true; for (String nextParamName : new TreeSet<>(params.keySet())) { @@ -242,17 +256,35 @@ public class RestfulServerUtils { return b.toString(); } - public static String createPagingLink(BundleLinks theBundleLinks, RequestDetails theRequestDetails, String theSearchId, int theOffset, int theCount, Map theRequestParameters) { - return createPagingLink(theBundleLinks, theRequestDetails, theSearchId, theOffset, theCount, theRequestParameters, null); + public static String createPagingLink( + BundleLinks theBundleLinks, + RequestDetails theRequestDetails, + String theSearchId, + int theOffset, + int theCount, + Map theRequestParameters) { + return createPagingLink( + theBundleLinks, theRequestDetails, theSearchId, theOffset, theCount, theRequestParameters, null); } - public static String createPagingLink(BundleLinks theBundleLinks, RequestDetails theRequestDetails, String theSearchId, String thePageId, Map theRequestParameters) { - return createPagingLink(theBundleLinks, theRequestDetails, theSearchId, null, null, theRequestParameters, - thePageId); + public static String createPagingLink( + BundleLinks theBundleLinks, + RequestDetails theRequestDetails, + String theSearchId, + String thePageId, + Map theRequestParameters) { + return createPagingLink( + theBundleLinks, theRequestDetails, theSearchId, null, null, 
theRequestParameters, thePageId); } - private static String createPagingLink(BundleLinks theBundleLinks, RequestDetails theRequestDetails, String theSearchId, Integer theOffset, Integer theCount, Map theRequestParameters, - String thePageId) { + private static String createPagingLink( + BundleLinks theBundleLinks, + RequestDetails theRequestDetails, + String theSearchId, + Integer theOffset, + Integer theCount, + Map theRequestParameters, + String thePageId) { String serverBase = theRequestDetails.getFhirServerBase(); @@ -321,11 +353,8 @@ public class RestfulServerUtils { b.append('&'); b.append(Constants.PARAM_ELEMENTS); b.append('='); - String nextValue = elements - .stream() - .sorted() - .map(UrlUtil::escapeUrlParam) - .collect(Collectors.joining(",")); + String nextValue = + elements.stream().sorted().map(UrlUtil::escapeUrlParam).collect(Collectors.joining(",")); b.append(nextValue); } @@ -336,11 +365,10 @@ public class RestfulServerUtils { b.append('&'); b.append(Constants.PARAM_ELEMENTS + Constants.PARAM_ELEMENTS_EXCLUDE_MODIFIER); b.append('='); - String nextValue = elementsExclude - .stream() - .sorted() - .map(UrlUtil::escapeUrlParam) - .collect(Collectors.joining(",")); + String nextValue = elementsExclude.stream() + .sorted() + .map(UrlUtil::escapeUrlParam) + .collect(Collectors.joining(",")); b.append(nextValue); } } @@ -362,7 +390,8 @@ public class RestfulServerUtils { return retVal.getEncoding(); } - private static ResponseEncoding determineRequestEncodingNoDefaultReturnRE(RequestDetails theReq, boolean theStrict) { + private static ResponseEncoding determineRequestEncodingNoDefaultReturnRE( + RequestDetails theReq, boolean theStrict) { ResponseEncoding retVal = null; List headers = theReq.getHeaders(Constants.HEADER_CONTENT_TYPE); if (headers != null) { @@ -411,7 +440,8 @@ public class RestfulServerUtils { * _format parameter. If a value is provided to thePreferContents, we'll * prefer to return that value over the native FHIR value. 
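The selection logic that follows weighs each Accept header entry by its q parameter and keeps the best-scoring FHIR encoding, with the _format parameter and any preferred content type able to short-circuit that choice. A simplified standalone parser showing just the q-value weighting (invented names, not the server's own types):

    import java.util.StringTokenizer;

    final class AcceptHeaderExample {

        private AcceptHeaderExample() {}

        // Pick "JSON" or "XML" from an Accept header based on the highest q value;
        // entries without a q parameter default to 1.0.
        static String preferredEncoding(String theAcceptHeader) {
            String best = null;
            float bestQ = -1f;
            StringTokenizer tok = new StringTokenizer(theAcceptHeader, ",");
            while (tok.hasMoreTokens()) {
                String[] parts = tok.nextToken().trim().split(";");
                String contentType = parts[0].trim();
                float q = 1.0f;
                for (int i = 1; i < parts.length; i++) {
                    String[] kv = parts[i].trim().split("=", 2);
                    if (kv.length == 2 && kv[0].trim().equals("q")) {
                        try {
                            q = Float.parseFloat(kv[1].trim());
                        } catch (NumberFormatException e) {
                            // ignore malformed q values and keep the default weight
                        }
                    }
                }
                String encoding = null;
                if (contentType.contains("json")) {
                    encoding = "JSON";
                } else if (contentType.contains("xml")) {
                    encoding = "XML";
                }
                if (encoding != null && q > bestQ) {
                    best = encoding;
                    bestQ = q;
                }
            }
            return best; // null when neither encoding is acceptable
        }
    }

For example, preferredEncoding("application/fhir+xml;q=0.5, application/fhir+json") returns "JSON", because the unqualified JSON entry defaults to q=1.0.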
*/ - public static ResponseEncoding determineResponseEncodingNoDefault(RequestDetails theReq, EncodingEnum thePrefer, String thePreferContentType) { + public static ResponseEncoding determineResponseEncodingNoDefault( + RequestDetails theReq, EncodingEnum thePrefer, String thePreferContentType) { String[] format = theReq.getParameters().get(Constants.PARAM_FORMAT); if (format != null) { for (String nextFormat : format) { @@ -473,20 +503,32 @@ public class RestfulServerUtils { ResponseEncoding encoding; if (endSpaceIndex == -1) { if (startSpaceIndex == 0) { - encoding = getEncodingForContentType(theReq.getServer().getFhirContext(), strict, nextToken, thePreferContentType); + encoding = getEncodingForContentType( + theReq.getServer().getFhirContext(), strict, nextToken, thePreferContentType); } else { - encoding = getEncodingForContentType(theReq.getServer().getFhirContext(), strict, nextToken.substring(startSpaceIndex), thePreferContentType); + encoding = getEncodingForContentType( + theReq.getServer().getFhirContext(), + strict, + nextToken.substring(startSpaceIndex), + thePreferContentType); } } else { - encoding = getEncodingForContentType(theReq.getServer().getFhirContext(), strict, nextToken.substring(startSpaceIndex, endSpaceIndex), thePreferContentType); + encoding = getEncodingForContentType( + theReq.getServer().getFhirContext(), + strict, + nextToken.substring(startSpaceIndex, endSpaceIndex), + thePreferContentType); String remaining = nextToken.substring(endSpaceIndex + 1); StringTokenizer qualifierTok = new StringTokenizer(remaining, ";"); while (qualifierTok.hasMoreTokens()) { String nextQualifier = qualifierTok.nextToken(); int equalsIndex = nextQualifier.indexOf('='); if (equalsIndex != -1) { - String nextQualifierKey = nextQualifier.substring(0, equalsIndex).trim(); - String nextQualifierValue = nextQualifier.substring(equalsIndex + 1, nextQualifier.length()).trim(); + String nextQualifierKey = + nextQualifier.substring(0, equalsIndex).trim(); + String nextQualifierValue = nextQualifier + .substring(equalsIndex + 1, nextQualifier.length()) + .trim(); if (nextQualifierKey.equals("q")) { try { q = Float.parseFloat(nextQualifierValue); @@ -505,11 +547,8 @@ public class RestfulServerUtils { bestQ = q; } } - } - } - } /* @@ -530,9 +569,11 @@ public class RestfulServerUtils { * "_format" parameter and "Accept:" HTTP header. 
*/ public static ResponseEncoding determineResponseEncodingWithDefault(RequestDetails theReq) { - ResponseEncoding retVal = determineResponseEncodingNoDefault(theReq, theReq.getServer().getDefaultResponseEncoding()); + ResponseEncoding retVal = + determineResponseEncodingNoDefault(theReq, theReq.getServer().getDefaultResponseEncoding()); if (retVal == null) { - retVal = new ResponseEncoding(theReq.getServer().getFhirContext(), theReq.getServer().getDefaultResponseEncoding(), null); + retVal = new ResponseEncoding( + theReq.getServer().getFhirContext(), theReq.getServer().getDefaultResponseEncoding(), null); } return retVal; } @@ -593,7 +634,8 @@ public class RestfulServerUtils { return lastUpdated; } - public static IIdType fullyQualifyResourceIdOrReturnNull(IRestfulServerDefaults theServer, IBaseResource theResource, String theServerBase, IIdType theResourceId) { + public static IIdType fullyQualifyResourceIdOrReturnNull( + IRestfulServerDefaults theServer, IBaseResource theResource, String theServerBase, IIdType theResourceId) { IIdType retVal = null; if (theResourceId.hasIdPart() && isNotBlank(theServerBase)) { String resName = theResourceId.getResourceType(); @@ -627,7 +669,8 @@ public class RestfulServerUtils { return context; } - private static ResponseEncoding getEncodingForContentType(FhirContext theFhirContext, boolean theStrict, String theContentType, String thePreferContentType) { + private static ResponseEncoding getEncodingForContentType( + FhirContext theFhirContext, boolean theStrict, String theContentType, String thePreferContentType) { EncodingEnum encoding; if (theStrict) { encoding = EncodingEnum.forContentTypeStrict(theContentType); @@ -645,11 +688,13 @@ public class RestfulServerUtils { return new ResponseEncoding(theFhirContext, encoding, theContentType); } - public static IParser getNewParser(FhirContext theContext, FhirVersionEnum theForVersion, RequestDetails theRequestDetails) { + public static IParser getNewParser( + FhirContext theContext, FhirVersionEnum theForVersion, RequestDetails theRequestDetails) { FhirContext context = getContextForVersion(theContext, theForVersion); // Determine response encoding - EncodingEnum responseEncoding = RestfulServerUtils.determineResponseEncodingWithDefault(theRequestDetails).getEncoding(); + EncodingEnum responseEncoding = RestfulServerUtils.determineResponseEncodingWithDefault(theRequestDetails) + .getEncoding(); IParser parser; switch (responseEncoding) { case JSON: @@ -704,14 +749,12 @@ public class RestfulServerUtils { if (q == bestQ) { retVal.add(contentTypeGroup.trim()); } - } if (!",".equals(m.group(5))) { break; } } - } } @@ -755,7 +798,6 @@ public class RestfulServerUtils { } else if (key.equals(Constants.HEADER_PREFER_RESPOND_ASYNC)) { retVal.setRespondAsync(true); - } } } @@ -797,18 +839,43 @@ public class RestfulServerUtils { return prettyPrint; } - public static Object streamResponseAsResource(IRestfulServerDefaults theServer, IBaseResource theResource, Set theSummaryMode, int theStatusCode, boolean theAddContentLocationHeader, - boolean respondGzip, RequestDetails theRequestDetails) throws IOException { - return streamResponseAsResource(theServer, theResource, theSummaryMode, theStatusCode, theAddContentLocationHeader, respondGzip, theRequestDetails, null, null); + public static Object streamResponseAsResource( + IRestfulServerDefaults theServer, + IBaseResource theResource, + Set theSummaryMode, + int theStatusCode, + boolean theAddContentLocationHeader, + boolean respondGzip, + RequestDetails 
theRequestDetails) + throws IOException { + return streamResponseAsResource( + theServer, + theResource, + theSummaryMode, + theStatusCode, + theAddContentLocationHeader, + respondGzip, + theRequestDetails, + null, + null); } - public static Object streamResponseAsResource(IRestfulServerDefaults theServer, IBaseResource theResource, Set theSummaryMode, int theStatusCode, - boolean theAddContentLocationHeader, boolean respondGzip, RequestDetails theRequestDetails, IIdType theOperationResourceId, IPrimitiveType theOperationResourceLastUpdated) - throws IOException { + public static Object streamResponseAsResource( + IRestfulServerDefaults theServer, + IBaseResource theResource, + Set theSummaryMode, + int theStatusCode, + boolean theAddContentLocationHeader, + boolean respondGzip, + RequestDetails theRequestDetails, + IIdType theOperationResourceId, + IPrimitiveType theOperationResourceLastUpdated) + throws IOException { IRestfulResponse response = theRequestDetails.getResponse(); // Determine response encoding - ResponseEncoding responseEncoding = RestfulServerUtils.determineResponseEncodingNoDefault(theRequestDetails, theServer.getDefaultResponseEncoding()); + ResponseEncoding responseEncoding = RestfulServerUtils.determineResponseEncodingNoDefault( + theRequestDetails, theServer.getDefaultResponseEncoding()); String serverBase = theRequestDetails.getFhirServerBase(); IIdType fullId = null; @@ -838,7 +905,9 @@ public class RestfulServerUtils { if (fullId != null && fullId.hasVersionIdPart()) { String versionIdPart = fullId.getVersionIdPart(); response.addHeader(Constants.HEADER_ETAG, createEtag(versionIdPart)); - } else if (theResource != null && theResource.getMeta() != null && isNotBlank(theResource.getMeta().getVersionId())) { + } else if (theResource != null + && theResource.getMeta() != null + && isNotBlank(theResource.getMeta().getVersionId())) { String versionId = theResource.getMeta().getVersionId(); response.addHeader(Constants.HEADER_ETAG, createEtag(versionId)); } @@ -854,7 +923,8 @@ public class RestfulServerUtils { // Add a security context header IBaseReference securityContext = BinaryUtil.getSecurityContext(theServer.getFhirContext(), bin); if (securityContext != null) { - String securityContextRef = securityContext.getReferenceElement().getValue(); + String securityContextRef = + securityContext.getReferenceElement().getValue(); if (isNotBlank(securityContextRef)) { response.addHeader(Constants.HEADER_X_SECURITY_CONTEXT, securityContextRef); } @@ -883,7 +953,8 @@ public class RestfulServerUtils { // Ok, we're not serving a binary resource, so apply default encoding if (responseEncoding == null) { - responseEncoding = new ResponseEncoding(theServer.getFhirContext(), theServer.getDefaultResponseEncoding(), null); + responseEncoding = + new ResponseEncoding(theServer.getFhirContext(), theServer.getDefaultResponseEncoding(), null); } boolean encodingDomainResourceAsText = theSummaryMode.size() == 1 && theSummaryMode.contains(SummaryEnum.TEXT); @@ -928,12 +999,15 @@ public class RestfulServerUtils { Writer writer = response.getResponseWriter(theStatusCode, contentType, charset, respondGzip); // Interceptor call: SERVER_OUTGOING_WRITER_CREATED - if (theServer.getInterceptorService() != null && theServer.getInterceptorService().hasHooks(Pointcut.SERVER_OUTGOING_WRITER_CREATED)) { + if (theServer.getInterceptorService() != null + && theServer.getInterceptorService().hasHooks(Pointcut.SERVER_OUTGOING_WRITER_CREATED)) { HookParams params = new HookParams() - .add(Writer.class, 
writer) - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails); - Object newWriter = theServer.getInterceptorService().callHooksAndReturnObject(Pointcut.SERVER_OUTGOING_WRITER_CREATED, params); + .add(Writer.class, writer) + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails); + Object newWriter = theServer + .getInterceptorService() + .callHooksAndReturnObject(Pointcut.SERVER_OUTGOING_WRITER_CREATED, params); if (newWriter != null) { writer = (Writer) newWriter; } @@ -1019,7 +1093,8 @@ public class RestfulServerUtils { public static void validateResourceListNotNull(List theResourceList) { if (theResourceList == null) { - throw new InternalErrorException(Msg.code(306) + "IBundleProvider returned a null list of resources - This is not allowed"); + throw new InternalErrorException( + Msg.code(306) + "IBundleProvider returned a null list of resources - This is not allowed"); } } @@ -1033,7 +1108,8 @@ public class RestfulServerUtils { String[] cascadeParameters = theRequest.getParameters().get(Constants.PARAMETER_CASCADE_DELETE); if (cascadeParameters != null && Arrays.asList(cascadeParameters).contains(Constants.CASCADE_DELETE)) { mode = DeleteCascadeModeEnum.DELETE; - String[] maxRoundsValues = theRequest.getParameters().get(Constants.PARAMETER_CASCADE_DELETE_MAX_ROUNDS); + String[] maxRoundsValues = + theRequest.getParameters().get(Constants.PARAMETER_CASCADE_DELETE_MAX_ROUNDS); if (maxRoundsValues != null && maxRoundsValues.length > 0) { String maxRoundsString = maxRoundsValues[0]; maxRounds = parseMaxRoundsString(maxRoundsString); @@ -1043,14 +1119,17 @@ public class RestfulServerUtils { if (mode == null) { String cascadeHeader = theRequest.getHeader(Constants.HEADER_CASCADE); if (isNotBlank(cascadeHeader)) { - if (Constants.CASCADE_DELETE.equals(cascadeHeader) || cascadeHeader.startsWith(Constants.CASCADE_DELETE + ";") || cascadeHeader.startsWith(Constants.CASCADE_DELETE + " ")) { + if (Constants.CASCADE_DELETE.equals(cascadeHeader) + || cascadeHeader.startsWith(Constants.CASCADE_DELETE + ";") + || cascadeHeader.startsWith(Constants.CASCADE_DELETE + " ")) { mode = DeleteCascadeModeEnum.DELETE; if (cascadeHeader.contains(";")) { String remainder = cascadeHeader.substring(cascadeHeader.indexOf(';') + 1); remainder = trim(remainder); if (remainder.startsWith(Constants.HEADER_CASCADE_MAX_ROUNDS + "=")) { - String maxRoundsString = remainder.substring(Constants.HEADER_CASCADE_MAX_ROUNDS.length() + 1); + String maxRoundsString = + remainder.substring(Constants.HEADER_CASCADE_MAX_ROUNDS.length() + 1); maxRounds = parseMaxRoundsString(maxRoundsString); } } @@ -1074,13 +1153,16 @@ public class RestfulServerUtils { } else if (NumberUtils.isDigits(theMaxRoundsString)) { maxRounds = Integer.parseInt(theMaxRoundsString); } else { - throw new InvalidRequestException(Msg.code(2349) + "Invalid value for " + Constants.PARAMETER_CASCADE_DELETE_MAX_ROUNDS + " parameter"); + throw new InvalidRequestException(Msg.code(2349) + "Invalid value for " + + Constants.PARAMETER_CASCADE_DELETE_MAX_ROUNDS + " parameter"); } return maxRounds; } private enum NarrativeModeEnum { - NORMAL, ONLY, SUPPRESS; + NORMAL, + ONLY, + SUPPRESS; public static NarrativeModeEnum valueOfCaseInsensitive(String theCode) { return valueOf(NarrativeModeEnum.class, theCode.toUpperCase()); @@ -1101,10 +1183,12 @@ public class RestfulServerUtils { myContentType = theContentType; if (theContentType != null) { 
FhirVersionEnum ctxtEnum = theCtx.getVersion().getVersion(); - if (theContentType.equals(EncodingEnum.JSON_PLAIN_STRING) || theContentType.equals(EncodingEnum.XML_PLAIN_STRING)) { + if (theContentType.equals(EncodingEnum.JSON_PLAIN_STRING) + || theContentType.equals(EncodingEnum.XML_PLAIN_STRING)) { myNonLegacy = ctxtEnum.isNewerThan(FhirVersionEnum.DSTU2_1); } else { - myNonLegacy = ctxtEnum.isNewerThan(FhirVersionEnum.DSTU2_1) && !EncodingEnum.isLegacy(theContentType); + myNonLegacy = + ctxtEnum.isNewerThan(FhirVersionEnum.DSTU2_1) && !EncodingEnum.isLegacy(theContentType); } } else { FhirVersionEnum ctxtEnum = theCtx.getVersion().getVersion(); @@ -1154,5 +1238,4 @@ public class RestfulServerUtils { return myMaxRounds; } } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/ServletRequestTracing.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/ServletRequestTracing.java index 16d0faa1ce8..6de5ac23b19 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/ServletRequestTracing.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/ServletRequestTracing.java @@ -33,10 +33,11 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; public class ServletRequestTracing { private static final Logger ourLog = LoggerFactory.getLogger(ServletRequestTracing.class); - public static final String ATTRIBUTE_REQUEST_ID = ServletRequestTracing.class.getName() + '.' + Constants.HEADER_REQUEST_ID; + public static final String ATTRIBUTE_REQUEST_ID = + ServletRequestTracing.class.getName() + '.' + Constants.HEADER_REQUEST_ID; + + ServletRequestTracing() {} - ServletRequestTracing() { } - /** * Assign a tracing id to this request, using * the X-Request-ID if present and compatible. @@ -82,5 +83,4 @@ public class ServletRequestTracing { } return requestId; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/SimpleBundleProvider.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/SimpleBundleProvider.java index 1b4eb63dd32..0f4e9ad4350 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/SimpleBundleProvider.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/SimpleBundleProvider.java @@ -25,10 +25,10 @@ import org.apache.commons.lang3.builder.ToStringBuilder; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nonnull; import java.util.Collections; import java.util.Date; import java.util.List; +import javax.annotation.Nonnull; public class SimpleBundleProvider implements IBundleProvider { @@ -130,7 +130,8 @@ public class SimpleBundleProvider implements IBundleProvider { @Nonnull @Override public List getResources(int theFromIndex, int theToIndex) { - return (List) myList.subList(Math.min(theFromIndex, myList.size()), Math.min(theToIndex, myList.size())); + return (List) + myList.subList(Math.min(theFromIndex, myList.size()), Math.min(theToIndex, myList.size())); } @Override @@ -170,8 +171,6 @@ public class SimpleBundleProvider implements IBundleProvider { @Override public String toString() { - return new ToStringBuilder(this) - .append("mySize", mySize) - .toString(); + return new ToStringBuilder(this).append("mySize", mySize).toString(); } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/BanUnsupportedHttpMethodsInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/BanUnsupportedHttpMethodsInterceptor.java index 7e89fde07e2..a0b07e904ef 
100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/BanUnsupportedHttpMethodsInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/BanUnsupportedHttpMethodsInterceptor.java @@ -20,15 +20,14 @@ package ca.uhn.fhir.rest.server.interceptor; import ca.uhn.fhir.i18n.Msg; -import java.util.HashSet; -import java.util.Set; - -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - import ca.uhn.fhir.rest.api.RequestTypeEnum; import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException; +import java.util.HashSet; +import java.util.Set; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + /** * This interceptor causes the server to reject invocations for HTTP methods * other than those supported by the server with an HTTP 405. This is a requirement @@ -37,25 +36,24 @@ import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException; public class BanUnsupportedHttpMethodsInterceptor extends InterceptorAdapter { private Set myAllowedMethods = new HashSet(); - + public BanUnsupportedHttpMethodsInterceptor() { myAllowedMethods.add(RequestTypeEnum.GET); myAllowedMethods.add(RequestTypeEnum.OPTIONS); myAllowedMethods.add(RequestTypeEnum.DELETE); myAllowedMethods.add(RequestTypeEnum.PUT); - myAllowedMethods.add(RequestTypeEnum.POST); + myAllowedMethods.add(RequestTypeEnum.POST); myAllowedMethods.add(RequestTypeEnum.PATCH); myAllowedMethods.add(RequestTypeEnum.HEAD); } - + @Override public boolean incomingRequestPreProcessed(HttpServletRequest theRequest, HttpServletResponse theResponse) { RequestTypeEnum requestType = RequestTypeEnum.valueOf(theRequest.getMethod()); if (myAllowedMethods.contains(requestType)) { return true; } - + throw new MethodNotAllowedException(Msg.code(329) + "Method not supported: " + theRequest.getMethod()); } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/BaseResponseTerminologyInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/BaseResponseTerminologyInterceptor.java index 3a06bb755ee..cca85d6242b 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/BaseResponseTerminologyInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/BaseResponseTerminologyInterceptor.java @@ -27,9 +27,9 @@ import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.instance.model.api.IBaseResource; -import javax.annotation.Nonnull; import java.util.Collections; import java.util.List; +import javax.annotation.Nonnull; public abstract class BaseResponseTerminologyInterceptor { protected final IValidationSupport myValidationSupport; @@ -52,7 +52,7 @@ public abstract class BaseResponseTerminologyInterceptor { protected List toListForProcessing(RequestDetails theRequestDetails, IBaseResource theResource) { switch (theRequestDetails.getRestOperationType()) { - // Don't apply to these operations + // Don't apply to these operations case ADD_TAGS: case DELETE_TAGS: case GET_TAGS: @@ -74,7 +74,7 @@ public abstract class BaseResponseTerminologyInterceptor { default: return Collections.emptyList(); - // Do apply to these operations + // Do apply to these operations case HISTORY_INSTANCE: case HISTORY_SYSTEM: case HISTORY_TYPE: @@ -93,5 +93,4 @@ public abstract class BaseResponseTerminologyInterceptor { } return resources; } - } diff --git 
a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/BaseValidatingInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/BaseValidatingInterceptor.java index 132039c926f..50c320c7580 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/BaseValidatingInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/BaseValidatingInterceptor.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.rest.server.interceptor; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.api.Interceptor; import ca.uhn.fhir.parser.IParser; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; @@ -96,7 +96,9 @@ public abstract class BaseValidatingInterceptor extends ValidationResultEnric */ public BaseValidatingInterceptor addValidatorModule(IValidatorModule theModule) { Validate.notNull(theModule, "theModule must not be null"); - Validate.isTrue(myValidator == null, "Can not specify both a validator and validator modules. Only one needs to be supplied."); + Validate.isTrue( + myValidator == null, + "Can not specify both a validator and validator modules. Only one needs to be supplied."); if (getValidatorModules() == null) { setValidatorModules(new ArrayList<>()); } @@ -111,11 +113,14 @@ public abstract class BaseValidatingInterceptor extends ValidationResultEnric * @see #setValidatorModules(List) */ public void setValidator(FhirValidator theValidator) { - Validate.isTrue(theValidator == null || getValidatorModules() == null || getValidatorModules().isEmpty(), "Can not specify both a validator and validator modules. Only one needs to be supplied."); + Validate.isTrue( + theValidator == null + || getValidatorModules() == null + || getValidatorModules().isEmpty(), + "Can not specify both a validator and validator modules. Only one needs to be supplied."); myValidator = theValidator; } - abstract ValidationResult doValidate(FhirValidator theValidator, T theRequest); /** @@ -123,7 +128,9 @@ public abstract class BaseValidatingInterceptor extends ValidationResultEnric * Subclasses may change this behaviour by providing alternate behaviour. */ protected void fail(RequestDetails theRequestDetails, ValidationResult theValidationResult) { - throw new UnprocessableEntityException(Msg.code(330) + theValidationResult.getMessages().get(0).getMessage(), theValidationResult.toOperationOutcome()); + throw new UnprocessableEntityException( + Msg.code(330) + theValidationResult.getMessages().get(0).getMessage(), + theValidationResult.toOperationOutcome()); } /** @@ -132,7 +139,9 @@ public abstract class BaseValidatingInterceptor extends ValidationResultEnric * OperationOutcome resource containing the validation results. */ public ResultSeverityEnum getAddResponseOutcomeHeaderOnSeverity() { - return myAddResponseOutcomeHeaderOnSeverity != null ? ResultSeverityEnum.values()[myAddResponseOutcomeHeaderOnSeverity] : null; + return myAddResponseOutcomeHeaderOnSeverity != null + ? ResultSeverityEnum.values()[myAddResponseOutcomeHeaderOnSeverity] + : null; } /** @@ -141,7 +150,8 @@ public abstract class BaseValidatingInterceptor extends ValidationResultEnric * OperationOutcome resource containing the validation results. */ public void setAddResponseOutcomeHeaderOnSeverity(ResultSeverityEnum theAddResponseOutcomeHeaderOnSeverity) { - myAddResponseOutcomeHeaderOnSeverity = theAddResponseOutcomeHeaderOnSeverity != null ? 
theAddResponseOutcomeHeaderOnSeverity.ordinal() : null; + myAddResponseOutcomeHeaderOnSeverity = + theAddResponseOutcomeHeaderOnSeverity != null ? theAddResponseOutcomeHeaderOnSeverity.ordinal() : null; } /** @@ -181,7 +191,9 @@ public abstract class BaseValidatingInterceptor extends ValidationResultEnric } public void setValidatorModules(List theValidatorModules) { - Validate.isTrue(myValidator == null || theValidatorModules == null || theValidatorModules.isEmpty(), "Can not specify both a validator and validator modules. Only one needs to be supplied."); + Validate.isTrue( + myValidator == null || theValidatorModules == null || theValidatorModules.isEmpty(), + "Can not specify both a validator and validator modules. Only one needs to be supplied."); myValidatorModules = theValidatorModules; } @@ -294,14 +306,12 @@ public abstract class BaseValidatingInterceptor extends ValidationResultEnric * Hook for subclasses (e.g. add a tag (coding) to an incoming resource when a given severity appears in the * ValidationResult). */ - protected void postProcessResult(RequestDetails theRequestDetails, ValidationResult theValidationResult) { - } + protected void postProcessResult(RequestDetails theRequestDetails, ValidationResult theValidationResult) {} /** * Hook for subclasses on failure (e.g. add a response header to an incoming resource upon rejection). */ - protected void postProcessResultOnFailure(RequestDetails theRequestDetails, ValidationResult theValidationResult) { - } + protected void postProcessResultOnFailure(RequestDetails theRequestDetails, ValidationResult theValidationResult) {} /** * Note: May return null @@ -357,7 +367,9 @@ public abstract class BaseValidatingInterceptor extends ValidationResultEnric } if (!found) { if (isNotBlank(myResponseIssueHeaderValueNoIssues)) { - theRequestDetails.getResponse().addHeader(myResponseIssueHeaderName, myResponseIssueHeaderValueNoIssues); + theRequestDetails + .getResponse() + .addHeader(myResponseIssueHeaderName, myResponseIssueHeaderValueNoIssues); } } } @@ -380,14 +392,20 @@ public abstract class BaseValidatingInterceptor extends ValidationResultEnric break; } } - if (outcome == null && myAddResponseOutcomeHeaderOnSeverity != null && myAddResponseOutcomeHeaderOnSeverity == ResultSeverityEnum.INFORMATION.ordinal()) { + if (outcome == null + && myAddResponseOutcomeHeaderOnSeverity != null + && myAddResponseOutcomeHeaderOnSeverity == ResultSeverityEnum.INFORMATION.ordinal()) { FhirContext ctx = theRequestDetails.getServer().getFhirContext(); outcome = OperationOutcomeUtil.newInstance(ctx); OperationOutcomeUtil.addIssue(ctx, outcome, "information", "No issues detected", "", "informational"); } if (outcome != null) { - IParser parser = theRequestDetails.getServer().getFhirContext().newJsonParser().setPrettyPrint(false); + IParser parser = theRequestDetails + .getServer() + .getFhirContext() + .newJsonParser() + .setPrettyPrint(false); String encoded = parser.encodeResourceToString(outcome); if (encoded.length() > getMaximumHeaderLength()) { encoded = encoded.substring(0, getMaximumHeaderLength() - 3) + "..."; @@ -433,7 +451,5 @@ public abstract class BaseValidatingInterceptor extends ValidationResultEnric private static String toString(Object theInt) { return theInt != null ? 
theInt.toString() : ""; } - } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/CaptureResourceSourceFromHeaderInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/CaptureResourceSourceFromHeaderInterceptor.java index 514b77b1948..5408da55efc 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/CaptureResourceSourceFromHeaderInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/CaptureResourceSourceFromHeaderInterceptor.java @@ -19,7 +19,6 @@ */ package ca.uhn.fhir.rest.server.interceptor; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.interceptor.api.Hook; import ca.uhn.fhir.interceptor.api.Interceptor; diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ConfigLoader.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ConfigLoader.java index d6fb76d7bcc..b964864e27f 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ConfigLoader.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ConfigLoader.java @@ -35,7 +35,7 @@ public class ConfigLoader { public static final String CLASSPATH = "classpath:"; public static String loadResourceContent(String theResourcePath) { - if(theResourcePath.startsWith(CLASSPATH)) { + if (theResourcePath.startsWith(CLASSPATH)) { theResourcePath = theResourcePath.substring(CLASSPATH.length()); } return ClasspathUtil.loadResource(theResourcePath); @@ -47,7 +47,8 @@ public class ConfigLoader { try { props.load(new StringReader(propsString)); } catch (IOException e) { - throw new RuntimeException(Msg.code(324) + String.format("Unable to load properties at %s", theResourcePath), e); + throw new RuntimeException( + Msg.code(324) + String.format("Unable to load properties at %s", theResourcePath), e); } return props; } @@ -57,8 +58,8 @@ public class ConfigLoader { try { return mapper.readValue(loadResourceContent(theResourcePath), theModelClass); } catch (Exception e) { - throw new RuntimeException(Msg.code(325) + String.format("Unable to parse resource at %s", theResourcePath), e); + throw new RuntimeException( + Msg.code(325) + String.format("Unable to parse resource at %s", theResourcePath), e); } } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/CorsInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/CorsInterceptor.java index f7bb2bb4890..1d928ae0b5b 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/CorsInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/CorsInterceptor.java @@ -20,21 +20,18 @@ package ca.uhn.fhir.rest.server.interceptor; import ca.uhn.fhir.i18n.Msg; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; - -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import org.apache.commons.lang3.Validate; import org.springframework.web.cors.CorsConfiguration; import org.springframework.web.cors.CorsProcessor; import org.springframework.web.cors.CorsUtils; import org.springframework.web.cors.DefaultCorsProcessor; -import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import java.io.IOException; +import java.util.ArrayList; +import javax.servlet.http.HttpServletRequest; +import 
javax.servlet.http.HttpServletResponse; public class CorsInterceptor extends InterceptorAdapter { @@ -120,8 +117,6 @@ public class CorsInterceptor extends InterceptorAdapter { retVal.addAllowedOrigin("*"); - return retVal; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ExceptionHandlingInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ExceptionHandlingInterceptor.java index 97482e5d160..68a8122a99b 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ExceptionHandlingInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ExceptionHandlingInterceptor.java @@ -19,11 +19,28 @@ */ package ca.uhn.fhir.rest.server.interceptor; +import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.i18n.Msg; -import static org.apache.commons.lang3.StringUtils.isNotBlank; -import static org.apache.http.HttpHeaders.CONTENT_ENCODING; +import ca.uhn.fhir.interceptor.api.Hook; +import ca.uhn.fhir.interceptor.api.Interceptor; +import ca.uhn.fhir.interceptor.api.Pointcut; +import ca.uhn.fhir.parser.DataFormatException; +import ca.uhn.fhir.rest.api.Constants; +import ca.uhn.fhir.rest.api.SummaryEnum; +import ca.uhn.fhir.rest.api.server.IRestfulResponse; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.server.RestfulServerUtils; +import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import ca.uhn.fhir.rest.server.exceptions.UnclassifiedServerFailureException; +import ca.uhn.fhir.rest.server.method.BaseResourceReturningMethodBinding; +import ca.uhn.fhir.rest.server.servlet.ServletRestfulResponse; +import ca.uhn.fhir.util.OperationOutcomeUtil; +import org.apache.commons.collections4.map.HashedMap; +import org.apache.commons.lang3.exception.ExceptionUtils; +import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; -import java.io.Closeable; import java.io.IOException; import java.util.Collection; import java.util.Collections; @@ -31,48 +48,35 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; - import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import ca.uhn.fhir.interceptor.api.Hook; -import ca.uhn.fhir.interceptor.api.Interceptor; -import ca.uhn.fhir.interceptor.api.Pointcut; -import ca.uhn.fhir.parser.DataFormatException; -import ca.uhn.fhir.rest.server.RestfulServerUtils; -import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; -import ca.uhn.fhir.rest.server.method.BaseResourceReturningMethodBinding; -import ca.uhn.fhir.rest.server.servlet.ServletRestfulResponse; -import org.apache.commons.collections4.map.HashedMap; -import org.apache.commons.lang3.exception.ExceptionUtils; -import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; - -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.rest.api.Constants; -import ca.uhn.fhir.rest.api.SummaryEnum; -import ca.uhn.fhir.rest.api.server.IRestfulResponse; -import ca.uhn.fhir.rest.api.server.RequestDetails; -import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; -import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; -import ca.uhn.fhir.rest.server.exceptions.UnclassifiedServerFailureException; -import ca.uhn.fhir.util.OperationOutcomeUtil; +import static 
org.apache.commons.lang3.StringUtils.isNotBlank; +import static org.apache.http.HttpHeaders.CONTENT_ENCODING; @Interceptor public class ExceptionHandlingInterceptor { public static final String PROCESSING = Constants.OO_INFOSTATUS_PROCESSING; - private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ExceptionHandlingInterceptor.class); + private static final org.slf4j.Logger ourLog = + org.slf4j.LoggerFactory.getLogger(ExceptionHandlingInterceptor.class); public static final Set SUMMARY_MODE = Collections.singleton(SummaryEnum.FALSE); private Class[] myReturnStackTracesForExceptionTypes; @Hook(Pointcut.SERVER_HANDLE_EXCEPTION) - public boolean handleException(RequestDetails theRequestDetails, BaseServerResponseException theException, HttpServletRequest theRequest, HttpServletResponse theResponse) throws ServletException, IOException { + public boolean handleException( + RequestDetails theRequestDetails, + BaseServerResponseException theException, + HttpServletRequest theRequest, + HttpServletResponse theResponse) + throws ServletException, IOException { handleException(theRequestDetails, theException); return false; } - public Object handleException(RequestDetails theRequestDetails, BaseServerResponseException theException) throws ServletException, IOException { + public Object handleException(RequestDetails theRequestDetails, BaseServerResponseException theException) + throws ServletException, IOException { IRestfulResponse response = theRequestDetails.getResponse(); FhirContext ctx = theRequestDetails.getServer().getFhirContext(); @@ -96,7 +100,7 @@ public class ExceptionHandlingInterceptor { } } } - + String statusMessage = null; if (theException instanceof UnclassifiedServerFailureException) { String sm = theException.getMessage(); @@ -109,10 +113,21 @@ public class ExceptionHandlingInterceptor { try { resetOutputStreamIfPossible(response); } catch (Throwable t) { - ourLog.error("HAPI-FHIR was unable to reset the output stream during exception handling. The root causes follows:", t); + ourLog.error( + "HAPI-FHIR was unable to reset the output stream during exception handling. 
The root causes follows:", + t); } - return RestfulServerUtils.streamResponseAsResource(theRequestDetails.getServer(), oo, SUMMARY_MODE, statusCode, false, false, theRequestDetails, null, null); + return RestfulServerUtils.streamResponseAsResource( + theRequestDetails.getServer(), + oo, + SUMMARY_MODE, + statusCode, + false, + false, + theRequestDetails, + null, + null); } /** @@ -123,7 +138,8 @@ public class ExceptionHandlingInterceptor { private void resetOutputStreamIfPossible(IRestfulResponse response) { if (response.getClass().isAssignableFrom(ServletRestfulResponse.class)) { ServletRestfulResponse servletRestfulResponse = (ServletRestfulResponse) response; - HttpServletResponse servletResponse = servletRestfulResponse.getRequestDetails().getServletResponse(); + HttpServletResponse servletResponse = + servletRestfulResponse.getRequestDetails().getServletResponse(); Collection headerNames = servletResponse.getHeaderNames(); Map> oldHeaders = new HashedMap<>(); for (String headerName : headerNames) { @@ -131,19 +147,24 @@ public class ExceptionHandlingInterceptor { } servletResponse.reset(); - oldHeaders.entrySet().stream().filter(entry -> !entry.getKey().equals(CONTENT_ENCODING)).forEach(entry -> { - entry.getValue().stream().forEach(value -> { - servletResponse.addHeader(entry.getKey(), value); - }); - }); + oldHeaders.entrySet().stream() + .filter(entry -> !entry.getKey().equals(CONTENT_ENCODING)) + .forEach(entry -> { + entry.getValue().stream().forEach(value -> { + servletResponse.addHeader(entry.getKey(), value); + }); + }); } } @Hook(Pointcut.SERVER_PRE_PROCESS_OUTGOING_EXCEPTION) - public BaseServerResponseException preProcessOutgoingException(RequestDetails theRequestDetails, Throwable theException, HttpServletRequest theServletRequest) throws ServletException { + public BaseServerResponseException preProcessOutgoingException( + RequestDetails theRequestDetails, Throwable theException, HttpServletRequest theServletRequest) + throws ServletException { BaseServerResponseException retVal; if (theException instanceof DataFormatException) { - // Wrapping the DataFormatException as an InvalidRequestException so that it gets sent back to the client as a 400 response. + // Wrapping the DataFormatException as an InvalidRequestException so that it gets sent back to the client as + // a 400 response. 
retVal = new InvalidRequestException(theException); } else if (!(theException instanceof BaseServerResponseException)) { retVal = new InternalErrorException(theException); @@ -152,13 +173,15 @@ public class ExceptionHandlingInterceptor { } if (retVal.getOperationOutcome() == null) { - retVal.setOperationOutcome(createOperationOutcome(theException, theRequestDetails.getServer().getFhirContext())); + retVal.setOperationOutcome(createOperationOutcome( + theException, theRequestDetails.getServer().getFhirContext())); } return retVal; } - private IBaseOperationOutcome createOperationOutcome(Throwable theException, FhirContext ctx) throws ServletException { + private IBaseOperationOutcome createOperationOutcome(Throwable theException, FhirContext ctx) + throws ServletException { IBaseOperationOutcome oo = null; if (theException instanceof BaseServerResponseException) { oo = ((BaseServerResponseException) theException).getOperationOutcome(); @@ -183,8 +206,9 @@ public class ExceptionHandlingInterceptor { } else { ourLog.warn("Failure during REST processing", theException); } - - BaseServerResponseException baseServerResponseException = (BaseServerResponseException) theException; + + BaseServerResponseException baseServerResponseException = + (BaseServerResponseException) theException; populateDetails(ctx, theException, oo); if (baseServerResponseException.getAdditionalMessages() != null) { for (String next : baseServerResponseException.getAdditionalMessages()) { @@ -197,7 +221,8 @@ public class ExceptionHandlingInterceptor { } } catch (Exception e1) { ourLog.error("Failed to instantiate OperationOutcome resource instance", e1); - throw new ServletException(Msg.code(328) + "Failed to instantiate OperationOutcome resource instance", e1); + throw new ServletException( + Msg.code(328) + "Failed to instantiate OperationOutcome resource instance", e1); } } else { ourLog.error("Unknown error during processing", theException); @@ -209,7 +234,8 @@ public class ExceptionHandlingInterceptor { if (myReturnStackTracesForExceptionTypes != null) { for (Class next : myReturnStackTracesForExceptionTypes) { if (next.isAssignableFrom(theException.getClass())) { - String detailsValue = theException.getMessage() + "\n\n" + ExceptionUtils.getStackTrace(theException); + String detailsValue = + theException.getMessage() + "\n\n" + ExceptionUtils.getStackTrace(theException); OperationOutcomeUtil.addIssue(theCtx, theOo, "error", detailsValue, null, PROCESSING); return; } @@ -222,7 +248,7 @@ public class ExceptionHandlingInterceptor { /** * If any server methods throw an exception which extends any of the given exception types, the exception stack trace will be returned to the user. This can be useful for helping to diagnose * issues, but may not be desirable for production situations. - * + * * @param theExceptionTypes * The exception types for which to return the stack trace to the user. * @return Returns an instance of this interceptor, to allow for easy method chaining. 
@@ -231,5 +257,4 @@ public class ExceptionHandlingInterceptor { myReturnStackTracesForExceptionTypes = theExceptionTypes; return this; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/FhirPathFilterInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/FhirPathFilterInterceptor.java index 8538396c431..01d8fec8091 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/FhirPathFilterInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/FhirPathFilterInterceptor.java @@ -19,10 +19,10 @@ */ package ca.uhn.fhir.rest.server.interceptor; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.fhirpath.FhirPathExecutionException; import ca.uhn.fhir.fhirpath.IFhirPath; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.api.Hook; import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.rest.api.Constants; @@ -69,7 +69,8 @@ public class FhirPathFilterInterceptor { try { outputs = fhirPath.evaluate(responseResource, expression, IBase.class); } catch (FhirPathExecutionException e) { - throw new InvalidRequestException(Msg.code(327) + "Error parsing FHIRPath expression: " + e.getMessage()); + throw new InvalidRequestException( + Msg.code(327) + "Error parsing FHIRPath expression: " + e.getMessage()); } for (IBase nextOutput : outputs) { @@ -86,5 +87,4 @@ public class FhirPathFilterInterceptor { } } } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/IServerInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/IServerInterceptor.java index e2fab0cbb08..c8e8b63c769 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/IServerInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/IServerInterceptor.java @@ -19,36 +19,24 @@ */ package ca.uhn.fhir.rest.server.interceptor; -import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.interceptor.api.Hook; -import ca.uhn.fhir.interceptor.api.HookParams; -import ca.uhn.fhir.interceptor.api.IInterceptorService; import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.model.api.TagList; import ca.uhn.fhir.model.base.resource.BaseOperationOutcome; import ca.uhn.fhir.rest.annotation.Read; -import ca.uhn.fhir.rest.annotation.ResourceParam; import ca.uhn.fhir.rest.annotation.Search; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.ResponseDetails; -import ca.uhn.fhir.rest.server.IRestfulServerDefaults; import ca.uhn.fhir.rest.server.exceptions.AuthenticationException; import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; -import org.apache.commons.lang3.builder.ToStringBuilder; -import org.apache.commons.lang3.builder.ToStringStyle; import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.instance.model.api.IIdType; +import java.io.IOException; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import java.io.IOException; -import java.util.Collections; -import java.util.Map; - -import static org.apache.commons.lang3.StringUtils.isBlank; /** * Provides methods to intercept requests and responses. 
Note that implementations of this interface may wish to use @@ -91,8 +79,12 @@ public interface IServerInterceptor { * @throws IOException If this exception is thrown, it will be re-thrown up to the container for handling. */ @Hook(Pointcut.SERVER_HANDLE_EXCEPTION) - boolean handleException(RequestDetails theRequestDetails, BaseServerResponseException theException, HttpServletRequest theServletRequest, HttpServletResponse theServletResponse) - throws ServletException, IOException; + boolean handleException( + RequestDetails theRequestDetails, + BaseServerResponseException theException, + HttpServletRequest theServletRequest, + HttpServletResponse theServletResponse) + throws ServletException, IOException; /** * This method is called just before the actual implementing server method is invoked. @@ -112,7 +104,9 @@ public interface IServerInterceptor { * attempt. If thrown, processing will stop and an HTTP 401 will be returned to the client. */ @Hook(Pointcut.SERVER_INCOMING_REQUEST_POST_PROCESSED) - boolean incomingRequestPostProcessed(RequestDetails theRequestDetails, HttpServletRequest theRequest, HttpServletResponse theResponse) throws AuthenticationException; + boolean incomingRequestPostProcessed( + RequestDetails theRequestDetails, HttpServletRequest theRequest, HttpServletResponse theResponse) + throws AuthenticationException; /** * Invoked before an incoming request is processed. Note that this method is called @@ -167,7 +161,11 @@ public interface IServerInterceptor { */ @Deprecated @Hook(Pointcut.SERVER_OUTGOING_RESPONSE) - boolean outgoingResponse(RequestDetails theRequestDetails, HttpServletRequest theServletRequest, HttpServletResponse theServletResponse) throws AuthenticationException; + boolean outgoingResponse( + RequestDetails theRequestDetails, + HttpServletRequest theServletRequest, + HttpServletResponse theServletResponse) + throws AuthenticationException; /** * Use {@link #outgoingResponse(RequestDetails, IBaseResource, HttpServletRequest, HttpServletResponse)} instead @@ -203,8 +201,12 @@ public interface IServerInterceptor { */ @Deprecated @Hook(Pointcut.SERVER_OUTGOING_RESPONSE) - boolean outgoingResponse(RequestDetails theRequestDetails, IBaseResource theResponseObject, HttpServletRequest theServletRequest, HttpServletResponse theServletResponse) - throws AuthenticationException; + boolean outgoingResponse( + RequestDetails theRequestDetails, + IBaseResource theResponseObject, + HttpServletRequest theServletRequest, + HttpServletResponse theServletResponse) + throws AuthenticationException; /** * This method is called after the server implementation method has been called, but before any attempt to stream the @@ -227,9 +229,12 @@ public interface IServerInterceptor { * attempt. If thrown, processing will stop and an HTTP 401 will be returned to the client. 
*/ @Hook(Pointcut.SERVER_OUTGOING_RESPONSE) - boolean outgoingResponse(RequestDetails theRequestDetails, ResponseDetails theResponseDetails, HttpServletRequest theServletRequest, HttpServletResponse theServletResponse) - throws AuthenticationException; - + boolean outgoingResponse( + RequestDetails theRequestDetails, + ResponseDetails theResponseDetails, + HttpServletRequest theServletRequest, + HttpServletResponse theServletResponse) + throws AuthenticationException; /** * Use {@link #outgoingResponse(RequestDetails, IBaseResource, HttpServletRequest, HttpServletResponse)} instead @@ -245,7 +250,12 @@ public interface IServerInterceptor { * @deprecated As of HAPI FHIR 3.2.0, this method is deprecated and will be removed in a future version of HAPI FHIR. */ @Deprecated - boolean outgoingResponse(RequestDetails theRequestDetails, TagList theResponseObject, HttpServletRequest theServletRequest, HttpServletResponse theServletResponse) throws AuthenticationException; + boolean outgoingResponse( + RequestDetails theRequestDetails, + TagList theResponseObject, + HttpServletRequest theServletRequest, + HttpServletResponse theServletResponse) + throws AuthenticationException; /** * This method is called upon any exception being thrown within the server's request processing code. This includes @@ -270,7 +280,9 @@ public interface IServerInterceptor { * should return an exception. */ @Hook(Pointcut.SERVER_PRE_PROCESS_OUTGOING_EXCEPTION) - BaseServerResponseException preProcessOutgoingException(RequestDetails theRequestDetails, Throwable theException, HttpServletRequest theServletRequest) throws ServletException; + BaseServerResponseException preProcessOutgoingException( + RequestDetails theRequestDetails, Throwable theException, HttpServletRequest theServletRequest) + throws ServletException; /** * This method is called after all processing is completed for a request, but only if the @@ -288,5 +300,4 @@ public interface IServerInterceptor { */ @Hook(Pointcut.SERVER_PROCESSING_COMPLETED_NORMALLY) void processingCompletedNormally(ServletRequestDetails theRequestDetails); - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/IServerOperationInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/IServerOperationInterceptor.java index 38571116edf..e27d3e46d3e 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/IServerOperationInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/IServerOperationInterceptor.java @@ -131,5 +131,4 @@ public interface IServerOperationInterceptor extends IServerInterceptor { */ @Hook(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED) void resourceUpdated(RequestDetails theRequest, IBaseResource theOldResource, IBaseResource theNewResource); - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/InteractionBlockingInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/InteractionBlockingInterceptor.java index c82817384be..a003ab59f14 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/InteractionBlockingInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/InteractionBlockingInterceptor.java @@ -30,11 +30,11 @@ import org.apache.commons.lang3.Validate; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; import java.util.Collections; import java.util.HashSet; import java.util.Set; import 
java.util.TreeSet; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -138,14 +138,17 @@ public class InteractionBlockingInterceptor { } if (!allowed) { - ourLog.info("Skipping method binding for {}:{} provided by {}", resourceName, restOperationType, theMethodBinding.getMethod()); + ourLog.info( + "Skipping method binding for {}:{} provided by {}", + resourceName, + restOperationType, + theMethodBinding.getMethod()); return null; } return theMethodBinding; } - private static String toKey(String theResourceType, RestOperationTypeEnum theRestOperationTypeEnum) { if (isBlank(theResourceType)) { return theRestOperationTypeEnum.getCode(); @@ -153,7 +156,6 @@ public class InteractionBlockingInterceptor { return theResourceType + ":" + theRestOperationTypeEnum.getCode(); } - public static class Builder { private final Set myAllowedKeys = new HashSet<>(); @@ -226,7 +228,10 @@ public class InteractionBlockingInterceptor { private void addAllowedInteraction(String theResourceType, RestOperationTypeEnum theInteractionType) { Validate.notBlank(theResourceType, "theResourceType must not be null or blank"); Validate.notNull(theInteractionType, "theInteractionType must not be null"); - Validate.isTrue(ALLOWED_OP_TYPES.contains(theInteractionType), "Operation type %s can not be used as an allowable rule", theInteractionType); + Validate.isTrue( + ALLOWED_OP_TYPES.contains(theInteractionType), + "Operation type %s can not be used as an allowable rule", + theInteractionType); Validate.isTrue(myCtx.getResourceType(theResourceType) != null, "Unknown resource type: %s"); String key = toKey(theResourceType, theInteractionType); myAllowedKeys.add(key); @@ -241,9 +246,5 @@ public class InteractionBlockingInterceptor { public InteractionBlockingInterceptor build() { return new InteractionBlockingInterceptor(this); } - - } - - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/InterceptorAdapter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/InterceptorAdapter.java index 6476c53625e..a736b12725f 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/InterceptorAdapter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/InterceptorAdapter.java @@ -28,10 +28,10 @@ import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import org.hl7.fhir.instance.model.api.IBaseResource; +import java.io.IOException; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import java.io.IOException; /** * Base class for {@link IServerInterceptor} implementations. 
Provides a No-op implementation @@ -40,13 +40,19 @@ import java.io.IOException; public class InterceptorAdapter implements IServerInterceptor { @Override - public boolean handleException(RequestDetails theRequestDetails, BaseServerResponseException theException, HttpServletRequest theServletRequest, HttpServletResponse theServletResponse) - throws ServletException, IOException { + public boolean handleException( + RequestDetails theRequestDetails, + BaseServerResponseException theException, + HttpServletRequest theServletRequest, + HttpServletResponse theServletResponse) + throws ServletException, IOException { return true; } @Override - public boolean incomingRequestPostProcessed(RequestDetails theRequestDetails, HttpServletRequest theRequest, HttpServletResponse theResponse) throws AuthenticationException { + public boolean incomingRequestPostProcessed( + RequestDetails theRequestDetails, HttpServletRequest theRequest, HttpServletResponse theResponse) + throws AuthenticationException { return true; } @@ -67,7 +73,11 @@ public class InterceptorAdapter implements IServerInterceptor { } @Override - public boolean outgoingResponse(RequestDetails theRequestDetails, HttpServletRequest theServletRequest, HttpServletResponse theServletResponse) throws AuthenticationException { + public boolean outgoingResponse( + RequestDetails theRequestDetails, + HttpServletRequest theServletRequest, + HttpServletResponse theServletResponse) + throws AuthenticationException { return true; } @@ -78,13 +88,22 @@ public class InterceptorAdapter implements IServerInterceptor { } @Override - public boolean outgoingResponse(RequestDetails theRequestDetails, IBaseResource theResponseObject, HttpServletRequest theServletRequest, HttpServletResponse theServletResponse) - throws AuthenticationException { + public boolean outgoingResponse( + RequestDetails theRequestDetails, + IBaseResource theResponseObject, + HttpServletRequest theServletRequest, + HttpServletResponse theServletResponse) + throws AuthenticationException { return true; } @Override - public boolean outgoingResponse(RequestDetails theRequestDetails, ResponseDetails theResponseDetails, HttpServletRequest theServletRequest, HttpServletResponse theServletResponse) throws AuthenticationException { + public boolean outgoingResponse( + RequestDetails theRequestDetails, + ResponseDetails theResponseDetails, + HttpServletRequest theServletRequest, + HttpServletResponse theServletResponse) + throws AuthenticationException { return true; } @@ -95,13 +114,19 @@ public class InterceptorAdapter implements IServerInterceptor { } @Override - public boolean outgoingResponse(RequestDetails theRequestDetails, TagList theResponseObject, HttpServletRequest theServletRequest, HttpServletResponse theServletResponse) - throws AuthenticationException { + public boolean outgoingResponse( + RequestDetails theRequestDetails, + TagList theResponseObject, + HttpServletRequest theServletRequest, + HttpServletResponse theServletResponse) + throws AuthenticationException { return true; } @Override - public BaseServerResponseException preProcessOutgoingException(RequestDetails theRequestDetails, Throwable theException, HttpServletRequest theServletRequest) throws ServletException { + public BaseServerResponseException preProcessOutgoingException( + RequestDetails theRequestDetails, Throwable theException, HttpServletRequest theServletRequest) + throws ServletException { return null; } @@ -109,5 +134,4 @@ public class InterceptorAdapter implements IServerInterceptor { public void 
processingCompletedNormally(ServletRequestDetails theRequestDetails) { // nothing } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/InterceptorOrders.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/InterceptorOrders.java index 34eca7f5c4d..26715159fc3 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/InterceptorOrders.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/InterceptorOrders.java @@ -34,5 +34,4 @@ public class InterceptorOrders { private InterceptorOrders() { // nothing } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/LoggingInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/LoggingInterceptor.java index 2584640cd12..0a9435c84f1 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/LoggingInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/LoggingInterceptor.java @@ -38,12 +38,12 @@ import org.apache.commons.text.lookup.StringLookup; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.servlet.ServletException; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.util.Date; import java.util.Map.Entry; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -139,7 +139,7 @@ public class LoggingInterceptor { public LoggingInterceptor() { super(); } - + /** * Get the log message format to be used when logging exceptions */ @@ -148,7 +148,12 @@ public class LoggingInterceptor { } @Hook(Pointcut.SERVER_HANDLE_EXCEPTION) - public boolean handleException(RequestDetails theRequestDetails, BaseServerResponseException theException, HttpServletRequest theServletRequest, HttpServletResponse theServletResponse) throws ServletException, IOException { + public boolean handleException( + RequestDetails theRequestDetails, + BaseServerResponseException theException, + HttpServletRequest theServletRequest, + HttpServletResponse theServletResponse) + throws ServletException, IOException { if (myLogExceptions) { // Perform any string substitutions from the message format StringLookup lookup = new MyLookup(theServletRequest, theException, theRequestDetails); @@ -157,12 +162,10 @@ public class LoggingInterceptor { // Actuall log the line String line = subs.replace(myErrorMessageFormat); myLogger.info(line); - } return true; } - @Hook(Pointcut.SERVER_PROCESSING_COMPLETED_NORMALLY) public void processingCompletedNormally(ServletRequestDetails theRequestDetails) { // Perform any string substitutions from the message format @@ -204,7 +207,6 @@ public class LoggingInterceptor { public void setLoggerName(String theLoggerName) { Validate.notBlank(theLoggerName, "Logger name can not be null/empty"); myLogger = LoggerFactory.getLogger(theLoggerName); - } /** @@ -227,7 +229,10 @@ public class LoggingInterceptor { myException = null; } - MyLookup(HttpServletRequest theServletRequest, BaseServerResponseException theException, RequestDetails theRequestDetails) { + MyLookup( + HttpServletRequest theServletRequest, + BaseServerResponseException theException, + RequestDetails theRequestDetails) { myException = theException; myRequestDetails = theRequestDetails; myRequest = theServletRequest; @@ -248,15 +253,15 @@ public class 
LoggingInterceptor { } else if ("operationName".equals(theKey)) { if (myRequestDetails.getRestOperationType() != null) { switch (myRequestDetails.getRestOperationType()) { - case EXTENDED_OPERATION_INSTANCE: - case EXTENDED_OPERATION_SERVER: - case EXTENDED_OPERATION_TYPE: - return myRequestDetails.getOperation(); - default: - return ""; + case EXTENDED_OPERATION_INSTANCE: + case EXTENDED_OPERATION_SERVER: + case EXTENDED_OPERATION_TYPE: + return myRequestDetails.getOperation(); + default: + return ""; } } - return ""; + return ""; } else if ("id".equals(theKey)) { if (myRequestDetails.getId() != null) { return myRequestDetails.getId().getValue(); @@ -274,7 +279,8 @@ public class LoggingInterceptor { return ""; } else if (theKey.equals("requestParameters")) { StringBuilder b = new StringBuilder(); - for (Entry next : myRequestDetails.getParameters().entrySet()) { + for (Entry next : + myRequestDetails.getParameters().entrySet()) { for (String nextValue : next.getValue()) { if (b.length() == 0) { b.append('?'); @@ -293,7 +299,8 @@ public class LoggingInterceptor { } else if (theKey.startsWith("remoteAddr")) { return StringUtils.defaultString(myRequest.getRemoteAddr()); } else if (theKey.equals("responseEncodingNoDefault")) { - ResponseEncoding encoding = RestfulServerUtils.determineResponseEncodingNoDefault(myRequestDetails, myRequestDetails.getServer().getDefaultResponseEncoding()); + ResponseEncoding encoding = RestfulServerUtils.determineResponseEncodingNoDefault( + myRequestDetails, myRequestDetails.getServer().getDefaultResponseEncoding()); if (encoding != null) { return encoding.getEncoding().name(); } @@ -333,5 +340,4 @@ public class LoggingInterceptor { return "!VAL!"; } } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/RequestValidatingInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/RequestValidatingInterceptor.java index 6b1febcc1b6..69208893551 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/RequestValidatingInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/RequestValidatingInterceptor.java @@ -31,9 +31,9 @@ import ca.uhn.fhir.validation.FhirValidator; import ca.uhn.fhir.validation.ResultSeverityEnum; import ca.uhn.fhir.validation.ValidationResult; +import java.nio.charset.Charset; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import java.nio.charset.Charset; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -48,7 +48,9 @@ public class RequestValidatingInterceptor extends BaseValidatingInterceptorReturn: prefer=representation) */ - public void setAddValidationResultsToResponseOperationOutcome(boolean theAddValidationResultsToResponseOperationOutcome) { + public void setAddValidationResultsToResponseOperationOutcome( + boolean theAddValidationResultsToResponseOperationOutcome) { myAddValidationResultsToResponseOperationOutcome = theAddValidationResultsToResponseOperationOutcome; } @@ -118,5 +123,4 @@ public class RequestValidatingInterceptor extends BaseValidatingInterceptor"); i += 5; inQuote = false; @@ -212,7 +217,12 @@ public class ResponseHighlighterInterceptor { theTarget.append(nextChar); theTarget.append(""); inValue = false; - } else if (nextChar == '&' && nextChar2 == 'q' && nextChar3 == 'u' && nextChar4 == 'o' && nextChar5 == 't' && nextChar6 == ';') { + } else if (nextChar == '&' + && nextChar2 == 'q' + && nextChar3 == 'u' + && nextChar4 == 'o' + && nextChar5 == 't' 
+ && nextChar6 == ';') { if (inValue) { theTarget.append("""); } else { @@ -234,7 +244,13 @@ public class ResponseHighlighterInterceptor { if (inQuote) { theTarget.append(nextChar); - if (prevChar != '\\' && nextChar == '&' && nextChar2 == 'q' && nextChar3 == 'u' && nextChar4 == 'o' && nextChar5 == 't' && nextChar6 == ';') { + if (prevChar != '\\' + && nextChar == '&' + && nextChar2 == 'q' + && nextChar3 == 'u' + && nextChar4 == 'o' + && nextChar5 == 't' + && nextChar6 == ';') { theTarget.append("quot;"); i += 5; inQuote = false; @@ -256,7 +272,12 @@ public class ResponseHighlighterInterceptor { theTarget.append(nextChar); theTarget.append(""); } else { - if (nextChar == '&' && nextChar2 == 'q' && nextChar3 == 'u' && nextChar4 == 'o' && nextChar5 == 't' && nextChar6 == ';') { + if (nextChar == '&' + && nextChar2 == 'q' + && nextChar3 == 'u' + && nextChar4 == 'o' + && nextChar5 == 't' + && nextChar6 == ';') { theTarget.append("""); inQuote = true; i += 5; @@ -271,7 +292,12 @@ public class ResponseHighlighterInterceptor { if (inQuote) { theTarget.append(nextChar); - if (nextChar == '&' && nextChar2 == 'q' && nextChar3 == 'u' && nextChar4 == 'o' && nextChar5 == 't' && nextChar6 == ';') { + if (nextChar == '&' + && nextChar2 == 'q' + && nextChar3 == 'u' + && nextChar4 == 'o' + && nextChar5 == 't' + && nextChar6 == ';') { theTarget.append("quot;"); i += 5; inQuote = false; @@ -284,7 +310,12 @@ public class ResponseHighlighterInterceptor { } else if (nextChar == ' ') { theTarget.append(""); theTarget.append(nextChar); - } else if (nextChar == '&' && nextChar2 == 'q' && nextChar3 == 'u' && nextChar4 == 'o' && nextChar5 == 't' && nextChar6 == ';') { + } else if (nextChar == '&' + && nextChar2 == 'q' + && nextChar3 == 'u' + && nextChar4 == 'o' + && nextChar5 == 't' + && nextChar6 == ';') { theTarget.append("""); inQuote = true; i += 5; @@ -308,7 +339,11 @@ public class ResponseHighlighterInterceptor { } @Hook(value = Pointcut.SERVER_HANDLE_EXCEPTION, order = InterceptorOrders.RESPONSE_HIGHLIGHTER_INTERCEPTOR) - public boolean handleException(RequestDetails theRequestDetails, BaseServerResponseException theException, HttpServletRequest theServletRequest, HttpServletResponse theServletResponse) { + public boolean handleException( + RequestDetails theRequestDetails, + BaseServerResponseException theException, + HttpServletRequest theServletRequest, + HttpServletResponse theServletResponse) { /* * It's not a browser... 
*/ @@ -342,7 +377,13 @@ public class ResponseHighlighterInterceptor { responseDetails.setResponseCode(theException.getStatusCode()); BaseResourceReturningMethodBinding.callOutgoingFailureOperationOutcomeHook(theRequestDetails, oo); - streamResponse(theRequestDetails, theServletResponse, responseDetails.getResponseResource(), null, theServletRequest, responseDetails.getResponseCode()); + streamResponse( + theRequestDetails, + theServletResponse, + responseDetails.getResponseResource(), + null, + theServletRequest, + responseDetails.getResponseCode()); return false; } @@ -388,8 +429,13 @@ public class ResponseHighlighterInterceptor { } @Hook(value = Pointcut.SERVER_OUTGOING_GRAPHQL_RESPONSE, order = InterceptorOrders.RESPONSE_HIGHLIGHTER_INTERCEPTOR) - public boolean outgoingGraphqlResponse(RequestDetails theRequestDetails, String theRequest, String theResponse, HttpServletRequest theServletRequest, HttpServletResponse theServletResponse) - throws AuthenticationException { + public boolean outgoingGraphqlResponse( + RequestDetails theRequestDetails, + String theRequest, + String theResponse, + HttpServletRequest theServletRequest, + HttpServletResponse theServletResponse) + throws AuthenticationException { /* * Return true here so that we still fire SERVER_OUTGOING_GRAPHQL_RESPONSE! @@ -405,13 +451,23 @@ public class ResponseHighlighterInterceptor { } @Hook(value = Pointcut.SERVER_OUTGOING_RESPONSE, order = InterceptorOrders.RESPONSE_HIGHLIGHTER_INTERCEPTOR) - public boolean outgoingResponse(RequestDetails theRequestDetails, ResponseDetails theResponseObject, HttpServletRequest theServletRequest, HttpServletResponse theServletResponse) - throws AuthenticationException { + public boolean outgoingResponse( + RequestDetails theRequestDetails, + ResponseDetails theResponseObject, + HttpServletRequest theServletRequest, + HttpServletResponse theServletResponse) + throws AuthenticationException { if (!Boolean.TRUE.equals(theRequestDetails.getAttribute("ResponseHighlighterInterceptorHandled"))) { String graphqlResponse = null; IBaseResource resourceResponse = theResponseObject.getResponseResource(); - if (handleOutgoingResponse(theRequestDetails, theResponseObject, theServletRequest, theServletResponse, graphqlResponse, resourceResponse)) { + if (handleOutgoingResponse( + theRequestDetails, + theResponseObject, + theServletRequest, + theServletResponse, + graphqlResponse, + resourceResponse)) { return true; } } @@ -420,25 +476,39 @@ public class ResponseHighlighterInterceptor { } @Hook(Pointcut.SERVER_CAPABILITY_STATEMENT_GENERATED) - public void capabilityStatementGenerated(RequestDetails theRequestDetails, IBaseConformance theCapabilityStatement) { + public void capabilityStatementGenerated( + RequestDetails theRequestDetails, IBaseConformance theCapabilityStatement) { FhirTerser terser = theRequestDetails.getFhirContext().newTerser(); - Set formats = terser.getValues(theCapabilityStatement, "format", IPrimitiveType.class) - .stream() - .map(t -> t.getValueAsString()) - .collect(Collectors.toSet()); - addFormatConditionally(theCapabilityStatement, terser, formats, Constants.CT_FHIR_JSON_NEW, Constants.FORMATS_HTML_JSON); - addFormatConditionally(theCapabilityStatement, terser, formats, Constants.CT_FHIR_XML_NEW, Constants.FORMATS_HTML_XML); - addFormatConditionally(theCapabilityStatement, terser, formats, Constants.CT_RDF_TURTLE, Constants.FORMATS_HTML_TTL); + Set formats = terser.getValues(theCapabilityStatement, "format", IPrimitiveType.class).stream() + .map(t -> t.getValueAsString()) + 
.collect(Collectors.toSet()); + addFormatConditionally( + theCapabilityStatement, terser, formats, Constants.CT_FHIR_JSON_NEW, Constants.FORMATS_HTML_JSON); + addFormatConditionally( + theCapabilityStatement, terser, formats, Constants.CT_FHIR_XML_NEW, Constants.FORMATS_HTML_XML); + addFormatConditionally( + theCapabilityStatement, terser, formats, Constants.CT_RDF_TURTLE, Constants.FORMATS_HTML_TTL); } - private void addFormatConditionally(IBaseConformance theCapabilityStatement, FhirTerser terser, Set formats, String wanted, String toAdd) { + private void addFormatConditionally( + IBaseConformance theCapabilityStatement, + FhirTerser terser, + Set formats, + String wanted, + String toAdd) { if (formats.contains(wanted)) { terser.addElement(theCapabilityStatement, "format", toAdd); } } - private boolean handleOutgoingResponse(RequestDetails theRequestDetails, ResponseDetails theResponseObject, HttpServletRequest theServletRequest, HttpServletResponse theServletResponse, String theGraphqlResponse, IBaseResource theResourceResponse) { + private boolean handleOutgoingResponse( + RequestDetails theRequestDetails, + ResponseDetails theResponseObject, + HttpServletRequest theServletRequest, + HttpServletResponse theServletResponse, + String theGraphqlResponse, + IBaseResource theResourceResponse) { if (theResourceResponse == null && theGraphqlResponse == null) { // this will happen during, for example, a bulk export polling request return true; @@ -448,7 +518,8 @@ public class ResponseHighlighterInterceptor { */ String[] rawParamValues = theRequestDetails.getParameters().get(PARAM_RAW); if (rawParamValues != null && rawParamValues.length > 0 && rawParamValues[0].equals(PARAM_RAW_TRUE)) { - ourLog.warn("Client is using non-standard/legacy _raw parameter - Use _format=json or _format=xml instead, as this parmameter will be removed at some point"); + ourLog.warn( + "Client is using non-standard/legacy _raw parameter - Use _format=json or _format=xml instead, as this parmameter will be removed at some point"); return true; } @@ -481,7 +552,8 @@ public class ResponseHighlighterInterceptor { /* * It's not a browser... 
*/ - Set highestRankedAcceptValues = RestfulServerUtils.parseAcceptHeaderAndReturnHighestRankedOptions(theServletRequest); + Set highestRankedAcceptValues = + RestfulServerUtils.parseAcceptHeaderAndReturnHighestRankedOptions(theServletRequest); if (!force && highestRankedAcceptValues.contains(Constants.CT_HTML) == false) { return true; } @@ -513,7 +585,8 @@ public class ResponseHighlighterInterceptor { return true; } - streamResponse(theRequestDetails, theServletResponse, theResourceResponse, theGraphqlResponse, theServletRequest, 200); + streamResponse( + theRequestDetails, theServletResponse, theResourceResponse, theGraphqlResponse, theServletRequest, 200); return false; } @@ -535,7 +608,13 @@ public class ResponseHighlighterInterceptor { } } - private void streamResponse(RequestDetails theRequestDetails, HttpServletResponse theServletResponse, IBaseResource theResource, String theGraphqlResponse, ServletRequest theServletRequest, int theStatusCode) { + private void streamResponse( + RequestDetails theRequestDetails, + HttpServletResponse theServletResponse, + IBaseResource theResource, + String theGraphqlResponse, + ServletRequest theServletRequest, + int theStatusCode) { EncodingEnum encoding; String encoded; Map parameters = theRequestDetails.getParameters(); @@ -550,10 +629,13 @@ public class ResponseHighlighterInterceptor { IParser p; if (parameters.containsKey(Constants.PARAM_FORMAT)) { FhirVersionEnum forVersion = theResource.getStructureFhirVersionEnum(); - p = RestfulServerUtils.getNewParser(theRequestDetails.getServer().getFhirContext(), forVersion, theRequestDetails); + p = RestfulServerUtils.getNewParser( + theRequestDetails.getServer().getFhirContext(), forVersion, theRequestDetails); } else { - EncodingEnum defaultResponseEncoding = theRequestDetails.getServer().getDefaultResponseEncoding(); - p = defaultResponseEncoding.newParser(theRequestDetails.getServer().getFhirContext()); + EncodingEnum defaultResponseEncoding = + theRequestDetails.getServer().getDefaultResponseEncoding(); + p = defaultResponseEncoding.newParser( + theRequestDetails.getServer().getFhirContext()); RestfulServerUtils.configureResponseParser(theRequestDetails, p); } @@ -572,7 +654,6 @@ public class ResponseHighlighterInterceptor { encoding = p.getEncoding(); encoded = p.encodeResourceToString(theResource); - } if (theRequestDetails.getServer() instanceof RestfulServer) { @@ -592,7 +673,8 @@ public class ResponseHighlighterInterceptor { outputBuffer.append(" \n"); outputBuffer.append(" \n"); outputBuffer.append(" \n"); outputBuffer.append(" \n"); outputBuffer.append("\n"); @@ -728,7 +810,9 @@ public class ResponseHighlighterInterceptor { outputBuffer.append("\n"); InputStream jsStream = ResponseHighlighterInterceptor.class.getResourceAsStream("ResponseHighlighter.js"); - String jsStr = jsStream != null ? IOUtils.toString(jsStream, StandardCharsets.UTF_8) : "console.log('ResponseHighlighterInterceptor: javascript theResource not found')"; + String jsStr = jsStream != null + ? 
IOUtils.toString(jsStream, StandardCharsets.UTF_8) + : "console.log('ResponseHighlighterInterceptor: javascript theResource not found')"; String baseUrl = theRequestDetails.getServerBaseForRequest(); @@ -754,7 +838,6 @@ public class ResponseHighlighterInterceptor { theServletResponse.getWriter().append(writeSw.toString()); theServletResponse.getWriter().append(""); - theServletResponse.getWriter().append(""); theServletResponse.getWriter().append(""); @@ -780,7 +863,8 @@ public class ResponseHighlighterInterceptor { // If the resource is a document, see if the Composition has a narrative if (xhtmlNode == null && "Bundle".equals(ctx.getResourceType(theResource))) { if ("document".equals(ctx.newTerser().getSinglePrimitiveValueOrNull(theResource, "type"))) { - IBaseResource firstResource = ctx.newTerser().getSingleValueOrNull(theResource, "entry.resource", IBaseResource.class); + IBaseResource firstResource = + ctx.newTerser().getSingleValueOrNull(theResource, "entry.resource", IBaseResource.class); if (firstResource != null && "Composition".equals(ctx.getResourceType(firstResource))) { xhtmlNode = extractNarrativeFromDomainResource(firstResource, ctx); } @@ -792,7 +876,8 @@ public class ResponseHighlighterInterceptor { if (xhtmlNode == null && "Parameters".equals(ctx.getResourceType(theResource))) { String firstParameterName = ctx.newTerser().getSinglePrimitiveValueOrNull(theResource, "parameter.name"); if ("Narrative".equals(firstParameterName)) { - String firstParameterValue = ctx.newTerser().getSinglePrimitiveValueOrNull(theResource, "parameter.value[x]"); + String firstParameterValue = + ctx.newTerser().getSinglePrimitiveValueOrNull(theResource, "parameter.value[x]"); if (defaultString(firstParameterValue).startsWith("Response Headers"); @@ -834,7 +920,8 @@ public class ResponseHighlighterInterceptor { * actually returning an HTML one */ if (nextHeaderName.equalsIgnoreCase(Constants.HEADER_CONTENT_TYPE)) { - ResponseEncoding responseEncoding = RestfulServerUtils.determineResponseEncodingNoDefault(theRequestDetails, theRequestDetails.getServer().getDefaultResponseEncoding()); + ResponseEncoding responseEncoding = RestfulServerUtils.determineResponseEncodingNoDefault( + theRequestDetails, theRequestDetails.getServer().getDefaultResponseEncoding()); if (responseEncoding != null && isNotBlank(responseEncoding.getResourceContentType())) { nextHeaderValue = responseEncoding.getResourceContentType() + ";charset=utf-8"; } @@ -856,7 +943,11 @@ public class ResponseHighlighterInterceptor { private void appendHeader(StringBuilder theBuilder, String theHeaderName, String theHeaderValue) { theBuilder.append("
    "); - theBuilder.append("").append(theHeaderName).append(": ").append(""); + theBuilder + .append("") + .append(theHeaderName) + .append(": ") + .append(""); theBuilder.append("").append(theHeaderValue).append(""); theBuilder.append("
    "); } @@ -888,7 +979,6 @@ public class ResponseHighlighterInterceptor { * @return Should the narrative be rendered? * @since 6.6.0 */ - public boolean isShowNarrative() { return myShowNarrative; } @@ -927,9 +1017,10 @@ public class ResponseHighlighterInterceptor { @Nullable private static XhtmlNode extractNarrativeFromDomainResource(@Nonnull IBaseResource theResource, FhirContext ctx) { if (ctx.getResourceDefinition(theResource).getChildByName("text") != null) { - return ctx.newTerser().getSingleValue(theResource, "text.div", XhtmlNode.class).orElse(null); + return ctx.newTerser() + .getSingleValue(theResource, "text.div", XhtmlNode.class) + .orElse(null); } return null; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ResponseSizeCapturingInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ResponseSizeCapturingInterceptor.java index ca107e104a9..5db02636da9 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ResponseSizeCapturingInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ResponseSizeCapturingInterceptor.java @@ -24,12 +24,12 @@ import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.rest.api.server.RequestDetails; import org.apache.commons.lang3.Validate; -import javax.annotation.Nonnull; import java.io.IOException; import java.io.Writer; import java.util.ArrayList; import java.util.List; import java.util.function.Consumer; +import javax.annotation.Nonnull; /** * This interceptor captures and makes @@ -53,9 +53,11 @@ public class ResponseSizeCapturingInterceptor { * be available before that time. *

    */ - public static final String RESPONSE_RESULT_KEY = ResponseSizeCapturingInterceptor.class.getName() + "_RESPONSE_RESULT_KEY"; + public static final String RESPONSE_RESULT_KEY = + ResponseSizeCapturingInterceptor.class.getName() + "_RESPONSE_RESULT_KEY"; - private static final String COUNTING_WRITER_KEY = ResponseSizeCapturingInterceptor.class.getName() + "_COUNTING_WRITER_KEY"; + private static final String COUNTING_WRITER_KEY = + ResponseSizeCapturingInterceptor.class.getName() + "_COUNTING_WRITER_KEY"; private final List> myConsumers = new ArrayList<>(); @Hook(Pointcut.SERVER_OUTGOING_WRITER_CREATED) @@ -65,10 +67,12 @@ public class ResponseSizeCapturingInterceptor { return retVal; } - - @Hook(value = Pointcut.SERVER_PROCESSING_COMPLETED, order = InterceptorOrders.RESPONSE_SIZE_CAPTURING_INTERCEPTOR_COMPLETED) + @Hook( + value = Pointcut.SERVER_PROCESSING_COMPLETED, + order = InterceptorOrders.RESPONSE_SIZE_CAPTURING_INTERCEPTOR_COMPLETED) public void completed(RequestDetails theRequestDetails) { - CountingWriter countingWriter = (CountingWriter) theRequestDetails.getUserData().get(COUNTING_WRITER_KEY); + CountingWriter countingWriter = + (CountingWriter) theRequestDetails.getUserData().get(COUNTING_WRITER_KEY); if (countingWriter != null) { int charCount = countingWriter.getCount(); Result result = new Result(theRequestDetails, charCount); @@ -112,10 +116,8 @@ public class ResponseSizeCapturingInterceptor { public int getWrittenChars() { return myWrittenChars; } - } - private static class CountingWriter extends Writer { private final Writer myWrap; @@ -145,5 +147,4 @@ public class ResponseSizeCapturingInterceptor { return myCount; } } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ResponseTerminologyDisplayPopulationInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ResponseTerminologyDisplayPopulationInterceptor.java index 3a455de9cca..c6a7c901ee6 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ResponseTerminologyDisplayPopulationInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ResponseTerminologyDisplayPopulationInterceptor.java @@ -63,7 +63,8 @@ public class ResponseTerminologyDisplayPopulationInterceptor extends BaseRespons public ResponseTerminologyDisplayPopulationInterceptor(IValidationSupport theValidationSupport) { super(theValidationSupport); - myCodingDefinitition = (BaseRuntimeElementCompositeDefinition) Objects.requireNonNull(myContext.getElementDefinition("Coding")); + myCodingDefinitition = (BaseRuntimeElementCompositeDefinition) + Objects.requireNonNull(myContext.getElementDefinition("Coding")); myCodingType = myCodingDefinitition.getImplementingClass(); myCodingSystemChild = myCodingDefinitition.getChildByName("system"); myCodingCodeChild = myCodingDefinitition.getChildByName("code"); @@ -80,21 +81,40 @@ public class ResponseTerminologyDisplayPopulationInterceptor extends BaseRespons for (IBaseResource nextResource : resources) { terser.visit(nextResource, new MappingVisitor()); } - } private class MappingVisitor implements IModelVisitor { @Override - public void acceptElement(IBaseResource theResource, IBase theElement, List thePathToElement, BaseRuntimeChildDefinition theChildDefinition, BaseRuntimeElementDefinition theDefinition) { + public void acceptElement( + IBaseResource theResource, + IBase theElement, + List thePathToElement, + BaseRuntimeChildDefinition theChildDefinition, + BaseRuntimeElementDefinition 
theDefinition) { if (myCodingType.isAssignableFrom(theElement.getClass())) { - String system = myCodingSystemChild.getAccessor().getFirstValueOrNull(theElement).map(t -> (IPrimitiveType) t).map(t -> t.getValueAsString()).orElse(null); - String code = myCodingCodeChild.getAccessor().getFirstValueOrNull(theElement).map(t -> (IPrimitiveType) t).map(t -> t.getValueAsString()).orElse(null); + String system = myCodingSystemChild + .getAccessor() + .getFirstValueOrNull(theElement) + .map(t -> (IPrimitiveType) t) + .map(t -> t.getValueAsString()) + .orElse(null); + String code = myCodingCodeChild + .getAccessor() + .getFirstValueOrNull(theElement) + .map(t -> (IPrimitiveType) t) + .map(t -> t.getValueAsString()) + .orElse(null); if (isBlank(system) || isBlank(code)) { return; } - String display = myCodingDisplayChild.getAccessor().getFirstValueOrNull(theElement).map(t -> (IPrimitiveType) t).map(t -> t.getValueAsString()).orElse(null); + String display = myCodingDisplayChild + .getAccessor() + .getFirstValueOrNull(theElement) + .map(t -> (IPrimitiveType) t) + .map(t -> t.getValueAsString()) + .orElse(null); if (isNotBlank(display)) { return; } @@ -102,18 +122,15 @@ public class ResponseTerminologyDisplayPopulationInterceptor extends BaseRespons ValidationSupportContext validationSupportContext = new ValidationSupportContext(myValidationSupport); if (myValidationSupport.isCodeSystemSupported(validationSupportContext, system)) { - IValidationSupport.LookupCodeResult lookupCodeResult = myValidationSupport.lookupCode(validationSupportContext, system, code); + IValidationSupport.LookupCodeResult lookupCodeResult = + myValidationSupport.lookupCode(validationSupportContext, system, code); if (lookupCodeResult != null && lookupCodeResult.isFound()) { String newDisplay = lookupCodeResult.getCodeDisplay(); IPrimitiveType newString = myStringDefinition.newInstance(newDisplay); myCodingDisplayChild.getMutator().addValue(theElement, newString); } - } } - } - } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ResponseTerminologyTranslationInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ResponseTerminologyTranslationInterceptor.java index 6469f21b2aa..c6e01381338 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ResponseTerminologyTranslationInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ResponseTerminologyTranslationInterceptor.java @@ -50,7 +50,9 @@ public class ResponseTerminologyTranslationInterceptor extends BaseResponseTermi * * @param theValidationSupport The validation support module */ - public ResponseTerminologyTranslationInterceptor(IValidationSupport theValidationSupport, ResponseTerminologyTranslationSvc theResponseTerminologyTranslationSvc) { + public ResponseTerminologyTranslationInterceptor( + IValidationSupport theValidationSupport, + ResponseTerminologyTranslationSvc theResponseTerminologyTranslationSvc) { super(theValidationSupport); myResponseTerminologyTranslationSvc = theResponseTerminologyTranslationSvc; } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ResponseTerminologyTranslationSvc.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ResponseTerminologyTranslationSvc.java index 293fecad643..8b54c02186c 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ResponseTerminologyTranslationSvc.java +++ 
b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ResponseTerminologyTranslationSvc.java @@ -38,12 +38,12 @@ import org.hl7.fhir.instance.model.api.IBaseCoding; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import javax.annotation.Nonnull; public class ResponseTerminologyTranslationSvc { private BaseRuntimeChildDefinition myCodingSystemChild; @@ -67,12 +67,14 @@ public class ResponseTerminologyTranslationSvc { myFhirContext = theValidationSupport.getFhirContext(); Validate.notNull(myFhirContext, "The validation support must not return a null context"); - BaseRuntimeElementCompositeDefinition codeableConceptDef = (BaseRuntimeElementCompositeDefinition) Objects.requireNonNull(myFhirContext.getElementDefinition("CodeableConcept")); + BaseRuntimeElementCompositeDefinition codeableConceptDef = (BaseRuntimeElementCompositeDefinition) + Objects.requireNonNull(myFhirContext.getElementDefinition("CodeableConcept")); myCodeableConceptType = codeableConceptDef.getImplementingClass(); myCodeableConceptCodingChild = codeableConceptDef.getChildByName("coding"); - myCodingDefinition = (BaseRuntimeElementCompositeDefinition) Objects.requireNonNull(myFhirContext.getElementDefinition("Coding")); + myCodingDefinition = (BaseRuntimeElementCompositeDefinition) + Objects.requireNonNull(myFhirContext.getElementDefinition("Coding")); myCodingType = myCodingDefinition.getImplementingClass(); myCodingSystemChild = myCodingDefinition.getChildByName("system"); myCodingCodeChild = myCodingDefinition.getChildByName("code"); @@ -111,16 +113,34 @@ public class ResponseTerminologyTranslationSvc { private class MappingVisitor implements IModelVisitor { @Override - public void acceptElement(IBaseResource theResource, IBase theElement, List thePathToElement, BaseRuntimeChildDefinition theChildDefinition, BaseRuntimeElementDefinition theDefinition) { + public void acceptElement( + IBaseResource theResource, + IBase theElement, + List thePathToElement, + BaseRuntimeChildDefinition theChildDefinition, + BaseRuntimeElementDefinition theDefinition) { if (myCodeableConceptType.isAssignableFrom(theElement.getClass())) { // Find all existing Codings Multimap foundSystemsToCodes = ArrayListMultimap.create(); - List nextCodeableConceptCodings = myCodeableConceptCodingChild.getAccessor().getValues(theElement); + List nextCodeableConceptCodings = + myCodeableConceptCodingChild.getAccessor().getValues(theElement); for (IBase nextCodeableConceptCoding : nextCodeableConceptCodings) { - String system = myCodingSystemChild.getAccessor().getFirstValueOrNull(nextCodeableConceptCoding).map(t -> (IPrimitiveType) t).map(IPrimitiveType::getValueAsString).orElse(null); - String code = myCodingCodeChild.getAccessor().getFirstValueOrNull(nextCodeableConceptCoding).map(t -> (IPrimitiveType) t).map(IPrimitiveType::getValueAsString).orElse(null); - if (StringUtils.isNotBlank(system) && StringUtils.isNotBlank(code) && !foundSystemsToCodes.containsKey(system)) { + String system = myCodingSystemChild + .getAccessor() + .getFirstValueOrNull(nextCodeableConceptCoding) + .map(t -> (IPrimitiveType) t) + .map(IPrimitiveType::getValueAsString) + .orElse(null); + String code = myCodingCodeChild + .getAccessor() + .getFirstValueOrNull(nextCodeableConceptCoding) + .map(t -> (IPrimitiveType) t) + 
.map(IPrimitiveType::getValueAsString) + .orElse(null); + if (StringUtils.isNotBlank(system) + && StringUtils.isNotBlank(code) + && !foundSystemsToCodes.containsKey(system)) { foundSystemsToCodes.put(system, code); } } @@ -134,28 +154,28 @@ public class ResponseTerminologyTranslationSvc { for (String code : foundSystemsToCodes.get(nextSourceSystem)) { List codings = new ArrayList<>(); codings.add(createCodingFromPrimitives(nextSourceSystem, code, null)); - TranslateConceptResults translateConceptResults = myValidationSupport.translateConcept(new IValidationSupport.TranslateCodeRequest(codings, wantTargetSystem)); + TranslateConceptResults translateConceptResults = myValidationSupport.translateConcept( + new IValidationSupport.TranslateCodeRequest(codings, wantTargetSystem)); if (translateConceptResults != null) { List mappings = translateConceptResults.getResults(); for (TranslateConceptResult nextMapping : mappings) { IBase newCoding = createCodingFromPrimitives( - nextMapping.getSystem(), - nextMapping.getCode(), - nextMapping.getDisplay()); + nextMapping.getSystem(), + nextMapping.getCode(), + nextMapping.getDisplay()); // Add coding to existing CodeableConcept - myCodeableConceptCodingChild.getMutator().addValue(theElement, newCoding); - + myCodeableConceptCodingChild + .getMutator() + .addValue(theElement, newCoding); } } } } } } - } - } private IBaseCoding createCodingFromPrimitives(String system, String code, String display) { @@ -173,6 +193,5 @@ public class ResponseTerminologyTranslationSvc { } return newCoding; } - } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ResponseValidatingInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ResponseValidatingInterceptor.java index 73ac775212f..aa73fb8a1d2 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ResponseValidatingInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ResponseValidatingInterceptor.java @@ -44,7 +44,8 @@ public class ResponseValidatingInterceptor extends BaseValidatingInterceptor myExcludeOperationTypes; @@ -67,7 +68,9 @@ public class ResponseValidatingInterceptor extends BaseValidatingInterceptor) theRequestDetails.getServer(), preferHeader); + PreferHeader parsedPreferHeader = RestfulServerUtils.parsePreferHeader( + (IRestfulServer) theRequestDetails.getServer(), preferHeader); handling = parsedPreferHeader.getHanding(); } @@ -140,14 +144,20 @@ public class SearchPreferHandlingInterceptor { } else { // Strict handling - List allowedParams = searchParamRetriever.getActiveSearchParams(resourceName).getSearchParamNames().stream().sorted().distinct().collect(Collectors.toList()); + List allowedParams = + searchParamRetriever.getActiveSearchParams(resourceName).getSearchParamNames().stream() + .sorted() + .distinct() + .collect(Collectors.toList()); HapiLocalizer localizer = theRequestDetails.getFhirContext().getLocalizer(); - String msg = localizer.getMessage("ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidSearchParameter", paramName, resourceName, allowedParams); + String msg = localizer.getMessage( + "ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidSearchParameter", + paramName, + resourceName, + allowedParams); throw new InvalidRequestException(Msg.code(323) + msg); - } } - } if (newMap != null) { diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ServeMediaResourceRawInterceptor.java 
b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ServeMediaResourceRawInterceptor.java index 038e29ddee7..d1ceea5cab0 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ServeMediaResourceRawInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ServeMediaResourceRawInterceptor.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.rest.server.interceptor; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.api.Hook; import ca.uhn.fhir.interceptor.api.Interceptor; import ca.uhn.fhir.interceptor.api.Pointcut; @@ -34,13 +34,13 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.util.Collections; import java.util.HashSet; import java.util.Optional; import java.util.Set; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -66,31 +66,38 @@ public class ServeMediaResourceRawInterceptor { RESPOND_TO_OPERATION_TYPES = Collections.unmodifiableSet(respondToOperationTypes); } - @Hook(value=Pointcut.SERVER_OUTGOING_RESPONSE, order = InterceptorOrders.SERVE_MEDIA_RESOURCE_RAW_INTERCEPTOR) - public boolean outgoingResponse(RequestDetails theRequestDetails, IBaseResource theResponseObject, HttpServletRequest theServletRequest, HttpServletResponse theServletResponse) throws AuthenticationException { + @Hook(value = Pointcut.SERVER_OUTGOING_RESPONSE, order = InterceptorOrders.SERVE_MEDIA_RESOURCE_RAW_INTERCEPTOR) + public boolean outgoingResponse( + RequestDetails theRequestDetails, + IBaseResource theResponseObject, + HttpServletRequest theServletRequest, + HttpServletResponse theServletResponse) + throws AuthenticationException { if (theResponseObject == null) { return true; } - FhirContext context = theRequestDetails.getFhirContext(); String resourceName = context.getResourceType(theResponseObject); // Are we serving a FHIR read request on the Media resource type - if (!"Media".equals(resourceName) || !RESPOND_TO_OPERATION_TYPES.contains(theRequestDetails.getRestOperationType())) { + if (!"Media".equals(resourceName) + || !RESPOND_TO_OPERATION_TYPES.contains(theRequestDetails.getRestOperationType())) { return true; } // What is the content type of the Media resource we're returning? String contentType = null; - Optional contentTypeOpt = context.newFluentPath().evaluateFirst(theResponseObject, MEDIA_CONTENT_CONTENT_TYPE_OPT, IPrimitiveType.class); + Optional contentTypeOpt = context.newFluentPath() + .evaluateFirst(theResponseObject, MEDIA_CONTENT_CONTENT_TYPE_OPT, IPrimitiveType.class); if (contentTypeOpt.isPresent()) { contentType = contentTypeOpt.get().getValueAsString(); } // What is the data of the Media resource we're returning? 
IPrimitiveType data = null; - Optional dataOpt = context.newFluentPath().evaluateFirst(theResponseObject, "Media.content.data", IPrimitiveType.class); + Optional dataOpt = + context.newFluentPath().evaluateFirst(theResponseObject, "Media.content.data", IPrimitiveType.class); if (dataOpt.isPresent()) { data = dataOpt.get(); } @@ -99,12 +106,12 @@ public class ServeMediaResourceRawInterceptor { return true; } - RestfulServerUtils.ResponseEncoding responseEncoding = RestfulServerUtils.determineResponseEncodingNoDefault(theRequestDetails, null, contentType); + RestfulServerUtils.ResponseEncoding responseEncoding = + RestfulServerUtils.determineResponseEncodingNoDefault(theRequestDetails, null, contentType); if (responseEncoding != null) { if (contentType.equals(responseEncoding.getContentType())) { returnRawResponse(theRequestDetails, theServletResponse, contentType, data); return false; - } } @@ -117,7 +124,11 @@ public class ServeMediaResourceRawInterceptor { return true; } - private void returnRawResponse(RequestDetails theRequestDetails, HttpServletResponse theServletResponse, String theContentType, IPrimitiveType theData) { + private void returnRawResponse( + RequestDetails theRequestDetails, + HttpServletResponse theServletResponse, + String theContentType, + IPrimitiveType theData) { theServletResponse.setStatus(200); if (theRequestDetails.getServer() instanceof RestfulServer) { RestfulServer rs = (RestfulServer) theRequestDetails.getServer(); diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ServerInterceptorUtil.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ServerInterceptorUtil.java index 993c0ab922e..e97fd2d6aa7 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ServerInterceptorUtil.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ServerInterceptorUtil.java @@ -29,9 +29,9 @@ import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster; import org.hl7.fhir.instance.model.api.IBaseResource; -import javax.annotation.CheckReturnValue; import java.util.List; import java.util.Objects; +import javax.annotation.CheckReturnValue; public class ServerInterceptorUtil { @@ -44,7 +44,10 @@ public class ServerInterceptorUtil { * from the resource list */ @CheckReturnValue - public static List fireStoragePreshowResource(List theResources, RequestDetails theRequest, IInterceptorBroadcaster theInterceptorBroadcaster) { + public static List fireStoragePreshowResource( + List theResources, + RequestDetails theRequest, + IInterceptorBroadcaster theInterceptorBroadcaster) { List retVal = theResources; retVal.removeIf(Objects::isNull); @@ -54,10 +57,11 @@ public class ServerInterceptorUtil { if (retVal.size() > 0) { SimplePreResourceShowDetails accessDetails = new SimplePreResourceShowDetails(retVal); HookParams params = new HookParams() - .add(IPreResourceShowDetails.class, accessDetails) - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest); - CompositeInterceptorBroadcaster.doCallHooks(theInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRESHOW_RESOURCES, params); + .add(IPreResourceShowDetails.class, accessDetails) + .add(RequestDetails.class, theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest); + CompositeInterceptorBroadcaster.doCallHooks( + theInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRESHOW_RESOURCES, params); retVal = 
accessDetails.toList(); retVal.removeIf(Objects::isNull); @@ -65,5 +69,4 @@ public class ServerInterceptorUtil { return retVal; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ServerOperationInterceptorAdapter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ServerOperationInterceptorAdapter.java index bcabe2c00ab..2b1283e5bd9 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ServerOperationInterceptorAdapter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ServerOperationInterceptorAdapter.java @@ -19,9 +19,8 @@ */ package ca.uhn.fhir.rest.server.interceptor; -import org.hl7.fhir.instance.model.api.IBaseResource; - import ca.uhn.fhir.rest.api.server.RequestDetails; +import org.hl7.fhir.instance.model.api.IBaseResource; /** * NOP implementation of {@link IServerOperationInterceptor} @@ -44,7 +43,8 @@ public class ServerOperationInterceptorAdapter extends InterceptorAdapter implem } @Override - public void resourcePreUpdate(RequestDetails theRequest, IBaseResource theOldResource, IBaseResource theNewResource) { + public void resourcePreUpdate( + RequestDetails theRequest, IBaseResource theOldResource, IBaseResource theNewResource) { // nothing } @@ -66,5 +66,4 @@ public class ServerOperationInterceptorAdapter extends InterceptorAdapter implem public void resourceUpdated(RequestDetails theRequest, IBaseResource theOldResource, IBaseResource theNewResource) { // nothing } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/StaticCapabilityStatementInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/StaticCapabilityStatementInterceptor.java index d415349f70b..a60fdaa1d16 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/StaticCapabilityStatementInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/StaticCapabilityStatementInterceptor.java @@ -70,15 +70,13 @@ public class StaticCapabilityStatementInterceptor { FhirContext ctx = theRequestDetails.getFhirContext(); EncodingEnum encoding = EncodingEnum.detectEncodingNoDefault(output); - Validate.notNull(encoding, "Could not determine FHIR encoding for resource: %s", myCapabilityStatementResource); + Validate.notNull( + encoding, "Could not determine FHIR encoding for resource: %s", myCapabilityStatementResource); - retVal = (IBaseConformance) encoding - .newParser(ctx) - .parseResource(output); + retVal = (IBaseConformance) encoding.newParser(ctx).parseResource(output); myCapabilityStatement = retVal; } return retVal; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ValidationResultEnrichingInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ValidationResultEnrichingInterceptor.java index 2b87a28dcf7..3adefa98466 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ValidationResultEnrichingInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/ValidationResultEnrichingInterceptor.java @@ -27,10 +27,10 @@ import ca.uhn.fhir.validation.ValidationResult; import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; import org.hl7.fhir.instance.model.api.IBaseResource; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; @Interceptor 
public class ValidationResultEnrichingInterceptor { @@ -39,33 +39,36 @@ public class ValidationResultEnrichingInterceptor { * A {@link RequestDetails#getUserData() user data} entry will be created with this * key which contains the {@link ValidationResult} from validating the request. */ - public static final String REQUEST_VALIDATION_RESULT = ValidationResultEnrichingInterceptor.class.getName() + "_REQUEST_VALIDATION_RESULT"; + public static final String REQUEST_VALIDATION_RESULT = + ValidationResultEnrichingInterceptor.class.getName() + "_REQUEST_VALIDATION_RESULT"; @SuppressWarnings("unchecked") @Hook(Pointcut.SERVER_OUTGOING_RESPONSE) - public boolean addValidationResultsToOperationOutcome(RequestDetails theRequestDetails, IBaseResource theResponseObject) { + public boolean addValidationResultsToOperationOutcome( + RequestDetails theRequestDetails, IBaseResource theResponseObject) { if (theResponseObject instanceof IBaseOperationOutcome) { IBaseOperationOutcome oo = (IBaseOperationOutcome) theResponseObject; if (theRequestDetails != null) { - List validationResult = (List) theRequestDetails.getUserData().remove(REQUEST_VALIDATION_RESULT); + List validationResult = + (List) theRequestDetails.getUserData().remove(REQUEST_VALIDATION_RESULT); if (validationResult != null) { for (ValidationResult next : validationResult) { next.populateOperationOutcome(oo); } } } - } return true; } - @SuppressWarnings("unchecked") - public static void addValidationResultToRequestDetails(@Nullable RequestDetails theRequestDetails, @Nonnull ValidationResult theValidationResult) { + public static void addValidationResultToRequestDetails( + @Nullable RequestDetails theRequestDetails, @Nonnull ValidationResult theValidationResult) { if (theRequestDetails != null) { - List results = (List) theRequestDetails.getUserData().computeIfAbsent(REQUEST_VALIDATION_RESULT, t -> new ArrayList<>(2)); + List results = (List) + theRequestDetails.getUserData().computeIfAbsent(REQUEST_VALIDATION_RESULT, t -> new ArrayList<>(2)); results.add(theValidationResult); } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/VerboseLoggingInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/VerboseLoggingInterceptor.java index 652eb6d153b..7b000b05ab5 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/VerboseLoggingInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/VerboseLoggingInterceptor.java @@ -19,19 +19,18 @@ */ package ca.uhn.fhir.rest.server.interceptor; -import java.util.Enumeration; - -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.AuthenticationException; +import java.util.Enumeration; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + /** - * This interceptor creates verbose server log entries containing the complete request and response payloads. - *

    + * This interceptor creates verbose server log entries containing the complete request and response payloads. + *

    * This interceptor is mainly intended for debugging since it will generate very large log entries and - * could potentially be a security risk since it logs every header and complete payload. Use with caution! + * could potentially be a security risk since it logs every header and complete payload. Use with caution! *

    */ public class VerboseLoggingInterceptor extends InterceptorAdapter { @@ -39,25 +38,30 @@ public class VerboseLoggingInterceptor extends InterceptorAdapter { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(VerboseLoggingInterceptor.class); @Override - public boolean incomingRequestPostProcessed(RequestDetails theRequestDetails, HttpServletRequest theRequest, HttpServletResponse theResponse) throws AuthenticationException { - + public boolean incomingRequestPostProcessed( + RequestDetails theRequestDetails, HttpServletRequest theRequest, HttpServletResponse theResponse) + throws AuthenticationException { + StringBuilder b = new StringBuilder("Incoming request: "); b.append(theRequest.getMethod()); b.append(" "); b.append(theRequest.getRequestURL()); b.append("\n"); - - for (Enumeration headerEnumeration = theRequest.getHeaderNames(); headerEnumeration.hasMoreElements(); ) { + + for (Enumeration headerEnumeration = theRequest.getHeaderNames(); + headerEnumeration.hasMoreElements(); ) { String nextName = headerEnumeration.nextElement(); - for (Enumeration valueEnumeration = theRequest.getHeaders(nextName); valueEnumeration.hasMoreElements(); ) { - b.append(" * ").append(nextName).append(": ").append(valueEnumeration.nextElement()).append("\n"); + for (Enumeration valueEnumeration = theRequest.getHeaders(nextName); + valueEnumeration.hasMoreElements(); ) { + b.append(" * ") + .append(nextName) + .append(": ") + .append(valueEnumeration.nextElement()) + .append("\n"); } } - + ourLog.info(b.toString()); return true; } - - - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AdditionalCompartmentSearchParameters.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AdditionalCompartmentSearchParameters.java index 7165d925b80..661495f782b 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AdditionalCompartmentSearchParameters.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AdditionalCompartmentSearchParameters.java @@ -20,12 +20,13 @@ package ca.uhn.fhir.rest.server.interceptor.auth; import ca.uhn.fhir.i18n.Msg; -import javax.annotation.Nonnull; + import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; +import javax.annotation.Nonnull; /** * This class is used in RuleBuilder, as a way to provide a compartment permission additional resource search params that @@ -44,18 +45,25 @@ public class AdditionalCompartmentSearchParameters { public void addSearchParameters(@Nonnull String... theQualifiedSearchParameters) { Arrays.stream(theQualifiedSearchParameters).forEach(code -> { if (code == null || !code.contains(":")) { - throw new IllegalArgumentException(Msg.code(341) + code + " is not a valid search parameter. Search parameters must be in the form resourcetype:parametercode, e.g. 'Device:patient'"); + throw new IllegalArgumentException( + Msg.code(341) + code + + " is not a valid search parameter. Search parameters must be in the form resourcetype:parametercode, e.g. 'Device:patient'"); } String[] split = code.split(":"); if (split.length != 2) { - throw new IllegalArgumentException(Msg.code(342) + code + " is not a valid search parameter. Search parameters must be in the form resourcetype:parametercode, e.g. 'Device:patient'"); + throw new IllegalArgumentException( + Msg.code(342) + code + + " is not a valid search parameter. 
Search parameters must be in the form resourcetype:parametercode, e.g. 'Device:patient'"); } else { - myResourceTypeToParameterCodeMap.computeIfAbsent(split[0].toLowerCase(), (key) -> new HashSet<>()).add(split[1].toLowerCase()); + myResourceTypeToParameterCodeMap + .computeIfAbsent(split[0].toLowerCase(), (key) -> new HashSet<>()) + .add(split[1].toLowerCase()); } }); } public Set getSearchParamNamesForResourceType(@Nonnull String theResourceType) { - return myResourceTypeToParameterCodeMap.computeIfAbsent(theResourceType.toLowerCase(), (key) -> new HashSet<>()); + return myResourceTypeToParameterCodeMap.computeIfAbsent( + theResourceType.toLowerCase(), (key) -> new HashSet<>()); } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AllowedCodeInValueSet.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AllowedCodeInValueSet.java index 8095150b74a..2f7f217259a 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AllowedCodeInValueSet.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AllowedCodeInValueSet.java @@ -29,7 +29,11 @@ class AllowedCodeInValueSet { private final String myValueSetUrl; private final boolean myNegate; - public AllowedCodeInValueSet(@Nonnull String theResourceName, @Nonnull String theSearchParameterName, @Nonnull String theValueSetUrl, boolean theNegate) { + public AllowedCodeInValueSet( + @Nonnull String theResourceName, + @Nonnull String theSearchParameterName, + @Nonnull String theValueSetUrl, + boolean theNegate) { assert isNotBlank(theResourceName); assert isNotBlank(theSearchParameterName); assert isNotBlank(theValueSetUrl); diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AppliesTypeEnum.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AppliesTypeEnum.java index aee4b2d27e6..45aea2839b1 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AppliesTypeEnum.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AppliesTypeEnum.java @@ -20,5 +20,7 @@ package ca.uhn.fhir.rest.server.interceptor.auth; enum AppliesTypeEnum { - ALL_RESOURCES, TYPES, INSTANCES + ALL_RESOURCES, + TYPES, + INSTANCES } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AuthorizationConstants.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AuthorizationConstants.java index 43426534786..f513f6d2836 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AuthorizationConstants.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AuthorizationConstants.java @@ -29,5 +29,4 @@ public class AuthorizationConstants { private AuthorizationConstants() { super(); } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AuthorizationInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AuthorizationInterceptor.java index 929d0a00c57..853a90c6f7e 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AuthorizationInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AuthorizationInterceptor.java @@ -44,8 +44,6 @@ import org.hl7.fhir.instance.model.api.IIdType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import 
java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -54,6 +52,8 @@ import java.util.IdentityHashMap; import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static java.util.Objects.isNull; import static java.util.Objects.nonNull; @@ -76,12 +76,15 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; @Interceptor(order = AuthorizationConstants.ORDER_AUTH_INTERCEPTOR) public class AuthorizationInterceptor implements IRuleApplier { - public static final String REQUEST_ATTRIBUTE_BULK_DATA_EXPORT_OPTIONS = AuthorizationInterceptor.class.getName() + "_BulkDataExportOptions"; + public static final String REQUEST_ATTRIBUTE_BULK_DATA_EXPORT_OPTIONS = + AuthorizationInterceptor.class.getName() + "_BulkDataExportOptions"; private static final AtomicInteger ourInstanceCount = new AtomicInteger(0); private static final Logger ourLog = LoggerFactory.getLogger(AuthorizationInterceptor.class); private final int myInstanceIndex = ourInstanceCount.incrementAndGet(); - private final String myRequestSeenResourcesKey = AuthorizationInterceptor.class.getName() + "_" + myInstanceIndex + "_SEENRESOURCES"; - private final String myRequestRuleListKey = AuthorizationInterceptor.class.getName() + "_" + myInstanceIndex + "_RULELIST"; + private final String myRequestSeenResourcesKey = + AuthorizationInterceptor.class.getName() + "_" + myInstanceIndex + "_SEENRESOURCES"; + private final String myRequestRuleListKey = + AuthorizationInterceptor.class.getName() + "_" + myInstanceIndex + "_RULELIST"; private PolicyEnum myDefaultPolicy = PolicyEnum.DENY; private Set myFlags = Collections.emptySet(); private IValidationSupport myValidationSupport; @@ -118,9 +121,15 @@ public class AuthorizationInterceptor implements IRuleApplier { myTroubleshootingLog = theTroubleshootingLog; } - private void applyRulesAndFailIfDeny(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, - IBaseResource theOutputResource, Pointcut thePointcut) { - Verdict decision = applyRulesAndReturnDecision(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, thePointcut); + private void applyRulesAndFailIfDeny( + RestOperationTypeEnum theOperation, + RequestDetails theRequestDetails, + IBaseResource theInputResource, + IIdType theInputResourceId, + IBaseResource theOutputResource, + Pointcut thePointcut) { + Verdict decision = applyRulesAndReturnDecision( + theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, thePointcut); if (decision.getDecision() == PolicyEnum.ALLOW) { return; @@ -130,26 +139,41 @@ public class AuthorizationInterceptor implements IRuleApplier { } @Override - public Verdict applyRulesAndReturnDecision(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, - IBaseResource theOutputResource, Pointcut thePointcut) { + public Verdict applyRulesAndReturnDecision( + RestOperationTypeEnum theOperation, + RequestDetails theRequestDetails, + IBaseResource theInputResource, + IIdType theInputResourceId, + IBaseResource theOutputResource, + Pointcut thePointcut) { @SuppressWarnings("unchecked") - List rules = (List) theRequestDetails.getUserData().get(myRequestRuleListKey); + List rules = + (List) theRequestDetails.getUserData().get(myRequestRuleListKey); if (rules == 
null) { rules = buildRuleList(theRequestDetails); theRequestDetails.getUserData().put(myRequestRuleListKey, rules); } Set flags = getFlags(); - ourLog.trace("Applying {} rules to render an auth decision for operation {}, theInputResource type={}, theOutputResource type={}, thePointcut={} ", - rules.size(), - getPointcutNameOrEmpty(thePointcut), - getResourceTypeOrEmpty(theInputResource), - getResourceTypeOrEmpty(theOutputResource)); + ourLog.trace( + "Applying {} rules to render an auth decision for operation {}, theInputResource type={}, theOutputResource type={}, thePointcut={} ", + rules.size(), + getPointcutNameOrEmpty(thePointcut), + getResourceTypeOrEmpty(theInputResource), + getResourceTypeOrEmpty(theOutputResource)); Verdict verdict = null; for (IAuthRule nextRule : rules) { ourLog.trace("Rule being applied - {}", nextRule); - verdict = nextRule.applyRule(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, this, flags, thePointcut); + verdict = nextRule.applyRule( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + this, + flags, + thePointcut); if (verdict != null) { ourLog.trace("Rule {} returned decision {}", nextRule, verdict.getDecision()); break; @@ -191,7 +215,8 @@ public class AuthorizationInterceptor implements IRuleApplier { * * @param theAuthorizationSearchParamMatcher The search parameter matcher. Defaults to null. */ - public void setAuthorizationSearchParamMatcher(@Nullable IAuthorizationSearchParamMatcher theAuthorizationSearchParamMatcher) { + public void setAuthorizationSearchParamMatcher( + @Nullable IAuthorizationSearchParamMatcher theAuthorizationSearchParamMatcher) { this.myAuthorizationSearchParamMatcher = theAuthorizationSearchParamMatcher; } @@ -271,9 +296,9 @@ public class AuthorizationInterceptor implements IRuleApplier { default: // Should not happen - throw new IllegalStateException(Msg.code(332) + "Unable to apply security to event of type " + theOperation); + throw new IllegalStateException( + Msg.code(332) + "Unable to apply security to event of type " + theOperation); } - } /** @@ -356,7 +381,11 @@ public class AuthorizationInterceptor implements IRuleApplier { throw new ForbiddenOperationException(Msg.code(334) + "Access denied by default policy (no applicable rules)"); } - private void handleUserOperation(RequestDetails theRequest, IBaseResource theResource, RestOperationTypeEnum theOperation, Pointcut thePointcut) { + private void handleUserOperation( + RequestDetails theRequest, + IBaseResource theResource, + RestOperationTypeEnum theOperation, + Pointcut thePointcut) { applyRulesAndFailIfDeny(theOperation, theRequest, theResource, theResource.getIdElement(), null, thePointcut); } @@ -383,11 +412,13 @@ public class AuthorizationInterceptor implements IRuleApplier { return; } - applyRulesAndFailIfDeny(theRequest.getRestOperationType(), theRequest, inputResource, inputResourceId, null, thePointcut); + applyRulesAndFailIfDeny( + theRequest.getRestOperationType(), theRequest, inputResource, inputResourceId, null, thePointcut); } @Hook(Pointcut.STORAGE_PRESHOW_RESOURCES) - public void hookPreShow(RequestDetails theRequestDetails, IPreResourceShowDetails theDetails, Pointcut thePointcut) { + public void hookPreShow( + RequestDetails theRequestDetails, IPreResourceShowDetails theDetails, Pointcut thePointcut) { for (int i = 0; i < theDetails.size(); i++) { IBaseResource next = theDetails.getResource(i); checkOutgoingResourceAndFailIfDeny(theRequestDetails, next, 
thePointcut); @@ -395,24 +426,29 @@ public class AuthorizationInterceptor implements IRuleApplier { } @Hook(Pointcut.SERVER_OUTGOING_RESPONSE) - public void hookOutgoingResponse(RequestDetails theRequestDetails, IBaseResource theResponseObject, Pointcut thePointcut) { + public void hookOutgoingResponse( + RequestDetails theRequestDetails, IBaseResource theResponseObject, Pointcut thePointcut) { checkOutgoingResourceAndFailIfDeny(theRequestDetails, theResponseObject, thePointcut); } @Hook(Pointcut.STORAGE_CASCADE_DELETE) - public void hookCascadeDeleteForConflict(RequestDetails theRequestDetails, Pointcut thePointcut, IBaseResource theResourceToDelete) { + public void hookCascadeDeleteForConflict( + RequestDetails theRequestDetails, Pointcut thePointcut, IBaseResource theResourceToDelete) { Validate.notNull(theResourceToDelete); // just in case checkPointcutAndFailIfDeny(theRequestDetails, thePointcut, theResourceToDelete); } @Hook(Pointcut.STORAGE_PRE_DELETE_EXPUNGE) public void hookDeleteExpunge(RequestDetails theRequestDetails, Pointcut thePointcut) { - applyRulesAndFailIfDeny(theRequestDetails.getRestOperationType(), theRequestDetails, null, null, null, thePointcut); + applyRulesAndFailIfDeny( + theRequestDetails.getRestOperationType(), theRequestDetails, null, null, null, thePointcut); } @Hook(Pointcut.STORAGE_INITIATE_BULK_EXPORT) - public void initiateBulkExport(RequestDetails theRequestDetails, BulkExportJobParameters theBulkExportOptions, Pointcut thePointcut) { -// RestOperationTypeEnum restOperationType = determineRestOperationTypeFromBulkExportOptions(theBulkExportOptions); + public void initiateBulkExport( + RequestDetails theRequestDetails, BulkExportJobParameters theBulkExportOptions, Pointcut thePointcut) { + // RestOperationTypeEnum restOperationType = + // determineRestOperationTypeFromBulkExportOptions(theBulkExportOptions); RestOperationTypeEnum restOperationType = RestOperationTypeEnum.EXTENDED_OPERATION_SERVER; if (theRequestDetails != null) { @@ -425,7 +461,8 @@ public class AuthorizationInterceptor implements IRuleApplier { * TODO GGG This method should eventually be used when invoking the rules applier.....however we currently rely on the incorrect * behaviour of passing down `EXTENDED_OPERATION_SERVER`. 
*/ - private RestOperationTypeEnum determineRestOperationTypeFromBulkExportOptions(BulkExportJobParameters theBulkExportOptions) { + private RestOperationTypeEnum determineRestOperationTypeFromBulkExportOptions( + BulkExportJobParameters theBulkExportOptions) { RestOperationTypeEnum restOperationType = RestOperationTypeEnum.EXTENDED_OPERATION_SERVER; BulkExportJobParameters.ExportStyle exportStyle = theBulkExportOptions.getExportStyle(); if (exportStyle.equals(BulkExportJobParameters.ExportStyle.SYSTEM)) { @@ -442,11 +479,19 @@ public class AuthorizationInterceptor implements IRuleApplier { return restOperationType; } - private void checkPointcutAndFailIfDeny(RequestDetails theRequestDetails, Pointcut thePointcut, @Nonnull IBaseResource theInputResource) { - applyRulesAndFailIfDeny(theRequestDetails.getRestOperationType(), theRequestDetails, theInputResource, theInputResource.getIdElement(), null, thePointcut); + private void checkPointcutAndFailIfDeny( + RequestDetails theRequestDetails, Pointcut thePointcut, @Nonnull IBaseResource theInputResource) { + applyRulesAndFailIfDeny( + theRequestDetails.getRestOperationType(), + theRequestDetails, + theInputResource, + theInputResource.getIdElement(), + null, + thePointcut); } - private void checkOutgoingResourceAndFailIfDeny(RequestDetails theRequestDetails, IBaseResource theResponseObject, Pointcut thePointcut) { + private void checkOutgoingResourceAndFailIfDeny( + RequestDetails theRequestDetails, IBaseResource theResponseObject, Pointcut thePointcut) { switch (determineOperationDirection(theRequestDetails.getRestOperationType())) { case IN: @@ -458,7 +503,8 @@ public class AuthorizationInterceptor implements IRuleApplier { } // Don't check the value twice - IdentityHashMap alreadySeenMap = ConsentInterceptor.getAlreadySeenResourcesMap(theRequestDetails, myRequestSeenResourcesKey); + IdentityHashMap alreadySeenMap = + ConsentInterceptor.getAlreadySeenResourcesMap(theRequestDetails, myRequestSeenResourcesKey); if (alreadySeenMap.putIfAbsent(theResponseObject, Boolean.TRUE) != null) { return; } @@ -492,7 +538,8 @@ public class AuthorizationInterceptor implements IRuleApplier { } for (IBaseResource nextResponse : resources) { - applyRulesAndFailIfDeny(theRequestDetails.getRestOperationType(), theRequestDetails, null, null, nextResponse, thePointcut); + applyRulesAndFailIfDeny( + theRequestDetails.getRestOperationType(), theRequestDetails, null, null, nextResponse, thePointcut); } } @@ -507,7 +554,11 @@ public class AuthorizationInterceptor implements IRuleApplier { } @Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED) - public void hookResourcePreUpdate(RequestDetails theRequest, IBaseResource theOldResource, IBaseResource theNewResource, Pointcut thePointcut) { + public void hookResourcePreUpdate( + RequestDetails theRequest, + IBaseResource theOldResource, + IBaseResource theNewResource, + Pointcut thePointcut) { if (theOldResource != null) { handleUserOperation(theRequest, theOldResource, RestOperationTypeEnum.UPDATE, thePointcut); } @@ -521,7 +572,8 @@ public class AuthorizationInterceptor implements IRuleApplier { OUT, } - static List toListOfResourcesAndExcludeContainer(IBaseResource theResponseObject, FhirContext fhirContext) { + static List toListOfResourcesAndExcludeContainer( + IBaseResource theResponseObject, FhirContext fhirContext) { if (theResponseObject == null) { return Collections.emptyList(); } @@ -547,7 +599,7 @@ public class AuthorizationInterceptor implements IRuleApplier { } // Don't apply security to OperationOutcome - 
retVal.removeIf(t->t instanceof IBaseOperationOutcome); + retVal.removeIf(t -> t instanceof IBaseOperationOutcome); return retVal; } @@ -585,29 +637,27 @@ public class AuthorizationInterceptor implements IRuleApplier { b.append("decision", myDecision.name()); return b.build(); } - } private Object getPointcutNameOrEmpty(Pointcut thePointcut) { return nonNull(thePointcut) ? thePointcut.name() : EMPTY; } - private String getResourceTypeOrEmpty(IBaseResource theResource){ + private String getResourceTypeOrEmpty(IBaseResource theResource) { String retVal = StringUtils.EMPTY; - if(isNull(theResource)){ + if (isNull(theResource)) { return retVal; } - if(isNull(theResource.getIdElement())){ + if (isNull(theResource.getIdElement())) { return retVal; } - if(isNull(theResource.getIdElement().getResourceType())){ + if (isNull(theResource.getIdElement().getResourceType())) { return retVal; } return theResource.getIdElement().getResourceType(); } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AuthorizedList.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AuthorizedList.java index 2773721b02f..d49ec122d68 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AuthorizedList.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AuthorizedList.java @@ -22,10 +22,10 @@ package ca.uhn.fhir.rest.server.interceptor.auth; import ca.uhn.fhir.rest.api.server.RequestDetails; import org.apache.commons.lang3.Validate; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Return type for {@link SearchNarrowingInterceptor#buildAuthorizedList(RequestDetails)} @@ -122,7 +122,8 @@ public class AuthorizedList { * @see AuthorizationInterceptor If search narrowing by code is being used for security reasons, consider also using AuthorizationInterceptor as a failsafe to ensure that no inapproproiate resources are returned * @since 6.0.0 */ - public AuthorizedList addCodeInValueSet(@Nonnull String theResourceName, @Nonnull String theSearchParameterName, @Nonnull String theValueSetUrl) { + public AuthorizedList addCodeInValueSet( + @Nonnull String theResourceName, @Nonnull String theSearchParameterName, @Nonnull String theValueSetUrl) { Validate.notBlank(theResourceName, "theResourceName must not be missing or null"); Validate.notBlank(theSearchParameterName, "theSearchParameterName must not be missing or null"); Validate.notBlank(theValueSetUrl, "theResourceUrl must not be missing or null"); @@ -141,7 +142,8 @@ public class AuthorizedList { * @see AuthorizationInterceptor If search narrowing by code is being used for security reasons, consider also using AuthorizationInterceptor as a failsafe to ensure that no inapproproiate resources are returned * @since 6.0.0 */ - public AuthorizedList addCodeNotInValueSet(@Nonnull String theResourceName, @Nonnull String theSearchParameterName, @Nonnull String theValueSetUrl) { + public AuthorizedList addCodeNotInValueSet( + @Nonnull String theResourceName, @Nonnull String theSearchParameterName, @Nonnull String theValueSetUrl) { Validate.notBlank(theResourceName, "theResourceName must not be missing or null"); Validate.notBlank(theSearchParameterName, "theSearchParameterName must not be missing or null"); Validate.notBlank(theValueSetUrl, "theResourceUrl must not be missing or null"); @@ -149,11 +151,13 @@ public class 
AuthorizedList { return doAddCodeInValueSet(theResourceName, theSearchParameterName, theValueSetUrl, true); } - private AuthorizedList doAddCodeInValueSet(String theResourceName, String theSearchParameterName, String theValueSetUrl, boolean negate) { + private AuthorizedList doAddCodeInValueSet( + String theResourceName, String theSearchParameterName, String theValueSetUrl, boolean negate) { if (myAllowedCodeInValueSets == null) { myAllowedCodeInValueSets = new ArrayList<>(); } - myAllowedCodeInValueSets.add(new AllowedCodeInValueSet(theResourceName, theSearchParameterName, theValueSetUrl, negate)); + myAllowedCodeInValueSets.add( + new AllowedCodeInValueSet(theResourceName, theSearchParameterName, theValueSetUrl, negate)); return this; } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/BaseRule.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/BaseRule.java index 093c4705643..533c837a1a0 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/BaseRule.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/BaseRule.java @@ -54,13 +54,19 @@ abstract class BaseRule implements IAuthRule { theTesters.forEach(this::addTester); } - private boolean applyTesters(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IIdType theInputResourceId, IBaseResource theInputResource, IBaseResource theOutputResource, IRuleApplier theRuleApplier) { + private boolean applyTesters( + RestOperationTypeEnum theOperation, + RequestDetails theRequestDetails, + IIdType theInputResourceId, + IBaseResource theInputResource, + IBaseResource theOutputResource, + IRuleApplier theRuleApplier) { assert !(theInputResource != null && theOutputResource != null); boolean retVal = true; if (theOutputResource == null) { - IAuthRuleTester.RuleTestRequest inputRequest = new IAuthRuleTester.RuleTestRequest(myMode, theOperation, theRequestDetails, theInputResourceId, theInputResource, theRuleApplier); - + IAuthRuleTester.RuleTestRequest inputRequest = new IAuthRuleTester.RuleTestRequest( + myMode, theOperation, theRequestDetails, theInputResourceId, theInputResource, theRuleApplier); for (IAuthRuleTester next : getTesters()) { if (!next.matches(inputRequest)) { @@ -69,7 +75,13 @@ abstract class BaseRule implements IAuthRule { } } } else { - IAuthRuleTester.RuleTestRequest outputRequest = new IAuthRuleTester.RuleTestRequest(myMode, theOperation, theRequestDetails, theOutputResource.getIdElement(), theOutputResource, theRuleApplier); + IAuthRuleTester.RuleTestRequest outputRequest = new IAuthRuleTester.RuleTestRequest( + myMode, + theOperation, + theRequestDetails, + theOutputResource.getIdElement(), + theOutputResource, + theRuleApplier); for (IAuthRuleTester next : getTesters()) { if (!next.matchesOutput(outputRequest)) { retVal = false; @@ -102,15 +114,28 @@ abstract class BaseRule implements IAuthRule { return Collections.unmodifiableList(myTesters); } - Verdict newVerdict(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, IRuleApplier theRuleApplier) { - if (!applyTesters(theOperation, theRequestDetails, theInputResourceId, theInputResource, theOutputResource, theRuleApplier)) { + Verdict newVerdict( + RestOperationTypeEnum theOperation, + RequestDetails theRequestDetails, + IBaseResource theInputResource, + IIdType theInputResourceId, + IBaseResource theOutputResource, + IRuleApplier 
theRuleApplier) { + if (!applyTesters( + theOperation, + theRequestDetails, + theInputResourceId, + theInputResource, + theOutputResource, + theRuleApplier)) { return null; } return new Verdict(myMode, this); } protected boolean isResourceAccess(Pointcut thePointcut) { - return thePointcut.equals(Pointcut.STORAGE_PREACCESS_RESOURCES) || thePointcut.equals(Pointcut.STORAGE_PRESHOW_RESOURCES); + return thePointcut.equals(Pointcut.STORAGE_PREACCESS_RESOURCES) + || thePointcut.equals(Pointcut.STORAGE_PRESHOW_RESOURCES); } @Override @@ -124,5 +149,4 @@ abstract class BaseRule implements IAuthRule { builder.append("testers", myTesters); return builder; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/ClassifierTypeEnum.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/ClassifierTypeEnum.java index a4446dd715b..ca965c78aaf 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/ClassifierTypeEnum.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/ClassifierTypeEnum.java @@ -20,6 +20,6 @@ package ca.uhn.fhir.rest.server.interceptor.auth; enum ClassifierTypeEnum { - IN_COMPARTMENT, ANY_ID - + IN_COMPARTMENT, + ANY_ID } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/FhirQueryRuleTester.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/FhirQueryRuleTester.java index 43111aeefc7..f87bdc38b66 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/FhirQueryRuleTester.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/FhirQueryRuleTester.java @@ -46,7 +46,10 @@ public class FhirQueryRuleTester implements IAuthRuleTester { // look for a matcher IAuthorizationSearchParamMatcher matcher = theRuleTestRequest.ruleApplier.getSearchParamMatcher(); if (matcher == null) { - theRuleTestRequest.ruleApplier.getTroubleshootingLog().warn("No matcher provided. Can't apply filter permission."); + theRuleTestRequest + .ruleApplier + .getTroubleshootingLog() + .warn("No matcher provided. Can't apply filter permission."); return false; } @@ -58,14 +61,19 @@ public class FhirQueryRuleTester implements IAuthRuleTester { return true; } - // we use the target type since the rule might apply to all types, a type set, or instances, and that has already been checked. - IAuthorizationSearchParamMatcher.MatchResult mr = matcher.match(theRuleTestRequest.resource.fhirType() + "?" + myQueryParameters, theRuleTestRequest.resource); + // we use the target type since the rule might apply to all types, a type set, or instances, and that has + // already been checked. + IAuthorizationSearchParamMatcher.MatchResult mr = matcher.match( + theRuleTestRequest.resource.fhirType() + "?" + myQueryParameters, theRuleTestRequest.resource); switch (mr.match) { case MATCH: return true; case UNSUPPORTED: - theRuleTestRequest.ruleApplier.getTroubleshootingLog().warn("Unsupported matcher expression {}: {}.", myQueryParameters, mr.unsupportedReason); + theRuleTestRequest + .ruleApplier + .getTroubleshootingLog() + .warn("Unsupported matcher expression {}: {}.", myQueryParameters, mr.unsupportedReason); // unsupported doesn't match unless this is a deny request, and we need to be safe! 
return (theRuleTestRequest.mode == PolicyEnum.DENY); case NO_MATCH: @@ -77,7 +85,7 @@ public class FhirQueryRuleTester implements IAuthRuleTester { @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("filter", myQueryParameters) - .toString(); + .append("filter", myQueryParameters) + .toString(); } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRule.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRule.java index 509cfb360a1..4cb23bb3d11 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRule.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRule.java @@ -51,11 +51,18 @@ public interface IAuthRule { * @param thePointcut The pointcut hook that triggered this call * @return Returns a policy decision, or null if the rule does not apply */ - Verdict applyRule(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, IRuleApplier theRuleApplier, Set theFlags, Pointcut thePointcut); + Verdict applyRule( + RestOperationTypeEnum theOperation, + RequestDetails theRequestDetails, + IBaseResource theInputResource, + IIdType theInputResourceId, + IBaseResource theOutputResource, + IRuleApplier theRuleApplier, + Set theFlags, + Pointcut thePointcut); /** * Returns a name for this rule, to be used in logs and error messages */ String getName(); - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilder.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilder.java index 763a8eb75b5..64f56356486 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilder.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilder.java @@ -24,7 +24,7 @@ import java.util.List; /** * Used by {@link AuthorizationInterceptor} in order to allow user code to define authorization * rules. - * + * * @see AuthorizationInterceptor */ public interface IAuthRuleBuilder { @@ -36,7 +36,7 @@ public interface IAuthRuleBuilder { /** * Start a new rule to allow a given operation - * + * * @param theRuleName * The name of this rule. The rule name is used for logging and error messages, * and could be shown to the client, but has no semantic meaning within @@ -80,7 +80,7 @@ public interface IAuthRuleBuilder { /** * Start a new rule to deny a given operation - * + * * @param theRuleName * The name of this rule. The rule name is used for logging and error messages, * and could be shown to the client, but has no semantic meaning within @@ -111,5 +111,4 @@ public interface IAuthRuleBuilder { * HAPI FHIR. */ IAuthRuleBuilderRuleOpClassifierFinished denyAll(String theRuleName); - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderAppliesTo.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderAppliesTo.java index 7ade4f86a12..1fedaac709a 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderAppliesTo.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderAppliesTo.java @@ -39,5 +39,4 @@ public interface IAuthRuleBuilderAppliesTo { * @param theType E.g. 
"Patient" */ T resourcesOfType(String theType); - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderOperation.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderOperation.java index c91138e910b..bdbc8638907 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderOperation.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderOperation.java @@ -23,7 +23,7 @@ public interface IAuthRuleBuilderOperation { /** * This rule applies to the operation with the given name - * + * * @param The operation name, e.g. "validate" or "$validate" (either form may be used here) */ IAuthRuleBuilderOperationNamed named(String theOperationName); @@ -32,5 +32,4 @@ public interface IAuthRuleBuilderOperation { * This rule applies to any operation */ IAuthRuleBuilderOperationNamed withAnyName(); - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderOperationNamed.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderOperationNamed.java index 49d5c540668..539f00a1168 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderOperationNamed.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderOperationNamed.java @@ -58,5 +58,4 @@ public interface IAuthRuleBuilderOperationNamed { * Rule applies to invocations of this operation at any level (server, type or instance) */ IAuthRuleBuilderOperationNamedAndScoped atAnyLevel(); - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderOperationNamedAndScoped.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderOperationNamedAndScoped.java index 81fd5fd8dfe..87ae1527ab8 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderOperationNamedAndScoped.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderOperationNamedAndScoped.java @@ -33,6 +33,4 @@ public interface IAuthRuleBuilderOperationNamedAndScoped { * resources being returned */ IAuthRuleBuilderRuleOpClassifierFinished andRequireExplicitResponseAuthorization(); - - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderPatch.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderPatch.java index c7e2dfff237..559b14d5165 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderPatch.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderPatch.java @@ -35,5 +35,4 @@ public interface IAuthRuleBuilderPatch { * permit write operations. 
*/ IAuthRuleFinished allRequests(); - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleBulkExport.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleBulkExport.java index cd041961195..448e9bd2512 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleBulkExport.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleBulkExport.java @@ -53,14 +53,12 @@ public interface IAuthRuleBuilderRuleBulkExport { return patientExportOnGroup(theFocusResourceId.getValue()); } - IAuthRuleBuilderRuleBulkExportWithTarget patientExportOnPatient(@Nonnull String theFocusResourceId); default IAuthRuleBuilderRuleBulkExportWithTarget patientExportOnPatient(@Nonnull IIdType theFocusResourceId) { return patientExportOnPatient(theFocusResourceId.getValue()); } - /** * Allow/deny patient-level export rule applies to the Group with the given resource ID, e.g. Group/123 * diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleBulkExportWithTarget.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleBulkExportWithTarget.java index 5dd3798e28a..415c24dd2ec 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleBulkExportWithTarget.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleBulkExportWithTarget.java @@ -36,5 +36,4 @@ public interface IAuthRuleBuilderRuleBulkExportWithTarget extends IAuthRuleFinis * @since 5.5.0 */ IAuthRuleBuilderRuleBulkExportWithTarget withResourceTypes(Collection theResourceTypes); - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleConditional.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleConditional.java index 5489016b842..4ab4267694f 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleConditional.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleConditional.java @@ -19,6 +19,5 @@ */ package ca.uhn.fhir.rest.server.interceptor.auth; -public interface IAuthRuleBuilderRuleConditional extends IAuthRuleBuilderAppliesTo { - -} +public interface IAuthRuleBuilderRuleConditional + extends IAuthRuleBuilderAppliesTo {} diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleOp.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleOp.java index aab1830a2c4..7871cb4b2e3 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleOp.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleOp.java @@ -52,7 +52,7 @@ public interface IAuthRuleBuilderRuleOp extends IAuthRuleBuilderAppliesToPatient/123 - Any Patient resource with the ID "123" will be matched *
  • 123 - Any resource of any type with the ID "123" will be matched
  • * - >* + * >* * @param theId The ID of the resource to apply (e.g. Patient/123) * @throws IllegalArgumentException If theId does not contain an ID with at least an ID part * @throws NullPointerException If theId is null @@ -75,5 +75,4 @@ public interface IAuthRuleBuilderRuleOp extends IAuthRuleBuilderAppliesTo theIds); - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleOpClassifier.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleOpClassifier.java index de7454d23e4..328fd530dc5 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleOpClassifier.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleOpClassifier.java @@ -19,10 +19,9 @@ */ package ca.uhn.fhir.rest.server.interceptor.auth; -import java.util.Collection; - import org.hl7.fhir.instance.model.api.IIdType; +import java.util.Collection; import javax.annotation.Nonnull; public interface IAuthRuleBuilderRuleOpClassifier { @@ -35,9 +34,9 @@ public interface IAuthRuleBuilderRuleOpClassifier { * inCompartment("Patient", new IdType("Patient", "123")) *

    *

-     * This call completes the rule and adds the rule to the chain. 
+     * This call completes the rule and adds the rule to the chain.
      *

    - * + * * @param theCompartmentName The name of the compartment (must not be null or blank) * @param theOwner The owner of the compartment. Note that both the resource type and ID must be populated in this ID. */ @@ -64,8 +63,10 @@ public interface IAuthRuleBuilderRuleOpClassifier { * @param theOwner The owner of the compartment. Note that both the resource type and ID must be populated in this ID. * @param theAdditionalTypeSearchParamNames A list of strings for additional resource types and search parameters which count as being in the compartment, in the form "resourcetype:search-parameter-name". */ - IAuthRuleBuilderRuleOpClassifierFinished inCompartmentWithAdditionalSearchParams(String theCompartmentName, IIdType theOwner, AdditionalCompartmentSearchParameters theAdditionalTypeSearchParamNames); - + IAuthRuleBuilderRuleOpClassifierFinished inCompartmentWithAdditionalSearchParams( + String theCompartmentName, + IIdType theOwner, + AdditionalCompartmentSearchParameters theAdditionalTypeSearchParamNames); /** * Rule applies to resources in the given compartment. @@ -75,14 +76,14 @@ public interface IAuthRuleBuilderRuleOpClassifier { * inCompartment("Patient", new IdType("Patient", "123")) *

    *

-     * This call completes the rule and adds the rule to the chain. 
+     * This call completes the rule and adds the rule to the chain.
      *

    - * + * * @param theCompartmentName The name of the compartment (must not be null or blank) * @param theOwners The owner of the compartment. Note that both the resource type and ID must be populated in this ID. */ - IAuthRuleBuilderRuleOpClassifierFinished inCompartment(String theCompartmentName, Collection theOwners); - + IAuthRuleBuilderRuleOpClassifierFinished inCompartment( + String theCompartmentName, Collection theOwners); /** * Rule applies to resources in the given compartment. @@ -106,13 +107,15 @@ public interface IAuthRuleBuilderRuleOpClassifier { * @param theAdditionalTypeSearchParamNames A {@link AdditionalCompartmentSearchParameters} which allows you to expand the search space for what is considered "in" the compartment. * **/ - IAuthRuleBuilderRuleOpClassifierFinished inCompartmentWithAdditionalSearchParams(String theCompartmentName, Collection theOwners, AdditionalCompartmentSearchParameters theAdditionalTypeSearchParamNames); - + IAuthRuleBuilderRuleOpClassifierFinished inCompartmentWithAdditionalSearchParams( + String theCompartmentName, + Collection theOwners, + AdditionalCompartmentSearchParameters theAdditionalTypeSearchParamNames); /** * Rule applies to any resource instances *

-     * This call completes the rule and adds the rule to the chain. 
+     * This call completes the rule and adds the rule to the chain.
      *

    */ IAuthRuleBuilderRuleOpClassifierFinished withAnyId(); @@ -123,7 +126,8 @@ public interface IAuthRuleBuilderRuleOpClassifier { * @param theValueSetUrl The valueset URL, e.g. "http://my-value-set" * @since 6.0.0 */ - IAuthRuleBuilderRuleOpClassifierFinished withCodeInValueSet(@Nonnull String theSearchParameterName, @Nonnull String theValueSetUrl); + IAuthRuleBuilderRuleOpClassifierFinished withCodeInValueSet( + @Nonnull String theSearchParameterName, @Nonnull String theValueSetUrl); /** * Rule applies to resources where the given search parameter would be satisfied by a code not in the given ValueSet diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleOpClassifierFinishedWithTenantId.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleOpClassifierFinishedWithTenantId.java index e2789f4a551..7122a171c4f 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleOpClassifierFinishedWithTenantId.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleOpClassifierFinishedWithTenantId.java @@ -19,6 +19,4 @@ */ package ca.uhn.fhir.rest.server.interceptor.auth; -public interface IAuthRuleBuilderRuleOpClassifierFinishedWithTenantId extends IAuthRuleFinished { - -} +public interface IAuthRuleBuilderRuleOpClassifierFinishedWithTenantId extends IAuthRuleFinished {} diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleTransactionOp.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleTransactionOp.java index 23faf474b5b..534172baa15 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleTransactionOp.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleBuilderRuleTransactionOp.java @@ -22,5 +22,4 @@ package ca.uhn.fhir.rest.server.interceptor.auth; public interface IAuthRuleBuilderRuleTransactionOp { IAuthRuleBuilderRuleOpClassifierFinished andApplyNormalRules(); - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleFinished.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleFinished.java index c76acc3ff07..5a3d2397cd6 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleFinished.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleFinished.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.rest.server.interceptor.auth; -import javax.annotation.Nullable; import java.util.List; +import javax.annotation.Nullable; public interface IAuthRuleFinished { @@ -55,5 +55,4 @@ public interface IAuthRuleFinished { * @param theQueryParameters a FHIR query parameter string. E.g. 
category=laboratory&date=ge2021 */ IAuthRuleFinished withFilterTester(String theQueryParameters); - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleTester.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleTester.java index ce60a7aec0b..05eabea9f0e 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleTester.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthRuleTester.java @@ -43,7 +43,8 @@ public interface IAuthRuleTester { class RuleTestRequest { // fake record pattern /** the mode of the calling rule context */ - @Nonnull public final PolicyEnum mode; + @Nonnull + public final PolicyEnum mode; /** * The FHIR operation being performed. * Note that this is not necessarily the same as the value obtained from invoking @@ -51,14 +52,28 @@ public interface IAuthRuleTester { * because multiple operations can be nested within * an HTTP request using FHIR transaction and batch operations */ - @Nonnull public final RestOperationTypeEnum operation; - @Nonnull public final RequestDetails requestDetails; - @Nullable public final IIdType resourceId; - @Nullable public final IBaseResource resource; - /** supplier for support services */ - @Nonnull public final IRuleApplier ruleApplier; + @Nonnull + public final RestOperationTypeEnum operation; - public RuleTestRequest(PolicyEnum theMode, @Nonnull RestOperationTypeEnum theOperation, @Nonnull RequestDetails theRequestDetails, @Nullable IIdType theResourceId, @Nullable IBaseResource theResource, @Nonnull IRuleApplier theRuleApplier) { + @Nonnull + public final RequestDetails requestDetails; + + @Nullable + public final IIdType resourceId; + + @Nullable + public final IBaseResource resource; + /** supplier for support services */ + @Nonnull + public final IRuleApplier ruleApplier; + + public RuleTestRequest( + PolicyEnum theMode, + @Nonnull RestOperationTypeEnum theOperation, + @Nonnull RequestDetails theRequestDetails, + @Nullable IIdType theResourceId, + @Nullable IBaseResource theResource, + @Nonnull IRuleApplier theRuleApplier) { Validate.notNull(theMode); Validate.notNull(theOperation); Validate.notNull(theRequestDetails); @@ -92,16 +107,20 @@ public interface IAuthRuleTester { * @since 6.1.0 */ default boolean matches(RuleTestRequest theRequest) { - return this.matches(theRequest.operation, theRequest.requestDetails, theRequest.resourceId, theRequest.resource); + return this.matches( + theRequest.operation, theRequest.requestDetails, theRequest.resourceId, theRequest.resource); } - /** * DO NOT IMPLEMENT - Old api. {@link #matches(RuleTestRequest)} instead. 
* @deprecated */ @Deprecated(since = "6.1.0") - default boolean matches(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IIdType theInputResourceId, IBaseResource theInputResource) { + default boolean matches( + RestOperationTypeEnum theOperation, + RequestDetails theRequestDetails, + IIdType theInputResourceId, + IBaseResource theInputResource) { return true; } @@ -127,8 +146,8 @@ public interface IAuthRuleTester { * @deprecated */ @Deprecated(since = "6.1.0") - default boolean matchesOutput(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theOutputResource) { + default boolean matchesOutput( + RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theOutputResource) { return true; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthorizationSearchParamMatcher.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthorizationSearchParamMatcher.java index 9a79d423e2c..7b941dc6165 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthorizationSearchParamMatcher.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IAuthorizationSearchParamMatcher.java @@ -50,9 +50,11 @@ public interface IAuthorizationSearchParamMatcher { class MatchResult { // fake record pattern /** match result */ - @Nonnull public final Match match; + @Nonnull + public final Match match; /** the reason for the UNSUPPORTED result */ - @Nullable public final String unsupportedReason; + @Nullable + public final String unsupportedReason; public static MatchResult buildMatched() { return new MatchResult(Match.MATCH, null); @@ -70,6 +72,5 @@ public interface IAuthorizationSearchParamMatcher { this.match = myMatch; this.unsupportedReason = myUnsupportedReason; } - } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IRuleApplier.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IRuleApplier.java index acef0833c63..7f88de3b9ac 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IRuleApplier.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/IRuleApplier.java @@ -21,12 +21,11 @@ package ca.uhn.fhir.rest.server.interceptor.auth; import ca.uhn.fhir.context.support.IValidationSupport; import ca.uhn.fhir.interceptor.api.Pointcut; -import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.instance.model.api.IIdType; - import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor.Verdict; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; import org.slf4j.Logger; import javax.annotation.Nonnull; @@ -37,7 +36,13 @@ public interface IRuleApplier { @Nonnull Logger getTroubleshootingLog(); - Verdict applyRulesAndReturnDecision(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, Pointcut thePointcut); + Verdict applyRulesAndReturnDecision( + RestOperationTypeEnum theOperation, + RequestDetails theRequestDetails, + IBaseResource theInputResource, + IIdType theInputResourceId, + IBaseResource theOutputResource, + Pointcut thePointcut); @Nullable IValidationSupport getValidationSupport(); @@ -45,5 +50,6 @@ 
public interface IRuleApplier { @Nullable default IAuthorizationSearchParamMatcher getSearchParamMatcher() { return null; - }; + } + ; } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/OperationRule.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/OperationRule.java index 8efd9d1e194..2a10487e1bc 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/OperationRule.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/OperationRule.java @@ -79,7 +79,15 @@ class OperationRule extends BaseRule implements IAuthRule { } @Override - public Verdict applyRule(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, IRuleApplier theRuleApplier, Set theFlags, Pointcut thePointcut) { + public Verdict applyRule( + RestOperationTypeEnum theOperation, + RequestDetails theRequestDetails, + IBaseResource theInputResource, + IIdType theInputResourceId, + IBaseResource theOutputResource, + IRuleApplier theRuleApplier, + Set theFlags, + Pointcut thePointcut) { FhirContext ctx = theRequestDetails.getServer().getFhirContext(); // Operation rules apply to the execution of the operation itself, not to side effects like @@ -131,7 +139,8 @@ class OperationRule extends BaseRule implements IAuthRule { } if (requestResourceId != null) { if (myAppliesToIds != null) { - String instanceId = requestResourceId.toUnqualifiedVersionless().getValue(); + String instanceId = + requestResourceId.toUnqualifiedVersionless().getValue(); for (IIdType next : myAppliesToIds) { if (next.toUnqualifiedVersionless().getValue().equals(instanceId)) { applies = true; @@ -143,7 +152,7 @@ class OperationRule extends BaseRule implements IAuthRule { // TODO: Convert to a map of strings and keep the result for (Class next : myAppliesToInstancesOfType) { String resName = ctx.getResourceType(next); - if (resName.equals(requestResourceId .getResourceType())) { + if (resName.equals(requestResourceId.getResourceType())) { applies = true; break; } @@ -198,7 +207,13 @@ class OperationRule extends BaseRule implements IAuthRule { return null; } - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } /** diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/PolicyEnum.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/PolicyEnum.java index d5a07389126..e176ca26d35 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/PolicyEnum.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/PolicyEnum.java @@ -20,8 +20,6 @@ package ca.uhn.fhir.rest.server.interceptor.auth; public enum PolicyEnum { - ALLOW, DENY - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBuilder.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBuilder.java index bcc388d89fe..f7505fd057f 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBuilder.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBuilder.java @@ -30,7 +30,6 @@ import org.apache.commons.lang3.Validate; import 
org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -40,12 +39,14 @@ import java.util.List; import java.util.Optional; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; public class RuleBuilder implements IAuthRuleBuilder { - private static final ConcurrentHashMap, String> ourTypeToName = new ConcurrentHashMap<>(); + private static final ConcurrentHashMap, String> ourTypeToName = + new ConcurrentHashMap<>(); private final ArrayList myRules; private IAuthRuleBuilderRule myAllow; private IAuthRuleBuilderRule myDeny; @@ -113,7 +114,10 @@ public class RuleBuilder implements IAuthRuleBuilder { return new RuleBuilderFinished(rule); } - private class RuleBuilderFinished implements IAuthRuleFinished, IAuthRuleBuilderRuleOpClassifierFinished, IAuthRuleBuilderRuleOpClassifierFinishedWithTenantId { + private class RuleBuilderFinished + implements IAuthRuleFinished, + IAuthRuleBuilderRuleOpClassifierFinished, + IAuthRuleBuilderRuleOpClassifierFinishedWithTenantId { protected final BaseRule myOpRule; private List myTesters; @@ -148,7 +152,8 @@ public class RuleBuilder implements IAuthRuleBuilder { } @Override - public IAuthRuleBuilderRuleOpClassifierFinishedWithTenantId forTenantIds(final Collection theTenantIds) { + public IAuthRuleBuilderRuleOpClassifierFinishedWithTenantId forTenantIds( + final Collection theTenantIds) { withTester(new TenantCheckingTester(theTenantIds, true)); return this; } @@ -166,7 +171,8 @@ public class RuleBuilder implements IAuthRuleBuilder { } @Override - public IAuthRuleBuilderRuleOpClassifierFinishedWithTenantId notForTenantIds(final Collection theTenantIds) { + public IAuthRuleBuilderRuleOpClassifierFinishedWithTenantId notForTenantIds( + final Collection theTenantIds) { withTester(new TenantCheckingTester(theTenantIds, false)); return this; } @@ -199,7 +205,11 @@ public class RuleBuilder implements IAuthRuleBuilder { } @Override - public boolean matches(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IIdType theInputResourceId, IBaseResource theInputResource) { + public boolean matches( + RestOperationTypeEnum theOperation, + RequestDetails theRequestDetails, + IIdType theInputResourceId, + IBaseResource theInputResource) { if (!myTenantIds.contains(theRequestDetails.getTenantId())) { return !myOutcome; } @@ -208,7 +218,10 @@ public class RuleBuilder implements IAuthRuleBuilder { } @Override - public boolean matchesOutput(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theOutputResource) { + public boolean matchesOutput( + RestOperationTypeEnum theOperation, + RequestDetails theRequestDetails, + IBaseResource theOutputResource) { if (!myTenantIds.contains(theRequestDetails.getTenantId())) { return !myOutcome; } @@ -218,7 +231,8 @@ public class RuleBuilder implements IAuthRuleBuilder { private boolean matchesResource(IBaseResource theResource) { if (theResource != null) { - RequestPartitionId partitionId = (RequestPartitionId) theResource.getUserData(Constants.RESOURCE_PARTITION_ID); + RequestPartitionId partitionId = + (RequestPartitionId) theResource.getUserData(Constants.RESOURCE_PARTITION_ID); if (partitionId != null) { String partitionNameOrNull = partitionId.getFirstPartitionNameOrNull(); if (partitionNameOrNull == null || 
!myTenantIds.contains(partitionNameOrNull)) { @@ -358,7 +372,8 @@ public class RuleBuilder implements IAuthRuleBuilder { return new RuleBuilderRuleConditionalClassifier(); } - public class RuleBuilderRuleConditionalClassifier extends RuleBuilderFinished implements IAuthRuleBuilderRuleConditionalClassifier { + public class RuleBuilderRuleConditionalClassifier extends RuleBuilderFinished + implements IAuthRuleBuilderRuleConditionalClassifier { RuleBuilderRuleConditionalClassifier() { super(new RuleImplConditional(myRuleName)); @@ -373,10 +388,8 @@ public class RuleBuilder implements IAuthRuleBuilder { rule.setAppliesToTypes(myAppliesToTypes); rule.addTesters(getTesters()); myRules.add(rule); - } } - } private class RuleBuilderRuleOp implements IAuthRuleBuilderRuleOp, IAuthRuleBuilderRuleOpDelete { @@ -425,7 +438,6 @@ public class RuleBuilder implements IAuthRuleBuilder { } } - @Override public IAuthRuleBuilderRuleOpClassifier resourcesOfType(Class theType) { Validate.notNull(theType, "theType must not be null"); @@ -460,7 +472,8 @@ public class RuleBuilder implements IAuthRuleBuilder { private Collection myInCompartmentOwners; private Collection myAppliesToInstances; private RuleImplOp myRule; - private AdditionalCompartmentSearchParameters myAdditionalSearchParamsForCompartmentTypes = new AdditionalCompartmentSearchParameters(); + private AdditionalCompartmentSearchParameters myAdditionalSearchParamsForCompartmentTypes = + new AdditionalCompartmentSearchParameters(); /** * Constructor @@ -504,12 +517,17 @@ public class RuleBuilder implements IAuthRuleBuilder { } @Override - public IAuthRuleBuilderRuleOpClassifierFinished inCompartment(String theCompartmentName, Collection theOwners) { - return inCompartmentWithAdditionalSearchParams(theCompartmentName, theOwners, new AdditionalCompartmentSearchParameters()); + public IAuthRuleBuilderRuleOpClassifierFinished inCompartment( + String theCompartmentName, Collection theOwners) { + return inCompartmentWithAdditionalSearchParams( + theCompartmentName, theOwners, new AdditionalCompartmentSearchParameters()); } @Override - public IAuthRuleBuilderRuleOpClassifierFinished inCompartmentWithAdditionalSearchParams(String theCompartmentName, Collection theOwners, AdditionalCompartmentSearchParameters theAdditionalTypeSearchParams) { + public IAuthRuleBuilderRuleOpClassifierFinished inCompartmentWithAdditionalSearchParams( + String theCompartmentName, + Collection theOwners, + AdditionalCompartmentSearchParameters theAdditionalTypeSearchParams) { Validate.notBlank(theCompartmentName, "theCompartmentName must not be null"); Validate.notNull(theOwners, "theOwners must not be null"); Validate.noNullElements(theOwners, "theOwners must not contain any null elements"); @@ -524,12 +542,17 @@ public class RuleBuilder implements IAuthRuleBuilder { } @Override - public IAuthRuleBuilderRuleOpClassifierFinished inCompartment(String theCompartmentName, IIdType theOwner) { - return inCompartmentWithAdditionalSearchParams(theCompartmentName, theOwner, new AdditionalCompartmentSearchParameters()); + public IAuthRuleBuilderRuleOpClassifierFinished inCompartment( + String theCompartmentName, IIdType theOwner) { + return inCompartmentWithAdditionalSearchParams( + theCompartmentName, theOwner, new AdditionalCompartmentSearchParameters()); } @Override - public IAuthRuleBuilderRuleOpClassifierFinished inCompartmentWithAdditionalSearchParams(String theCompartmentName, IIdType theOwner, AdditionalCompartmentSearchParameters theAdditionalTypeSearchParamNames) { + public 
IAuthRuleBuilderRuleOpClassifierFinished inCompartmentWithAdditionalSearchParams( + String theCompartmentName, + IIdType theOwner, + AdditionalCompartmentSearchParameters theAdditionalTypeSearchParamNames) { Validate.notBlank(theCompartmentName, "theCompartmentName must not be null"); Validate.notNull(theOwner, "theOwner must not be null"); validateOwner(theOwner); @@ -547,13 +570,18 @@ public class RuleBuilder implements IAuthRuleBuilder { return finished(); } - private Optional findMatchingRule() { return myRules.stream() - .filter(RuleImplOp.class::isInstance) - .map(RuleImplOp.class::cast) - .filter(rule -> rule.matches(myRuleOp, myAppliesTo, myAppliesToInstances, myAppliesToTypes, myClassifierType, myInCompartmentName)) - .findFirst(); + .filter(RuleImplOp.class::isInstance) + .map(RuleImplOp.class::cast) + .filter(rule -> rule.matches( + myRuleOp, + myAppliesTo, + myAppliesToInstances, + myAppliesToTypes, + myClassifierType, + myInCompartmentName)) + .findFirst(); } private void validateOwner(IIdType theOwner) { @@ -568,7 +596,8 @@ public class RuleBuilder implements IAuthRuleBuilder { } @Override - public IAuthRuleBuilderRuleOpClassifierFinished withCodeInValueSet(@Nonnull String theSearchParameterName, @Nonnull String theValueSetUrl) { + public IAuthRuleBuilderRuleOpClassifierFinished withCodeInValueSet( + @Nonnull String theSearchParameterName, @Nonnull String theValueSetUrl) { SearchParameterAndValueSetRuleImpl rule = new SearchParameterAndValueSetRuleImpl(myRuleName); rule.setSearchParameterName(theSearchParameterName); rule.setValueSetUrl(theValueSetUrl); @@ -577,7 +606,8 @@ public class RuleBuilder implements IAuthRuleBuilder { } @Override - public IAuthRuleFinished withCodeNotInValueSet(@Nonnull String theSearchParameterName, @Nonnull String theValueSetUrl) { + public IAuthRuleFinished withCodeNotInValueSet( + @Nonnull String theSearchParameterName, @Nonnull String theValueSetUrl) { SearchParameterAndValueSetRuleImpl rule = new SearchParameterAndValueSetRuleImpl(myRuleName); rule.setSearchParameterName(theSearchParameterName); rule.setValueSetUrl(theValueSetUrl); @@ -586,7 +616,8 @@ public class RuleBuilder implements IAuthRuleBuilder { } @Override - public IAuthRuleFinished inCompartmentWithFilter(String theCompartmentName, IIdType theIdElement, String theFilter) { + public IAuthRuleFinished inCompartmentWithFilter( + String theCompartmentName, IIdType theIdElement, String theFilter) { Validate.notBlank(theCompartmentName, "theCompartmentName must not be null"); Validate.notNull(theIdElement, "theOwner must not be null"); validateOwner(theIdElement); @@ -598,19 +629,19 @@ public class RuleBuilder implements IAuthRuleBuilder { // todo JR/MB this is a quick and dirty fix at the last minute before the release. 
// We should revisit approach so that findMatchingRule() takes the filters into account // and only merges the rules if the filters are compatible -// Optional oRule = findMatchingRule(); -// if (oRule.isPresent()) { -// RuleImplOp rule = oRule.get(); -// rule.setAdditionalSearchParamsForCompartmentTypes(myAdditionalSearchParamsForCompartmentTypes); -// rule.addClassifierCompartmentOwner(theIdElement); -// return new RuleBuilderFinished(rule); -// } + // Optional oRule = findMatchingRule(); + // if (oRule.isPresent()) { + // RuleImplOp rule = oRule.get(); + // + // rule.setAdditionalSearchParamsForCompartmentTypes(myAdditionalSearchParamsForCompartmentTypes); + // rule.addClassifierCompartmentOwner(theIdElement); + // return new RuleBuilderFinished(rule); + // } myInCompartmentOwners = Collections.singletonList(theIdElement); RuleBuilderFinished result = finished(); result.withTester(new FhirQueryRuleTester(theFilter)); return result; - } @Override @@ -627,7 +658,6 @@ public class RuleBuilder implements IAuthRuleBuilder { return new RuleBuilderFinished(myRule); } } - } private class RuleBuilderRuleOperation implements IAuthRuleBuilderOperation { @@ -697,7 +727,8 @@ public class RuleBuilder implements IAuthRuleBuilder { } @Override - public IAuthRuleBuilderOperationNamedAndScoped onInstancesOfType(Class theType) { + public IAuthRuleBuilderOperationNamedAndScoped onInstancesOfType( + Class theType) { validateType(theType); OperationRule rule = createRule(); @@ -752,9 +783,7 @@ public class RuleBuilder implements IAuthRuleBuilder { return new RuleBuilderFinished(myRule); } } - } - } private class RuleBuilderRuleTransaction implements IAuthRuleBuilderRuleTransaction { @@ -776,9 +805,7 @@ public class RuleBuilder implements IAuthRuleBuilder { myRules.add(rule); return new RuleBuilderFinished(rule); } - } - } private class PatchBuilder implements IAuthRuleBuilderPatch { @@ -789,9 +816,8 @@ public class RuleBuilder implements IAuthRuleBuilder { @Override public IAuthRuleFinished allRequests() { - BaseRule rule = new RuleImplPatch(myRuleName) - .setAllRequests(true) - .setMode(myRuleMode); + BaseRule rule = + new RuleImplPatch(myRuleName).setAllRequests(true).setMode(myRuleMode); myRules.add(rule); return new RuleBuilderFinished(rule); } @@ -806,8 +832,8 @@ public class RuleBuilder implements IAuthRuleBuilder { @Override public IAuthRuleFinished allRequests() { BaseRule rule = new RuleImplUpdateHistoryRewrite(myRuleName) - .setAllRequests(true) - .setMode(myRuleMode); + .setAllRequests(true) + .setMode(myRuleMode); myRules.add(rule); return new RuleBuilderFinished(rule); } @@ -836,7 +862,6 @@ public class RuleBuilder implements IAuthRuleBuilder { return new RuleBuilderBulkExportWithTarget(rule); } - @Override public IAuthRuleBuilderRuleBulkExportWithTarget patientExportOnPatient(@Nonnull String theFocusResourceId) { RuleBulkExportImpl rule = new RuleBulkExportImpl(myRuleName); @@ -846,6 +871,7 @@ public class RuleBuilder implements IAuthRuleBuilder { return new RuleBuilderBulkExportWithTarget(rule); } + @Override public IAuthRuleBuilderRuleBulkExportWithTarget patientExportOnGroup(@Nonnull String theFocusResourceId) { RuleBulkExportImpl rule = new RuleBulkExportImpl(myRuleName); @@ -876,13 +902,13 @@ public class RuleBuilder implements IAuthRuleBuilder { return new RuleBuilderBulkExportWithTarget(rule); } - private class RuleBuilderBulkExportWithTarget extends RuleBuilderFinished implements IAuthRuleBuilderRuleBulkExportWithTarget { + private class RuleBuilderBulkExportWithTarget extends 
RuleBuilderFinished + implements IAuthRuleBuilderRuleBulkExportWithTarget { private final RuleBulkExportImpl myRule; private RuleBuilderBulkExportWithTarget(RuleBulkExportImpl theRule) { super(theRule); myRule = theRule; - } @Override @@ -904,5 +930,4 @@ public class RuleBuilder implements IAuthRuleBuilder { } return retVal; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBulkExportImpl.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBulkExportImpl.java index 1d8839cb26e..9fa1ac35d8f 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBulkExportImpl.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleBulkExportImpl.java @@ -50,7 +50,15 @@ public class RuleBulkExportImpl extends BaseRule { } @Override - public AuthorizationInterceptor.Verdict applyRule(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, IRuleApplier theRuleApplier, Set theFlags, Pointcut thePointcut) { + public AuthorizationInterceptor.Verdict applyRule( + RestOperationTypeEnum theOperation, + RequestDetails theRequestDetails, + IBaseResource theInputResource, + IIdType theInputResourceId, + IBaseResource theOutputResource, + IRuleApplier theRuleApplier, + Set theFlags, + Pointcut thePointcut) { if (thePointcut != Pointcut.STORAGE_INITIATE_BULK_EXPORT) { return null; } @@ -59,7 +67,8 @@ public class RuleBulkExportImpl extends BaseRule { return null; } - BulkExportJobParameters options = (BulkExportJobParameters) theRequestDetails.getAttribute(AuthorizationInterceptor.REQUEST_ATTRIBUTE_BULK_DATA_EXPORT_OPTIONS); + BulkExportJobParameters options = (BulkExportJobParameters) + theRequestDetails.getAttribute(AuthorizationInterceptor.REQUEST_ATTRIBUTE_BULK_DATA_EXPORT_OPTIONS); if (!myWantAnyStyle && options.getExportStyle() != myWantExportStyle) { return null; @@ -71,55 +80,84 @@ public class RuleBulkExportImpl extends BaseRule { } for (String next : options.getResourceTypes()) { if (!myResourceTypes.contains(next)) { - return new AuthorizationInterceptor.Verdict(PolicyEnum.DENY,this); + return new AuthorizationInterceptor.Verdict(PolicyEnum.DENY, this); } } } if (myWantAnyStyle || myWantExportStyle == BulkExportJobParameters.ExportStyle.SYSTEM) { - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } if (isNotBlank(myGroupId) && options.getGroupId() != null) { - String expectedGroupId = new IdDt(myGroupId).toUnqualifiedVersionless().getValue(); - String actualGroupId = new IdDt(options.getGroupId()).toUnqualifiedVersionless().getValue(); + String expectedGroupId = + new IdDt(myGroupId).toUnqualifiedVersionless().getValue(); + String actualGroupId = + new IdDt(options.getGroupId()).toUnqualifiedVersionless().getValue(); if (Objects.equals(expectedGroupId, actualGroupId)) { - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } } // TODO This is a _bad bad bad implementation_ but we are out of time. - // 1. 
If a claimed resource ID is present in the parameters, and the permission contains one, check for membership + // 1. If a claimed resource ID is present in the parameters, and the permission contains one, check for + // membership // 2. If not a member, Deny. if (myWantExportStyle == BulkExportJobParameters.ExportStyle.PATIENT && isNotBlank(myPatientId)) { - final String expectedPatientId = new IdDt(myPatientId).toUnqualifiedVersionless().getValue(); + final String expectedPatientId = + new IdDt(myPatientId).toUnqualifiedVersionless().getValue(); if (!options.getPatientIds().isEmpty()) { ourLog.debug("options.getPatientIds() != null"); final String actualPatientIds = options.getPatientIds().stream() - .map(t -> new IdDt(t).toUnqualifiedVersionless().getValue()) - .collect(Collectors.joining(",")); + .map(t -> new IdDt(t).toUnqualifiedVersionless().getValue()) + .collect(Collectors.joining(",")); if (actualPatientIds.contains(expectedPatientId)) { - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } - return new AuthorizationInterceptor.Verdict(PolicyEnum.DENY,this); + return new AuthorizationInterceptor.Verdict(PolicyEnum.DENY, this); } final List filters = options.getFilters(); - // TODO: LD: This admittedly adds more to the tech debt above, and should really be addressed by https://github.com/hapifhir/hapi-fhir/issues/4990 - if (! filters.isEmpty()) { + // TODO: LD: This admittedly adds more to the tech debt above, and should really be addressed by + // https://github.com/hapifhir/hapi-fhir/issues/4990 + if (!filters.isEmpty()) { ourLog.debug("filters not empty"); final Set patientIdsInFilters = filters.stream() - .filter(filter -> filter.startsWith("Patient?_id=")) - .map(filter -> filter.replace("?_id=", "/")) - .collect(Collectors.toUnmodifiableSet()); + .filter(filter -> filter.startsWith("Patient?_id=")) + .map(filter -> filter.replace("?_id=", "/")) + .collect(Collectors.toUnmodifiableSet()); if (patientIdsInFilters.contains(expectedPatientId)) { - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } - return new AuthorizationInterceptor.Verdict(PolicyEnum.DENY,this); + return new AuthorizationInterceptor.Verdict(PolicyEnum.DENY, this); } ourLog.debug("patientIds and filters both empty"); } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplConditional.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplConditional.java index 3731d683937..17dbf764464 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplConditional.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplConditional.java @@ -39,8 +39,15 @@ public class RuleImplConditional extends BaseRule implements IAuthRule { } @Override - public Verdict applyRule(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, - IRuleApplier theRuleApplier, Set theFlags, Pointcut thePointcut) { + public Verdict applyRule( + RestOperationTypeEnum theOperation, + 
RequestDetails theRequestDetails, + IBaseResource theInputResource, + IIdType theInputResourceId, + IBaseResource theOutputResource, + IRuleApplier theRuleApplier, + Set theFlags, + Pointcut thePointcut) { assert !(theInputResource != null && theOutputResource != null); if (theInputResourceId != null && theInputResourceId.hasIdPart()) { @@ -66,7 +73,8 @@ public class RuleImplConditional extends BaseRule implements IAuthRule { return null; } } else { - String inputResourceName = theRequestDetails.getFhirContext().getResourceType(theInputResource); + String inputResourceName = + theRequestDetails.getFhirContext().getResourceType(theInputResource); if (!myAppliesToTypes.contains(inputResourceName)) { return null; } @@ -74,7 +82,13 @@ public class RuleImplConditional extends BaseRule implements IAuthRule { break; } - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } return null; @@ -91,5 +105,4 @@ public class RuleImplConditional extends BaseRule implements IAuthRule { void setOperationType(RestOperationTypeEnum theOperationType) { myOperationType = theOperationType; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplOp.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplOp.java index 7ce9c00d89d..5b108b982c4 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplOp.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplOp.java @@ -39,15 +39,12 @@ import ca.uhn.fhir.util.bundle.BundleEntryParts; import com.google.common.annotations.VisibleForTesting; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.builder.ToStringBuilder; -import org.apache.commons.lang3.builder.ToStringStyle; import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -56,6 +53,8 @@ import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.defaultString; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -94,8 +93,15 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { } @Override - public Verdict applyRule(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, - IRuleApplier theRuleApplier, Set theFlags, Pointcut thePointcut) { + public Verdict applyRule( + RestOperationTypeEnum theOperation, + RequestDetails theRequestDetails, + IBaseResource theInputResource, + IIdType theInputResourceId, + IBaseResource theOutputResource, + IRuleApplier theRuleApplier, + Set theFlags, + Pointcut thePointcut) { FhirContext ctx = theRequestDetails.getFhirContext(); @@ -113,13 +119,27 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { break; case SEARCH_SYSTEM: case HISTORY_SYSTEM: - if 
(theFlags.contains(AuthorizationFlagsEnum.DO_NOT_PROACTIVELY_BLOCK_COMPARTMENT_READ_ACCESS)) { - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + if (theFlags.contains( + AuthorizationFlagsEnum.DO_NOT_PROACTIVELY_BLOCK_COMPARTMENT_READ_ACCESS)) { + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } break; case SEARCH_TYPE: - if (theFlags.contains(AuthorizationFlagsEnum.DO_NOT_PROACTIVELY_BLOCK_COMPARTMENT_READ_ACCESS)) { - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + if (theFlags.contains( + AuthorizationFlagsEnum.DO_NOT_PROACTIVELY_BLOCK_COMPARTMENT_READ_ACCESS)) { + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } target.resourceType = theRequestDetails.getResourceName(); target.setSearchParams(theRequestDetails); @@ -133,21 +153,41 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { } break; case HISTORY_TYPE: - if (theFlags.contains(AuthorizationFlagsEnum.DO_NOT_PROACTIVELY_BLOCK_COMPARTMENT_READ_ACCESS)) { - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + if (theFlags.contains( + AuthorizationFlagsEnum.DO_NOT_PROACTIVELY_BLOCK_COMPARTMENT_READ_ACCESS)) { + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } target.resourceType = theRequestDetails.getResourceName(); break; case HISTORY_INSTANCE: - if (theFlags.contains(AuthorizationFlagsEnum.DO_NOT_PROACTIVELY_BLOCK_COMPARTMENT_READ_ACCESS)) { - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + if (theFlags.contains( + AuthorizationFlagsEnum.DO_NOT_PROACTIVELY_BLOCK_COMPARTMENT_READ_ACCESS)) { + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } target.resourceIds = Collections.singleton(theInputResourceId); break; case GET_PAGE: - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); - // None of the following are checked on the way in + // None of the following are checked on the way in case ADD_TAGS: case DELETE_TAGS: case GET_TAGS: @@ -178,7 +218,10 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { if (theInputResource == null && theInputResourceId == null) { return null; } - if (theRequestDetails.isRewriteHistory() && theRequestDetails.getId() != null && theRequestDetails.getId().hasVersionIdPart() && theOperation == RestOperationTypeEnum.UPDATE) { + if (theRequestDetails.isRewriteHistory() + && theRequestDetails.getId() != null + && theRequestDetails.getId().hasVersionIdPart() + && theOperation == RestOperationTypeEnum.UPDATE) { return null; } switch (theOperation) { @@ -221,7 +264,13 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { case DELETE: if (theOperation == RestOperationTypeEnum.DELETE) { if (thePointcut == Pointcut.STORAGE_PRE_DELETE_EXPUNGE && myAppliesToDeleteExpunge) { - return newVerdict(theOperation, 
theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } if (myAppliesToDeleteCascade != (thePointcut == Pointcut.STORAGE_CASCADE_DELETE)) { return null; @@ -232,10 +281,24 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { if (theInputResourceId.hasIdPart() == false) { // This is a conditional DELETE, so we'll authorize it using STORAGE events instead // so just let it through for now.. - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } - if (theInputResource == null && myClassifierCompartmentOwners != null && myClassifierCompartmentOwners.size() > 0) { - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + if (theInputResource == null + && myClassifierCompartmentOwners != null + && myClassifierCompartmentOwners.size() > 0) { + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } target.resource = theInputResource; @@ -245,28 +308,69 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { } break; case GRAPHQL: - return applyRuleToGraphQl(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, thePointcut, theRuleApplier); + return applyRuleToGraphQl( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + thePointcut, + theRuleApplier); case TRANSACTION: - return applyRuleToTransaction(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier, thePointcut, ctx); + return applyRuleToTransaction( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier, + thePointcut, + ctx); case ALL: - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); case METADATA: if (theOperation == RestOperationTypeEnum.METADATA) { - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } return null; default: // Should not happen - throw new IllegalStateException(Msg.code(335) + "Unable to apply security to event of type " + theOperation); + throw new IllegalStateException( + Msg.code(335) + "Unable to apply security to event of type " + theOperation); } switch (myAppliesTo) { case INSTANCES: - return applyRuleToInstances(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, target, theRuleApplier); + return applyRuleToInstances( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + target, + theRuleApplier); case ALL_RESOURCES: if (target.resourceType != null) { if (myClassifierType == ClassifierTypeEnum.ANY_ID) { - return newVerdict(theOperation, 
theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } } break; @@ -282,7 +386,8 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { if (target.resourceIds != null) { for (IIdType nextRequestAppliesToResourceId : target.resourceIds) { if (nextRequestAppliesToResourceId.hasResourceType()) { - String nextRequestAppliesToResourceIdType = nextRequestAppliesToResourceId.getResourceType(); + String nextRequestAppliesToResourceIdType = + nextRequestAppliesToResourceId.getResourceType(); if (myAppliesToTypes.contains(nextRequestAppliesToResourceIdType) == false) { return null; } @@ -294,17 +399,33 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { return null; } if (myClassifierType == ClassifierTypeEnum.ANY_ID) { - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } else if (myClassifierType == ClassifierTypeEnum.IN_COMPARTMENT) { // ok we'll check below } } break; default: - throw new IllegalStateException(Msg.code(336) + "Unable to apply security to event of applies to type " + myAppliesTo); + throw new IllegalStateException( + Msg.code(336) + "Unable to apply security to event of applies to type " + myAppliesTo); } - return applyRuleLogic(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theFlags, ctx, target, theRuleApplier); + return applyRuleLogic( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theFlags, + ctx, + target, + theRuleApplier); } /** @@ -313,22 +434,54 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { * * TODO: At this point {@link RuleImplOp} handles "any ID" and "in compartment" logic - It would be nice to split these into separate classes. 
*/ - protected Verdict applyRuleLogic(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, Set theFlags, FhirContext theFhirContext, RuleTarget theRuleTarget, IRuleApplier theRuleApplier) { + protected Verdict applyRuleLogic( + RestOperationTypeEnum theOperation, + RequestDetails theRequestDetails, + IBaseResource theInputResource, + IIdType theInputResourceId, + IBaseResource theOutputResource, + Set theFlags, + FhirContext theFhirContext, + RuleTarget theRuleTarget, + IRuleApplier theRuleApplier) { ourLog.trace("applyRuleLogic {} {}", theOperation, theRuleTarget); switch (myClassifierType) { case ANY_ID: break; case IN_COMPARTMENT: - return applyRuleToCompartment(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theFlags, theFhirContext, theRuleTarget, theRuleApplier); + return applyRuleToCompartment( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theFlags, + theFhirContext, + theRuleTarget, + theRuleApplier); default: - throw new IllegalStateException(Msg.code(337) + "Unable to apply security to event of applies to type " + myAppliesTo); + throw new IllegalStateException( + Msg.code(337) + "Unable to apply security to event of applies to type " + myAppliesTo); } - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } @Nullable - private Verdict applyRuleToGraphQl(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, Pointcut thePointcut, IRuleApplier theRuleApplier) { + private Verdict applyRuleToGraphQl( + RestOperationTypeEnum theOperation, + RequestDetails theRequestDetails, + IBaseResource theInputResource, + IIdType theInputResourceId, + IBaseResource theOutputResource, + Pointcut thePointcut, + IRuleApplier theRuleApplier) { if (theOperation == RestOperationTypeEnum.GRAPHQL_REQUEST) { // Make sure that the requestor actually has sufficient access to see the given resource @@ -336,21 +489,35 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { return null; } - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } else { return null; } } @Nullable - private Verdict applyRuleToCompartment(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, Set theFlags, FhirContext ctx, RuleTarget target, IRuleApplier theRuleApplier) { + private Verdict applyRuleToCompartment( + RestOperationTypeEnum theOperation, + RequestDetails theRequestDetails, + IBaseResource theInputResource, + IIdType theInputResourceId, + IBaseResource theOutputResource, + Set theFlags, + FhirContext ctx, + RuleTarget target, + IRuleApplier theRuleApplier) { FhirTerser t = ctx.newTerser(); boolean foundMatch = false; if (target.resourceIds != null && target.resourceIds.size() > 0) { - boolean haveOwnersForAll = target.resourceIds - .stream() - .allMatch(n -> 
myClassifierCompartmentOwners.contains(n.toUnqualifiedVersionless())); + boolean haveOwnersForAll = target.resourceIds.stream() + .allMatch(n -> myClassifierCompartmentOwners.contains(n.toUnqualifiedVersionless())); if (haveOwnersForAll) { foundMatch = true; } @@ -361,9 +528,12 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { Set additionalSearchParamNames = null; if (myAdditionalCompartmentSearchParamMap != null) { - additionalSearchParamNames = myAdditionalCompartmentSearchParamMap.getSearchParamNamesForResourceType(ctx.getResourceType(target.resource)); + additionalSearchParamNames = + myAdditionalCompartmentSearchParamMap.getSearchParamNamesForResourceType( + ctx.getResourceType(target.resource)); } - if (t.isSourceInCompartmentForTarget(myClassifierCompartmentName, target.resource, next, additionalSearchParamNames)) { + if (t.isSourceInCompartmentForTarget( + myClassifierCompartmentName, target.resource, next, additionalSearchParamNames)) { foundMatch = true; break; } @@ -376,7 +546,16 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { * it makes sense. */ if (next.getResourceType().equals(target.resourceType)) { - Verdict verdict = checkForSearchParameterMatchingCompartmentAndReturnSuccessfulVerdictOrNull(target.getSearchParams(), next, SP_RES_ID, theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + Verdict verdict = checkForSearchParameterMatchingCompartmentAndReturnSuccessfulVerdictOrNull( + target.getSearchParams(), + next, + SP_RES_ID, + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); if (verdict != null) { return verdict; } @@ -394,14 +573,21 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { * might be in the given compartment. */ if (isNotBlank(target.resourceType)) { - RuntimeResourceDefinition sourceDef = theRequestDetails.getFhirContext().getResourceDefinition(target.resourceType); + RuntimeResourceDefinition sourceDef = + theRequestDetails.getFhirContext().getResourceDefinition(target.resourceType); String compartmentOwnerResourceType = next.getResourceType(); if (!StringUtils.equals(target.resourceType, compartmentOwnerResourceType)) { - List params = sourceDef.getSearchParamsForCompartmentName(compartmentOwnerResourceType); + List params = + sourceDef.getSearchParamsForCompartmentName(compartmentOwnerResourceType); - Set additionalParamNames = myAdditionalCompartmentSearchParamMap.getSearchParamNamesForResourceType(sourceDef.getName()); - List additionalParams = additionalParamNames.stream().map(sourceDef::getSearchParam).filter(Objects::nonNull).collect(Collectors.toList()); + Set additionalParamNames = + myAdditionalCompartmentSearchParamMap.getSearchParamNamesForResourceType( + sourceDef.getName()); + List additionalParams = additionalParamNames.stream() + .map(sourceDef::getSearchParam) + .filter(Objects::nonNull) + .collect(Collectors.toList()); if (params == null || params.isEmpty()) { params = additionalParams; } else { @@ -419,16 +605,34 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { * would match the given compartment. In this case, this * is a very effective mechanism. 
*/ - if (target.getSearchParams() != null && !theFlags.contains(AuthorizationFlagsEnum.DO_NOT_PROACTIVELY_BLOCK_COMPARTMENT_READ_ACCESS)) { + if (target.getSearchParams() != null + && !theFlags.contains( + AuthorizationFlagsEnum.DO_NOT_PROACTIVELY_BLOCK_COMPARTMENT_READ_ACCESS)) { for (RuntimeSearchParam nextRuntimeSearchParam : params) { String name = nextRuntimeSearchParam.getName(); - Verdict verdict = checkForSearchParameterMatchingCompartmentAndReturnSuccessfulVerdictOrNull(target.getSearchParams(), next, name, theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + Verdict verdict = + checkForSearchParameterMatchingCompartmentAndReturnSuccessfulVerdictOrNull( + target.getSearchParams(), + next, + name, + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); if (verdict != null) { return verdict; } } } else if (getMode() == PolicyEnum.ALLOW) { - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } } } @@ -437,11 +641,24 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { if (!foundMatch) { return null; } - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } @Nullable - private Verdict applyRuleToInstances(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, RuleTarget target, IRuleApplier theRuleApplier) { + private Verdict applyRuleToInstances( + RestOperationTypeEnum theOperation, + RequestDetails theRequestDetails, + IBaseResource theInputResource, + IIdType theInputResourceId, + IBaseResource theOutputResource, + RuleTarget target, + IRuleApplier theRuleApplier) { if (target.resourceIds != null && target.resourceIds.size() > 0) { int haveMatches = 0; for (IIdType requestAppliesToResource : target.resourceIds) { @@ -458,11 +675,16 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { haveMatches++; break; } - } if (haveMatches == target.resourceIds.size()) { - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } } @@ -470,13 +692,27 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { } @Nullable - private Verdict applyRuleToTransaction(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, IRuleApplier theRuleApplier, Pointcut thePointcut, FhirContext ctx) { + private Verdict applyRuleToTransaction( + RestOperationTypeEnum theOperation, + RequestDetails theRequestDetails, + IBaseResource theInputResource, + IIdType theInputResourceId, + IBaseResource theOutputResource, + IRuleApplier theRuleApplier, + Pointcut thePointcut, + FhirContext ctx) { if (!(theOperation == RestOperationTypeEnum.TRANSACTION)) { return null; } if (theInputResource != null && requestAppliesToTransaction(ctx, 
myOp, theInputResource)) { if (getMode() == PolicyEnum.DENY) { - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } List inputResources = BundleUtil.toListOfEntries(ctx, (IBaseBundle) theInputResource); Verdict verdict = null; @@ -490,7 +726,8 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { UrlUtil.UrlParts parts = UrlUtil.parseUrl(nextPart.getUrl()); - inputResourceId = theRequestDetails.getFhirContext().getVersion().newIdType(); + inputResourceId = + theRequestDetails.getFhirContext().getVersion().newIdType(); inputResourceId.setParts(null, parts.getResourceType(), parts.getResourceId(), null); } @@ -508,13 +745,23 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { operation = RestOperationTypeEnum.DELETE; } else if (nextPart.getRequestType() == RequestTypeEnum.PATCH) { operation = RestOperationTypeEnum.PATCH; - } else if (nextPart.getRequestType() == null && theRequestDetails.getServer().getFhirContext().getVersion().getVersion() == FhirVersionEnum.DSTU3 && BundleUtil.isDstu3TransactionPatch(theRequestDetails.getFhirContext(), nextPart.getResource())) { - // This is a workaround for the fact that there is no PATCH verb in DSTU3's bundle entry verb type ValueSet. + } else if (nextPart.getRequestType() == null + && theRequestDetails + .getServer() + .getFhirContext() + .getVersion() + .getVersion() + == FhirVersionEnum.DSTU3 + && BundleUtil.isDstu3TransactionPatch( + theRequestDetails.getFhirContext(), nextPart.getResource())) { + // This is a workaround for the fact that there is no PATCH verb in DSTU3's bundle entry verb type + // ValueSet. 
// See BundleUtil#isDstu3TransactionPatch operation = RestOperationTypeEnum.PATCH; } else { - throw new InvalidRequestException(Msg.code(338) + "Can not handle transaction with operation of type " + nextPart.getRequestType()); + throw new InvalidRequestException(Msg.code(338) + + "Can not handle transaction with operation of type " + nextPart.getRequestType()); } /* @@ -525,14 +772,16 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { if (nextPart.getResource() != null) { RuntimeResourceDefinition resourceDef = ctx.getResourceDefinition(nextPart.getResource()); if ("Parameters".equals(resourceDef.getName()) || "Bundle".equals(resourceDef.getName())) { - throw new InvalidRequestException(Msg.code(339) + "Can not handle transaction with nested resource of type " + resourceDef.getName()); + throw new InvalidRequestException(Msg.code(339) + + "Can not handle transaction with nested resource of type " + resourceDef.getName()); } } String previousFixedConditionalUrl = theRequestDetails.getFixedConditionalUrl(); theRequestDetails.setFixedConditionalUrl(nextPart.getConditionalUrl()); - Verdict newVerdict = theRuleApplier.applyRulesAndReturnDecision(operation, theRequestDetails, inputResource, inputResourceId, null, thePointcut); + Verdict newVerdict = theRuleApplier.applyRulesAndReturnDecision( + operation, theRequestDetails, inputResource, inputResourceId, null, thePointcut); theRequestDetails.setFixedConditionalUrl(previousFixedConditionalUrl); @@ -550,20 +799,28 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { * be applying security on the way out */ if (allComponentsAreGets) { - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } return verdict; } else if (theOutputResource != null) { - List outputResources = AuthorizationInterceptor.toListOfResourcesAndExcludeContainer(theOutputResource, theRequestDetails.getFhirContext()); + List outputResources = AuthorizationInterceptor.toListOfResourcesAndExcludeContainer( + theOutputResource, theRequestDetails.getFhirContext()); Verdict verdict = null; for (IBaseResource nextResource : outputResources) { if (nextResource == null) { continue; } - Verdict newVerdict = theRuleApplier.applyRulesAndReturnDecision(RestOperationTypeEnum.READ, theRequestDetails, null, null, nextResource, thePointcut); + Verdict newVerdict = theRuleApplier.applyRulesAndReturnDecision( + RestOperationTypeEnum.READ, theRequestDetails, null, null, nextResource, thePointcut); if (newVerdict == null) { continue; } else if (verdict == null) { @@ -601,20 +858,42 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { } } - private Verdict checkForSearchParameterMatchingCompartmentAndReturnSuccessfulVerdictOrNull(Map theSearchParams, IIdType theCompartmentOwner, String theSearchParamName, RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, IRuleApplier theRuleApplier) { + private Verdict checkForSearchParameterMatchingCompartmentAndReturnSuccessfulVerdictOrNull( + Map theSearchParams, + IIdType theCompartmentOwner, + String theSearchParamName, + RestOperationTypeEnum theOperation, + RequestDetails theRequestDetails, + IBaseResource theInputResource, + IIdType theInputResourceId, + IBaseResource theOutputResource, + 
IRuleApplier theRuleApplier) { Verdict verdict = null; if (theSearchParams != null) { String[] values = theSearchParams.get(theSearchParamName); if (values != null) { for (String nextParameterValue : values) { - QualifiedParamList orParamList = QualifiedParamList.splitQueryStringByCommasIgnoreEscape(null, nextParameterValue); + QualifiedParamList orParamList = + QualifiedParamList.splitQueryStringByCommasIgnoreEscape(null, nextParameterValue); for (String next : orParamList) { if (next.equals(theCompartmentOwner.getValue())) { - verdict = newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + verdict = newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); break; } if (next.equals(theCompartmentOwner.getIdPart())) { - verdict = newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + verdict = newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); break; } } @@ -628,7 +907,8 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { myTransactionAppliesToOp = theOp; } - private boolean requestAppliesToTransaction(FhirContext theContext, RuleOpEnum theOp, IBaseResource theInputResource) { + private boolean requestAppliesToTransaction( + FhirContext theContext, RuleOpEnum theOp, IBaseResource theInputResource) { if (!"Bundle".equals(theContext.getResourceType(theInputResource))) { return false; } @@ -641,7 +921,9 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { if ("transaction".equals(bundleType) || "batch".equals(bundleType)) { return true; } else { - String msg = theContext.getLocalizer().getMessage(RuleImplOp.class, "invalidRequestBundleTypeForTransaction", '"' + bundleType + '"'); + String msg = theContext + .getLocalizer() + .getMessage(RuleImplOp.class, "invalidRequestBundleTypeForTransaction", '"' + bundleType + '"'); throw new UnprocessableEntityException(Msg.code(340) + msg); } } @@ -673,7 +955,6 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { return this; } - @Override @Nonnull protected ToStringBuilder toStringBuilder() { @@ -702,18 +983,24 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { myClassifierCompartmentOwners = newList; } - public boolean matches(RuleOpEnum theRuleOp, AppliesTypeEnum theAppliesTo, Collection theAppliesToInstances, Set theAppliesToTypes, ClassifierTypeEnum theClassifierType, String theCompartmentName) { - if (theRuleOp != myOp || - theAppliesTo != myAppliesTo || - theClassifierType != myClassifierType) { + public boolean matches( + RuleOpEnum theRuleOp, + AppliesTypeEnum theAppliesTo, + Collection theAppliesToInstances, + Set theAppliesToTypes, + ClassifierTypeEnum theClassifierType, + String theCompartmentName) { + if (theRuleOp != myOp || theAppliesTo != myAppliesTo || theClassifierType != myClassifierType) { return false; } switch (theAppliesTo) { case TYPES: - return theAppliesToTypes.equals(myAppliesToTypes) && theCompartmentName.equals(myClassifierCompartmentName); + return theAppliesToTypes.equals(myAppliesToTypes) + && theCompartmentName.equals(myClassifierCompartmentName); case INSTANCES: - return theAppliesToInstances.equals(myAppliesToInstances) && theCompartmentName.equals(myClassifierCompartmentName); + return theAppliesToInstances.equals(myAppliesToInstances) + && 
theCompartmentName.equals(myClassifierCompartmentName); case ALL_RESOURCES: return theCompartmentName.equals(myClassifierCompartmentName); default: @@ -722,8 +1009,8 @@ class RuleImplOp extends BaseRule /* implements IAuthRule */ { } } - public void setAdditionalSearchParamsForCompartmentTypes(AdditionalCompartmentSearchParameters theAdditionalParameters) { + public void setAdditionalSearchParamsForCompartmentTypes( + AdditionalCompartmentSearchParameters theAdditionalParameters) { myAdditionalCompartmentSearchParamMap = theAdditionalParameters; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplPatch.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplPatch.java index e3efbd26602..352fc6dfdf8 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplPatch.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplPatch.java @@ -35,12 +35,26 @@ class RuleImplPatch extends BaseRule { } @Override - public AuthorizationInterceptor.Verdict applyRule(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, IRuleApplier theRuleApplier, Set theFlags, Pointcut thePointcut) { + public AuthorizationInterceptor.Verdict applyRule( + RestOperationTypeEnum theOperation, + RequestDetails theRequestDetails, + IBaseResource theInputResource, + IIdType theInputResourceId, + IBaseResource theOutputResource, + IRuleApplier theRuleApplier, + Set theFlags, + Pointcut thePointcut) { if (myAllRequests) { if (theOperation == RestOperationTypeEnum.PATCH) { if (theInputResource == null && theOutputResource == null) { - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplUpdateHistoryRewrite.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplUpdateHistoryRewrite.java index bdac717afa4..e9300e1267b 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplUpdateHistoryRewrite.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleImplUpdateHistoryRewrite.java @@ -36,11 +36,26 @@ public class RuleImplUpdateHistoryRewrite extends BaseRule { } @Override - public AuthorizationInterceptor.Verdict applyRule(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, - IRuleApplier theRuleApplier, Set theFlags, Pointcut thePointcut) { + public AuthorizationInterceptor.Verdict applyRule( + RestOperationTypeEnum theOperation, + RequestDetails theRequestDetails, + IBaseResource theInputResource, + IIdType theInputResourceId, + IBaseResource theOutputResource, + IRuleApplier theRuleApplier, + Set theFlags, + Pointcut thePointcut) { if (myAllRequests) { - if (theRequestDetails.getId() != null && theRequestDetails.getId().hasVersionIdPart() && theOperation == RestOperationTypeEnum.UPDATE) { - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + if (theRequestDetails.getId() != null + && 
theRequestDetails.getId().hasVersionIdPart()
+                    && theOperation == RestOperationTypeEnum.UPDATE) {
+                return newVerdict(
+                        theOperation,
+                        theRequestDetails,
+                        theInputResource,
+                        theInputResourceId,
+                        theOutputResource,
+                        theRuleApplier);
             }
         }
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleOpEnum.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleOpEnum.java
index b6cd1aed89a..f63d8eb7c6b 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleOpEnum.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/RuleOpEnum.java
@@ -21,13 +21,13 @@ package ca.uhn.fhir.rest.server.interceptor.auth;

 enum RuleOpEnum {
     READ,
-    WRITE,
+    WRITE,
     ALL,
     /**
      * Transaction applies to both transaction and batch
      */
-    TRANSACTION,
-    METADATA,
+    TRANSACTION,
+    METADATA,
     DELETE,
     OPERATION,
     GRAPHQL,
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/SearchNarrowingConsentService.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/SearchNarrowingConsentService.java
index cc7a9666b4e..ff8855b6596 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/SearchNarrowingConsentService.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/SearchNarrowingConsentService.java
@@ -32,8 +32,8 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

-import javax.annotation.Nonnull;
 import java.util.List;
+import javax.annotation.Nonnull;

 public class SearchNarrowingConsentService implements IConsentService {
     private static final Logger ourLog = LoggerFactory.getLogger(SearchNarrowingConsentService.class);
@@ -57,7 +58,8 @@ public class SearchNarrowingConsentService implements IConsentService {
      * @param theValidationSupport The validation support module
      * @param theSearchParamRegistry The search param registry
      */
-    public SearchNarrowingConsentService(IValidationSupport theValidationSupport, ISearchParamRegistry theSearchParamRegistry) {
+    public SearchNarrowingConsentService(
+            IValidationSupport theValidationSupport, ISearchParamRegistry theSearchParamRegistry) {
         myValidationSupport = theValidationSupport;
         mySearchParamRegistry = theSearchParamRegistry;
     }
@@ -73,25 +74,29 @@ public class SearchNarrowingConsentService implements IConsentService {
     }

     @Override
-    public boolean shouldProcessCanSeeResource(RequestDetails theRequestDetails, IConsentContextServices theContextServices) {
-        List postFilteringList = SearchNarrowingInterceptor.getPostFilteringListOrNull(theRequestDetails);
+    public boolean shouldProcessCanSeeResource(
+            RequestDetails theRequestDetails, IConsentContextServices theContextServices) {
+        List postFilteringList =
+                SearchNarrowingInterceptor.getPostFilteringListOrNull(theRequestDetails);
         return postFilteringList != null && !postFilteringList.isEmpty();
     }

-    @Override
-    public ConsentOutcome canSeeResource(RequestDetails theRequestDetails, IBaseResource theResource, IConsentContextServices theContextServices) {
+    public ConsentOutcome canSeeResource(
+            RequestDetails theRequestDetails, IBaseResource theResource, IConsentContextServices theContextServices) {
         return applyFilterForResource(theRequestDetails, theResource);
     }

     @Override
-    public ConsentOutcome willSeeResource(RequestDetails theRequestDetails, IBaseResource theResource, IConsentContextServices theContextServices) {
+    public ConsentOutcome
willSeeResource( + RequestDetails theRequestDetails, IBaseResource theResource, IConsentContextServices theContextServices) { return applyFilterForResource(theRequestDetails, theResource); } @Nonnull private ConsentOutcome applyFilterForResource(RequestDetails theRequestDetails, IBaseResource theResource) { - List postFilteringList = SearchNarrowingInterceptor.getPostFilteringListOrNull(theRequestDetails); + List postFilteringList = + SearchNarrowingInterceptor.getPostFilteringListOrNull(theRequestDetails); if (postFilteringList == null) { return ConsentOutcome.PROCEED; } @@ -108,9 +113,20 @@ public class SearchNarrowingConsentService implements IConsentService { String searchParamName = next.getSearchParameterName(); String valueSetUrl = next.getValueSetUrl(); - SearchParameterAndValueSetRuleImpl.CodeMatchCount outcome = SearchParameterAndValueSetRuleImpl.countMatchingCodesInValueSetForSearchParameter(theResource, myValidationSupport, mySearchParamRegistry, returnOnFirstMatch, searchParamName, valueSetUrl, myTroubleshootingLog, "Search Narrowing"); + SearchParameterAndValueSetRuleImpl.CodeMatchCount outcome = + SearchParameterAndValueSetRuleImpl.countMatchingCodesInValueSetForSearchParameter( + theResource, + myValidationSupport, + mySearchParamRegistry, + returnOnFirstMatch, + searchParamName, + valueSetUrl, + myTroubleshootingLog, + "Search Narrowing"); if (outcome.isAtLeastOneUnableToValidate()) { - myTroubleshootingLog.warn("Terminology Services failed to validate value from " + next.getResourceName() + ":" + next.getSearchParameterName() + " in ValueSet " + next.getValueSetUrl() + " - Assuming REJECT"); + myTroubleshootingLog.warn("Terminology Services failed to validate value from " + next.getResourceName() + + ":" + next.getSearchParameterName() + " in ValueSet " + next.getValueSetUrl() + + " - Assuming REJECT"); return ConsentOutcome.REJECT; } @@ -124,7 +140,6 @@ public class SearchNarrowingConsentService implements IConsentService { break; } } - } if (!allPositiveRulesMatched) { diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/SearchNarrowingInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/SearchNarrowingInterceptor.java index 8ea3e6e93c2..776a6221778 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/SearchNarrowingInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/SearchNarrowingInterceptor.java @@ -51,9 +51,6 @@ import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseBundle; -import javax.annotation.Nullable; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -64,6 +61,9 @@ import java.util.Optional; import java.util.Set; import java.util.function.Consumer; import java.util.stream.Collectors; +import javax.annotation.Nullable; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; /** * This interceptor can be used to automatically narrow the scope of searches in order to @@ -90,7 +90,8 @@ import java.util.stream.Collectors; */ public class SearchNarrowingInterceptor { - public static final String POST_FILTERING_LIST_ATTRIBUTE_NAME = SearchNarrowingInterceptor.class.getName() + "_POST_FILTERING_LIST"; + public static final String POST_FILTERING_LIST_ATTRIBUTE_NAME = + 
SearchNarrowingInterceptor.class.getName() + "_POST_FILTERING_LIST"; private IValidationSupport myValidationSupport; private int myPostFilterLargeValueSetThreshold = 500; @@ -108,7 +109,9 @@ public class SearchNarrowingInterceptor { * @see #setValidationSupport(IValidationSupport) */ public void setPostFilterLargeValueSetThreshold(int thePostFilterLargeValueSetThreshold) { - Validate.isTrue(thePostFilterLargeValueSetThreshold > 0, "thePostFilterLargeValueSetThreshold must be a positive integer"); + Validate.isTrue( + thePostFilterLargeValueSetThreshold > 0, + "thePostFilterLargeValueSetThreshold must be a positive integer"); myPostFilterLargeValueSetThreshold = thePostFilterLargeValueSetThreshold; } @@ -141,11 +144,13 @@ public class SearchNarrowingInterceptor { } @Hook(Pointcut.SERVER_INCOMING_REQUEST_POST_PROCESSED) - public boolean hookIncomingRequestPostProcessed(RequestDetails theRequestDetails, HttpServletRequest theRequest, HttpServletResponse theResponse) throws AuthenticationException { + public boolean hookIncomingRequestPostProcessed( + RequestDetails theRequestDetails, HttpServletRequest theRequest, HttpServletResponse theResponse) + throws AuthenticationException { // We don't support this operation type yet Validate.isTrue(theRequestDetails.getRestOperationType() != RestOperationTypeEnum.SEARCH_SYSTEM); - //N.B do not add code above this for filtering, this should only ever occur on search. + // N.B do not add code above this for filtering, this should only ever occur on search. if (shouldSkipNarrowing(theRequestDetails)) { return true; } @@ -161,7 +166,6 @@ public class SearchNarrowingInterceptor { postFilteringList.addAll(authorizedList.getAllowedCodeInValueSets()); } - FhirContext ctx = theRequestDetails.getServer().getFhirContext(); RuntimeResourceDefinition resDef = ctx.getResourceDefinition(theRequestDetails.getResourceName()); /* @@ -170,12 +174,14 @@ public class SearchNarrowingInterceptor { */ Collection compartments = authorizedList.getAllowedCompartments(); if (compartments != null) { - Map> parameterToOrValues = processResourcesOrCompartments(theRequestDetails, resDef, compartments, true); + Map> parameterToOrValues = + processResourcesOrCompartments(theRequestDetails, resDef, compartments, true); applyParametersToRequestDetails(theRequestDetails, parameterToOrValues, true); } Collection resources = authorizedList.getAllowedInstances(); if (resources != null) { - Map> parameterToOrValues = processResourcesOrCompartments(theRequestDetails, resDef, resources, false); + Map> parameterToOrValues = + processResourcesOrCompartments(theRequestDetails, resDef, resources, false); applyParametersToRequestDetails(theRequestDetails, parameterToOrValues, true); } List allowedCodeInValueSet = authorizedList.getAllowedCodeInValueSets(); @@ -187,28 +193,33 @@ public class SearchNarrowingInterceptor { return true; } - /** * Skip unless it is a search request or an $everything operation */ private boolean shouldSkipNarrowing(RequestDetails theRequestDetails) { return theRequestDetails.getRestOperationType() != RestOperationTypeEnum.SEARCH_TYPE - && !"$everything".equalsIgnoreCase(theRequestDetails.getOperation()); + && !"$everything".equalsIgnoreCase(theRequestDetails.getOperation()); } @Hook(Pointcut.SERVER_INCOMING_REQUEST_PRE_HANDLED) - public void hookIncomingRequestPreHandled(ServletRequestDetails theRequestDetails, HttpServletRequest theRequest, HttpServletResponse theResponse) throws AuthenticationException { + public void hookIncomingRequestPreHandled( + 
ServletRequestDetails theRequestDetails, HttpServletRequest theRequest, HttpServletResponse theResponse) + throws AuthenticationException { if (theRequestDetails.getRestOperationType() != RestOperationTypeEnum.TRANSACTION) { return; } IBaseBundle bundle = (IBaseBundle) theRequestDetails.getResource(); FhirContext ctx = theRequestDetails.getFhirContext(); - BundleEntryUrlProcessor processor = new BundleEntryUrlProcessor(ctx, theRequestDetails, theRequest, theResponse); + BundleEntryUrlProcessor processor = + new BundleEntryUrlProcessor(ctx, theRequestDetails, theRequest, theResponse); BundleUtil.processEntries(ctx, bundle, processor); } - private void applyParametersToRequestDetails(RequestDetails theRequestDetails, @Nullable Map> theParameterToOrValues, boolean thePatientIdMode) { + private void applyParametersToRequestDetails( + RequestDetails theRequestDetails, + @Nullable Map> theParameterToOrValues, + boolean thePatientIdMode) { if (theParameterToOrValues != null) { Map newParameters = new HashMap<>(theRequestDetails.getParameters()); for (Map.Entry> nextEntry : theParameterToOrValues.entrySet()) { @@ -234,24 +245,22 @@ public class SearchNarrowingInterceptor { String[] existingValues = newParameters.get(nextParamName); if (thePatientIdMode) { - List nextAllowedValueIds = nextAllowedValues - .stream() - .map(t -> t.lastIndexOf("/") > -1 ? t.substring(t.lastIndexOf("/") + 1) : t) - .collect(Collectors.toList()); + List nextAllowedValueIds = nextAllowedValues.stream() + .map(t -> t.lastIndexOf("/") > -1 ? t.substring(t.lastIndexOf("/") + 1) : t) + .collect(Collectors.toList()); boolean restrictedExistingList = false; for (int i = 0; i < existingValues.length; i++) { String nextExistingValue = existingValues[i]; - List nextRequestedValues = QualifiedParamList.splitQueryStringByCommasIgnoreEscape(null, nextExistingValue); + List nextRequestedValues = + QualifiedParamList.splitQueryStringByCommasIgnoreEscape(null, nextExistingValue); List nextPermittedValues = ListUtils.union( - ListUtils.intersection(nextRequestedValues, nextAllowedValues), - ListUtils.intersection(nextRequestedValues, nextAllowedValueIds) - ); + ListUtils.intersection(nextRequestedValues, nextAllowedValues), + ListUtils.intersection(nextRequestedValues, nextAllowedValueIds)); if (nextPermittedValues.size() > 0) { restrictedExistingList = true; existingValues[i] = ParameterUtil.escapeAndJoinOrList(nextPermittedValues); } - } /* @@ -261,35 +270,41 @@ public class SearchNarrowingInterceptor { * caller is forbidden from accessing the resources they requested. 
*/ if (!restrictedExistingList) { - throw new ForbiddenOperationException(Msg.code(2026) + "Value not permitted for parameter " + UrlUtil.escapeUrlParam(nextParamName)); + throw new ForbiddenOperationException(Msg.code(2026) + "Value not permitted for parameter " + + UrlUtil.escapeUrlParam(nextParamName)); } } else { int existingValuesCount = existingValues.length; - String[] newValues = Arrays.copyOf(existingValues, existingValuesCount + nextAllowedValues.size()); + String[] newValues = + Arrays.copyOf(existingValues, existingValuesCount + nextAllowedValues.size()); for (int i = 0; i < nextAllowedValues.size(); i++) { newValues[existingValuesCount + i] = nextAllowedValues.get(i); } newParameters.put(nextParamName, newValues); - } - } - } theRequestDetails.setParameters(newParameters); } } @Nullable - private Map> processResourcesOrCompartments(RequestDetails theRequestDetails, RuntimeResourceDefinition theResDef, Collection theResourcesOrCompartments, boolean theAreCompartments) { + private Map> processResourcesOrCompartments( + RequestDetails theRequestDetails, + RuntimeResourceDefinition theResDef, + Collection theResourcesOrCompartments, + boolean theAreCompartments) { Map> retVal = null; String lastCompartmentName = null; String lastSearchParamName = null; for (String nextCompartment : theResourcesOrCompartments) { - Validate.isTrue(StringUtils.countMatches(nextCompartment, '/') == 1, "Invalid compartment name (must be in form \"ResourceType/xxx\": %s", nextCompartment); + Validate.isTrue( + StringUtils.countMatches(nextCompartment, '/') == 1, + "Invalid compartment name (must be in form \"ResourceType/xxx\": %s", + nextCompartment); String compartmentName = nextCompartment.substring(0, nextCompartment.indexOf('/')); String searchParamName = null; @@ -306,12 +321,12 @@ public class SearchNarrowingInterceptor { } else if (theAreCompartments) { - searchParamName = selectBestSearchParameterForCompartment(theRequestDetails, theResDef, compartmentName); + searchParamName = + selectBestSearchParameterForCompartment(theRequestDetails, theResDef, compartmentName); } lastCompartmentName = compartmentName; lastSearchParamName = searchParamName; - } if (searchParamName != null) { @@ -327,15 +342,18 @@ public class SearchNarrowingInterceptor { } @Nullable - private Map> processAllowedCodes(RuntimeResourceDefinition theResDef, List theAllowedCodeInValueSet) { + private Map> processAllowedCodes( + RuntimeResourceDefinition theResDef, List theAllowedCodeInValueSet) { Map> retVal = null; for (AllowedCodeInValueSet next : theAllowedCodeInValueSet) { String resourceName = next.getResourceName(); String valueSetUrl = next.getValueSetUrl(); - ValidateUtil.isNotBlankOrThrowIllegalArgument(resourceName, "Resource name supplied by SearchNarrowingInterceptor must not be null"); - ValidateUtil.isNotBlankOrThrowIllegalArgument(valueSetUrl, "ValueSet URL supplied by SearchNarrowingInterceptor must not be null"); + ValidateUtil.isNotBlankOrThrowIllegalArgument( + resourceName, "Resource name supplied by SearchNarrowingInterceptor must not be null"); + ValidateUtil.isNotBlankOrThrowIllegalArgument( + valueSetUrl, "ValueSet URL supplied by SearchNarrowingInterceptor must not be null"); if (!resourceName.equals(theResDef.getName())) { continue; @@ -371,7 +389,8 @@ public class SearchNarrowingInterceptor { ValueSetExpansionOptions options = new ValueSetExpansionOptions(); options.setCount(myPostFilterLargeValueSetThreshold); options.setIncludeHierarchy(false); - IValidationSupport.ValueSetExpansionOutcome outcome = 
myValidationSupport.expandValueSet(ctx, options, theValueSetUrl); + IValidationSupport.ValueSetExpansionOutcome outcome = + myValidationSupport.expandValueSet(ctx, options, theValueSetUrl); if (outcome != null && outcome.getValueSet() != null) { FhirTerser terser = myValidationSupport.getFhirContext().newTerser(); List contains = terser.getValues(outcome.getValueSet(), "ValueSet.expansion.contains"); @@ -382,8 +401,8 @@ public class SearchNarrowingInterceptor { return false; } - - private String selectBestSearchParameterForCompartment(RequestDetails theRequestDetails, RuntimeResourceDefinition theResDef, String compartmentName) { + private String selectBestSearchParameterForCompartment( + RequestDetails theRequestDetails, RuntimeResourceDefinition theResDef, String compartmentName) { String searchParamName = null; Set queryParameters = theRequestDetails.getParameters().keySet(); @@ -394,9 +413,7 @@ public class SearchNarrowingInterceptor { // Resources like Observation have several fields that add the resource to // the compartment. In the case of Observation, it's subject, patient and performer. // For this kind of thing, we'll prefer the one that matches the compartment name. - Optional primarySearchParam = - searchParams - .stream() + Optional primarySearchParam = searchParams.stream() .filter(t -> t.getName().equalsIgnoreCase(compartmentName)) .findFirst(); @@ -406,16 +423,18 @@ public class SearchNarrowingInterceptor { if (queryParameters.contains(primarySearchParamName)) { searchParamName = primarySearchParamName; } else { - // If the primary search parameter itself isn't in use, check to see whether any of its synonyms are. - Optional synonymInUse = findSynonyms(searchParams, primarySearchParam.get()) - .stream() - .filter(t -> queryParameters.contains(t.getName())) - .findFirst(); + // If the primary search parameter itself isn't in use, check to see whether any of its synonyms + // are. 
+ Optional synonymInUse = + findSynonyms(searchParams, primarySearchParam.get()).stream() + .filter(t -> queryParameters.contains(t.getName())) + .findFirst(); if (synonymInUse.isPresent()) { // if a synonym is in use, use it searchParamName = synonymInUse.get().getName(); } else { - // if not, i.e., the original query is not filtering on this field at all, use the primary search param + // if not, i.e., the original query is not filtering on this field at all, use the primary + // search param searchParamName = primarySearchParamName; } } @@ -423,20 +442,20 @@ public class SearchNarrowingInterceptor { // Otherwise, fall back to whatever search parameter is available searchParamName = searchParams.get(0).getName(); } - } return searchParamName; } - private List findSynonyms(List searchParams, RuntimeSearchParam primarySearchParam) { - // We define two search parameters in a compartment as synonyms if they refer to the same field in the model, ignoring any qualifiers + private List findSynonyms( + List searchParams, RuntimeSearchParam primarySearchParam) { + // We define two search parameters in a compartment as synonyms if they refer to the same field in the model, + // ignoring any qualifiers String primaryBasePath = getBasePath(primarySearchParam); - return searchParams - .stream() - .filter(t -> primaryBasePath.equals(getBasePath(t))) - .collect(Collectors.toList()); + return searchParams.stream() + .filter(t -> primaryBasePath.equals(getBasePath(t))) + .collect(Collectors.toList()); } private String getBasePath(RuntimeSearchParam searchParam) { @@ -454,7 +473,11 @@ public class SearchNarrowingInterceptor { private final HttpServletRequest myRequest; private final HttpServletResponse myResponse; - public BundleEntryUrlProcessor(FhirContext theFhirContext, ServletRequestDetails theRequestDetails, HttpServletRequest theRequest, HttpServletResponse theResponse) { + public BundleEntryUrlProcessor( + FhirContext theFhirContext, + ServletRequestDetails theRequestDetails, + HttpServletRequest theRequest, + HttpServletResponse theResponse) { myFhirContext = theFhirContext; myRequestDetails = theRequestDetails; myRequest = theRequest; @@ -467,18 +490,20 @@ public class SearchNarrowingInterceptor { String url = theModifiableBundleEntry.getRequestUrl(); - ServletSubRequestDetails subServletRequestDetails = ServletRequestUtil.getServletSubRequestDetails(myRequestDetails, url, paramValues); - BaseMethodBinding method = subServletRequestDetails.getServer().determineResourceMethod(subServletRequestDetails, url); + ServletSubRequestDetails subServletRequestDetails = + ServletRequestUtil.getServletSubRequestDetails(myRequestDetails, url, paramValues); + BaseMethodBinding method = + subServletRequestDetails.getServer().determineResourceMethod(subServletRequestDetails, url); RestOperationTypeEnum restOperationType = method.getRestOperationType(); subServletRequestDetails.setRestOperationType(restOperationType); hookIncomingRequestPostProcessed(subServletRequestDetails, myRequest, myResponse); - theModifiableBundleEntry.setRequestUrl(myFhirContext, ServletRequestUtil.extractUrl(subServletRequestDetails)); + theModifiableBundleEntry.setRequestUrl( + myFhirContext, ServletRequestUtil.extractUrl(subServletRequestDetails)); } } - static List getPostFilteringList(RequestDetails theRequestDetails) { List retVal = getPostFilteringListOrNull(theRequestDetails); if (retVal == null) { @@ -492,6 +517,4 @@ public class SearchNarrowingInterceptor { static List getPostFilteringListOrNull(RequestDetails 
theRequestDetails) { return (List) theRequestDetails.getAttribute(POST_FILTERING_LIST_ATTRIBUTE_NAME); } - - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/SearchParameterAndValueSetRuleImpl.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/SearchParameterAndValueSetRuleImpl.java index a39f21cc4d9..6008304c3ab 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/SearchParameterAndValueSetRuleImpl.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/SearchParameterAndValueSetRuleImpl.java @@ -38,9 +38,9 @@ import org.hl7.fhir.instance.model.api.ICompositeType; import org.hl7.fhir.instance.model.api.IIdType; import org.slf4j.Logger; -import javax.annotation.Nonnull; import java.util.List; import java.util.Set; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -71,17 +71,41 @@ class SearchParameterAndValueSetRuleImpl extends RuleImplOp { myValueSetUrl = theValueSetUrl; } - @Override - protected AuthorizationInterceptor.Verdict applyRuleLogic(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, Set theFlags, FhirContext theFhirContext, RuleTarget theRuleTarget, IRuleApplier theRuleApplier) { + protected AuthorizationInterceptor.Verdict applyRuleLogic( + RestOperationTypeEnum theOperation, + RequestDetails theRequestDetails, + IBaseResource theInputResource, + IIdType theInputResourceId, + IBaseResource theOutputResource, + Set theFlags, + FhirContext theFhirContext, + RuleTarget theRuleTarget, + IRuleApplier theRuleApplier) { // Sanity check Validate.isTrue(theInputResource == null || theOutputResource == null); if (theInputResource != null) { - return applyRuleLogic(theFhirContext, theRequestDetails, theInputResource, theOperation, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + return applyRuleLogic( + theFhirContext, + theRequestDetails, + theInputResource, + theOperation, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } if (theOutputResource != null) { - return applyRuleLogic(theFhirContext, theRequestDetails, theOutputResource, theOperation, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + return applyRuleLogic( + theFhirContext, + theRequestDetails, + theOutputResource, + theOperation, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } // No resource present @@ -92,7 +116,15 @@ class SearchParameterAndValueSetRuleImpl extends RuleImplOp { return null; } - private AuthorizationInterceptor.Verdict applyRuleLogic(FhirContext theFhirContext, RequestDetails theRequestDetails, IBaseResource theResource, RestOperationTypeEnum theOperation, IBaseResource theInputResource, IIdType theInputResourceId, IBaseResource theOutputResource, IRuleApplier theRuleApplier) { + private AuthorizationInterceptor.Verdict applyRuleLogic( + FhirContext theFhirContext, + RequestDetails theRequestDetails, + IBaseResource theResource, + RestOperationTypeEnum theOperation, + IBaseResource theInputResource, + IIdType theInputResourceId, + IBaseResource theOutputResource, + IRuleApplier theRuleApplier) { IValidationSupport validationSupport = theRuleApplier.getValidationSupport(); if (validationSupport == null) { validationSupport = theFhirContext.getValidationSupport(); @@ -103,27 +135,46 @@ class 
SearchParameterAndValueSetRuleImpl extends RuleImplOp { boolean wantCode = myWantCode; ISearchParamRegistry searchParamRegistry = null; - CodeMatchCount codeMatchCount = countMatchingCodesInValueSetForSearchParameter(theResource, validationSupport, searchParamRegistry, wantCode, mySearchParameterName, myValueSetUrl, troubleshootingLog, operationDescription); + CodeMatchCount codeMatchCount = countMatchingCodesInValueSetForSearchParameter( + theResource, + validationSupport, + searchParamRegistry, + wantCode, + mySearchParameterName, + myValueSetUrl, + troubleshootingLog, + operationDescription); if (codeMatchCount.isAtLeastOneUnableToValidate()) { - troubleshootingLog - .warn("ValueSet {} could not be validated by terminology service - Assuming DENY", myValueSetUrl); + troubleshootingLog.warn( + "ValueSet {} could not be validated by terminology service - Assuming DENY", myValueSetUrl); return new AuthorizationInterceptor.Verdict(PolicyEnum.DENY, this); } if (myWantCode && codeMatchCount.getMatchingCodeCount() > 0) { - return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + return newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); } else if (!myWantCode) { boolean notFound = getMode() == PolicyEnum.ALLOW && codeMatchCount.getMatchingCodeCount() == 0; - boolean othersFound = getMode() == PolicyEnum.DENY && codeMatchCount.getMatchingCodeCount() < codeMatchCount.getOverallCodeCount(); + boolean othersFound = getMode() == PolicyEnum.DENY + && codeMatchCount.getMatchingCodeCount() < codeMatchCount.getOverallCodeCount(); if (notFound || othersFound) { - AuthorizationInterceptor.Verdict verdict = newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier); + AuthorizationInterceptor.Verdict verdict = newVerdict( + theOperation, + theRequestDetails, + theInputResource, + theInputResourceId, + theOutputResource, + theRuleApplier); if (notFound) { - troubleshootingLog - .debug("Code was not found in VS - Verdict: {}", verdict); + troubleshootingLog.debug("Code was not found in VS - Verdict: {}", verdict); } else { - troubleshootingLog - .debug("Code(s) found that are not in VS - Verdict: {}", verdict); + troubleshootingLog.debug("Code(s) found that are not in VS - Verdict: {}", verdict); } return verdict; } @@ -145,8 +196,21 @@ class SearchParameterAndValueSetRuleImpl extends RuleImplOp { * @param theOperationDescription A description of the operation being peformed (for logging) */ @Nonnull - static CodeMatchCount countMatchingCodesInValueSetForSearchParameter(IBaseResource theResource, IValidationSupport theValidationSupport, ISearchParamRegistry theSearchParamRegistry, boolean theReturnOnFirstMatch, String theSearchParameterName, String theValueSetUrl, Logger theTroubleshootingLog, String theOperationDescription) { - theTroubleshootingLog.debug("Applying {} {}:{} for valueSet: {}", theOperationDescription, theSearchParameterName, theReturnOnFirstMatch ? 
"in" : "not-in", theValueSetUrl); + static CodeMatchCount countMatchingCodesInValueSetForSearchParameter( + IBaseResource theResource, + IValidationSupport theValidationSupport, + ISearchParamRegistry theSearchParamRegistry, + boolean theReturnOnFirstMatch, + String theSearchParameterName, + String theValueSetUrl, + Logger theTroubleshootingLog, + String theOperationDescription) { + theTroubleshootingLog.debug( + "Applying {} {}:{} for valueSet: {}", + theOperationDescription, + theSearchParameterName, + theReturnOnFirstMatch ? "in" : "not-in", + theValueSetUrl); FhirContext fhirContext = theValidationSupport.getFhirContext(); FhirTerser terser = fhirContext.newTerser(); @@ -156,44 +220,59 @@ class SearchParameterAndValueSetRuleImpl extends RuleImplOp { RuntimeResourceDefinition resourceDefinition = fhirContext.getResourceDefinition(theResource); RuntimeSearchParam searchParameter = resourceDefinition.getSearchParam(theSearchParameterName); if (searchParameter == null) { - throw new InternalErrorException(Msg.code(2025) + "Unknown SearchParameter for resource " + resourceDefinition.getName() + ": " + theSearchParameterName); + throw new InternalErrorException(Msg.code(2025) + "Unknown SearchParameter for resource " + + resourceDefinition.getName() + ": " + theSearchParameterName); } List paths = searchParameter.getPathsSplitForResourceType(resourceDefinition.getName()); CodeMatchCount codeMatchCount = new CodeMatchCount(); for (String nextPath : paths) { - List foundCodeableConcepts = fhirContext.newFhirPath().evaluate(theResource, nextPath, ICompositeType.class); + List foundCodeableConcepts = + fhirContext.newFhirPath().evaluate(theResource, nextPath, ICompositeType.class); for (ICompositeType nextCodeableConcept : foundCodeableConcepts) { for (IBase nextCoding : terser.getValues(nextCodeableConcept, "coding")) { String system = terser.getSinglePrimitiveValueOrNull(nextCoding, "system"); String code = terser.getSinglePrimitiveValueOrNull(nextCoding, "code"); if (isNotBlank(system) && isNotBlank(code)) { - IValidationSupport.CodeValidationResult validateCodeResult = theValidationSupport.validateCode(validationSupportContext, conceptValidationOptions, system, code, null, theValueSetUrl); + IValidationSupport.CodeValidationResult validateCodeResult = theValidationSupport.validateCode( + validationSupportContext, conceptValidationOptions, system, code, null, theValueSetUrl); if (validateCodeResult != null) { if (validateCodeResult.isOk()) { codeMatchCount.addMatchingCode(); - theTroubleshootingLog.debug("Code {}#{} was found in ValueSet[{}] - {}", system, code, theValueSetUrl, validateCodeResult.getMessage()); + theTroubleshootingLog.debug( + "Code {}#{} was found in ValueSet[{}] - {}", + system, + code, + theValueSetUrl, + validateCodeResult.getMessage()); if (theReturnOnFirstMatch) { return codeMatchCount; } } else { codeMatchCount.addNonMatchingCode(); - theTroubleshootingLog.debug("Code {}#{} was not found in ValueSet[{}]: {}", system, code, theValueSetUrl, validateCodeResult.getMessage()); + theTroubleshootingLog.debug( + "Code {}#{} was not found in ValueSet[{}]: {}", + system, + code, + theValueSetUrl, + validateCodeResult.getMessage()); } } else { - theTroubleshootingLog.debug("Terminology service was unable to validate code {}#{} in ValueSet[{}] - No service was able to handle this request", system, code, theValueSetUrl); + theTroubleshootingLog.debug( + "Terminology service was unable to validate code {}#{} in ValueSet[{}] - No service was able to handle this request", + system, + 
code, + theValueSetUrl); codeMatchCount.addUnableToValidate(); } } } } - } return codeMatchCount; } - static class CodeMatchCount { private int myMatchingCodeCount; @@ -225,5 +304,4 @@ class SearchParameterAndValueSetRuleImpl extends RuleImplOp { return myOverallCodeCount; } } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/TransactionAppliesToEnum.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/TransactionAppliesToEnum.java index 3188f825d66..015728e5458 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/TransactionAppliesToEnum.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/TransactionAppliesToEnum.java @@ -21,5 +21,4 @@ package ca.uhn.fhir.rest.server.interceptor.auth; enum TransactionAppliesToEnum { ANY_OPERATION - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/binary/BinarySecurityContextInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/binary/BinarySecurityContextInterceptor.java index b5949976512..753f8ee4cf2 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/binary/BinarySecurityContextInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/binary/BinarySecurityContextInterceptor.java @@ -85,7 +85,8 @@ public class BinarySecurityContextInterceptor { * Interceptor hook method. Do not call this method directly. */ @Hook(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED) - public void preShowResources(IBaseResource theOldValue, IBaseResource theNewValue, RequestDetails theRequestDetails) { + public void preShowResources( + IBaseResource theOldValue, IBaseResource theNewValue, RequestDetails theRequestDetails) { if (theOldValue instanceof IBaseBinary) { applyAccessControl((IBaseBinary) theOldValue, theRequestDetails); } @@ -101,8 +102,10 @@ public class BinarySecurityContextInterceptor { */ protected void applyAccessControl(IBaseBinary theBinary, RequestDetails theRequestDetails) { FhirTerser terser = myFhirContext.newTerser(); - String securityContextSystem = terser.getSinglePrimitiveValueOrNull(theBinary, "securityContext.identifier.system"); - String securityContextValue = terser.getSinglePrimitiveValueOrNull(theBinary, "securityContext.identifier.value"); + String securityContextSystem = + terser.getSinglePrimitiveValueOrNull(theBinary, "securityContext.identifier.system"); + String securityContextValue = + terser.getSinglePrimitiveValueOrNull(theBinary, "securityContext.identifier.value"); if (isNotBlank(securityContextSystem) || isNotBlank(securityContextValue)) { applyAccessControl(theBinary, securityContextSystem, securityContextValue, theRequestDetails); @@ -118,7 +121,11 @@ public class BinarySecurityContextInterceptor { * @param theSecurityContextValue The identifier value * @param theRequestDetails The request details */ - protected void applyAccessControl(IBaseBinary theBinary, String theSecurityContextSystem, String theSecurityContextValue, RequestDetails theRequestDetails) { + protected void applyAccessControl( + IBaseBinary theBinary, + String theSecurityContextSystem, + String theSecurityContextValue, + RequestDetails theRequestDetails) { if (theRequestDetails instanceof SystemRequestDetails) { return; } @@ -147,8 +154,8 @@ public class BinarySecurityContextInterceptor { * @param theRequestDetails The request details associated with this request * @return Returns true if the request should be permitted, 
and false otherwise */ - protected boolean securityContextIdentifierAllowed(String theSecurityContextSystem, String theSecurityContextValue, RequestDetails theRequestDetails) { + protected boolean securityContextIdentifierAllowed( + String theSecurityContextSystem, String theSecurityContextValue, RequestDetails theRequestDetails) { return false; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/ConsentInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/ConsentInterceptor.java index 89e6898a0dd..95b0481c9bf 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/ConsentInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/ConsentInterceptor.java @@ -69,9 +69,12 @@ import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_META; public class ConsentInterceptor { private static final AtomicInteger ourInstanceCount = new AtomicInteger(0); private final int myInstanceIndex = ourInstanceCount.incrementAndGet(); - private final String myRequestAuthorizedKey = ConsentInterceptor.class.getName() + "_" + myInstanceIndex + "_AUTHORIZED"; - private final String myRequestCompletedKey = ConsentInterceptor.class.getName() + "_" + myInstanceIndex + "_COMPLETED"; - private final String myRequestSeenResourcesKey = ConsentInterceptor.class.getName() + "_" + myInstanceIndex + "_SEENRESOURCES"; + private final String myRequestAuthorizedKey = + ConsentInterceptor.class.getName() + "_" + myInstanceIndex + "_AUTHORIZED"; + private final String myRequestCompletedKey = + ConsentInterceptor.class.getName() + "_" + myInstanceIndex + "_COMPLETED"; + private final String myRequestSeenResourcesKey = + ConsentInterceptor.class.getName() + "_" + myInstanceIndex + "_SEENRESOURCES"; private volatile List myConsentService = Collections.emptyList(); private IConsentContextServices myContextConsentServices = IConsentContextServices.NULL_IMPL; @@ -144,10 +147,8 @@ public class ConsentInterceptor { */ public ConsentInterceptor unregisterConsentService(IConsentService theConsentService) { Validate.notNull(theConsentService, "theConsentService must not be null"); - List newList = myConsentService - .stream() - .filter(t -> t != theConsentService) - .collect(Collectors.toList()); + List newList = + myConsentService.stream().filter(t -> t != theConsentService).collect(Collectors.toList()); myConsentService = newList; return this; } @@ -186,14 +187,16 @@ public class ConsentInterceptor { } @Hook(value = Pointcut.STORAGE_PRESEARCH_REGISTERED) - public void interceptPreSearchRegistered(RequestDetails theRequestDetails, ICachedSearchDetails theCachedSearchDetails) { + public void interceptPreSearchRegistered( + RequestDetails theRequestDetails, ICachedSearchDetails theCachedSearchDetails) { if (!isRequestAuthorized(theRequestDetails)) { theCachedSearchDetails.setCannotBeReused(); } } @Hook(value = Pointcut.STORAGE_PREACCESS_RESOURCES) - public void interceptPreAccess(RequestDetails theRequestDetails, IPreResourceAccessDetails thePreResourceAccessDetails) { + public void interceptPreAccess( + RequestDetails theRequestDetails, IPreResourceAccessDetails thePreResourceAccessDetails) { if (isRequestAuthorized(theRequestDetails)) { return; } @@ -211,7 +214,8 @@ public class ConsentInterceptor { for (int consentSvcIdx = 0; consentSvcIdx < myConsentService.size(); consentSvcIdx++) { IConsentService nextService = myConsentService.get(consentSvcIdx); - boolean 
shouldCallCanSeeResource = nextService.shouldProcessCanSeeResource(theRequestDetails, myContextConsentServices); + boolean shouldCallCanSeeResource = + nextService.shouldProcessCanSeeResource(theRequestDetails, myContextConsentServices); processAnyConsentSvcs |= shouldCallCanSeeResource; processConsentSvcs[consentSvcIdx] = shouldCallCanSeeResource; } @@ -230,9 +234,12 @@ public class ConsentInterceptor { continue; } - ConsentOutcome outcome = nextService.canSeeResource(theRequestDetails, nextResource, myContextConsentServices); + ConsentOutcome outcome = + nextService.canSeeResource(theRequestDetails, nextResource, myContextConsentServices); Validate.notNull(outcome, "Consent service returned null outcome"); - Validate.isTrue(outcome.getResource() == null, "Consent service returned a resource in its outcome. This is not permitted in canSeeResource(..)"); + Validate.isTrue( + outcome.getResource() == null, + "Consent service returned a resource in its outcome. This is not permitted in canSeeResource(..)"); boolean skipSubsequentServices = false; switch (outcome.getStatus()) { @@ -280,7 +287,8 @@ public class ConsentInterceptor { } for (IConsentService nextService : myConsentService) { - ConsentOutcome nextOutcome = nextService.willSeeResource(theRequestDetails, resource, myContextConsentServices); + ConsentOutcome nextOutcome = + nextService.willSeeResource(theRequestDetails, resource, myContextConsentServices); IBaseResource newResource = nextOutcome.getResource(); switch (nextOutcome.getStatus()) { @@ -334,14 +342,16 @@ public class ConsentInterceptor { if (authorizedResources.putIfAbsent(theResource.getResponseResource(), Boolean.TRUE) == null) { for (IConsentService next : myConsentService) { - final ConsentOutcome outcome = next.willSeeResource(theRequestDetails, theResource.getResponseResource(), myContextConsentServices); + final ConsentOutcome outcome = next.willSeeResource( + theRequestDetails, theResource.getResponseResource(), myContextConsentServices); if (outcome.getResource() != null) { theResource.setResponseResource(outcome.getResource()); } // Clear the total if (theResource.getResponseResource() instanceof IBaseBundle) { - BundleUtil.setTotal(theRequestDetails.getFhirContext(), (IBaseBundle) theResource.getResponseResource(), null); + BundleUtil.setTotal( + theRequestDetails.getFhirContext(), (IBaseBundle) theResource.getResponseResource(), null); } switch (outcome.getStatus()) { @@ -369,7 +379,11 @@ public class ConsentInterceptor { FhirContext ctx = theRequestDetails.getServer().getFhirContext(); IModelVisitor2 visitor = new IModelVisitor2() { @Override - public boolean acceptElement(IBase theElement, List theContainingElementPath, List theChildDefinitionPath, List> theElementDefinitionPath) { + public boolean acceptElement( + IBase theElement, + List theContainingElementPath, + List theChildDefinitionPath, + List> theElementDefinitionPath) { // Clear the total if (theElement instanceof IBaseBundle) { @@ -387,7 +401,8 @@ public class ConsentInterceptor { boolean shouldCheckChildren = true; for (IConsentService next : myConsentService) { - ConsentOutcome childOutcome = next.willSeeResource(theRequestDetails, resource, myContextConsentServices); + ConsentOutcome childOutcome = + next.willSeeResource(theRequestDetails, resource, myContextConsentServices); IBaseResource replacementResource = null; boolean shouldReplaceResource = false; @@ -407,11 +422,11 @@ public class ConsentInterceptor { if (shouldReplaceResource) { IBase container = 
theContainingElementPath.get(theContainingElementPath.size() - 2); - BaseRuntimeChildDefinition containerChildElement = theChildDefinitionPath.get(theChildDefinitionPath.size() - 1); + BaseRuntimeChildDefinition containerChildElement = + theChildDefinitionPath.get(theChildDefinitionPath.size() - 1); containerChildElement.getMutator().setValue(container, replacementResource); resource = replacementResource; } - } return shouldCheckChildren; @@ -421,12 +436,15 @@ public class ConsentInterceptor { } @Override - public boolean acceptUndeclaredExtension(IBaseExtension theNextExt, List theContainingElementPath, List theChildDefinitionPath, List> theElementDefinitionPath) { + public boolean acceptUndeclaredExtension( + IBaseExtension theNextExt, + List theContainingElementPath, + List theChildDefinitionPath, + List> theElementDefinitionPath) { return true; } }; ctx.newTerser().visit(outerResource, visitor); - } private IdentityHashMap getAuthorizedResourcesMap(RequestDetails theRequestDetails) { @@ -478,18 +496,24 @@ public class ConsentInterceptor { private void validateParameter(Map theParameterMap) { if (theParameterMap != null) { - if (theParameterMap.containsKey(Constants.PARAM_SEARCH_TOTAL_MODE) && Arrays.stream(theParameterMap.get("_total")).anyMatch("accurate"::equals)) { - throw new InvalidRequestException(Msg.code(2037) + Constants.PARAM_SEARCH_TOTAL_MODE + "=accurate is not permitted on this server"); + if (theParameterMap.containsKey(Constants.PARAM_SEARCH_TOTAL_MODE) + && Arrays.stream(theParameterMap.get("_total")).anyMatch("accurate"::equals)) { + throw new InvalidRequestException(Msg.code(2037) + Constants.PARAM_SEARCH_TOTAL_MODE + + "=accurate is not permitted on this server"); } - if (theParameterMap.containsKey(Constants.PARAM_SUMMARY) && Arrays.stream(theParameterMap.get("_summary")).anyMatch("count"::equals)) { - throw new InvalidRequestException(Msg.code(2038) + Constants.PARAM_SUMMARY + "=count is not permitted on this server"); + if (theParameterMap.containsKey(Constants.PARAM_SUMMARY) + && Arrays.stream(theParameterMap.get("_summary")).anyMatch("count"::equals)) { + throw new InvalidRequestException( + Msg.code(2038) + Constants.PARAM_SUMMARY + "=count is not permitted on this server"); } } } @SuppressWarnings("unchecked") - public static IdentityHashMap getAlreadySeenResourcesMap(RequestDetails theRequestDetails, String theKey) { - IdentityHashMap alreadySeenResources = (IdentityHashMap) theRequestDetails.getUserData().get(theKey); + public static IdentityHashMap getAlreadySeenResourcesMap( + RequestDetails theRequestDetails, String theKey) { + IdentityHashMap alreadySeenResources = (IdentityHashMap) + theRequestDetails.getUserData().get(theKey); if (alreadySeenResources == null) { alreadySeenResources = new IdentityHashMap<>(); theRequestDetails.getUserData().put(theKey, alreadySeenResources); diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/ConsentOperationStatusEnum.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/ConsentOperationStatusEnum.java index d0baf09ca5a..bdad756c174 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/ConsentOperationStatusEnum.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/ConsentOperationStatusEnum.java @@ -39,5 +39,4 @@ public enum ConsentOperationStatusEnum { * counting/caching methods) */ AUTHORIZED, - } diff --git 
a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/ConsentOutcome.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/ConsentOutcome.java index 77cc87a37b1..ebebfb3e4c3 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/ConsentOutcome.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/ConsentOutcome.java @@ -54,9 +54,14 @@ public class ConsentOutcome { this(theStatus, null, theResource); } - private ConsentOutcome(ConsentOperationStatusEnum theStatus, IBaseOperationOutcome theOperationOutcome, IBaseResource theResource) { + private ConsentOutcome( + ConsentOperationStatusEnum theStatus, + IBaseOperationOutcome theOperationOutcome, + IBaseResource theResource) { Validate.notNull(theStatus, "theStatus must not be null"); - Validate.isTrue(!(theOperationOutcome != null && theResource != null), "theOperationOutcome and theResource must not both be null"); + Validate.isTrue( + !(theOperationOutcome != null && theResource != null), + "theOperationOutcome and theResource must not both be null"); myStatus = theStatus; myOperationOutcome = theOperationOutcome; myResource = theResource; @@ -73,5 +78,4 @@ public class ConsentOutcome { public IBaseResource getResource() { return myResource; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/DelegatingConsentService.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/DelegatingConsentService.java index e57afb47818..f073a436b0a 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/DelegatingConsentService.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/DelegatingConsentService.java @@ -38,13 +38,15 @@ public class DelegatingConsentService implements IConsentService { } @Override - public ConsentOutcome canSeeResource(RequestDetails theRequestDetails, IBaseResource theResource, IConsentContextServices theContextServices) { + public ConsentOutcome canSeeResource( + RequestDetails theRequestDetails, IBaseResource theResource, IConsentContextServices theContextServices) { return myTarget.canSeeResource(theRequestDetails, theResource, theContextServices); } @Override - public ConsentOutcome willSeeResource(RequestDetails theRequestDetails, IBaseResource theResource, IConsentContextServices theContextServices) { - return myTarget.willSeeResource(theRequestDetails, theResource ,theContextServices); + public ConsentOutcome willSeeResource( + RequestDetails theRequestDetails, IBaseResource theResource, IConsentContextServices theContextServices) { + return myTarget.willSeeResource(theRequestDetails, theResource, theContextServices); } @Override @@ -53,7 +55,10 @@ public class DelegatingConsentService implements IConsentService { } @Override - public void completeOperationFailure(RequestDetails theRequestDetails, BaseServerResponseException theException, IConsentContextServices theContextServices) { + public void completeOperationFailure( + RequestDetails theRequestDetails, + BaseServerResponseException theException, + IConsentContextServices theContextServices) { myTarget.completeOperationFailure(theRequestDetails, theException, theContextServices); } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/IConsentContextServices.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/IConsentContextServices.java index 
007d924732c..81d72011b32 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/IConsentContextServices.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/IConsentContextServices.java @@ -25,5 +25,4 @@ public interface IConsentContextServices { * Implementation of this interface that simply always throws a {@link UnsupportedOperationException} */ IConsentContextServices NULL_IMPL = new NullConsentContextServices(); - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/IConsentService.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/IConsentService.java index f58cf3641b9..27c3622760a 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/IConsentService.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/IConsentService.java @@ -52,7 +52,8 @@ public interface IConsentService { * consent directives. * @return An outcome object. See {@link ConsentOutcome} */ - default ConsentOutcome startOperation(RequestDetails theRequestDetails, IConsentContextServices theContextServices) { + default ConsentOutcome startOperation( + RequestDetails theRequestDetails, IConsentContextServices theContextServices) { return ConsentOutcome.PROCEED; } @@ -76,7 +77,8 @@ public interface IConsentService { * @return Returns {@literal false} to avoid calling {@link #canSeeResource(RequestDetails, IBaseResource, IConsentContextServices)} * @since 6.0.0 */ - default boolean shouldProcessCanSeeResource(RequestDetails theRequestDetails, IConsentContextServices theContextServices) { + default boolean shouldProcessCanSeeResource( + RequestDetails theRequestDetails, IConsentContextServices theContextServices) { return true; } @@ -130,7 +132,8 @@ public interface IConsentService { * to modify the response object, so an error will be thrown if {@link ConsentOutcome#getResource()} * returns a non-null response. */ - default ConsentOutcome canSeeResource(RequestDetails theRequestDetails, IBaseResource theResource, IConsentContextServices theContextServices) { + default ConsentOutcome canSeeResource( + RequestDetails theRequestDetails, IBaseResource theResource, IConsentContextServices theContextServices) { return ConsentOutcome.PROCEED; } @@ -168,7 +171,8 @@ public interface IConsentService { * @return An outcome object. See method documentation for a description. * @see #canSeeResource(RequestDetails, IBaseResource, IConsentContextServices) for a description of the difference between these two methods. */ - default ConsentOutcome willSeeResource(RequestDetails theRequestDetails, IBaseResource theResource, IConsentContextServices theContextServices) { + default ConsentOutcome willSeeResource( + RequestDetails theRequestDetails, IBaseResource theResource, IConsentContextServices theContextServices) { return ConsentOutcome.PROCEED; } @@ -191,8 +195,8 @@ public interface IConsentService { * consent directives. * @see #completeOperationFailure(RequestDetails, BaseServerResponseException, IConsentContextServices) */ - default void completeOperationSuccess(RequestDetails theRequestDetails, IConsentContextServices theContextServices) { - } + default void completeOperationSuccess( + RequestDetails theRequestDetails, IConsentContextServices theContextServices) {} /** * This method is called when an operation is complete. It can be used to perform @@ -214,6 +218,8 @@ public interface IConsentService { * consent directives. 
* @see #completeOperationSuccess(RequestDetails, IConsentContextServices) */ - default void completeOperationFailure(RequestDetails theRequestDetails, BaseServerResponseException theException, IConsentContextServices theContextServices) { - } + default void completeOperationFailure( + RequestDetails theRequestDetails, + BaseServerResponseException theException, + IConsentContextServices theContextServices) {} } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/NullConsentContextServices.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/NullConsentContextServices.java index 055bc40c9b0..86688be8cb5 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/NullConsentContextServices.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/NullConsentContextServices.java @@ -22,5 +22,4 @@ package ca.uhn.fhir.rest.server.interceptor.consent; /** * Implementation of {@link IConsentContextServices} that always throws a {@link UnsupportedOperationException} */ -class NullConsentContextServices implements IConsentContextServices { -} +class NullConsentContextServices implements IConsentContextServices {} diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/RuleFilteringConsentService.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/RuleFilteringConsentService.java index 3af11415817..cda66a6a3d6 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/RuleFilteringConsentService.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/RuleFilteringConsentService.java @@ -58,12 +58,13 @@ public class RuleFilteringConsentService implements IConsentService { * @return REJECT if the rules don't ALLOW, PROCEED otherwise. */ @Override - public ConsentOutcome canSeeResource(RequestDetails theRequestDetails, IBaseResource theResource, IConsentContextServices theContextServices) { + public ConsentOutcome canSeeResource( + RequestDetails theRequestDetails, IBaseResource theResource, IConsentContextServices theContextServices) { ourLog.trace("canSeeResource() {} {}", theRequestDetails, theResource); // apply rules! If yes, then yes! - AuthorizationInterceptor.Verdict ruleResult = - myRuleApplier.applyRulesAndReturnDecision(theRequestDetails.getRestOperationType(), theRequestDetails, null, null, theResource, CAN_SEE_POINTCUT); + AuthorizationInterceptor.Verdict ruleResult = myRuleApplier.applyRulesAndReturnDecision( + theRequestDetails.getRestOperationType(), theRequestDetails, null, null, theResource, CAN_SEE_POINTCUT); if (ruleResult.getDecision() == PolicyEnum.ALLOW) { // are these the right codes? 
return ConsentOutcome.PROCEED; diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/partition/RequestTenantPartitionInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/partition/RequestTenantPartitionInterceptor.java index eef6a577218..8e035e5a2d0 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/partition/RequestTenantPartitionInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/partition/RequestTenantPartitionInterceptor.java @@ -70,6 +70,4 @@ public class RequestTenantPartitionInterceptor { return RequestPartitionId.fromPartitionName(tenantId); } - - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/StandardizingInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/StandardizingInterceptor.java index 8a472df001b..526b4d7568f 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/StandardizingInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/StandardizingInterceptor.java @@ -19,10 +19,10 @@ */ package ca.uhn.fhir.rest.server.interceptor.s13n; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.fhirpath.FhirPathExecutionException; import ca.uhn.fhir.fhirpath.IFhirPath; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.api.Hook; import ca.uhn.fhir.interceptor.api.Interceptor; import ca.uhn.fhir.interceptor.api.Pointcut; @@ -52,7 +52,12 @@ public class StandardizingInterceptor { * Pre-defined standardizers */ public enum StandardizationType { - NAME_FAMILY, NAME_GIVEN, EMAIL, TITLE, PHONE, TEXT; + NAME_FAMILY, + NAME_GIVEN, + EMAIL, + TITLE, + PHONE, + TEXT; } public static final String STANDARDIZATION_DISABLED_HEADER = "HAPI-Standardization-Disabled"; @@ -95,7 +100,8 @@ public class StandardizingInterceptor { } @Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED) - public void resourcePreUpdate(RequestDetails theRequest, IBaseResource theOldResource, IBaseResource theNewResource) { + public void resourcePreUpdate( + RequestDetails theRequest, IBaseResource theOldResource, IBaseResource theNewResource) { ourLog.debug("Standardizing on pre-update for - {}, {}, {}", theRequest, theOldResource, theNewResource); standardize(theRequest, theNewResource); } @@ -107,7 +113,8 @@ public class StandardizingInterceptor { } if (!theRequest.getHeaders(STANDARDIZATION_DISABLED_HEADER).isEmpty()) { - ourLog.debug("Standardization for {} is disabled via header {}", theResource, STANDARDIZATION_DISABLED_HEADER); + ourLog.debug( + "Standardization for {} is disabled via header {}", theResource, STANDARDIZATION_DISABLED_HEADER); return; } @@ -144,7 +151,10 @@ public class StandardizingInterceptor { for (IBase v : values) { if (!(v instanceof IPrimitiveType)) { - ourLog.warn("Value at path {} is of type {}, which is not of primitive type - skipping", rule.getKey(), v.fhirType()); + ourLog.warn( + "Value at path {} is of type {}, which is not of primitive type - skipping", + rule.getKey(), + v.fhirType()); continue; } IPrimitiveType value = (IPrimitiveType) v; @@ -164,9 +174,11 @@ public class StandardizingInterceptor { IStandardizer standardizer; try { - standardizer = (IStandardizer) Class.forName(standardizerName).getDeclaredConstructor().newInstance(); + standardizer = (IStandardizer) + Class.forName(standardizerName).getDeclaredConstructor().newInstance(); } catch (Exception e) { - throw 
new RuntimeException(Msg.code(349) + String.format("Unable to create standardizer %s", standardizerName), e); + throw new RuntimeException( + Msg.code(349) + String.format("Unable to create standardizer %s", standardizerName), e); } myStandardizers.put(standardizerName, standardizer); diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/FirstNameStandardizer.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/FirstNameStandardizer.java index c9e28f043ef..222d8fda9d9 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/FirstNameStandardizer.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/FirstNameStandardizer.java @@ -53,9 +53,9 @@ public class FirstNameStandardizer extends TextStandardizer { theString = replaceTranslates(theString); return Arrays.stream(theString.split("\\s+")) - .map(this::standardizeNameToken) - .filter(s -> !StringUtils.isEmpty(s)) - .collect(Collectors.joining(" ")); + .map(this::standardizeNameToken) + .filter(s -> !StringUtils.isEmpty(s)) + .collect(Collectors.joining(" ")); } protected String capitalize(String theString) { @@ -127,14 +127,15 @@ public class FirstNameStandardizer extends TextStandardizer { buf.append(theDelimiter); } buf.append(s); - } return buf.toString(); } protected String checkForRegexp(String theExpression) { - if (theExpression.equals(".") || theExpression.equals("|") - || theExpression.equals("(") || theExpression.equals(")")) { + if (theExpression.equals(".") + || theExpression.equals("|") + || theExpression.equals("(") + || theExpression.equals(")")) { return "\\".concat(theExpression); } return theExpression; diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/IStandardizer.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/IStandardizer.java index f2cdee40e54..1acaf1df915 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/IStandardizer.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/IStandardizer.java @@ -31,5 +31,4 @@ public interface IStandardizer { * @return Returns a standardized string. 
*/ public String standardize(String theString); - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/LastNameStandardizer.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/LastNameStandardizer.java index dd368310ef9..c40fac5c07a 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/LastNameStandardizer.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/LastNameStandardizer.java @@ -76,5 +76,4 @@ public class LastNameStandardizer extends FirstNameStandardizer { } return theToken; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/NoiseCharacters.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/NoiseCharacters.java index 6c0a0aee278..9b1191cee4f 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/NoiseCharacters.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/NoiseCharacters.java @@ -94,7 +94,8 @@ public class NoiseCharacters { int upper = toInt(theUpperBound); if (lower > upper) { - throw new IllegalArgumentException(Msg.code(352) + String.format("Invalid character range %s-%s", theLowerBound, theUpperBound)); + throw new IllegalArgumentException( + Msg.code(352) + String.format("Invalid character range %s-%s", theLowerBound, theUpperBound)); } if (upper - lower >= RANGE_THRESHOLD) { @@ -115,5 +116,4 @@ public class NoiseCharacters { return Integer.parseInt(theLiteral.substring(2), 16); } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/PhoneStandardizer.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/PhoneStandardizer.java index 944edfa4ffb..a98211291e1 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/PhoneStandardizer.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/PhoneStandardizer.java @@ -37,5 +37,4 @@ public class PhoneStandardizer implements IStandardizer { } return buf.toString().replaceFirst(PHONE_NUMBER_PATTERN, PHONE_NUMBER_REPLACE_PATTERN); } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/TextStandardizer.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/TextStandardizer.java index fc1ac7e0dbf..5b15021f1d8 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/TextStandardizer.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/TextStandardizer.java @@ -62,9 +62,7 @@ public class TextStandardizer implements IStandardizer { } protected Set asSet(Character... 
theCharacters) { - return Arrays.stream(theCharacters) - .map(c -> (int) c) - .collect(Collectors.toSet()); + return Arrays.stream(theCharacters).map(c -> (int) c).collect(Collectors.toSet()); } protected TextStandardizer addTranslate(int theTranslate, char theMapping) { @@ -92,7 +90,7 @@ public class TextStandardizer implements IStandardizer { myAllowedExtendedAscii = new ArrayList<>(); // refer to https://www.ascii-code.com for the codes - for (int[] i : new int[][]{{192, 214}, {216, 246}, {248, 255}}) { + for (int[] i : new int[][] {{192, 214}, {216, 246}, {248, 255}}) { addAllowedExtendedAsciiRange(i[0], i[1]); } } @@ -131,11 +129,11 @@ public class TextStandardizer implements IStandardizer { offset += Character.charCount(codePoint); switch (Character.getType(codePoint)) { - case Character.CONTROL: // \p{Cc} - case Character.FORMAT: // \p{Cf} + case Character.CONTROL: // \p{Cc} + case Character.FORMAT: // \p{Cf} case Character.PRIVATE_USE: // \p{Co} - case Character.SURROGATE: // \p{Cs} - case Character.UNASSIGNED: // \p{Cn} + case Character.SURROGATE: // \p{Cs} + case Character.UNASSIGNED: // \p{Cn} break; default: if (!isNoiseCharacter(codePoint)) { @@ -161,5 +159,4 @@ public class TextStandardizer implements IStandardizer { } return myNoiseCharacters.isNoise(theChar); } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/TitleStandardizer.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/TitleStandardizer.java index 10369ca4ddd..8fbeae48655 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/TitleStandardizer.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/s13n/standardizers/TitleStandardizer.java @@ -33,7 +33,8 @@ import java.util.stream.Collectors; */ public class TitleStandardizer extends LastNameStandardizer { - private Set myExceptions = new HashSet<>(Arrays.asList("EAS", "EPS", "LLC", "LLP", "of", "at", "in", "and")); + private Set myExceptions = + new HashSet<>(Arrays.asList("EAS", "EPS", "LLC", "LLP", "of", "at", "in", "and")); private Set myBiGramExceptions = new HashSet(); public TitleStandardizer() { @@ -55,11 +56,11 @@ public class TitleStandardizer extends LastNameStandardizer { theString = replaceTranslates(theString); return Arrays.stream(theString.split("\\s+")) - .map(String::trim) - .map(this::standardizeText) - .filter(s -> !StringUtils.isEmpty(s)) - .map(this::checkTitleExceptions) - .collect(Collectors.joining(" ")); + .map(String::trim) + .map(this::standardizeText) + .filter(s -> !StringUtils.isEmpty(s)) + .map(this::checkTitleExceptions) + .collect(Collectors.joining(" ")); } private List split(String theString) { @@ -101,7 +102,7 @@ public class TitleStandardizer extends LastNameStandardizer { List parts = split(theToken); String prevPart = null; - for(String part : parts) { + for (String part : parts) { if (isAllText(part)) { part = standardizeNameToken(part); } @@ -115,8 +116,7 @@ public class TitleStandardizer extends LastNameStandardizer { private String checkBiGram(String thePart0, String thePart1) { for (String[] biGram : myBiGramExceptions) { - if (biGram[0].equalsIgnoreCase(thePart0) - && biGram[1].equalsIgnoreCase(thePart1)) { + if (biGram[0].equalsIgnoreCase(thePart0) && biGram[1].equalsIgnoreCase(thePart1)) { return biGram[1]; } } @@ -137,9 +137,9 @@ public class TitleStandardizer extends LastNameStandardizer { @Override protected String standardizeNameToken(String theToken) { 
String exception = myExceptions.stream() - .filter(s -> s.equalsIgnoreCase(theToken)) - .findFirst() - .orElse(null); + .filter(s -> s.equalsIgnoreCase(theToken)) + .findFirst() + .orElse(null); if (exception != null) { return exception; } @@ -149,8 +149,8 @@ public class TitleStandardizer extends LastNameStandardizer { private String checkTitleExceptions(String theString) { return myExceptions.stream() - .filter(s -> s.equalsIgnoreCase(theString)) - .findFirst() - .orElse(theString); + .filter(s -> s.equalsIgnoreCase(theString)) + .findFirst() + .orElse(theString); } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/ValidationMessageSuppressingInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/ValidationMessageSuppressingInterceptor.java index 7126d9fe41b..2e147e3b879 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/ValidationMessageSuppressingInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/ValidationMessageSuppressingInterceptor.java @@ -76,11 +76,11 @@ public class ValidationMessageSuppressingInterceptor { return this; } - @Hook(Pointcut.VALIDATION_COMPLETED) public ValidationResult handle(ValidationResult theResult) { - List newMessages = new ArrayList<>(theResult.getMessages().size()); + List newMessages = + new ArrayList<>(theResult.getMessages().size()); for (SingleValidationMessage next : theResult.getMessages()) { String nextMessage = next.getMessage(); @@ -103,5 +103,4 @@ public class ValidationMessageSuppressingInterceptor { return new ValidationResult(theResult.getContext(), newMessages); } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/AddressValidatingInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/AddressValidatingInterceptor.java index 10646a4846e..654b2cf68f6 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/AddressValidatingInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/AddressValidatingInterceptor.java @@ -19,26 +19,21 @@ */ package ca.uhn.fhir.rest.server.interceptor.validation.address; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.BaseRuntimeChildDefinition; -import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition; -import ca.uhn.fhir.context.BaseRuntimeElementDefinition; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.RuntimeResourceDefinition; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.api.Hook; import ca.uhn.fhir.interceptor.api.Interceptor; import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.interceptor.ConfigLoader; import ca.uhn.fhir.util.ExtensionUtil; -import ca.uhn.fhir.util.FhirTerser; -import ca.uhn.fhir.util.IModelVisitor2; import ca.uhn.fhir.util.TerserUtil; import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseExtension; import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.instance.model.api.IDomainResource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -90,10 +85,11 @@ public class AddressValidatingInterceptor { Class validatorClass = Class.forName(validatorClassName); IAddressValidator addressValidator; try { - 
addressValidator = (IAddressValidator) validatorClass - .getDeclaredConstructor(Properties.class).newInstance(theProperties); + addressValidator = (IAddressValidator) + validatorClass.getDeclaredConstructor(Properties.class).newInstance(theProperties); } catch (Exception e) { - addressValidator = (IAddressValidator) validatorClass.getDeclaredConstructor().newInstance(); + addressValidator = (IAddressValidator) + validatorClass.getDeclaredConstructor().newInstance(); } setAddressValidator(addressValidator); } catch (Exception e) { @@ -108,7 +104,8 @@ public class AddressValidatingInterceptor { } @Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED) - public void resourcePreUpdate(RequestDetails theRequest, IBaseResource theOldResource, IBaseResource theNewResource) { + public void resourcePreUpdate( + RequestDetails theRequest, IBaseResource theOldResource, IBaseResource theNewResource) { ourLog.debug("Validating address on for update {}, {}, {}", theOldResource, theNewResource, theRequest); handleRequest(theRequest, theNewResource); } @@ -130,10 +127,9 @@ public class AddressValidatingInterceptor { } FhirContext ctx = theRequest.getFhirContext(); - List addresses = getAddresses(theResource, ctx) - .stream() - .filter(this::isValidating) - .collect(Collectors.toList()); + List addresses = getAddresses(theResource, ctx).stream() + .filter(this::isValidating) + .collect(Collectors.toList()); if (!addresses.isEmpty()) { validateAddresses(theRequest, theResource, addresses); @@ -145,7 +141,8 @@ public class AddressValidatingInterceptor { * * @return Returns true if all addresses are valid, or false if there is at least one invalid address */ - protected boolean validateAddresses(RequestDetails theRequest, IBaseResource theResource, List theAddresses) { + protected boolean validateAddresses( + RequestDetails theRequest, IBaseResource theResource, List theAddresses) { boolean retVal = true; for (IBase address : theAddresses) { retVal &= validateAddress(address, theRequest.getFhirContext()); @@ -172,7 +169,8 @@ public class AddressValidatingInterceptor { ourLog.debug("Validated address {}", validationResult); clearPossibleDuplicatesDueToTerserCloning(theAddress, theFhirContext); - ExtensionUtil.setExtension(theFhirContext, theAddress, getExtensionUrl(), "boolean", !validationResult.isValid()); + ExtensionUtil.setExtension( + theFhirContext, theAddress, getExtensionUrl(), "boolean", !validationResult.isValid()); if (validationResult.getValidatedAddress() != null) { theFhirContext.newTerser().cloneInto(validationResult.getValidatedAddress(), theAddress, true); } else { @@ -207,11 +205,9 @@ public class AddressValidatingInterceptor { List retVal = new ArrayList<>(); for (BaseRuntimeChildDefinition c : definition.getChildren()) { Class childClass = c.getClass(); - List allValues = c.getAccessor() - .getValues(theResource) - .stream() - .filter(v -> ADDRESS_TYPE_NAME.equals(v.getClass().getSimpleName())) - .collect(Collectors.toList()); + List allValues = c.getAccessor().getValues(theResource).stream() + .filter(v -> ADDRESS_TYPE_NAME.equals(v.getClass().getSimpleName())) + .collect(Collectors.toList()); retVal.addAll(allValues); } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/AddressValidationException.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/AddressValidationException.java index 185cc464351..f40914c0cce 100644 --- 
a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/AddressValidationException.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/AddressValidationException.java @@ -34,5 +34,4 @@ public class AddressValidationException extends RuntimeException { public AddressValidationException(Throwable theCause) { super(theCause); } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/AddressValidationResult.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/AddressValidationResult.java index cb746793b6b..3618bf7a15d 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/AddressValidationResult.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/AddressValidationResult.java @@ -74,11 +74,10 @@ public class AddressValidationResult { @Override public String toString() { - return - " isValid=" + myIsValid + - ", validatedAddressString='" + myValidatedAddressString + '\'' + - ", validationResults=" + myValidationResults + '\'' + - ", rawResponse='" + myRawResponse + '\'' + - ", myValidatedAddress='" + myValidatedAddress + '\''; + return " isValid=" + myIsValid + ", validatedAddressString='" + + myValidatedAddressString + '\'' + ", validationResults=" + + myValidationResults + '\'' + ", rawResponse='" + + myRawResponse + '\'' + ", myValidatedAddress='" + + myValidatedAddress + '\''; } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/IAddressValidator.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/IAddressValidator.java index 216ee122939..444a77bd451 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/IAddressValidator.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/IAddressValidator.java @@ -30,22 +30,26 @@ public interface IAddressValidator { /** * URL for validation results that should be placed on addresses. Extension with boolean value "true" indicates there there is an address validation error. */ - public static final String ADDRESS_VALIDATION_EXTENSION_URL = "http://hapifhir.org/StructureDefinition/ext-validation-address-has-error"; + public static final String ADDRESS_VALIDATION_EXTENSION_URL = + "http://hapifhir.org/StructureDefinition/ext-validation-address-has-error"; /** * URL for an optional address quality extensions that may be added to addresses. */ - public static final String ADDRESS_QUALITY_EXTENSION_URL = "http://hapifhir.org/StructureDefinition/ext-validation-address-quality"; + public static final String ADDRESS_QUALITY_EXTENSION_URL = + "http://hapifhir.org/StructureDefinition/ext-validation-address-quality"; /** * URL for an optional geocoding accuracy extensions that may be added to addresses. */ - public static final String ADDRESS_GEO_ACCURACY_EXTENSION_URL = "http://hapifhir.org/StructureDefinition/ext-validation-address-geo-accuracy"; + public static final String ADDRESS_GEO_ACCURACY_EXTENSION_URL = + "http://hapifhir.org/StructureDefinition/ext-validation-address-geo-accuracy"; /** * URL for an optional address verification extensions that may be added to addresses. 
*/ - public static final String ADDRESS_VERIFICATION_CODE_EXTENSION_URL = "http://hapifhir.org/StructureDefinition/ext-validation-address-verification"; + public static final String ADDRESS_VERIFICATION_CODE_EXTENSION_URL = + "http://hapifhir.org/StructureDefinition/ext-validation-address-verification"; /** * URL for an optional FHIR geolocation extension. @@ -61,5 +65,4 @@ public interface IAddressValidator { * @throws AddressValidationException AddressValidationException is thrown in case validation can not be completed successfully. */ AddressValidationResult isValid(IBase theAddress, FhirContext theFhirContext) throws AddressValidationException; - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/impl/BaseRestfulValidator.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/impl/BaseRestfulValidator.java index a8b1d5f2aca..f7b820f6179 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/impl/BaseRestfulValidator.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/impl/BaseRestfulValidator.java @@ -19,14 +19,13 @@ */ package ca.uhn.fhir.rest.server.interceptor.validation.address.impl; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; +import ca.uhn.fhir.rest.server.interceptor.validation.address.AddressValidationException; +import ca.uhn.fhir.rest.server.interceptor.validation.address.AddressValidationResult; import ca.uhn.fhir.rest.server.interceptor.validation.address.IAddressValidator; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; -import ca.uhn.fhir.rest.server.interceptor.validation.address.AddressValidationException; -import ca.uhn.fhir.rest.server.interceptor.validation.address.AddressValidationResult; -import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBase; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -44,9 +43,11 @@ public abstract class BaseRestfulValidator implements IAddressValidator { private Properties myProperties; - protected abstract AddressValidationResult getValidationResult(AddressValidationResult theResult, JsonNode response, FhirContext theFhirContext) throws Exception; + protected abstract AddressValidationResult getValidationResult( + AddressValidationResult theResult, JsonNode response, FhirContext theFhirContext) throws Exception; - protected abstract ResponseEntity getResponseEntity(IBase theAddress, FhirContext theFhirContext) throws Exception; + protected abstract ResponseEntity getResponseEntity(IBase theAddress, FhirContext theFhirContext) + throws Exception; protected RestTemplate newTemplate() { return new RestTemplate(); @@ -57,16 +58,19 @@ public abstract class BaseRestfulValidator implements IAddressValidator { } @Override - public AddressValidationResult isValid(IBase theAddress, FhirContext theFhirContext) throws AddressValidationException { + public AddressValidationResult isValid(IBase theAddress, FhirContext theFhirContext) + throws AddressValidationException { ResponseEntity entity; try { entity = getResponseEntity(theAddress, theFhirContext); } catch (Exception e) { - throw new AddressValidationException(Msg.code(345) + "Unable to complete address validation web-service call", e); + throw new AddressValidationException( + Msg.code(345) + "Unable to complete address validation web-service call", e); } if 
(isError(entity)) { - throw new AddressValidationException(Msg.code(346) + String.format("Service returned an error code %s", entity.getStatusCode())); + throw new AddressValidationException( + Msg.code(346) + String.format("Service returned an error code %s", entity.getStatusCode())); } String responseBody = entity.getBody(); diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/impl/LoquateAddressValidator.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/impl/LoquateAddressValidator.java index 419bfcb268a..3cb331d777b 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/impl/LoquateAddressValidator.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/address/impl/LoquateAddressValidator.java @@ -40,12 +40,11 @@ import org.springframework.http.HttpEntity; import org.springframework.http.HttpHeaders; import org.springframework.http.ResponseEntity; -import javax.annotation.Nullable; import java.math.BigDecimal; -import java.util.Arrays; import java.util.Properties; import java.util.regex.Matcher; import java.util.regex.Pattern; +import javax.annotation.Nullable; import static ca.uhn.fhir.rest.server.interceptor.validation.address.IAddressValidator.ADDRESS_QUALITY_EXTENSION_URL; import static ca.uhn.fhir.rest.server.interceptor.validation.address.IAddressValidator.ADDRESS_VERIFICATION_CODE_EXTENSION_URL; @@ -65,21 +64,27 @@ public class LoquateAddressValidator extends BaseRestfulValidator { public static final String LOQUATE_AVC = "AVC"; public static final String LOQUATE_GEO_ACCURACY = "GeoAccuracy"; - protected static final String[] DUPLICATE_FIELDS_IN_ADDRESS_LINES = {"Locality", "AdministrativeArea", "PostalCode"}; - protected static final String DEFAULT_DATA_CLEANSE_ENDPOINT = "https://api.addressy.com/Cleansing/International/Batch/v1.00/json4.ws"; + protected static final String[] DUPLICATE_FIELDS_IN_ADDRESS_LINES = {"Locality", "AdministrativeArea", "PostalCode" + }; + protected static final String DEFAULT_DATA_CLEANSE_ENDPOINT = + "https://api.addressy.com/Cleansing/International/Batch/v1.00/json4.ws"; protected static final int MAX_ADDRESS_LINES = 8; private Pattern myCommaPattern = Pattern.compile("\\,(\\S)"); public LoquateAddressValidator(Properties theProperties) { super(theProperties); - Validate.isTrue(theProperties.containsKey(PROPERTY_SERVICE_KEY) || theProperties.containsKey(PROPERTY_SERVICE_ENDPOINT), - "Expected service key or custom service endpoint in the configuration, but got " + theProperties); + Validate.isTrue( + theProperties.containsKey(PROPERTY_SERVICE_KEY) || theProperties.containsKey(PROPERTY_SERVICE_ENDPOINT), + "Expected service key or custom service endpoint in the configuration, but got " + theProperties); } @Override - protected AddressValidationResult getValidationResult(AddressValidationResult theResult, JsonNode response, FhirContext theFhirContext) { - Validate.isTrue(response.isArray() && response.size() >= 1, "Invalid response - expected to get an array of validated addresses"); + protected AddressValidationResult getValidationResult( + AddressValidationResult theResult, JsonNode response, FhirContext theFhirContext) { + Validate.isTrue( + response.isArray() && response.size() >= 1, + "Invalid response - expected to get an array of validated addresses"); JsonNode firstMatch = response.get(0); Validate.isTrue(firstMatch.has("Matches"), "Invalid response - matches are unavailable"); 
@@ -91,7 +96,8 @@ public class LoquateAddressValidator extends BaseRestfulValidator { return toAddressValidationResult(theResult, match, theFhirContext); } - private AddressValidationResult toAddressValidationResult(AddressValidationResult theResult, JsonNode theMatch, FhirContext theFhirContext) { + private AddressValidationResult toAddressValidationResult( + AddressValidationResult theResult, JsonNode theMatch, FhirContext theFhirContext) { theResult.setValid(isValid(theMatch)); ourLog.debug("Address validation flag {}", theResult.isValid()); @@ -148,7 +154,12 @@ public class LoquateAddressValidator extends BaseRestfulValidator { return helper.getAddress(); } - private void addExtension(JsonNode theMatch, String theMatchField, String theExtUrl, AddressHelper theHelper, FhirContext theFhirContext) { + private void addExtension( + JsonNode theMatch, + String theMatchField, + String theExtUrl, + AddressHelper theHelper, + FhirContext theFhirContext) { String addressQuality = getField(theMatch, theMatchField); if (StringUtils.isEmpty(addressQuality)) { ourLog.debug("{} is not found in {}", theMatchField, theMatch); @@ -173,12 +184,16 @@ public class LoquateAddressValidator extends BaseRestfulValidator { IBaseExtension geolocation = ExtensionUtil.addExtension(address, FHIR_GEOCODE_EXTENSION_URL); IBaseExtension latitude = ExtensionUtil.addExtension(geolocation, "latitude"); - latitude.setValue(TerserUtil.newElement(theFhirContext, "decimal", - BigDecimal.valueOf(theMatch.get("Latitude").asDouble()))); + latitude.setValue(TerserUtil.newElement( + theFhirContext, + "decimal", + BigDecimal.valueOf(theMatch.get("Latitude").asDouble()))); IBaseExtension longitude = ExtensionUtil.addExtension(geolocation, "longitude"); - longitude.setValue(TerserUtil.newElement(theFhirContext, "decimal", - BigDecimal.valueOf(theMatch.get("Longitude").asDouble()))); + longitude.setValue(TerserUtil.newElement( + theFhirContext, + "decimal", + BigDecimal.valueOf(theMatch.get("Longitude").asDouble()))); } private void removeDuplicateAddressLines(JsonNode match, AddressHelper address) { diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/fields/EmailValidator.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/fields/EmailValidator.java index 73dbf507cd6..40604e51559 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/fields/EmailValidator.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/fields/EmailValidator.java @@ -23,8 +23,8 @@ import java.util.regex.Pattern; public class EmailValidator implements IValidator { - private Pattern myEmailPattern = Pattern.compile("^[A-Z0-9._%+-]+@[A-Z0-9.-]+\\.[A-Z]{2,6}$", - Pattern.CASE_INSENSITIVE); + private Pattern myEmailPattern = + Pattern.compile("^[A-Z0-9._%+-]+@[A-Z0-9.-]+\\.[A-Z]{2,6}$", Pattern.CASE_INSENSITIVE); @Override public boolean isValid(String theString) { diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/fields/FieldValidatingInterceptor.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/fields/FieldValidatingInterceptor.java index 2068da63f04..125e32bd43b 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/fields/FieldValidatingInterceptor.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/fields/FieldValidatingInterceptor.java @@ -19,9 +19,9 @@ */ package 
ca.uhn.fhir.rest.server.interceptor.validation.fields; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.fhirpath.IFhirPath; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.api.Hook; import ca.uhn.fhir.interceptor.api.Interceptor; import ca.uhn.fhir.interceptor.api.Pointcut; @@ -29,7 +29,6 @@ import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.interceptor.ConfigLoader; import ca.uhn.fhir.util.ExtensionUtil; import org.hl7.fhir.instance.model.api.IBase; -import org.hl7.fhir.instance.model.api.IBaseExtension; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.slf4j.Logger; @@ -70,8 +69,13 @@ public class FieldValidatingInterceptor { } @Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED) - public void resourcePreUpdate(RequestDetails theRequest, IBaseResource theOldResource, IBaseResource theNewResource) { - ourLog.debug("Validating address on update for resource {} / old resource {} / request {}", theOldResource, theNewResource, theRequest); + public void resourcePreUpdate( + RequestDetails theRequest, IBaseResource theOldResource, IBaseResource theNewResource) { + ourLog.debug( + "Validating address on update for resource {} / old resource {} / request {}", + theOldResource, + theNewResource, + theRequest); handleRequest(theRequest, theNewResource); } @@ -137,7 +141,8 @@ public class FieldValidatingInterceptor { try { return (IValidator) Class.forName(theValue).getDeclaredConstructor().newInstance(); } catch (Exception e) { - throw new IllegalStateException(Msg.code(348) + String.format("Unable to create validator for %s", theValue), e); + throw new IllegalStateException( + Msg.code(348) + String.format("Unable to create validator for %s", theValue), e); } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/fields/IValidator.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/fields/IValidator.java index c8f49cc777e..7b9039a564e 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/fields/IValidator.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/fields/IValidator.java @@ -21,8 +21,8 @@ package ca.uhn.fhir.rest.server.interceptor.validation.fields; public interface IValidator { - public static final String VALIDATION_EXTENSION_URL = "https://hapifhir.org/StructureDefinition/ext-validation-field-has-error"; + public static final String VALIDATION_EXTENSION_URL = + "https://hapifhir.org/StructureDefinition/ext-validation-field-has-error"; public boolean isValid(String theString); - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/helpers/AddressHelper.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/helpers/AddressHelper.java index f79b9543b8c..e65eaec2b28 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/helpers/AddressHelper.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/validation/helpers/AddressHelper.java @@ -41,8 +41,9 @@ public class AddressHelper extends PropertyModifyingHelper { public static final String FIELD_POSTAL = "postalCode"; public static final String FIELD_COUNTRY = "country"; - public static final String[] FIELD_NAMES = {FIELD_TEXT, FIELD_LINE, FIELD_CITY, FIELD_DISTRICT, FIELD_STATE, - FIELD_POSTAL, FIELD_COUNTRY}; + public 
static final String[] FIELD_NAMES = { + FIELD_TEXT, FIELD_LINE, FIELD_CITY, FIELD_DISTRICT, FIELD_STATE, FIELD_POSTAL, FIELD_COUNTRY + }; public static final String[] ADDRESS_PARTS = {FIELD_CITY, FIELD_DISTRICT, FIELD_STATE, FIELD_POSTAL}; @@ -92,9 +93,9 @@ public class AddressHelper extends PropertyModifyingHelper { public String getParts() { return Arrays.stream(ADDRESS_PARTS) - .map(this::get) - .filter(s -> !StringUtils.isBlank(s)) - .collect(Collectors.joining(getDelimiter())); + .map(this::get) + .filter(s -> !StringUtils.isBlank(s)) + .collect(Collectors.joining(getDelimiter())); } public String getLine() { @@ -118,5 +119,4 @@ public class AddressHelper extends PropertyModifyingHelper { public String toString() { return getFields(FIELD_NAMES); } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/mail/IMailSvc.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/mail/IMailSvc.java index 204f77f6ee4..53526b4ffe8 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/mail/IMailSvc.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/mail/IMailSvc.java @@ -22,16 +22,16 @@ package ca.uhn.fhir.rest.server.mail; import org.simplejavamail.api.email.Email; import org.simplejavamail.api.mailer.AsyncResponse; -import javax.annotation.Nonnull; import java.util.List; +import javax.annotation.Nonnull; public interface IMailSvc { void sendMail(@Nonnull List theEmails); void sendMail(@Nonnull Email theEmail); - void sendMail(@Nonnull Email theEmail, - @Nonnull Runnable theOnSuccess, - @Nonnull AsyncResponse.ExceptionConsumer theErrorHandler); - + void sendMail( + @Nonnull Email theEmail, + @Nonnull Runnable theOnSuccess, + @Nonnull AsyncResponse.ExceptionConsumer theErrorHandler); } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/mail/MailConfig.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/mail/MailConfig.java index 4e43967c507..7a74848c86f 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/mail/MailConfig.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/mail/MailConfig.java @@ -31,8 +31,7 @@ public class MailConfig { private String mySmtpPassword; private boolean mySmtpUseStartTLS; - public MailConfig() { - } + public MailConfig() {} public String getSmtpHostname() { return mySmtpHostname; diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/mail/MailSvc.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/mail/MailSvc.java index 230fc66236e..e635414d151 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/mail/MailSvc.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/mail/MailSvc.java @@ -31,9 +31,9 @@ import org.simplejavamail.mailer.MailerBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class MailSvc implements IMailSvc { private static final Logger ourLog = LoggerFactory.getLogger(MailSvc.class); @@ -59,15 +59,13 @@ public class MailSvc implements IMailSvc { } @Override - public void sendMail(@Nonnull Email theEmail, - @Nonnull Runnable theOnSuccess, - @Nonnull ExceptionConsumer theErrorHandler) { + public void sendMail( + @Nonnull Email theEmail, @Nonnull Runnable theOnSuccess, @Nonnull ExceptionConsumer theErrorHandler) { send(theEmail, theOnSuccess, theErrorHandler); } - private void send(@Nonnull Email theEmail, - @Nonnull Runnable theOnSuccess, - @Nonnull 
ExceptionConsumer theErrorHandler) { + private void send( + @Nonnull Email theEmail, @Nonnull Runnable theOnSuccess, @Nonnull ExceptionConsumer theErrorHandler) { Validate.notNull(theEmail); Validate.notNull(theOnSuccess); Validate.notNull(theErrorHandler); @@ -84,23 +82,26 @@ public class MailSvc implements IMailSvc { @Nonnull private Mailer makeMailer(@Nonnull MailConfig theMailConfig) { - ourLog.info("SMTP Mailer config Hostname:[{}] | Port:[{}] | Username:[{}] | TLS:[{}]", - theMailConfig.getSmtpHostname(), theMailConfig.getSmtpPort(), - theMailConfig.getSmtpUsername(), theMailConfig.isSmtpUseStartTLS()); - return MailerBuilder - .withSMTPServer( + ourLog.info( + "SMTP Mailer config Hostname:[{}] | Port:[{}] | Username:[{}] | TLS:[{}]", theMailConfig.getSmtpHostname(), theMailConfig.getSmtpPort(), theMailConfig.getSmtpUsername(), - theMailConfig.getSmtpPassword()) - .withTransportStrategy(theMailConfig.isSmtpUseStartTLS() ? TransportStrategy.SMTP_TLS : TransportStrategy.SMTP) - .buildMailer(); + theMailConfig.isSmtpUseStartTLS()); + return MailerBuilder.withSMTPServer( + theMailConfig.getSmtpHostname(), + theMailConfig.getSmtpPort(), + theMailConfig.getSmtpUsername(), + theMailConfig.getSmtpPassword()) + .withTransportStrategy( + theMailConfig.isSmtpUseStartTLS() ? TransportStrategy.SMTP_TLS : TransportStrategy.SMTP) + .buildMailer(); } @Nonnull private String makeMessage(@Nonnull Email theEmail) { return " with subject [" + theEmail.getSubject() + "] and recipients [" - + theEmail.getRecipients().stream().map(Recipient::getAddress).collect(Collectors.joining(",")) + "]"; + + theEmail.getRecipients().stream().map(Recipient::getAddress).collect(Collectors.joining(",")) + "]"; } private class OnSuccess implements Runnable { diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/BaseResourceMessage.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/BaseResourceMessage.java index 9fe9c51606b..6e3b2380f6b 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/BaseResourceMessage.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/BaseResourceMessage.java @@ -19,17 +19,16 @@ */ package ca.uhn.fhir.rest.server.messaging; - import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.IModelJson; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import com.fasterxml.jackson.annotation.JsonProperty; import org.apache.commons.lang3.Validate; -import javax.annotation.Nullable; import java.util.HashMap; import java.util.Map; import java.util.Optional; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.defaultString; @@ -221,7 +220,8 @@ public abstract class BaseResourceMessage implements IResourceMessage, IModelJso case DELETE: return DELETE; default: - throw new IllegalArgumentException(Msg.code(2348) + "Unsupported operation type: " + theRestOperationType); + throw new IllegalArgumentException( + Msg.code(2348) + "Unsupported operation type: " + theRestOperationType); } } @@ -229,5 +229,4 @@ public abstract class BaseResourceMessage implements IResourceMessage, IModelJso return myRestOperationTypeEnum; } } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/BaseResourceModifiedMessage.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/BaseResourceModifiedMessage.java index 2f7356ab343..672ff2b2ca9 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/BaseResourceModifiedMessage.java +++ 
b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/BaseResourceModifiedMessage.java @@ -34,9 +34,9 @@ import org.apache.commons.lang3.builder.ToStringBuilder; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; +import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.List; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -45,12 +45,16 @@ public abstract class BaseResourceModifiedMessage extends BaseResourceMessage im @JsonProperty("payload") protected String myPayload; + @JsonProperty("payloadId") protected String myPayloadId; + @JsonProperty(value = "partitionId") protected RequestPartitionId myPartitionId; + @JsonIgnore protected transient IBaseResource myPayloadDecoded; + @JsonIgnore protected transient String myPayloadType; @@ -61,19 +65,30 @@ public abstract class BaseResourceModifiedMessage extends BaseResourceMessage im super(); } - public BaseResourceModifiedMessage(FhirContext theFhirContext, IBaseResource theResource, OperationTypeEnum theOperationType) { + public BaseResourceModifiedMessage( + FhirContext theFhirContext, IBaseResource theResource, OperationTypeEnum theOperationType) { this(); setOperationType(theOperationType); setNewPayload(theFhirContext, theResource); } - public BaseResourceModifiedMessage(FhirContext theFhirContext, IBaseResource theNewResource, OperationTypeEnum theOperationType, RequestDetails theRequest) { + public BaseResourceModifiedMessage( + FhirContext theFhirContext, + IBaseResource theNewResource, + OperationTypeEnum theOperationType, + RequestDetails theRequest) { this(theFhirContext, theNewResource, theOperationType); if (theRequest != null) { setTransactionId(theRequest.getTransactionGuid()); } } - public BaseResourceModifiedMessage(FhirContext theFhirContext, IBaseResource theNewResource, OperationTypeEnum theOperationType, RequestDetails theRequest, RequestPartitionId theRequestPartitionId) { + + public BaseResourceModifiedMessage( + FhirContext theFhirContext, + IBaseResource theNewResource, + OperationTypeEnum theOperationType, + RequestDetails theRequest, + RequestPartitionId theRequestPartitionId) { this(theFhirContext, theNewResource, theOperationType); if (theRequest != null) { setTransactionId(theRequest.getTransactionGuid()); @@ -200,10 +215,10 @@ public abstract class BaseResourceModifiedMessage extends BaseResourceMessage im @Override public String toString() { return new ToStringBuilder(this) - .append("operationType", myOperationType) - .append("partitionId", myPartitionId) - .append("payloadId", myPayloadId) - .toString(); + .append("operationType", myOperationType) + .append("partitionId", myPartitionId) + .append("payloadId", myPayloadId) + .toString(); } protected static boolean payloadContainsNoPlaceholderReferences(FhirContext theCtx, IBaseResource theNewPayload) { @@ -257,4 +272,3 @@ public abstract class BaseResourceModifiedMessage extends BaseResourceMessage im return retval; } } - diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/IResourceMessage.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/IResourceMessage.java index 5470216073b..d1e27d37aa7 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/IResourceMessage.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/IResourceMessage.java @@ -19,8 +19,6 @@ */ package 
ca.uhn.fhir.rest.server.messaging; - - public interface IResourceMessage { String getPayloadId(); } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/ResourceOperationMessage.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/ResourceOperationMessage.java index fa136af604c..54fc7224292 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/ResourceOperationMessage.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/ResourceOperationMessage.java @@ -27,18 +27,27 @@ import org.hl7.fhir.instance.model.api.IBaseResource; public class ResourceOperationMessage extends BaseResourceModifiedMessage { - public ResourceOperationMessage() { - } + public ResourceOperationMessage() {} - public ResourceOperationMessage(FhirContext theFhirContext, IBaseResource theResource, OperationTypeEnum theOperationType) { + public ResourceOperationMessage( + FhirContext theFhirContext, IBaseResource theResource, OperationTypeEnum theOperationType) { super(theFhirContext, theResource, theOperationType); } - public ResourceOperationMessage(FhirContext theFhirContext, IBaseResource theNewResource, OperationTypeEnum theOperationType, RequestDetails theRequest) { + public ResourceOperationMessage( + FhirContext theFhirContext, + IBaseResource theNewResource, + OperationTypeEnum theOperationType, + RequestDetails theRequest) { super(theFhirContext, theNewResource, theOperationType, theRequest); } - public ResourceOperationMessage(FhirContext theFhirContext, IBaseResource theNewResource, OperationTypeEnum theOperationType, RequestDetails theRequest, RequestPartitionId theRequestPartitionId) { + public ResourceOperationMessage( + FhirContext theFhirContext, + IBaseResource theNewResource, + OperationTypeEnum theOperationType, + RequestDetails theRequest, + RequestPartitionId theRequestPartitionId) { super(theFhirContext, theNewResource, theOperationType, theRequest, theRequestPartitionId); } @@ -53,9 +62,9 @@ public class ResourceOperationMessage extends BaseResourceModifiedMessage { @Override public String toString() { return new ToStringBuilder(this) - .append("operationType", myOperationType) - .append("payloadId", myPayloadId) - .append("partitionId", myPartitionId) - .toString(); + .append("operationType", myOperationType) + .append("payloadId", myPayloadId) + .append("partitionId", myPartitionId) + .toString(); } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/json/BaseJsonMessage.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/json/BaseJsonMessage.java index e4dbe5d9f92..b914fd1bbdf 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/json/BaseJsonMessage.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/json/BaseJsonMessage.java @@ -19,10 +19,8 @@ */ package ca.uhn.fhir.rest.server.messaging.json; - import ca.uhn.fhir.model.api.IModelJson; import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.commons.lang3.StringUtils; import org.springframework.messaging.Message; import org.springframework.messaging.MessageHeaders; @@ -34,6 +32,7 @@ import static org.apache.commons.lang3.StringUtils.defaultString; public abstract class BaseJsonMessage implements Message, IModelJson { private static final long serialVersionUID = 1L; + @JsonProperty("headers") private HapiMessageHeaders myHeaders; @@ -73,9 +72,9 @@ public abstract class BaseJsonMessage implements Message, IModelJson { } @Nullable - public String 
getMessageKey() { + public String getMessageKey() { return null; - } + } /** * Returns {@link #getMessageKey()} or {@link #getMessageKeyDefaultValue()} when {@link #getMessageKey()} returns null. @@ -93,7 +92,7 @@ public abstract class BaseJsonMessage implements Message, IModelJson { * @return null by default */ @Nullable - protected String getMessageKeyDefaultValue(){ + protected String getMessageKeyDefaultValue() { return null; } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/json/HapiMessageHeaders.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/json/HapiMessageHeaders.java index b1ae4a1ae5d..2c5469bcac6 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/json/HapiMessageHeaders.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/json/HapiMessageHeaders.java @@ -41,8 +41,10 @@ public class HapiMessageHeaders implements IModelJson { @JsonProperty(RETRY_COUNT_KEY) private Integer myRetryCount = 0; + @JsonProperty(FIRST_FAILURE_KEY) private Long myFirstFailureTimestamp; + @JsonProperty(LAST_FAILURE_KEY) private Long myLastFailureTimestamp; @@ -80,12 +82,10 @@ public class HapiMessageHeaders implements IModelJson { this.myLastFailureTimestamp = theLastFailureTimestamp; } - public void setFirstFailureTimestamp(Long theFirstFailureTimestamp) { this.myFirstFailureTimestamp = theFirstFailureTimestamp; } - public Map getCustomHeaders() { if (this.headers == null) { return new HashMap<>(); @@ -100,5 +100,4 @@ public class HapiMessageHeaders implements IModelJson { returnedHeaders.put(LAST_FAILURE_KEY, myLastFailureTimestamp); return new MessageHeaders(returnedHeaders); } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/json/ResourceOperationJsonMessage.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/json/ResourceOperationJsonMessage.java index 1960a636eb3..7b90f8c0461 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/json/ResourceOperationJsonMessage.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/messaging/json/ResourceOperationJsonMessage.java @@ -27,7 +27,6 @@ import javax.annotation.Nullable; public class ResourceOperationJsonMessage extends BaseJsonMessage { - @JsonProperty("payload") private ResourceOperationMessage myPayload; @@ -46,12 +45,12 @@ public class ResourceOperationJsonMessage extends BaseJsonMessage t instanceof BaseQueryParameter) - .map(t -> (BaseQueryParameter) t) - .collect(Collectors.toList()); + myQueryParameters = myParameters.stream() + .filter(t -> t instanceof BaseQueryParameter) + .map(t -> (BaseQueryParameter) t) + .collect(Collectors.toList()); for (IParameter next : myParameters) { if (next instanceof ConditionalParamBinder) { @@ -244,7 +244,8 @@ public abstract class BaseMethodBinding { public abstract MethodMatchEnum incomingServerRequestMatchesMethod(RequestDetails theRequest); - public abstract Object invokeServer(IRestfulServer theServer, RequestDetails theRequest) throws BaseServerResponseException, IOException; + public abstract Object invokeServer(IRestfulServer theServer, RequestDetails theRequest) + throws BaseServerResponseException, IOException; protected final Object invokeServerMethod(RequestDetails theRequest, Object[] theMethodParams) { // Handle server action interceptors @@ -254,9 +255,8 @@ public abstract class BaseMethodBinding { populateRequestDetailsForInterceptor(theRequest, theMethodParams); // Interceptor invoke: 
SERVER_INCOMING_REQUEST_PRE_HANDLED - PageMethodBinding.callPreHandledHooks(theRequest); - - } + PageMethodBinding.callPreHandledHooks(theRequest); + } // Actually invoke the method try { @@ -267,7 +267,7 @@ public abstract class BaseMethodBinding { throw (BaseServerResponseException) e.getCause(); } if (e.getTargetException() instanceof DataFormatException) { - throw (DataFormatException)e.getTargetException(); + throw (DataFormatException) e.getTargetException(); } throw new InternalErrorException(Msg.code(389) + "Failed to call access method: " + e.getCause(), e); } catch (Exception e) { @@ -319,7 +319,8 @@ public abstract class BaseMethodBinding { } return BundleProviders.newList(retVal); } else { - throw new InternalErrorException(Msg.code(391) + "Unexpected return type: " + response.getClass().getCanonicalName()); + throw new InternalErrorException(Msg.code(391) + "Unexpected return type: " + + response.getClass().getCanonicalName()); } } @@ -346,7 +347,23 @@ public abstract class BaseMethodBinding { GraphQL graphQL = theMethod.getAnnotation(GraphQL.class); // ** if you add another annotation above, also add it to the next line: - if (!verifyMethodHasZeroOrOneOperationAnnotation(theMethod, read, search, conformance, create, update, delete, history, validate, addTags, deleteTags, transaction, operation, getPage, patch, graphQL)) { + if (!verifyMethodHasZeroOrOneOperationAnnotation( + theMethod, + read, + search, + conformance, + create, + update, + delete, + history, + validate, + addTags, + deleteTags, + transaction, + operation, + getPage, + patch, + graphQL)) { return null; } @@ -364,8 +381,10 @@ public abstract class BaseMethodBinding { if (theProvider instanceof IResourceProvider) { returnTypeFromRp = ((IResourceProvider) theProvider).getResourceType(); if (!verifyIsValidResourceReturnType(returnTypeFromRp)) { - throw new ConfigurationException(Msg.code(392) + "getResourceType() from " + IResourceProvider.class.getSimpleName() + " type " + theMethod.getDeclaringClass().getCanonicalName() + " returned " - + toLogString(returnTypeFromRp) + " - Must return a resource type"); + throw new ConfigurationException( + Msg.code(392) + "getResourceType() from " + IResourceProvider.class.getSimpleName() + " type " + + theMethod.getDeclaringClass().getCanonicalName() + " returned " + + toLogString(returnTypeFromRp) + " - Must return a resource type"); } } @@ -380,10 +399,14 @@ public abstract class BaseMethodBinding { returnTypeFromMethod = ReflectionUtil.getGenericCollectionTypeOfMethodReturnType(theMethod); if (returnTypeFromMethod == null) { ourLog.trace("Method {} returns a non-typed list, can't verify return type", theMethod); - } else if (!verifyIsValidResourceReturnType(returnTypeFromMethod) && !isResourceInterface(returnTypeFromMethod)) { - throw new ConfigurationException(Msg.code(393) + "Method '" + theMethod.getName() + "' from " + IResourceProvider.class.getSimpleName() + " type " + theMethod.getDeclaringClass().getCanonicalName() - + " returns a collection with generic type " + toLogString(returnTypeFromMethod) - + " - Must return a resource type or a collection (List, Set) with a resource type parameter (e.g. 
List or List )"); + } else if (!verifyIsValidResourceReturnType(returnTypeFromMethod) + && !isResourceInterface(returnTypeFromMethod)) { + throw new ConfigurationException( + Msg.code(393) + "Method '" + theMethod.getName() + "' from " + + IResourceProvider.class.getSimpleName() + " type " + + theMethod.getDeclaringClass().getCanonicalName() + + " returns a collection with generic type " + toLogString(returnTypeFromMethod) + + " - Must return a resource type or a collection (List, Set) with a resource type parameter (e.g. List or List )"); } } else if (IBaseBundle.class.isAssignableFrom(returnTypeFromMethod) && returnTypeFromRp == null) { // If a plain provider method returns a Bundle, we'll assume it to be a system @@ -391,9 +414,12 @@ public abstract class BaseMethodBinding { returnTypeFromMethod = null; } else { if (!isResourceInterface(returnTypeFromMethod) && !verifyIsValidResourceReturnType(returnTypeFromMethod)) { - throw new ConfigurationException(Msg.code(394) + "Method '" + theMethod.getName() + "' from " + IResourceProvider.class.getSimpleName() + " type " + theMethod.getDeclaringClass().getCanonicalName() - + " returns " + toLogString(returnTypeFromMethod) + " - Must return a resource type (eg Patient, Bundle, " + IBundleProvider.class.getSimpleName() - + ", etc., see the documentation for more details)"); + throw new ConfigurationException(Msg.code(394) + "Method '" + theMethod.getName() + "' from " + + IResourceProvider.class.getSimpleName() + " type " + + theMethod.getDeclaringClass().getCanonicalName() + + " returns " + toLogString(returnTypeFromMethod) + + " - Must return a resource type (eg Patient, Bundle, " + IBundleProvider.class.getSimpleName() + + ", etc., see the documentation for more details)"); } } @@ -432,18 +458,26 @@ public abstract class BaseMethodBinding { } if (isNotBlank(returnTypeNameFromAnnotation)) { - returnTypeFromAnnotation = theContext.getResourceDefinition(returnTypeNameFromAnnotation).getImplementingClass(); + returnTypeFromAnnotation = theContext + .getResourceDefinition(returnTypeNameFromAnnotation) + .getImplementingClass(); } if (returnTypeFromRp != null) { if (returnTypeFromAnnotation != null && !isResourceInterface(returnTypeFromAnnotation)) { if (returnTypeFromMethod != null && !returnTypeFromRp.isAssignableFrom(returnTypeFromMethod)) { - throw new ConfigurationException(Msg.code(395) + "Method '" + theMethod.getName() + "' in type " + theMethod.getDeclaringClass().getCanonicalName() + " returns type " - + returnTypeFromMethod.getCanonicalName() + " - Must return " + returnTypeFromRp.getCanonicalName() + " (or a subclass of it) per IResourceProvider contract"); + throw new ConfigurationException(Msg.code(395) + "Method '" + theMethod.getName() + "' in type " + + theMethod.getDeclaringClass().getCanonicalName() + " returns type " + + returnTypeFromMethod.getCanonicalName() + " - Must return " + + returnTypeFromRp.getCanonicalName() + + " (or a subclass of it) per IResourceProvider contract"); } if (!returnTypeFromRp.isAssignableFrom(returnTypeFromAnnotation)) { - throw new ConfigurationException(Msg.code(396) + "Method '" + theMethod.getName() + "' in type " + theMethod.getDeclaringClass().getCanonicalName() + " claims to return type " + returnTypeFromAnnotation.getCanonicalName() - + " per method annotation - Must return " + returnTypeFromRp.getCanonicalName() + " (or a subclass of it) per IResourceProvider contract"); + throw new ConfigurationException(Msg.code(396) + "Method '" + theMethod.getName() + "' in type " + + 
theMethod.getDeclaringClass().getCanonicalName() + " claims to return type " + + returnTypeFromAnnotation.getCanonicalName() + " per method annotation - Must return " + + returnTypeFromRp.getCanonicalName() + + " (or a subclass of it) per IResourceProvider contract"); } returnType = returnTypeFromAnnotation; } else { @@ -452,8 +486,11 @@ public abstract class BaseMethodBinding { } else { if (!isResourceInterface(returnTypeFromAnnotation)) { if (!verifyIsValidResourceReturnType(returnTypeFromAnnotation)) { - throw new ConfigurationException(Msg.code(397) + "Method '" + theMethod.getName() + "' from " + IResourceProvider.class.getSimpleName() + " type " + theMethod.getDeclaringClass().getCanonicalName() - + " returns " + toLogString(returnTypeFromAnnotation) + " according to annotation - Must return a resource type"); + throw new ConfigurationException(Msg.code(397) + "Method '" + theMethod.getName() + "' from " + + IResourceProvider.class.getSimpleName() + " type " + + theMethod.getDeclaringClass().getCanonicalName() + " returns " + + toLogString(returnTypeFromAnnotation) + + " according to annotation - Must return a resource type"); } returnType = returnTypeFromAnnotation; } else { @@ -478,19 +515,25 @@ public abstract class BaseMethodBinding { } else if (history != null) { return new HistoryMethodBinding(theMethod, theContext, theProvider); } else if (validate != null) { - return new ValidateMethodBindingDstu2Plus(returnType, returnTypeFromRp, theMethod, theContext, theProvider, validate); + return new ValidateMethodBindingDstu2Plus( + returnType, returnTypeFromRp, theMethod, theContext, theProvider, validate); } else if (transaction != null) { return new TransactionMethodBinding(theMethod, theContext, theProvider); } else if (operation != null) { - return new OperationMethodBinding(returnType, returnTypeFromRp, theMethod, theContext, theProvider, operation); + return new OperationMethodBinding( + returnType, returnTypeFromRp, theMethod, theContext, theProvider, operation); } else { - throw new ConfigurationException(Msg.code(398) + "Did not detect any FHIR annotations on method '" + theMethod.getName() + "' on type: " + theMethod.getDeclaringClass().getCanonicalName()); + throw new ConfigurationException( + Msg.code(398) + "Did not detect any FHIR annotations on method '" + theMethod.getName() + + "' on type: " + theMethod.getDeclaringClass().getCanonicalName()); } - } private static boolean isResourceInterface(Class theReturnTypeFromMethod) { - return theReturnTypeFromMethod != null && (theReturnTypeFromMethod.equals(IBaseResource.class) || theReturnTypeFromMethod.equals(IResource.class) || theReturnTypeFromMethod.equals(IAnyResource.class)); + return theReturnTypeFromMethod != null + && (theReturnTypeFromMethod.equals(IBaseResource.class) + || theReturnTypeFromMethod.equals(IResource.class) + || theReturnTypeFromMethod.equals(IAnyResource.class)); } private static String toLogString(Class theType) { @@ -517,10 +560,11 @@ public abstract class BaseMethodBinding { if (obj1 == null) { obj1 = object; } else { - throw new ConfigurationException(Msg.code(399) + "Method " + theNextMethod.getName() + " on type '" + theNextMethod.getDeclaringClass().getSimpleName() + " has annotations @" - + obj1.getClass().getSimpleName() + " and @" + object.getClass().getSimpleName() + ". 
Can not have both."); + throw new ConfigurationException(Msg.code(399) + "Method " + theNextMethod.getName() + " on type '" + + theNextMethod.getDeclaringClass().getSimpleName() + " has annotations @" + + obj1.getClass().getSimpleName() + " and @" + + object.getClass().getSimpleName() + ". Can not have both."); } - } } if (obj1 == null) { @@ -528,5 +572,4 @@ public abstract class BaseMethodBinding { } return true; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseOutcomeReturningMethodBinding.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseOutcomeReturningMethodBinding.java index d4d6b02cace..93280afd985 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseOutcomeReturningMethodBinding.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseOutcomeReturningMethodBinding.java @@ -19,15 +19,14 @@ */ package ca.uhn.fhir.rest.server.method; - -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.rest.api.*; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.api.server.IRestfulResponse; import ca.uhn.fhir.rest.api.server.IRestfulServer; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.ResponseDetails; +import ca.uhn.fhir.rest.api.*; import ca.uhn.fhir.rest.server.RestfulServerUtils; import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; @@ -50,12 +49,15 @@ abstract class BaseOutcomeReturningMethodBinding extends BaseMethodBinding { private boolean myReturnVoid; - public BaseOutcomeReturningMethodBinding(Method theMethod, FhirContext theContext, Class theMethodAnnotation, Object theProvider) { + public BaseOutcomeReturningMethodBinding( + Method theMethod, FhirContext theContext, Class theMethodAnnotation, Object theProvider) { super(theMethod, theContext, theProvider); if (!theMethod.getReturnType().equals(MethodOutcome.class)) { if (!allowVoidReturnType()) { - throw new ConfigurationException(Msg.code(367) + "Method " + theMethod.getName() + " in type " + theMethod.getDeclaringClass().getName() + " is a @" + theMethodAnnotation.getSimpleName() + " method but it does not return " + MethodOutcome.class); + throw new ConfigurationException(Msg.code(367) + "Method " + theMethod.getName() + " in type " + + theMethod.getDeclaringClass().getName() + " is a @" + theMethodAnnotation.getSimpleName() + + " method but it does not return " + MethodOutcome.class); } else if (theMethod.getReturnType() == void.class) { myReturnVoid = true; } @@ -108,7 +110,9 @@ abstract class BaseOutcomeReturningMethodBinding extends BaseMethodBinding { private void validateResponseNotNullIfItShouldntBe(MethodOutcome response) { if (response == null && !isReturnVoid()) { - throw new InternalErrorException(Msg.code(368) + "Method " + getMethod().getName() + " in type " + getMethod().getDeclaringClass().getCanonicalName() + " returned null"); + throw new InternalErrorException( + Msg.code(368) + "Method " + getMethod().getName() + " in type " + + getMethod().getDeclaringClass().getCanonicalName() + " returned null"); } } @@ -143,7 +147,8 @@ abstract class BaseOutcomeReturningMethodBinding extends BaseMethodBinding { } @Override - public Object invokeServer(IRestfulServer theServer, RequestDetails theRequest) throws BaseServerResponseException, IOException { + public Object invokeServer(IRestfulServer theServer, 
RequestDetails theRequest) + throws BaseServerResponseException, IOException { Object[] params = createParametersForServerRequest(theRequest); addParametersForServerRequest(theRequest, params); @@ -164,7 +169,8 @@ abstract class BaseOutcomeReturningMethodBinding extends BaseMethodBinding { if (response != null && response.getId() != null && response.getId().hasResourceType()) { if (getContext().getResourceDefinition(response.getId().getResourceType()) == null) { - throw new InternalErrorException(Msg.code(369) + "Server method returned invalid resource ID: " + response.getId().getValue()); + throw new InternalErrorException(Msg.code(369) + "Server method returned invalid resource ID: " + + response.getId().getValue()); } } @@ -179,7 +185,12 @@ abstract class BaseOutcomeReturningMethodBinding extends BaseMethodBinding { protected abstract Set provideAllowableRequestTypes(); - private Object returnResponse(IRestfulServer theServer, RequestDetails theRequest, MethodOutcome theMethodOutcome, IBaseResource theOriginalOutcome) throws IOException { + private Object returnResponse( + IRestfulServer theServer, + RequestDetails theRequest, + MethodOutcome theMethodOutcome, + IBaseResource theOriginalOutcome) + throws IOException { int operationStatus = getOperationStatus(theMethodOutcome); IBaseResource outcome = theOriginalOutcome; @@ -207,7 +218,6 @@ abstract class BaseOutcomeReturningMethodBinding extends BaseMethodBinding { outcome = theOriginalOutcome; break; } - } ResponseDetails responseDetails = new ResponseDetails(); @@ -224,7 +234,8 @@ abstract class BaseOutcomeReturningMethodBinding extends BaseMethodBinding { IPrimitiveType operationResourceLastUpdated = null; if (theMethodOutcome != null) { if (theMethodOutcome.getResource() != null) { - operationResourceLastUpdated = RestfulServerUtils.extractLastUpdatedFromResource(theMethodOutcome.getResource()); + operationResourceLastUpdated = + RestfulServerUtils.extractLastUpdatedFromResource(theMethodOutcome.getResource()); } responseId = theMethodOutcome.getId(); @@ -234,12 +245,21 @@ abstract class BaseOutcomeReturningMethodBinding extends BaseMethodBinding { if (responseId != null) { String serverBase = theRequest.getFhirServerBase(); - responseId = RestfulServerUtils.fullyQualifyResourceIdOrReturnNull(theServer, theMethodOutcome.getResource(), serverBase, responseId); + responseId = RestfulServerUtils.fullyQualifyResourceIdOrReturnNull( + theServer, theMethodOutcome.getResource(), serverBase, responseId); } } Set summaryMode = Collections.emptySet(); - return RestfulServerUtils.streamResponseAsResource(theServer, responseDetails.getResponseResource(), summaryMode, responseDetails.getResponseCode(), true, theRequest.isRespondGzip(), theRequest, responseId, operationResourceLastUpdated); + return RestfulServerUtils.streamResponseAsResource( + theServer, + responseDetails.getResponseResource(), + summaryMode, + responseDetails.getResponseCode(), + true, + theRequest.isRespondGzip(), + theRequest, + responseId, + operationResourceLastUpdated); } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseOutcomeReturningMethodBindingWithResourceIdButNoResourceBody.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseOutcomeReturningMethodBindingWithResourceIdButNoResourceBody.java index 16b09b037be..f1731d96261 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseOutcomeReturningMethodBindingWithResourceIdButNoResourceBody.java +++ 
b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseOutcomeReturningMethodBindingWithResourceIdButNoResourceBody.java @@ -19,23 +19,27 @@ */ package ca.uhn.fhir.rest.server.method; -import ca.uhn.fhir.i18n.Msg; -import java.lang.reflect.Method; - -import org.hl7.fhir.instance.model.api.IBaseResource; - import ca.uhn.fhir.context.*; -//TODO Use of a deprecated method should be resolved +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.annotation.*; import ca.uhn.fhir.rest.param.ParameterUtil; import ca.uhn.fhir.rest.server.IResourceProvider; +import org.hl7.fhir.instance.model.api.IBaseResource; -public abstract class BaseOutcomeReturningMethodBindingWithResourceIdButNoResourceBody extends BaseOutcomeReturningMethodBinding { +import java.lang.reflect.Method; + +public abstract class BaseOutcomeReturningMethodBindingWithResourceIdButNoResourceBody + extends BaseOutcomeReturningMethodBinding { private String myResourceName; private Integer myIdParameterIndex; - public BaseOutcomeReturningMethodBindingWithResourceIdButNoResourceBody(Method theMethod, FhirContext theContext, Object theProvider, Class theMethodAnnotationType, Class theResourceTypeFromAnnotation) { + public BaseOutcomeReturningMethodBindingWithResourceIdButNoResourceBody( + Method theMethod, + FhirContext theContext, + Object theProvider, + Class theMethodAnnotationType, + Class theResourceTypeFromAnnotation) { super(theMethod, theContext, theMethodAnnotationType, theProvider); Class resourceType = theResourceTypeFromAnnotation; @@ -44,18 +48,24 @@ public abstract class BaseOutcomeReturningMethodBindingWithResourceIdButNoResour myResourceName = def.getName(); } else { if (theProvider != null && theProvider instanceof IResourceProvider) { - RuntimeResourceDefinition def = theContext.getResourceDefinition(((IResourceProvider) theProvider).getResourceType()); + RuntimeResourceDefinition def = + theContext.getResourceDefinition(((IResourceProvider) theProvider).getResourceType()); myResourceName = def.getName(); } else { - throw new ConfigurationException(Msg.code(457) + "Can not determine resource type for method '" + theMethod.getName() + "' on type " + theMethod.getDeclaringClass().getCanonicalName() + " - Did you forget to include the resourceType() value on the @" + Delete.class.getSimpleName() + " method annotation?"); + throw new ConfigurationException( + Msg.code(457) + "Can not determine resource type for method '" + theMethod.getName() + + "' on type " + theMethod.getDeclaringClass().getCanonicalName() + + " - Did you forget to include the resourceType() value on the @" + + Delete.class.getSimpleName() + " method annotation?"); } } myIdParameterIndex = ParameterUtil.findIdParameterIndex(theMethod, getContext()); if (myIdParameterIndex == null) { - throw new ConfigurationException(Msg.code(458) + "Method '" + theMethod.getName() + "' on type '" + theMethod.getDeclaringClass().getCanonicalName() + "' has no parameter annotated with the @" + IdParam.class.getSimpleName() + " annotation"); + throw new ConfigurationException(Msg.code(458) + "Method '" + theMethod.getName() + "' on type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' has no parameter annotated with the @" + + IdParam.class.getSimpleName() + " annotation"); } - } @Override @@ -66,6 +76,4 @@ public abstract class BaseOutcomeReturningMethodBindingWithResourceIdButNoResour protected Integer getIdParameterIndex() { return myIdParameterIndex; } - - } diff --git 
a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseOutcomeReturningMethodBindingWithResourceParam.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseOutcomeReturningMethodBindingWithResourceParam.java index 345d6e7d6bd..d06df180397 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseOutcomeReturningMethodBindingWithResourceParam.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseOutcomeReturningMethodBindingWithResourceParam.java @@ -19,22 +19,21 @@ */ package ca.uhn.fhir.rest.server.method; -import ca.uhn.fhir.i18n.Msg; -import static org.apache.commons.lang3.StringUtils.defaultIfBlank; - -import java.lang.reflect.Method; -import java.lang.reflect.Modifier; - -import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.instance.model.api.IIdType; - import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.annotation.ResourceParam; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.param.ParameterUtil; import ca.uhn.fhir.rest.server.IResourceProvider; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; + +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; + +import static org.apache.commons.lang3.StringUtils.defaultIfBlank; abstract class BaseOutcomeReturningMethodBindingWithResourceParam extends BaseOutcomeReturningMethodBinding { private final Integer myIdParamIndex; @@ -45,7 +44,8 @@ abstract class BaseOutcomeReturningMethodBindingWithResourceParam extends BaseOu private int myConditionalUrlIndex = -1; @SuppressWarnings("unchecked") - public BaseOutcomeReturningMethodBindingWithResourceParam(Method theMethod, FhirContext theContext, Class theMethodAnnotation, Object theProvider) { + public BaseOutcomeReturningMethodBindingWithResourceParam( + Method theMethod, FhirContext theContext, Class theMethodAnnotation, Object theProvider) { super(theMethod, theContext, theMethodAnnotation, theProvider); ResourceParameter resourceParameter = null; @@ -58,7 +58,9 @@ abstract class BaseOutcomeReturningMethodBindingWithResourceParam extends BaseOu continue; } if (myResourceType != null) { - throw new ConfigurationException(Msg.code(454) + "Method " + theMethod.getName() + " on type " + theMethod.getDeclaringClass() + " has more than one @ResourceParam. Only one is allowed."); + throw new ConfigurationException(Msg.code(454) + "Method " + theMethod.getName() + " on type " + + theMethod.getDeclaringClass() + + " has more than one @ResourceParam. 
Only one is allowed."); } myResourceType = resourceParameter.getResourceType(); @@ -70,11 +72,13 @@ abstract class BaseOutcomeReturningMethodBindingWithResourceParam extends BaseOu index++; } - if ((myResourceType == null || Modifier.isAbstract(myResourceType.getModifiers())) && (theProvider instanceof IResourceProvider)) { + if ((myResourceType == null || Modifier.isAbstract(myResourceType.getModifiers())) + && (theProvider instanceof IResourceProvider)) { myResourceType = ((IResourceProvider) theProvider).getResourceType(); } if (myResourceType == null) { - throw new ConfigurationException(Msg.code(455) + "Unable to determine resource type for method: " + theMethod); + throw new ConfigurationException( + Msg.code(455) + "Unable to determine resource type for method: " + theMethod); } myResourceName = theContext.getResourceType(myResourceType); @@ -84,10 +88,10 @@ abstract class BaseOutcomeReturningMethodBindingWithResourceParam extends BaseOu } if (resourceParameter == null) { - throw new ConfigurationException(Msg.code(456) + "Method " + theMethod.getName() + " in type " + theMethod.getDeclaringClass().getCanonicalName() + " does not have a resource parameter annotated with @" - + ResourceParam.class.getSimpleName()); + throw new ConfigurationException(Msg.code(456) + "Method " + theMethod.getName() + " in type " + + theMethod.getDeclaringClass().getCanonicalName() + + " does not have a resource parameter annotated with @" + ResourceParam.class.getSimpleName()); } - } @Override @@ -139,8 +143,8 @@ abstract class BaseOutcomeReturningMethodBindingWithResourceParam extends BaseOu /** * Subclasses may override */ - protected void validateResourceIdAndUrlIdForNonConditionalOperation(IBaseResource theResource, String theResourceId, String theUrlId, String theMatchUrl) { + protected void validateResourceIdAndUrlIdForNonConditionalOperation( + IBaseResource theResource, String theResourceId, String theUrlId, String theMatchUrl) { return; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseQueryParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseQueryParameter.java index e6e284461ac..80f399a7322 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseQueryParameter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseQueryParameter.java @@ -37,7 +37,8 @@ public abstract class BaseQueryParameter implements IParameter { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseQueryParameter.class); - public abstract List encode(FhirContext theContext, Object theObject) throws InternalErrorException; + public abstract List encode(FhirContext theContext, Object theObject) + throws InternalErrorException; public abstract String getName(); @@ -66,15 +67,24 @@ public abstract class BaseQueryParameter implements IParameter { public abstract boolean handlesMissing(); @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { // ignore for now } public abstract boolean isRequired(); - public abstract Object parse(FhirContext theContext, List theString) throws InternalErrorException, InvalidRequestException; + public abstract Object parse(FhirContext theContext, List theString) + throws InternalErrorException, InvalidRequestException; - 
private void parseParams(RequestDetails theRequest, List paramList, String theQualifiedParamName, String theQualifier) { + private void parseParams( + RequestDetails theRequest, + List paramList, + String theQualifiedParamName, + String theQualifier) { QualifierDetails qualifiers = QualifierDetails.extractQualifiersFromParameterName(theQualifier); if (!qualifiers.passes(getQualifierWhitelist(), getQualifierBlacklist())) { return; @@ -92,9 +102,10 @@ public abstract class BaseQueryParameter implements IParameter { } } - @Override - public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException { + public Object translateQueryParametersIntoServerArgument( + RequestDetails theRequest, BaseMethodBinding theMethodBinding) + throws InternalErrorException, InvalidRequestException { List paramList = new ArrayList<>(); String name = getName(); @@ -109,7 +120,9 @@ public abstract class BaseQueryParameter implements IParameter { if (paramList.isEmpty()) { - ourLog.debug("No value for parameter '{}' - Qualified names {} and qualifier whitelist {}", new Object[] { getName(), qualified, getQualifierWhitelist() }); + ourLog.debug( + "No value for parameter '{}' - Qualified names {} and qualifier whitelist {}", + new Object[] {getName(), qualified, getQualifierWhitelist()}); if (handlesMissing()) { return parse(theRequest.getFhirContext(), paramList); @@ -118,7 +131,5 @@ public abstract class BaseQueryParameter implements IParameter { } return parse(theRequest.getFhirContext(), paramList); - } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseResourceReturningMethodBinding.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseResourceReturningMethodBinding.java index 0c2788a686a..6233f6b6b0f 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseResourceReturningMethodBinding.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/BaseResourceReturningMethodBinding.java @@ -50,14 +50,14 @@ import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.Collection; import java.util.Date; import java.util.Set; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; public abstract class BaseResourceReturningMethodBinding extends BaseMethodBinding { protected final ResponseBundleBuilder myResponseBundleBuilder; @@ -66,7 +66,8 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi private String myResourceName; @SuppressWarnings("unchecked") - public BaseResourceReturningMethodBinding(Class theReturnResourceType, Method theMethod, FhirContext theContext, Object theProvider) { + public BaseResourceReturningMethodBinding( + Class theReturnResourceType, Method theMethod, FhirContext theContext, Object theProvider) { super(theMethod, theContext, theProvider); Class methodReturnType = theMethod.getReturnType(); @@ -74,7 +75,11 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi Set> expectedReturnTypes = provideExpectedReturnTypes(); if (expectedReturnTypes != null) { - 
Validate.isTrue(expectedReturnTypes.contains(methodReturnType), "Unexpected method return type on %s - Allowed: %s", theMethod, expectedReturnTypes); + Validate.isTrue( + expectedReturnTypes.contains(methodReturnType), + "Unexpected method return type on %s - Allowed: %s", + theMethod, + expectedReturnTypes); } else if (Collection.class.isAssignableFrom(methodReturnType)) { @@ -82,13 +87,15 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi Class collectionType = ReflectionUtil.getGenericCollectionTypeOfMethodReturnType(theMethod); if (collectionType != null) { if (!Object.class.equals(collectionType) && !IBaseResource.class.isAssignableFrom(collectionType)) { - throw new ConfigurationException(Msg.code(433) + "Method " + theMethod.getDeclaringClass().getSimpleName() + "#" + theMethod.getName() + " returns an invalid collection generic type: " + collectionType); + throw new ConfigurationException(Msg.code(433) + "Method " + + theMethod.getDeclaringClass().getSimpleName() + "#" + theMethod.getName() + + " returns an invalid collection generic type: " + collectionType); } } } else if (IBaseResource.class.isAssignableFrom(methodReturnType)) { - if ( IBaseBundle.class.isAssignableFrom(methodReturnType)) { + if (IBaseBundle.class.isAssignableFrom(methodReturnType)) { myMethodReturnType = MethodReturnTypeEnum.BUNDLE_RESOURCE; } else { myMethodReturnType = MethodReturnTypeEnum.RESOURCE; @@ -100,7 +107,9 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi } else if (void.class.equals(methodReturnType)) { myMethodReturnType = MethodReturnTypeEnum.VOID; } else { - throw new ConfigurationException(Msg.code(434) + "Invalid return type '" + methodReturnType.getCanonicalName() + "' on method '" + theMethod.getName() + "' on type: " + theMethod.getDeclaringClass().getCanonicalName()); + throw new ConfigurationException(Msg.code(434) + "Invalid return type '" + + methodReturnType.getCanonicalName() + "' on method '" + theMethod.getName() + "' on type: " + + theMethod.getDeclaringClass().getCanonicalName()); } if (theReturnResourceType != null) { @@ -108,7 +117,8 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi // If we're returning an abstract type, that's ok, but if we know the resource // type let's grab it - if (!Modifier.isAbstract(theReturnResourceType.getModifiers()) && !Modifier.isInterface(theReturnResourceType.getModifiers())) { + if (!Modifier.isAbstract(theReturnResourceType.getModifiers()) + && !Modifier.isInterface(theReturnResourceType.getModifiers())) { Class resourceType = (Class) theReturnResourceType; RuntimeResourceDefinition resourceDefinition = theContext.getResourceDefinition(resourceType); myResourceName = resourceDefinition.getName(); @@ -150,7 +160,11 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi */ BundleTypeEnum responseBundleType = getResponseBundleType(); - BundleLinks bundleLinks = new BundleLinks(theRequest.getServerBaseForRequest(), null, RestfulServerUtils.prettyPrintResponse(theServer, theRequest), responseBundleType); + BundleLinks bundleLinks = new BundleLinks( + theRequest.getServerBaseForRequest(), + null, + RestfulServerUtils.prettyPrintResponse(theServer, theRequest), + responseBundleType); String linkSelf = RestfulServerUtils.createLinkSelf(theRequest.getFhirServerBase(), theRequest); bundleLinks.setSelf(linkSelf); @@ -169,13 +183,21 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi /* * We 
assume that the bundle we got back from the handling method may not have everything populated (e.g. self links, bundle type, etc) so we do that here. */ - IVersionSpecificBundleFactory bundleFactory = theServer.getFhirContext().newBundleFactory(); + IVersionSpecificBundleFactory bundleFactory = + theServer.getFhirContext().newBundleFactory(); bundleFactory.initializeWithBundleResource(resource); bundleFactory.addRootPropertiesToBundle(null, bundleLinks, count, lastUpdated); responseObject = resource; } else { - ResponseBundleRequest responseBundleRequest = buildResponseBundleRequest(theServer, theRequest, params, (IBundleProvider) resultObj, count, responseBundleType, linkSelf); + ResponseBundleRequest responseBundleRequest = buildResponseBundleRequest( + theServer, + theRequest, + params, + (IBundleProvider) resultObj, + count, + responseBundleType, + linkSelf); responseObject = myResponseBundleBuilder.buildResponseBundle(responseBundleRequest); } break; @@ -184,7 +206,8 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi IBundleProvider result = (IBundleProvider) resultObj; Integer size = result.size(); if (size == null || size == 0) { - throw new ResourceNotFoundException(Msg.code(436) + "Resource " + theRequest.getId() + " is not known"); + throw new ResourceNotFoundException( + Msg.code(436) + "Resource " + theRequest.getId() + " is not known"); } else if (size > 1) { throw new InternalErrorException(Msg.code(437) + "Method returned multiple resources"); } @@ -198,7 +221,14 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi return responseObject; } - private ResponseBundleRequest buildResponseBundleRequest(IRestfulServer theServer, RequestDetails theRequest, Object[] theParams, IBundleProvider theBundleProvider, Integer theCount, BundleTypeEnum theBundleTypeEnum, String theLinkSelf) { + private ResponseBundleRequest buildResponseBundleRequest( + IRestfulServer theServer, + RequestDetails theRequest, + Object[] theParams, + IBundleProvider theBundleProvider, + Integer theCount, + BundleTypeEnum theBundleTypeEnum, + String theLinkSelf) { Set includes = getRequestIncludesFromParams(theParams); if (theCount == null) { @@ -207,7 +237,16 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi int offset = OffsetCalculator.calculateOffset(theRequest, theBundleProvider); - return new ResponseBundleRequest(theServer, theBundleProvider, theRequest, offset, theCount, theLinkSelf, includes, theBundleTypeEnum, null); + return new ResponseBundleRequest( + theServer, + theBundleProvider, + theRequest, + offset, + theCount, + theLinkSelf, + includes, + theBundleTypeEnum, + null); } public MethodReturnTypeEnum getMethodReturnType() { @@ -231,11 +270,12 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi public abstract ReturnTypeEnum getReturnType(); @Override - public Object invokeServer(IRestfulServer theServer, RequestDetails theRequest) throws BaseServerResponseException, IOException { + public Object invokeServer(IRestfulServer theServer, RequestDetails theRequest) + throws BaseServerResponseException, IOException { IBaseResource response = doInvokeServer(theServer, theRequest); /* - When we write directly to an HttpServletResponse, the invocation returns null. However, we still want to invoke - the SERVER_OUTGOING_RESPONSE pointcut. + When we write directly to an HttpServletResponse, the invocation returns null. 
However, we still want to invoke + the SERVER_OUTGOING_RESPONSE pointcut. */ if (response == null) { ResponseDetails responseDetails = new ResponseDetails(); @@ -251,11 +291,22 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi return null; } - return RestfulServerUtils.streamResponseAsResource(theServer, responseDetails.getResponseResource(), summaryMode, responseDetails.getResponseCode(), isAddContentLocationHeader(), theRequest.isRespondGzip(), theRequest, null, null); + return RestfulServerUtils.streamResponseAsResource( + theServer, + responseDetails.getResponseResource(), + summaryMode, + responseDetails.getResponseCode(), + isAddContentLocationHeader(), + theRequest.isRespondGzip(), + theRequest, + null, + null); } } - public abstract Object invokeServer(IRestfulServer theServer, RequestDetails theRequest, Object[] theMethodParams) throws InvalidRequestException, InternalErrorException; + public abstract Object invokeServer( + IRestfulServer theServer, RequestDetails theRequest, Object[] theMethodParams) + throws InvalidRequestException, InternalErrorException; /** * Should the response include a Content-Location header. Search method bunding (and any others?) may override this to disable the content-location, since it doesn't make sense @@ -300,14 +351,17 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi return true; } - public static void callOutgoingFailureOperationOutcomeHook(RequestDetails theRequestDetails, IBaseOperationOutcome theOperationOutcome) { + public static void callOutgoingFailureOperationOutcomeHook( + RequestDetails theRequestDetails, IBaseOperationOutcome theOperationOutcome) { HookParams responseParams = new HookParams(); responseParams.add(RequestDetails.class, theRequestDetails); responseParams.addIfMatchesType(ServletRequestDetails.class, theRequestDetails); responseParams.add(IBaseOperationOutcome.class, theOperationOutcome); if (theRequestDetails.getInterceptorBroadcaster() != null) { - theRequestDetails.getInterceptorBroadcaster().callHooks(Pointcut.SERVER_OUTGOING_FAILURE_OPERATIONOUTCOME, responseParams); + theRequestDetails + .getInterceptorBroadcaster() + .callHooks(Pointcut.SERVER_OUTGOING_FAILURE_OPERATIONOUTCOME, responseParams); } } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ConditionalParamBinder.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ConditionalParamBinder.java index d25150b5b3d..26bb65f94ae 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ConditionalParamBinder.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ConditionalParamBinder.java @@ -19,19 +19,16 @@ */ package ca.uhn.fhir.rest.server.method; -import ca.uhn.fhir.i18n.Msg; -import java.lang.reflect.Method; -import java.util.Collection; - -import org.apache.commons.lang3.Validate; - import ca.uhn.fhir.context.ConfigurationException; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.annotation.ConditionalUrlParam; -import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import java.lang.reflect.Method; +import java.util.Collection; + class ConditionalParamBinder implements IParameter { private boolean mySupportsMultiple; @@ -41,9 +38,17 @@ class ConditionalParamBinder implements IParameter { } @Override - public void 
initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { - if (theOuterCollectionType != null || theInnerCollectionType != null || theParameterType.equals(String.class) == false) { - throw new ConfigurationException(Msg.code(409) + "Parameters annotated with @" + ConditionalUrlParam.class.getSimpleName() + " must be of type String, found incorrect parameter in method \"" + theMethod + "\""); + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { + if (theOuterCollectionType != null + || theInnerCollectionType != null + || theParameterType.equals(String.class) == false) { + throw new ConfigurationException( + Msg.code(409) + "Parameters annotated with @" + ConditionalUrlParam.class.getSimpleName() + + " must be of type String, found incorrect parameter in method \"" + theMethod + "\""); } } @@ -52,8 +57,9 @@ class ConditionalParamBinder implements IParameter { } @Override - public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException { + public Object translateQueryParametersIntoServerArgument( + RequestDetails theRequest, BaseMethodBinding theMethodBinding) + throws InternalErrorException, InvalidRequestException { return theRequest.getConditionalUrl(theMethodBinding.getRestOperationType()); } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ConformanceMethodBinding.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ConformanceMethodBinding.java index 021f46197fa..3d6dab05c5d 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ConformanceMethodBinding.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ConformanceMethodBinding.java @@ -41,7 +41,6 @@ import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import ca.uhn.fhir.system.HapiSystemProperties; import org.hl7.fhir.instance.model.api.IBaseConformance; -import javax.annotation.Nonnull; import java.lang.reflect.Method; import java.util.concurrent.ExecutorService; import java.util.concurrent.LinkedBlockingQueue; @@ -50,6 +49,7 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; +import javax.annotation.Nonnull; public class ConformanceMethodBinding extends BaseResourceReturningMethodBinding { public static final String CACHE_THREAD_PREFIX = "capabilitystatement-cache-"; @@ -68,8 +68,11 @@ public class ConformanceMethodBinding extends BaseResourceReturningMethodBinding MethodReturnTypeEnum methodReturnType = getMethodReturnType(); Class genericReturnType = (Class) theMethod.getGenericReturnType(); - if (methodReturnType != MethodReturnTypeEnum.RESOURCE || !IBaseConformance.class.isAssignableFrom(genericReturnType)) { - throw new ConfigurationException(Msg.code(387) + "Conformance resource provider method '" + theMethod.getName() + "' should return a Conformance resource class, returns: " + theMethod.getReturnType()); + if (methodReturnType != MethodReturnTypeEnum.RESOURCE + || !IBaseConformance.class.isAssignableFrom(genericReturnType)) { + throw new ConfigurationException( + Msg.code(387) + "Conformance resource provider method '" + theMethod.getName() + + "' should return a Conformance resource class, returns: " + theMethod.getReturnType()); 
} Metadata metadata = theMethod.getAnnotation(Metadata.class); @@ -83,11 +86,14 @@ public class ConformanceMethodBinding extends BaseResourceReturningMethodBinding t.setDaemon(false); return t; }; - myThreadPool = new ThreadPoolExecutor(1, 1, - 0L, TimeUnit.MILLISECONDS, - new LinkedBlockingQueue<>(1), - threadFactory, - new ThreadPoolExecutor.DiscardOldestPolicy()); + myThreadPool = new ThreadPoolExecutor( + 1, + 1, + 0L, + TimeUnit.MILLISECONDS, + new LinkedBlockingQueue<>(1), + threadFactory, + new ThreadPoolExecutor.DiscardOldestPolicy()); } /** @@ -127,13 +133,15 @@ public class ConformanceMethodBinding extends BaseResourceReturningMethodBinding } @Override - public IBundleProvider invokeServer(IRestfulServer theServer, RequestDetails theRequest, Object[] theMethodParams) throws BaseServerResponseException { + public IBundleProvider invokeServer( + IRestfulServer theServer, RequestDetails theRequest, Object[] theMethodParams) + throws BaseServerResponseException { IBaseConformance conf; - CacheControlDirective cacheControlDirective = new CacheControlDirective().parse(theRequest.getHeaders(Constants.HEADER_CACHE_CONTROL)); + CacheControlDirective cacheControlDirective = + new CacheControlDirective().parse(theRequest.getHeaders(Constants.HEADER_CACHE_CONTROL)); - if (cacheControlDirective.isNoCache()) - conf = null; + if (cacheControlDirective.isNoCache()) conf = null; else { conf = myCachedResponse.get(); if (HapiSystemProperties.isTestModeEnabled()) { @@ -161,8 +169,8 @@ public class ConformanceMethodBinding extends BaseResourceReturningMethodBinding preHandledParams.add(RequestDetails.class, theRequest); preHandledParams.addIfMatchesType(ServletRequestDetails.class, theRequest); theRequest - .getInterceptorBroadcaster() - .callHooks(Pointcut.SERVER_INCOMING_REQUEST_PRE_HANDLED, preHandledParams); + .getInterceptorBroadcaster() + .callHooks(Pointcut.SERVER_INCOMING_REQUEST_PRE_HANDLED, preHandledParams); } } } @@ -184,8 +192,8 @@ public class ConformanceMethodBinding extends BaseResourceReturningMethodBinding params.add(RequestDetails.class, theRequest); params.addIfMatchesType(ServletRequestDetails.class, theRequest); IBaseConformance outcome = (IBaseConformance) theRequest - .getInterceptorBroadcaster() - .callHooksAndReturnObject(Pointcut.SERVER_CAPABILITY_STATEMENT_GENERATED, params); + .getInterceptorBroadcaster() + .callHooksAndReturnObject(Pointcut.SERVER_CAPABILITY_STATEMENT_GENERATED, params); if (outcome != null) { conf = outcome; } @@ -215,7 +223,8 @@ public class ConformanceMethodBinding extends BaseResourceReturningMethodBinding if (theRequest.getRequestType() == RequestTypeEnum.GET) { return MethodMatchEnum.EXACT; } - throw new MethodNotAllowedException(Msg.code(388) + "/metadata request must use HTTP GET", RequestTypeEnum.GET); + throw new MethodNotAllowedException( + Msg.code(388) + "/metadata request must use HTTP GET", RequestTypeEnum.GET); } return MethodMatchEnum.NONE; @@ -240,5 +249,4 @@ public class ConformanceMethodBinding extends BaseResourceReturningMethodBinding IBundleProvider resultObj = invokeServer(theServer, theRequest, params); return (IBaseConformance) resultObj.getResources(0, 1).get(0); } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/CountParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/CountParameter.java index afcb8335ec2..d8d484021a6 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/CountParameter.java +++ 
b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/CountParameter.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.rest.server.method; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.ConfigurationException; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.primitive.IntegerDt; import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.rest.annotation.Count; @@ -39,7 +39,9 @@ public class CountParameter implements IParameter { private Class myType; @Override - public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException { + public Object translateQueryParametersIntoServerArgument( + RequestDetails theRequest, BaseMethodBinding theMethodBinding) + throws InternalErrorException, InvalidRequestException { String[] countParam = theRequest.getParameters().get(Constants.PARAM_COUNT); if (countParam != null) { if (countParam.length > 0) { @@ -48,7 +50,8 @@ public class CountParameter implements IParameter { IntegerDt count = new IntegerDt(countParam[0]); return ParameterUtil.fromInteger(myType, count); } catch (DataFormatException e) { - throw new InvalidRequestException(Msg.code(375) + "Invalid " + Constants.PARAM_COUNT + " value: " + countParam[0]); + throw new InvalidRequestException( + Msg.code(375) + "Invalid " + Constants.PARAM_COUNT + " value: " + countParam[0]); } } } @@ -57,14 +60,21 @@ public class CountParameter implements IParameter { } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { if (theOuterCollectionType != null) { - throw new ConfigurationException(Msg.code(376) + "Method '" + theMethod.getName() + "' in type '" +theMethod.getDeclaringClass().getCanonicalName()+ "' is annotated with @" + Count.class.getName() + " but can not be of collection type"); + throw new ConfigurationException(Msg.code(376) + "Method '" + theMethod.getName() + "' in type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + Count.class.getName() + + " but can not be of collection type"); } if (!ParameterUtil.isBindableIntegerType(theParameterType)) { - throw new ConfigurationException(Msg.code(377) + "Method '" + theMethod.getName() + "' in type '" +theMethod.getDeclaringClass().getCanonicalName()+ "' is annotated with @" + Count.class.getName() + " but type '" + theParameterType + "' is an invalid type, must be one of Integer or IntegerType"); + throw new ConfigurationException(Msg.code(377) + "Method '" + theMethod.getName() + "' in type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + Count.class.getName() + + " but type '" + theParameterType + "' is an invalid type, must be one of Integer or IntegerType"); } myType = theParameterType; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/CreateMethodBinding.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/CreateMethodBinding.java index fcc213d0768..c281da37bfd 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/CreateMethodBinding.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/CreateMethodBinding.java @@ -19,25 +19,23 @@ */ package ca.uhn.fhir.rest.server.method; -import ca.uhn.fhir.i18n.Msg; -import static 
org.apache.commons.lang3.StringUtils.isNotBlank; - -import java.lang.reflect.Method; -import java.util.Collections; -import java.util.Set; - -import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.instance.model.api.IIdType; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.annotation.Create; import ca.uhn.fhir.rest.api.RequestTypeEnum; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; +import java.lang.reflect.Method; +import java.util.Collections; +import java.util.Set; import javax.annotation.Nonnull; +import static org.apache.commons.lang3.StringUtils.isNotBlank; + public class CreateMethodBinding extends BaseOutcomeReturningMethodBindingWithResourceParam { public CreateMethodBinding(Method theMethod, FhirContext theContext, Object theProvider) { @@ -61,22 +59,26 @@ public class CreateMethodBinding extends BaseOutcomeReturningMethodBindingWithRe } @Override - protected void validateResourceIdAndUrlIdForNonConditionalOperation(IBaseResource theResource, String theResourceId, - String theUrlId, String theMatchUrl) { + protected void validateResourceIdAndUrlIdForNonConditionalOperation( + IBaseResource theResource, String theResourceId, String theUrlId, String theMatchUrl) { if (isNotBlank(theUrlId)) { - String msg = getContext().getLocalizer() + String msg = getContext() + .getLocalizer() .getMessage(BaseOutcomeReturningMethodBindingWithResourceParam.class, "idInUrlForCreate", theUrlId); throw new InvalidRequestException(Msg.code(365) + msg); } if (getContext().getVersion().getVersion().isOlderThan(FhirVersionEnum.DSTU3)) { if (isNotBlank(theResourceId)) { - String msg = getContext().getLocalizer().getMessage( - BaseOutcomeReturningMethodBindingWithResourceParam.class, "idInBodyForCreate", theResourceId); + String msg = getContext() + .getLocalizer() + .getMessage( + BaseOutcomeReturningMethodBindingWithResourceParam.class, + "idInBodyForCreate", + theResourceId); throw new InvalidRequestException(Msg.code(366) + msg); } } else { theResource.setId((IIdType) null); } } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/DeleteMethodBinding.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/DeleteMethodBinding.java index cf9350f36cf..334eb8684a7 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/DeleteMethodBinding.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/DeleteMethodBinding.java @@ -19,22 +19,26 @@ */ package ca.uhn.fhir.rest.server.method; -import java.lang.reflect.Method; -import java.util.Collections; -import java.util.Set; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.rest.annotation.Delete; import ca.uhn.fhir.rest.api.RequestTypeEnum; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; +import java.lang.reflect.Method; +import java.util.Collections; +import java.util.Set; import javax.annotation.Nonnull; public class DeleteMethodBinding extends BaseOutcomeReturningMethodBindingWithResourceIdButNoResourceBody { public DeleteMethodBinding(Method theMethod, FhirContext theContext, Object theProvider) { - super(theMethod, theContext, theProvider, Delete.class, theMethod.getAnnotation(Delete.class).type()); + super( + theMethod, + theContext, + 
theProvider, + Delete.class, + theMethod.getAnnotation(Delete.class).type()); } @Nonnull @@ -62,5 +66,4 @@ public class DeleteMethodBinding extends BaseOutcomeReturningMethodBindingWithRe protected String getMatchingOperation() { return null; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ElementsParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ElementsParameter.java index e84ffa8426e..ae50c05d050 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ElementsParameter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ElementsParameter.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.rest.server.method; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.ConfigurationException; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.SummaryEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; @@ -45,7 +45,9 @@ public class ElementsParameter implements IParameter { @Override @SuppressWarnings({"rawtypes", "unchecked"}) - public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException { + public Object translateQueryParametersIntoServerArgument( + RequestDetails theRequest, BaseMethodBinding theMethodBinding) + throws InternalErrorException, InvalidRequestException { Set value = getElementsValueOrNull(theRequest, false); if (value == null || value.isEmpty()) { return null; @@ -67,13 +69,19 @@ public class ElementsParameter implements IParameter { } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { if (theOuterCollectionType != null) { - throw new ConfigurationException(Msg.code(415) + "Method '" + theMethod.getName() + "' in type '" + theMethod.getDeclaringClass().getCanonicalName() + "' is of type " + SummaryEnum.class - + " but can not be a collection of collections"); + throw new ConfigurationException(Msg.code(415) + "Method '" + theMethod.getName() + "' in type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' is of type " + SummaryEnum.class + + " but can not be a collection of collections"); } if (theInnerCollectionType != null) { - myInnerCollectionType = CollectionBinder.getInstantiableCollectionType(theInnerCollectionType, SummaryEnum.class.getSimpleName()); + myInnerCollectionType = CollectionBinder.getInstantiableCollectionType( + theInnerCollectionType, SummaryEnum.class.getSimpleName()); } } @@ -112,5 +120,4 @@ public class ElementsParameter implements IParameter { } return null; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/GraphQLMethodBinding.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/GraphQLMethodBinding.java index e41aca5e59e..7d716075a87 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/GraphQLMethodBinding.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/GraphQLMethodBinding.java @@ -36,14 +36,14 @@ import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBaseResource; -import javax.annotation.Nonnull; -import javax.servlet.http.HttpServletRequest; -import 
javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.Writer; import java.lang.reflect.Method; import java.util.Collections; import java.util.Set; +import javax.annotation.Nonnull; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; public class GraphQLMethodBinding extends OperationMethodBinding { @@ -52,8 +52,22 @@ public class GraphQLMethodBinding extends OperationMethodBinding { private final Integer myQueryBodyParamIndex; private final RequestTypeEnum myMethodRequestType; - public GraphQLMethodBinding(Method theMethod, RequestTypeEnum theMethodRequestType, FhirContext theContext, Object theProvider) { - super(null, null, theMethod, theContext, theProvider, true, false, Constants.OPERATION_NAME_GRAPHQL, null, null, null, null, true); + public GraphQLMethodBinding( + Method theMethod, RequestTypeEnum theMethodRequestType, FhirContext theContext, Object theProvider) { + super( + null, + null, + theMethod, + theContext, + theProvider, + true, + false, + Constants.OPERATION_NAME_GRAPHQL, + null, + null, + null, + null, + true); myIdParamIndex = ParameterUtil.findIdParameterIndex(theMethod, theContext); myQueryUrlParamIndex = ParameterUtil.findParamAnnotationIndex(theMethod, GraphQLQueryUrl.class); @@ -99,7 +113,8 @@ public class GraphQLMethodBinding extends OperationMethodBinding { @Override public MethodMatchEnum incomingServerRequestMatchesMethod(RequestDetails theRequest) { - if (Constants.OPERATION_NAME_GRAPHQL.equals(theRequest.getOperation()) && myMethodRequestType.equals(theRequest.getRequestType())) { + if (Constants.OPERATION_NAME_GRAPHQL.equals(theRequest.getOperation()) + && myMethodRequestType.equals(theRequest.getRequestType())) { return MethodMatchEnum.EXACT; } @@ -109,17 +124,22 @@ public class GraphQLMethodBinding extends OperationMethodBinding { private String getQueryValue(Object[] methodParams) { switch (myMethodRequestType) { case POST: - Validate.notNull(myQueryBodyParamIndex, "GraphQL method does not have @" + GraphQLQueryBody.class.getSimpleName() + " parameter"); + Validate.notNull( + myQueryBodyParamIndex, + "GraphQL method does not have @" + GraphQLQueryBody.class.getSimpleName() + " parameter"); return (String) methodParams[myQueryBodyParamIndex]; case GET: - Validate.notNull(myQueryUrlParamIndex, "GraphQL method does not have @" + GraphQLQueryUrl.class.getSimpleName() + " parameter"); + Validate.notNull( + myQueryUrlParamIndex, + "GraphQL method does not have @" + GraphQLQueryUrl.class.getSimpleName() + " parameter"); return (String) methodParams[myQueryUrlParamIndex]; } return null; } @Override - public Object invokeServer(IRestfulServer theServer, RequestDetails theRequest) throws BaseServerResponseException, IOException { + public Object invokeServer(IRestfulServer theServer, RequestDetails theRequest) + throws BaseServerResponseException, IOException { Object[] methodParams = createMethodParams(theRequest); if (myIdParamIndex != null) { methodParams[myIdParamIndex] = theRequest.getId(); @@ -143,24 +163,24 @@ public class GraphQLMethodBinding extends OperationMethodBinding { String graphQLQuery = getQueryValue(methodParams); // Interceptor call: SERVER_OUTGOING_GRAPHQL_RESPONSE HookParams params = new HookParams() - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest) - .add(String.class, graphQLQuery) - .add(String.class, responseString) - .add(HttpServletRequest.class, servletRequest) - .add(HttpServletResponse.class, servletResponse); + 
.add(RequestDetails.class, theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest) + .add(String.class, graphQLQuery) + .add(String.class, responseString) + .add(HttpServletRequest.class, servletRequest) + .add(HttpServletResponse.class, servletResponse); if (!theRequest.getInterceptorBroadcaster().callHooks(Pointcut.SERVER_OUTGOING_GRAPHQL_RESPONSE, params)) { return null; } // Interceptor call: SERVER_OUTGOING_RESPONSE params = new HookParams() - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest) - .add(IBaseResource.class, null) - .add(ResponseDetails.class, new ResponseDetails()) - .add(HttpServletRequest.class, servletRequest) - .add(HttpServletResponse.class, servletResponse); + .add(RequestDetails.class, theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest) + .add(IBaseResource.class, null) + .add(ResponseDetails.class, new ResponseDetails()) + .add(HttpServletRequest.class, servletRequest) + .add(HttpServletResponse.class, servletResponse); if (!theRequest.getInterceptorBroadcaster().callHooks(Pointcut.SERVER_OUTGOING_RESPONSE, params)) { return null; } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/GraphQLQueryBodyParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/GraphQLQueryBodyParameter.java index 432406b0144..17205abca4a 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/GraphQLQueryBodyParameter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/GraphQLQueryBodyParameter.java @@ -46,7 +46,9 @@ public class GraphQLQueryBodyParameter implements IParameter { private Class myType; @Override - public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException { + public Object translateQueryParametersIntoServerArgument( + RequestDetails theRequest, BaseMethodBinding theMethodBinding) + throws InternalErrorException, InvalidRequestException { String ctValue = defaultString(theRequest.getHeader(Constants.HEADER_CONTENT_TYPE)); Reader requestReader = createRequestReader(theRequest); @@ -81,14 +83,21 @@ public class GraphQLQueryBodyParameter implements IParameter { } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { if (theOuterCollectionType != null) { - throw new ConfigurationException(Msg.code(358) + "Method '" + theMethod.getName() + "' in type '" +theMethod.getDeclaringClass().getCanonicalName()+ "' is annotated with @" + Count.class.getName() + " but can not be of collection type"); + throw new ConfigurationException(Msg.code(358) + "Method '" + theMethod.getName() + "' in type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + Count.class.getName() + + " but can not be of collection type"); } if (!String.class.equals(theParameterType)) { - throw new ConfigurationException(Msg.code(359) + "Method '" + theMethod.getName() + "' in type '" +theMethod.getDeclaringClass().getCanonicalName()+ "' is annotated with @" + Count.class.getName() + " but type '" + theParameterType + "' is an invalid type, must be one of Integer or IntegerType"); + throw new ConfigurationException(Msg.code(359) + "Method '" + theMethod.getName() + "' 
in type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + Count.class.getName() + + " but type '" + theParameterType + "' is an invalid type, must be one of Integer or IntegerType"); } myType = theParameterType; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/GraphQLQueryUrlParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/GraphQLQueryUrlParameter.java index ab88de812f7..a390fb1bc78 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/GraphQLQueryUrlParameter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/GraphQLQueryUrlParameter.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.rest.server.method; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.ConfigurationException; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.annotation.Count; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.server.RequestDetails; @@ -35,7 +35,9 @@ public class GraphQLQueryUrlParameter implements IParameter { private Class myType; @Override - public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException { + public Object translateQueryParametersIntoServerArgument( + RequestDetails theRequest, BaseMethodBinding theMethodBinding) + throws InternalErrorException, InvalidRequestException { String[] queryParams = theRequest.getParameters().get(Constants.PARAM_GRAPHQL_QUERY); String retVal = null; if (queryParams != null) { @@ -47,14 +49,21 @@ public class GraphQLQueryUrlParameter implements IParameter { } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { if (theOuterCollectionType != null) { - throw new ConfigurationException(Msg.code(459) + "Method '" + theMethod.getName() + "' in type '" +theMethod.getDeclaringClass().getCanonicalName()+ "' is annotated with @" + Count.class.getName() + " but can not be of collection type"); + throw new ConfigurationException(Msg.code(459) + "Method '" + theMethod.getName() + "' in type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + Count.class.getName() + + " but can not be of collection type"); } if (!String.class.equals(theParameterType)) { - throw new ConfigurationException(Msg.code(460) + "Method '" + theMethod.getName() + "' in type '" +theMethod.getDeclaringClass().getCanonicalName()+ "' is annotated with @" + Count.class.getName() + " but type '" + theParameterType + "' is an invalid type, must be one of Integer or IntegerType"); + throw new ConfigurationException(Msg.code(460) + "Method '" + theMethod.getName() + "' in type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + Count.class.getName() + + " but type '" + theParameterType + "' is an invalid type, must be one of Integer or IntegerType"); } myType = theParameterType; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/HistoryMethodBinding.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/HistoryMethodBinding.java index 8e2a88ed9b4..af8eba13de9 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/HistoryMethodBinding.java +++ 
b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/HistoryMethodBinding.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.rest.server.method; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.IResource; import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum; import ca.uhn.fhir.model.primitive.IdDt; @@ -39,11 +39,11 @@ import org.apache.commons.lang3.StringUtils; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nonnull; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.Date; import java.util.List; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -84,7 +84,6 @@ public class HistoryMethodBinding extends BaseResourceReturningMethodBinding { } else { myResourceName = null; } - } @Override @@ -142,9 +141,10 @@ public class HistoryMethodBinding extends BaseResourceReturningMethodBinding { return MethodMatchEnum.EXACT; } - @Override - public IBundleProvider invokeServer(IRestfulServer theServer, RequestDetails theRequest, Object[] theMethodParams) throws InvalidRequestException, InternalErrorException { + public IBundleProvider invokeServer( + IRestfulServer theServer, RequestDetails theRequest, Object[] theMethodParams) + throws InvalidRequestException, InternalErrorException { if (myIdParamIndex != null) { theMethodParams[myIdParamIndex] = theRequest.getId(); } @@ -185,14 +185,17 @@ public class HistoryMethodBinding extends BaseResourceReturningMethodBinding { List retVal = resources.getResources(theFromIndex, theToIndex); int index = theFromIndex; for (IBaseResource nextResource : retVal) { - if (nextResource.getIdElement() == null || isBlank(nextResource.getIdElement().getIdPart())) { - throw new InternalErrorException(Msg.code(410) + "Server provided resource at index " + index + " with no ID set (using IResource#setId(IdDt))"); + if (nextResource.getIdElement() == null + || isBlank(nextResource.getIdElement().getIdPart())) { + throw new InternalErrorException(Msg.code(410) + "Server provided resource at index " + index + + " with no ID set (using IResource#setId(IdDt))"); } if (isBlank(nextResource.getIdElement().getVersionIdPart()) && nextResource instanceof IResource) { - //TODO: Use of a deprecated method should be resolved. + // TODO: Use of a deprecated method should be resolved. 
IdDt versionId = ResourceMetadataKeyEnum.VERSION_ID.get(nextResource); if (versionId == null || versionId.isEmpty()) { - throw new InternalErrorException(Msg.code(411) + "Server provided resource at index " + index + " with no Version ID set (using IResource#setId(IdDt))"); + throw new InternalErrorException(Msg.code(411) + "Server provided resource at index " + + index + " with no Version ID set (using IResource#setId(IdDt))"); } } index++; @@ -228,5 +231,4 @@ public class HistoryMethodBinding extends BaseResourceReturningMethodBinding { } return null; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/IParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/IParameter.java index feca99e578c..2da19562e9c 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/IParameter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/IParameter.java @@ -19,20 +19,20 @@ */ package ca.uhn.fhir.rest.server.method; -import java.lang.reflect.Method; -import java.util.Collection; - import ca.uhn.fhir.model.api.IResource; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import java.lang.reflect.Method; +import java.util.Collection; + public interface IParameter { /** * This server method method takes the data received by the server in an incoming request, and translates that data into a single argument for a server method invocation. Note that all * received data is passed to this method, but the expectation is that not necessarily that all data is used by every parameter. - * + * * @param theRequest * The incoming request object * @param theRequestContents @@ -40,8 +40,12 @@ public interface IParameter { * @param theMethodBinding TODO * @return Returns the argument object as it will be passed to the IResourceProvider method. 
*/ - Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException; - - void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType); + Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) + throws InternalErrorException, InvalidRequestException; + void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType); } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/IRestfulHeader.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/IRestfulHeader.java index 8ef7a351107..1d9bc1ed267 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/IRestfulHeader.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/IRestfulHeader.java @@ -19,6 +19,4 @@ */ package ca.uhn.fhir.rest.server.method; -public interface IRestfulHeader { - -} +public interface IRestfulHeader {} diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/IncludeParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/IncludeParameter.java index 7c518f3187d..40b4af74873 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/IncludeParameter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/IncludeParameter.java @@ -19,9 +19,9 @@ */ package ca.uhn.fhir.rest.server.method; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.Include; import ca.uhn.fhir.rest.annotation.IncludeParam; import ca.uhn.fhir.rest.api.Constants; @@ -46,7 +46,10 @@ class IncludeParameter extends BaseQueryParameter { private Class mySpecType; private boolean myReverse; - public IncludeParameter(IncludeParam theAnnotation, Class> theInstantiableCollectionType, Class theSpecType) { + public IncludeParameter( + IncludeParam theAnnotation, + Class> theInstantiableCollectionType, + Class theSpecType) { myInstantiableCollectionType = theInstantiableCollectionType; myReverse = theAnnotation.reverse(); if (theAnnotation.allow().length > 0) { @@ -62,9 +65,9 @@ class IncludeParameter extends BaseQueryParameter { mySpecType = theSpecType; if (mySpecType != Include.class && mySpecType != String.class) { - throw new ConfigurationException(Msg.code(439) + "Invalid @" + IncludeParam.class.getSimpleName() + " parameter type: " + mySpecType); + throw new ConfigurationException(Msg.code(439) + "Invalid @" + IncludeParam.class.getSimpleName() + + " parameter type: " + mySpecType); } - } public boolean isReverse() { @@ -127,14 +130,16 @@ class IncludeParameter extends BaseQueryParameter { } @Override - public Object parse(FhirContext theContext, List theString) throws InternalErrorException, InvalidRequestException { + public Object parse(FhirContext theContext, List theString) + throws InternalErrorException, InvalidRequestException { Collection retValCollection = null; if (myInstantiableCollectionType != null) { try { retValCollection = myInstantiableCollectionType.newInstance(); } catch (Exception e) { - throw new InternalErrorException(Msg.code(440) + "Failed to instantiate " + myInstantiableCollectionType.getName(), e); + throw new InternalErrorException( + Msg.code(440) + "Failed to instantiate " + 
myInstantiableCollectionType.getName(), e); } } @@ -143,7 +148,8 @@ class IncludeParameter extends BaseQueryParameter { continue; } if (nextParamList.size() > 1) { - throw new InvalidRequestException(Msg.code(441) + theContext.getLocalizer().getMessage(IncludeParameter.class, "orIncludeInRequest")); + throw new InvalidRequestException(Msg.code(441) + + theContext.getLocalizer().getMessage(IncludeParameter.class, "orIncludeInRequest")); } String qualifier = nextParamList.getQualifier(); @@ -153,7 +159,14 @@ class IncludeParameter extends BaseQueryParameter { if (myAllow != null && !myAllow.isEmpty()) { if (!myAllow.contains(value)) { if (!myAllow.contains("*")) { - String msg = theContext.getLocalizer().getMessage(IncludeParameter.class, "invalidIncludeNameInRequest", value, new TreeSet(myAllow).toString(), getName()); + String msg = theContext + .getLocalizer() + .getMessage( + IncludeParameter.class, + "invalidIncludeNameInRequest", + value, + new TreeSet(myAllow).toString(), + getName()); throw new InvalidRequestException(Msg.code(442) + msg); } } @@ -170,5 +183,4 @@ class IncludeParameter extends BaseQueryParameter { return retValCollection; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/InterceptorBroadcasterParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/InterceptorBroadcasterParameter.java index 80433ef92a9..76f4b46e666 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/InterceptorBroadcasterParameter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/InterceptorBroadcasterParameter.java @@ -19,23 +19,28 @@ */ package ca.uhn.fhir.rest.server.method; -import java.lang.reflect.Method; -import java.util.Collection; - import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import java.lang.reflect.Method; +import java.util.Collection; + class InterceptorBroadcasterParameter implements IParameter { @Override - public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException { + public Object translateQueryParametersIntoServerArgument( + RequestDetails theRequest, BaseMethodBinding theMethodBinding) + throws InternalErrorException, InvalidRequestException { return theRequest.getInterceptorBroadcaster(); } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { // ignore } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/MethodMatchEnum.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/MethodMatchEnum.java index ab43660624b..19115da31d3 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/MethodMatchEnum.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/MethodMatchEnum.java @@ -25,7 +25,7 @@ public enum MethodMatchEnum { NONE, APPROXIMATE, - EXACT; + EXACT; public MethodMatchEnum weakerOf(MethodMatchEnum theOther) { if (this.ordinal() < theOther.ordinal()) { @@ -34,5 +34,4 @@ public enum MethodMatchEnum { return theOther; } } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/MethodUtil.java 
b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/MethodUtil.java index fe78a04f322..cdfa6b625d1 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/MethodUtil.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/MethodUtil.java @@ -19,17 +19,17 @@ */ package ca.uhn.fhir.rest.server.method; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.BaseRuntimeElementDefinition; import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.model.api.Include; import ca.uhn.fhir.model.api.TagList; import ca.uhn.fhir.model.api.annotation.Description; import ca.uhn.fhir.rest.annotation.*; -import ca.uhn.fhir.rest.api.*; import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.*; import ca.uhn.fhir.rest.param.binder.CollectionBinder; import ca.uhn.fhir.rest.server.method.OperationParameter.IOperationParamConverter; import ca.uhn.fhir.rest.server.method.ResourceParameter.Mode; @@ -39,14 +39,14 @@ import ca.uhn.fhir.util.ReflectionUtil; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.servlet.ServletRequest; -import javax.servlet.ServletResponse; import java.lang.annotation.Annotation; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Collection; import java.util.Date; import java.util.List; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -69,9 +69,9 @@ public class MethodUtil { } } - @SuppressWarnings("unchecked") - public static List getResourceParameters(final FhirContext theContext, Method theMethod, Object theProvider) { + public static List getResourceParameters( + final FhirContext theContext, Method theMethod, Object theProvider) { List parameters = new ArrayList<>(); Class[] parameterTypes = theMethod.getParameterTypes(); @@ -90,13 +90,18 @@ public class MethodUtil { if (Collection.class.isAssignableFrom(parameterType)) { innerCollectionType = (Class>) parameterType; parameterType = ReflectionUtil.getGenericCollectionTypeOfMethodParameter(theMethod, paramIndex); - if(parameterType == null && theMethod.getDeclaringClass().isSynthetic()) { + if (parameterType == null && theMethod.getDeclaringClass().isSynthetic()) { try { - theMethod = theMethod.getDeclaringClass().getSuperclass().getMethod(theMethod.getName(), parameterTypes); - parameterType = ReflectionUtil.getGenericCollectionTypeOfMethodParameter(theMethod, paramIndex); + theMethod = theMethod + .getDeclaringClass() + .getSuperclass() + .getMethod(theMethod.getName(), parameterTypes); + parameterType = + ReflectionUtil.getGenericCollectionTypeOfMethodParameter(theMethod, paramIndex); } catch (NoSuchMethodException e) { - throw new ConfigurationException(Msg.code(400) + "A method with name '" + theMethod.getName() + "' does not exist for super class '" - + theMethod.getDeclaringClass().getSuperclass() + "'"); + throw new ConfigurationException(Msg.code(400) + "A method with name '" + + theMethod.getName() + "' does not exist for super class '" + + theMethod.getDeclaringClass().getSuperclass() + "'"); } } declaredParameterType = parameterType; @@ -108,8 +113,11 @@ public class MethodUtil { declaredParameterType = parameterType; } if (Collection.class.isAssignableFrom(parameterType)) { - throw new ConfigurationException(Msg.code(401) 
+ "Argument #" + paramIndex + " of Method '" + theMethod.getName() + "' in type '" + theMethod.getDeclaringClass().getCanonicalName() - + "' is of an invalid generic type (can not be a collection of a collection of a collection)"); + throw new ConfigurationException( + Msg.code(401) + "Argument #" + paramIndex + " of Method '" + theMethod.getName() + + "' in type '" + + theMethod.getDeclaringClass().getCanonicalName() + + "' is of an invalid generic type (can not be a collection of a collection of a collection)"); } /* @@ -121,7 +129,8 @@ public class MethodUtil { * This gets tested in HistoryR4Test */ if (IPrimitiveType.class.equals(parameterType)) { - Class genericType = ReflectionUtil.getGenericCollectionTypeOfMethodParameter(theMethod, paramIndex); + Class genericType = + ReflectionUtil.getGenericCollectionTypeOfMethodParameter(theMethod, paramIndex); if (Date.class.equals(genericType)) { BaseRuntimeElementDefinition dateTimeDef = theContext.getElementDefinition("dateTime"); parameterType = dateTimeDef.getImplementingClass(); @@ -136,7 +145,8 @@ public class MethodUtil { param = new ServletRequestParameter(); } else if (ServletResponse.class.isAssignableFrom(parameterType)) { param = new ServletResponseParameter(); - } else if (parameterType.equals(RequestDetails.class) || parameterType.equals(ServletRequestDetails.class)) { + } else if (parameterType.equals(RequestDetails.class) + || parameterType.equals(ServletRequestDetails.class)) { param = new RequestDetailsParameter(); } else if (parameterType.equals(IInterceptorBroadcaster.class)) { param = new InterceptorBroadcasterParameter(); @@ -158,7 +168,9 @@ public class MethodUtil { parameter.setRequired(true); parameter.setDeclaredTypes(((RequiredParam) nextAnnotation).targetTypes()); parameter.setCompositeTypes(((RequiredParam) nextAnnotation).compositeTypes()); - parameter.setChainLists(((RequiredParam) nextAnnotation).chainWhitelist(), ((RequiredParam) nextAnnotation).chainBlacklist()); + parameter.setChainLists( + ((RequiredParam) nextAnnotation).chainWhitelist(), + ((RequiredParam) nextAnnotation).chainBlacklist()); parameter.setType(theContext, parameterType, innerCollectionType, outerCollectionType); MethodUtil.extractDescription(parameter, nextParameterAnnotations); param = parameter; @@ -168,7 +180,9 @@ public class MethodUtil { parameter.setRequired(false); parameter.setDeclaredTypes(((OptionalParam) nextAnnotation).targetTypes()); parameter.setCompositeTypes(((OptionalParam) nextAnnotation).compositeTypes()); - parameter.setChainLists(((OptionalParam) nextAnnotation).chainWhitelist(), ((OptionalParam) nextAnnotation).chainBlacklist()); + parameter.setChainLists( + ((OptionalParam) nextAnnotation).chainWhitelist(), + ((OptionalParam) nextAnnotation).chainBlacklist()); parameter.setType(theContext, parameterType, innerCollectionType, outerCollectionType); MethodUtil.extractDescription(parameter, nextParameterAnnotations); param = parameter; @@ -181,15 +195,21 @@ public class MethodUtil { if (parameterType == String.class) { instantiableCollectionType = null; specType = String.class; - } else if ((parameterType != Include.class) || innerCollectionType == null || outerCollectionType != null) { - throw new ConfigurationException(Msg.code(402) + "Method '" + theMethod.getName() + "' is annotated with @" + IncludeParam.class.getSimpleName() + " but has a type other than Collection<" - + Include.class.getSimpleName() + ">"); + } else if ((parameterType != Include.class) + || innerCollectionType == null + || outerCollectionType != 
null) { + throw new ConfigurationException(Msg.code(402) + "Method '" + theMethod.getName() + + "' is annotated with @" + IncludeParam.class.getSimpleName() + + " but has a type other than Collection<" + Include.class.getSimpleName() + ">"); } else { - instantiableCollectionType = (Class>) CollectionBinder.getInstantiableCollectionType(innerCollectionType, "Method '" + theMethod.getName() + "'"); + instantiableCollectionType = (Class>) + CollectionBinder.getInstantiableCollectionType( + innerCollectionType, "Method '" + theMethod.getName() + "'"); specType = parameterType; } - param = new IncludeParameter((IncludeParam) nextAnnotation, instantiableCollectionType, specType); + param = new IncludeParameter( + (IncludeParam) nextAnnotation, instantiableCollectionType, specType); } else if (nextAnnotation instanceof ResourceParam) { Mode mode; if (IBaseResource.class.isAssignableFrom(parameterType)) { @@ -213,7 +233,12 @@ public class MethodUtil { } boolean methodIsOperation = theMethod.getAnnotation(Operation.class) != null; boolean methodIsPatch = theMethod.getAnnotation(Patch.class) != null; - param = new ResourceParameter((Class) parameterType, theProvider, mode, methodIsOperation, methodIsPatch); + param = new ResourceParameter( + (Class) parameterType, + theProvider, + mode, + methodIsOperation, + methodIsPatch); } else if (nextAnnotation instanceof IdParam) { param = new NullParameter(); } else if (nextAnnotation instanceof ServerBase) { @@ -222,10 +247,12 @@ public class MethodUtil { param = new ElementsParameter(); } else if (nextAnnotation instanceof Since) { param = new SinceParameter(); - ((SinceParameter) param).setType(theContext, parameterType, innerCollectionType, outerCollectionType); + ((SinceParameter) param) + .setType(theContext, parameterType, innerCollectionType, outerCollectionType); } else if (nextAnnotation instanceof At) { param = new AtParameter(); - ((AtParameter) param).setType(theContext, parameterType, innerCollectionType, outerCollectionType); + ((AtParameter) param) + .setType(theContext, parameterType, innerCollectionType, outerCollectionType); } else if (nextAnnotation instanceof Count) { param = new CountParameter(); } else if (nextAnnotation instanceof Offset) { @@ -243,78 +270,116 @@ public class MethodUtil { } else if (nextAnnotation instanceof OperationParam) { Operation op = theMethod.getAnnotation(Operation.class); if (op == null) { - throw new ConfigurationException(Msg.code(404) + "@OperationParam detected on method that is not annotated with @Operation: " + theMethod.toGenericString()); + throw new ConfigurationException(Msg.code(404) + + "@OperationParam detected on method that is not annotated with @Operation: " + + theMethod.toGenericString()); } OperationParam operationParam = (OperationParam) nextAnnotation; String description = ParametersUtil.extractDescription(nextParameterAnnotations); - List examples = ParametersUtil.extractExamples(nextParameterAnnotations);; - param = new OperationParameter(theContext, op.name(), operationParam.name(), operationParam.min(), operationParam.max(), description, examples); + List examples = ParametersUtil.extractExamples(nextParameterAnnotations); + ; + param = new OperationParameter( + theContext, + op.name(), + operationParam.name(), + operationParam.min(), + operationParam.max(), + description, + examples); if (isNotBlank(operationParam.typeName())) { - BaseRuntimeElementDefinition elementDefinition = theContext.getElementDefinition(operationParam.typeName()); + BaseRuntimeElementDefinition 
elementDefinition = + theContext.getElementDefinition(operationParam.typeName()); if (elementDefinition == null) { elementDefinition = theContext.getResourceDefinition(operationParam.typeName()); } - org.apache.commons.lang3.Validate.notNull(elementDefinition, "Unknown type name in @OperationParam: typeName=\"%s\"", operationParam.typeName()); + org.apache.commons.lang3.Validate.notNull( + elementDefinition, + "Unknown type name in @OperationParam: typeName=\"%s\"", + operationParam.typeName()); Class newParameterType = elementDefinition.getImplementingClass(); if (!declaredParameterType.isAssignableFrom(newParameterType)) { - throw new ConfigurationException(Msg.code(405) + "Non assignable parameter typeName=\"" + operationParam.typeName() + "\" specified on method " + theMethod); + throw new ConfigurationException(Msg.code(405) + "Non assignable parameter typeName=\"" + + operationParam.typeName() + "\" specified on method " + theMethod); } parameterType = newParameterType; } } else if (nextAnnotation instanceof Validate.Mode) { if (parameterType.equals(ValidationModeEnum.class) == false) { - throw new ConfigurationException(Msg.code(406) + "Parameter annotated with @" + Validate.class.getSimpleName() + "." + Validate.Mode.class.getSimpleName() + " must be of type " + ValidationModeEnum.class.getName()); + throw new ConfigurationException(Msg.code(406) + "Parameter annotated with @" + + Validate.class.getSimpleName() + "." + Validate.Mode.class.getSimpleName() + + " must be of type " + ValidationModeEnum.class.getName()); } String description = ParametersUtil.extractDescription(nextParameterAnnotations); List examples = ParametersUtil.extractExamples(nextParameterAnnotations); - param = new OperationParameter(theContext, Constants.EXTOP_VALIDATE, Constants.EXTOP_VALIDATE_MODE, 0, 1, description, examples).setConverter(new IOperationParamConverter() { - @Override - public Object incomingServer(Object theObject) { - if (isNotBlank(theObject.toString())) { - ValidationModeEnum retVal = ValidationModeEnum.forCode(theObject.toString()); - if (retVal == null) { - OperationParameter.throwInvalidMode(theObject.toString()); + param = new OperationParameter( + theContext, + Constants.EXTOP_VALIDATE, + Constants.EXTOP_VALIDATE_MODE, + 0, + 1, + description, + examples) + .setConverter(new IOperationParamConverter() { + @Override + public Object incomingServer(Object theObject) { + if (isNotBlank(theObject.toString())) { + ValidationModeEnum retVal = + ValidationModeEnum.forCode(theObject.toString()); + if (retVal == null) { + OperationParameter.throwInvalidMode(theObject.toString()); + } + return retVal; + } + return null; } - return retVal; - } - return null; - } - @Override - public Object outgoingClient(Object theObject) { - return ParametersUtil.createString(theContext, ((ValidationModeEnum) theObject).getCode()); - } - }); + @Override + public Object outgoingClient(Object theObject) { + return ParametersUtil.createString( + theContext, ((ValidationModeEnum) theObject).getCode()); + } + }); } else if (nextAnnotation instanceof Validate.Profile) { if (parameterType.equals(String.class) == false) { - throw new ConfigurationException(Msg.code(407) + "Parameter annotated with @" + Validate.class.getSimpleName() + "." + Validate.Profile.class.getSimpleName() + " must be of type " + String.class.getName()); + throw new ConfigurationException(Msg.code(407) + "Parameter annotated with @" + + Validate.class.getSimpleName() + "." 
+ Validate.Profile.class.getSimpleName() + + " must be of type " + String.class.getName()); } String description = ParametersUtil.extractDescription(nextParameterAnnotations); List examples = ParametersUtil.extractExamples(nextParameterAnnotations); - param = new OperationParameter(theContext, Constants.EXTOP_VALIDATE, Constants.EXTOP_VALIDATE_PROFILE, 0, 1, description, examples).setConverter(new IOperationParamConverter() { - @Override - public Object incomingServer(Object theObject) { - return theObject.toString(); - } + param = new OperationParameter( + theContext, + Constants.EXTOP_VALIDATE, + Constants.EXTOP_VALIDATE_PROFILE, + 0, + 1, + description, + examples) + .setConverter(new IOperationParamConverter() { + @Override + public Object incomingServer(Object theObject) { + return theObject.toString(); + } - @Override - public Object outgoingClient(Object theObject) { - return ParametersUtil.createString(theContext, theObject.toString()); - } - }); + @Override + public Object outgoingClient(Object theObject) { + return ParametersUtil.createString(theContext, theObject.toString()); + } + }); } else { continue; } - } - } if (param == null) { - throw new ConfigurationException(Msg.code(408) + "Parameter #" + ((paramIndex + 1)) + "/" + (parameterTypes.length) + " of method '" + theMethod.getName() + "' on type '" + theMethod.getDeclaringClass().getCanonicalName() - + "' has no recognized FHIR interface parameter nextParameterAnnotations. Don't know how to handle this parameter"); + throw new ConfigurationException( + Msg.code(408) + "Parameter #" + ((paramIndex + 1)) + "/" + (parameterTypes.length) + + " of method '" + theMethod.getName() + "' on type '" + + theMethod.getDeclaringClass().getCanonicalName() + + "' has no recognized FHIR interface parameter nextParameterAnnotations. 
Don't know how to handle this parameter"); } param.initializeTypes(theMethod, outerCollectionType, innerCollectionType, parameterType); @@ -324,6 +389,4 @@ public class MethodUtil { } return parameters; } - - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/NullParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/NullParameter.java index 8fe559a466d..cfd26151aea 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/NullParameter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/NullParameter.java @@ -19,25 +19,29 @@ */ package ca.uhn.fhir.rest.server.method; -import java.lang.reflect.Method; -import java.util.Collection; - import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import java.lang.reflect.Method; +import java.util.Collection; + class NullParameter implements IParameter { - @Override - public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException { + public Object translateQueryParametersIntoServerArgument( + RequestDetails theRequest, BaseMethodBinding theMethodBinding) + throws InternalErrorException, InvalidRequestException { // nothing return null; } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { // nothing } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/OffsetCalculator.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/OffsetCalculator.java index fa74b6a17f8..94ed62f01cb 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/OffsetCalculator.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/OffsetCalculator.java @@ -31,7 +31,6 @@ public class OffsetCalculator { * @param theBundleProvider * @return */ - public static int calculateOffset(RequestDetails theRequest, IBundleProvider theBundleProvider) { Integer offset = RestfulServerUtils.tryToExtractNamedParameter(theRequest, Constants.PARAM_PAGINGOFFSET); if (offset == null || offset < 0) { diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/OffsetParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/OffsetParameter.java index 18f098ba6b4..13914cbb30c 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/OffsetParameter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/OffsetParameter.java @@ -19,13 +19,8 @@ */ package ca.uhn.fhir.rest.server.method; -import ca.uhn.fhir.i18n.Msg; -import java.lang.reflect.Method; -import java.util.Collection; - -import org.apache.commons.lang3.StringUtils; - import ca.uhn.fhir.context.ConfigurationException; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.primitive.IntegerDt; import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.rest.annotation.Offset; @@ -34,13 +29,19 @@ import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.param.ParameterUtil; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import 
org.apache.commons.lang3.StringUtils; + +import java.lang.reflect.Method; +import java.util.Collection; public class OffsetParameter implements IParameter { private Class myType; @Override - public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException { + public Object translateQueryParametersIntoServerArgument( + RequestDetails theRequest, BaseMethodBinding theMethodBinding) + throws InternalErrorException, InvalidRequestException { String[] sinceParams = theRequest.getParameters().get(Constants.PARAM_OFFSET); if (sinceParams != null) { if (sinceParams.length > 0) { @@ -49,7 +50,8 @@ public class OffsetParameter implements IParameter { IntegerDt since = new IntegerDt(sinceParams[0]); return ParameterUtil.fromInteger(myType, since); } catch (DataFormatException e) { - throw new InvalidRequestException(Msg.code(461) + "Invalid " + Constants.PARAM_OFFSET + " value: " + sinceParams[0]); + throw new InvalidRequestException( + Msg.code(461) + "Invalid " + Constants.PARAM_OFFSET + " value: " + sinceParams[0]); } } } @@ -58,14 +60,22 @@ public class OffsetParameter implements IParameter { } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { if (theOuterCollectionType != null) { - throw new ConfigurationException(Msg.code(462) + "Method '" + theMethod.getName() + "' in type '" +theMethod.getDeclaringClass().getCanonicalName()+ "' is annotated with @" + Offset.class.getName() + " but can not be of collection type"); + throw new ConfigurationException(Msg.code(462) + "Method '" + theMethod.getName() + "' in type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + + Offset.class.getName() + " but can not be of collection type"); } if (!ParameterUtil.isBindableIntegerType(theParameterType)) { - throw new ConfigurationException(Msg.code(463) + "Method '" + theMethod.getName() + "' in type '" +theMethod.getDeclaringClass().getCanonicalName()+ "' is annotated with @" + Offset.class.getName() + " but type '" + theParameterType + "' is an invalid type, must be one of Integer or IntegerType"); + throw new ConfigurationException(Msg.code(463) + "Method '" + theMethod.getName() + "' in type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + + Offset.class.getName() + " but type '" + theParameterType + + "' is an invalid type, must be one of Integer or IntegerType"); } myType = theParameterType; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/OperationMethodBinding.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/OperationMethodBinding.java index 1cf2715ba09..4addaf9ee80 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/OperationMethodBinding.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/OperationMethodBinding.java @@ -43,7 +43,6 @@ import org.apache.commons.lang3.builder.ToStringStyle; import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseResource; -import javax.annotation.Nonnull; import java.io.IOException; import java.lang.annotation.Annotation; import java.lang.reflect.Method; @@ -52,6 +51,7 @@ import java.util.ArrayList; import java.util.Collections; 
import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -81,18 +81,46 @@ public class OperationMethodBinding extends BaseResourceReturningMethodBinding { * Constructor - This is the constructor that is called when binding a * standard @Operation method. */ - public OperationMethodBinding(Class theReturnResourceType, Class theReturnTypeFromRp, Method theMethod, FhirContext theContext, Object theProvider, - Operation theAnnotation) { - this(theReturnResourceType, theReturnTypeFromRp, theMethod, theContext, theProvider, theAnnotation.idempotent(), theAnnotation.deleteEnabled(), theAnnotation.name(), theAnnotation.type(), theAnnotation.typeName(), theAnnotation.returnParameters(), - theAnnotation.bundleType(), theAnnotation.global()); + public OperationMethodBinding( + Class theReturnResourceType, + Class theReturnTypeFromRp, + Method theMethod, + FhirContext theContext, + Object theProvider, + Operation theAnnotation) { + this( + theReturnResourceType, + theReturnTypeFromRp, + theMethod, + theContext, + theProvider, + theAnnotation.idempotent(), + theAnnotation.deleteEnabled(), + theAnnotation.name(), + theAnnotation.type(), + theAnnotation.typeName(), + theAnnotation.returnParameters(), + theAnnotation.bundleType(), + theAnnotation.global()); myCanonicalUrl = theAnnotation.canonicalUrl(); myManualRequestMode = theAnnotation.manualRequest(); myManualResponseMode = theAnnotation.manualResponse(); } - protected OperationMethodBinding(Class theReturnResourceType, Class theReturnTypeFromRp, Method theMethod, FhirContext theContext, Object theProvider, - boolean theIdempotent, boolean theDeleteEnabled, String theOperationName, Class theOperationType, String theOperationTypeName, - OperationParam[] theReturnParams, BundleTypeEnum theBundleType, boolean theGlobal) { + protected OperationMethodBinding( + Class theReturnResourceType, + Class theReturnTypeFromRp, + Method theMethod, + FhirContext theContext, + Object theProvider, + boolean theIdempotent, + boolean theDeleteEnabled, + String theOperationName, + Class theOperationType, + String theOperationTypeName, + OperationParam[] theReturnParams, + BundleTypeEnum theBundleType, + boolean theGlobal) { super(theReturnResourceType, theMethod, theContext, theProvider); myBundleType = theBundleType; @@ -105,14 +133,16 @@ public class OperationMethodBinding extends BaseResourceReturningMethodBinding { for (Annotation[] nextParamAnnotations : theMethod.getParameterAnnotations()) { for (Annotation nextParam : nextParamAnnotations) { if (nextParam instanceof OptionalParam || nextParam instanceof RequiredParam) { - throw new ConfigurationException(Msg.code(421) + "Illegal method parameter annotation @" + nextParam.annotationType().getSimpleName() + " on method: " + theMethod.toString()); + throw new ConfigurationException(Msg.code(421) + "Illegal method parameter annotation @" + + nextParam.annotationType().getSimpleName() + " on method: " + theMethod.toString()); } } } if (isBlank(theOperationName)) { - throw new ConfigurationException(Msg.code(422) + "Method '" + theMethod.getName() + "' on type " + theMethod.getDeclaringClass().getName() + " is annotated with @" + Operation.class.getSimpleName() - + " but this annotation has no name defined"); + throw new ConfigurationException(Msg.code(422) + "Method '" + theMethod.getName() + "' on type " + + theMethod.getDeclaringClass().getName() + " is 
annotated with @" + Operation.class.getSimpleName() + + " but this annotation has no name defined"); } if (theOperationName.startsWith("$") == false) { theOperationName = "$" + theOperationName; @@ -130,7 +160,8 @@ public class OperationMethodBinding extends BaseResourceReturningMethodBinding { setResourceName(null); } } catch (DataFormatException e) { - throw new ConfigurationException(Msg.code(423) + "Failed to bind method " + theMethod + " - " + e.getMessage(), e); + throw new ConfigurationException( + Msg.code(423) + "Failed to bind method " + theMethod + " - " + e.getMessage(), e); } if (theMethod.getReturnType().equals(IBundleProvider.class)) { @@ -173,9 +204,11 @@ public class OperationMethodBinding extends BaseResourceReturningMethodBinding { Class returnType = next.type(); if (!returnType.equals(IBase.class)) { if (returnType.isInterface() || Modifier.isAbstract(returnType.getModifiers())) { - throw new ConfigurationException(Msg.code(424) + "Invalid value for @OperationParam.type(): " + returnType.getName()); + throw new ConfigurationException( + Msg.code(424) + "Invalid value for @OperationParam.type(): " + returnType.getName()); } - OperationParameter.validateTypeIsAppropriateVersionForContext(theMethod, returnType, theContext, "return"); + OperationParameter.validateTypeIsAppropriateVersionForContext( + theMethod, returnType, theContext, "return"); type.setType(theContext.getElementDefinition(returnType).getName()); } myReturnParams.add(type); @@ -184,9 +217,11 @@ public class OperationMethodBinding extends BaseResourceReturningMethodBinding { // Parameter Validation if (myCanOperateAtInstanceLevel && !isGlobalMethod() && getResourceName() == null) { - throw new ConfigurationException(Msg.code(425) + "@" + Operation.class.getSimpleName() + " method is an instance level method (it has an @" + IdParam.class.getSimpleName() + " parameter) but is not marked as global() and is not declared in a resource provider: " + theMethod.getName()); + throw new ConfigurationException(Msg.code(425) + "@" + Operation.class.getSimpleName() + + " method is an instance level method (it has an @" + IdParam.class.getSimpleName() + + " parameter) but is not marked as global() and is not declared in a resource provider: " + + theMethod.getName()); } - } public String getShortDescription() { @@ -259,7 +294,9 @@ public class OperationMethodBinding extends BaseResourceReturningMethodBinding { } RequestTypeEnum requestType = theRequest.getRequestType(); - if (requestType != RequestTypeEnum.GET && requestType != RequestTypeEnum.POST && requestType != RequestTypeEnum.DELETE) { + if (requestType != RequestTypeEnum.GET + && requestType != RequestTypeEnum.POST + && requestType != RequestTypeEnum.DELETE) { // Operations can only be invoked with GET, POST and DELETE return MethodMatchEnum.NONE; } @@ -284,7 +321,9 @@ public class OperationMethodBinding extends BaseResourceReturningMethodBinding { } } - if (myGlobal && theRequestDetails.getId() != null && theRequestDetails.getId().hasIdPart()) { + if (myGlobal + && theRequestDetails.getId() != null + && theRequestDetails.getId().hasIdPart()) { retVal = RestOperationTypeEnum.EXTENDED_OPERATION_INSTANCE; } else if (myGlobal && isNotBlank(theRequestDetails.getResourceName())) { retVal = RestOperationTypeEnum.EXTENDED_OPERATION_TYPE; @@ -296,16 +335,20 @@ public class OperationMethodBinding extends BaseResourceReturningMethodBinding { @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("name", myName) - 
.append("methodName", getMethod().getDeclaringClass().getSimpleName() + "." + getMethod().getName()) - .append("serverLevel", myCanOperateAtServerLevel) - .append("typeLevel", myCanOperateAtTypeLevel) - .append("instanceLevel", myCanOperateAtInstanceLevel) - .toString(); + .append("name", myName) + .append( + "methodName", + getMethod().getDeclaringClass().getSimpleName() + "." + + getMethod().getName()) + .append("serverLevel", myCanOperateAtServerLevel) + .append("typeLevel", myCanOperateAtTypeLevel) + .append("instanceLevel", myCanOperateAtInstanceLevel) + .toString(); } @Override - public Object invokeServer(IRestfulServer theServer, RequestDetails theRequest) throws BaseServerResponseException, IOException { + public Object invokeServer(IRestfulServer theServer, RequestDetails theRequest) + throws BaseServerResponseException, IOException { if (theRequest.getRequestType() == RequestTypeEnum.POST && !myManualRequestMode) { IBaseResource requestContents = ResourceParameter.loadResourceFromRequest(theRequest, this, null); theRequest.getUserData().put(OperationParameter.REQUEST_CONTENTS_USERDATA_KEY, requestContents); @@ -314,7 +357,8 @@ public class OperationMethodBinding extends BaseResourceReturningMethodBinding { } @Override - public Object invokeServer(IRestfulServer theServer, RequestDetails theRequest, Object[] theMethodParams) throws BaseServerResponseException { + public Object invokeServer(IRestfulServer theServer, RequestDetails theRequest, Object[] theMethodParams) + throws BaseServerResponseException { List allowedRequestTypes = new ArrayList<>(List.of(RequestTypeEnum.POST)); if (myIdempotent) { allowedRequestTypes.add(RequestTypeEnum.GET); @@ -322,20 +366,30 @@ public class OperationMethodBinding extends BaseResourceReturningMethodBinding { if (myDeleteEnabled) { allowedRequestTypes.add(RequestTypeEnum.DELETE); } - String messageParameter = allowedRequestTypes.stream().map(RequestTypeEnum::name).collect(Collectors.joining(", ")); - String message = getContext().getLocalizer().getMessage(OperationMethodBinding.class, "methodNotSupported", theRequest.getRequestType(), messageParameter); + String messageParameter = + allowedRequestTypes.stream().map(RequestTypeEnum::name).collect(Collectors.joining(", ")); + String message = getContext() + .getLocalizer() + .getMessage( + OperationMethodBinding.class, + "methodNotSupported", + theRequest.getRequestType(), + messageParameter); if (theRequest.getRequestType() == RequestTypeEnum.POST) { // all good } else if (theRequest.getRequestType() == RequestTypeEnum.GET) { if (!myIdempotent) { - throw new MethodNotAllowedException(Msg.code(426) + message, allowedRequestTypes.toArray(RequestTypeEnum[]::new)); + throw new MethodNotAllowedException( + Msg.code(426) + message, allowedRequestTypes.toArray(RequestTypeEnum[]::new)); } } else if (theRequest.getRequestType() == RequestTypeEnum.DELETE) { if (!myDeleteEnabled) { - throw new MethodNotAllowedException(Msg.code(427) + message, allowedRequestTypes.toArray(RequestTypeEnum[]::new)); + throw new MethodNotAllowedException( + Msg.code(427) + message, allowedRequestTypes.toArray(RequestTypeEnum[]::new)); } } else { - throw new MethodNotAllowedException(Msg.code(428) + message, allowedRequestTypes.toArray(RequestTypeEnum[]::new)); + throw new MethodNotAllowedException( + Msg.code(428) + message, allowedRequestTypes.toArray(RequestTypeEnum[]::new)); } if (myIdParamIndex != null) { @@ -374,7 +428,8 @@ public class OperationMethodBinding extends BaseResourceReturningMethodBinding { @Override 
protected void populateRequestDetailsForInterceptor(RequestDetails theRequestDetails, Object[] theMethodParams) { super.populateRequestDetailsForInterceptor(theRequestDetails, theMethodParams); - IBaseResource resource = (IBaseResource) theRequestDetails.getUserData().get(OperationParameter.REQUEST_CONTENTS_USERDATA_KEY); + IBaseResource resource = + (IBaseResource) theRequestDetails.getUserData().get(OperationParameter.REQUEST_CONTENTS_USERDATA_KEY); theRequestDetails.setResource(resource); } @@ -427,5 +482,4 @@ public class OperationMethodBinding extends BaseResourceReturningMethodBinding { myType = theType; } } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/OperationParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/OperationParameter.java index 400b9dfc531..8f4e77ff2ee 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/OperationParameter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/OperationParameter.java @@ -71,15 +71,19 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; public class OperationParameter implements IParameter { static final String REQUEST_CONTENTS_USERDATA_KEY = OperationParam.class.getName() + "_PARSED_RESOURCE"; + @SuppressWarnings("unchecked") private static final Class[] COMPOSITE_TYPES = new Class[0]; + private final FhirContext myContext; private final String myName; private final String myOperationName; private boolean myAllowGet; private IOperationParamConverter myConverter; + @SuppressWarnings("rawtypes") private Class myInnerCollectionType; + private int myMax; private int myMin; private Class myParameterType; @@ -88,7 +92,14 @@ public class OperationParameter implements IParameter { private String myDescription; private List myExampleValues; - OperationParameter(FhirContext theCtx, String theOperationName, String theParameterName, int theMin, int theMax, String theDescription, List theExampleValues) { + OperationParameter( + FhirContext theCtx, + String theOperationName, + String theParameterName, + int theMin, + int theMax, + String theDescription, + List theExampleValues) { myOperationName = theOperationName; myName = theParameterName; myMin = theMin; @@ -101,7 +112,6 @@ public class OperationParameter implements IParameter { exampleValues.addAll(theExampleValues); } myExampleValues = Collections.unmodifiableList(exampleValues); - } @SuppressWarnings({"rawtypes", "unchecked"}) @@ -148,7 +158,11 @@ public class OperationParameter implements IParameter { @SuppressWarnings("unchecked") @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { FhirContext context = getContext(); validateTypeIsAppropriateVersionForContext(theMethod, theParameterType, context, "parameter"); @@ -170,10 +184,9 @@ public class OperationParameter implements IParameter { boolean typeIsConcrete = !myParameterType.isInterface() && !Modifier.isAbstract(myParameterType.getModifiers()); - boolean isSearchParam = - IQueryParameterType.class.isAssignableFrom(myParameterType) || - IQueryParameterOr.class.isAssignableFrom(myParameterType) || - IQueryParameterAnd.class.isAssignableFrom(myParameterType); + boolean isSearchParam = IQueryParameterType.class.isAssignableFrom(myParameterType) + || 
IQueryParameterOr.class.isAssignableFrom(myParameterType) + || IQueryParameterAnd.class.isAssignableFrom(myParameterType); /* * Note: We say here !IBase.class.isAssignableFrom because a bunch of DSTU1/2 datatypes also @@ -183,9 +196,9 @@ public class OperationParameter implements IParameter { isSearchParam &= typeIsConcrete && !IBase.class.isAssignableFrom(myParameterType); myAllowGet = IPrimitiveType.class.isAssignableFrom(myParameterType) - || String.class.equals(myParameterType) - || isSearchParam - || ValidationModeEnum.class.equals(myParameterType); + || String.class.equals(myParameterType) + || isSearchParam + || ValidationModeEnum.class.equals(myParameterType); /* * The parameter can be of type string for validation methods - This is a bit weird. See ValidateDstu2Test. We @@ -207,22 +220,25 @@ public class OperationParameter implements IParameter { } else if (myParameterType.equals(ValidationModeEnum.class)) { myParamType = "code"; } else if (IBase.class.isAssignableFrom(myParameterType) && typeIsConcrete) { - myParamType = myContext.getElementDefinition((Class) myParameterType).getName(); + myParamType = myContext + .getElementDefinition((Class) myParameterType) + .getName(); } else if (isSearchParam) { myParamType = "string"; mySearchParameterBinding = new SearchParameter(myName, myMin > 0); mySearchParameterBinding.setCompositeTypes(COMPOSITE_TYPES); - mySearchParameterBinding.setType(myContext, theParameterType, theInnerCollectionType, theOuterCollectionType); + mySearchParameterBinding.setType( + myContext, theParameterType, theInnerCollectionType, theOuterCollectionType); myConverter = new OperationParamConverter(); } else { - throw new ConfigurationException(Msg.code(361) + "Invalid type for @OperationParam on method " + theMethod + ": " + myParameterType.getName()); + throw new ConfigurationException(Msg.code(361) + "Invalid type for @OperationParam on method " + + theMethod + ": " + myParameterType.getName()); } - } - } - public static void validateTypeIsAppropriateVersionForContext(Method theMethod, Class theParameterType, FhirContext theContext, String theUseDescription) { + public static void validateTypeIsAppropriateVersionForContext( + Method theMethod, Class theParameterType, FhirContext theContext, String theUseDescription) { if (theParameterType != null) { if (theParameterType.isInterface()) { // TODO: we could probably be a bit more nuanced here but things like @@ -233,7 +249,10 @@ public class OperationParameter implements IParameter { FhirVersionEnum elementVersion = FhirVersionEnum.determineVersionForType(theParameterType); if (elementVersion != null) { if (elementVersion != theContext.getVersion().getVersion()) { - throw new ConfigurationException(Msg.code(360) + "Incorrect use of type " + theParameterType.getSimpleName() + " as " + theUseDescription + " type for method when theContext is for version " + theContext.getVersion().getVersion().name() + " in method: " + theMethod.toString()); + throw new ConfigurationException(Msg.code(360) + "Incorrect use of type " + + theParameterType.getSimpleName() + " as " + theUseDescription + + " type for method when theContext is for version " + + theContext.getVersion().getVersion().name() + " in method: " + theMethod.toString()); } } } @@ -245,17 +264,22 @@ public class OperationParameter implements IParameter { } private void throwWrongParamType(Object nextValue) { - throw new InvalidRequestException(Msg.code(362) + "Request has parameter " + myName + " of type " + nextValue.getClass().getSimpleName() + " but method 
expects type " + myParameterType.getSimpleName()); + throw new InvalidRequestException(Msg.code(362) + "Request has parameter " + myName + " of type " + + nextValue.getClass().getSimpleName() + " but method expects type " + myParameterType.getSimpleName()); } @SuppressWarnings("unchecked") @Override - public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException { + public Object translateQueryParametersIntoServerArgument( + RequestDetails theRequest, BaseMethodBinding theMethodBinding) + throws InternalErrorException, InvalidRequestException { List matchingParamValues = new ArrayList<>(); OperationMethodBinding method = (OperationMethodBinding) theMethodBinding; - if (theRequest.getRequestType() == RequestTypeEnum.GET || method.isManualRequestMode() || method.isDeleteEnabled()) { + if (theRequest.getRequestType() == RequestTypeEnum.GET + || method.isManualRequestMode() + || method.isDeleteEnabled()) { translateQueryParametersIntoServerArgumentForGet(theRequest, matchingParamValues); } else { translateQueryParametersIntoServerArgumentForPost(theRequest, matchingParamValues); @@ -274,7 +298,8 @@ public class OperationParameter implements IParameter { return retVal; } - private void translateQueryParametersIntoServerArgumentForGet(RequestDetails theRequest, List matchingParamValues) { + private void translateQueryParametersIntoServerArgumentForGet( + RequestDetails theRequest, List matchingParamValues) { if (mySearchParameterBinding != null) { List params = new ArrayList(); @@ -302,7 +327,6 @@ public class OperationParameter implements IParameter { Object values = mySearchParameterBinding.parse(myContext, Collections.singletonList(next)); addValueToList(matchingParamValues, values); } - } } else { @@ -359,15 +383,18 @@ public class OperationParameter implements IParameter { } else { for (String nextValue : paramValues) { FhirContext ctx = theRequest.getServer().getFhirContext(); - RuntimePrimitiveDatatypeDefinition def = (RuntimePrimitiveDatatypeDefinition) ctx.getElementDefinition(myParameterType.asSubclass(IBase.class)); + RuntimePrimitiveDatatypeDefinition def = (RuntimePrimitiveDatatypeDefinition) + ctx.getElementDefinition(myParameterType.asSubclass(IBase.class)); IPrimitiveType instance = def.newInstance(); instance.setValueAsString(nextValue); matchingParamValues.add(instance); } } } else { - HapiLocalizer localizer = theRequest.getServer().getFhirContext().getLocalizer(); - String msg = localizer.getMessage(OperationParameter.class, "urlParamNotPrimitive", myOperationName, myName); + HapiLocalizer localizer = + theRequest.getServer().getFhirContext().getLocalizer(); + String msg = localizer.getMessage( + OperationParameter.class, "urlParamNotPrimitive", myOperationName, myName); throw new MethodNotAllowedException(Msg.code(363) + msg, RequestTypeEnum.POST); } } @@ -383,23 +410,27 @@ public class OperationParameter implements IParameter { */ private void processAllCommaSeparatedValues(String[] theParamValues, Consumer theHandler) { for (String nextValue : theParamValues) { - QualifiedParamList qualifiedParamList = QualifiedParamList.splitQueryStringByCommasIgnoreEscape(null, nextValue); + QualifiedParamList qualifiedParamList = + QualifiedParamList.splitQueryStringByCommasIgnoreEscape(null, nextValue); for (String nextSplitValue : qualifiedParamList) { theHandler.accept(nextSplitValue); } } } - private void translateQueryParametersIntoServerArgumentForPost(RequestDetails 
theRequest, List matchingParamValues) { + private void translateQueryParametersIntoServerArgumentForPost( + RequestDetails theRequest, List matchingParamValues) { IBaseResource requestContents = (IBaseResource) theRequest.getUserData().get(REQUEST_CONTENTS_USERDATA_KEY); if (requestContents != null) { RuntimeResourceDefinition def = myContext.getResourceDefinition(requestContents); if (def.getName().equals("Parameters")) { BaseRuntimeChildDefinition paramChild = def.getChildByName("parameter"); - BaseRuntimeElementCompositeDefinition paramChildElem = (BaseRuntimeElementCompositeDefinition) paramChild.getChildByName("parameter"); + BaseRuntimeElementCompositeDefinition paramChildElem = + (BaseRuntimeElementCompositeDefinition) paramChild.getChildByName("parameter"); - RuntimeChildPrimitiveDatatypeDefinition nameChild = (RuntimeChildPrimitiveDatatypeDefinition) paramChildElem.getChildByName("name"); + RuntimeChildPrimitiveDatatypeDefinition nameChild = + (RuntimeChildPrimitiveDatatypeDefinition) paramChildElem.getChildByName("name"); BaseRuntimeChildDefinition valueChild = paramChildElem.getChildByName("value[x]"); BaseRuntimeChildDefinition resourceChild = paramChildElem.getChildByName("resource"); @@ -414,15 +445,16 @@ public class OperationParameter implements IParameter { if (myParameterType.isAssignableFrom(nextParameter.getClass())) { matchingParamValues.add(nextParameter); } else { - List paramValues = valueChild.getAccessor().getValues(nextParameter); - List paramResources = resourceChild.getAccessor().getValues(nextParameter); + List paramValues = + valueChild.getAccessor().getValues(nextParameter); + List paramResources = + resourceChild.getAccessor().getValues(nextParameter); if (paramValues != null && paramValues.size() > 0) { tryToAddValues(paramValues, matchingParamValues); } else if (paramResources != null && paramResources.size() > 0) { tryToAddValues(paramResources, matchingParamValues); } } - } } } @@ -432,7 +464,6 @@ public class OperationParameter implements IParameter { if (myParameterType.isAssignableFrom(requestContents.getClass())) { tryToAddValues(Arrays.asList(requestContents), matchingParamValues); } - } } } @@ -458,7 +489,8 @@ public class OperationParameter implements IParameter { Class targetType = (Class) myParameterType; BaseRuntimeElementDefinition sourceTypeDef = myContext.getElementDefinition(sourceType); BaseRuntimeElementDefinition targetTypeDef = myContext.getElementDefinition(targetType); - if (targetTypeDef instanceof IRuntimeDatatypeDefinition && sourceTypeDef instanceof IRuntimeDatatypeDefinition) { + if (targetTypeDef instanceof IRuntimeDatatypeDefinition + && sourceTypeDef instanceof IRuntimeDatatypeDefinition) { IRuntimeDatatypeDefinition targetTypeDtDef = (IRuntimeDatatypeDefinition) targetTypeDef; if (targetTypeDtDef.isProfileOf(sourceType)) { FhirTerser terser = myContext.newTerser(); @@ -488,7 +520,6 @@ public class OperationParameter implements IParameter { Object incomingServer(Object theObject); Object outgoingClient(Object theObject); - } class OperationParamConverter implements IOperationParamConverter { @@ -500,23 +531,22 @@ public class OperationParameter implements IParameter { @Override public Object incomingServer(Object theObject) { IPrimitiveType obj = (IPrimitiveType) theObject; - List paramList = Collections.singletonList(QualifiedParamList.splitQueryStringByCommasIgnoreEscape(null, obj.getValueAsString())); + List paramList = Collections.singletonList( + QualifiedParamList.splitQueryStringByCommasIgnoreEscape(null, 
obj.getValueAsString())); return mySearchParameterBinding.parse(myContext, paramList); } @Override public Object outgoingClient(Object theObject) { IQueryParameterType obj = (IQueryParameterType) theObject; - IPrimitiveType retVal = (IPrimitiveType) myContext.getElementDefinition("string").newInstance(); + IPrimitiveType retVal = + (IPrimitiveType) myContext.getElementDefinition("string").newInstance(); retVal.setValueAsString(obj.getValueAsQueryToken(myContext)); return retVal; } - } public static void throwInvalidMode(String paramValues) { throw new InvalidRequestException(Msg.code(364) + "Invalid mode value: \"" + paramValues + "\""); } - - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/PageMethodBinding.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/PageMethodBinding.java index 22199202c2b..ecf91cd264b 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/PageMethodBinding.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/PageMethodBinding.java @@ -40,10 +40,10 @@ import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import ca.uhn.fhir.util.ReflectionUtil; import org.hl7.fhir.instance.model.api.IBaseResource; -import javax.annotation.Nonnull; import java.lang.reflect.Method; import java.util.HashSet; import java.util.Set; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -71,16 +71,20 @@ public class PageMethodBinding extends BaseResourceReturningMethodBinding { } @Override - public Object invokeServer(IRestfulServer theServer, RequestDetails theRequest, Object[] theMethodParams) throws InvalidRequestException, InternalErrorException { - return handlePagingRequest(theServer, theRequest, theRequest.getParameters().get(Constants.PARAM_PAGINGACTION)[0]); + public Object invokeServer(IRestfulServer theServer, RequestDetails theRequest, Object[] theMethodParams) + throws InvalidRequestException, InternalErrorException { + return handlePagingRequest( + theServer, theRequest, theRequest.getParameters().get(Constants.PARAM_PAGINGACTION)[0]); } @Override public IBaseResource doInvokeServer(IRestfulServer theServer, RequestDetails theRequest) { - return handlePagingRequest(theServer, theRequest, theRequest.getParameters().get(Constants.PARAM_PAGINGACTION)[0]); + return handlePagingRequest( + theServer, theRequest, theRequest.getParameters().get(Constants.PARAM_PAGINGACTION)[0]); } - private IBaseResource handlePagingRequest(IRestfulServer theServer, RequestDetails theRequest, String thePagingAction) { + private IBaseResource handlePagingRequest( + IRestfulServer theServer, RequestDetails theRequest, String thePagingAction) { IPagingProvider pagingProvider = theServer.getPagingProvider(); if (pagingProvider == null) { throw new InvalidRequestException(Msg.code(416) + "This server does not support paging"); @@ -90,11 +94,16 @@ public class PageMethodBinding extends BaseResourceReturningMethodBinding { populateRequestDetailsForInterceptor(theRequest, ReflectionUtil.EMPTY_OBJECT_ARRAY); callPreHandledHooks(theRequest); - ResponseBundleRequest responseBundleRequest = buildResponseBundleRequest(theServer, theRequest, thePagingAction, pagingProvider); + ResponseBundleRequest responseBundleRequest = + buildResponseBundleRequest(theServer, theRequest, thePagingAction, pagingProvider); return myResponseBundleBuilder.buildResponseBundle(responseBundleRequest); } - private ResponseBundleRequest 
buildResponseBundleRequest(IRestfulServer theServer, RequestDetails theRequest, String thePagingAction, IPagingProvider thePagingProvider) { + private ResponseBundleRequest buildResponseBundleRequest( + IRestfulServer theServer, + RequestDetails theRequest, + String thePagingAction, + IPagingProvider thePagingProvider) { int offset = 0; IBundleProvider bundleProvider; @@ -136,7 +145,8 @@ public class PageMethodBinding extends BaseResourceReturningMethodBinding { String linkSelfBase = theRequest.getFhirServerBase(); String completeUrl = theRequest.getCompleteUrl(); - String linkSelf = linkSelfBase + completeUrl.substring(theRequest.getCompleteUrl().indexOf('?')); + String linkSelf = + linkSelfBase + completeUrl.substring(theRequest.getCompleteUrl().indexOf('?')); BundleTypeEnum bundleType = null; String[] bundleTypeValues = theRequest.getParameters().get(Constants.PARAM_BUNDLETYPE); @@ -151,11 +161,11 @@ public class PageMethodBinding extends BaseResourceReturningMethodBinding { count = thePagingProvider.getMaximumPageSize(); } - ResponseBundleRequest responseBundleRequest = new ResponseBundleRequest(theServer, bundleProvider, theRequest, offset, count, linkSelf, includes, bundleType, thePagingAction); + ResponseBundleRequest responseBundleRequest = new ResponseBundleRequest( + theServer, bundleProvider, theRequest, offset, count, linkSelf, includes, bundleType, thePagingAction); return responseBundleRequest; } - static void callPreHandledHooks(RequestDetails theRequest) { HookParams preHandledParams = new HookParams(); preHandledParams.add(RestOperationTypeEnum.class, theRequest.getRestOperationType()); @@ -163,8 +173,8 @@ public class PageMethodBinding extends BaseResourceReturningMethodBinding { preHandledParams.addIfMatchesType(ServletRequestDetails.class, theRequest); if (theRequest.getInterceptorBroadcaster() != null) { theRequest - .getInterceptorBroadcaster() - .callHooks(Pointcut.SERVER_INCOMING_REQUEST_PRE_HANDLED, preHandledParams); + .getInterceptorBroadcaster() + .callHooks(Pointcut.SERVER_INCOMING_REQUEST_PRE_HANDLED, preHandledParams); } } @@ -172,7 +182,8 @@ public class PageMethodBinding extends BaseResourceReturningMethodBinding { // Return an HTTP 410 if the search is not known if (theBundleProvider == null) { ourLog.info("Client requested unknown paging ID[{}]", thePagingAction); - String msg = getContext().getLocalizer().getMessage(PageMethodBinding.class, "unknownSearchId", thePagingAction); + String msg = + getContext().getLocalizer().getMessage(PageMethodBinding.class, "unknownSearchId", thePagingAction); throw new ResourceGoneException(Msg.code(417) + msg); } } @@ -190,13 +201,10 @@ public class PageMethodBinding extends BaseResourceReturningMethodBinding { return MethodMatchEnum.NONE; } - if (theRequest.getRequestType() != RequestTypeEnum.GET && - theRequest.getRequestType() != RequestTypeEnum.POST) { + if (theRequest.getRequestType() != RequestTypeEnum.GET && theRequest.getRequestType() != RequestTypeEnum.POST) { return MethodMatchEnum.NONE; } return MethodMatchEnum.EXACT; } - - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/PatchMethodBinding.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/PatchMethodBinding.java index 46199fa5cfd..587ccf46811 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/PatchMethodBinding.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/PatchMethodBinding.java @@ -19,25 +19,23 @@ */ package ca.uhn.fhir.rest.server.method; -import 
ca.uhn.fhir.i18n.Msg; -import java.lang.annotation.Annotation; -import java.lang.reflect.Method; -import java.util.Arrays; -import java.util.Collections; -import java.util.ListIterator; -import java.util.Set; - -import org.hl7.fhir.instance.model.api.IIdType; - import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.annotation.Patch; import ca.uhn.fhir.rest.annotation.ResourceParam; import ca.uhn.fhir.rest.api.PatchTypeEnum; import ca.uhn.fhir.rest.api.RequestTypeEnum; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; +import org.hl7.fhir.instance.model.api.IIdType; +import java.lang.annotation.Annotation; +import java.lang.reflect.Method; +import java.util.Arrays; +import java.util.Collections; +import java.util.ListIterator; +import java.util.Set; import javax.annotation.Nonnull; /** @@ -51,9 +49,16 @@ public class PatchMethodBinding extends BaseOutcomeReturningMethodBindingWithRes private int myResourceParamIndex; public PatchMethodBinding(Method theMethod, FhirContext theContext, Object theProvider) { - super(theMethod, theContext, theProvider, Patch.class, theMethod.getAnnotation(Patch.class).type()); + super( + theMethod, + theContext, + theProvider, + Patch.class, + theMethod.getAnnotation(Patch.class).type()); - for (ListIterator> iter = Arrays.asList(theMethod.getParameterTypes()).listIterator(); iter.hasNext();) { + for (ListIterator> iter = + Arrays.asList(theMethod.getParameterTypes()).listIterator(); + iter.hasNext(); ) { int nextIndex = iter.nextIndex(); Class next = iter.next(); if (next.equals(PatchTypeEnum.class)) { @@ -67,10 +72,12 @@ public class PatchMethodBinding extends BaseOutcomeReturningMethodBindingWithRes } if (myPatchTypeParameterIndex == -1) { - throw new ConfigurationException(Msg.code(370) + "Method has no parameter of type " + PatchTypeEnum.class.getName() + " - " + theMethod.toString()); + throw new ConfigurationException(Msg.code(370) + "Method has no parameter of type " + + PatchTypeEnum.class.getName() + " - " + theMethod.toString()); } if (myResourceParamIndex == -1) { - throw new ConfigurationException(Msg.code(371) + "Method has no parameter with @" + ResourceParam.class.getSimpleName() + " annotation - " + theMethod.toString()); + throw new ConfigurationException(Msg.code(371) + "Method has no parameter with @" + + ResourceParam.class.getSimpleName() + " annotation - " + theMethod.toString()); } } @@ -99,8 +106,6 @@ public class PatchMethodBinding extends BaseOutcomeReturningMethodBindingWithRes return Collections.singleton(RequestTypeEnum.PATCH); } - - @Override protected void addParametersForServerRequest(RequestDetails theRequest, Object[] theParams) { IIdType id = theRequest.getId(); @@ -112,6 +117,4 @@ public class PatchMethodBinding extends BaseOutcomeReturningMethodBindingWithRes protected String getMatchingOperation() { return null; } - - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/PatchTypeParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/PatchTypeParameter.java index 497fbc8146e..abfee195741 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/PatchTypeParameter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/PatchTypeParameter.java @@ -19,27 +19,33 @@ */ package ca.uhn.fhir.rest.server.method; -import static org.apache.commons.lang3.StringUtils.defaultString; -import static 
org.apache.commons.lang3.StringUtils.trim; - -import java.lang.reflect.Method; -import java.util.Collection; - import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.PatchTypeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import java.lang.reflect.Method; +import java.util.Collection; + +import static org.apache.commons.lang3.StringUtils.defaultString; +import static org.apache.commons.lang3.StringUtils.trim; + class PatchTypeParameter implements IParameter { @Override - public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException { + public Object translateQueryParametersIntoServerArgument( + RequestDetails theRequest, BaseMethodBinding theMethodBinding) + throws InternalErrorException, InvalidRequestException { return getTypeForRequestOrThrowInvalidRequestException(theRequest); } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { // ignore } @@ -55,5 +61,4 @@ class PatchTypeParameter implements IParameter { return PatchTypeEnum.forContentTypeOrThrowInvalidRequestException(theRequest.getFhirContext(), contentTypeAll); } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/RawParamsParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/RawParamsParameter.java index 7a8c6fd571b..c6417e8a4ff 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/RawParamsParameter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/RawParamsParameter.java @@ -19,6 +19,13 @@ */ package ca.uhn.fhir.rest.server.method; +import ca.uhn.fhir.rest.annotation.RawParam; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.param.QualifierDetails; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import org.apache.commons.lang3.Validate; + import java.lang.reflect.Method; import java.util.Arrays; import java.util.Collection; @@ -26,14 +33,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.apache.commons.lang3.Validate; - -import ca.uhn.fhir.rest.annotation.RawParam; -import ca.uhn.fhir.rest.api.server.RequestDetails; -import ca.uhn.fhir.rest.param.QualifierDetails; -import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; -import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; - public class RawParamsParameter implements IParameter { private final List myAllMethodParameters; @@ -42,46 +41,52 @@ public class RawParamsParameter implements IParameter { myAllMethodParameters = theParameters; } - @Override - public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException { + public Object translateQueryParametersIntoServerArgument( + RequestDetails theRequest, BaseMethodBinding theMethodBinding) + throws InternalErrorException, InvalidRequestException { HashMap> retVal = null; for (String nextName : theRequest.getParameters().keySet()) { if 
(nextName.startsWith("_")) { continue; } - + QualifierDetails qualifiers = QualifierDetails.extractQualifiersFromParameterName(nextName); - + boolean alreadyCaptured = false; for (IParameter nextParameter : myAllMethodParameters) { if (nextParameter instanceof SearchParameter) { - SearchParameter nextSearchParam = (SearchParameter)nextParameter; + SearchParameter nextSearchParam = (SearchParameter) nextParameter; if (nextSearchParam.getName().equals(qualifiers.getParamName())) { - if (qualifiers.passes(nextSearchParam.getQualifierWhitelist(), nextSearchParam.getQualifierBlacklist())) { + if (qualifiers.passes( + nextSearchParam.getQualifierWhitelist(), nextSearchParam.getQualifierBlacklist())) { alreadyCaptured = true; break; } } } } - + if (!alreadyCaptured) { if (retVal == null) { retVal = new HashMap<>(); } retVal.put(nextName, Arrays.asList(theRequest.getParameters().get(nextName))); } - } - + return retVal; } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { - Validate.isTrue(theParameterType.equals(Map.class), "Parameter with @" + RawParam.class + " must be of type Map>"); - } - + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { + Validate.isTrue( + theParameterType.equals(Map.class), + "Parameter with @" + RawParam.class + " must be of type Map>"); + } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ReadMethodBinding.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ReadMethodBinding.java index 0193a755b21..098b36c006a 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ReadMethodBinding.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ReadMethodBinding.java @@ -19,9 +19,9 @@ */ package ca.uhn.fhir.rest.server.method; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.IResource; import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum; import ca.uhn.fhir.model.primitive.InstantDt; @@ -46,12 +46,12 @@ import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; -import javax.annotation.Nonnull; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Set; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -63,7 +63,11 @@ public class ReadMethodBinding extends BaseResourceReturningMethodBinding { private Class myIdParameterType; @SuppressWarnings("unchecked") - public ReadMethodBinding(Class theAnnotatedResourceType, Method theMethod, FhirContext theContext, Object theProvider) { + public ReadMethodBinding( + Class theAnnotatedResourceType, + Method theMethod, + FhirContext theContext, + Object theProvider) { super(theAnnotatedResourceType, theMethod, theContext, theProvider); Validate.notNull(theMethod, "Method must not be null"); @@ -76,14 +80,17 @@ public class ReadMethodBinding extends BaseResourceReturningMethodBinding { myIdIndex = idIndex; if (myIdIndex == null) { - throw new ConfigurationException(Msg.code(382) + "@" + Read.class.getSimpleName() + " method " + theMethod.getName() + " on type \"" + theMethod.getDeclaringClass().getName() + "\" does not have a parameter annotated 
with @" + IdParam.class.getSimpleName()); + throw new ConfigurationException( + Msg.code(382) + "@" + Read.class.getSimpleName() + " method " + theMethod.getName() + " on type \"" + + theMethod.getDeclaringClass().getName() + "\" does not have a parameter annotated with @" + + IdParam.class.getSimpleName()); } myIdParameterType = (Class) parameterTypes[myIdIndex]; if (!IIdType.class.isAssignableFrom(myIdParameterType)) { - throw new ConfigurationException(Msg.code(383) + "ID parameter must be of type IdDt or IdType - Found: " + myIdParameterType); + throw new ConfigurationException( + Msg.code(383) + "ID parameter must be of type IdDt or IdType - Found: " + myIdParameterType); } - } @Override @@ -134,8 +141,11 @@ public class ReadMethodBinding extends BaseResourceReturningMethodBinding { if (isNotBlank(theRequest.getCompartmentName())) { return MethodMatchEnum.NONE; } - if (theRequest.getRequestType() != RequestTypeEnum.GET && theRequest.getRequestType() != RequestTypeEnum.HEAD ) { - ourLog.trace("Method {} doesn't match because request type is not GET or HEAD: {}", theRequest.getId(), theRequest.getRequestType()); + if (theRequest.getRequestType() != RequestTypeEnum.GET && theRequest.getRequestType() != RequestTypeEnum.HEAD) { + ourLog.trace( + "Method {} doesn't match because request type is not GET or HEAD: {}", + theRequest.getId(), + theRequest.getRequestType()); return MethodMatchEnum.NONE; } if (Constants.PARAM_HISTORY.equals(theRequest.getOperation())) { @@ -150,13 +160,21 @@ public class ReadMethodBinding extends BaseResourceReturningMethodBinding { return MethodMatchEnum.EXACT; } - @Override - public IBundleProvider invokeServer(IRestfulServer theServer, RequestDetails theRequest, Object[] theMethodParams) throws InvalidRequestException, InternalErrorException { + public IBundleProvider invokeServer( + IRestfulServer theServer, RequestDetails theRequest, Object[] theMethodParams) + throws InvalidRequestException, InternalErrorException { IIdType requestId = theRequest.getId(); FhirContext ctx = theRequest.getServer().getFhirContext(); - String[] invalidQueryStringParams = new String[]{Constants.PARAM_CONTAINED, Constants.PARAM_COUNT, Constants.PARAM_INCLUDE, Constants.PARAM_REVINCLUDE, Constants.PARAM_SORT, Constants.PARAM_SEARCH_TOTAL_MODE}; + String[] invalidQueryStringParams = new String[] { + Constants.PARAM_CONTAINED, + Constants.PARAM_COUNT, + Constants.PARAM_INCLUDE, + Constants.PARAM_REVINCLUDE, + Constants.PARAM_SORT, + Constants.PARAM_SEARCH_TOTAL_MODE + }; List invalidQueryStringParamsInRequest = new ArrayList<>(); Set queryStringParamsInRequest = theRequest.getParameters().keySet(); @@ -168,7 +186,12 @@ public class ReadMethodBinding extends BaseResourceReturningMethodBinding { } if (!invalidQueryStringParamsInRequest.isEmpty()) { - throw new InvalidRequestException(Msg.code(384) + ctx.getLocalizer().getMessage(ReadMethodBinding.class, "invalidParamsInRequest", invalidQueryStringParamsInRequest)); + throw new InvalidRequestException(Msg.code(384) + + ctx.getLocalizer() + .getMessage( + ReadMethodBinding.class, + "invalidParamsInRequest", + invalidQueryStringParamsInRequest)); } theMethodParams[myIdIndex] = ParameterUtil.convertIdToType(requestId, myIdParameterType); @@ -176,7 +199,6 @@ public class ReadMethodBinding extends BaseResourceReturningMethodBinding { Object response = invokeServerMethod(theRequest, theMethodParams); IBundleProvider retVal = toResourceList(response); - if (Integer.valueOf(1).equals(retVal.size())) { List responseResources = 
retVal.getResources(0, 1); IBaseResource responseResource = responseResources.get(0); @@ -191,12 +213,15 @@ public class ReadMethodBinding extends BaseResourceReturningMethodBinding { versionIdPart = responseResource.getMeta().getVersionId(); } if (ifNoneMatch.equals(versionIdPart)) { - ourLog.debug("Returning HTTP 304 because request specified {}={}", Constants.HEADER_IF_NONE_MATCH, ifNoneMatch); + ourLog.debug( + "Returning HTTP 304 because request specified {}={}", + Constants.HEADER_IF_NONE_MATCH, + ifNoneMatch); throw new NotModifiedException(Msg.code(385) + "Not Modified"); } } } - + // If-Modified-Since String ifModifiedSince = theRequest.getHeader(Constants.HEADER_IF_MODIFIED_SINCE_LC); if (isNotBlank(ifModifiedSince)) { @@ -210,16 +235,14 @@ public class ReadMethodBinding extends BaseResourceReturningMethodBinding { } else { lastModified = responseResource.getMeta().getLastUpdated(); } - + if (lastModified != null && lastModified.getTime() <= ifModifiedSinceDate.getTime()) { ourLog.debug("Returning HTTP 304 because If-Modified-Since does not match"); throw new NotModifiedException(Msg.code(386) + "Not Modified"); } } - } // if we have at least 1 result - return retVal; } @@ -231,5 +254,4 @@ public class ReadMethodBinding extends BaseResourceReturningMethodBinding { protected BundleTypeEnum getResponseBundleType() { return null; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/RequestDetailsParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/RequestDetailsParameter.java index 40c866ba7a1..4c5ad6edeb0 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/RequestDetailsParameter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/RequestDetailsParameter.java @@ -19,25 +19,29 @@ */ package ca.uhn.fhir.rest.server.method; -import java.lang.reflect.Method; -import java.util.Collection; - import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import java.lang.reflect.Method; +import java.util.Collection; + public class RequestDetailsParameter implements IParameter { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(RequestDetailsParameter.class); - @Override - public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException { + public Object translateQueryParametersIntoServerArgument( + RequestDetails theRequest, BaseMethodBinding theMethodBinding) + throws InternalErrorException, InvalidRequestException { return theRequest; } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { // ignore } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/RequestedPage.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/RequestedPage.java index d182f13f327..db5a607bb40 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/RequestedPage.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/RequestedPage.java @@ -19,7 +19,6 @@ */ package ca.uhn.fhir.rest.server.method; - /** * This is an intermediate record object that 
holds the offset and limit (count) the user requested for the page of results. */ diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ResourceParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ResourceParameter.java index fff2eea253b..4165d16e959 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ResourceParameter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ResourceParameter.java @@ -19,9 +19,9 @@ */ package ca.uhn.fhir.rest.server.method; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.parser.IParser; import ca.uhn.fhir.rest.api.Constants; @@ -39,7 +39,6 @@ import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBaseBinary; import org.hl7.fhir.instance.model.api.IBaseResource; -import javax.annotation.Nonnull; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStreamReader; @@ -48,6 +47,7 @@ import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.nio.charset.Charset; import java.util.Collection; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -58,7 +58,12 @@ public class ResourceParameter implements IParameter { private Mode myMode; private Class myResourceType; - public ResourceParameter(Class theParameterType, Object theProvider, Mode theMode, boolean theMethodIsOperation, boolean theMethodIsPatch) { + public ResourceParameter( + Class theParameterType, + Object theProvider, + Mode theMode, + boolean theMethodIsOperation, + boolean theMethodIsPatch) { Validate.notNull(theParameterType, "theParameterType can not be null"); Validate.notNull(theMode, "theMode can not be null"); @@ -74,7 +79,6 @@ public class ResourceParameter implements IParameter { if (Modifier.isAbstract(myResourceType.getModifiers()) && providerResourceType != null) { myResourceType = providerResourceType; } - } public Mode getMode() { @@ -86,13 +90,18 @@ public class ResourceParameter implements IParameter { } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { // ignore for now } - @Override - public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException { + public Object translateQueryParametersIntoServerArgument( + RequestDetails theRequest, BaseMethodBinding theMethodBinding) + throws InternalErrorException, InvalidRequestException { switch (myMode) { case BODY: try { @@ -118,7 +127,10 @@ public class ResourceParameter implements IParameter { } public enum Mode { - BODY, BODY_BYTE_ARRAY, ENCODING, RESOURCE + BODY, + BODY_BYTE_ARRAY, + ENCODING, + RESOURCE } private static Reader createRequestReader(RequestDetails theRequest, Charset charset) { @@ -140,20 +152,30 @@ public class ResourceParameter implements IParameter { } @SuppressWarnings("unchecked") - static T loadResourceFromRequest(RequestDetails theRequest, @Nonnull BaseMethodBinding theMethodBinding, Class theResourceType) { + static T loadResourceFromRequest( 
+ RequestDetails theRequest, @Nonnull BaseMethodBinding theMethodBinding, Class theResourceType) { FhirContext ctx = theRequest.getServer().getFhirContext(); final Charset charset = determineRequestCharset(theRequest); Reader requestReader = createRequestReader(theRequest, charset); - RestOperationTypeEnum restOperationType = theMethodBinding != null ? theMethodBinding.getRestOperationType() : null; + RestOperationTypeEnum restOperationType = + theMethodBinding != null ? theMethodBinding.getRestOperationType() : null; EncodingEnum encoding = RestfulServerUtils.determineRequestEncodingNoDefault(theRequest); if (encoding == null) { String ctValue = theRequest.getHeader(Constants.HEADER_CONTENT_TYPE); if (ctValue != null) { if (ctValue.startsWith("application/x-www-form-urlencoded")) { - String msg = theRequest.getServer().getFhirContext().getLocalizer().getMessage(ResourceParameter.class, "invalidContentTypeInRequest", ctValue, theMethodBinding.getRestOperationType()); + String msg = theRequest + .getServer() + .getFhirContext() + .getLocalizer() + .getMessage( + ResourceParameter.class, + "invalidContentTypeInRequest", + ctValue, + theMethodBinding.getRestOperationType()); throw new InvalidRequestException(Msg.code(446) + msg); } } @@ -169,10 +191,12 @@ public class ResourceParameter implements IParameter { return null; } - String msg = ctx.getLocalizer().getMessage(ResourceParameter.class, "noContentTypeInRequest", restOperationType); + String msg = ctx.getLocalizer() + .getMessage(ResourceParameter.class, "noContentTypeInRequest", restOperationType); throw new InvalidRequestException(Msg.code(448) + msg); } else { - String msg = ctx.getLocalizer().getMessage(ResourceParameter.class, "invalidContentTypeInRequest", ctValue, restOperationType); + String msg = ctx.getLocalizer() + .getMessage(ResourceParameter.class, "invalidContentTypeInRequest", ctValue, restOperationType); throw new InvalidRequestException(Msg.code(449) + msg); } } @@ -187,14 +211,18 @@ public class ResourceParameter implements IParameter { retVal = (T) parser.parseResource(requestReader); } } catch (DataFormatException e) { - String msg = ctx.getLocalizer().getMessage(ResourceParameter.class, "failedToParseRequest", encoding.name(), e.getMessage()); + String msg = ctx.getLocalizer() + .getMessage(ResourceParameter.class, "failedToParseRequest", encoding.name(), e.getMessage()); throw new InvalidRequestException(Msg.code(450) + msg); } return retVal; } - static IBaseResource parseResourceFromRequest(RequestDetails theRequest, @Nonnull BaseMethodBinding theMethodBinding, Class theResourceType) { + static IBaseResource parseResourceFromRequest( + RequestDetails theRequest, + @Nonnull BaseMethodBinding theMethodBinding, + Class theResourceType) { if (theRequest.getResource() != null) { return theRequest.getResource(); } @@ -240,5 +268,4 @@ public class ResourceParameter implements IParameter { return retVal; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ResponseBundleBuilder.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ResponseBundleBuilder.java index cb9b53bc89a..87945b6269d 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ResponseBundleBuilder.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ResponseBundleBuilder.java @@ -65,13 +65,21 @@ public class ResponseBundleBuilder { return buildBundle(theResponseBundleRequest, responsePage, links); } - private static IBaseBundle buildBundle(ResponseBundleRequest 
theResponseBundleRequest, ResponsePage pageResponse, BundleLinks links) { + private static IBaseBundle buildBundle( + ResponseBundleRequest theResponseBundleRequest, ResponsePage pageResponse, BundleLinks links) { final IRestfulServer server = theResponseBundleRequest.server; - final IVersionSpecificBundleFactory bundleFactory = server.getFhirContext().newBundleFactory(); + final IVersionSpecificBundleFactory bundleFactory = + server.getFhirContext().newBundleFactory(); final IBundleProvider bundleProvider = theResponseBundleRequest.bundleProvider; - bundleFactory.addRootPropertiesToBundle(bundleProvider.getUuid(), links, bundleProvider.size(), bundleProvider.getPublished()); - bundleFactory.addResourcesToBundle(new ArrayList<>(pageResponse.resourceList), theResponseBundleRequest.bundleType, links.serverBase, server.getBundleInclusionRule(), theResponseBundleRequest.includes); + bundleFactory.addRootPropertiesToBundle( + bundleProvider.getUuid(), links, bundleProvider.size(), bundleProvider.getPublished()); + bundleFactory.addResourcesToBundle( + new ArrayList<>(pageResponse.resourceList), + theResponseBundleRequest.bundleType, + links.serverBase, + server.getBundleInclusionRule(), + theResponseBundleRequest.includes); return (IBaseBundle) bundleFactory.getResourceBundle(); } @@ -110,7 +118,8 @@ public class ResponseBundleBuilder { return new ResponsePage(searchId, resourceList, pageSize, numToReturn, bundleProvider.size()); } - private static String pagingBuildSearchId(ResponseBundleRequest theResponseBundleRequest, int theNumToReturn, Integer theNumTotalResults) { + private static String pagingBuildSearchId( + ResponseBundleRequest theResponseBundleRequest, int theNumToReturn, Integer theNumTotalResults) { final IPagingProvider pagingProvider = theResponseBundleRequest.server.getPagingProvider(); String retval = null; @@ -118,9 +127,12 @@ public class ResponseBundleBuilder { retval = theResponseBundleRequest.searchId; } else { if (theNumTotalResults == null || theNumTotalResults > theNumToReturn) { - retval = pagingProvider.storeResultList(theResponseBundleRequest.requestDetails, theResponseBundleRequest.bundleProvider); + retval = pagingProvider.storeResultList( + theResponseBundleRequest.requestDetails, theResponseBundleRequest.bundleProvider); if (StringUtils.isBlank(retval)) { - ourLog.info("Found {} results but paging provider did not provide an ID to use for paging", theNumTotalResults); + ourLog.info( + "Found {} results but paging provider did not provide an ID to use for paging", + theNumTotalResults); retval = null; } } @@ -128,10 +140,12 @@ public class ResponseBundleBuilder { return retval; } - private static List pagingBuildResourceList(ResponseBundleRequest theResponseBundleRequest, IBundleProvider theBundleProvider, int theNumToReturn) { + private static List pagingBuildResourceList( + ResponseBundleRequest theResponseBundleRequest, IBundleProvider theBundleProvider, int theNumToReturn) { final List retval; if (theNumToReturn > 0 || theBundleProvider.getCurrentPageId() != null) { - retval = theBundleProvider.getResources(theResponseBundleRequest.offset, theNumToReturn + theResponseBundleRequest.offset); + retval = theBundleProvider.getResources( + theResponseBundleRequest.offset, theNumToReturn + theResponseBundleRequest.offset); } else { retval = Collections.emptyList(); } @@ -146,10 +160,13 @@ public class ResponseBundleBuilder { } } - private List offsetBuildResourceList(IBundleProvider theBundleProvider, RequestedPage theRequestedPage, int theNumToReturn) { + private 
List offsetBuildResourceList( + IBundleProvider theBundleProvider, RequestedPage theRequestedPage, int theNumToReturn) { final List retval; - if ((theRequestedPage.offset != null && !myIsOffsetModeHistory) || theBundleProvider.getCurrentPageOffset() != null) { - // When offset query is done theResult already contains correct amount (+ their includes etc.) so return everything + if ((theRequestedPage.offset != null && !myIsOffsetModeHistory) + || theBundleProvider.getCurrentPageOffset() != null) { + // When offset query is done theResult already contains correct amount (+ their includes etc.) so return + // everything retval = theBundleProvider.getResources(0, Integer.MAX_VALUE); } else if (theNumToReturn > 0) { retval = theBundleProvider.getResources(0, theNumToReturn); @@ -159,7 +176,8 @@ public class ResponseBundleBuilder { return retval; } - private static int offsetCalculatePageSize(IRestfulServer server, RequestedPage theRequestedPage, Integer theNumTotalResults) { + private static int offsetCalculatePageSize( + IRestfulServer server, RequestedPage theRequestedPage, Integer theNumTotalResults) { final int retval; if (theRequestedPage.limit != null) { retval = theRequestedPage.limit; @@ -181,7 +199,9 @@ public class ResponseBundleBuilder { for (IBaseResource next : theResourceList) { if (next.getIdElement() == null || next.getIdElement().isEmpty()) { if (!(next instanceof IBaseOperationOutcome)) { - throw new InternalErrorException(Msg.code(435) + "Server method returned resource of type[" + next.getClass().getSimpleName() + "] with no ID specified (IResource#setId(IdDt) must be called)"); + throw new InternalErrorException(Msg.code(435) + "Server method returned resource of type[" + + next.getClass().getSimpleName() + + "] with no ID specified (IResource#setId(IdDt) must be called)"); } } } @@ -209,21 +229,38 @@ public class ResponseBundleBuilder { final IBundleProvider bundleProvider = theResponseBundleRequest.bundleProvider; final RequestedPage pageRequest = theResponseBundleRequest.requestedPage; - BundleLinks retval = new BundleLinks(theResponseBundleRequest.requestDetails.getFhirServerBase(), theResponseBundleRequest.includes, RestfulServerUtils.prettyPrintResponse(server, theResponseBundleRequest.requestDetails), theResponseBundleRequest.bundleType); + BundleLinks retval = new BundleLinks( + theResponseBundleRequest.requestDetails.getFhirServerBase(), + theResponseBundleRequest.includes, + RestfulServerUtils.prettyPrintResponse(server, theResponseBundleRequest.requestDetails), + theResponseBundleRequest.bundleType); retval.setSelf(theResponseBundleRequest.linkSelf); if (bundleProvider.getCurrentPageOffset() != null) { if (StringUtils.isNotBlank(bundleProvider.getNextPageId())) { - retval.setNext(RestfulServerUtils.createOffsetPagingLink(retval, theResponseBundleRequest.requestDetails.getRequestPath(), theResponseBundleRequest.requestDetails.getTenantId(), pageRequest.offset + pageRequest.limit, pageRequest.limit, theResponseBundleRequest.getRequestParameters())); + retval.setNext(RestfulServerUtils.createOffsetPagingLink( + retval, + theResponseBundleRequest.requestDetails.getRequestPath(), + theResponseBundleRequest.requestDetails.getTenantId(), + pageRequest.offset + pageRequest.limit, + pageRequest.limit, + theResponseBundleRequest.getRequestParameters())); } if (StringUtils.isNotBlank(bundleProvider.getPreviousPageId())) { - retval.setNext(RestfulServerUtils.createOffsetPagingLink(retval, theResponseBundleRequest.requestDetails.getRequestPath(), 
theResponseBundleRequest.requestDetails.getTenantId(), Math.max(pageRequest.offset - pageRequest.limit, 0), pageRequest.limit, theResponseBundleRequest.getRequestParameters())); + retval.setNext(RestfulServerUtils.createOffsetPagingLink( + retval, + theResponseBundleRequest.requestDetails.getRequestPath(), + theResponseBundleRequest.requestDetails.getTenantId(), + Math.max(pageRequest.offset - pageRequest.limit, 0), + pageRequest.limit, + theResponseBundleRequest.getRequestParameters())); } - } - if (pageRequest.offset != null || (!server.canStoreSearchResults() && !isEverythingOperation(theResponseBundleRequest.requestDetails)) || myIsOffsetModeHistory) { + if (pageRequest.offset != null + || (!server.canStoreSearchResults() && !isEverythingOperation(theResponseBundleRequest.requestDetails)) + || myIsOffsetModeHistory) { // Paging without caching // We're doing offset pages int requestedToReturn = theResponsePage.numToReturn; @@ -233,21 +270,43 @@ public class ResponseBundleBuilder { } if (theResponsePage.numTotalResults == null || requestedToReturn < theResponsePage.numTotalResults) { if (!theResponsePage.resourceList.isEmpty()) { - retval.setNext(RestfulServerUtils.createOffsetPagingLink(retval, theResponseBundleRequest.requestDetails.getRequestPath(), theResponseBundleRequest.requestDetails.getTenantId(), ObjectUtils.defaultIfNull(pageRequest.offset, 0) + theResponsePage.numToReturn, theResponsePage.numToReturn, theResponseBundleRequest.getRequestParameters())); + retval.setNext(RestfulServerUtils.createOffsetPagingLink( + retval, + theResponseBundleRequest.requestDetails.getRequestPath(), + theResponseBundleRequest.requestDetails.getTenantId(), + ObjectUtils.defaultIfNull(pageRequest.offset, 0) + theResponsePage.numToReturn, + theResponsePage.numToReturn, + theResponseBundleRequest.getRequestParameters())); } } if (pageRequest.offset != null && pageRequest.offset > 0) { int start = Math.max(0, pageRequest.offset - theResponsePage.pageSize); - retval.setPrev(RestfulServerUtils.createOffsetPagingLink(retval, theResponseBundleRequest.requestDetails.getRequestPath(), theResponseBundleRequest.requestDetails.getTenantId(), start, theResponsePage.pageSize, theResponseBundleRequest.getRequestParameters())); + retval.setPrev(RestfulServerUtils.createOffsetPagingLink( + retval, + theResponseBundleRequest.requestDetails.getRequestPath(), + theResponseBundleRequest.requestDetails.getTenantId(), + start, + theResponsePage.pageSize, + theResponseBundleRequest.getRequestParameters())); } } else if (StringUtils.isNotBlank(bundleProvider.getCurrentPageId())) { // We're doing named pages final String uuid = bundleProvider.getUuid(); if (StringUtils.isNotBlank(bundleProvider.getNextPageId())) { - retval.setNext(RestfulServerUtils.createPagingLink(retval, theResponseBundleRequest.requestDetails, uuid, bundleProvider.getNextPageId(), theResponseBundleRequest.getRequestParameters())); + retval.setNext(RestfulServerUtils.createPagingLink( + retval, + theResponseBundleRequest.requestDetails, + uuid, + bundleProvider.getNextPageId(), + theResponseBundleRequest.getRequestParameters())); } if (StringUtils.isNotBlank(bundleProvider.getPreviousPageId())) { - retval.setPrev(RestfulServerUtils.createPagingLink(retval, theResponseBundleRequest.requestDetails, uuid, bundleProvider.getPreviousPageId(), theResponseBundleRequest.getRequestParameters())); + retval.setPrev(RestfulServerUtils.createPagingLink( + retval, + theResponseBundleRequest.requestDetails, + uuid, + bundleProvider.getPreviousPageId(), + 
theResponseBundleRequest.getRequestParameters())); } } else if (theResponsePage.searchId != null) { /* @@ -257,22 +316,36 @@ public class ResponseBundleBuilder { * back paging links that don't make sense. */ if (theResponsePage.size() > 0) { - if (theResponsePage.numTotalResults == null || theResponseBundleRequest.offset + theResponsePage.numToReturn < theResponsePage.numTotalResults) { - retval.setNext((RestfulServerUtils.createPagingLink(retval, theResponseBundleRequest.requestDetails, theResponsePage.searchId, theResponseBundleRequest.offset + theResponsePage.numToReturn, theResponsePage.numToReturn, theResponseBundleRequest.getRequestParameters()))); + if (theResponsePage.numTotalResults == null + || theResponseBundleRequest.offset + theResponsePage.numToReturn + < theResponsePage.numTotalResults) { + retval.setNext((RestfulServerUtils.createPagingLink( + retval, + theResponseBundleRequest.requestDetails, + theResponsePage.searchId, + theResponseBundleRequest.offset + theResponsePage.numToReturn, + theResponsePage.numToReturn, + theResponseBundleRequest.getRequestParameters()))); } if (theResponseBundleRequest.offset > 0) { int start = Math.max(0, theResponseBundleRequest.offset - theResponsePage.pageSize); - retval.setPrev(RestfulServerUtils.createPagingLink(retval, theResponseBundleRequest.requestDetails, theResponsePage.searchId, start, theResponsePage.pageSize, theResponseBundleRequest.getRequestParameters())); + retval.setPrev(RestfulServerUtils.createPagingLink( + retval, + theResponseBundleRequest.requestDetails, + theResponsePage.searchId, + start, + theResponsePage.pageSize, + theResponseBundleRequest.getRequestParameters())); } } } return retval; } - private boolean isEverythingOperation(RequestDetails theRequest) { return (theRequest.getRestOperationType() == RestOperationTypeEnum.EXTENDED_OPERATION_TYPE - || theRequest.getRestOperationType() == RestOperationTypeEnum.EXTENDED_OPERATION_INSTANCE) - && theRequest.getOperation() != null && theRequest.getOperation().equals("$everything"); + || theRequest.getRestOperationType() == RestOperationTypeEnum.EXTENDED_OPERATION_INSTANCE) + && theRequest.getOperation() != null + && theRequest.getOperation().equals("$everything"); } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ResponseBundleRequest.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ResponseBundleRequest.java index 9deb8c41e50..c315f02a7c5 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ResponseBundleRequest.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ResponseBundleRequest.java @@ -68,9 +68,19 @@ public class ResponseBundleRequest { * The id of the search used to page through search results */ public final String searchId; + public final RequestedPage requestedPage; - public ResponseBundleRequest(IRestfulServer theServer, IBundleProvider theBundleProvider, RequestDetails theRequest, int theOffset, Integer theLimit, String theLinkSelf, Set theIncludes, BundleTypeEnum theBundleType, String theSearchId) { + public ResponseBundleRequest( + IRestfulServer theServer, + IBundleProvider theBundleProvider, + RequestDetails theRequest, + int theOffset, + Integer theLimit, + String theLinkSelf, + Set theIncludes, + BundleTypeEnum theBundleType, + String theSearchId) { server = theServer; bundleProvider = theBundleProvider; requestDetails = theRequest; @@ -89,11 +99,14 @@ public class ResponseBundleRequest { private RequestedPage getRequestedPage(Integer theLimit) { // If the 
BundleProvider has an offset and page size, we use that if (bundleProvider.getCurrentPageOffset() != null) { - Validate.notNull(bundleProvider.getCurrentPageSize(), "IBundleProvider returned a non-null offset, but did not return a non-null page size"); + Validate.notNull( + bundleProvider.getCurrentPageSize(), + "IBundleProvider returned a non-null offset, but did not return a non-null page size"); return new RequestedPage(bundleProvider.getCurrentPageOffset(), bundleProvider.getCurrentPageSize()); - // Otherwise, we build it from the request + // Otherwise, we build it from the request } else { - Integer parameterOffset = RestfulServerUtils.tryToExtractNamedParameter(requestDetails, Constants.PARAM_OFFSET); + Integer parameterOffset = + RestfulServerUtils.tryToExtractNamedParameter(requestDetails, Constants.PARAM_OFFSET); return new RequestedPage(parameterOffset, theLimit); } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ResponsePage.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ResponsePage.java index b9c91944ba1..2f9a4742631 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ResponsePage.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ResponsePage.java @@ -49,7 +49,12 @@ public class ResponsePage { */ public final int numToReturn; - public ResponsePage(String theSearchId, List theResourceList, int thePageSize, int theNumToReturn, Integer theNumTotalResults) { + public ResponsePage( + String theSearchId, + List theResourceList, + int thePageSize, + int theNumToReturn, + Integer theNumTotalResults) { searchId = theSearchId; resourceList = theResourceList; pageSize = thePageSize; diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SearchContainedModeParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SearchContainedModeParameter.java index e01b5639ded..019ba87bbf9 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SearchContainedModeParameter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SearchContainedModeParameter.java @@ -33,21 +33,27 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; class SearchContainedModeParameter implements IParameter { @Override - public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException { + public Object translateQueryParametersIntoServerArgument( + RequestDetails theRequest, BaseMethodBinding theMethodBinding) + throws InternalErrorException, InvalidRequestException { return getTypeForRequestOrThrowInvalidRequestException(theRequest); } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { // ignore } public static SearchContainedModeEnum getTypeForRequestOrThrowInvalidRequestException(RequestDetails theRequest) { - String[] paramValues = theRequest.getParameters().getOrDefault(Constants.PARAM_CONTAINED, Constants.EMPTY_STRING_ARRAY); + String[] paramValues = + theRequest.getParameters().getOrDefault(Constants.PARAM_CONTAINED, Constants.EMPTY_STRING_ARRAY); if (paramValues.length > 0 && isNotBlank(paramValues[0])) { return SearchContainedModeEnum.fromCode(paramValues[0]); } return null; } - } diff 
--git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SearchMethodBinding.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SearchMethodBinding.java index d8e08c40f7f..f8987491af3 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SearchMethodBinding.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SearchMethodBinding.java @@ -19,10 +19,9 @@ */ package ca.uhn.fhir.rest.server.method; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.model.api.annotation.Description; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.valueset.BundleTypeEnum; import ca.uhn.fhir.rest.annotation.Search; import ca.uhn.fhir.rest.api.Constants; @@ -40,13 +39,13 @@ import org.apache.commons.lang3.StringUtils; import org.hl7.fhir.instance.model.api.IAnyResource; import org.hl7.fhir.instance.model.api.IBaseResource; -import javax.annotation.Nonnull; import java.lang.reflect.Method; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -73,7 +72,12 @@ public class SearchMethodBinding extends BaseResourceReturningMethodBinding { private final String myQueryName; private final boolean myAllowUnknownParams; - public SearchMethodBinding(Class theReturnResourceType, Class theResourceProviderResourceType, Method theMethod, FhirContext theContext, Object theProvider) { + public SearchMethodBinding( + Class theReturnResourceType, + Class theResourceProviderResourceType, + Method theMethod, + FhirContext theContext, + Object theProvider) { super(theReturnResourceType, theMethod, theContext, theProvider); Search search = theMethod.getAnnotation(Search.class); this.myQueryName = StringUtils.defaultIfBlank(search.queryName(), null); @@ -86,7 +90,12 @@ public class SearchMethodBinding extends BaseResourceReturningMethodBinding { * Only compartment searching methods may have an ID parameter */ if (isBlank(myCompartmentName) && myIdParamIndex != null) { - String msg = theContext.getLocalizer().getMessage(getClass().getName() + ".idWithoutCompartment", theMethod.getName(), theMethod.getDeclaringClass()); + String msg = theContext + .getLocalizer() + .getMessage( + getClass().getName() + ".idWithoutCompartment", + theMethod.getName(), + theMethod.getDeclaringClass()); throw new ConfigurationException(Msg.code(412) + msg); } @@ -96,17 +105,14 @@ public class SearchMethodBinding extends BaseResourceReturningMethodBinding { this.myResourceProviderResourceName = null; } - myRequiredParamNames = getQueryParameters() - .stream() - .filter(t -> t.isRequired()) - .map(t -> t.getName()) - .collect(Collectors.toList()); - myOptionalParamNames = getQueryParameters() - .stream() - .filter(t -> !t.isRequired()) - .map(t -> t.getName()) - .collect(Collectors.toList()); - + myRequiredParamNames = getQueryParameters().stream() + .filter(t -> t.isRequired()) + .map(t -> t.getName()) + .collect(Collectors.toList()); + myOptionalParamNames = getQueryParameters().stream() + .filter(t -> !t.isRequired()) + .map(t -> t.getName()) + .collect(Collectors.toList()); } public String getDescription() { @@ -149,7 +155,11 @@ public class SearchMethodBinding extends BaseResourceReturningMethodBinding { return MethodMatchEnum.NONE; } if 
(!StringUtils.equals(myCompartmentName, theRequest.getCompartmentName())) { - ourLog.trace("Method {} doesn't match because it is for compartment {} but request is compartment {}", getMethod(), myCompartmentName, theRequest.getCompartmentName()); + ourLog.trace( + "Method {} doesn't match because it is for compartment {} but request is compartment {}", + getMethod(), + myCompartmentName, + theRequest.getCompartmentName()); return MethodMatchEnum.NONE; } @@ -173,7 +183,8 @@ public class SearchMethodBinding extends BaseResourceReturningMethodBinding { } } - Set unqualifiedNames = theRequest.getUnqualifiedToQualifiedNames().keySet(); + Set unqualifiedNames = + theRequest.getUnqualifiedToQualifiedNames().keySet(); Set qualifiedParamNames = theRequest.getParameters().keySet(); MethodMatchEnum retVal = MethodMatchEnum.EXACT; @@ -189,24 +200,28 @@ public class SearchMethodBinding extends BaseResourceReturningMethodBinding { if (nextRequestParam.equals(nextMethodParam.getName())) { QualifierDetails qualifiers = QualifierDetails.extractQualifiersFromParameterName(nextRequestParam); - if (qualifiers.passes(nextMethodParam.getQualifierWhitelist(), nextMethodParam.getQualifierBlacklist())) { + if (qualifiers.passes( + nextMethodParam.getQualifierWhitelist(), nextMethodParam.getQualifierBlacklist())) { parameterMatches = true; } } else if (nextUnqualifiedRequestParam.equals(nextMethodParam.getName())) { - List qualifiedNames = theRequest.getUnqualifiedToQualifiedNames().get(nextUnqualifiedRequestParam); - if (passesWhitelistAndBlacklist(qualifiedNames, nextMethodParam.getQualifierWhitelist(), nextMethodParam.getQualifierBlacklist())) { + List qualifiedNames = + theRequest.getUnqualifiedToQualifiedNames().get(nextUnqualifiedRequestParam); + if (passesWhitelistAndBlacklist( + qualifiedNames, + nextMethodParam.getQualifierWhitelist(), + nextMethodParam.getQualifierBlacklist())) { parameterMatches = true; } } // Repetitions supplied by URL but not supported by this parameter - if (theRequest.getParameters().get(nextRequestParam).length > 1 != nextMethodParam.supportsRepetition()) { + if (theRequest.getParameters().get(nextRequestParam).length > 1 + != nextMethodParam.supportsRepetition()) { approx = true; } - } - if (parameterMatches) { if (approx) { @@ -220,13 +235,11 @@ public class SearchMethodBinding extends BaseResourceReturningMethodBinding { } else { retVal = retVal.weakerOf(MethodMatchEnum.NONE); } - } if (retVal == MethodMatchEnum.NONE) { break; } - } if (retVal != MethodMatchEnum.NONE) { @@ -269,10 +282,13 @@ public class SearchMethodBinding extends BaseResourceReturningMethodBinding { } private static boolean mightBeSearchRequest(RequestDetails theRequest) { - if (theRequest.getRequestType() == RequestTypeEnum.GET && theRequest.getOperation() != null && !Constants.PARAM_SEARCH.equals(theRequest.getOperation())) { + if (theRequest.getRequestType() == RequestTypeEnum.GET + && theRequest.getOperation() != null + && !Constants.PARAM_SEARCH.equals(theRequest.getOperation())) { return false; } - if (theRequest.getRequestType() == RequestTypeEnum.POST && !Constants.PARAM_SEARCH.equals(theRequest.getOperation())) { + if (theRequest.getRequestType() == RequestTypeEnum.POST + && !Constants.PARAM_SEARCH.equals(theRequest.getOperation())) { return false; } if (theRequest.getRequestType() != RequestTypeEnum.GET && theRequest.getRequestType() != RequestTypeEnum.POST) { @@ -285,7 +301,9 @@ public class SearchMethodBinding extends BaseResourceReturningMethodBinding { } @Override - public IBundleProvider 
invokeServer(IRestfulServer theServer, RequestDetails theRequest, Object[] theMethodParams) throws InvalidRequestException, InternalErrorException { + public IBundleProvider invokeServer( + IRestfulServer theServer, RequestDetails theRequest, Object[] theMethodParams) + throws InvalidRequestException, InternalErrorException { if (myIdParamIndex != null) { theMethodParams[myIdParamIndex] = theRequest.getId(); } @@ -293,7 +311,6 @@ public class SearchMethodBinding extends BaseResourceReturningMethodBinding { Object response = invokeServerMethod(theRequest, theMethodParams); return toResourceList(response); - } @Override @@ -301,8 +318,8 @@ public class SearchMethodBinding extends BaseResourceReturningMethodBinding { return false; } - - private boolean passesWhitelistAndBlacklist(List theQualifiedNames, Set theQualifierWhitelist, Set theQualifierBlacklist) { + private boolean passesWhitelistAndBlacklist( + List theQualifiedNames, Set theQualifierWhitelist, Set theQualifierBlacklist) { if (theQualifierWhitelist == null && theQualifierBlacklist == null) { return true; } @@ -319,6 +336,4 @@ public class SearchMethodBinding extends BaseResourceReturningMethodBinding { public String toString() { return getMethod().toString(); } - - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SearchParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SearchParameter.java index b60268ccc8a..29c0d40fe1f 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SearchParameter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SearchParameter.java @@ -19,14 +19,8 @@ */ package ca.uhn.fhir.rest.server.method; -import ca.uhn.fhir.i18n.Msg; -import java.util.*; - -import org.apache.commons.lang3.builder.ToStringBuilder; -import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.instance.model.api.IPrimitiveType; - import ca.uhn.fhir.context.*; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.*; import ca.uhn.fhir.model.base.composite.BaseIdentifierDt; import ca.uhn.fhir.model.base.composite.BaseQuantityDt; @@ -39,6 +33,11 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.util.CollectionUtil; import ca.uhn.fhir.util.ReflectionUtil; +import org.apache.commons.lang3.builder.ToStringBuilder; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IPrimitiveType; + +import java.util.*; public class SearchParameter extends BaseQueryParameter { @@ -54,45 +53,66 @@ public class SearchParameter extends BaseQueryParameter { ourParamTypes.put(StringParam.class, RestSearchParameterTypeEnum.STRING); ourParamTypes.put(StringOrListParam.class, RestSearchParameterTypeEnum.STRING); ourParamTypes.put(StringAndListParam.class, RestSearchParameterTypeEnum.STRING); - ourParamQualifiers.put(RestSearchParameterTypeEnum.STRING, CollectionUtil.newSet(Constants.PARAMQUALIFIER_STRING_EXACT, Constants.PARAMQUALIFIER_STRING_CONTAINS, Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); + ourParamQualifiers.put( + RestSearchParameterTypeEnum.STRING, + CollectionUtil.newSet( + Constants.PARAMQUALIFIER_STRING_EXACT, + Constants.PARAMQUALIFIER_STRING_CONTAINS, + Constants.PARAMQUALIFIER_MISSING, + EMPTY_STRING)); ourParamTypes.put(UriParam.class, RestSearchParameterTypeEnum.URI); ourParamTypes.put(UriOrListParam.class, RestSearchParameterTypeEnum.URI); ourParamTypes.put(UriAndListParam.class, 
RestSearchParameterTypeEnum.URI); // TODO: are these right for URI? - ourParamQualifiers.put(RestSearchParameterTypeEnum.URI, CollectionUtil.newSet(Constants.PARAMQUALIFIER_STRING_EXACT, Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); + ourParamQualifiers.put( + RestSearchParameterTypeEnum.URI, + CollectionUtil.newSet( + Constants.PARAMQUALIFIER_STRING_EXACT, Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); ourParamTypes.put(TokenParam.class, RestSearchParameterTypeEnum.TOKEN); ourParamTypes.put(TokenOrListParam.class, RestSearchParameterTypeEnum.TOKEN); ourParamTypes.put(TokenAndListParam.class, RestSearchParameterTypeEnum.TOKEN); - ourParamQualifiers.put(RestSearchParameterTypeEnum.TOKEN, CollectionUtil.newSet(Constants.PARAMQUALIFIER_TOKEN_TEXT, Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); + ourParamQualifiers.put( + RestSearchParameterTypeEnum.TOKEN, + CollectionUtil.newSet( + Constants.PARAMQUALIFIER_TOKEN_TEXT, Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); ourParamTypes.put(DateParam.class, RestSearchParameterTypeEnum.DATE); ourParamTypes.put(DateOrListParam.class, RestSearchParameterTypeEnum.DATE); ourParamTypes.put(DateAndListParam.class, RestSearchParameterTypeEnum.DATE); ourParamTypes.put(DateRangeParam.class, RestSearchParameterTypeEnum.DATE); - ourParamQualifiers.put(RestSearchParameterTypeEnum.DATE, CollectionUtil.newSet(Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); + ourParamQualifiers.put( + RestSearchParameterTypeEnum.DATE, + CollectionUtil.newSet(Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); ourParamTypes.put(QuantityParam.class, RestSearchParameterTypeEnum.QUANTITY); ourParamTypes.put(QuantityOrListParam.class, RestSearchParameterTypeEnum.QUANTITY); ourParamTypes.put(QuantityAndListParam.class, RestSearchParameterTypeEnum.QUANTITY); - ourParamQualifiers.put(RestSearchParameterTypeEnum.QUANTITY, CollectionUtil.newSet(Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); + ourParamQualifiers.put( + RestSearchParameterTypeEnum.QUANTITY, + CollectionUtil.newSet(Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); ourParamTypes.put(NumberParam.class, RestSearchParameterTypeEnum.NUMBER); ourParamTypes.put(NumberOrListParam.class, RestSearchParameterTypeEnum.NUMBER); ourParamTypes.put(NumberAndListParam.class, RestSearchParameterTypeEnum.NUMBER); - ourParamQualifiers.put(RestSearchParameterTypeEnum.NUMBER, CollectionUtil.newSet(Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); + ourParamQualifiers.put( + RestSearchParameterTypeEnum.NUMBER, + CollectionUtil.newSet(Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); ourParamTypes.put(ReferenceParam.class, RestSearchParameterTypeEnum.REFERENCE); ourParamTypes.put(ReferenceOrListParam.class, RestSearchParameterTypeEnum.REFERENCE); ourParamTypes.put(ReferenceAndListParam.class, RestSearchParameterTypeEnum.REFERENCE); // --vvvv-- no empty because that gets added from OptionalParam#chainWhitelist - ourParamQualifiers.put(RestSearchParameterTypeEnum.REFERENCE, CollectionUtil.newSet(Constants.PARAMQUALIFIER_MISSING)); + ourParamQualifiers.put( + RestSearchParameterTypeEnum.REFERENCE, CollectionUtil.newSet(Constants.PARAMQUALIFIER_MISSING)); ourParamTypes.put(CompositeParam.class, RestSearchParameterTypeEnum.COMPOSITE); ourParamTypes.put(CompositeOrListParam.class, RestSearchParameterTypeEnum.COMPOSITE); ourParamTypes.put(CompositeAndListParam.class, RestSearchParameterTypeEnum.COMPOSITE); - ourParamQualifiers.put(RestSearchParameterTypeEnum.COMPOSITE, CollectionUtil.newSet(Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); + 
ourParamQualifiers.put( + RestSearchParameterTypeEnum.COMPOSITE, + CollectionUtil.newSet(Constants.PARAMQUALIFIER_MISSING, EMPTY_STRING)); ourParamTypes.put(HasParam.class, RestSearchParameterTypeEnum.HAS); ourParamTypes.put(HasOrListParam.class, RestSearchParameterTypeEnum.HAS); @@ -101,8 +121,8 @@ public class SearchParameter extends BaseQueryParameter { ourParamTypes.put(SpecialParam.class, RestSearchParameterTypeEnum.SPECIAL); ourParamTypes.put(SpecialOrListParam.class, RestSearchParameterTypeEnum.SPECIAL); ourParamTypes.put(SpecialAndListParam.class, RestSearchParameterTypeEnum.SPECIAL); - ourParamQualifiers.put(RestSearchParameterTypeEnum.SPECIAL, CollectionUtil.newSet(Constants.PARAMQUALIFIER_MISSING)); - + ourParamQualifiers.put( + RestSearchParameterTypeEnum.SPECIAL, CollectionUtil.newSet(Constants.PARAMQUALIFIER_MISSING)); } private List> myCompositeTypes = Collections.emptyList(); @@ -117,8 +137,7 @@ public class SearchParameter extends BaseQueryParameter { private Class myType; private boolean mySupportsRepetition = false; - public SearchParameter() { - } + public SearchParameter() {} public SearchParameter(String theName, boolean theRequired) { this.myName = theName; @@ -127,7 +146,7 @@ public class SearchParameter extends BaseQueryParameter { /* * (non-Javadoc) - * + * * @see ca.uhn.fhir.rest.param.IParameter#encode(java.lang.Object) */ @Override @@ -157,7 +176,7 @@ public class SearchParameter extends BaseQueryParameter { /* * (non-Javadoc) - * + * * @see ca.uhn.fhir.rest.param.IParameter#getName() */ @Override @@ -196,11 +215,12 @@ public class SearchParameter extends BaseQueryParameter { /* * (non-Javadoc) - * + * * @see ca.uhn.fhir.rest.param.IParameter#parse(java.util.List) */ @Override - public Object parse(FhirContext theContext, List theString) throws InternalErrorException, InvalidRequestException { + public Object parse(FhirContext theContext, List theString) + throws InternalErrorException, InvalidRequestException { return myParamBinder.parse(theContext, getName(), theString); } @@ -256,16 +276,22 @@ public class SearchParameter extends BaseQueryParameter { } @SuppressWarnings("unchecked") - public void setType(FhirContext theContext, final Class theType, Class> theInnerCollectionType, Class> theOuterCollectionType) { + public void setType( + FhirContext theContext, + final Class theType, + Class> theInnerCollectionType, + Class> theOuterCollectionType) { - this.myType = theType; if (IQueryParameterType.class.isAssignableFrom(theType)) { - myParamBinder = new QueryParameterTypeBinder((Class) theType, myCompositeTypes); + myParamBinder = + new QueryParameterTypeBinder((Class) theType, myCompositeTypes); } else if (IQueryParameterOr.class.isAssignableFrom(theType)) { - myParamBinder = new QueryParameterOrBinder((Class>) theType, myCompositeTypes); + myParamBinder = + new QueryParameterOrBinder((Class>) theType, myCompositeTypes); } else if (IQueryParameterAnd.class.isAssignableFrom(theType)) { - myParamBinder = new QueryParameterAndBinder((Class>) theType, myCompositeTypes); + myParamBinder = + new QueryParameterAndBinder((Class>) theType, myCompositeTypes); mySupportsRepetition = true; } else if (String.class.equals(theType)) { myParamBinder = new StringBinder(); @@ -277,7 +303,8 @@ public class SearchParameter extends BaseQueryParameter { myParamBinder = new CalendarBinder(); myParamType = RestSearchParameterTypeEnum.DATE; } else if (IPrimitiveType.class.isAssignableFrom(theType) && ReflectionUtil.isInstantiable(theType)) { - RuntimePrimitiveDatatypeDefinition def 
= (RuntimePrimitiveDatatypeDefinition) theContext.getElementDefinition((Class>) theType); + RuntimePrimitiveDatatypeDefinition def = (RuntimePrimitiveDatatypeDefinition) + theContext.getElementDefinition((Class>) theType); if (def.getNativeType() != null) { if (def.getNativeType().equals(Date.class)) { myParamBinder = new FhirPrimitiveBinder((Class>) theType); @@ -288,7 +315,8 @@ public class SearchParameter extends BaseQueryParameter { } } } else { - throw new ConfigurationException(Msg.code(354) + "Unsupported data type for parameter: " + theType.getCanonicalName()); + throw new ConfigurationException( + Msg.code(354) + "Unsupported data type for parameter: " + theType.getCanonicalName()); } RestSearchParameterTypeEnum typeEnum = ourParamTypes.get(theType); @@ -345,5 +373,4 @@ public class SearchParameter extends BaseQueryParameter { retVal.append("required", myRequired); return retVal.toString(); } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SearchTotalModeParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SearchTotalModeParameter.java index 480090de694..ed9f4b8d3de 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SearchTotalModeParameter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SearchTotalModeParameter.java @@ -31,12 +31,18 @@ import java.util.Collection; class SearchTotalModeParameter implements IParameter { @Override - public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException { + public Object translateQueryParametersIntoServerArgument( + RequestDetails theRequest, BaseMethodBinding theMethodBinding) + throws InternalErrorException, InvalidRequestException { return getTypeForRequestOrThrowInvalidRequestException(theRequest); } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { // ignore } @@ -48,5 +54,4 @@ class SearchTotalModeParameter implements IParameter { return null; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ServerBaseParamBinder.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ServerBaseParamBinder.java index 03a95a59a22..078081d20fa 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ServerBaseParamBinder.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ServerBaseParamBinder.java @@ -19,23 +19,28 @@ */ package ca.uhn.fhir.rest.server.method; -import java.lang.reflect.Method; -import java.util.Collection; - import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import java.lang.reflect.Method; +import java.util.Collection; + class ServerBaseParamBinder implements IParameter { @Override - public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException { + public Object translateQueryParametersIntoServerArgument( + RequestDetails theRequest, BaseMethodBinding theMethodBinding) + throws InternalErrorException, InvalidRequestException { return 
theRequest.getFhirServerBase(); } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { // ignore for now } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ServletRequestParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ServletRequestParameter.java index 8c4a427ba99..70b0954c165 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ServletRequestParameter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ServletRequestParameter.java @@ -19,29 +19,34 @@ */ package ca.uhn.fhir.rest.server.method; -import java.lang.reflect.Method; -import java.util.Collection; - import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; +import java.lang.reflect.Method; +import java.util.Collection; + class ServletRequestParameter implements IParameter { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ServletRequestParameter.class); ServletRequestParameter() { super(); } - + @Override - public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException { + public Object translateQueryParametersIntoServerArgument( + RequestDetails theRequest, BaseMethodBinding theMethodBinding) + throws InternalErrorException, InvalidRequestException { return ((ServletRequestDetails) theRequest).getServletRequest(); } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { // ignore } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ServletResponseParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ServletResponseParameter.java index 7aa23a77d1b..a71a4d082cd 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ServletResponseParameter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ServletResponseParameter.java @@ -19,25 +19,30 @@ */ package ca.uhn.fhir.rest.server.method; -import java.lang.reflect.Method; -import java.util.Collection; - import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; +import java.lang.reflect.Method; +import java.util.Collection; + class ServletResponseParameter implements IParameter { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ServletResponseParameter.class); @Override - public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException { + public Object translateQueryParametersIntoServerArgument( + RequestDetails theRequest, BaseMethodBinding theMethodBinding) + throws 
InternalErrorException, InvalidRequestException { return ((ServletRequestDetails) theRequest).getServletResponse(); } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { // ignore } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SinceOrAtParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SinceOrAtParameter.java index e7c9d298e79..7a5ad3988e1 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SinceOrAtParameter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SinceOrAtParameter.java @@ -19,19 +19,18 @@ */ package ca.uhn.fhir.rest.server.method; -import ca.uhn.fhir.i18n.Msg; import java.util.Set; class SinceOrAtParameter extends SearchParameter { -// private Class myType; -// private String myParamName; -// private Class myAnnotationType; + // private Class myType; + // private String myParamName; + // private Class myAnnotationType; public SinceOrAtParameter(String theParamName, Class theAnnotationType) { super(theParamName, false); -// myParamName = theParamName; -// myAnnotationType = theAnnotationType; + // myParamName = theParamName; + // myAnnotationType = theAnnotationType; } @Override @@ -44,43 +43,51 @@ class SinceOrAtParameter extends SearchParameter { return null; } -// @Override -// public void translateClientArgumentIntoQueryArgument(FhirContext theContext, Object theSourceClientArgument, Map> theTargetQueryArguments, IBaseResource theTargetResource) throws InternalErrorException { -// if (theSourceClientArgument != null) { -// InstantDt since = ParameterUtil.toInstant(theSourceClientArgument); -// if (since.isEmpty() == false) { -// theTargetQueryArguments.put(myParamName, Collections.singletonList(since.getValueAsString())); -// } -// } -// } -// -// @Override -// public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException { -// String[] sinceParams = theRequest.getParameters().remove(myParamName); -// if (sinceParams != null) { -// if (sinceParams.length > 0) { -// if (StringUtils.isNotBlank(sinceParams[0])) { -// try { -// return ParameterUtil.fromInstant(myType, sinceParams); -// } catch (DataFormatException e) { -// throw new InvalidRequestException(Msg.code(451) + "Invalid " + Constants.PARAM_SINCE + " value: " + sinceParams[0]); -// } -// } -// } -// } -// return ParameterUtil.fromInstant(myType, null); -// } -// -// @Override -// public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { -// if (theOuterCollectionType != null) { -// throw new ConfigurationException(Msg.code(452) + "Method '" + theMethod.getName() + "' in type '" + "' is annotated with @" + myAnnotationType.getName() + " but can not be of collection type"); -// } -// if (ParameterUtil.getBindableInstantTypes().contains(theParameterType)) { -// myType = theParameterType; -// } else { -// throw new ConfigurationException(Msg.code(453) + "Method '" + theMethod.getName() + "' in type '" + "' is annotated with @" + myAnnotationType.getName() + " but is an invalid type, must be one of: " + ParameterUtil.getBindableInstantTypes()); -// } -// } + // @Override + // public void 
translateClientArgumentIntoQueryArgument(FhirContext theContext, Object theSourceClientArgument, + // Map> theTargetQueryArguments, IBaseResource theTargetResource) throws InternalErrorException + // { + // if (theSourceClientArgument != null) { + // InstantDt since = ParameterUtil.toInstant(theSourceClientArgument); + // if (since.isEmpty() == false) { + // theTargetQueryArguments.put(myParamName, Collections.singletonList(since.getValueAsString())); + // } + // } + // } + // + // @Override + // public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding + // theMethodBinding) throws InternalErrorException, InvalidRequestException { + // String[] sinceParams = theRequest.getParameters().remove(myParamName); + // if (sinceParams != null) { + // if (sinceParams.length > 0) { + // if (StringUtils.isNotBlank(sinceParams[0])) { + // try { + // return ParameterUtil.fromInstant(myType, sinceParams); + // } catch (DataFormatException e) { + // throw new InvalidRequestException(Msg.code(451) + "Invalid " + Constants.PARAM_SINCE + " value: " + + // sinceParams[0]); + // } + // } + // } + // } + // return ParameterUtil.fromInstant(myType, null); + // } + // + // @Override + // public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + // if (theOuterCollectionType != null) { + // throw new ConfigurationException(Msg.code(452) + "Method '" + theMethod.getName() + "' in type '" + "' is + // annotated with @" + myAnnotationType.getName() + " but can not be of collection type"); + // } + // if (ParameterUtil.getBindableInstantTypes().contains(theParameterType)) { + // myType = theParameterType; + // } else { + // throw new ConfigurationException(Msg.code(453) + "Method '" + theMethod.getName() + "' in type '" + "' is + // annotated with @" + myAnnotationType.getName() + " but is an invalid type, must be one of: " + + // ParameterUtil.getBindableInstantTypes()); + // } + // } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SinceParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SinceParameter.java index ec241f074dc..b365f8a7968 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SinceParameter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SinceParameter.java @@ -27,5 +27,4 @@ class SinceParameter extends SinceOrAtParameter { public SinceParameter() { super(Constants.PARAM_SINCE, Since.class); } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SortParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SortParameter.java index 1ab857bfc31..0bd2c343c75 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SortParameter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SortParameter.java @@ -19,20 +19,20 @@ */ package ca.uhn.fhir.rest.server.method; +import ca.uhn.fhir.context.*; import ca.uhn.fhir.i18n.Msg; -import static org.apache.commons.lang3.StringUtils.isNotBlank; +import ca.uhn.fhir.rest.annotation.Sort; +import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.*; +import ca.uhn.fhir.rest.param.ParameterUtil; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import java.lang.reflect.Method; import java.util.Collection; import java.util.StringTokenizer; -import 
ca.uhn.fhir.context.*; -import ca.uhn.fhir.rest.annotation.Sort; -import ca.uhn.fhir.rest.api.*; -import ca.uhn.fhir.rest.api.server.RequestDetails; -import ca.uhn.fhir.rest.param.ParameterUtil; -import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; -import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import static org.apache.commons.lang3.StringUtils.isNotBlank; public class SortParameter implements IParameter { @@ -43,20 +43,27 @@ public class SortParameter implements IParameter { } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { if (theOuterCollectionType != null || theInnerCollectionType != null) { - throw new ConfigurationException(Msg.code(443) + "Method '" + theMethod.getName() + "' in type '" + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + Sort.class.getName() + throw new ConfigurationException(Msg.code(443) + "Method '" + theMethod.getName() + "' in type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + Sort.class.getName() + " but can not be of collection type"); } if (!theParameterType.equals(SortSpec.class)) { - throw new ConfigurationException(Msg.code(444) + "Method '" + theMethod.getName() + "' in type '" + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + Sort.class.getName() + throw new ConfigurationException(Msg.code(444) + "Method '" + theMethod.getName() + "' in type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + Sort.class.getName() + " but is an invalid type, must be: " + SortSpec.class.getCanonicalName()); } - } @Override - public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException { + public Object translateQueryParametersIntoServerArgument( + RequestDetails theRequest, BaseMethodBinding theMethodBinding) + throws InternalErrorException, InvalidRequestException { if (!theRequest.getParameters().containsKey(Constants.PARAM_SORT)) { if (!theRequest.getParameters().containsKey(Constants.PARAM_SORT_ASC)) { if (!theRequest.getParameters().containsKey(Constants.PARAM_SORT_DESC)) { @@ -105,7 +112,6 @@ public class SortParameter implements IParameter { innerSpec.setChain(spec); innerSpec = spec; } - } } @@ -151,5 +157,4 @@ public class SortParameter implements IParameter { String string = val.toString(); return string; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SummaryEnumParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SummaryEnumParameter.java index cb56dc619b3..ec6f632cba4 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SummaryEnumParameter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SummaryEnumParameter.java @@ -19,13 +19,8 @@ */ package ca.uhn.fhir.rest.server.method; -import ca.uhn.fhir.i18n.Msg; -import static org.apache.commons.lang3.StringUtils.isBlank; - -import java.lang.reflect.Method; -import java.util.*; - import ca.uhn.fhir.context.ConfigurationException; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.SummaryEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; @@ -33,24 +28,30 
@@ import ca.uhn.fhir.rest.param.binder.CollectionBinder; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import java.lang.reflect.Method; +import java.util.*; + +import static org.apache.commons.lang3.StringUtils.isBlank; + public class SummaryEnumParameter implements IParameter { @SuppressWarnings("rawtypes") private Class myInnerCollectionType; - @Override - @SuppressWarnings({ "rawtypes", "unchecked" }) - public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException { + @SuppressWarnings({"rawtypes", "unchecked"}) + public Object translateQueryParametersIntoServerArgument( + RequestDetails theRequest, BaseMethodBinding theMethodBinding) + throws InternalErrorException, InvalidRequestException { Set value = getSummaryValueOrNull(theRequest); if (value == null || value.isEmpty()) { return null; } - + if (myInnerCollectionType == null) { return value.iterator().next(); } - + try { Collection retVal = myInnerCollectionType.newInstance(); retVal.addAll(value); @@ -89,16 +90,20 @@ public class SummaryEnumParameter implements IParameter { } } } - + if (retVal != null) { if (retVal.contains(SummaryEnum.TEXT)) { if (retVal.size() > 1) { - String msg = theRequest.getServer().getFhirContext().getLocalizer().getMessage(SummaryEnumParameter.class, "cantCombineText"); + String msg = theRequest + .getServer() + .getFhirContext() + .getLocalizer() + .getMessage(SummaryEnumParameter.class, "cantCombineText"); throw new InvalidRequestException(Msg.code(380) + msg); } } } - + return retVal; } @@ -110,13 +115,19 @@ public class SummaryEnumParameter implements IParameter { } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { if (theOuterCollectionType != null) { - throw new ConfigurationException(Msg.code(381) + "Method '" + theMethod.getName() + "' in type '" + theMethod.getDeclaringClass().getCanonicalName() + "' is of type " + SummaryEnum.class + " but can not be a collection of collections"); + throw new ConfigurationException(Msg.code(381) + "Method '" + theMethod.getName() + "' in type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' is of type " + SummaryEnum.class + + " but can not be a collection of collections"); } if (theInnerCollectionType != null) { - myInnerCollectionType = CollectionBinder.getInstantiableCollectionType(theInnerCollectionType, SummaryEnum.class.getSimpleName()); + myInnerCollectionType = CollectionBinder.getInstantiableCollectionType( + theInnerCollectionType, SummaryEnum.class.getSimpleName()); } } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/TransactionMethodBinding.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/TransactionMethodBinding.java index db1146ec56a..59e9eb52300 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/TransactionMethodBinding.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/TransactionMethodBinding.java @@ -36,9 +36,9 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.method.TransactionParameter.ParamStyle; import org.hl7.fhir.instance.model.api.IBaseResource; 
-import javax.annotation.Nonnull; import java.lang.reflect.Method; import java.util.List; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -55,9 +55,11 @@ public class TransactionMethodBinding extends BaseResourceReturningMethodBinding for (IParameter next : getParameters()) { if (next instanceof TransactionParameter) { if (myTransactionParamIndex != -1) { - throw new ConfigurationException(Msg.code(372) + "Method '" + theMethod.getName() + "' in type " + theMethod.getDeclaringClass().getCanonicalName() + " has multiple parameters annotated with the @" - + TransactionParam.class + " annotation, exactly one is required for @" + Transaction.class - + " methods"); + throw new ConfigurationException(Msg.code(372) + "Method '" + theMethod.getName() + "' in type " + + theMethod.getDeclaringClass().getCanonicalName() + + " has multiple parameters annotated with the @" + + TransactionParam.class + " annotation, exactly one is required for @" + Transaction.class + + " methods"); } myTransactionParamIndex = index; myTransactionParamStyle = ((TransactionParameter) next).getParamStyle(); @@ -66,8 +68,9 @@ public class TransactionMethodBinding extends BaseResourceReturningMethodBinding } if (myTransactionParamIndex == -1) { - throw new ConfigurationException(Msg.code(373) + "Method '" + theMethod.getName() + "' in type " + theMethod.getDeclaringClass().getCanonicalName() + " does not have a parameter annotated with the @" - + TransactionParam.class + " annotation"); + throw new ConfigurationException(Msg.code(373) + "Method '" + theMethod.getName() + "' in type " + + theMethod.getDeclaringClass().getCanonicalName() + + " does not have a parameter annotated with the @" + TransactionParam.class + " annotation"); } } @@ -103,7 +106,8 @@ public class TransactionMethodBinding extends BaseResourceReturningMethodBinding @SuppressWarnings("unchecked") @Override - public Object invokeServer(IRestfulServer theServer, RequestDetails theRequest, Object[] theMethodParams) throws InvalidRequestException, InternalErrorException { + public Object invokeServer(IRestfulServer theServer, RequestDetails theRequest, Object[] theMethodParams) + throws InvalidRequestException, InternalErrorException { /* * The design of HAPI's transaction method for DSTU1 support assumed that a transaction was just an update on a @@ -132,7 +136,8 @@ public class TransactionMethodBinding extends BaseResourceReturningMethodBinding IBaseResource newRes = retResources.get(i); if (newRes.getIdElement() == null || newRes.getIdElement().isEmpty()) { if (!(newRes instanceof BaseOperationOutcome)) { - throw new InternalErrorException(Msg.code(374) + "Transaction method returned resource at index " + i + " with no id specified - IResource#setId(IdDt)"); + throw new InternalErrorException(Msg.code(374) + "Transaction method returned resource at index " + + i + " with no id specified - IResource#setId(IdDt)"); } } } @@ -151,11 +156,11 @@ public class TransactionMethodBinding extends BaseResourceReturningMethodBinding if (myTransactionParamIndex != -1) { resource = (IBaseResource) theMethodParams[myTransactionParamIndex]; } else { - Class resourceType = getContext().getResourceDefinition("Bundle").getImplementingClass(); + Class resourceType = + getContext().getResourceDefinition("Bundle").getImplementingClass(); resource = ResourceParameter.parseResourceFromRequest(theRequestDetails, this, resourceType); } theRequestDetails.setResource(resource); } - } diff --git 
a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/TransactionParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/TransactionParameter.java index a7c5ee8d6cd..163e9aee5f8 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/TransactionParameter.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/TransactionParameter.java @@ -19,10 +19,10 @@ */ package ca.uhn.fhir.rest.server.method; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.RuntimeResourceDefinition; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.IResource; import ca.uhn.fhir.rest.annotation.TransactionParam; import ca.uhn.fhir.rest.api.server.RequestDetails; @@ -49,17 +49,25 @@ public class TransactionParameter implements IParameter { } private String createParameterTypeError(Method theMethod) { - return "Method '" + theMethod.getName() + "' in type '" + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + TransactionParam.class.getName() - + " but is not of type Bundle, IBaseResource, IBaseBundle, or List<" + IResource.class.getCanonicalName() + ">"; + return "Method '" + theMethod.getName() + "' in type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + + TransactionParam.class.getName() + " but is not of type Bundle, IBaseResource, IBaseBundle, or List<" + + IResource.class.getCanonicalName() + ">"; } @Override - public void initializeTypes(Method theMethod, Class> theOuterCollectionType, Class> theInnerCollectionType, Class theParameterType) { + public void initializeTypes( + Method theMethod, + Class> theOuterCollectionType, + Class> theInnerCollectionType, + Class theParameterType) { if (theOuterCollectionType != null) { - throw new ConfigurationException(Msg.code(429) + "Method '" + theMethod.getName() + "' in type '" + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" - + TransactionParam.class.getName() + " but can not be a collection of collections"); + throw new ConfigurationException(Msg.code(429) + "Method '" + theMethod.getName() + "' in type '" + + theMethod.getDeclaringClass().getCanonicalName() + "' is annotated with @" + + TransactionParam.class.getName() + " but can not be a collection of collections"); } - if (Modifier.isInterface(theParameterType.getModifiers()) == false && IBaseResource.class.isAssignableFrom(theParameterType)) { + if (Modifier.isInterface(theParameterType.getModifiers()) == false + && IBaseResource.class.isAssignableFrom(theParameterType)) { @SuppressWarnings("unchecked") Class parameterType = (Class) theParameterType; RuntimeResourceDefinition def = myContext.getResourceDefinition(parameterType); @@ -87,8 +95,11 @@ public class TransactionParameter implements IParameter { } @Override - public Object translateQueryParametersIntoServerArgument(RequestDetails theRequest, BaseMethodBinding theMethodBinding) throws InternalErrorException, InvalidRequestException { - IBaseResource parsedBundle = ResourceParameter.parseResourceFromRequest(theRequest, theMethodBinding, myResourceBundleType); + public Object translateQueryParametersIntoServerArgument( + RequestDetails theRequest, BaseMethodBinding theMethodBinding) + throws InternalErrorException, InvalidRequestException { + IBaseResource parsedBundle = + ResourceParameter.parseResourceFromRequest(theRequest, theMethodBinding, myResourceBundleType); switch (myParamStyle) { case 
RESOURCE_LIST: @@ -116,5 +127,4 @@ public class TransactionParameter implements IParameter { */ RESOURCE_LIST } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/UpdateMethodBinding.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/UpdateMethodBinding.java index eb77fe09984..0b42bc547a2 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/UpdateMethodBinding.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/UpdateMethodBinding.java @@ -19,18 +19,8 @@ */ package ca.uhn.fhir.rest.server.method; -import ca.uhn.fhir.i18n.Msg; -import static org.apache.commons.lang3.StringUtils.isBlank; -import static org.apache.commons.lang3.StringUtils.isNotBlank; - -import java.lang.reflect.Method; -import java.util.Collections; -import java.util.Set; - -import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.instance.model.api.IIdType; - import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.annotation.Update; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.RequestTypeEnum; @@ -38,9 +28,16 @@ import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.param.ParameterUtil; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; +import java.lang.reflect.Method; +import java.util.Collections; +import java.util.Set; import javax.annotation.Nonnull; -import javax.annotation.Nullable; + +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.apache.commons.lang3.StringUtils.isNotBlank; public class UpdateMethodBinding extends BaseOutcomeReturningMethodBindingWithResourceParam { @@ -99,23 +96,33 @@ public class UpdateMethodBinding extends BaseOutcomeReturningMethodBindingWithRe } @Override - protected void validateResourceIdAndUrlIdForNonConditionalOperation(IBaseResource theResource, String theResourceId, String theUrlId, String theMatchUrl) { + protected void validateResourceIdAndUrlIdForNonConditionalOperation( + IBaseResource theResource, String theResourceId, String theUrlId, String theMatchUrl) { if (isBlank(theMatchUrl)) { if (isBlank(theUrlId)) { - String msg = getContext().getLocalizer().getMessage(BaseOutcomeReturningMethodBindingWithResourceParam.class, "noIdInUrlForUpdate"); + String msg = getContext() + .getLocalizer() + .getMessage(BaseOutcomeReturningMethodBindingWithResourceParam.class, "noIdInUrlForUpdate"); throw new InvalidRequestException(Msg.code(418) + msg); } if (isBlank(theResourceId)) { - String msg = getContext().getLocalizer().getMessage(BaseOutcomeReturningMethodBindingWithResourceParam.class, "noIdInBodyForUpdate"); + String msg = getContext() + .getLocalizer() + .getMessage(BaseOutcomeReturningMethodBindingWithResourceParam.class, "noIdInBodyForUpdate"); throw new InvalidRequestException(Msg.code(419) + msg); } if (!theResourceId.equals(theUrlId)) { - String msg = getContext().getLocalizer().getMessage(BaseOutcomeReturningMethodBindingWithResourceParam.class, "incorrectIdForUpdate", theResourceId, theUrlId); + String msg = getContext() + .getLocalizer() + .getMessage( + BaseOutcomeReturningMethodBindingWithResourceParam.class, + "incorrectIdForUpdate", + theResourceId, + theUrlId); throw new InvalidRequestException(Msg.code(420) + msg); } } else { theResource.setId((IIdType) null); } - } } diff --git 
a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ValidateMethodBindingDstu2Plus.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ValidateMethodBindingDstu2Plus.java index 6d3260b12f2..5a8635274d8 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ValidateMethodBindingDstu2Plus.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/ValidateMethodBindingDstu2Plus.java @@ -35,9 +35,27 @@ import java.util.List; public class ValidateMethodBindingDstu2Plus extends OperationMethodBinding { - public ValidateMethodBindingDstu2Plus(Class theReturnResourceType, Class theReturnTypeFromRp, Method theMethod, FhirContext theContext, Object theProvider, + public ValidateMethodBindingDstu2Plus( + Class theReturnResourceType, + Class theReturnTypeFromRp, + Method theMethod, + FhirContext theContext, + Object theProvider, Validate theAnnotation) { - super(theReturnResourceType, theReturnTypeFromRp, theMethod, theContext, theProvider, true, false, Constants.EXTOP_VALIDATE, theAnnotation.type(), null, new OperationParam[0], BundleTypeEnum.COLLECTION, false); + super( + theReturnResourceType, + theReturnTypeFromRp, + theMethod, + theContext, + theProvider, + true, + false, + Constants.EXTOP_VALIDATE, + theAnnotation.type(), + null, + new OperationParam[0], + BundleTypeEnum.COLLECTION, + false); List newParams = new ArrayList<>(); int idx = 0; @@ -51,7 +69,14 @@ public class ValidateMethodBindingDstu2Plus extends OperationMethodBinding { Annotation[] parameterAnnotations = theMethod.getParameterAnnotations()[idx]; String description = ParametersUtil.extractDescription(parameterAnnotations); List examples = ParametersUtil.extractExamples(parameterAnnotations); - OperationParameter parameter = new OperationParameter(theContext, Constants.EXTOP_VALIDATE, Constants.EXTOP_VALIDATE_RESOURCE, 0, 1, description, examples); + OperationParameter parameter = new OperationParameter( + theContext, + Constants.EXTOP_VALIDATE, + Constants.EXTOP_VALIDATE_RESOURCE, + 0, + 1, + description, + examples); parameter.initializeTypes(theMethod, null, null, parameterType); newParams.add(parameter); } @@ -64,7 +89,5 @@ public class ValidateMethodBindingDstu2Plus extends OperationMethodBinding { idx++; } setParameters(newParams); - } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/BaseLastNProvider.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/BaseLastNProvider.java index c282c43f593..ed677c7449b 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/BaseLastNProvider.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/BaseLastNProvider.java @@ -44,19 +44,22 @@ public abstract class BaseLastNProvider { @Operation(name = Constants.OPERATION_LASTN, typeName = "Observation", idempotent = true) public IBaseBundle lastN( - ServletRequestDetails theRequestDetails, - @OperationParam(name = "subject", typeName = "reference", min = 0, max = 1) IBaseReference theSubject, - @OperationParam(name = "category", typeName = "coding", min = 0, max = OperationParam.MAX_UNLIMITED) List theCategories, - @OperationParam(name = "code", typeName = "coding", min = 0, max = OperationParam.MAX_UNLIMITED) List theCodes, - @OperationParam(name = "max", typeName = "integer", min = 0, max = 1) IPrimitiveType theMax - ) { + ServletRequestDetails theRequestDetails, + @OperationParam(name = "subject", typeName = "reference", min = 0, max = 1) IBaseReference theSubject, + @OperationParam(name = 
"category", typeName = "coding", min = 0, max = OperationParam.MAX_UNLIMITED) + List theCategories, + @OperationParam(name = "code", typeName = "coding", min = 0, max = OperationParam.MAX_UNLIMITED) + List theCodes, + @OperationParam(name = "max", typeName = "integer", min = 0, max = 1) IPrimitiveType theMax) { return processLastN(theSubject, theCategories, theCodes, theMax); } /** * Subclasses should implement this method */ - protected abstract IBaseBundle processLastN(IBaseReference theSubject, List theCategories, List theCodes, IPrimitiveType theMax); - - + protected abstract IBaseBundle processLastN( + IBaseReference theSubject, + List theCategories, + List theCodes, + IPrimitiveType theMax); } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/HashMapResourceProvider.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/HashMapResourceProvider.java index 5efe1e59b85..5fe86adc4ea 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/HashMapResourceProvider.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/HashMapResourceProvider.java @@ -64,7 +64,6 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -77,6 +76,7 @@ import java.util.Map; import java.util.TreeMap; import java.util.concurrent.atomic.AtomicLong; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import static java.lang.Math.max; import static java.lang.Math.min; @@ -155,13 +155,11 @@ public class HashMapResourceProvider implements IResour myCreateCount.incrementAndGet(); - return new MethodOutcome() - .setCreated(true) - .setResource(theResource) - .setId(theResource.getIdElement()); + return new MethodOutcome().setCreated(true).setResource(theResource).setId(theResource.getIdElement()); } - private void createInternal(@ResourceParam T theResource, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) { + private void createInternal( + @ResourceParam T theResource, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) { long idPart = myNextId++; String idPartAsString = Long.toString(idPart); Long versionIdPart = 1L; @@ -181,14 +179,15 @@ public class HashMapResourceProvider implements IResour throw new ResourceNotFoundException(Msg.code(2250) + theId); } - T deletedInstance = (T) myFhirContext.getResourceDefinition(myResourceType).newInstance(); + T deletedInstance = + (T) myFhirContext.getResourceDefinition(myResourceType).newInstance(); long nextVersion = versions.lastEntry().getKey() + 1L; - IIdType id = store(deletedInstance, theId.getIdPart(), nextVersion, theRequestDetails, transactionDetails, true); + IIdType id = + store(deletedInstance, theId.getIdPart(), nextVersion, theRequestDetails, transactionDetails, true); myDeleteCount.incrementAndGet(); - return new MethodOutcome() - .setId(id); + return new MethodOutcome().setId(id); } /** @@ -302,15 +301,13 @@ public class HashMapResourceProvider implements IResour if (theRequestDetails.getParameters().containsKey(Constants.PARAM_ID)) { for (String nextParam : theRequestDetails.getParameters().get(Constants.PARAM_ID)) { List wantIds = Arrays.stream(nextParam.split(",")) - .map(StringUtils::trim) - .filter(StringUtils::isNotBlank) - .map(IdDt::new) - .collect(Collectors.toList()); + .map(StringUtils::trim) + .filter(StringUtils::isNotBlank) 
+ .map(IdDt::new) + .collect(Collectors.toList()); for (Iterator iter = allResources.iterator(); iter.hasNext(); ) { T next = iter.next(); - boolean found = wantIds - .stream() - .anyMatch(t -> resourceIdMatches(next, t)); + boolean found = wantIds.stream().anyMatch(t -> resourceIdMatches(next, t)); if (!found) { iter.remove(); } @@ -359,14 +356,21 @@ public class HashMapResourceProvider implements IResour } @SuppressWarnings({"unchecked", "DataFlowIssue"}) - private IIdType store(@Nonnull T theResource, String theIdPart, Long theVersionIdPart, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails, boolean theDeleted) { + private IIdType store( + @Nonnull T theResource, + String theIdPart, + Long theVersionIdPart, + RequestDetails theRequestDetails, + TransactionDetails theTransactionDetails, + boolean theDeleted) { IIdType id = myFhirContext.getVersion().newIdType(); String versionIdPart = Long.toString(theVersionIdPart); id.setParts(null, myResourceName, theIdPart, versionIdPart); theResource.setId(id); if (theDeleted) { - IPrimitiveType deletedAt = (IPrimitiveType) myFhirContext.getElementDefinition("instant").newInstance(); + IPrimitiveType deletedAt = (IPrimitiveType) + myFhirContext.getElementDefinition("instant").newInstance(); deletedAt.setValue(new Date()); ResourceMetadataKeyEnum.DELETED_AT.put(theResource, deletedAt); ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(theResource, BundleEntryTransactionMethodEnum.DELETE); @@ -375,7 +379,8 @@ public class HashMapResourceProvider implements IResour if (theVersionIdPart > 1) { ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(theResource, BundleEntryTransactionMethodEnum.PUT); } else { - ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put(theResource, BundleEntryTransactionMethodEnum.POST); + ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.put( + theResource, BundleEntryTransactionMethodEnum.POST); } } @@ -387,7 +392,8 @@ public class HashMapResourceProvider implements IResour if (myFhirContext.getVersion().getVersion() == FhirVersionEnum.DSTU2) { ResourceMetadataKeyEnum.VERSION.put(theResource, versionIdPart); } else { - BaseRuntimeChildDefinition metaChild = myFhirContext.getResourceDefinition(myResourceType).getChildByName("meta"); + BaseRuntimeChildDefinition metaChild = + myFhirContext.getResourceDefinition(myResourceType).getChildByName("meta"); List metaValues = metaChild.getAccessor().getValues(theResource); if (metaValues.size() > 0) { theResource.getMeta().setVersionId(versionIdPart); @@ -403,63 +409,67 @@ public class HashMapResourceProvider implements IResour // Interceptor call: STORAGE_PRESTORAGE_RESOURCE_DELETED HookParams preStorageParams = new HookParams() - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - .add(IBaseResource.class, myIdToHistory.get(theIdPart).getFirst()) - .add(TransactionDetails.class, theTransactionDetails); + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(IBaseResource.class, myIdToHistory.get(theIdPart).getFirst()) + .add(TransactionDetails.class, theTransactionDetails); interceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_RESOURCE_DELETED, preStorageParams); // Interceptor call: STORAGE_PRECOMMIT_RESOURCE_DELETED HookParams preCommitParams = new HookParams() - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - .add(IBaseResource.class, 
myIdToHistory.get(theIdPart).getFirst()) - .add(TransactionDetails.class, theTransactionDetails) - .add(InterceptorInvocationTimingEnum.class, theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED)); + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(IBaseResource.class, myIdToHistory.get(theIdPart).getFirst()) + .add(TransactionDetails.class, theTransactionDetails) + .add( + InterceptorInvocationTimingEnum.class, + theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED)); interceptorBroadcaster.callHooks(Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED, preCommitParams); - } else if (!myIdToHistory.containsKey(theIdPart)) { // Interceptor call: STORAGE_PRESTORAGE_RESOURCE_CREATED HookParams preStorageParams = new HookParams() - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - .add(IBaseResource.class, theResource) - .add(RequestPartitionId.class, null) // we should add this if we want - but this is test usage - .add(TransactionDetails.class, theTransactionDetails); + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(IBaseResource.class, theResource) + .add(RequestPartitionId.class, null) // we should add this if we want - but this is test usage + .add(TransactionDetails.class, theTransactionDetails); interceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED, preStorageParams); // Interceptor call: STORAGE_PRECOMMIT_RESOURCE_CREATED HookParams preCommitParams = new HookParams() - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - .add(IBaseResource.class, theResource) - .add(TransactionDetails.class, theTransactionDetails) - .add(InterceptorInvocationTimingEnum.class, theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED)); + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(IBaseResource.class, theResource) + .add(TransactionDetails.class, theTransactionDetails) + .add( + InterceptorInvocationTimingEnum.class, + theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED)); interceptorBroadcaster.callHooks(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED, preCommitParams); } else { // Interceptor call: STORAGE_PRESTORAGE_RESOURCE_UPDATED HookParams preStorageParams = new HookParams() - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - .add(IBaseResource.class, myIdToHistory.get(theIdPart).getFirst()) - .add(IBaseResource.class, theResource) - .add(TransactionDetails.class, theTransactionDetails); + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(IBaseResource.class, myIdToHistory.get(theIdPart).getFirst()) + .add(IBaseResource.class, theResource) + .add(TransactionDetails.class, theTransactionDetails); interceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED, preStorageParams); // Interceptor call: STORAGE_PRECOMMIT_RESOURCE_UPDATED HookParams preCommitParams = new HookParams() - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - .add(IBaseResource.class, myIdToHistory.get(theIdPart).getFirst()) - 
.add(IBaseResource.class, theResource) - .add(TransactionDetails.class, theTransactionDetails) - .add(InterceptorInvocationTimingEnum.class, theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED)); + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(IBaseResource.class, myIdToHistory.get(theIdPart).getFirst()) + .add(IBaseResource.class, theResource) + .add(TransactionDetails.class, theTransactionDetails) + .add( + InterceptorInvocationTimingEnum.class, + theTransactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED)); interceptorBroadcaster.callHooks(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, preCommitParams); - } } @@ -483,23 +493,22 @@ public class HashMapResourceProvider implements IResour */ @Update public synchronized MethodOutcome update( - @ResourceParam T theResource, - @ConditionalUrlParam String theConditional, - RequestDetails theRequestDetails) { + @ResourceParam T theResource, + @ConditionalUrlParam String theConditional, + RequestDetails theRequestDetails) { TransactionDetails transactionDetails = new TransactionDetails(); - ValidateUtil.isTrueOrThrowInvalidRequest(isBlank(theConditional), "This server doesn't support conditional update"); + ValidateUtil.isTrueOrThrowInvalidRequest( + isBlank(theConditional), "This server doesn't support conditional update"); boolean created = updateInternal(theResource, theRequestDetails, transactionDetails); myUpdateCount.incrementAndGet(); - return new MethodOutcome() - .setCreated(created) - .setResource(theResource) - .setId(theResource.getIdElement()); + return new MethodOutcome().setCreated(created).setResource(theResource).setId(theResource.getIdElement()); } - private boolean updateInternal(@ResourceParam T theResource, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) { + private boolean updateInternal( + @ResourceParam T theResource, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) { String idPartAsString = theResource.getIdElement().getIdPart(); TreeMap versionToResource = getVersionToResource(idPartAsString); @@ -553,7 +562,8 @@ public class HashMapResourceProvider implements IResour } private boolean resourceIdMatches(T theResource, IdDt theId) { - if (theId.getResourceType() == null || theId.getResourceType().equals(myFhirContext.getResourceType(theResource))) { + if (theId.getResourceType() == null + || theId.getResourceType().equals(myFhirContext.getResourceType(theResource))) { if (theResource.getIdElement().getIdPart().equals(theId.getIdPart())) { return true; } @@ -561,8 +571,10 @@ public class HashMapResourceProvider implements IResour return false; } - private static T fireInterceptorsAndFilterAsNeeded(T theResource, RequestDetails theRequestDetails) { - List output = fireInterceptorsAndFilterAsNeeded(Lists.newArrayList(theResource), theRequestDetails); + private static T fireInterceptorsAndFilterAsNeeded( + T theResource, RequestDetails theRequestDetails) { + List output = + fireInterceptorsAndFilterAsNeeded(Lists.newArrayList(theResource), theRequestDetails); if (output.size() == 1) { return theResource; } else { @@ -570,30 +582,31 @@ public class HashMapResourceProvider implements IResour } } - protected static List fireInterceptorsAndFilterAsNeeded(List theResources, RequestDetails theRequestDetails) { + protected static List fireInterceptorsAndFilterAsNeeded( + List theResources, RequestDetails theRequestDetails) { List 
resourcesToReturn = new ArrayList<>(theResources); if (theRequestDetails != null) { IInterceptorBroadcaster interceptorBroadcaster = theRequestDetails.getInterceptorBroadcaster(); // Call the STORAGE_PREACCESS_RESOURCES pointcut (used for consent/auth interceptors) - SimplePreResourceAccessDetails preResourceAccessDetails = new SimplePreResourceAccessDetails(resourcesToReturn); + SimplePreResourceAccessDetails preResourceAccessDetails = + new SimplePreResourceAccessDetails(resourcesToReturn); HookParams params = new HookParams() - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - .add(IPreResourceAccessDetails.class, preResourceAccessDetails); + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(IPreResourceAccessDetails.class, preResourceAccessDetails); interceptorBroadcaster.callHooks(Pointcut.STORAGE_PREACCESS_RESOURCES, params); preResourceAccessDetails.applyFilterToList(); // Call the STORAGE_PREACCESS_RESOURCES pointcut (used for consent/auth interceptors) SimplePreResourceShowDetails preResourceShowDetails = new SimplePreResourceShowDetails(resourcesToReturn); HookParams preShowParams = new HookParams() - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - .add(IPreResourceShowDetails.class, preResourceShowDetails); + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(IPreResourceShowDetails.class, preResourceShowDetails); interceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESHOW_RESOURCES, preShowParams); resourcesToReturn = preResourceShowDetails.toList(); - } return resourcesToReturn; diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/IResourceProviderFactoryObserver.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/IResourceProviderFactoryObserver.java index cdd446f00d0..0256d43ebac 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/IResourceProviderFactoryObserver.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/IResourceProviderFactoryObserver.java @@ -19,10 +19,11 @@ */ package ca.uhn.fhir.rest.server.provider; -import javax.annotation.Nonnull; import java.util.function.Supplier; +import javax.annotation.Nonnull; public interface IResourceProviderFactoryObserver { void update(@Nonnull Supplier theSupplier); + void remove(@Nonnull Supplier theSupplier); } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ProviderConstants.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ProviderConstants.java index b2a05505b27..f5ceb74ec62 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ProviderConstants.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ProviderConstants.java @@ -56,6 +56,7 @@ public class ProviderConstants { * Operation name: diff */ public static final String DIFF_OPERATION_NAME = "$diff"; + public static final String DIFF_FROM_VERSION_PARAMETER = "fromVersion"; public static final String DIFF_FROM_PARAMETER = "from"; @@ -66,6 +67,7 @@ public class ProviderConstants { * EMPI Operations */ public static final String EMPI_MATCH = "$match"; + public static final String MDM_MATCH = "$mdm-match"; public static final String MDM_MATCH_RESOURCE = "resource"; public static final String MDM_RESOURCE_TYPE = "resourceType"; 
@@ -110,6 +112,7 @@ public class ProviderConstants { * Clinical Reasoning Operations */ public static final String CR_OPERATION_APPLY = "$apply"; + public static final String CR_OPERATION_R5_APPLY = "$r5.apply"; public static final String CR_OPERATION_PREPOPULATE = "$prepopulate"; public static final String CR_OPERATION_POPULATE = "$populate"; diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ResourceProviderFactory.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ResourceProviderFactory.java index 4ebf6049e57..1a9fd7dcfc5 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ResourceProviderFactory.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ResourceProviderFactory.java @@ -19,13 +19,13 @@ */ package ca.uhn.fhir.rest.server.provider; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.function.Supplier; +import javax.annotation.Nonnull; public class ResourceProviderFactory { private Set myObservers = Collections.synchronizedSet(new HashSet<>()); diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ServerCapabilityStatementProvider.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ServerCapabilityStatementProvider.java index d64dd429821..b7532e20d5d 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ServerCapabilityStatementProvider.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ServerCapabilityStatementProvider.java @@ -63,9 +63,6 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; -import javax.servlet.ServletContext; -import javax.servlet.http.HttpServletRequest; import java.util.Date; import java.util.HashMap; import java.util.HashSet; @@ -77,6 +74,9 @@ import java.util.Set; import java.util.TreeSet; import java.util.UUID; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.servlet.ServletContext; +import javax.servlet.http.HttpServletRequest; import static org.apache.commons.lang3.StringUtils.defaultString; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -114,7 +114,8 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv /** * Constructor */ - public ServerCapabilityStatementProvider(FhirContext theContext, RestfulServerConfiguration theServerConfiguration) { + public ServerCapabilityStatementProvider( + FhirContext theContext, RestfulServerConfiguration theServerConfiguration) { myContext = theContext; myServerConfiguration = theServerConfiguration; mySearchParamRegistry = null; @@ -125,7 +126,10 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv /** * Constructor */ - public ServerCapabilityStatementProvider(RestfulServer theRestfulServer, ISearchParamRegistry theSearchParamRegistry, IValidationSupport theValidationSupport) { + public ServerCapabilityStatementProvider( + RestfulServer theRestfulServer, + ISearchParamRegistry theSearchParamRegistry, + IValidationSupport theValidationSupport) { myContext = theRestfulServer.getFhirContext(); mySearchParamRegistry = theSearchParamRegistry; myServer = theRestfulServer; @@ -133,7 +137,8 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv myValidationSupport = theValidationSupport; 
} - private void checkBindingForSystemOps(FhirTerser theTerser, IBase theRest, Set theSystemOps, BaseMethodBinding theMethodBinding) { + private void checkBindingForSystemOps( + FhirTerser theTerser, IBase theRest, Set theSystemOps, BaseMethodBinding theMethodBinding) { RestOperationTypeEnum restOperationType = theMethodBinding.getRestOperationType(); if (restOperationType.isSystemLevel()) { String sysOp = restOperationType.getCode(); @@ -145,7 +150,6 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv } } - private String conformanceDate(RestfulServerConfiguration theServerConfiguration) { IPrimitiveType buildDate = theServerConfiguration.getConformanceDate(); if (buildDate != null && buildDate.getValue() != null) { @@ -165,7 +169,6 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv return myServerConfiguration; } - /** * Gets the value of the "publisher" that will be placed in the generated conformance statement. As this is a mandatory element, the value should not be null (although this is not enforced). The * value defaults to "Not provided" but may be set to null, which will cause this element to be omitted. @@ -194,7 +197,8 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv RestfulServerConfiguration configuration = getServerConfiguration(); Bindings bindings = configuration.provideBindings(); - IBaseConformance retVal = (IBaseConformance) myContext.getResourceDefinition("CapabilityStatement").newInstance(); + IBaseConformance retVal = (IBaseConformance) + myContext.getResourceDefinition("CapabilityStatement").newInstance(); FhirTerser terser = myContext.newTerser(); @@ -204,9 +208,11 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv terser.addElement(retVal, "name", "RestServer"); terser.addElement(retVal, "publisher", myPublisher); terser.addElement(retVal, "date", conformanceDate(configuration)); - terser.addElement(retVal, "fhirVersion", myContext.getVersion().getVersion().getFhirVersionString()); + terser.addElement( + retVal, "fhirVersion", myContext.getVersion().getVersion().getFhirVersionString()); - ServletContext servletContext = (ServletContext) (theRequest == null ? null : theRequest.getAttribute(RestfulServer.SERVLET_CONTEXT_ATTRIBUTE)); + ServletContext servletContext = (ServletContext) + (theRequest == null ? 
null : theRequest.getAttribute(RestfulServer.SERVLET_CONTEXT_ATTRIBUTE)); String serverBase = configuration.getServerAddressStrategy().determineServerBase(servletContext, theRequest); terser.addElement(retVal, "implementation.url", serverBase); terser.addElement(retVal, "implementation.description", configuration.getImplementationDescription()); @@ -236,7 +242,8 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv Set systemOps = new HashSet<>(); Map> resourceToMethods = configuration.collectMethodBindings(); - Map> resourceNameToSharedSupertype = configuration.getNameToSharedSupertype(); + Map> resourceNameToSharedSupertype = + configuration.getNameToSharedSupertype(); List globalMethodBindings = configuration.getGlobalBindings(); TreeMultimap resourceNameToIncludes = TreeMultimap.create(); @@ -249,7 +256,6 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv resourceNameToRevIncludes.putAll(resourceName, nextMethod.getRevIncludes()); } } - } for (Entry> nextEntry : resourceToMethods.entrySet()) { @@ -340,7 +346,9 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv // Resource Operations if (nextMethodBinding instanceof SearchMethodBinding) { - addSearchMethodIfSearchIsNamedQuery(theRequestDetails, bindings, terser, operationNames, resource, (SearchMethodBinding) nextMethodBinding); + addSearchMethodIfSearchIsNamedQuery( + theRequestDetails, bindings, terser, operationNames, resource, (SearchMethodBinding) + nextMethodBinding); } else if (nextMethodBinding instanceof OperationMethodBinding) { OperationMethodBinding methodBinding = (OperationMethodBinding) nextMethodBinding; String opName = bindings.getOperationBindingToId().get(methodBinding); @@ -350,7 +358,6 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv populateOperation(theRequestDetails, terser, methodBinding, opName, operation); } } - } // Find any global operations (Operations defines at the system level but with the @@ -361,8 +368,10 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv if (next instanceof OperationMethodBinding) { OperationMethodBinding methodBinding = (OperationMethodBinding) next; if (methodBinding.isGlobalMethod()) { - if (methodBinding.isCanOperateAtInstanceLevel() || methodBinding.isCanOperateAtTypeLevel()) { - String opName = bindings.getOperationBindingToId().get(methodBinding); + if (methodBinding.isCanOperateAtInstanceLevel() + || methodBinding.isCanOperateAtTypeLevel()) { + String opName = + bindings.getOperationBindingToId().get(methodBinding); // Only add each operation (by name) once if (globalOperationNames.add(opName)) { IBase operation = terser.addElement(resource, "operation"); @@ -388,15 +397,19 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv */ ResourceSearchParams searchParams; ISearchParamRegistry searchParamRegistry; - ResourceSearchParams serverConfigurationActiveSearchParams = serverConfiguration.getActiveSearchParams(resourceName); + ResourceSearchParams serverConfigurationActiveSearchParams = + serverConfiguration.getActiveSearchParams(resourceName); if (mySearchParamRegistry != null) { searchParamRegistry = mySearchParamRegistry; - searchParams = mySearchParamRegistry.getActiveSearchParams(resourceName).makeCopy(); + searchParams = mySearchParamRegistry + .getActiveSearchParams(resourceName) + .makeCopy(); for (String nextBuiltInSpName : 
serverConfigurationActiveSearchParams.getSearchParamNames()) { - if (nextBuiltInSpName.startsWith("_") && - !searchParams.containsParamName(nextBuiltInSpName) && - searchParamEnabled(nextBuiltInSpName)) { - searchParams.put(nextBuiltInSpName, serverConfigurationActiveSearchParams.get(nextBuiltInSpName)); + if (nextBuiltInSpName.startsWith("_") + && !searchParams.containsParamName(nextBuiltInSpName) + && searchParamEnabled(nextBuiltInSpName)) { + searchParams.put( + nextBuiltInSpName, serverConfigurationActiveSearchParams.get(nextBuiltInSpName)); } } } else { @@ -404,7 +417,6 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv searchParams = serverConfigurationActiveSearchParams; } - for (RuntimeSearchParam next : searchParams.values()) { IBase searchParam = terser.addElement(resource, "searchParam"); terser.addElement(searchParam, "name", next.getName()); @@ -423,13 +435,11 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv // Add Include to CapabilityStatement.rest.resource NavigableSet resourceIncludes = resourceNameToIncludes.get(resourceName); if (resourceIncludes.isEmpty()) { - List includes = searchParams - .values() - .stream() - .filter(t -> t.getParamType() == RestSearchParameterTypeEnum.REFERENCE) - .map(t -> resourceName + ":" + t.getName()) - .sorted() - .collect(Collectors.toList()); + List includes = searchParams.values().stream() + .filter(t -> t.getParamType() == RestSearchParameterTypeEnum.REFERENCE) + .map(t -> resourceName + ":" + t.getName()) + .sorted() + .collect(Collectors.toList()); terser.addElement(resource, "searchInclude", "*"); for (String nextInclude : includes) { terser.addElement(resource, "searchInclude", nextInclude); @@ -450,11 +460,14 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv continue; } - for (RuntimeSearchParam t : searchParamRegistry.getActiveSearchParams(nextResourceName).values()) { + for (RuntimeSearchParam t : searchParamRegistry + .getActiveSearchParams(nextResourceName) + .values()) { if (t.getParamType() == RestSearchParameterTypeEnum.REFERENCE) { if (isNotBlank(t.getName())) { boolean appropriateTarget = false; - if (t.getTargets().contains(resourceName) || t.getTargets().isEmpty()) { + if (t.getTargets().contains(resourceName) + || t.getTargets().isEmpty()) { appropriateTarget = true; } @@ -494,14 +507,14 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv } } } else if (nextMethodBinding instanceof SearchMethodBinding) { - addSearchMethodIfSearchIsNamedQuery(theRequestDetails, bindings, terser, operationNames, rest, (SearchMethodBinding) nextMethodBinding); + addSearchMethodIfSearchIsNamedQuery( + theRequestDetails, bindings, terser, operationNames, rest, (SearchMethodBinding) + nextMethodBinding); } } } - } - // Find any global operations (Operations defines at the system level but with the // global flag set to true, meaning they apply to all resource types) if (globalMethodBindings != null) { @@ -523,7 +536,6 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv } } - postProcessRest(terser, rest); postProcess(terser, retVal); @@ -539,7 +551,13 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv return true; } - private void addSearchMethodIfSearchIsNamedQuery(RequestDetails theRequestDetails, Bindings theBindings, FhirTerser theTerser, Set theOperationNamesAlreadyAdded, IBase theElementToAddTo, SearchMethodBinding theSearchMethodBinding) { 
+ private void addSearchMethodIfSearchIsNamedQuery( + RequestDetails theRequestDetails, + Bindings theBindings, + FhirTerser theTerser, + Set theOperationNamesAlreadyAdded, + IBase theElementToAddTo, + SearchMethodBinding theSearchMethodBinding) { if (theSearchMethodBinding.getQueryName() != null) { String queryName = theBindings.getNamedSearchMethodBindingToName().get(theSearchMethodBinding); if (theOperationNamesAlreadyAdded.add(queryName)) { @@ -550,15 +568,19 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv } } - private void populateOperation(RequestDetails theRequestDetails, FhirTerser theTerser, OperationMethodBinding theMethodBinding, String theOpName, IBase theOperation) { + private void populateOperation( + RequestDetails theRequestDetails, + FhirTerser theTerser, + OperationMethodBinding theMethodBinding, + String theOpName, + IBase theOperation) { String operationName = theMethodBinding.getName().substring(1); theTerser.addElement(theOperation, "name", operationName); String operationCanonicalUrl = theMethodBinding.getCanonicalUrl(); if (isNotBlank(operationCanonicalUrl)) { theTerser.addElement(theOperation, "definition", operationCanonicalUrl); operationCanonicalUrlToId.put(operationCanonicalUrl, theOpName); - } - else { + } else { theTerser.addElement(theOperation, "definition", createOperationUrl(theRequestDetails, theOpName)); } if (isNotBlank(theMethodBinding.getDescription())) { @@ -582,8 +604,10 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv String baseDefinition = defaultString(terser.getSinglePrimitiveValueOrNull(next, "baseDefinition")); if ("resource".equals(kind) && isNotBlank(url)) { - // Don't include the base resource definitions in the supported profile list - This isn't helpful - if (baseDefinition.equals("http://hl7.org/fhir/StructureDefinition/DomainResource") || baseDefinition.equals("http://hl7.org/fhir/StructureDefinition/Resource")) { + // Don't include the base resource definitions in the supported profile list - This isn't + // helpful + if (baseDefinition.equals("http://hl7.org/fhir/StructureDefinition/DomainResource") + || baseDefinition.equals("http://hl7.org/fhir/StructureDefinition/Resource")) { continue; } @@ -630,7 +654,6 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv return theRequestDetails.getServerBaseForRequest() + "/"; } - @Override @Read(typeName = "OperationDefinition") public IBaseResource readOperationDefinition(@IdParam IIdType theId, RequestDetails theRequestDetails) { @@ -640,12 +663,14 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv RestfulServerConfiguration configuration = getServerConfiguration(); Bindings bindings = configuration.provideBindings(); String operationId = getOperationId(theId); - List operationBindings = bindings.getOperationIdToBindings().get(operationId); + List operationBindings = + bindings.getOperationIdToBindings().get(operationId); if (operationBindings != null && !operationBindings.isEmpty()) { return readOperationDefinitionForOperation(theRequestDetails, bindings, operationBindings); } - List searchBindings = bindings.getSearchNameToBindings().get(theId.getIdPart()); + List searchBindings = + bindings.getSearchNameToBindings().get(theId.getIdPart()); if (searchBindings != null && !searchBindings.isEmpty()) { return readOperationDefinitionForNamedSearch(searchBindings); } @@ -653,14 +678,15 @@ public class ServerCapabilityStatementProvider implements 
IServerConformanceProv } private String getOperationId(IIdType theId) { - if (operationCanonicalUrlToId.get(theId.getValue()) !=null ) { + if (operationCanonicalUrlToId.get(theId.getValue()) != null) { return operationCanonicalUrlToId.get(theId.getValue()); } return theId.getIdPart(); } private IBaseResource readOperationDefinitionForNamedSearch(List bindings) { - IBaseResource op = myContext.getResourceDefinition("OperationDefinition").newInstance(); + IBaseResource op = + myContext.getResourceDefinition("OperationDefinition").newInstance(); FhirTerser terser = myContext.newTerser(); terser.addElement(op, "status", "active"); @@ -699,13 +725,13 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv IBase param = terser.addElement(op, "parameter"); terser.addElement(param, "use", "in"); terser.addElement(param, "type", "string"); - terser.addElement(param, "searchType", nextParam.getParamType().getCode()); + terser.addElement( + param, "searchType", nextParam.getParamType().getCode()); terser.addElement(param, "min", nextParam.isRequired() ? "1" : "0"); terser.addElement(param, "max", "1"); terser.addElement(param, "name", nextParam.getName()); } } - } terser.addElement(op, "code", operationCode); @@ -716,8 +742,12 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv return op; } - private IBaseResource readOperationDefinitionForOperation(RequestDetails theRequestDetails, Bindings theBindings, List theOperationMethodBindings) { - IBaseResource op = myContext.getResourceDefinition("OperationDefinition").newInstance(); + private IBaseResource readOperationDefinitionForOperation( + RequestDetails theRequestDetails, + Bindings theBindings, + List theOperationMethodBindings) { + IBaseResource op = + myContext.getResourceDefinition("OperationDefinition").newInstance(); FhirTerser terser = myContext.newTerser(); terser.addElement(op, "status", "active"); @@ -769,19 +799,19 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv } } - for (IParameter nextParamUntyped : operationMethodBinding.getParameters()) { if (nextParamUntyped instanceof OperationParameter) { OperationParameter nextParam = (OperationParameter) nextParamUntyped; IBase param = inParams.get(nextParam.getName()); - if (param == null){ + if (param == null) { param = terser.addElement(op, "parameter"); inParams.put(nextParam.getName(), param); } IBase existingParam = inParams.get(nextParam.getName()); - if (isNotBlank(nextParam.getDescription()) && terser.getValues(existingParam, "documentation").isEmpty()) { + if (isNotBlank(nextParam.getDescription()) + && terser.getValues(existingParam, "documentation").isEmpty()) { terser.addElement(existingParam, "documentation", nextParam.getDescription()); } @@ -801,23 +831,28 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv terser.setElement(param, "searchType", nextParam.getSearchParamType()); } terser.setElement(param, "min", Integer.toString(nextParam.getMin())); - terser.setElement(param, "max", (nextParam.getMax() == -1 ? "*" : Integer.toString(nextParam.getMax()))); + terser.setElement( + param, "max", (nextParam.getMax() == -1 ? 
"*" : Integer.toString(nextParam.getMax()))); terser.setElement(param, "name", nextParam.getName()); - List> existingExampleExtensions = ExtensionUtil.getExtensionsByUrl((IBaseHasExtensions) param, HapiExtensions.EXT_OP_PARAMETER_EXAMPLE_VALUE); - Set existingExamples = existingExampleExtensions - .stream() - .map(t -> t.getValue()) - .filter(t -> t != null) - .map(t -> (IPrimitiveType) t) - .map(t -> t.getValueAsString()) - .collect(Collectors.toSet()); + List> existingExampleExtensions = ExtensionUtil.getExtensionsByUrl( + (IBaseHasExtensions) param, HapiExtensions.EXT_OP_PARAMETER_EXAMPLE_VALUE); + Set existingExamples = existingExampleExtensions.stream() + .map(t -> t.getValue()) + .filter(t -> t != null) + .map(t -> (IPrimitiveType) t) + .map(t -> t.getValueAsString()) + .collect(Collectors.toSet()); for (String nextExample : nextParam.getExampleValues()) { if (!existingExamples.contains(nextExample)) { - ExtensionUtil.addExtension(myContext, param, HapiExtensions.EXT_OP_PARAMETER_EXAMPLE_VALUE, "string", nextExample); + ExtensionUtil.addExtension( + myContext, + param, + HapiExtensions.EXT_OP_PARAMETER_EXAMPLE_VALUE, + "string", + nextExample); } } - } } @@ -834,7 +869,8 @@ public class ServerCapabilityStatementProvider implements IServerConformanceProv terser.addElement(param, "type", nextParam.getType()); } terser.addElement(param, "min", Integer.toString(nextParam.getMin())); - terser.addElement(param, "max", (nextParam.getMax() == -1 ? "*" : Integer.toString(nextParam.getMax()))); + terser.addElement( + param, "max", (nextParam.getMax() == -1 ? "*" : Integer.toString(nextParam.getMax()))); terser.addElement(param, "name", nextParam.getName()); } } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/servlet/ServletRequestDetails.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/servlet/ServletRequestDetails.java index f52a8ce7a70..fead7ac698c 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/servlet/ServletRequestDetails.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/servlet/ServletRequestDetails.java @@ -31,9 +31,6 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.Validate; -import javax.annotation.Nonnull; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; @@ -48,6 +45,9 @@ import java.util.List; import java.util.Map; import java.util.StringTokenizer; import java.util.zip.GZIPInputStream; +import javax.annotation.Nonnull; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; import static org.apache.commons.lang3.StringUtils.isNotBlank; import static org.apache.commons.lang3.StringUtils.trim; @@ -105,7 +105,8 @@ public class ServletRequestDetails extends RequestDetails { return requestContents; } catch (IOException e) { ourLog.error("Could not load request resource", e); - throw new InvalidRequestException(Msg.code(308) + String.format("Could not load request resource: %s", e.getMessage())); + throw new InvalidRequestException( + Msg.code(308) + String.format("Could not load request resource: %s", e.getMessage())); } } @@ -134,7 +135,9 @@ public class ServletRequestDetails extends RequestDetails { @Override public List getHeaders(String name) { Enumeration headers = getServletRequest().getHeaders(name); - return headers == null ? 
Collections.emptyList() : Collections.list(getServletRequest().getHeaders(name)); + return headers == null + ? Collections.emptyList() + : Collections.list(getServletRequest().getHeaders(name)); } @Override @@ -192,14 +195,14 @@ public class ServletRequestDetails extends RequestDetails { return this; } - private void setRetryFields(HttpServletRequest theRequest){ - if (theRequest == null){ + private void setRetryFields(HttpServletRequest theRequest) { + if (theRequest == null) { return; } Enumeration headers = theRequest.getHeaders(Constants.HEADER_RETRY_ON_VERSION_CONFLICT); if (headers != null) { Iterator headerIterator = headers.asIterator(); - while(headerIterator.hasNext()){ + while (headerIterator.hasNext()) { String headerValue = headerIterator.next(); if (isNotBlank(headerValue)) { StringTokenizer tok = new StringTokenizer(headerValue, ";"); @@ -245,5 +248,4 @@ public class ServletRequestDetails extends RequestDetails { PreferHeader prefer = RestfulServerUtils.parsePreferHeader(null, preferHeader); return prefer.getRespondAsync(); } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/servlet/ServletRestfulResponse.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/servlet/ServletRestfulResponse.java index 6a36d1f5780..7b991a17c7a 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/servlet/ServletRestfulResponse.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/servlet/ServletRestfulResponse.java @@ -25,9 +25,6 @@ import ca.uhn.fhir.util.IoUtil; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Validate; -import javax.annotation.Nonnull; -import javax.servlet.ServletOutputStream; -import javax.servlet.http.HttpServletResponse; import java.io.Closeable; import java.io.IOException; import java.io.OutputStream; @@ -37,6 +34,9 @@ import java.nio.charset.StandardCharsets; import java.util.List; import java.util.Map.Entry; import java.util.zip.GZIPOutputStream; +import javax.annotation.Nonnull; +import javax.servlet.ServletOutputStream; +import javax.servlet.http.HttpServletResponse; public class ServletRestfulResponse extends BaseRestfulResponse { @@ -52,9 +52,10 @@ public class ServletRestfulResponse extends BaseRestfulResponse myExtraSearchParams = new ArrayList<>(); private final FhirContext myCtx; @@ -88,10 +87,10 @@ public class FhirContextSearchParamRegistry implements ISearchParamRegistry { public RuntimeSearchParam getActiveSearchParamByUrl(String theUrl) { // simple implementation for test support return myCtx.getResourceTypes().stream() - .flatMap(type->getActiveSearchParams(type).values().stream()) - .filter(rsp->theUrl.equals(rsp.getUri())) - .findFirst() - .orElse(null); + .flatMap(type -> getActiveSearchParams(type).values().stream()) + .filter(rsp -> theUrl.equals(rsp.getUri())) + .findFirst() + .orElse(null); } @Override @@ -100,7 +99,8 @@ public class FhirContextSearchParamRegistry implements ISearchParamRegistry { } @Override - public List getActiveComboSearchParams(String theResourceName, ComboSearchParamType theParamType) { + public List getActiveComboSearchParams( + String theResourceName, ComboSearchParamType theParamType) { throw new UnsupportedOperationException(Msg.code(2209)); } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ICachedSearchDetails.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ICachedSearchDetails.java index 28fd680a9ff..4a109ee99b8 100644 --- 
a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ICachedSearchDetails.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ICachedSearchDetails.java @@ -31,5 +31,4 @@ public interface ICachedSearchDetails { * won't be reused in the future) */ void setCannotBeReused(); - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ISearchParamRegistry.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ISearchParamRegistry.java index f236ef682b4..3518530bbcf 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ISearchParamRegistry.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ISearchParamRegistry.java @@ -28,13 +28,13 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import org.hl7.fhir.instance.model.api.IAnyResource; import org.hl7.fhir.instance.model.api.IIdType; -import javax.annotation.Nullable; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.TreeSet; +import javax.annotation.Nullable; // TODO: JA remove default methods public interface ISearchParamRegistry { @@ -44,7 +44,6 @@ public interface ISearchParamRegistry { */ RuntimeSearchParam getActiveSearchParam(String theResourceName, String theParamName); - /** * @return Returns all active search params for the given resource */ @@ -53,14 +52,12 @@ public interface ISearchParamRegistry { /** * Request that the cache be refreshed now, in the current thread */ - default void forceRefresh() { - } + default void forceRefresh() {} /** * Request that the cache be refreshed at the next convenient time (in a different thread) */ - default void requestRefresh() { - } + default void requestRefresh() {} /** * When indexing a HumanName, if a StringEncoder is set in the context, then the "phonetic" search parameter will normalize @@ -68,16 +65,15 @@ public interface ISearchParamRegistry { * * @since 5.1.0 */ - default void setPhoneticEncoder(IPhoneticEncoder thePhoneticEncoder) { - } + default void setPhoneticEncoder(IPhoneticEncoder thePhoneticEncoder) {} default List getActiveComboSearchParams(String theResourceName) { return Collections.emptyList(); } - // TODO ND remove default implementation - default List getActiveComboSearchParams(String theResourceName, ComboSearchParamType theParamType) { + default List getActiveComboSearchParams( + String theResourceName, ComboSearchParamType theParamType) { return Collections.emptyList(); } @@ -130,7 +126,8 @@ public interface ISearchParamRegistry { availableSearchParamDef = getActiveSearchParam("Resource", theParamName); } if (availableSearchParamDef == null) { - throw new InvalidRequestException(Msg.code(1209) + "Unknown parameter name: " + theResourceType + ':' + theParamName); + throw new InvalidRequestException( + Msg.code(1209) + "Unknown parameter name: " + theResourceType + ':' + theParamName); } return availableSearchParamDef; } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ITestingUiClientFactory.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ITestingUiClientFactory.java index 518d44a1623..7692cd61767 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ITestingUiClientFactory.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ITestingUiClientFactory.java @@ -19,13 +19,13 @@ */ package ca.uhn.fhir.rest.server.util; -import javax.servlet.http.HttpServletRequest; - import 
ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.rest.client.api.IGenericClient; +import javax.servlet.http.HttpServletRequest; + /** - * This interface isn't used by hapi-fhir-base, but is used by the + * This interface isn't used by hapi-fhir-base, but is used by the * Web Testing UI */ public interface ITestingUiClientFactory { @@ -34,5 +34,4 @@ public interface ITestingUiClientFactory { * Instantiate a new client */ IGenericClient newClient(FhirContext theFhirContext, HttpServletRequest theRequest, String theServerBaseUrl); - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/JsonDateDeserializer.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/JsonDateDeserializer.java index cc0be2c96c3..e982179755d 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/JsonDateDeserializer.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/JsonDateDeserializer.java @@ -39,5 +39,4 @@ public class JsonDateDeserializer extends JsonDeserializer { } return null; } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/JsonDateSerializer.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/JsonDateSerializer.java index 4b1e176fb52..6155ca69ee0 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/JsonDateSerializer.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/JsonDateSerializer.java @@ -19,7 +19,6 @@ */ package ca.uhn.fhir.rest.server.util; -import ca.uhn.fhir.model.primitive.DateTimeDt; import ca.uhn.fhir.model.primitive.InstantDt; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.databind.JsonSerializer; @@ -36,5 +35,4 @@ public class JsonDateSerializer extends JsonSerializer { theGen.writeString(new InstantDt(theValue).getValueAsString()); } } - } diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/NarrativeUtil.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/NarrativeUtil.java index 2d0ebe38496..bb7b89c0516 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/NarrativeUtil.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/NarrativeUtil.java @@ -70,22 +70,18 @@ public class NarrativeUtil { public static XhtmlNode sanitize(XhtmlNode theNode) { String html = theNode.getValueAsString(); - PolicyFactory idPolicy = new HtmlPolicyBuilder() - .allowAttributes("id").globally() - .toFactory(); + PolicyFactory idPolicy = + new HtmlPolicyBuilder().allowAttributes("id").globally().toFactory(); PolicyFactory policy = Sanitizers.FORMATTING - .and(Sanitizers.BLOCKS) - .and(Sanitizers.TABLES) - .and(Sanitizers.STYLES) - .and(idPolicy); + .and(Sanitizers.BLOCKS) + .and(Sanitizers.TABLES) + .and(Sanitizers.STYLES) + .and(idPolicy); String safeHTML = policy.sanitize(html); XhtmlNode retVal = new XhtmlNode(); retVal.setValueAsString(safeHTML); return retVal; } - } - - diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ResourceSearchParams.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ResourceSearchParams.java index 6cfa13ee5ed..aa1221d2af8 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ResourceSearchParams.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ResourceSearchParams.java @@ -85,13 +85,15 @@ public class ResourceSearchParams { } public void removeInactive() { - myMap.entrySet().removeIf(entry -> entry.getValue().getStatus() != 
RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE); + myMap.entrySet() + .removeIf(entry -> + entry.getValue().getStatus() != RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE); } public Stream getReferenceSearchParamNames() { return myMap.entrySet().stream() - .filter(entry -> entry.getValue().getParamType() == RestSearchParameterTypeEnum.REFERENCE) - .map(Map.Entry::getKey); + .filter(entry -> entry.getValue().getParamType() == RestSearchParameterTypeEnum.REFERENCE) + .map(Map.Entry::getKey); } public ResourceSearchParams makeCopy() { diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ServletRequestUtil.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ServletRequestUtil.java index 55615960f59..44ce54a368a 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ServletRequestUtil.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ServletRequestUtil.java @@ -32,7 +32,8 @@ import java.util.List; import java.util.Map; public class ServletRequestUtil { - public static ServletSubRequestDetails getServletSubRequestDetails(ServletRequestDetails theRequestDetails, String url, ArrayListMultimap theParamValues) { + public static ServletSubRequestDetails getServletSubRequestDetails( + ServletRequestDetails theRequestDetails, String url, ArrayListMultimap theParamValues) { ServletSubRequestDetails requestDetails = new ServletSubRequestDetails(theRequestDetails); requestDetails.setServletRequest(theRequestDetails.getServletRequest()); requestDetails.setRequestType(RequestTypeEnum.GET); @@ -46,8 +47,11 @@ public class ServletRequestUtil { for (NameValuePair next : parameters) { theParamValues.put(next.getName(), next.getValue()); } - for (Map.Entry> nextParamEntry : theParamValues.asMap().entrySet()) { - String[] nextValue = nextParamEntry.getValue().toArray(new String[nextParamEntry.getValue().size()]); + for (Map.Entry> nextParamEntry : + theParamValues.asMap().entrySet()) { + String[] nextValue = nextParamEntry + .getValue() + .toArray(new String[nextParamEntry.getValue().size()]); requestDetails.addParameter(nextParamEntry.getKey(), nextValue); } url = url.substring(0, qIndex); @@ -66,7 +70,8 @@ public class ServletRequestUtil { public static String extractUrl(ServletRequestDetails theRequestDetails) { StringBuilder b = new StringBuilder(); - for (Map.Entry next : theRequestDetails.getParameters().entrySet()) { + for (Map.Entry next : + theRequestDetails.getParameters().entrySet()) { for (String nextValue : next.getValue()) { if (b.length() == 0) { b.append('?'); diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/subscription/SubscriptionConstants.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/subscription/SubscriptionConstants.java index 9a7ec34a7c5..43447f86f36 100644 --- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/subscription/SubscriptionConstants.java +++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/subscription/SubscriptionConstants.java @@ -21,38 +21,41 @@ package ca.uhn.fhir.subscription; public class SubscriptionConstants { - /** * The number of threads used in subscription channel processing */ public static final int MATCHING_CHANNEL_CONCURRENT_CONSUMERS = 5; + public static final int DELIVERY_CHANNEL_CONCURRENT_CONSUMERS = 2; /** * The maximum number of subscriptions that can be active at once */ - public static final int MAX_SUBSCRIPTION_RESULTS = 50000; /** * The size of the queue used for sending resources to the subscription matching processor and by each subscription delivery queue */ - 
public static final int DELIVERY_EXECUTOR_QUEUE_SIZE = 1000; + public static final String SUBSCRIPTION_STATUS = "Subscription.status"; public static final String SUBSCRIPTION_TYPE = "Subscription.channel.type"; // These STATUS codes are unchanged from DSTU2 Subscription onwards public static final String REQUESTED_STATUS = "requested"; public static final String ACTIVE_STATUS = "active"; public static final String ERROR_STATUS = "error"; - public static final String SUBSCRIPTION_TOPIC_PROFILE_URL = "http://hl7.org/fhir/uv/subscriptions-backport/StructureDefinition/backport-subscription"; - public static final String SUBSCRIPTION_TOPIC_FILTER_URL = "http://hl7.org/fhir/uv/subscriptions-backport/StructureDefinition/backport-filter-criteria"; - public static final String SUBSCRIPTION_TOPIC_CHANNEL_HEARTBEAT_PERIOD_URL = "http://hl7.org/fhir/uv/subscriptions-backport/StructureDefinition/backport-heartbeat-period"; - public static final String SUBSCRIPTION_TOPIC_CHANNEL_TIMEOUT_URL = "http://hl7.org/fhir/uv/subscriptions-backport/StructureDefinition/backport-timeout"; - public static final String SUBSCRIPTION_TOPIC_CHANNEL_MAX_COUNT = "http://hl7.org/fhir/uv/subscriptions-backport/StructureDefinition/backport-max-count"; - public static final String SUBSCRIPTION_TOPIC_CHANNEL_PAYLOAD_CONTENT = "http://hl7.org/fhir/uv/subscriptions-backport/StructureDefinition/backport-payload-content"; - public static final String SUBSCRIPTION_TOPIC_STATUS = "http://hl7.org/fhir/uv/subscriptions-backport/StructureDefinition/backport-subscription-status-r4"; - - - + public static final String SUBSCRIPTION_TOPIC_PROFILE_URL = + "http://hl7.org/fhir/uv/subscriptions-backport/StructureDefinition/backport-subscription"; + public static final String SUBSCRIPTION_TOPIC_FILTER_URL = + "http://hl7.org/fhir/uv/subscriptions-backport/StructureDefinition/backport-filter-criteria"; + public static final String SUBSCRIPTION_TOPIC_CHANNEL_HEARTBEAT_PERIOD_URL = + "http://hl7.org/fhir/uv/subscriptions-backport/StructureDefinition/backport-heartbeat-period"; + public static final String SUBSCRIPTION_TOPIC_CHANNEL_TIMEOUT_URL = + "http://hl7.org/fhir/uv/subscriptions-backport/StructureDefinition/backport-timeout"; + public static final String SUBSCRIPTION_TOPIC_CHANNEL_MAX_COUNT = + "http://hl7.org/fhir/uv/subscriptions-backport/StructureDefinition/backport-max-count"; + public static final String SUBSCRIPTION_TOPIC_CHANNEL_PAYLOAD_CONTENT = + "http://hl7.org/fhir/uv/subscriptions-backport/StructureDefinition/backport-payload-content"; + public static final String SUBSCRIPTION_TOPIC_STATUS = + "http://hl7.org/fhir/uv/subscriptions-backport/StructureDefinition/backport-subscription-status-r4"; } diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/src/main/java/ca/uhn/fhir/sl/cache/Cache.java b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/src/main/java/ca/uhn/fhir/sl/cache/Cache.java index 0394306f011..562a6c59994 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/src/main/java/ca/uhn/fhir/sl/cache/Cache.java +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/src/main/java/ca/uhn/fhir/sl/cache/Cache.java @@ -21,7 +21,6 @@ package ca.uhn.fhir.sl.cache; */ import java.util.Map; -import java.util.concurrent.ExecutionException; import java.util.function.Function; /** @@ -32,23 +31,23 @@ import java.util.function.Function; * Please check their documentation for information in the methods below. 
*/ public interface Cache { - V getIfPresent(K key); + V getIfPresent(K key); - V get(K key, Function mappingFunction); + V get(K key, Function mappingFunction); - Map getAllPresent(Iterable keys); + Map getAllPresent(Iterable keys); - void put(K key, V value); + void put(K key, V value); - void putAll(Map map); + void putAll(Map map); - void invalidate(K key); + void invalidate(K key); - void invalidateAll(Iterable keys); + void invalidateAll(Iterable keys); - void invalidateAll(); + void invalidateAll(); - long estimatedSize(); + long estimatedSize(); - void cleanUp(); + void cleanUp(); } diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/src/main/java/ca/uhn/fhir/sl/cache/CacheFactory.java b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/src/main/java/ca/uhn/fhir/sl/cache/CacheFactory.java index a462f3224ad..65c9f2bb1e2 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/src/main/java/ca/uhn/fhir/sl/cache/CacheFactory.java +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/src/main/java/ca/uhn/fhir/sl/cache/CacheFactory.java @@ -37,7 +37,9 @@ public class CacheFactory { if (iterator.hasNext()) { return iterator.next(); } - throw new RuntimeException(Msg.code(2200) + "No Cache Service Providers found. Choose between hapi-fhir-caching-caffeine (Default) and hapi-fhir-caching-guava (Android)"); + throw new RuntimeException( + Msg.code(2200) + + "No Cache Service Providers found. Choose between hapi-fhir-caching-caffeine (Default) and hapi-fhir-caching-guava (Android)"); } public static Cache build(long theTimeoutMillis) { @@ -45,17 +47,18 @@ public class CacheFactory { return cacheProvider.create(theTimeoutMillis); } - public static LoadingCache build(long theTimeoutMillis, CacheLoader theCacheLoader) { + public static LoadingCache build(long theTimeoutMillis, CacheLoader theCacheLoader) { CacheProvider cacheProvider = getCacheProvider(); return cacheProvider.create(theTimeoutMillis, theCacheLoader); } - public static Cache build(long theTimeoutMillis, long theMaximumSize) { + public static Cache build(long theTimeoutMillis, long theMaximumSize) { CacheProvider cacheProvider = getCacheProvider(); return cacheProvider.create(theTimeoutMillis, theMaximumSize); } - public static LoadingCache build(long theTimeoutMillis, long theMaximumSize, CacheLoader cacheLoader) { + public static LoadingCache build( + long theTimeoutMillis, long theMaximumSize, CacheLoader cacheLoader) { CacheProvider cacheProvider = getCacheProvider(); return cacheProvider.create(theTimeoutMillis, theMaximumSize, cacheLoader); } diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/src/main/java/ca/uhn/fhir/sl/cache/CacheLoader.java b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/src/main/java/ca/uhn/fhir/sl/cache/CacheLoader.java index 26583cd12c5..906d98cc495 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/src/main/java/ca/uhn/fhir/sl/cache/CacheLoader.java +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/src/main/java/ca/uhn/fhir/sl/cache/CacheLoader.java @@ -21,5 +21,5 @@ package ca.uhn.fhir.sl.cache; */ public interface CacheLoader { - V load(K var1) throws Exception; + V load(K var1) throws Exception; } diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/src/main/java/ca/uhn/fhir/sl/cache/CacheProvider.java b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/src/main/java/ca/uhn/fhir/sl/cache/CacheProvider.java index fc3af9f0dae..4a9b47f44e4 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/src/main/java/ca/uhn/fhir/sl/cache/CacheProvider.java +++ 
b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/src/main/java/ca/uhn/fhir/sl/cache/CacheProvider.java @@ -20,12 +20,12 @@ package ca.uhn.fhir.sl.cache; * #L% */ -public interface CacheProvider { - Cache create(long timeoutMillis); +public interface CacheProvider { + Cache create(long timeoutMillis); - Cache create(long timeoutMillis, long maximumSize); + Cache create(long timeoutMillis, long maximumSize); - LoadingCache create(long timeoutMillis, CacheLoader cacheLoader); + LoadingCache create(long timeoutMillis, CacheLoader cacheLoader); - LoadingCache create(long timeoutMillis, long maximumSize, CacheLoader cacheLoader); + LoadingCache create(long timeoutMillis, long maximumSize, CacheLoader cacheLoader); } diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/src/main/java/ca/uhn/fhir/sl/cache/LoadingCache.java b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/src/main/java/ca/uhn/fhir/sl/cache/LoadingCache.java index 97c6adfd38b..68932c42870 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/src/main/java/ca/uhn/fhir/sl/cache/LoadingCache.java +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/src/main/java/ca/uhn/fhir/sl/cache/LoadingCache.java @@ -21,7 +21,6 @@ package ca.uhn.fhir.sl.cache; */ import java.util.Map; -import java.util.concurrent.CompletableFuture; /** * This interface is a blend between @@ -31,9 +30,9 @@ import java.util.concurrent.CompletableFuture; * Please check their documentation for information in the methods below. */ public interface LoadingCache extends Cache { - V get(K key); + V get(K key); - Map getAll(Iterable keys); + Map getAll(Iterable keys); - void refresh(K key); + void refresh(K key); } diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/src/main/java/ca/uhn/fhir/sl/cache/caffeine/CacheDelegator.java b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/src/main/java/ca/uhn/fhir/sl/cache/caffeine/CacheDelegator.java index 6cad9a4f26a..2ff599546ae 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/src/main/java/ca/uhn/fhir/sl/cache/caffeine/CacheDelegator.java +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/src/main/java/ca/uhn/fhir/sl/cache/caffeine/CacheDelegator.java @@ -42,7 +42,9 @@ public class CacheDelegator implements ca.uhn.fhir.sl.cache.Cache { } @Override - public Map getAllPresent(Iterable keys) { return cache.getAllPresent(keys); } + public Map getAllPresent(Iterable keys) { + return cache.getAllPresent(keys); + } @Override public void put(K key, V value) { @@ -55,10 +57,12 @@ public class CacheDelegator implements ca.uhn.fhir.sl.cache.Cache { } @Override - public void invalidate(K key) { cache.invalidate(key); } + public void invalidate(K key) { + cache.invalidate(key); + } @Override - public void invalidateAll(Iterable keys) { + public void invalidateAll(Iterable keys) { cache.invalidateAll(keys); } @@ -73,7 +77,7 @@ public class CacheDelegator implements ca.uhn.fhir.sl.cache.Cache { } @Override - public void cleanUp(){ + public void cleanUp() { cache.cleanUp(); } } diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/src/main/java/ca/uhn/fhir/sl/cache/caffeine/CacheProvider.java b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/src/main/java/ca/uhn/fhir/sl/cache/caffeine/CacheProvider.java index ef6d4d9eb14..e3f9d232cb7 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/src/main/java/ca/uhn/fhir/sl/cache/caffeine/CacheProvider.java +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/src/main/java/ca/uhn/fhir/sl/cache/caffeine/CacheProvider.java @@ 
-20,47 +20,38 @@ package ca.uhn.fhir.sl.cache.caffeine; * #L% */ -import java.util.concurrent.TimeUnit; - -import com.github.benmanes.caffeine.cache.Caffeine; import ca.uhn.fhir.sl.cache.Cache; import ca.uhn.fhir.sl.cache.CacheLoader; import ca.uhn.fhir.sl.cache.LoadingCache; +import com.github.benmanes.caffeine.cache.Caffeine; -public class CacheProvider implements ca.uhn.fhir.sl.cache.CacheProvider { +import java.util.concurrent.TimeUnit; - public Cache create(long timeoutMillis) { - return new CacheDelegator( - Caffeine.newBuilder() +public class CacheProvider implements ca.uhn.fhir.sl.cache.CacheProvider { + + public Cache create(long timeoutMillis) { + return new CacheDelegator(Caffeine.newBuilder() .expireAfterWrite(timeoutMillis, TimeUnit.MILLISECONDS) - .build() - ); - } - - public LoadingCache create(long timeoutMillis, CacheLoader loading) { - return new LoadingCacheDelegator( - Caffeine.newBuilder() - .expireAfterWrite(timeoutMillis, TimeUnit.MILLISECONDS) - .build(loading::load) - ); + .build()); } - public Cache create(long timeoutMillis, long maximumSize) { - return new CacheDelegator( - Caffeine.newBuilder() + public LoadingCache create(long timeoutMillis, CacheLoader loading) { + return new LoadingCacheDelegator(Caffeine.newBuilder() + .expireAfterWrite(timeoutMillis, TimeUnit.MILLISECONDS) + .build(loading::load)); + } + + public Cache create(long timeoutMillis, long maximumSize) { + return new CacheDelegator(Caffeine.newBuilder() .expireAfterWrite(timeoutMillis, TimeUnit.MILLISECONDS) .maximumSize(maximumSize) - .build() - ); + .build()); } - public LoadingCache create(long timeoutMillis, long maximumSize, CacheLoader loading) { - return new LoadingCacheDelegator( - Caffeine.newBuilder() + public LoadingCache create(long timeoutMillis, long maximumSize, CacheLoader loading) { + return new LoadingCacheDelegator(Caffeine.newBuilder() .expireAfterWrite(timeoutMillis, TimeUnit.MILLISECONDS) .maximumSize(maximumSize) - .build(loading::load) - ); + .build(loading::load)); } - } diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/src/main/java/ca/uhn/fhir/sl/cache/caffeine/LoadingCacheDelegator.java b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/src/main/java/ca/uhn/fhir/sl/cache/caffeine/LoadingCacheDelegator.java index 68035a3275e..5af70b19c58 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/src/main/java/ca/uhn/fhir/sl/cache/caffeine/LoadingCacheDelegator.java +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/src/main/java/ca/uhn/fhir/sl/cache/caffeine/LoadingCacheDelegator.java @@ -20,11 +20,11 @@ package ca.uhn.fhir.sl.cache.caffeine; * #L% */ -import java.util.Map; - import ca.uhn.fhir.sl.cache.LoadingCache; -public class LoadingCacheDelegator extends CacheDelegator implements LoadingCache { +import java.util.Map; + +public class LoadingCacheDelegator extends CacheDelegator implements LoadingCache { public LoadingCacheDelegator(com.github.benmanes.caffeine.cache.LoadingCache impl) { super(impl); @@ -45,5 +45,7 @@ public class LoadingCacheDelegator extends CacheDelegator implements } @Override - public void refresh(K key) { getCache().refresh(key); } + public void refresh(K key) { + getCache().refresh(key); + } } diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/src/main/java/ca/uhn/fhir/sl/cache/guava/CacheDelegator.java b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/src/main/java/ca/uhn/fhir/sl/cache/guava/CacheDelegator.java index 5c5841c95c6..7f023101c12 100644 --- 
a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/src/main/java/ca/uhn/fhir/sl/cache/guava/CacheDelegator.java +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/src/main/java/ca/uhn/fhir/sl/cache/guava/CacheDelegator.java @@ -20,14 +20,14 @@ package ca.uhn.fhir.sl.cache.guava; * #L% */ -import java.util.Map; -import java.util.concurrent.ExecutionException; -import java.util.function.Function; - import ca.uhn.fhir.i18n.Msg; import com.google.common.cache.CacheLoader; import com.google.common.util.concurrent.UncheckedExecutionException; +import java.util.Map; +import java.util.concurrent.ExecutionException; +import java.util.function.Function; + public class CacheDelegator implements ca.uhn.fhir.sl.cache.Cache { com.google.common.cache.Cache cache; @@ -50,7 +50,7 @@ public class CacheDelegator implements ca.uhn.fhir.sl.cache.Cache { } catch (UncheckedExecutionException e) { if (e.getCause() instanceof RuntimeException) { // Unwrap exception to match Caffeine - throw (RuntimeException)e.getCause(); + throw (RuntimeException) e.getCause(); } throw e; } catch (CacheLoader.InvalidCacheLoadException e) { @@ -61,7 +61,9 @@ public class CacheDelegator implements ca.uhn.fhir.sl.cache.Cache { } @Override - public Map getAllPresent(Iterable keys) { return cache.getAllPresent(keys); } + public Map getAllPresent(Iterable keys) { + return cache.getAllPresent(keys); + } @Override public void put(K key, V value) { @@ -74,10 +76,12 @@ public class CacheDelegator implements ca.uhn.fhir.sl.cache.Cache { } @Override - public void invalidate(K key) { cache.invalidate(key); } + public void invalidate(K key) { + cache.invalidate(key); + } @Override - public void invalidateAll(Iterable keys) { + public void invalidateAll(Iterable keys) { cache.invalidateAll(keys); } @@ -92,7 +96,7 @@ public class CacheDelegator implements ca.uhn.fhir.sl.cache.Cache { } @Override - public void cleanUp(){ + public void cleanUp() { cache.cleanUp(); } } diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/src/main/java/ca/uhn/fhir/sl/cache/guava/CacheProvider.java b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/src/main/java/ca/uhn/fhir/sl/cache/guava/CacheProvider.java index 7cfd03352b7..b346e07517c 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/src/main/java/ca/uhn/fhir/sl/cache/guava/CacheProvider.java +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/src/main/java/ca/uhn/fhir/sl/cache/guava/CacheProvider.java @@ -20,48 +20,41 @@ package ca.uhn.fhir.sl.cache.guava; * #L% */ -import java.util.concurrent.TimeUnit; - -import com.google.common.cache.CacheBuilder; import ca.uhn.fhir.sl.cache.Cache; import ca.uhn.fhir.sl.cache.CacheLoader; import ca.uhn.fhir.sl.cache.LoadingCache; +import com.google.common.cache.CacheBuilder; -public class CacheProvider implements ca.uhn.fhir.sl.cache.CacheProvider { +import java.util.concurrent.TimeUnit; - public Cache create(long timeoutMillis) { - return new CacheDelegator( - CacheBuilder.newBuilder() +public class CacheProvider implements ca.uhn.fhir.sl.cache.CacheProvider { + + public Cache create(long timeoutMillis) { + return new CacheDelegator(CacheBuilder.newBuilder() .expireAfterWrite(timeoutMillis, TimeUnit.MILLISECONDS) - .build() - ); - } + .build()); + } - public LoadingCache create(long timeoutMillis, CacheLoader loading) { - return new LoadingCacheDelegator( - CacheBuilder.newBuilder() + public LoadingCache create(long timeoutMillis, CacheLoader loading) { + return new LoadingCacheDelegator(CacheBuilder.newBuilder() .expireAfterWrite(timeoutMillis, 
TimeUnit.MILLISECONDS) .build(new com.google.common.cache.CacheLoader<>() { @Override public V load(K k) throws Exception { return loading.load(k); } - }) - ); + })); } - public Cache create(long timeoutMillis, long maximumSize) { - return new CacheDelegator( - CacheBuilder.newBuilder() + public Cache create(long timeoutMillis, long maximumSize) { + return new CacheDelegator(CacheBuilder.newBuilder() .expireAfterWrite(timeoutMillis, TimeUnit.MILLISECONDS) .maximumSize(maximumSize) - .build() - ); + .build()); } - public LoadingCache create(long timeoutMillis, long maximumSize, CacheLoader loading) { - return new LoadingCacheDelegator( - CacheBuilder.newBuilder() + public LoadingCache create(long timeoutMillis, long maximumSize, CacheLoader loading) { + return new LoadingCacheDelegator(CacheBuilder.newBuilder() .expireAfterWrite(timeoutMillis, TimeUnit.MILLISECONDS) .maximumSize(maximumSize) .build(new com.google.common.cache.CacheLoader<>() { @@ -69,8 +62,6 @@ public class CacheProvider implements ca.uhn.fhir.sl.cache.CacheProvider extends CacheDelegator implements LoadingCache { -public class LoadingCacheDelegator extends CacheDelegator implements LoadingCache { - - public LoadingCacheDelegator(com.google.common.cache.LoadingCache impl) { super(impl); } + public LoadingCacheDelegator(com.google.common.cache.LoadingCache impl) { + super(impl); + } public com.google.common.cache.LoadingCache getCache() { return (com.google.common.cache.LoadingCache) cache; @@ -43,7 +45,7 @@ public class LoadingCacheDelegator extends CacheDelegator implements } catch (UncheckedExecutionException e) { if (e.getCause() instanceof RuntimeException) { // Unwrap exception to match Caffeine - throw (RuntimeException)e.getCause(); + throw (RuntimeException) e.getCause(); } throw e; } catch (ExecutionException e) { @@ -62,7 +64,7 @@ public class LoadingCacheDelegator extends CacheDelegator implements } catch (UncheckedExecutionException e) { if (e.getCause() instanceof RuntimeException) { // Unwrap exception to match Caffeine - throw (RuntimeException)e.getCause(); + throw (RuntimeException) e.getCause(); } throw e; } catch (ExecutionException e) { @@ -75,5 +77,7 @@ public class LoadingCacheDelegator extends CacheDelegator implements } @Override - public void refresh(K key) { getCache().refresh(key); } + public void refresh(K key) { + getCache().refresh(key); + } } diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/src/main/java/ca/uhn/fhir/spring/boot/autoconfigure/FhirAutoConfiguration.java b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/src/main/java/ca/uhn/fhir/spring/boot/autoconfigure/FhirAutoConfiguration.java index 3332d84d26e..2e524810521 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/src/main/java/ca/uhn/fhir/spring/boot/autoconfigure/FhirAutoConfiguration.java +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/src/main/java/ca/uhn/fhir/spring/boot/autoconfigure/FhirAutoConfiguration.java @@ -19,7 +19,6 @@ */ package ca.uhn.fhir.spring.boot.autoconfigure; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jaxrs.server.AbstractJaxRsProvider; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; @@ -74,11 +73,11 @@ import org.springframework.orm.jpa.JpaTransactionManager; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.util.CollectionUtils; +import java.util.List; +import java.util.concurrent.ScheduledExecutorService; import javax.persistence.EntityManagerFactory; import 
javax.servlet.ServletException; import javax.sql.DataSource; -import java.util.List; -import java.util.concurrent.ScheduledExecutorService; /** * {@link EnableAutoConfiguration Auto-configuration} for HAPI FHIR. @@ -90,7 +89,6 @@ import java.util.concurrent.ScheduledExecutorService; @EnableConfigurationProperties(FhirProperties.class) public class FhirAutoConfiguration { - private final FhirProperties properties; public FhirAutoConfiguration(FhirProperties properties) { @@ -104,7 +102,6 @@ public class FhirAutoConfiguration { return fhirContext; } - @Configuration @ConditionalOnClass(AbstractJaxRsProvider.class) @EnableConfigurationProperties(FhirProperties.class) @@ -123,12 +120,12 @@ public class FhirAutoConfiguration { private final List customizers; public FhirRestfulServerConfiguration( - FhirProperties properties, - FhirContext fhirContext, - ObjectProvider> resourceProviders, - ObjectProvider pagingProvider, - ObjectProvider> interceptors, - ObjectProvider> customizers) { + FhirProperties properties, + FhirContext fhirContext, + ObjectProvider> resourceProviders, + ObjectProvider pagingProvider, + ObjectProvider> interceptors, + ObjectProvider> customizers) { this.properties = properties; this.fhirContext = fhirContext; this.resourceProviders = resourceProviders.getIfAvailable(); @@ -147,7 +144,8 @@ public class FhirAutoConfiguration { @Bean public ServletRegistrationBean fhirServerRegistrationBean() { - ServletRegistrationBean registration = new ServletRegistrationBean(this, this.properties.getServer().getPath()); + ServletRegistrationBean registration = new ServletRegistrationBean( + this, this.properties.getServer().getPath()); registration.setLoadOnStartup(1); return registration; } @@ -160,7 +158,8 @@ public class FhirAutoConfiguration { setResourceProviders(this.resourceProviders); setPagingProvider(this.pagingProvider); - setServerAddressStrategy(new HardcodedServerAddressStrategy(this.properties.getServer().getPath())); + setServerAddressStrategy(new HardcodedServerAddressStrategy( + this.properties.getServer().getPath())); customize(); } @@ -206,8 +205,6 @@ public class FhirAutoConfiguration { public PartitionSettings partitionSettings() { return new PartitionSettings(); } - - } @Configuration @@ -231,22 +228,19 @@ public class FhirAutoConfiguration { @Import({JpaDstu3Config.class, HapiJpaConfig.class}) @ConditionalOnMissingBean(type = "ca.uhn.fhir.jpa.config.JpaConfig") @ConditionalOnProperty(name = "hapi.fhir.version", havingValue = "DSTU3") - static class Dstu3 { - } + static class Dstu3 {} @Configuration @Import({JpaDstu2Config.class, HapiJpaConfig.class}) @ConditionalOnMissingBean(type = "ca.uhn.fhir.jpa.config.JpaConfig") @ConditionalOnProperty(name = "hapi.fhir.version", havingValue = "DSTU2") - static class Dstu2 { - } + static class Dstu2 {} @Configuration @Import({JpaR4Config.class, HapiJpaConfig.class}) @ConditionalOnMissingBean(type = "ca.uhn.fhir.jpa.config.JpaConfig") @ConditionalOnProperty(name = "hapi.fhir.version", havingValue = "R4") - static class R4 { - } + static class R4 {} } @Configuration @@ -270,12 +264,13 @@ public class FhirAutoConfiguration { static class SchemaAvailableCondition extends ResourceCondition { SchemaAvailableCondition() { - super("ValidationSchema", - "hapi.fhir.validation", - "schema-location", - "classpath:/org/hl7/fhir/instance/model/schema", - "classpath:/org/hl7/fhir/dstu2016may/model/schema", - "classpath:/org/hl7/fhir/dstu3/model/schema"); + super( + "ValidationSchema", + "hapi.fhir.validation", + "schema-location", + 
"classpath:/org/hl7/fhir/instance/model/schema", + "classpath:/org/hl7/fhir/dstu2016may/model/schema", + "classpath:/org/hl7/fhir/dstu3/model/schema"); } } } @@ -289,7 +284,8 @@ public class FhirAutoConfiguration { private final List clientInterceptors; - public FhirRestfulClientConfiguration(FhirProperties properties, ObjectProvider> clientInterceptors) { + public FhirRestfulClientConfiguration( + FhirProperties properties, ObjectProvider> clientInterceptors) { this.properties = properties; this.clientInterceptors = clientInterceptors.getIfAvailable(); } @@ -297,7 +293,8 @@ public class FhirAutoConfiguration { @Bean @ConditionalOnBean(IRestfulClientFactory.class) public IGenericClient fhirClient(final IRestfulClientFactory clientFactory) { - IGenericClient fhirClient = clientFactory.newGenericClient(this.properties.getServer().getUrl()); + IGenericClient fhirClient = + clientFactory.newGenericClient(this.properties.getServer().getUrl()); if (!CollectionUtils.isEmpty(this.clientInterceptors)) { for (IClientInterceptor interceptor : this.clientInterceptors) { fhirClient.registerInterceptor(interceptor); @@ -345,5 +342,4 @@ public class FhirAutoConfiguration { } } } - } diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/src/main/java/ca/uhn/fhir/spring/boot/autoconfigure/FhirProperties.java b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/src/main/java/ca/uhn/fhir/spring/boot/autoconfigure/FhirProperties.java index f949fa89a41..5c0aeb6d0a9 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/src/main/java/ca/uhn/fhir/spring/boot/autoconfigure/FhirProperties.java +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/src/main/java/ca/uhn/fhir/spring/boot/autoconfigure/FhirProperties.java @@ -19,87 +19,85 @@ */ package ca.uhn.fhir.spring.boot.autoconfigure; - import ca.uhn.fhir.context.FhirVersionEnum; - import org.springframework.boot.context.properties.ConfigurationProperties; @ConfigurationProperties(prefix = "hapi.fhir") public class FhirProperties { - private FhirVersionEnum version = FhirVersionEnum.DSTU2; + private FhirVersionEnum version = FhirVersionEnum.DSTU2; - private Server server = new Server(); + private Server server = new Server(); - private Validation validation = new Validation(); + private Validation validation = new Validation(); - public FhirVersionEnum getVersion() { - return version; - } + public FhirVersionEnum getVersion() { + return version; + } - public void setVersion(FhirVersionEnum version) { - this.version = version; - } + public void setVersion(FhirVersionEnum version) { + this.version = version; + } - public Server getServer() { - return server; - } + public Server getServer() { + return server; + } - public void setServer(Server server) { - this.server = server; - } + public void setServer(Server server) { + this.server = server; + } - public Validation getValidation() { - return validation; - } + public Validation getValidation() { + return validation; + } - public void setValidation(Validation validation) { - this.validation = validation; - } + public void setValidation(Validation validation) { + this.validation = validation; + } - public static class Server { + public static class Server { - private String url; + private String url; - private String path = "/fhir/*"; + private String path = "/fhir/*"; - public String getUrl() { - return url; - } + public String getUrl() { + return url; + } - public void setUrl(String url) { - this.url = url; - } + public void setUrl(String url) { + this.url = url; + 
} - public String getPath() { - return path; - } + public String getPath() { + return path; + } - public void setPath(String path) { - this.path = path; - } - } + public void setPath(String path) { + this.path = path; + } + } - public static class Validation { + public static class Validation { - private boolean enabled = true; + private boolean enabled = true; - private boolean requestOnly = true; + private boolean requestOnly = true; - public boolean isEnabled() { - return enabled; - } + public boolean isEnabled() { + return enabled; + } - public void setEnabled(boolean enabled) { - this.enabled = enabled; - } + public void setEnabled(boolean enabled) { + this.enabled = enabled; + } - public boolean isRequestOnly() { - return requestOnly; - } + public boolean isRequestOnly() { + return requestOnly; + } - public void setRequestOnly(boolean requestOnly) { - this.requestOnly = requestOnly; - } - } + public void setRequestOnly(boolean requestOnly) { + this.requestOnly = requestOnly; + } + } } diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/src/main/java/ca/uhn/fhir/spring/boot/autoconfigure/FhirRestfulServerCustomizer.java b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/src/main/java/ca/uhn/fhir/spring/boot/autoconfigure/FhirRestfulServerCustomizer.java index a218b75b1e2..e3623679dae 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/src/main/java/ca/uhn/fhir/spring/boot/autoconfigure/FhirRestfulServerCustomizer.java +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/src/main/java/ca/uhn/fhir/spring/boot/autoconfigure/FhirRestfulServerCustomizer.java @@ -24,9 +24,9 @@ import ca.uhn.fhir.rest.server.RestfulServer; @FunctionalInterface public interface FhirRestfulServerCustomizer { - /** - * Customize the server. - * @param server the server to customize - */ - void customize(RestfulServer server); + /** + * Customize the server. + * @param server the server to customize + */ + void customize(RestfulServer server); } diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/src/main/java/sample/fhir/client/SampleApacheRestfulClientApplication.java b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/src/main/java/sample/fhir/client/SampleApacheRestfulClientApplication.java index 31003ec4b87..66b85ae7c16 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/src/main/java/sample/fhir/client/SampleApacheRestfulClientApplication.java +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/src/main/java/sample/fhir/client/SampleApacheRestfulClientApplication.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -22,7 +22,6 @@ package sample.fhir.client; import ca.uhn.fhir.rest.client.api.IGenericClient; import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor; import org.hl7.fhir.dstu3.model.CapabilityStatement; - import org.springframework.boot.CommandLineRunner; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; @@ -31,26 +30,23 @@ import org.springframework.context.annotation.Bean; @SpringBootApplication public class SampleApacheRestfulClientApplication { - public static void main(String[] args) { - SpringApplication.run(SampleApacheRestfulClientApplication.class, args); - } + public static void main(String[] args) { + SpringApplication.run(SampleApacheRestfulClientApplication.class, args); + } - @Bean - public LoggingInterceptor loggingInterceptor() { - return new LoggingInterceptor(true); - } + @Bean + public LoggingInterceptor loggingInterceptor() { + return new LoggingInterceptor(true); + } - @Bean - public CommandLineRunner runner(final IGenericClient fhirClient) { - return new CommandLineRunner() { - - @Override - public void run(String... args) throws Exception { - fhirClient.capabilities() - .ofType(CapabilityStatement.class) - .execute(); - } - }; - } + @Bean + public CommandLineRunner runner(final IGenericClient fhirClient) { + return new CommandLineRunner() { + @Override + public void run(String... args) throws Exception { + fhirClient.capabilities().ofType(CapabilityStatement.class).execute(); + } + }; + } } diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/src/main/java/sample/fhir/client/SampleOkHttpRestfulClientApplication.java b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/src/main/java/sample/fhir/client/SampleOkHttpRestfulClientApplication.java index 6451cf4115f..abc015d3351 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/src/main/java/sample/fhir/client/SampleOkHttpRestfulClientApplication.java +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/src/main/java/sample/fhir/client/SampleOkHttpRestfulClientApplication.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -22,7 +22,6 @@ package sample.fhir.client; import ca.uhn.fhir.rest.client.api.IGenericClient; import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor; import org.hl7.fhir.dstu3.model.CapabilityStatement; - import org.springframework.boot.CommandLineRunner; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; @@ -31,26 +30,23 @@ import org.springframework.context.annotation.Bean; @SpringBootApplication public class SampleOkHttpRestfulClientApplication { - public static void main(String[] args) { - SpringApplication.run(SampleOkHttpRestfulClientApplication.class, args); - } + public static void main(String[] args) { + SpringApplication.run(SampleOkHttpRestfulClientApplication.class, args); + } - @Bean - public LoggingInterceptor loggingInterceptor() { - return new LoggingInterceptor(true); - } + @Bean + public LoggingInterceptor loggingInterceptor() { + return new LoggingInterceptor(true); + } - @Bean - public CommandLineRunner runner(final IGenericClient fhirClient) { - return new CommandLineRunner() { - - @Override - public void run(String... args) throws Exception { - fhirClient.capabilities() - .ofType(CapabilityStatement.class) - .execute(); - } - }; - } + @Bean + public CommandLineRunner runner(final IGenericClient fhirClient) { + return new CommandLineRunner() { + @Override + public void run(String... args) throws Exception { + fhirClient.capabilities().ofType(CapabilityStatement.class).execute(); + } + }; + } } diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/src/main/java/sample/fhir/server/jersey/SampleJerseyRestfulServerApplication.java b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/src/main/java/sample/fhir/server/jersey/SampleJerseyRestfulServerApplication.java index 7754fa49fc4..35d0c6cfd8e 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/src/main/java/sample/fhir/server/jersey/SampleJerseyRestfulServerApplication.java +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/src/main/java/sample/fhir/server/jersey/SampleJerseyRestfulServerApplication.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,7 +20,6 @@ package sample.fhir.server.jersey; import ca.uhn.fhir.rest.server.interceptor.LoggingInterceptor; - import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.context.annotation.Bean; @@ -28,12 +27,12 @@ import org.springframework.context.annotation.Bean; @SpringBootApplication public class SampleJerseyRestfulServerApplication { - public static void main(String[] args) { - SpringApplication.run(SampleJerseyRestfulServerApplication.class, args); - } + public static void main(String[] args) { + SpringApplication.run(SampleJerseyRestfulServerApplication.class, args); + } - @Bean - public LoggingInterceptor loggingInterceptor() { - return new LoggingInterceptor(); - } + @Bean + public LoggingInterceptor loggingInterceptor() { + return new LoggingInterceptor(); + } } diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/src/main/java/sample/fhir/server/jersey/provider/PatientResourceProvider.java b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/src/main/java/sample/fhir/server/jersey/provider/PatientResourceProvider.java index 190c0220927..f5866d7e615 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/src/main/java/sample/fhir/server/jersey/provider/PatientResourceProvider.java +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/src/main/java/sample/fhir/server/jersey/provider/PatientResourceProvider.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -19,10 +19,8 @@ */ package sample.fhir.server.jersey.provider; -import ca.uhn.fhir.i18n.Msg; -import java.util.concurrent.ConcurrentHashMap; - import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jaxrs.server.AbstractJaxRsResourceProvider; import ca.uhn.fhir.rest.annotation.Create; import ca.uhn.fhir.rest.annotation.IdParam; @@ -33,61 +31,61 @@ import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import org.hl7.fhir.dstu3.model.HumanName; import org.hl7.fhir.dstu3.model.IdType; import org.hl7.fhir.dstu3.model.Patient; - import org.springframework.stereotype.Component; +import java.util.concurrent.ConcurrentHashMap; + @Component public class PatientResourceProvider extends AbstractJaxRsResourceProvider { - private static Long counter = 1L; + private static Long counter = 1L; - private static final ConcurrentHashMap patients = new ConcurrentHashMap<>(); + private static final ConcurrentHashMap patients = new ConcurrentHashMap<>(); - static { - patients.put(String.valueOf(counter), createPatient("Van Houte")); - patients.put(String.valueOf(counter), createPatient("Agnew")); - for (int i = 0; i < 20; i++) { - patients.put(String.valueOf(counter), createPatient("Random Patient " + counter)); - } - } + static { + patients.put(String.valueOf(counter), createPatient("Van Houte")); + patients.put(String.valueOf(counter), createPatient("Agnew")); + for (int i = 0; i < 20; i++) { + patients.put(String.valueOf(counter), createPatient("Random Patient " + counter)); + } + } - public PatientResourceProvider(FhirContext fhirContext) { - super(fhirContext); - } + public PatientResourceProvider(FhirContext fhirContext) { + super(fhirContext); + } - @Read - public Patient find(@IdParam final IdType theId) { - if (patients.containsKey(theId.getIdPart())) { - return patients.get(theId.getIdPart()); - } else { - throw new ResourceNotFoundException(Msg.code(2005) + theId); - } - } + @Read + public Patient find(@IdParam final IdType theId) { + if (patients.containsKey(theId.getIdPart())) { + return patients.get(theId.getIdPart()); + } else { + throw new ResourceNotFoundException(Msg.code(2005) + theId); + } + } - @Create - public MethodOutcome createPatient(@ResourceParam Patient patient) { + @Create + public MethodOutcome createPatient(@ResourceParam Patient patient) { - patient.setId(createId(counter, 1L)); - patients.put(String.valueOf(counter), patient); + patient.setId(createId(counter, 1L)); + patients.put(String.valueOf(counter), patient); - return new MethodOutcome(patient.getIdElement()); - } + return new MethodOutcome(patient.getIdElement()); + } - @Override - public Class getResourceType() { - return Patient.class; - } + @Override + public Class getResourceType() { + return Patient.class; + } - private static IdType createId(final Long id, final Long theVersionId) { - return new IdType("Patient", "" + id, "" + theVersionId); - } - - private static Patient createPatient(final String name) { - final Patient patient = new Patient(); - patient.getName().add(new HumanName().setFamily(name)); - patient.setId(createId(counter, 1L)); - counter++; - return patient; - } + private static IdType createId(final Long id, final Long theVersionId) { + return new IdType("Patient", "" + id, "" + theVersionId); + } + private static Patient createPatient(final String name) { + final Patient patient = new Patient(); + patient.getName().add(new HumanName().setFamily(name)); + patient.setId(createId(counter, 1L)); + counter++; + return patient; + } } diff --git 
a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/DriverTypeEnum.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/DriverTypeEnum.java index 219eba967d7..0c49ad0ab9b 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/DriverTypeEnum.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/DriverTypeEnum.java @@ -32,14 +32,13 @@ import org.springframework.jdbc.datasource.DataSourceTransactionManager; import org.springframework.transaction.TransactionDefinition; import org.springframework.transaction.support.TransactionTemplate; -import javax.annotation.Nonnull; -import javax.sql.DataSource; import java.lang.reflect.InvocationTargetException; import java.sql.Connection; import java.sql.SQLException; +import javax.annotation.Nonnull; +import javax.sql.DataSource; public enum DriverTypeEnum { - H2_EMBEDDED("org.h2.Driver", false), DERBY_EMBEDDED("org.apache.derby.jdbc.EmbeddedDriver", true), MARIADB_10_1("org.mariadb.jdbc.Driver", false), @@ -54,7 +53,6 @@ public enum DriverTypeEnum { MSSQL_2012("com.microsoft.sqlserver.jdbc.SQLServerDriver", false), COCKROACHDB_21_1("org.postgresql.Driver", false), - ; private static final Logger ourLog = LoggerFactory.getLogger(DriverTypeEnum.class); @@ -108,7 +106,8 @@ public enum DriverTypeEnum { retval = "cockroachdb201.sql"; break; default: - throw new ConfigurationException(Msg.code(45) + "No schema initialization script available for driver " + this); + throw new ConfigurationException( + Msg.code(45) + "No schema initialization script available for driver " + this); } return retval; } @@ -137,7 +136,11 @@ public enum DriverTypeEnum { public ConnectionProperties newConnectionProperties(DataSource theDataSource) { try { Class.forName(myDriverClassName).getConstructor().newInstance(); - } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e) { + } catch (ClassNotFoundException + | InstantiationException + | IllegalAccessException + | NoSuchMethodException + | InvocationTargetException e) { throw new InternalErrorException(Msg.code(46) + "Unable to find driver class: " + myDriverClassName, e); } @@ -162,7 +165,8 @@ public enum DriverTypeEnum { /** * Constructor */ - public ConnectionProperties(DataSource theDataSource, TransactionTemplate theTxTemplate, DriverTypeEnum theDriverType) { + public ConnectionProperties( + DataSource theDataSource, TransactionTemplate theTxTemplate, DriverTypeEnum theDriverType) { Validate.notNull(theDataSource); Validate.notNull(theTxTemplate); Validate.notNull(theDriverType); diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrationException.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrationException.java index 160f451a816..90b86a60e28 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrationException.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrationException.java @@ -25,14 +25,15 @@ public class HapiMigrationException extends RuntimeException { public HapiMigrationException(String theMessage) { super(theMessage); } + public HapiMigrationException(String theMessage, Exception theException) { super(theMessage, theException); } - public HapiMigrationException(String theMessage, MigrationResult theResult, Exception theException) { + public HapiMigrationException(String theMessage, MigrationResult theResult, Exception theException) { super(theMessage, theException); 
myResult = theResult; - } + } public MigrationResult getResult() { return myResult; diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrationLock.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrationLock.java index caf26fb5744..bcf4062fc4a 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrationLock.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrationLock.java @@ -64,7 +64,11 @@ public class HapiMigrationLock implements AutoCloseable { retryCount++; if (retryCount < ourMaxRetryAttempts) { - ourLog.info("Waiting for lock on {}. Retry {}/{}", myMigrationStorageSvc.getMigrationTablename(), retryCount, ourMaxRetryAttempts); + ourLog.info( + "Waiting for lock on {}. Retry {}/{}", + myMigrationStorageSvc.getMigrationTablename(), + retryCount, + ourMaxRetryAttempts); Thread.sleep(SLEEP_MILLIS_BETWEEN_LOCK_RETRIES); } } catch (InterruptedException ex) { @@ -72,12 +76,14 @@ public class HapiMigrationLock implements AutoCloseable { } } while (retryCount < ourMaxRetryAttempts); - String message = "Unable to obtain table lock - another database migration may be running. If no " + - "other database migration is running, then the previous migration did not shut down properly and the " + - "lock record needs to be deleted manually. The lock record is located in the " + myMigrationStorageSvc.getMigrationTablename() + " table with " + - "INSTALLED_RANK = " + LOCK_PID; + String message = "Unable to obtain table lock - another database migration may be running. If no " + + "other database migration is running, then the previous migration did not shut down properly and the " + + "lock record needs to be deleted manually. The lock record is located in the " + + myMigrationStorageSvc.getMigrationTablename() + " table with " + "INSTALLED_RANK = " + + LOCK_PID; - Optional otherLockFound = myMigrationStorageSvc.findFirstByPidAndNotDescription(LOCK_PID, myLockDescription); + Optional otherLockFound = + myMigrationStorageSvc.findFirstByPidAndNotDescription(LOCK_PID, myLockDescription); if (otherLockFound.isPresent()) { message += " and DESCRIPTION = " + otherLockFound.get().getDescription(); } @@ -98,7 +104,8 @@ public class HapiMigrationLock implements AutoCloseable { return false; } - ourLog.info("Repairing lock table. Removing row in " + myMigrationStorageSvc.getMigrationTablename() + " with INSTALLED_RANK = " + LOCK_PID + " and DESCRIPTION = " + description); + ourLog.info("Repairing lock table. 
Removing row in " + myMigrationStorageSvc.getMigrationTablename() + + " with INSTALLED_RANK = " + LOCK_PID + " and DESCRIPTION = " + description); boolean result = myMigrationStorageSvc.deleteLockRecord(description); if (result) { ourLog.info("Successfully removed lock record"); diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrationStorageSvc.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrationStorageSvc.java index 7aadae52dcf..80f928520a3 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrationStorageSvc.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrationStorageSvc.java @@ -47,7 +47,6 @@ public class HapiMigrationStorageSvc { * @param theTaskList the full list of tasks for this release * @return a list of tasks that have not yet been successfully run against the database */ - public MigrationTaskList diff(MigrationTaskList theTaskList) { Set appliedMigrationVersions = fetchAppliedMigrationVersions(); @@ -68,10 +67,10 @@ public class HapiMigrationStorageSvc { */ public String getLatestAppliedVersion() { return fetchAppliedMigrationVersions().stream() - .sorted() - .map(MigrationVersion::toString) - .reduce((first, second) -> second) - .orElse(UNKNOWN_VERSION); + .sorted() + .map(MigrationVersion::toString) + .reduce((first, second) -> second) + .orElse(UNKNOWN_VERSION); } /** @@ -87,12 +86,10 @@ public class HapiMigrationStorageSvc { /** * Create the migration table if it does not already exist */ - public boolean createMigrationTableIfRequired() { return myHapiMigrationDao.createMigrationTableIfRequired(); } - /** * * @param theLockDescription value of the Description for the lock record @@ -106,11 +103,13 @@ public class HapiMigrationStorageSvc { } void verifyNoOtherLocksPresent(String theLockDescription) { - Optional otherLockFound = myHapiMigrationDao.findFirstByPidAndNotDescription(HapiMigrationLock.LOCK_PID, theLockDescription); + Optional otherLockFound = + myHapiMigrationDao.findFirstByPidAndNotDescription(HapiMigrationLock.LOCK_PID, theLockDescription); // Check that there are no other locks in place. This should not happen! 
if (otherLockFound.isPresent()) { - throw new HapiMigrationException(Msg.code(2152) + "Internal error: on unlocking, a competing lock was found"); + throw new HapiMigrationException( + Msg.code(2152) + "Internal error: on unlocking, a competing lock was found"); } } @@ -125,7 +124,8 @@ public class HapiMigrationStorageSvc { return myHapiMigrationDao.save(entity); } - public Optional findFirstByPidAndNotDescription(Integer theLockPid, String theLockDescription) { + public Optional findFirstByPidAndNotDescription( + Integer theLockPid, String theLockDescription) { return myHapiMigrationDao.findFirstByPidAndNotDescription(theLockPid, theLockDescription); } } diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrator.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrator.java index 2d948171531..302b05cde23 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrator.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/HapiMigrator.java @@ -30,12 +30,12 @@ import org.apache.commons.lang3.Validate; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; -import javax.sql.DataSource; import java.sql.SQLException; import java.util.Collections; import java.util.List; import java.util.Objects; +import javax.annotation.Nonnull; +import javax.sql.DataSource; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -53,7 +53,8 @@ public class HapiMigrator { public HapiMigrator(String theMigrationTableName, DataSource theDataSource, DriverTypeEnum theDriverType) { myDriverType = theDriverType; myDataSource = theDataSource; - myHapiMigrationStorageSvc = new HapiMigrationStorageSvc(new HapiMigrationDao(theDataSource, theDriverType, theMigrationTableName)); + myHapiMigrationStorageSvc = + new HapiMigrationStorageSvc(new HapiMigrationDao(theDataSource, theDriverType, theMigrationTableName)); } public DataSource getDataSource() { @@ -80,13 +81,15 @@ public class HapiMigrator { return myDriverType; } - protected StringBuilder buildExecutedStatementsString(MigrationResult theMigrationResult) { StringBuilder statementBuilder = new StringBuilder(); String lastTable = null; for (BaseTask.ExecutedStatement next : theMigrationResult.executedStatements) { if (!Objects.equals(lastTable, next.getTableName())) { - statementBuilder.append("\n\n-- Table: ").append(next.getTableName()).append("\n"); + statementBuilder + .append("\n\n-- Table: ") + .append(next.getTableName()) + .append("\n"); lastTable = next.getTableName(); } @@ -120,12 +123,15 @@ public class HapiMigrator { // Lock the migration table so only one server migrates the database at once try (HapiMigrationLock ignored = new HapiMigrationLock(myHapiMigrationStorageSvc)) { MigrationTaskList newTaskList = myHapiMigrationStorageSvc.diff(myTaskList); - ourLog.info("{} of these {} migration tasks are new. Executing them now.", newTaskList.size(), myTaskList.size()); + ourLog.info( + "{} of these {} migration tasks are new. 
Executing them now.", + newTaskList.size(), + myTaskList.size()); - try (DriverTypeEnum.ConnectionProperties connectionProperties = getDriverType().newConnectionProperties(getDataSource())) { + try (DriverTypeEnum.ConnectionProperties connectionProperties = + getDriverType().newConnectionProperties(getDataSource())) { newTaskList.forEach(next -> { - next.setDriverType(getDriverType()); next.setDryRun(isDryRun()); next.setNoColumnShrink(isNoColumnShrink()); @@ -143,7 +149,9 @@ public class HapiMigrator { if (isDryRun()) { StringBuilder statementBuilder = buildExecutedStatementsString(retval); - ourLog.info("SQL that would be executed:\n\n***********************************\n{}***********************************", statementBuilder); + ourLog.info( + "SQL that would be executed:\n\n***********************************\n{}***********************************", + statementBuilder); } return retval; @@ -177,7 +185,6 @@ public class HapiMigrator { private void preExecute(BaseTask theTask) { myCallbacks.forEach(action -> action.preExecution(theTask)); - } private void postExecute(BaseTask theNext, StopWatch theStopWatch, boolean theSuccess) { diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/IHapiMigrationCallback.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/IHapiMigrationCallback.java index a1aac1ab786..91336b69af6 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/IHapiMigrationCallback.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/IHapiMigrationCallback.java @@ -23,5 +23,6 @@ import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask; public interface IHapiMigrationCallback { default void preExecution(BaseTask theTask) {} + default void postExecution(BaseTask theTask) {} } diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/JdbcUtils.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/JdbcUtils.java index 32fbeedacab..b9863105de6 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/JdbcUtils.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/JdbcUtils.java @@ -47,8 +47,6 @@ import org.slf4j.LoggerFactory; import org.springframework.jdbc.core.ColumnMapRowMapper; import org.springframework.transaction.support.TransactionTemplate; -import javax.annotation.Nullable; -import javax.sql.DataSource; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.ResultSet; @@ -62,6 +60,8 @@ import java.util.Locale; import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nullable; +import javax.sql.DataSource; public class JdbcUtils { private static final Logger ourLog = LoggerFactory.getLogger(JdbcUtils.class); @@ -69,7 +69,8 @@ public class JdbcUtils { /** * Retrieve all index names */ - public static Set getIndexNames(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName) throws SQLException { + public static Set getIndexNames( + DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName) throws SQLException { if (!getTableNames(theConnectionProperties).contains(theTableName)) { return Collections.emptySet(); @@ -97,11 +98,10 @@ public class JdbcUtils { indexNames.add(indexName); } - indexNames = indexNames - .stream() - .filter(Objects::nonNull) // filter out the nulls first - .map(s -> s.toUpperCase(Locale.US)) // then convert the non-null entries to upper case - .collect(Collectors.toSet()); + indexNames = indexNames.stream() + 
.filter(Objects::nonNull) // filter out the nulls first + .map(s -> s.toUpperCase(Locale.US)) // then convert the non-null entries to upper case + .collect(Collectors.toSet()); return indexNames; @@ -113,7 +113,9 @@ public class JdbcUtils { } @SuppressWarnings("ConstantConditions") - public static boolean isIndexUnique(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName, String theIndexName) throws SQLException { + public static boolean isIndexUnique( + DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName, String theIndexName) + throws SQLException { DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource()); try (Connection connection = dataSource.getConnection()) { return theConnectionProperties.getTxTemplate().execute(t -> { @@ -134,21 +136,31 @@ public class JdbcUtils { throw new InternalErrorException(Msg.code(30) + e); } - throw new InternalErrorException(Msg.code(31) + "Can't find index: " + theIndexName + " on table " + theTableName); + throw new InternalErrorException( + Msg.code(31) + "Can't find index: " + theIndexName + " on table " + theTableName); }); } } - private static ResultSet getIndexInfo(String theTableName, Connection theConnection, DatabaseMetaData theMetadata, boolean theUnique) throws SQLException { + private static ResultSet getIndexInfo( + String theTableName, Connection theConnection, DatabaseMetaData theMetadata, boolean theUnique) + throws SQLException { // FYI Using approximate=false causes a very slow table scan on Oracle boolean approximate = true; - return theMetadata.getIndexInfo(theConnection.getCatalog(), theConnection.getSchema(), massageIdentifier(theMetadata, theTableName), theUnique, approximate); + return theMetadata.getIndexInfo( + theConnection.getCatalog(), + theConnection.getSchema(), + massageIdentifier(theMetadata, theTableName), + theUnique, + approximate); } /** * Retrieve all index names */ - public static ColumnType getColumnType(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName, String theColumnName) throws SQLException { + public static ColumnType getColumnType( + DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName, String theColumnName) + throws SQLException { DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource()); try (Connection connection = dataSource.getConnection()) { return theConnectionProperties.getTxTemplate().execute(t -> { @@ -157,7 +169,8 @@ public class JdbcUtils { metadata = connection.getMetaData(); String catalog = connection.getCatalog(); String schema = connection.getSchema(); - ResultSet indexes = metadata.getColumns(catalog, schema, massageIdentifier(metadata, theTableName), null); + ResultSet indexes = + metadata.getColumns(catalog, schema, massageIdentifier(metadata, theTableName), null); while (indexes.next()) { @@ -193,18 +206,23 @@ public class JdbcUtils { return new ColumnType(ColumnTypeEnum.BLOB, length); case Types.LONGVARBINARY: if (DriverTypeEnum.MYSQL_5_7.equals(theConnectionProperties.getDriverType())) { - //See git + // See git return new ColumnType(ColumnTypeEnum.BLOB, length); } else { - throw new IllegalArgumentException(Msg.code(32) + "Don't know how to handle datatype " + dataType + " for column " + theColumnName + " on table " + theTableName); + throw new IllegalArgumentException( + Msg.code(32) + "Don't know how to handle datatype " + dataType + + " for column " + theColumnName + " on table " + theTableName); } case 
Types.VARBINARY: if (DriverTypeEnum.MSSQL_2012.equals(theConnectionProperties.getDriverType())) { - // MS SQLServer seems to be mapping BLOB to VARBINARY under the covers, so we need to reverse that mapping + // MS SQLServer seems to be mapping BLOB to VARBINARY under the covers, so we need + // to reverse that mapping return new ColumnType(ColumnTypeEnum.BLOB, length); } else { - throw new IllegalArgumentException(Msg.code(33) + "Don't know how to handle datatype " + dataType + " for column " + theColumnName + " on table " + theTableName); + throw new IllegalArgumentException( + Msg.code(33) + "Don't know how to handle datatype " + dataType + + " for column " + theColumnName + " on table " + theTableName); } case Types.CLOB: return new ColumnType(ColumnTypeEnum.CLOB, length); @@ -213,9 +231,9 @@ public class JdbcUtils { case Types.FLOAT: return new ColumnType(ColumnTypeEnum.FLOAT, length); default: - throw new IllegalArgumentException(Msg.code(34) + "Don't know how to handle datatype " + dataType + " for column " + theColumnName + " on table " + theTableName); + throw new IllegalArgumentException(Msg.code(34) + "Don't know how to handle datatype " + + dataType + " for column " + theColumnName + " on table " + theTableName); } - } ourLog.debug("Unable to find column {} in table {}.", theColumnName, theTableName); @@ -224,7 +242,6 @@ public class JdbcUtils { } catch (SQLException e) { throw new InternalErrorException(Msg.code(35) + e); } - }); } } @@ -232,7 +249,11 @@ public class JdbcUtils { /** * Retrieve all index names */ - public static Set getForeignKeys(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName, @Nullable String theForeignTable) throws SQLException { + public static Set getForeignKeys( + DriverTypeEnum.ConnectionProperties theConnectionProperties, + String theTableName, + @Nullable String theForeignTable) + throws SQLException { DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource()); try (Connection connection = dataSource.getConnection()) { @@ -244,7 +265,6 @@ public class JdbcUtils { String catalog = connection.getCatalog(); String schema = connection.getSchema(); - List parentTables = new ArrayList<>(); if (theTableName != null) { parentTables.add(massageIdentifier(metadata, theTableName)); @@ -257,7 +277,8 @@ public class JdbcUtils { Set fkNames = new HashSet<>(); for (String nextParentTable : parentTables) { - ResultSet indexes = metadata.getCrossReference(catalog, schema, nextParentTable, catalog, schema, foreignTable); + ResultSet indexes = metadata.getCrossReference( + catalog, schema, nextParentTable, catalog, schema, foreignTable); while (indexes.next()) { String fkName = indexes.getString("FK_NAME"); @@ -277,7 +298,11 @@ public class JdbcUtils { /** * Retrieve names of foreign keys that reference a specified foreign key column. 
*/ - public static Set getForeignKeysForColumn(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theForeignKeyColumn, String theForeignTable) throws SQLException { + public static Set getForeignKeysForColumn( + DriverTypeEnum.ConnectionProperties theConnectionProperties, + String theForeignKeyColumn, + String theForeignTable) + throws SQLException { DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource()); try (Connection connection = dataSource.getConnection()) { @@ -288,7 +313,6 @@ public class JdbcUtils { String catalog = connection.getCatalog(); String schema = connection.getSchema(); - List parentTables = new ArrayList<>(); parentTables.addAll(JdbcUtils.getTableNames(theConnectionProperties)); @@ -296,7 +320,8 @@ public class JdbcUtils { Set fkNames = new HashSet<>(); for (String nextParentTable : parentTables) { - ResultSet indexes = metadata.getCrossReference(catalog, schema, nextParentTable, catalog, schema, foreignTable); + ResultSet indexes = metadata.getCrossReference( + catalog, schema, nextParentTable, catalog, schema, foreignTable); while (indexes.next()) { if (theForeignKeyColumn.equals(indexes.getString("FKCOLUMN_NAME"))) { @@ -318,14 +343,19 @@ public class JdbcUtils { /** * Retrieve all index names */ - public static Set getColumnNames(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName) throws SQLException { + public static Set getColumnNames( + DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName) throws SQLException { DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource()); try (Connection connection = dataSource.getConnection()) { return theConnectionProperties.getTxTemplate().execute(t -> { DatabaseMetaData metadata; try { metadata = connection.getMetaData(); - ResultSet indexes = metadata.getColumns(connection.getCatalog(), connection.getSchema(), massageIdentifier(metadata, theTableName), null); + ResultSet indexes = metadata.getColumns( + connection.getCatalog(), + connection.getSchema(), + massageIdentifier(metadata, theTableName), + null); Set columnNames = new HashSet<>(); while (indexes.next()) { @@ -347,19 +377,22 @@ public class JdbcUtils { } } - public static Set getSequenceNames(DriverTypeEnum.ConnectionProperties theConnectionProperties) throws SQLException { + public static Set getSequenceNames(DriverTypeEnum.ConnectionProperties theConnectionProperties) + throws SQLException { DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource()); try (Connection connection = dataSource.getConnection()) { return theConnectionProperties.getTxTemplate().execute(t -> { try { DialectResolver dialectResolver = new StandardDialectResolver(); - Dialect dialect = dialectResolver.resolveDialect(new DatabaseMetaDataDialectResolutionInfoAdapter(connection.getMetaData())); + Dialect dialect = dialectResolver.resolveDialect( + new DatabaseMetaDataDialectResolutionInfoAdapter(connection.getMetaData())); Set sequenceNames = new HashSet<>(); if (dialect.supportsSequences()) { // Use Hibernate to get a list of current sequences - SequenceInformationExtractor sequenceInformationExtractor = dialect.getSequenceInformationExtractor(); + SequenceInformationExtractor sequenceInformationExtractor = + dialect.getSequenceInformationExtractor(); ExtractionContext extractionContext = new ExtractionContext.EmptyExtractionContext() { @Override public Connection getJdbcConnection() { @@ -401,7 +434,8 @@ public class JdbcUtils { 
@Override public IdentifierHelper getIdentifierHelper() { - return new NormalizingIdentifierHelperImpl(this, null, true, true, true, null, null, null); + return new NormalizingIdentifierHelperImpl( + this, null, true, true, true, null, null, null); } @Override @@ -421,11 +455,12 @@ public class JdbcUtils { }; } }; - Iterable sequences = sequenceInformationExtractor.extractMetadata(extractionContext); + Iterable sequences = + sequenceInformationExtractor.extractMetadata(extractionContext); for (SequenceInformation next : sequences) { - sequenceNames.add(next.getSequenceName().getSequenceName().getText()); + sequenceNames.add( + next.getSequenceName().getSequenceName().getText()); } - } return sequenceNames; } catch (SQLException e) { @@ -435,7 +470,8 @@ public class JdbcUtils { } } - public static Set getTableNames(DriverTypeEnum.ConnectionProperties theConnectionProperties) throws SQLException { + public static Set getTableNames(DriverTypeEnum.ConnectionProperties theConnectionProperties) + throws SQLException { DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource()); try (Connection connection = dataSource.getConnection()) { return theConnectionProperties.getTxTemplate().execute(t -> { @@ -468,7 +504,9 @@ public class JdbcUtils { } } - public static boolean isColumnNullable(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName, String theColumnName) throws SQLException { + public static boolean isColumnNullable( + DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName, String theColumnName) + throws SQLException { DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource()); try (Connection connection = dataSource.getConnection()) { //noinspection ConstantConditions @@ -476,7 +514,11 @@ public class JdbcUtils { DatabaseMetaData metadata; try { metadata = connection.getMetaData(); - ResultSet tables = metadata.getColumns(connection.getCatalog(), connection.getSchema(), massageIdentifier(metadata, theTableName), null); + ResultSet tables = metadata.getColumns( + connection.getCatalog(), + connection.getSchema(), + massageIdentifier(metadata, theTableName), + null); while (tables.next()) { String tableName = tables.getString("TABLE_NAME").toUpperCase(Locale.US); @@ -546,17 +588,17 @@ public class JdbcUtils { ColumnType that = (ColumnType) theO; return new EqualsBuilder() - .append(myColumnTypeEnum, that.myColumnTypeEnum) - .append(myLength, that.myLength) - .isEquals(); + .append(myColumnTypeEnum, that.myColumnTypeEnum) + .append(myLength, that.myLength) + .isEquals(); } @Override public int hashCode() { return new HashCodeBuilder(17, 37) - .append(myColumnTypeEnum) - .append(myLength) - .toHashCode(); + .append(myColumnTypeEnum) + .append(myLength) + .toHashCode(); } @Override @@ -578,8 +620,14 @@ public class JdbcUtils { } public boolean equals(ColumnTypeEnum theTaskColumnType, Long theTaskColumnLength) { - ourLog.debug("Comparing existing {} {} to new {} {}", myColumnTypeEnum, myLength, theTaskColumnType, theTaskColumnLength); - return myColumnTypeEnum == theTaskColumnType && (theTaskColumnLength == null || theTaskColumnLength.equals(myLength)); + ourLog.debug( + "Comparing existing {} {} to new {} {}", + myColumnTypeEnum, + myLength, + theTaskColumnType, + theTaskColumnLength); + return myColumnTypeEnum == theTaskColumnType + && (theTaskColumnLength == null || theTaskColumnLength.equals(myLength)); } } } diff --git 
a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/MigrationResult.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/MigrationResult.java index 5fa5d40607a..8701c330f96 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/MigrationResult.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/MigrationResult.java @@ -31,10 +31,11 @@ public class MigrationResult { public final List failedTasks = new ArrayList<>(); public String summary() { - return String.format("Completed executing %s migration tasks: %s succeeded, %s failed. %s SQL statements were executed.", - succeededTasks.size() + failedTasks.size(), - succeededTasks.size(), - failedTasks.size(), - executedStatements.size()); + return String.format( + "Completed executing %s migration tasks: %s succeeded, %s failed. %s SQL statements were executed.", + succeededTasks.size() + failedTasks.size(), + succeededTasks.size(), + failedTasks.size(), + executedStatements.size()); } } diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/MigrationTaskList.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/MigrationTaskList.java index e2d6ac16abe..1275412c875 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/MigrationTaskList.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/MigrationTaskList.java @@ -22,7 +22,6 @@ package ca.uhn.fhir.jpa.migrate; import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask; import org.flywaydb.core.api.MigrationVersion; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; @@ -30,6 +29,7 @@ import java.util.List; import java.util.Set; import java.util.function.Consumer; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class MigrationTaskList implements Iterable { private final List myTasks; @@ -56,8 +56,9 @@ public class MigrationTaskList implements Iterable { public MigrationTaskList diff(Set theAppliedMigrationVersions) { List unappliedTasks = myTasks.stream() - .filter(task -> !theAppliedMigrationVersions.contains(MigrationVersion.fromVersion(task.getMigrationVersion()))) - .collect(Collectors.toList()); + .filter(task -> + !theAppliedMigrationVersions.contains(MigrationVersion.fromVersion(task.getMigrationVersion()))) + .collect(Collectors.toList()); return new MigrationTaskList(unappliedTasks); } @@ -87,11 +88,11 @@ public class MigrationTaskList implements Iterable { public String getLastVersion() { return myTasks.stream() - .map(BaseTask::getMigrationVersion) - .map(MigrationVersion::fromVersion) - .sorted() - .map(MigrationVersion::toString) - .reduce((first, second) -> second) - .orElse(null); + .map(BaseTask::getMigrationVersion) + .map(MigrationVersion::fromVersion) + .sorted() + .map(MigrationVersion::toString) + .reduce((first, second) -> second) + .orElse(null); } } diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/MigrationTaskSkipper.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/MigrationTaskSkipper.java index d3ae989fd31..2985106c68f 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/MigrationTaskSkipper.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/MigrationTaskSkipper.java @@ -39,12 +39,12 @@ public class MigrationTaskSkipper { return; } Set skippedVersionSet = Stream.of(theSkipVersions.split(",")) - .map(String::trim) - // TODO KHS filter out all characters that aren't numbers, periods 
and underscores - .map(s -> s.replace("'", "")) - .map(s -> s.replace("\"", "")) - .filter(StringUtils::isNotBlank) - .collect(Collectors.toSet()); + .map(String::trim) + // TODO KHS filter out all characters that aren't numbers, periods and underscores + .map(s -> s.replace("'", "")) + .map(s -> s.replace("\"", "")) + .filter(StringUtils::isNotBlank) + .collect(Collectors.toSet()); for (BaseTask task : theTasks) { if (skippedVersionSet.contains(task.getMigrationVersion())) { diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/SchemaMigrator.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/SchemaMigrator.java index 7bd8f43e3a5..48f68cf93f3 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/SchemaMigrator.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/SchemaMigrator.java @@ -25,12 +25,12 @@ import org.hibernate.cfg.AvailableSettings; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.sql.DataSource; import java.sql.Connection; import java.sql.SQLException; import java.util.Collections; import java.util.List; import java.util.Properties; +import javax.sql.DataSource; public class SchemaMigrator { public static final String HAPI_FHIR_MIGRATION_TABLENAME = "FLY_HFJ_MIGRATION"; @@ -47,13 +47,20 @@ public class SchemaMigrator { /** * Constructor */ - public SchemaMigrator(String theSchemaName, String theMigrationTableName, DataSource theDataSource, Properties jpaProperties, MigrationTaskList theMigrationTasks, HapiMigrationStorageSvc theHapiMigrationStorageSvc) { + public SchemaMigrator( + String theSchemaName, + String theMigrationTableName, + DataSource theDataSource, + Properties jpaProperties, + MigrationTaskList theMigrationTasks, + HapiMigrationStorageSvc theHapiMigrationStorageSvc) { mySchemaName = theSchemaName; myDataSource = theDataSource; myMigrationTableName = theMigrationTableName; myMigrationTasks = theMigrationTasks; - mySkipValidation = jpaProperties.containsKey(AvailableSettings.HBM2DDL_AUTO) && "update".equals(jpaProperties.getProperty(AvailableSettings.HBM2DDL_AUTO)); + mySkipValidation = jpaProperties.containsKey(AvailableSettings.HBM2DDL_AUTO) + && "update".equals(jpaProperties.getProperty(AvailableSettings.HBM2DDL_AUTO)); myHapiMigrationStorageSvc = theHapiMigrationStorageSvc; } @@ -68,15 +75,17 @@ public class SchemaMigrator { if (unappliedMigrations.size() > 0) { String url = connection.getMetaData().getURL(); - throw new ConfigurationException(Msg.code(27) + "The database schema for " + url + " is out of date. " + - "Current database schema version is " + myHapiMigrationStorageSvc.getLatestAppliedVersion() + ". Schema version required by application is " + - unappliedMigrations.getLastVersion() + ". Please run the database migrator."); + throw new ConfigurationException(Msg.code(27) + "The database schema for " + url + " is out of date. " + + "Current database schema version is " + + myHapiMigrationStorageSvc.getLatestAppliedVersion() + + ". Schema version required by application is " + unappliedMigrations.getLastVersion() + + ". 
Please run the database migrator."); } - ourLog.info("Database schema confirmed at expected version " + myHapiMigrationStorageSvc.getLatestAppliedVersion()); + ourLog.info("Database schema confirmed at expected version " + + myHapiMigrationStorageSvc.getLatestAppliedVersion()); } catch (SQLException e) { throw new ConfigurationException(Msg.code(28) + "Unable to connect to " + myDataSource, e); } - } public MigrationResult migrate() { diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/dao/HapiMigrationDao.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/dao/HapiMigrationDao.java index 432a68c8520..14271741c92 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/dao/HapiMigrationDao.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/dao/HapiMigrationDao.java @@ -30,7 +30,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.jdbc.core.JdbcTemplate; -import javax.sql.DataSource; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; @@ -39,6 +38,7 @@ import java.util.List; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; +import javax.sql.DataSource; public class HapiMigrationDao { private static final Logger ourLog = LoggerFactory.getLogger(HapiMigrationDao.class); @@ -62,10 +62,10 @@ public class HapiMigrationDao { public Set fetchSuccessfulMigrationVersions() { List allEntries = findAll(); return allEntries.stream() - .filter(HapiMigrationEntity::getSuccess) - .map(HapiMigrationEntity::getVersion) - .map(MigrationVersion::fromVersion) - .collect(Collectors.toSet()); + .filter(HapiMigrationEntity::getSuccess) + .map(HapiMigrationEntity::getVersion) + .map(MigrationVersion::fromVersion) + .collect(Collectors.toSet()); } public void deleteAll() { @@ -127,7 +127,8 @@ public class HapiMigrationDao { private boolean migrationTableExists() { try { try (Connection connection = myDataSource.getConnection()) { - ResultSet tables = connection.getMetaData().getTables(connection.getCatalog(), connection.getSchema(), null, null); + ResultSet tables = + connection.getMetaData().getTables(connection.getCatalog(), connection.getSchema(), null, null); while (tables.next()) { String tableName = tables.getString("TABLE_NAME"); @@ -153,13 +154,16 @@ public class HapiMigrationDao { * @return true if the record was successfully deleted */ public boolean deleteLockRecord(Integer theLockPid, String theLockDescription) { - int recordsChanged = myJdbcTemplate.update(myMigrationQueryBuilder.deleteLockRecordStatement(theLockPid, theLockDescription)); + int recordsChanged = myJdbcTemplate.update( + myMigrationQueryBuilder.deleteLockRecordStatement(theLockPid, theLockDescription)); return recordsChanged > 0; } - public Optional findFirstByPidAndNotDescription(Integer theLockPid, String theLockDescription) { + public Optional findFirstByPidAndNotDescription( + Integer theLockPid, String theLockDescription) { String query = myMigrationQueryBuilder.findByPidAndNotDescriptionQuery(theLockPid, theLockDescription); - return myJdbcTemplate.query(query, HapiMigrationEntity.rowMapper()).stream().findFirst(); + return myJdbcTemplate.query(query, HapiMigrationEntity.rowMapper()).stream() + .findFirst(); } } diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/dao/MigrationQueryBuilder.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/dao/MigrationQueryBuilder.java index 1e779dff66c..4d3f4b46f21 100644 --- 
a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/dao/MigrationQueryBuilder.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/dao/MigrationQueryBuilder.java @@ -74,7 +74,8 @@ public class MigrationQueryBuilder { myVersionCol = myTable.addColumn("\"version\"", Types.VARCHAR, HapiMigrationEntity.VERSION_MAX_SIZE); - myDescriptionCol = myTable.addColumn("\"description\"", Types.VARCHAR, HapiMigrationEntity.DESCRIPTION_MAX_SIZE); + myDescriptionCol = + myTable.addColumn("\"description\"", Types.VARCHAR, HapiMigrationEntity.DESCRIPTION_MAX_SIZE); myDescriptionCol.notNull(); myTypeCol = myTable.addColumn("\"type\"", Types.VARCHAR, HapiMigrationEntity.TYPE_MAX_SIZE); @@ -85,7 +86,8 @@ public class MigrationQueryBuilder { myChecksumCol = myTable.addColumn("\"checksum\"", Types.INTEGER, null); - myInstalledByCol = myTable.addColumn("\"installed_by\"", Types.VARCHAR, HapiMigrationEntity.INSTALLED_BY_MAX_SIZE); + myInstalledByCol = + myTable.addColumn("\"installed_by\"", Types.VARCHAR, HapiMigrationEntity.INSTALLED_BY_MAX_SIZE); myInstalledByCol.notNull(); myInstalledOnCol = myTable.addColumn("\"installed_on\"", Types.DATE, null); @@ -94,7 +96,9 @@ public class MigrationQueryBuilder { myExecutionTimeCol = myTable.addColumn("\"execution_time\"", Types.INTEGER, null); myExecutionTimeCol.notNull(); - myBooleanType = ColumnTypeToDriverTypeToSqlType.getColumnTypeToDriverTypeToSqlType().get(ColumnTypeEnum.BOOLEAN).get(theDriverType); + myBooleanType = ColumnTypeToDriverTypeToSqlType.getColumnTypeToDriverTypeToSqlType() + .get(ColumnTypeEnum.BOOLEAN) + .get(theDriverType); mySuccessCol = myTable.addColumn("\"success\"", myBooleanType, null); mySuccessCol.notNull(); @@ -112,65 +116,64 @@ public class MigrationQueryBuilder { private String buildHighestKeyQuery() { return new SelectQuery() - .addCustomColumns(FunctionCall.max().addColumnParams(myInstalledRankCol)) - .validate() - .toString(); + .addCustomColumns(FunctionCall.max().addColumnParams(myInstalledRankCol)) + .validate() + .toString(); } public String insertPreparedStatement() { return new InsertQuery(myTable) - .addPreparedColumns(myInstalledRankCol, - myVersionCol, - myDescriptionCol, - myTypeCol, - myScriptCol, - myChecksumCol, - myInstalledByCol, - myInstalledOnCol, - myExecutionTimeCol, - mySuccessCol) - .validate() - .toString(); + .addPreparedColumns( + myInstalledRankCol, + myVersionCol, + myDescriptionCol, + myTypeCol, + myScriptCol, + myChecksumCol, + myInstalledByCol, + myInstalledOnCol, + myExecutionTimeCol, + mySuccessCol) + .validate() + .toString(); } public String createTableStatement() { - return new CreateTableQuery(myTable, true) - .validate() - .toString(); + return new CreateTableQuery(myTable, true).validate().toString(); } public String createIndexStatement() { return new CreateIndexQuery(myTable, myMigrationTablename.toUpperCase() + "_PK_INDEX") - .setIndexType(CreateIndexQuery.IndexType.UNIQUE) - .addColumns(myInstalledRankCol) - .validate() - .toString(); + .setIndexType(CreateIndexQuery.IndexType.UNIQUE) + .addColumns(myInstalledRankCol) + .validate() + .toString(); } public String findAllQuery() { return new SelectQuery() - .addFromTable(myTable) - .addCondition(BinaryCondition.notEqualTo(myInstalledRankCol, HapiMigrationEntity.CREATE_TABLE_PID)) - .addAllColumns() - .validate() - .toString(); + .addFromTable(myTable) + .addCondition(BinaryCondition.notEqualTo(myInstalledRankCol, HapiMigrationEntity.CREATE_TABLE_PID)) + .addAllColumns() + .validate() + .toString(); } - public String 
deleteLockRecordStatement(Integer theLockPid, String theLockDescription) { - return new DeleteQuery(myTable) - .addCondition(BinaryCondition.equalTo(myInstalledRankCol, theLockPid)) + public String deleteLockRecordStatement(Integer theLockPid, String theLockDescription) { + return new DeleteQuery(myTable) + .addCondition(BinaryCondition.equalTo(myInstalledRankCol, theLockPid)) .addCondition(BinaryCondition.equalTo(myDescriptionCol, theLockDescription)) .validate() .toString(); - } + } public String findByPidAndNotDescriptionQuery(Integer theLockPid, String theLockDescription) { return new SelectQuery() - .addFromTable(myTable) - .addCondition(BinaryCondition.equalTo(myInstalledRankCol, theLockPid)) - .addCondition(BinaryCondition.notEqualTo(myDescriptionCol, theLockDescription)) - .addAllColumns() - .validate() - .toString(); + .addFromTable(myTable) + .addCondition(BinaryCondition.equalTo(myInstalledRankCol, theLockPid)) + .addCondition(BinaryCondition.notEqualTo(myDescriptionCol, theLockDescription)) + .addAllColumns() + .validate() + .toString(); } } diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/entity/HapiMigrationEntity.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/entity/HapiMigrationEntity.java index e596f927edb..7e9db2b7e3d 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/entity/HapiMigrationEntity.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/entity/HapiMigrationEntity.java @@ -25,12 +25,12 @@ import org.hibernate.annotations.GenericGenerator; import org.springframework.jdbc.core.PreparedStatementSetter; import org.springframework.jdbc.core.RowMapper; +import java.util.Date; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; -import java.util.Date; // Note even though we are using javax.persistence annotations here, we are managing these records outside of jpa // so these annotations are for informational purposes only @@ -44,8 +44,11 @@ public class HapiMigrationEntity { public static final int CREATE_TABLE_PID = -1; public static final String INITIAL_RECORD_DESCRIPTION = "<< HAPI FHIR Schema History table created >>"; public static final String INITIAL_RECORD_SCRIPT = "HAPI FHIR"; + @Id - @GenericGenerator(name = "SEQ_FLY_HFJ_MIGRATION", strategy = "ca.uhn.fhir.jpa.model.dialect.HapiSequenceStyleGenerator") + @GenericGenerator( + name = "SEQ_FLY_HFJ_MIGRATION", + strategy = "ca.uhn.fhir.jpa.model.dialect.HapiSequenceStyleGenerator") @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_FLY_HFJ_MIGRATION") @Column(name = "INSTALLED_RANK") private Integer myPid; @@ -209,7 +212,11 @@ public class HapiMigrationEntity { ps.setInt(6, getChecksum()); } ps.setString(7, getInstalledBy()); - ps.setDate(8, getInstalledOn() != null ? new java.sql.Date(getInstalledOn().getTime()) : null); + ps.setDate( + 8, + getInstalledOn() != null + ? 
new java.sql.Date(getInstalledOn().getTime()) + : null); ps.setInt(9, getExecutionTime()); ps.setBoolean(10, getSuccess()); }; diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddColumnTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddColumnTask.java index b439e2fc250..d39bc434614 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddColumnTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddColumnTask.java @@ -52,7 +52,11 @@ public class AddColumnTask extends BaseTableColumnTypeTask { if (myCheckForExistingTables) { Set columnNames = JdbcUtils.getColumnNames(getConnectionProperties(), getTableName()); if (columnNames.contains(getColumnName())) { - logInfo(ourLog, "Column {} already exists on table {} - No action performed", getColumnName(), getTableName()); + logInfo( + ourLog, + "Column {} already exists on table {} - No action performed", + getColumnName(), + getTableName()); return; } } @@ -95,5 +99,4 @@ public class AddColumnTask extends BaseTableColumnTypeTask { String space = isNullable() ? "" : " "; return type + space + nullable; } - } diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddForeignKeyTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddForeignKeyTask.java index 5d467fa9192..0e9053a98f3 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddForeignKeyTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddForeignKeyTask.java @@ -62,7 +62,8 @@ public class AddForeignKeyTask extends BaseTableColumnTask { Validate.isTrue(isNotBlank(myConstraintName)); Validate.isTrue(isNotBlank(myForeignTableName)); Validate.isTrue(isNotBlank(myForeignColumnName)); - setDescription("Add foreign key " + myConstraintName + " from column " + getColumnName() + " of table " + getTableName() + " to column " + myForeignColumnName + " of table " + myForeignTableName); + setDescription("Add foreign key " + myConstraintName + " from column " + getColumnName() + " of table " + + getTableName() + " to column " + myForeignColumnName + " of table " + myForeignTableName); } @Override @@ -79,7 +80,8 @@ public class AddForeignKeyTask extends BaseTableColumnTask { case MARIADB_10_1: case MYSQL_5_7: // Quote the column names as "SYSTEM" is a reserved word in MySQL - sql = "alter table " + getTableName() + " add constraint " + myConstraintName + " foreign key (`" + getColumnName() + "`) references " + myForeignTableName + " (`" + myForeignColumnName + "`)"; + sql = "alter table " + getTableName() + " add constraint " + myConstraintName + " foreign key (`" + + getColumnName() + "`) references " + myForeignTableName + " (`" + myForeignColumnName + "`)"; break; case COCKROACHDB_21_1: case POSTGRES_9_4: @@ -87,13 +89,13 @@ public class AddForeignKeyTask extends BaseTableColumnTask { case H2_EMBEDDED: case ORACLE_12C: case MSSQL_2012: - sql = "alter table " + getTableName() + " add constraint " + myConstraintName + " foreign key (" + getColumnName() + ") references " + myForeignTableName; + sql = "alter table " + getTableName() + " add constraint " + myConstraintName + " foreign key (" + + getColumnName() + ") references " + myForeignTableName; break; default: throw new IllegalStateException(Msg.code(68)); } - try { executeSql(getTableName(), sql); } catch (Exception e) { @@ -121,5 +123,4 @@ public class AddForeignKeyTask extends BaseTableColumnTask { 
theBuilder.append(myForeignTableName, otherObject.myForeignTableName); theBuilder.append(myForeignColumnName, otherObject.myForeignColumnName); } - } diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddIdGeneratorTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddIdGeneratorTask.java index 60ae864f426..37b9d01beb9 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddIdGeneratorTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddIdGeneratorTask.java @@ -65,7 +65,6 @@ public class AddIdGeneratorTask extends BaseTask { String initSql = "insert into " + myGeneratorName + " values ( 1 )"; executeSql(myGeneratorName, initSql); - } break; case DERBY_EMBEDDED: @@ -87,9 +86,7 @@ public class AddIdGeneratorTask extends BaseTask { } if (isNotBlank(sql)) { - Set sequenceNames = - JdbcUtils.getSequenceNames(getConnectionProperties()) - .stream() + Set sequenceNames = JdbcUtils.getSequenceNames(getConnectionProperties()).stream() .map(String::toLowerCase) .collect(Collectors.toSet()); ourLog.debug("Currently have sequences: {}", sequenceNames); @@ -100,7 +97,6 @@ public class AddIdGeneratorTask extends BaseTask { executeSql(myGeneratorName, sql); } - } @Override diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddIndexTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddIndexTask.java index 5ebbf1fb778..b0d2ae6ccd4 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddIndexTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddIndexTask.java @@ -27,13 +27,13 @@ import org.apache.commons.lang3.builder.HashCodeBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; import java.sql.SQLException; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.Set; +import javax.annotation.Nonnull; public class AddIndexTask extends BaseTableTask { @@ -68,7 +68,9 @@ public class AddIndexTask extends BaseTableTask { public void validate() { super.validate(); Validate.notBlank(myIndexName, "Index name not specified"); - Validate.isTrue(myColumns.size() > 0, "Columns not specified for AddIndexTask " + myIndexName + " on table " + getTableName()); + Validate.isTrue( + myColumns.size() > 0, + "Columns not specified for AddIndexTask " + myIndexName + " on table " + getTableName()); Validate.notNull(myUnique, "Uniqueness not specified"); setDescription("Add " + myIndexName + " index to table " + getTableName()); } @@ -81,7 +83,13 @@ public class AddIndexTask extends BaseTableTask { return; } - logInfo(ourLog, "Going to add a {} index named {} on table {} for columns {}", (myUnique ? "UNIQUE" : "NON-UNIQUE"), myIndexName, getTableName(), myColumns); + logInfo( + ourLog, + "Going to add a {} index named {} on table {} for columns {}", + (myUnique ? 
"UNIQUE" : "NON-UNIQUE"), + myIndexName, + getTableName(), + myColumns); String sql = generateSql(); String tableName = getTableName(); @@ -149,10 +157,8 @@ public class AddIndexTask extends BaseTableTask { } } - - String sql = - "create " + unique + "index " + postgresOnlineClause + myIndexName + - " on " + getTableName() + "(" + columns + ")" + includeClause + mssqlWhereClause + msSqlOracleOnlineClause; + String sql = "create " + unique + "index " + postgresOnlineClause + myIndexName + " on " + getTableName() + "(" + + columns + ")" + includeClause + mssqlWhereClause + msSqlOracleOnlineClause; return sql; } @@ -189,6 +195,7 @@ public class AddIndexTask extends BaseTableTask { public void setOnline(boolean theFlag) { myOnline = theFlag; } + @Override protected void generateEquals(EqualsBuilder theBuilder, BaseTask theOtherObject) { super.generateEquals(theBuilder, theOtherObject); @@ -199,7 +206,6 @@ public class AddIndexTask extends BaseTableTask { theBuilder.append(myUnique, otherObject.myUnique); theBuilder.append(myIncludeColumns, otherObject.myIncludeColumns); theBuilder.append(myOnline, otherObject.myOnline); - } @Override diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddTableByColumnTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddTableByColumnTask.java index c9cf308630f..45e254bce89 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddTableByColumnTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddTableByColumnTask.java @@ -133,7 +133,7 @@ public class AddTableByColumnTask extends BaseTableTask { // foreign keys if (!myFKColumns.isEmpty()) { - for (int i =0; i < myFKColumns.size(); i++) { + for (int i = 0; i < myFKColumns.size(); i++) { if (i > 0) { sb.append(", "); } @@ -171,13 +171,13 @@ public class AddTableByColumnTask extends BaseTableTask { @Override public void doExecute() throws SQLException { - if (myCheckForExistingTables && JdbcUtils.getTableNames(getConnectionProperties()).contains(getTableName())) { + if (myCheckForExistingTables + && JdbcUtils.getTableNames(getConnectionProperties()).contains(getTableName())) { logInfo(ourLog, "Already have table named {} - No action performed", getTableName()); return; } executeSql(getTableName(), generateSQLCreateScript()); - } @Override diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ArbitrarySqlTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ArbitrarySqlTask.java index 91491e0fd8f..ceb8a1a3f73 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ArbitrarySqlTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ArbitrarySqlTask.java @@ -74,9 +74,14 @@ public class ArbitrarySqlTask extends BaseTask { } for (TableAndColumn next : myConditionalOnExistenceOf) { - JdbcUtils.ColumnType columnType = JdbcUtils.getColumnType(getConnectionProperties(), next.getTable(), next.getColumn()); + JdbcUtils.ColumnType columnType = + JdbcUtils.getColumnType(getConnectionProperties(), next.getTable(), next.getColumn()); if (columnType == null) { - logInfo(ourLog, "Table {} does not have column {} - No action performed", next.getTable(), next.getColumn()); + logInfo( + ourLog, + "Table {} does not have column {} - No action performed", + next.getTable(), + next.getColumn()); return; } } @@ -84,7 +89,6 @@ public class ArbitrarySqlTask extends BaseTask { for (BaseTask next : myTask) { 
next.execute(); } - } public void setBatchSize(int theBatchSize) { @@ -149,7 +153,6 @@ public class ArbitrarySqlTask extends BaseTask { setDescription("Execute raw sql"); } - @Override public void execute() { if (isDryRun()) { diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseColumnCalculatorTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseColumnCalculatorTask.java index 94c051a0f2b..95c6825242b 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseColumnCalculatorTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseColumnCalculatorTask.java @@ -94,7 +94,12 @@ public abstract class BaseColumnCalculatorTask extends BaseTableColumnTask { jdbcTemplate.setMaxRows(100000); String sql = "SELECT * FROM " + getTableName() + " WHERE " + getWhereClause(); - logInfo(ourLog, "Finding up to {} rows in {} that requires calculations, using query: {}", myBatchSize, getTableName(), sql); + logInfo( + ourLog, + "Finding up to {} rows in {} that requires calculations, using query: {}", + myBatchSize, + getTableName(), + sql); jdbcTemplate.query(sql, rch); rch.done(); @@ -116,7 +121,6 @@ public abstract class BaseColumnCalculatorTask extends BaseTableColumnTask { throw new SQLException(Msg.code(69) + e); } } - } } finally { @@ -133,32 +137,35 @@ public abstract class BaseColumnCalculatorTask extends BaseTableColumnTask { LinkedBlockingQueue executorQueue = new LinkedBlockingQueue<>(maximumPoolSize); BasicThreadFactory threadFactory = new BasicThreadFactory.Builder() - .namingPattern("worker-" + "-%d") - .daemon(false) - .priority(Thread.NORM_PRIORITY) - .build(); + .namingPattern("worker-" + "-%d") + .daemon(false) + .priority(Thread.NORM_PRIORITY) + .build(); RejectedExecutionHandler rejectedExecutionHandler = new RejectedExecutionHandler() { @Override public void rejectedExecution(Runnable theRunnable, ThreadPoolExecutor theExecutor) { - logInfo(ourLog, "Note: Executor queue is full ({} elements), waiting for a slot to become available!", executorQueue.size()); + logInfo( + ourLog, + "Note: Executor queue is full ({} elements), waiting for a slot to become available!", + executorQueue.size()); StopWatch sw = new StopWatch(); try { executorQueue.put(theRunnable); } catch (InterruptedException theE) { - throw new RejectedExecutionException(Msg.code(70) + "Task " + theRunnable.toString() + - " rejected from " + theE.toString()); + throw new RejectedExecutionException( + Msg.code(70) + "Task " + theRunnable.toString() + " rejected from " + theE.toString()); } logInfo(ourLog, "Slot become available after {}ms", sw.getMillis()); } }; myExecutor = new ThreadPoolExecutor( - maximumPoolSize, - maximumPoolSize, - 0L, - TimeUnit.MILLISECONDS, - executorQueue, - threadFactory, - rejectedExecutionHandler); + maximumPoolSize, + maximumPoolSize, + 0L, + TimeUnit.MILLISECONDS, + executorQueue, + threadFactory, + rejectedExecutionHandler); } public void setPidColumnName(String thePidColumnName) { @@ -178,9 +185,11 @@ public abstract class BaseColumnCalculatorTask extends BaseTableColumnTask { MandatoryKeyMap nextRowMandatoryKeyMap = new MandatoryKeyMap<>(nextRow); // Apply calculators - for (Map.Entry, Object>> nextCalculatorEntry : myCalculators.entrySet()) { + for (Map.Entry, Object>> nextCalculatorEntry : + myCalculators.entrySet()) { String nextColumn = nextCalculatorEntry.getKey(); - Function, Object> nextCalculator = nextCalculatorEntry.getValue(); + Function, Object> nextCalculator = 
+ nextCalculatorEntry.getValue(); Object value = nextCalculator.apply(nextRowMandatoryKeyMap); newValues.put(nextColumn, value); } diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableColumnTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableColumnTask.java index fed25274c61..c7616bde567 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableColumnTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableColumnTask.java @@ -29,9 +29,10 @@ import java.util.function.Function; public abstract class BaseTableColumnTask extends BaseTableTask { - protected Map, Object>> myCalculators = new HashMap<>(); + protected Map, Object>> myCalculators = + new HashMap<>(); protected String myColumnName; - //If a concrete class decides to, they can define a custom WHERE clause for the task. + // If a concrete class decides to, they can define a custom WHERE clause for the task. protected String myWhereClause; public BaseTableColumnTask(String theProductVersion, String theSchemaVersion) { @@ -78,7 +79,9 @@ public abstract class BaseTableColumnTask extends BaseTableTask { theBuilder.append(myColumnName); } - public BaseTableColumnTask addCalculator(String theColumnName, Function, Object> theConsumer) { + public BaseTableColumnTask addCalculator( + String theColumnName, + Function, Object> theConsumer) { Validate.isTrue(myCalculators.containsKey(theColumnName) == false); myCalculators.put(theColumnName, theConsumer); return this; diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableColumnTypeTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableColumnTypeTask.java index d21f6c10d2b..50abfa63d33 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableColumnTypeTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableColumnTypeTask.java @@ -33,7 +33,6 @@ public abstract class BaseTableColumnTypeTask extends BaseTableColumnTask { /** * Constructor */ - public BaseTableColumnTypeTask(String theProductVersion, String theSchemaVersion) { super(theProductVersion, theSchemaVersion); } @@ -54,7 +53,8 @@ public abstract class BaseTableColumnTypeTask extends BaseTableColumnTask { Validate.notNull(myNullable); if (myColumnType == ColumnTypeEnum.STRING) { - Validate.notNull(myColumnLength, "No length specified for " + ColumnTypeEnum.STRING + " column " + getColumnName()); + Validate.notNull( + myColumnLength, "No length specified for " + ColumnTypeEnum.STRING + " column " + getColumnName()); } else { Validate.isTrue(myColumnLength == null); } diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableTask.java index 0f963fd05b0..049ed847d35 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableTask.java @@ -28,7 +28,6 @@ import java.util.Objects; public abstract class BaseTableTask extends BaseTask { private String myTableName; - public BaseTableTask(String theProductVersion, String theSchemaVersion) { super(theProductVersion, theSchemaVersion); } @@ -55,7 +54,9 @@ public abstract class BaseTableTask extends BaseTask { } protected String getSqlType(ColumnTypeEnum theColumnType, Long 
theColumnLength) { - String retVal = ColumnTypeToDriverTypeToSqlType.getColumnTypeToDriverTypeToSqlType().get(theColumnType).get(getDriverType()); + String retVal = ColumnTypeToDriverTypeToSqlType.getColumnTypeToDriverTypeToSqlType() + .get(theColumnType) + .get(getDriverType()); Objects.requireNonNull(retVal); if (theColumnType == ColumnTypeEnum.STRING) { diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTask.java index bf80883d0aa..cb7edd5ceb2 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTask.java @@ -211,12 +211,14 @@ public abstract class BaseTask { ourLog.debug("Error was: {}", e.getMessage(), e); return 0; } else { - throw new HapiMigrationException(Msg.code(61) + "Failed during task " + getMigrationVersion() + ": " + e, e); + throw new HapiMigrationException( + Msg.code(61) + "Failed during task " + getMigrationVersion() + ": " + e, e); } } } - protected void captureExecutedStatement(String theTableName, @Language("SQL") String theSql, Object... theArguments) { + protected void captureExecutedStatement( + String theTableName, @Language("SQL") String theSql, Object... theArguments) { myExecutedStatements.add(new ExecutedStatement(theTableName, theSql, theArguments)); } @@ -289,7 +291,8 @@ public abstract class BaseTask { public void validateVersion() { Matcher matcher = versionPattern.matcher(mySchemaVersion); if (!matcher.matches()) { - throw new IllegalStateException(Msg.code(62) + "The version " + mySchemaVersion + " does not match the expected pattern " + MIGRATION_VERSION_PATTERN); + throw new IllegalStateException(Msg.code(62) + "The version " + mySchemaVersion + + " does not match the expected pattern " + MIGRATION_VERSION_PATTERN); } } diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateHashesTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateHashesTask.java index 387fb04d432..3450298a582 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateHashesTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateHashesTask.java @@ -41,7 +41,8 @@ public class CalculateHashesTask extends BaseColumnCalculatorTask { try { Set tableNames = JdbcUtils.getTableNames(getConnectionProperties()); boolean shouldSkip = tableNames.contains("HFJ_RES_REINDEX_JOB"); - // This table was added shortly after hash indexes were added, so it is a reasonable indicator for whether this + // This table was added shortly after hash indexes were added, so it is a reasonable indicator for whether + // this // migration has already been run if (shouldSkip) { logInfo(ourLog, "The table HFJ_RES_REINDEX_JOB already exists. 
Skipping calculate hashes task."); diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateOrdinalDatesTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateOrdinalDatesTask.java index 0487c86a4f6..54e193cbec8 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateOrdinalDatesTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateOrdinalDatesTask.java @@ -25,8 +25,10 @@ public class CalculateOrdinalDatesTask extends BaseColumnCalculatorTask { public CalculateOrdinalDatesTask(VersionEnum theRelease, String theVersion) { super(theRelease, theVersion); - setDescription("Calculate SP_LOW_VALUE_DATE_ORDINAL and SP_HIGH_VALUE_DATE_ORDINAL based on existing SP_VALUE_LOW and SP_VALUE_HIGH date values in Date Search Params"); - setWhereClause("(SP_VALUE_LOW_DATE_ORDINAL IS NULL AND SP_VALUE_LOW IS NOT NULL) OR (SP_VALUE_HIGH_DATE_ORDINAL IS NULL AND SP_VALUE_HIGH IS NOT NULL)"); + setDescription( + "Calculate SP_LOW_VALUE_DATE_ORDINAL and SP_HIGH_VALUE_DATE_ORDINAL based on existing SP_VALUE_LOW and SP_VALUE_HIGH date values in Date Search Params"); + setWhereClause( + "(SP_VALUE_LOW_DATE_ORDINAL IS NULL AND SP_VALUE_LOW IS NOT NULL) OR (SP_VALUE_HIGH_DATE_ORDINAL IS NULL AND SP_VALUE_HIGH IS NOT NULL)"); setPidColumnName("SP_ID"); } diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ColumnTypeEnum.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ColumnTypeEnum.java index 5c1a0290dce..d78223f7773 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ColumnTypeEnum.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ColumnTypeEnum.java @@ -20,7 +20,6 @@ package ca.uhn.fhir.jpa.migrate.taskdef; public enum ColumnTypeEnum { - LONG, STRING, DATE_ONLY, diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ColumnTypeToDriverTypeToSqlType.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ColumnTypeToDriverTypeToSqlType.java index cf81cecde76..03b222718cd 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ColumnTypeToDriverTypeToSqlType.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ColumnTypeToDriverTypeToSqlType.java @@ -28,6 +28,7 @@ import java.util.Map; public final class ColumnTypeToDriverTypeToSqlType { private ColumnTypeToDriverTypeToSqlType() {} + static Map> myColumnTypeToDriverTypeToSqlType = new HashMap<>(); static { @@ -116,7 +117,10 @@ public final class ColumnTypeToDriverTypeToSqlType { setColumnType(ColumnTypeEnum.CLOB, DriverTypeEnum.MARIADB_10_1, "longtext"); setColumnType(ColumnTypeEnum.CLOB, DriverTypeEnum.MYSQL_5_7, "longtext"); setColumnType(ColumnTypeEnum.CLOB, DriverTypeEnum.ORACLE_12C, "clob"); - setColumnType(ColumnTypeEnum.CLOB, DriverTypeEnum.POSTGRES_9_4, "oid"); // the PG driver will write oid into a `text` column + setColumnType( + ColumnTypeEnum.CLOB, + DriverTypeEnum.POSTGRES_9_4, + "oid"); // the PG driver will write oid into a `text` column setColumnType(ColumnTypeEnum.CLOB, DriverTypeEnum.MSSQL_2012, "varchar(MAX)"); setColumnType(ColumnTypeEnum.TEXT, DriverTypeEnum.H2_EMBEDDED, "character varying"); @@ -132,8 +136,10 @@ public final class ColumnTypeToDriverTypeToSqlType { return myColumnTypeToDriverTypeToSqlType; } - private static void setColumnType(ColumnTypeEnum theColumnType, DriverTypeEnum theDriverType, 
String theColumnTypeSql) { - Map columnSqlType = myColumnTypeToDriverTypeToSqlType.computeIfAbsent(theColumnType, k -> new HashMap<>()); + private static void setColumnType( + ColumnTypeEnum theColumnType, DriverTypeEnum theDriverType, String theColumnTypeSql) { + Map columnSqlType = + myColumnTypeToDriverTypeToSqlType.computeIfAbsent(theColumnType, k -> new HashMap<>()); if (columnSqlType.containsKey(theDriverType)) { throw new IllegalStateException(Msg.code(65) + "Duplicate key: " + theDriverType); } diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/DropColumnTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/DropColumnTask.java index 2f54b1d52f7..3ee5b5fbfbe 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/DropColumnTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/DropColumnTask.java @@ -55,18 +55,30 @@ public class DropColumnTask extends BaseTableColumnTask { public void doExecute() throws SQLException { Set columnNames = JdbcUtils.getColumnNames(getConnectionProperties(), getTableName()); if (!columnNames.contains(getColumnName())) { - logInfo(ourLog, "Column {} does not exist on table {} - No action performed", getColumnName(), getTableName()); + logInfo( + ourLog, + "Column {} does not exist on table {} - No action performed", + getColumnName(), + getTableName()); return; } - if (getDriverType().equals(DriverTypeEnum.MYSQL_5_7) || getDriverType().equals(DriverTypeEnum.MARIADB_10_1) - || getDriverType().equals(DriverTypeEnum.MSSQL_2012)) { - // Some DBs such as MYSQL and Maria DB require that foreign keys depending on the column be dropped before the column itself is dropped. - logInfo(ourLog, "Dropping any foreign keys on table {} depending on column {}", getTableName(), getColumnName()); - Set foreignKeys = JdbcUtils.getForeignKeysForColumn(getConnectionProperties(), getColumnName(), getTableName()); + if (getDriverType().equals(DriverTypeEnum.MYSQL_5_7) + || getDriverType().equals(DriverTypeEnum.MARIADB_10_1) + || getDriverType().equals(DriverTypeEnum.MSSQL_2012)) { + // Some DBs such as MYSQL and Maria DB require that foreign keys depending on the column be dropped before + // the column itself is dropped. 
+ logInfo( + ourLog, + "Dropping any foreign keys on table {} depending on column {}", + getTableName(), + getColumnName()); + Set foreignKeys = + JdbcUtils.getForeignKeysForColumn(getConnectionProperties(), getColumnName(), getTableName()); if (foreignKeys != null) { for (String foreignKey : foreignKeys) { - List dropFkSqls = DropForeignKeyTask.generateSql(getTableName(), foreignKey, getDriverType()); + List dropFkSqls = + DropForeignKeyTask.generateSql(getTableName(), foreignKey, getDriverType()); for (String dropFkSql : dropFkSqls) { executeSql(getTableName(), dropFkSql); } @@ -80,6 +92,4 @@ public class DropColumnTask extends BaseTableColumnTask { logInfo(ourLog, "Dropping column {} on table {}", getColumnName(), getTableName()); executeSql(getTableName(), sql); } - - } diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/DropForeignKeyTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/DropForeignKeyTask.java index 44476cd1c70..b8f99f221ef 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/DropForeignKeyTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/DropForeignKeyTask.java @@ -28,11 +28,11 @@ import org.apache.commons.lang3.builder.HashCodeBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; import java.util.Set; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -86,7 +86,6 @@ public class DropForeignKeyTask extends BaseTableTask { Validate.isTrue(isNotBlank(myConstraintName)); Validate.isTrue(isNotBlank(myParentTableName)); setDescription("Drop foreign key " + myConstraintName + " from table " + getTableName()); - } @Override @@ -103,7 +102,6 @@ public class DropForeignKeyTask extends BaseTableTask { for (String next : sqls) { executeSql(getTableName(), next); } - } @Override diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/DropIdGeneratorTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/DropIdGeneratorTask.java index 0f5a56d29fe..73756bbd5c6 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/DropIdGeneratorTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/DropIdGeneratorTask.java @@ -65,7 +65,6 @@ public class DropIdGeneratorTask extends BaseTask { String creationSql = "drop table " + myGeneratorName; executeSql(myGeneratorName, creationSql); - } break; case DERBY_EMBEDDED: @@ -89,9 +88,7 @@ public class DropIdGeneratorTask extends BaseTask { } if (isNotBlank(sql)) { - Set sequenceNames = - JdbcUtils.getSequenceNames(getConnectionProperties()) - .stream() + Set sequenceNames = JdbcUtils.getSequenceNames(getConnectionProperties()).stream() .map(String::toLowerCase) .collect(Collectors.toSet()); ourLog.debug("Currently have sequences: {}", sequenceNames); @@ -102,7 +99,6 @@ public class DropIdGeneratorTask extends BaseTask { executeSql(myGeneratorName, sql); } - } @Override diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/DropIndexTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/DropIndexTask.java index 12fc29f8398..8689efaec96 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/DropIndexTask.java +++ 
b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/DropIndexTask.java @@ -31,14 +31,14 @@ import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.RowMapperResultSetExtractor; import org.springframework.jdbc.core.SingleColumnRowMapper; -import javax.annotation.Nonnull; -import javax.sql.DataSource; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Objects; import java.util.Set; +import javax.annotation.Nonnull; +import javax.sql.DataSource; public class DropIndexTask extends BaseTableTask { @@ -83,14 +83,16 @@ public class DropIndexTask extends BaseTableTask { sql.add("alter table " + getTableName() + " drop constraint " + myIndexName); break; case ORACLE_12C: - sql.add("drop index " + myIndexName + (myOnline?" ONLINE":"")); + sql.add("drop index " + myIndexName + (myOnline ? " ONLINE" : "")); break; case MSSQL_2012: - sql.add("drop index " + myIndexName + " on " + getTableName() + (myOnline?" WITH (ONLINE = ON)":"")); + sql.add("drop index " + myIndexName + " on " + getTableName() + + (myOnline ? " WITH (ONLINE = ON)" : "")); break; case POSTGRES_9_4: sql.add("alter table " + getTableName() + " drop constraint if exists " + myIndexName + " cascade"); - sql.add("drop index " + (myOnline?"CONCURRENTLY ":"") + "if exists " + myIndexName + " cascade"); + sql.add("drop index " + (myOnline ? "CONCURRENTLY " : "") + "if exists " + myIndexName + + " cascade"); setTransactional(!myOnline); break; case COCKROACHDB_21_1: @@ -105,7 +107,7 @@ public class DropIndexTask extends BaseTableTask { sql.add("alter table " + getTableName() + " drop index " + myIndexName); break; case POSTGRES_9_4: - sql.add("drop index " + (myOnline?"CONCURRENTLY ":"") + myIndexName); + sql.add("drop index " + (myOnline ? "CONCURRENTLY " : "") + myIndexName); setTransactional(!myOnline); break; case DERBY_EMBEDDED: @@ -113,10 +115,10 @@ public class DropIndexTask extends BaseTableTask { sql.add("drop index " + myIndexName); break; case ORACLE_12C: - sql.add("drop index " + myIndexName + (myOnline?" ONLINE":"")); + sql.add("drop index " + myIndexName + (myOnline ? " ONLINE" : "")); break; case MSSQL_2012: - sql.add("drop index " + getTableName() + "." + myIndexName ); + sql.add("drop index " + getTableName() + "." + myIndexName); break; case COCKROACHDB_21_1: sql.add("drop index " + getTableName() + "@" + myIndexName); @@ -152,21 +154,33 @@ public class DropIndexTask extends BaseTableTask { */ if (getDriverType() == DriverTypeEnum.H2_EMBEDDED) { - @Language("SQL") String findConstraintSql = "SELECT DISTINCT constraint_name FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS WHERE constraint_name = ? AND table_name = ?"; - @Language("SQL") String dropConstraintSql = "ALTER TABLE " + getTableName() + " DROP CONSTRAINT ?"; + @Language("SQL") + String findConstraintSql = + "SELECT DISTINCT constraint_name FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS WHERE constraint_name = ? AND table_name = ?"; + @Language("SQL") + String dropConstraintSql = "ALTER TABLE " + getTableName() + " DROP CONSTRAINT ?"; findAndDropConstraint(findConstraintSql, dropConstraintSql); } else if (getDriverType() == DriverTypeEnum.DERBY_EMBEDDED) { - @Language("SQL") String findConstraintSql = "SELECT c.constraintname FROM sys.sysconstraints c, sys.systables t WHERE c.tableid = t.tableid AND c.constraintname = ? 
AND t.tablename = ?"; - @Language("SQL") String dropConstraintSql = "ALTER TABLE " + getTableName() + " DROP CONSTRAINT ?"; + @Language("SQL") + String findConstraintSql = + "SELECT c.constraintname FROM sys.sysconstraints c, sys.systables t WHERE c.tableid = t.tableid AND c.constraintname = ? AND t.tablename = ?"; + @Language("SQL") + String dropConstraintSql = "ALTER TABLE " + getTableName() + " DROP CONSTRAINT ?"; findAndDropConstraint(findConstraintSql, dropConstraintSql); } else if (getDriverType() == DriverTypeEnum.ORACLE_12C) { - @Language("SQL") String findConstraintSql = "SELECT constraint_name FROM user_constraints WHERE constraint_name = ? AND table_name = ?"; - @Language("SQL") String dropConstraintSql = "ALTER TABLE " + getTableName() + " DROP CONSTRAINT ?"; + @Language("SQL") + String findConstraintSql = + "SELECT constraint_name FROM user_constraints WHERE constraint_name = ? AND table_name = ?"; + @Language("SQL") + String dropConstraintSql = "ALTER TABLE " + getTableName() + " DROP CONSTRAINT ?"; findAndDropConstraint(findConstraintSql, dropConstraintSql); } else if (getDriverType() == DriverTypeEnum.MSSQL_2012) { // Legacy deletion for SQL Server unique indexes - @Language("SQL") String findConstraintSql = "SELECT tc.CONSTRAINT_NAME FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS tc WHERE tc.CONSTRAINT_NAME = ? AND tc.TABLE_NAME = ?"; - @Language("SQL") String dropConstraintSql = "ALTER TABLE " + getTableName() + " DROP CONSTRAINT ?"; + @Language("SQL") + String findConstraintSql = + "SELECT tc.CONSTRAINT_NAME FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS tc WHERE tc.CONSTRAINT_NAME = ? AND tc.TABLE_NAME = ?"; + @Language("SQL") + String dropConstraintSql = "ALTER TABLE " + getTableName() + " DROP CONSTRAINT ?"; findAndDropConstraint(findConstraintSql, dropConstraintSql); } @@ -193,8 +207,10 @@ public class DropIndexTask extends BaseTableTask { DataSource dataSource = Objects.requireNonNull(getConnectionProperties().getDataSource()); getConnectionProperties().getTxTemplate().executeWithoutResult(t -> { JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); - RowMapperResultSetExtractor resultSetExtractor = new RowMapperResultSetExtractor<>(new SingleColumnRowMapper<>(String.class)); - List outcome = jdbcTemplate.query(theFindConstraintSql, new Object[]{myIndexName, getTableName()}, resultSetExtractor); + RowMapperResultSetExtractor resultSetExtractor = + new RowMapperResultSetExtractor<>(new SingleColumnRowMapper<>(String.class)); + List outcome = jdbcTemplate.query( + theFindConstraintSql, new Object[] {myIndexName, getTableName()}, resultSetExtractor); assert outcome != null; for (String next : outcome) { String sql = theDropConstraintSql.replace("?", next); diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/DropTableTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/DropTableTask.java index 8a8ccb27c7b..823d31cc306 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/DropTableTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/DropTableTask.java @@ -64,14 +64,12 @@ public class DropTableTask extends BaseTableTask { DropIndexTask theIndexTask = new DropIndexTask(getProductVersion(), getSchemaVersion()); theIndexTask - .setTableName(getTableName()) - .setConnectionProperties(getConnectionProperties()) - .setDriverType(getDriverType()) - .setDryRun(isDryRun()); + .setTableName(getTableName()) + .setConnectionProperties(getConnectionProperties()) + 
.setDriverType(getDriverType()) + .setDryRun(isDryRun()); for (String nextIndex : indexNames) { - theIndexTask - .setIndexName(nextIndex) - .execute(); + theIndexTask.setIndexName(nextIndex).execute(); } logInfo(ourLog, "Dropping table: {}", getTableName()); @@ -79,8 +77,5 @@ public class DropTableTask extends BaseTableTask { @Language("SQL") String sql = "DROP TABLE " + getTableName(); executeSql(getTableName(), sql); - } - - } diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ExecuteRawSqlTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ExecuteRawSqlTask.java index 3cb9c2a4697..6a3a5cdb930 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ExecuteRawSqlTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ExecuteRawSqlTask.java @@ -83,7 +83,6 @@ public class ExecuteRawSqlTask extends BaseTask { for (String nextSql : sqlStatements) { executeSql(null, nextSql); } - } @Override diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ForeignKeyContainer.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ForeignKeyContainer.java index f4bf990304d..b0846ee464c 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ForeignKeyContainer.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ForeignKeyContainer.java @@ -47,11 +47,10 @@ public class ForeignKeyContainer { private String myParentTableColumnName; public ForeignKeyContainer( - String theColumnName, - ColumnTypeEnum theColumnTypeEnum, - String theParentTableName, - String theParentTableColumnName - ) { + String theColumnName, + ColumnTypeEnum theColumnTypeEnum, + String theParentTableName, + String theParentTableColumnName) { myColumnName = theColumnName; myColumnTypeEnum = theColumnTypeEnum; myParentTableName = theParentTableName; @@ -90,37 +89,22 @@ public class ForeignKeyContainer { myColumnTypeEnum = theColumnTypeEnum; } - public String generateSQL( - @Nonnull DriverTypeEnum theDriverTypeEnum, - boolean thePrettyPrint - ) { + public String generateSQL(@Nonnull DriverTypeEnum theDriverTypeEnum, boolean thePrettyPrint) { switch (theDriverTypeEnum) { case MYSQL_5_7: return String.format( - "FOREIGN KEY (%s) REFERENCES %s(%s)", - myColumnName, - myParentTableName, - myParentTableColumnName - ); + "FOREIGN KEY (%s) REFERENCES %s(%s)", myColumnName, myParentTableName, myParentTableColumnName); case MSSQL_2012: case ORACLE_12C: return String.format( - "%s %s FOREIGN KEY REFERENCES %s(%s)", - myColumnName, - myColumnTypeEnum.name(), - myParentTableName, - myParentTableColumnName - ); + "%s %s FOREIGN KEY REFERENCES %s(%s)", + myColumnName, myColumnTypeEnum.name(), myParentTableName, myParentTableColumnName); case POSTGRES_9_4: return String.format( - "FOREIGN KEY(%s) REFERENCES %s(%s)", - myColumnName, - myParentTableName, - myParentTableColumnName - ); + "FOREIGN KEY(%s) REFERENCES %s(%s)", myColumnName, myParentTableName, myParentTableColumnName); default: throw new UnsupportedOperationException( - Msg.code(2232) + " SQL Engine " + theDriverTypeEnum.name() + " not supported for foreign key!"); + Msg.code(2232) + " SQL Engine " + theDriverTypeEnum.name() + " not supported for foreign key!"); } } } diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/InitializeSchemaTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/InitializeSchemaTask.java index 
3470ab8b990..f5768135b34 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/InitializeSchemaTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/InitializeSchemaTask.java @@ -37,7 +37,10 @@ public class InitializeSchemaTask extends BaseTask { private final ISchemaInitializationProvider mySchemaInitializationProvider; private boolean myInitializedSchema; - public InitializeSchemaTask(String theProductVersion, String theSchemaVersion, ISchemaInitializationProvider theSchemaInitializationProvider) { + public InitializeSchemaTask( + String theProductVersion, + String theSchemaVersion, + ISchemaInitializationProvider theSchemaInitializationProvider) { super(theProductVersion, theSchemaVersion); mySchemaInitializationProvider = theSchemaInitializationProvider; setDescription(DESCRIPTION_PREFIX + mySchemaInitializationProvider.getSchemaDescription()); @@ -60,11 +63,19 @@ public class InitializeSchemaTask extends BaseTask { Set tableNames = JdbcUtils.getTableNames(getConnectionProperties()); String schemaExistsIndicatorTable = mySchemaInitializationProvider.getSchemaExistsIndicatorTable(); if (tableNames.contains(schemaExistsIndicatorTable)) { - logInfo(ourLog, "The table {} already exists. Skipping schema initialization for {}", schemaExistsIndicatorTable, driverType); + logInfo( + ourLog, + "The table {} already exists. Skipping schema initialization for {}", + schemaExistsIndicatorTable, + driverType); return; } - logInfo(ourLog, "Initializing {} schema for {}", driverType, mySchemaInitializationProvider.getSchemaDescription()); + logInfo( + ourLog, + "Initializing {} schema for {}", + driverType, + mySchemaInitializationProvider.getSchemaDescription()); List sqlStatements = mySchemaInitializationProvider.getSqlStatements(driverType); @@ -76,7 +87,11 @@ public class InitializeSchemaTask extends BaseTask { myInitializedSchema = true; } - logInfo(ourLog, "{} schema for {} initialized successfully", driverType, mySchemaInitializationProvider.getSchemaDescription()); + logInfo( + ourLog, + "{} schema for {} initialized successfully", + driverType, + mySchemaInitializationProvider.getSchemaDescription()); } @Override diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/MetadataSource.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/MetadataSource.java index f232ab3c694..3fa8b996a3f 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/MetadataSource.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/MetadataSource.java @@ -56,9 +56,13 @@ public class MetadataSource { private String getEdition(DriverTypeEnum.ConnectionProperties theConnectionProperties) { final String result; if (theConnectionProperties.getDriverType() == DriverTypeEnum.MSSQL_2012) { - result = theConnectionProperties.newJdbcTemplate().queryForObject("SELECT SERVERPROPERTY ('edition')", String.class); + result = theConnectionProperties + .newJdbcTemplate() + .queryForObject("SELECT SERVERPROPERTY ('edition')", String.class); } else if (theConnectionProperties.getDriverType() == DriverTypeEnum.ORACLE_12C) { - result = theConnectionProperties.newJdbcTemplate().queryForObject("SELECT BANNER FROM v$version WHERE banner LIKE 'Oracle%'", String.class); + result = theConnectionProperties + .newJdbcTemplate() + .queryForObject("SELECT BANNER FROM v$version WHERE banner LIKE 'Oracle%'", String.class); } else { throw new UnsupportedOperationException(Msg.code(2084) + "We only 
know about MSSQL editions."); } diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/MigratePostgresTextClobToBinaryClobTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/MigratePostgresTextClobToBinaryClobTask.java index 507c64db7a3..cea94dc998d 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/MigratePostgresTextClobToBinaryClobTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/MigratePostgresTextClobToBinaryClobTask.java @@ -25,7 +25,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.sql.SQLException; -import java.util.Locale; public class MigratePostgresTextClobToBinaryClobTask extends BaseTableColumnTask { private static final Logger ourLog = LoggerFactory.getLogger(MigratePostgresTextClobToBinaryClobTask.class); @@ -40,7 +39,8 @@ public class MigratePostgresTextClobToBinaryClobTask extends BaseTableColumnTask @Override public void validate() { super.validate(); - setDescription("Migrate text clob " + getColumnName() + " from table " + getTableName() + " (only affects Postgresql)"); + setDescription("Migrate text clob " + getColumnName() + " from table " + getTableName() + + " (only affects Postgresql)"); } @Override @@ -62,9 +62,11 @@ public class MigratePostgresTextClobToBinaryClobTask extends BaseTableColumnTask columnName = columnName.toLowerCase(); executeSql(tableName, "alter table " + tableName + " add column " + tempColumnName + " oid"); - executeSql(tableName, "update " + tableName + " set " + tempColumnName + " = cast(" + columnName + " as oid) where " + columnName + " is not null"); + executeSql( + tableName, + "update " + tableName + " set " + tempColumnName + " = cast(" + columnName + " as oid) where " + + columnName + " is not null"); executeSql(tableName, "alter table " + tableName + " drop column " + columnName); executeSql(tableName, "alter table " + tableName + " rename column " + tempColumnName + " to " + columnName); - } } diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ModifyColumnTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ModifyColumnTask.java index 149b1b32a28..7cb486079d8 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ModifyColumnTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ModifyColumnTask.java @@ -54,7 +54,11 @@ public class ModifyColumnTask extends BaseTableColumnTypeTask { Set columnNames = JdbcUtils.getColumnNames(getConnectionProperties(), getTableName()); if (!columnNames.contains(getColumnName())) { - logInfo(ourLog, "Column {} doesn't exist on table {} - No action performed", getColumnName(), getTableName()); + logInfo( + ourLog, + "Column {} doesn't exist on table {} - No action performed", + getColumnName(), + getTableName()); return; } @@ -83,7 +87,13 @@ public class ModifyColumnTask extends BaseTableColumnTypeTask { boolean alreadyOfCorrectType = existingType.equals(getColumnType(), taskColumnLength); boolean alreadyCorrectNullable = isNullable() == nullable; if (alreadyOfCorrectType && alreadyCorrectNullable) { - logInfo(ourLog, "Column {} on table {} is already of type {} and has nullable {} - No action performed", getColumnName(), getTableName(), existingType, nullable); + logInfo( + ourLog, + "Column {} on table {} is already of type {} and has nullable {} - No action performed", + getColumnName(), + getTableName(), + existingType, + nullable); return; } @@ -95,7 
+105,8 @@ public class ModifyColumnTask extends BaseTableColumnTypeTask { switch (getDriverType()) { case DERBY_EMBEDDED: if (!alreadyOfCorrectType) { - sql = "alter table " + getTableName() + " alter column " + getColumnName() + " set data type " + type; + sql = "alter table " + getTableName() + " alter column " + getColumnName() + " set data type " + + type; } if (!alreadyCorrectNullable) { sqlNotNull = "alter table " + getTableName() + " alter column " + getColumnName() + notNull; @@ -113,15 +124,18 @@ public class ModifyColumnTask extends BaseTableColumnTypeTask { } if (!alreadyCorrectNullable) { if (isNullable()) { - sqlNotNull = "alter table " + getTableName() + " alter column " + getColumnName() + " drop not null"; + sqlNotNull = + "alter table " + getTableName() + " alter column " + getColumnName() + " drop not null"; } else { - sqlNotNull = "alter table " + getTableName() + " alter column " + getColumnName() + " set not null"; + sqlNotNull = + "alter table " + getTableName() + " alter column " + getColumnName() + " set not null"; } } break; case ORACLE_12C: String oracleNullableStmt = !alreadyCorrectNullable ? notNull : ""; - sql = "alter table " + getTableName() + " modify ( " + getColumnName() + " " + type + oracleNullableStmt + " )"; + sql = "alter table " + getTableName() + " modify ( " + getColumnName() + " " + type + oracleNullableStmt + + " )"; break; case MSSQL_2012: sql = "alter table " + getTableName() + " alter column " + getColumnName() + " " + type + notNull; @@ -132,9 +146,11 @@ public class ModifyColumnTask extends BaseTableColumnTypeTask { } if (!alreadyCorrectNullable) { if (isNullable()) { - sqlNotNull = "alter table " + getTableName() + " alter column " + getColumnName() + " drop not null"; + sqlNotNull = + "alter table " + getTableName() + " alter column " + getColumnName() + " drop not null"; } else { - sqlNotNull = "alter table " + getTableName() + " alter column " + getColumnName() + " set not null"; + sqlNotNull = + "alter table " + getTableName() + " alter column " + getColumnName() + " set not null"; } } break; @@ -162,25 +178,33 @@ public class ModifyColumnTask extends BaseTableColumnTypeTask { // Oracle sometimes stores the NULLABLE property in a Constraint, so override the result if this is an Oracle DB switch (getDriverType()) { case ORACLE_12C: - @Language("SQL") String findNullableConstraintSql = - "SELECT acc.owner, acc.table_name, acc.column_name, search_condition_vc " + - "FROM all_cons_columns acc, user_constraints uc " + - "WHERE acc.constraint_name = uc.constraint_name " + - "AND acc.table_name = uc.table_name " + - "AND uc.constraint_type = ? " + - "AND acc.table_name = ? " + - "AND acc.column_name = ? " + - "AND search_condition_vc = ? "; + @Language("SQL") + String findNullableConstraintSql = + "SELECT acc.owner, acc.table_name, acc.column_name, search_condition_vc " + + "FROM all_cons_columns acc, user_constraints uc " + + "WHERE acc.constraint_name = uc.constraint_name " + + "AND acc.table_name = uc.table_name " + + "AND uc.constraint_type = ? " + + "AND acc.table_name = ? " + + "AND acc.column_name = ? " + + "AND search_condition_vc = ? 
"; String[] params = new String[4]; params[0] = "C"; params[1] = tableName.toUpperCase(); params[2] = columnName.toUpperCase(); params[3] = "\"" + columnName.toUpperCase() + "\" IS NOT NULL"; - List> queryResults = getConnectionProperties().getTxTemplate().execute(t -> - getConnectionProperties().newJdbcTemplate().query(findNullableConstraintSql, params, new ColumnMapRowMapper())); - // If this query returns a row then the existence of that row indicates that a NOT NULL constraint exists + List> queryResults = getConnectionProperties() + .getTxTemplate() + .execute(t -> getConnectionProperties() + .newJdbcTemplate() + .query(findNullableConstraintSql, params, new ColumnMapRowMapper())); + // If this query returns a row then the existence of that row indicates that a NOT NULL constraint + // exists // on this Column and we must override whatever result was previously calculated and set it to false - if (queryResults != null && queryResults.size() > 0 && queryResults.get(0) != null && !queryResults.get(0).isEmpty()) { + if (queryResults != null + && queryResults.size() > 0 + && queryResults.get(0) != null + && !queryResults.get(0).isEmpty()) { result = false; } break; diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameColumnTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameColumnTask.java index af3bbc26aeb..39f6b941b88 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameColumnTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameColumnTask.java @@ -84,16 +84,26 @@ public class RenameColumnTask extends BaseTableTask { return jdbcTemplate.query(sql, new ColumnMapRowMapper()).size(); }); if (rowsWithData != null && rowsWithData > 0) { - throw new SQLException(Msg.code(54) + "Can not rename " + getTableName() + "." + myOldName + " to " + myNewName + " because both columns exist and data exists in " + myNewName); + throw new SQLException(Msg.code(54) + "Can not rename " + getTableName() + "." + myOldName + " to " + + myNewName + " because both columns exist and data exists in " + myNewName); } if (getDriverType().equals(DriverTypeEnum.MYSQL_5_7) || mySimulateMySQLForTest) { - // Some DBs such as MYSQL require that foreign keys depending on the column be explicitly dropped before the column itself is dropped. - logInfo(ourLog, "Table {} has columns {} and {} - Going to drop any foreign keys depending on column {} before renaming", getTableName(), myOldName, myNewName, myNewName); - Set foreignKeys = JdbcUtils.getForeignKeysForColumn(getConnectionProperties(), myNewName, getTableName()); + // Some DBs such as MYSQL require that foreign keys depending on the column be explicitly dropped + // before the column itself is dropped. 
+ logInfo( + ourLog, + "Table {} has columns {} and {} - Going to drop any foreign keys depending on column {} before renaming", + getTableName(), + myOldName, + myNewName, + myNewName); + Set foreignKeys = + JdbcUtils.getForeignKeysForColumn(getConnectionProperties(), myNewName, getTableName()); if (foreignKeys != null) { for (String foreignKey : foreignKeys) { - List dropFkSqls = DropForeignKeyTask.generateSql(getTableName(), foreignKey, getDriverType()); + List dropFkSqls = + DropForeignKeyTask.generateSql(getTableName(), foreignKey, getDriverType()); for (String dropFkSql : dropFkSqls) { executeSql(getTableName(), dropFkSql); } @@ -101,17 +111,25 @@ public class RenameColumnTask extends BaseTableTask { } } - logInfo(ourLog, "Table {} has columns {} and {} - Going to drop {} before renaming", getTableName(), myOldName, myNewName, myNewName); + logInfo( + ourLog, + "Table {} has columns {} and {} - Going to drop {} before renaming", + getTableName(), + myOldName, + myNewName, + myNewName); String sql = DropColumnTask.createSql(getTableName(), myNewName); executeSql(getTableName(), sql); } else { - throw new SQLException(Msg.code(55) + "Can not rename " + getTableName() + "." + myOldName + " to " + myNewName + " because both columns exist!"); + throw new SQLException(Msg.code(55) + "Can not rename " + getTableName() + "." + myOldName + " to " + + myNewName + " because both columns exist!"); } } else if (!haveOldName && !haveNewName) { if (isOkayIfNeitherColumnExists()) { return; } - throw new SQLException(Msg.code(56) + "Can not rename " + getTableName() + "." + myOldName + " to " + myNewName + " because neither column exists!"); + throw new SQLException(Msg.code(56) + "Can not rename " + getTableName() + "." + myOldName + " to " + + myNewName + " because neither column exists!"); } else if (haveNewName) { logInfo(ourLog, "Column {} already exists on table {} - No action performed", myNewName, getTableName()); return; @@ -120,9 +138,12 @@ public class RenameColumnTask extends BaseTableTask { String existingType; String notNull; try { - JdbcUtils.ColumnType existingColumnType = JdbcUtils.getColumnType(getConnectionProperties(), getTableName(), myOldName); + JdbcUtils.ColumnType existingColumnType = + JdbcUtils.getColumnType(getConnectionProperties(), getTableName(), myOldName); existingType = getSqlType(existingColumnType.getColumnTypeEnum(), existingColumnType.getLength()); - notNull = JdbcUtils.isColumnNullable(getConnectionProperties(), getTableName(), myOldName) ? " null " : " not null"; + notNull = JdbcUtils.isColumnNullable(getConnectionProperties(), getTableName(), myOldName) + ? 
" null " + : " not null"; } catch (SQLException e) { throw new InternalErrorException(Msg.code(57) + e); } @@ -130,7 +151,6 @@ public class RenameColumnTask extends BaseTableTask { logInfo(ourLog, "Renaming column {} on table {} to {}", myOldName, getTableName(), myNewName); executeSql(getTableName(), sql); - } String buildRenameColumnSqlStatement(String theExistingType, String theExistingNotNull) { @@ -142,7 +162,8 @@ public class RenameColumnTask extends BaseTableTask { case MYSQL_5_7: case MARIADB_10_1: // Quote the column names as "SYSTEM" is a reserved word in MySQL - sql = "ALTER TABLE " + getTableName() + " CHANGE COLUMN `" + myOldName + "` `" + myNewName + "` " + theExistingType + " " + theExistingNotNull; + sql = "ALTER TABLE " + getTableName() + " CHANGE COLUMN `" + myOldName + "` `" + myNewName + "` " + + theExistingType + " " + theExistingNotNull; break; case POSTGRES_9_4: case ORACLE_12C: diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameIndexTask.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameIndexTask.java index 4dea9203855..7691e5acead 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameIndexTask.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/RenameIndexTask.java @@ -43,7 +43,13 @@ public class RenameIndexTask extends BaseTableTask { super(theProductVersion, theSchemaVersion); } - static List createRenameIndexSql(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName, String theOldIndexName, String theNewIndexName, DriverTypeEnum theDriverType) throws SQLException { + static List createRenameIndexSql( + DriverTypeEnum.ConnectionProperties theConnectionProperties, + String theTableName, + String theOldIndexName, + String theNewIndexName, + DriverTypeEnum theDriverType) + throws SQLException { Validate.notBlank(theOldIndexName, "theOldIndexName must not be blank"); Validate.notBlank(theNewIndexName, "theNewIndexName must not be blank"); Validate.notBlank(theTableName, "theTableName must not be blank"); @@ -67,7 +73,7 @@ public class RenameIndexTask extends BaseTableTask { case H2_EMBEDDED: case POSTGRES_9_4: case ORACLE_12C: - case COCKROACHDB_21_1: + case COCKROACHDB_21_1: sql.add("alter index " + theOldIndexName + " rename to " + theNewIndexName); break; case MSSQL_2012: @@ -95,7 +101,8 @@ public class RenameIndexTask extends BaseTableTask { return; } - List sqls = createRenameIndexSql(getConnectionProperties(), getTableName(), myOldIndexName, myNewIndexName, getDriverType()); + List sqls = createRenameIndexSql( + getConnectionProperties(), getTableName(), myOldIndexName, myNewIndexName, getDriverType()); if (!sqls.isEmpty()) { logInfo(ourLog, "Renaming index from {} to {} on table {}", myOldIndexName, myNewIndexName, getTableName()); } diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/SchemaInitializationProvider.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/SchemaInitializationProvider.java index 78c21f6211c..68b3b51733b 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/SchemaInitializationProvider.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/SchemaInitializationProvider.java @@ -28,12 +28,12 @@ import com.google.common.base.Charsets; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.builder.HashCodeBuilder; -import javax.annotation.Nonnull; import java.io.IOException; import 
java.io.InputStream; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.trim; @@ -50,7 +50,11 @@ public class SchemaInitializationProvider implements ISchemaInitializationProvid * @param theSchemaExistsIndicatorTable a table name we can use to determine if this schema has already been initialized * @param theCanInitializeSchema this is a "root" schema initializer that creates the primary tables used by this app */ - public SchemaInitializationProvider(String theSchemaDescription, String theSchemaFileClassPath, String theSchemaExistsIndicatorTable, boolean theCanInitializeSchema) { + public SchemaInitializationProvider( + String theSchemaDescription, + String theSchemaFileClassPath, + String theSchemaExistsIndicatorTable, + boolean theCanInitializeSchema) { mySchemaDescription = theSchemaDescription; mySchemaFileClassPath = theSchemaFileClassPath; mySchemaExistsIndicatorTable = theSchemaExistsIndicatorTable; @@ -69,13 +73,15 @@ public class SchemaInitializationProvider implements ISchemaInitializationProvid try { InputStream sqlFileInputStream = SchemaInitializationProvider.class.getResourceAsStream(initScript); if (sqlFileInputStream == null) { - throw new ConfigurationException(Msg.code(49) + "Schema initialization script " + initScript + " not found on classpath"); + throw new ConfigurationException( + Msg.code(49) + "Schema initialization script " + initScript + " not found on classpath"); } // Assumes no escaped semicolons... String sqlString = IOUtils.toString(sqlFileInputStream, Charsets.UTF_8); parseSqlFileIntoIndividualStatements(theDriverType, retval, sqlString); } catch (IOException e) { - throw new ConfigurationException(Msg.code(50) + "Error reading schema initialization script " + initScript, e); + throw new ConfigurationException( + Msg.code(50) + "Error reading schema initialization script " + initScript, e); } return retval; } @@ -83,7 +89,7 @@ public class SchemaInitializationProvider implements ISchemaInitializationProvid @VisibleForTesting void parseSqlFileIntoIndividualStatements(DriverTypeEnum theDriverType, List retval, String theSqlString) { String sqlString = theSqlString.replaceAll("--.*", ""); - + String sqlStringNoComments = preProcessSqlString(theDriverType, sqlString); String[] statements = sqlStringNoComments.split("\\;"); for (String statement : statements) { @@ -125,8 +131,8 @@ public class SchemaInitializationProvider implements ISchemaInitializationProvid @Override public int hashCode() { return new HashCodeBuilder(17, 37) - .append(this.getClass().getSimpleName()) - .toHashCode(); + .append(this.getClass().getSimpleName()) + .toHashCode(); } @Override @@ -155,4 +161,3 @@ public class SchemaInitializationProvider implements ISchemaInitializationProvid return myCanInitializeSchema; } } - diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/BaseMigrationTasks.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/BaseMigrationTasks.java index db3c519f52b..e5976112e0c 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/BaseMigrationTasks.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/BaseMigrationTasks.java @@ -28,12 +28,13 @@ import org.apache.commons.lang3.EnumUtils; import org.apache.commons.lang3.Validate; import org.flywaydb.core.api.MigrationVersion; -import 
javax.annotation.Nonnull; import java.util.Collection; +import javax.annotation.Nonnull; public class BaseMigrationTasks { MigrationVersion lastVersion; - private Multimap myTasks = MultimapBuilder.hashKeys().arrayListValues().build(); + private Multimap myTasks = + MultimapBuilder.hashKeys().arrayListValues().build(); @SuppressWarnings("unchecked") public MigrationTaskList getTaskList(@Nonnull T theFrom, @Nonnull T theTo) { @@ -85,12 +86,13 @@ public class BaseMigrationTasks { protected BaseTask getTaskWithVersion(String theMigrationVersion) { // First normalize the version number - String expectedVersion = MigrationVersion.fromVersion(theMigrationVersion).getVersion(); + String expectedVersion = + MigrationVersion.fromVersion(theMigrationVersion).getVersion(); return myTasks.values().stream() - .filter(task -> expectedVersion.equals(task.getMigrationVersion())) - .findFirst() - .get(); + .filter(task -> expectedVersion.equals(task.getMigrationVersion())) + .findFirst() + .get(); } void validate(Collection theTasks) { @@ -100,7 +102,9 @@ public class BaseMigrationTasks { MigrationVersion migrationVersion = MigrationVersion.fromVersion(version); if (lastVersion != null) { if (migrationVersion.compareTo(lastVersion) <= 0) { - throw new IllegalStateException(Msg.code(51) + "Migration version " + migrationVersion + " found after migration version " + lastVersion + ". Migrations need to be in order by version number."); + throw new IllegalStateException( + Msg.code(51) + "Migration version " + migrationVersion + " found after migration version " + + lastVersion + ". Migrations need to be in order by version number."); } } lastVersion = migrationVersion; diff --git a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/Builder.java b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/Builder.java index 94f007dba3e..5b9822475b1 100644 --- a/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/Builder.java +++ b/hapi-fhir-sql-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/api/Builder.java @@ -84,21 +84,24 @@ public class Builder { executeRawSqlOptional(true, theVersion, theSql); } - private ExecuteRawSqlTask executeRawSqlOptional(boolean theDoNothing, String theVersion, @Language("SQL") String theSql) { + private ExecuteRawSqlTask executeRawSqlOptional( + boolean theDoNothing, String theVersion, @Language("SQL") String theSql) { ExecuteRawSqlTask task = new ExecuteRawSqlTask(myRelease, theVersion).addSql(theSql); task.setDoNothing(theDoNothing); mySink.addTask(task); return task; } - public InitializeSchemaTask initializeSchema(String theVersion, ISchemaInitializationProvider theSchemaInitializationProvider) { + public InitializeSchemaTask initializeSchema( + String theVersion, ISchemaInitializationProvider theSchemaInitializationProvider) { InitializeSchemaTask task = new InitializeSchemaTask(myRelease, theVersion, theSchemaInitializationProvider); mySink.addTask(task); return task; } @SuppressWarnings("unused") - public InitializeSchemaTask initializeSchema(String theVersion, String theSchemaName, ISchemaInitializationProvider theSchemaInitializationProvider) { + public InitializeSchemaTask initializeSchema( + String theVersion, String theSchemaName, ISchemaInitializationProvider theSchemaInitializationProvider) { InitializeSchemaTask task = new InitializeSchemaTask(myRelease, theVersion, theSchemaInitializationProvider); task.setDescription("Initialize " + theSchemaName + " schema"); mySink.addTask(task); @@ -119,10 +122,9 @@ public 
class Builder { */ public Builder executeRawSql(String theVersion, Map theDriverToSql) { Map> singleSqlStatementMap = new HashMap<>(); - theDriverToSql.entrySet().stream() - .forEach(entry -> { - singleSqlStatementMap.put(entry.getKey(), Collections.singletonList(entry.getValue())); - }); + theDriverToSql.entrySet().stream().forEach(entry -> { + singleSqlStatementMap.put(entry.getKey(), Collections.singletonList(entry.getValue())); + }); return executeRawSqls(theVersion, singleSqlStatementMap); } @@ -136,10 +138,9 @@ public class Builder { */ public Builder executeRawSqls(String theVersion, Map> theDriverToSqls) { ExecuteRawSqlTask executeRawSqlTask = new ExecuteRawSqlTask(myRelease, theVersion); - theDriverToSqls.entrySet().stream() - .forEach(entry -> { - entry.getValue().forEach(sql -> executeRawSqlTask.addSql(entry.getKey(), sql)); - }); + theDriverToSqls.entrySet().stream().forEach(entry -> { + entry.getValue().forEach(sql -> executeRawSqlTask.addSql(entry.getKey(), sql)); + }); mySink.addTask(executeRawSqlTask); return this; } @@ -151,8 +152,10 @@ public class Builder { return this; } - public BuilderAddTableByColumns addTableByColumns(String theVersion, String theTableName, String... thePkColumnNames) { - return new BuilderAddTableByColumns(myRelease, theVersion, mySink, theTableName, Arrays.asList(thePkColumnNames)); + public BuilderAddTableByColumns addTableByColumns( + String theVersion, String theTableName, String... thePkColumnNames) { + return new BuilderAddTableByColumns( + myRelease, theVersion, mySink, theTableName, Arrays.asList(thePkColumnNames)); } public void addIdGenerator(String theVersion, String theGeneratorName) { @@ -226,7 +229,8 @@ public class Builder { renameIndexOptional(true, theVersion, theOldIndexName, theNewIndexName); } - private void renameIndexOptional(boolean theDoNothing, String theVersion, String theOldIndexName, String theNewIndexName) { + private void renameIndexOptional( + boolean theDoNothing, String theVersion, String theOldIndexName, String theNewIndexName) { RenameIndexTask task = new RenameIndexTask(myRelease, theVersion); task.setOldIndexName(theOldIndexName); task.setNewIndexName(theNewIndexName); @@ -282,7 +286,12 @@ public class Builder { * @param isOkayIfNeitherColumnExists Setting this to true means that it's not an error if neither column exists * @param theDeleteTargetColumnFirstIfBothExist Setting this to true causes the migrator to be ok with the target column existing. It will make sure that there is no data in the column with the new name, then delete it if so in order to make room for the renamed column. If there is data it will still bomb out. 
*/ - public BuilderWithTableName renameColumn(String theVersion, String theOldName, String theNewName, boolean isOkayIfNeitherColumnExists, boolean theDeleteTargetColumnFirstIfBothExist) { + public BuilderWithTableName renameColumn( + String theVersion, + String theOldName, + String theNewName, + boolean isOkayIfNeitherColumnExists, + boolean theDeleteTargetColumnFirstIfBothExist) { RenameColumnTask task = new RenameColumnTask(myRelease, theVersion); task.setTableName(myTableName); task.setOldName(theOldName); @@ -306,7 +315,8 @@ public class Builder { } public void migratePostgresTextClobToBinaryClob(String theVersion, String theColumnName) { - MigratePostgresTextClobToBinaryClobTask task = new MigratePostgresTextClobToBinaryClobTask(myRelease, theVersion); + MigratePostgresTextClobToBinaryClobTask task = + new MigratePostgresTextClobToBinaryClobTask(myRelease, theVersion); task.setTableName(getTableName()); task.setColumnName(theColumnName); addTask(task); @@ -389,11 +399,14 @@ public class Builder { } public BuilderWithTableName.BuilderModifyColumnWithName.BuilderModifyColumnWithNameAndNullable nullable() { - return new BuilderWithTableName.BuilderModifyColumnWithName.BuilderModifyColumnWithNameAndNullable(myVersion, true); + return new BuilderWithTableName.BuilderModifyColumnWithName.BuilderModifyColumnWithNameAndNullable( + myVersion, true); } - public BuilderWithTableName.BuilderModifyColumnWithName.BuilderModifyColumnWithNameAndNullable nonNullable() { - return new BuilderWithTableName.BuilderModifyColumnWithName.BuilderModifyColumnWithNameAndNullable(myVersion, false); + public BuilderWithTableName.BuilderModifyColumnWithName.BuilderModifyColumnWithNameAndNullable + nonNullable() { + return new BuilderWithTableName.BuilderModifyColumnWithName.BuilderModifyColumnWithNameAndNullable( + myVersion, false); } public class BuilderModifyColumnWithNameAndNullable { @@ -413,11 +426,13 @@ public class Builder { public void withType(ColumnTypeEnum theColumnType, Integer theLength) { if (theColumnType == ColumnTypeEnum.STRING) { if (theLength == null || theLength == 0) { - throw new IllegalArgumentException(Msg.code(52) + "Can not specify length 0 for column of type " + theColumnType); + throw new IllegalArgumentException( + Msg.code(52) + "Can not specify length 0 for column of type " + theColumnType); } } else { if (theLength != null) { - throw new IllegalArgumentException(Msg.code(53) + "Can not specify length for column of type " + theColumnType); + throw new IllegalArgumentException( + Msg.code(53) + "Can not specify length for column of type " + theColumnType); } } @@ -449,8 +464,10 @@ public class Builder { myForeignKeyName = theForeignKeyName; } - public BuilderWithTableName.BuilderAddForeignKey.BuilderAddForeignKeyToColumn toColumn(String theColumnName) { - return new BuilderWithTableName.BuilderAddForeignKey.BuilderAddForeignKeyToColumn(myVersion, theColumnName); + public BuilderWithTableName.BuilderAddForeignKey.BuilderAddForeignKeyToColumn toColumn( + String theColumnName) { + return new BuilderWithTableName.BuilderAddForeignKey.BuilderAddForeignKeyToColumn( + myVersion, theColumnName); } public class BuilderAddForeignKeyToColumn extends BuilderWithTableName.BuilderModifyColumnWithName { @@ -477,7 +494,11 @@ public class Builder { private final String myColumnName; private final BaseMigrationTasks.IAcceptsTasks myTaskSink; - public BuilderAddColumnWithName(String theRelease, String theVersion, String theColumnName, BaseMigrationTasks.IAcceptsTasks theTaskSink) { + public 
BuilderAddColumnWithName( + String theRelease, + String theVersion, + String theColumnName, + BaseMigrationTasks.IAcceptsTasks theTaskSink) { myRelease = theRelease; myVersion = theVersion; myColumnName = theColumnName; @@ -485,11 +506,13 @@ public class Builder { } public BuilderWithTableName.BuilderAddColumnWithName.BuilderAddColumnWithNameNullable nullable() { - return new BuilderWithTableName.BuilderAddColumnWithName.BuilderAddColumnWithNameNullable(myRelease, myVersion, true); + return new BuilderWithTableName.BuilderAddColumnWithName.BuilderAddColumnWithNameNullable( + myRelease, myVersion, true); } public BuilderWithTableName.BuilderAddColumnWithName.BuilderAddColumnWithNameNullable nonNullable() { - return new BuilderWithTableName.BuilderAddColumnWithName.BuilderAddColumnWithNameNullable(myRelease, myVersion, false); + return new BuilderWithTableName.BuilderAddColumnWithName.BuilderAddColumnWithNameNullable( + myRelease, myVersion, false); } public class BuilderAddColumnWithNameNullable { @@ -519,7 +542,6 @@ public class Builder { return new BuilderCompleteTask(task); } - } } } @@ -564,7 +586,6 @@ public class Builder { addTask(myTask); } - public BuilderAddTableRawSql addSql(DriverTypeEnum theDriverTypeEnum, @Language("SQL") String theSql) { myTask.addSql(theDriverTypeEnum, theSql); return this; @@ -579,7 +600,12 @@ public class Builder { private final String myVersion; private final AddTableByColumnTask myTask; - public BuilderAddTableByColumns(String theRelease, String theVersion, BaseMigrationTasks.IAcceptsTasks theSink, String theTableName, List thePkColumnNames) { + public BuilderAddTableByColumns( + String theRelease, + String theVersion, + BaseMigrationTasks.IAcceptsTasks theSink, + String theTableName, + List thePkColumnNames) { super(theRelease, theSink, theTableName); myVersion = theVersion; myTask = new AddTableByColumnTask(myRelease, theVersion); @@ -606,5 +632,4 @@ public class Builder { return this; } } - } diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/config/Batch2JobsConfig.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/config/Batch2JobsConfig.java index 616efcd3f8a..32dde448693 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/config/Batch2JobsConfig.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/config/Batch2JobsConfig.java @@ -28,7 +28,7 @@ import ca.uhn.fhir.batch2.jobs.termcodesystem.TermCodeSystemJobConfig; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; -//When you define a new batch job, add it here. +// When you define a new batch job, add it here. 
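Illustrative sketch (not part of the patch): the comment above describes how a new batch job is enabled, namely by adding its Spring @Configuration class to the @Import list on Batch2JobsConfig. MyNewJobConfig below is an invented placeholder; only Configuration, Import and BatchCommonCtx come from the code shown in this patch.

    // Hypothetical illustration of registering a new batch2 job configuration.
    import org.springframework.context.annotation.Configuration;
    import org.springframework.context.annotation.Import;

    @Configuration
    @Import({
        BatchCommonCtx.class,
        MyNewJobConfig.class // a newly defined job's configuration class is appended here
    })
    public class Batch2JobsConfig {}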
@Configuration @Import({ BatchCommonCtx.class, @@ -39,6 +39,4 @@ import org.springframework.context.annotation.Import; TermCodeSystemJobConfig.class, BulkImportPullConfig.class, }) -public class Batch2JobsConfig { - -} +public class Batch2JobsConfig {} diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/config/BatchCommonCtx.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/config/BatchCommonCtx.java index 508b9ccd72b..ffdb6b5f030 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/config/BatchCommonCtx.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/config/BatchCommonCtx.java @@ -26,8 +26,8 @@ import org.springframework.context.annotation.Bean; public class BatchCommonCtx { @Bean - UrlPartitioner urlPartitioner(MatchUrlService theMatchUrlService, IRequestPartitionHelperSvc theRequestPartitionHelperSvc) { + UrlPartitioner urlPartitioner( + MatchUrlService theMatchUrlService, IRequestPartitionHelperSvc theRequestPartitionHelperSvc) { return new UrlPartitioner(theMatchUrlService, theRequestPartitionHelperSvc); } - } diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkDataExportProvider.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkDataExportProvider.java index 6603e3c293f..d68bdea9ae0 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkDataExportProvider.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkDataExportProvider.java @@ -71,7 +71,6 @@ import org.hl7.fhir.r4.model.Parameters; import org.slf4j.Logger; import org.springframework.beans.factory.annotation.Autowired; -import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; @@ -83,6 +82,7 @@ import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; +import javax.servlet.http.HttpServletResponse; import static ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters.ExportStyle; import static ca.uhn.fhir.util.DatatypeUtil.toStringValue; @@ -92,17 +92,18 @@ import static org.slf4j.LoggerFactory.getLogger; public class BulkDataExportProvider { public static final String FARM_TO_TABLE_TYPE_FILTER_REGEX = "(?:,)(?=[A-Z][a-z]+\\?)"; - public static final List PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES = List.of("Practitioner", "Organization"); + public static final List PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES = + List.of("Practitioner", "Organization"); /** * Bulk data $export does not include the Binary type */ public static final String UNSUPPORTED_BINARY_TYPE = "Binary"; + private static final Logger ourLog = getLogger(BulkDataExportProvider.class); @Autowired private IInterceptorBroadcaster myInterceptorBroadcaster; - private Set myCompartmentResources; @Autowired @@ -123,36 +124,56 @@ public class BulkDataExportProvider { /** * $export */ - @Operation(name = JpaConstants.OPERATION_EXPORT, global = false /* set to true once we can handle this */, manualResponse = true, idempotent = true) + @Operation( + name = JpaConstants.OPERATION_EXPORT, + global = false /* set to true once we can handle this */, + manualResponse = true, + idempotent = true) public void export( - @OperationParam(name = JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, min = 0, max = 1, typeName = "string") IPrimitiveType theOutputFormat, - @OperationParam(name = 
JpaConstants.PARAM_EXPORT_TYPE, min = 0, max = 1, typeName = "string") IPrimitiveType theType, - @OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType theSince, - @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> theTypeFilter, - @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_POST_FETCH_FILTER_URL, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> theTypePostFetchFilterUrl, - @OperationParam(name = JpaConstants.PARAM_EXPORT_IDENTIFIER, min = 0, max = 1, typeName = "string") IPrimitiveType theExportId, - ServletRequestDetails theRequestDetails - ) { + @OperationParam(name = JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, min = 0, max = 1, typeName = "string") + IPrimitiveType theOutputFormat, + @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE, min = 0, max = 1, typeName = "string") + IPrimitiveType theType, + @OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") + IPrimitiveType theSince, + @OperationParam( + name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "string") + List> theTypeFilter, + @OperationParam( + name = JpaConstants.PARAM_EXPORT_TYPE_POST_FETCH_FILTER_URL, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "string") + List> theTypePostFetchFilterUrl, + @OperationParam(name = JpaConstants.PARAM_EXPORT_IDENTIFIER, min = 0, max = 1, typeName = "string") + IPrimitiveType theExportId, + ServletRequestDetails theRequestDetails) { // JPA export provider validatePreferAsyncHeader(theRequestDetails, JpaConstants.OPERATION_EXPORT); - BulkExportJobParameters BulkExportJobParameters = buildSystemBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportId, theTypePostFetchFilterUrl); + BulkExportJobParameters BulkExportJobParameters = buildSystemBulkExportOptions( + theOutputFormat, theType, theSince, theTypeFilter, theExportId, theTypePostFetchFilterUrl); startJob(theRequestDetails, BulkExportJobParameters); } - private void startJob(ServletRequestDetails theRequestDetails, - BulkExportJobParameters theOptions) { + private void startJob(ServletRequestDetails theRequestDetails, BulkExportJobParameters theOptions) { // permission check - HookParams params = (new HookParams()).add(BulkExportJobParameters.class, theOptions) - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails); - CompositeInterceptorBroadcaster.doCallHooks(this.myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_INITIATE_BULK_EXPORT, params); + HookParams params = (new HookParams()) + .add(BulkExportJobParameters.class, theOptions) + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails); + CompositeInterceptorBroadcaster.doCallHooks( + this.myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_INITIATE_BULK_EXPORT, params); // get cache boolean boolean useCache = shouldUseCache(theRequestDetails); - // Set the original request URL as part of the job information, as this is used in the poll-status-endpoint, and is needed for the report. + // Set the original request URL as part of the job information, as this is used in the poll-status-endpoint, and + // is needed for the report. 
theOptions.setOriginalRequestUrl(theRequestDetails.getCompleteUrl()); // If no _type parameter is provided, default to all resource types except Binary @@ -163,7 +184,8 @@ public class BulkDataExportProvider { } // Determine and validate partition permissions (if needed). - RequestPartitionId partitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, null); + RequestPartitionId partitionId = + myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, null); myRequestPartitionHelperService.validateHasPartitionPermissions(theRequestDetails, "Binary", partitionId); theOptions.setPartitionId(partitionId); @@ -178,7 +200,8 @@ public class BulkDataExportProvider { } private boolean shouldUseCache(ServletRequestDetails theRequestDetails) { - CacheControlDirective cacheControlDirective = new CacheControlDirective().parse(theRequestDetails.getHeaders(Constants.HEADER_CACHE_CONTROL)); + CacheControlDirective cacheControlDirective = + new CacheControlDirective().parse(theRequestDetails.getHeaders(Constants.HEADER_CACHE_CONTROL)); return myStorageSettings.getEnableBulkExportJobReuse() && !cacheControlDirective.isNoCache(); } @@ -191,16 +214,30 @@ public class BulkDataExportProvider { */ @Operation(name = JpaConstants.OPERATION_EXPORT, manualResponse = true, idempotent = true, typeName = "Group") public void groupExport( - @IdParam IIdType theIdParam, - @OperationParam(name = JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, min = 0, max = 1, typeName = "string") IPrimitiveType theOutputFormat, - @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE, min = 0, max = 1, typeName = "string") IPrimitiveType theType, - @OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType theSince, - @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> theTypeFilter, - @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_POST_FETCH_FILTER_URL, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> theTypePostFetchFilterUrl, - @OperationParam(name = JpaConstants.PARAM_EXPORT_MDM, min = 0, max = 1, typeName = "boolean") IPrimitiveType theMdm, - @OperationParam(name = JpaConstants.PARAM_EXPORT_IDENTIFIER, min = 0, max = 1, typeName = "string") IPrimitiveType theExportIdentifier, - ServletRequestDetails theRequestDetails - ) { + @IdParam IIdType theIdParam, + @OperationParam(name = JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, min = 0, max = 1, typeName = "string") + IPrimitiveType theOutputFormat, + @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE, min = 0, max = 1, typeName = "string") + IPrimitiveType theType, + @OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") + IPrimitiveType theSince, + @OperationParam( + name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "string") + List> theTypeFilter, + @OperationParam( + name = JpaConstants.PARAM_EXPORT_TYPE_POST_FETCH_FILTER_URL, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "string") + List> theTypePostFetchFilterUrl, + @OperationParam(name = JpaConstants.PARAM_EXPORT_MDM, min = 0, max = 1, typeName = "boolean") + IPrimitiveType theMdm, + @OperationParam(name = JpaConstants.PARAM_EXPORT_IDENTIFIER, min = 0, max = 1, typeName = "string") + IPrimitiveType theExportIdentifier, + ServletRequestDetails theRequestDetails) { ourLog.debug("Received Group Bulk Export 
Request for Group {}", theIdParam); ourLog.debug("_type={}", theType); ourLog.debug("_since={}", theSince); @@ -212,14 +249,22 @@ public class BulkDataExportProvider { // verify the Group exists before starting the job validateTargetsExists(theRequestDetails, "Group", List.of(theIdParam)); - BulkExportJobParameters BulkExportJobParameters = buildGroupBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theIdParam, theMdm, theExportIdentifier, theTypePostFetchFilterUrl); + BulkExportJobParameters BulkExportJobParameters = buildGroupBulkExportOptions( + theOutputFormat, + theType, + theSince, + theTypeFilter, + theIdParam, + theMdm, + theExportIdentifier, + theTypePostFetchFilterUrl); if (isNotEmpty(BulkExportJobParameters.getResourceTypes())) { validateResourceTypesAllContainPatientSearchParams(BulkExportJobParameters.getResourceTypes()); } else { // all patient resource types Set groupTypes = new HashSet<>(getPatientCompartmentResources()); - groupTypes.removeIf(t-> !myDaoRegistry.isResourceTypeSupported(t)); + groupTypes.removeIf(t -> !myDaoRegistry.isResourceTypeSupported(t)); BulkExportJobParameters.setResourceTypes(groupTypes); } @@ -234,32 +279,37 @@ public class BulkDataExportProvider { * @param theTargetResourceName the type of the target * @param theIdParams the id(s) to verify exist */ - private void validateTargetsExists(RequestDetails theRequestDetails, String theTargetResourceName, Iterable theIdParams) { - RequestPartitionId partitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForRead(theRequestDetails, theTargetResourceName, theIdParams.iterator().next()); + private void validateTargetsExists( + RequestDetails theRequestDetails, String theTargetResourceName, Iterable theIdParams) { + RequestPartitionId partitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForRead( + theRequestDetails, theTargetResourceName, theIdParams.iterator().next()); SystemRequestDetails requestDetails = new SystemRequestDetails().setRequestPartitionId(partitionId); for (IIdType nextId : theIdParams) { - myDaoRegistry.getResourceDao(theTargetResourceName) - .read(nextId, requestDetails); + myDaoRegistry.getResourceDao(theTargetResourceName).read(nextId, requestDetails); } - } private void validateResourceTypesAllContainPatientSearchParams(Collection theResourceTypes) { if (theResourceTypes != null) { List badResourceTypes = theResourceTypes.stream() - .filter(resourceType -> !PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES.contains(resourceType)) - .filter(resourceType -> !getPatientCompartmentResources().contains(resourceType)) - .collect(Collectors.toList()); + .filter(resourceType -> + !PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES.contains(resourceType)) + .filter(resourceType -> !getPatientCompartmentResources().contains(resourceType)) + .collect(Collectors.toList()); if (!badResourceTypes.isEmpty()) { - throw new InvalidRequestException(Msg.code(512) + String.format("Resource types [%s] are invalid for this type of export, as they do not contain search parameters that refer to patients.", String.join(",", badResourceTypes))); + throw new InvalidRequestException(Msg.code(512) + + String.format( + "Resource types [%s] are invalid for this type of export, as they do not contain search parameters that refer to patients.", + String.join(",", badResourceTypes))); } } } private Set getPatientCompartmentResources() { if (myCompartmentResources == null) { - myCompartmentResources = new 
HashSet<>(SearchParameterUtil.getAllResourceTypesThatAreInPatientCompartment(myFhirContext)); + myCompartmentResources = + new HashSet<>(SearchParameterUtil.getAllResourceTypesThatAreInPatientCompartment(myFhirContext)); myCompartmentResources.add("Device"); } return myCompartmentResources; @@ -270,22 +320,50 @@ public class BulkDataExportProvider { */ @Operation(name = JpaConstants.OPERATION_EXPORT, manualResponse = true, idempotent = true, typeName = "Patient") public void patientExport( - @OperationParam(name = JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, min = 0, max = 1, typeName = "string") IPrimitiveType theOutputFormat, - @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE, min = 0, max = 1, typeName = "string") IPrimitiveType theType, - @OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType theSince, - @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> theTypeFilter, - @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_POST_FETCH_FILTER_URL, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> theTypePostFetchFilterUrl, - @OperationParam(name = JpaConstants.PARAM_EXPORT_PATIENT, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> thePatient, - @OperationParam(name = JpaConstants.PARAM_EXPORT_IDENTIFIER, min = 0, max = 1, typeName = "string") IPrimitiveType theExportIdentifier, - ServletRequestDetails theRequestDetails - ) { + @OperationParam(name = JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, min = 0, max = 1, typeName = "string") + IPrimitiveType theOutputFormat, + @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE, min = 0, max = 1, typeName = "string") + IPrimitiveType theType, + @OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") + IPrimitiveType theSince, + @OperationParam( + name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "string") + List> theTypeFilter, + @OperationParam( + name = JpaConstants.PARAM_EXPORT_TYPE_POST_FETCH_FILTER_URL, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "string") + List> theTypePostFetchFilterUrl, + @OperationParam( + name = JpaConstants.PARAM_EXPORT_PATIENT, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "string") + List> thePatient, + @OperationParam(name = JpaConstants.PARAM_EXPORT_IDENTIFIER, min = 0, max = 1, typeName = "string") + IPrimitiveType theExportIdentifier, + ServletRequestDetails theRequestDetails) { validatePreferAsyncHeader(theRequestDetails, JpaConstants.OPERATION_EXPORT); if (thePatient != null) { - validateTargetsExists(theRequestDetails, "Patient", thePatient.stream().map(s -> new IdDt(s.getValue())).collect(Collectors.toList())); + validateTargetsExists( + theRequestDetails, + "Patient", + thePatient.stream().map(s -> new IdDt(s.getValue())).collect(Collectors.toList())); } - BulkExportJobParameters BulkExportJobParameters = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportIdentifier, thePatient, theTypePostFetchFilterUrl); + BulkExportJobParameters BulkExportJobParameters = buildPatientBulkExportOptions( + theOutputFormat, + theType, + theSince, + theTypeFilter, + theExportIdentifier, + thePatient, + theTypePostFetchFilterUrl); validateResourceTypesAllContainPatientSearchParams(BulkExportJobParameters.getResourceTypes()); startJob(theRequestDetails, BulkExportJobParameters); @@ 
-296,20 +374,40 @@ public class BulkDataExportProvider { */ @Operation(name = JpaConstants.OPERATION_EXPORT, manualResponse = true, idempotent = true, typeName = "Patient") public void patientInstanceExport( - @IdParam IIdType theIdParam, - @OperationParam(name = JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, min = 0, max = 1, typeName = "string") IPrimitiveType theOutputFormat, - @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE, min = 0, max = 1, typeName = "string") IPrimitiveType theType, - @OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType theSince, - @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> theTypeFilter, - @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_POST_FETCH_FILTER_URL, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> theTypePostFetchFilterUrl, - @OperationParam(name = JpaConstants.PARAM_EXPORT_IDENTIFIER, min = 0, max = 1, typeName = "string") IPrimitiveType theExportIdentifier, - ServletRequestDetails theRequestDetails - ) { + @IdParam IIdType theIdParam, + @OperationParam(name = JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, min = 0, max = 1, typeName = "string") + IPrimitiveType theOutputFormat, + @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE, min = 0, max = 1, typeName = "string") + IPrimitiveType theType, + @OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") + IPrimitiveType theSince, + @OperationParam( + name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "string") + List> theTypeFilter, + @OperationParam( + name = JpaConstants.PARAM_EXPORT_TYPE_POST_FETCH_FILTER_URL, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "string") + List> theTypePostFetchFilterUrl, + @OperationParam(name = JpaConstants.PARAM_EXPORT_IDENTIFIER, min = 0, max = 1, typeName = "string") + IPrimitiveType theExportIdentifier, + ServletRequestDetails theRequestDetails) { validatePreferAsyncHeader(theRequestDetails, JpaConstants.OPERATION_EXPORT); validateTargetsExists(theRequestDetails, "Patient", List.of(theIdParam)); - BulkExportJobParameters BulkExportJobParameters = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportIdentifier, theIdParam, theTypePostFetchFilterUrl); + BulkExportJobParameters BulkExportJobParameters = buildPatientBulkExportOptions( + theOutputFormat, + theType, + theSince, + theTypeFilter, + theExportIdentifier, + theIdParam, + theTypePostFetchFilterUrl); validateResourceTypesAllContainPatientSearchParams(BulkExportJobParameters.getResourceTypes()); startJob(theRequestDetails, BulkExportJobParameters); @@ -319,11 +417,16 @@ public class BulkDataExportProvider { * $export-poll-status */ @SuppressWarnings("unchecked") - @Operation(name = JpaConstants.OPERATION_EXPORT_POLL_STATUS, manualResponse = true, idempotent = true, deleteEnabled = true) + @Operation( + name = JpaConstants.OPERATION_EXPORT_POLL_STATUS, + manualResponse = true, + idempotent = true, + deleteEnabled = true) public void exportPollStatus( - @OperationParam(name = JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID, typeName = "string", min = 0, max = 1) IPrimitiveType theJobId, - ServletRequestDetails theRequestDetails - ) throws IOException { + @OperationParam(name = JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID, typeName = "string", min = 0, max = 1) + IPrimitiveType theJobId, + 
ServletRequestDetails theRequestDetails) + throws IOException { HttpServletResponse response = theRequestDetails.getServletResponse(); theRequestDetails.getServer().addHeadersToResponse(response); @@ -332,9 +435,10 @@ public class BulkDataExportProvider { if (theJobId == null) { Parameters parameters = (Parameters) theRequestDetails.getResource(); Parameters.ParametersParameterComponent parameter = parameters.getParameter().stream() - .filter(param -> param.getName().equals(JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID)) - .findFirst() - .orElseThrow(() -> new InvalidRequestException(Msg.code(2227) + "$export-poll-status requires a job ID, please provide the value of target jobId.")); + .filter(param -> param.getName().equals(JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID)) + .findFirst() + .orElseThrow(() -> new InvalidRequestException(Msg.code(2227) + + "$export-poll-status requires a job ID, please provide the value of target jobId.")); theJobId = (IPrimitiveType) parameter.getValue(); } @@ -343,10 +447,12 @@ public class BulkDataExportProvider { BulkExportJobParameters parameters = info.getParameters(BulkExportJobParameters.class); if (parameters.getPartitionId() != null) { // Determine and validate permissions for partition (if needed) - RequestPartitionId partitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, null); + RequestPartitionId partitionId = + myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, null); myRequestPartitionHelperService.validateHasPartitionPermissions(theRequestDetails, "Binary", partitionId); if (!parameters.getPartitionId().equals(partitionId)) { - throw new InvalidRequestException(Msg.code(2304) + "Invalid partition in request for Job ID " + theJobId); + throw new InvalidRequestException( + Msg.code(2304) + "Invalid partition in request for Job ID " + theJobId); } } @@ -364,7 +470,6 @@ public class BulkDataExportProvider { bulkResponseDocument.setRequiresAccessToken(true); - String report = info.getReport(); if (isEmpty(report)) { // this should never happen, but just in case... @@ -377,16 +482,18 @@ public class BulkDataExportProvider { String serverBase = getServerBase(theRequestDetails); - for (Map.Entry> entrySet : results.getResourceTypeToBinaryIds().entrySet()) { + for (Map.Entry> entrySet : + results.getResourceTypeToBinaryIds().entrySet()) { String resourceType = entrySet.getKey(); List binaryIds = entrySet.getValue(); for (String binaryId : binaryIds) { IIdType iId = new IdType(binaryId); - String nextUrl = serverBase + "/" + iId.toUnqualifiedVersionless().getValue(); + String nextUrl = serverBase + "/" + + iId.toUnqualifiedVersionless().getValue(); bulkResponseDocument - .addOutput() - .setType(resourceType) - .setUrl(nextUrl); + .addOutput() + .setType(resourceType) + .setUrl(nextUrl); } } JsonUtil.serialize(bulkResponseDocument, response.getWriter()); @@ -407,7 +514,9 @@ public class BulkDataExportProvider { break; default: // Deliberate fall through - ourLog.warn("Unrecognized status encountered: {}. Treating as BUILDING/SUBMITTED", info.getStatus().name()); + ourLog.warn( + "Unrecognized status encountered: {}. 
Treating as BUILDING/SUBMITTED", + info.getStatus().name()); //noinspection fallthrough case FINALIZE: case QUEUED: @@ -419,25 +528,34 @@ public class BulkDataExportProvider { } else { response.setStatus(Constants.STATUS_HTTP_202_ACCEPTED); String dateString = getTransitionTimeOfJobInfo(info); - response.addHeader(Constants.HEADER_X_PROGRESS, "Build in progress - Status set to " - + info.getStatus() - + " at " - + dateString); + response.addHeader( + Constants.HEADER_X_PROGRESS, + "Build in progress - Status set to " + info.getStatus() + " at " + dateString); response.addHeader(Constants.HEADER_RETRY_AFTER, "120"); } break; } } - private void handleDeleteRequest(IPrimitiveType theJobId, HttpServletResponse response, StatusEnum theOrigStatus) throws IOException { + private void handleDeleteRequest( + IPrimitiveType theJobId, HttpServletResponse response, StatusEnum theOrigStatus) + throws IOException { IBaseOperationOutcome outcome = OperationOutcomeUtil.newInstance(myFhirContext); JobOperationResultJson resultMessage = myJobCoordinator.cancelInstance(theJobId.getValueAsString()); if (theOrigStatus.equals(StatusEnum.COMPLETED)) { response.setStatus(Constants.STATUS_HTTP_404_NOT_FOUND); - OperationOutcomeUtil.addIssue(myFhirContext, outcome, "error", "Job instance <" + theJobId.getValueAsString() + "> was already cancelled or has completed. Nothing to do.", null, null); + OperationOutcomeUtil.addIssue( + myFhirContext, + outcome, + "error", + "Job instance <" + theJobId.getValueAsString() + + "> was already cancelled or has completed. Nothing to do.", + null, + null); } else { response.setStatus(Constants.STATUS_HTTP_202_ACCEPTED); - OperationOutcomeUtil.addIssue(myFhirContext, outcome, "information", resultMessage.getMessage(), null, "informational"); + OperationOutcomeUtil.addIssue( + myFhirContext, outcome, "information", resultMessage.getMessage(), null, "informational"); } myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToWriter(outcome, response.getWriter()); response.getWriter().close(); @@ -454,12 +572,40 @@ public class BulkDataExportProvider { } } - private BulkExportJobParameters buildSystemBulkExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, List> theTypeFilter, IPrimitiveType theExportId, List> theTypePostFetchFilterUrl) { - return buildBulkExportJobParameters(theOutputFormat, theType, theSince, theTypeFilter, theExportId, BulkExportJobParameters.ExportStyle.SYSTEM, theTypePostFetchFilterUrl); + private BulkExportJobParameters buildSystemBulkExportOptions( + IPrimitiveType theOutputFormat, + IPrimitiveType theType, + IPrimitiveType theSince, + List> theTypeFilter, + IPrimitiveType theExportId, + List> theTypePostFetchFilterUrl) { + return buildBulkExportJobParameters( + theOutputFormat, + theType, + theSince, + theTypeFilter, + theExportId, + BulkExportJobParameters.ExportStyle.SYSTEM, + theTypePostFetchFilterUrl); } - private BulkExportJobParameters buildGroupBulkExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, List> theTypeFilter, IIdType theGroupId, IPrimitiveType theExpandMdm, IPrimitiveType theExportId, List> theTypePostFetchFilterUrl) { - BulkExportJobParameters BulkExportJobParameters = buildBulkExportJobParameters(theOutputFormat, theType, theSince, theTypeFilter, theExportId, ExportStyle.GROUP, theTypePostFetchFilterUrl); + private BulkExportJobParameters buildGroupBulkExportOptions( + IPrimitiveType theOutputFormat, + IPrimitiveType theType, + IPrimitiveType 
theSince, + List> theTypeFilter, + IIdType theGroupId, + IPrimitiveType theExpandMdm, + IPrimitiveType theExportId, + List> theTypePostFetchFilterUrl) { + BulkExportJobParameters BulkExportJobParameters = buildBulkExportJobParameters( + theOutputFormat, + theType, + theSince, + theTypeFilter, + theExportId, + ExportStyle.GROUP, + theTypePostFetchFilterUrl); BulkExportJobParameters.setGroupId(toStringValue(theGroupId)); boolean mdm = false; @@ -471,26 +617,62 @@ public class BulkDataExportProvider { return BulkExportJobParameters; } - private BulkExportJobParameters buildPatientBulkExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, List> theTypeFilter, IPrimitiveType theExportIdentifier, List> thePatientIds, List> theTypePostFetchFilterUrl) { + private BulkExportJobParameters buildPatientBulkExportOptions( + IPrimitiveType theOutputFormat, + IPrimitiveType theType, + IPrimitiveType theSince, + List> theTypeFilter, + IPrimitiveType theExportIdentifier, + List> thePatientIds, + List> theTypePostFetchFilterUrl) { IPrimitiveType type = theType; if (type == null) { // Type is optional, but the job requires it type = new StringDt("Patient"); } - BulkExportJobParameters BulkExportJobParameters = buildBulkExportJobParameters(theOutputFormat, type, theSince, theTypeFilter, theExportIdentifier, ExportStyle.PATIENT, theTypePostFetchFilterUrl); + BulkExportJobParameters BulkExportJobParameters = buildBulkExportJobParameters( + theOutputFormat, + type, + theSince, + theTypeFilter, + theExportIdentifier, + ExportStyle.PATIENT, + theTypePostFetchFilterUrl); if (thePatientIds != null) { - BulkExportJobParameters.setPatientIds(thePatientIds.stream().map(IPrimitiveType::getValueAsString).collect(Collectors.toSet())); + BulkExportJobParameters.setPatientIds( + thePatientIds.stream().map(IPrimitiveType::getValueAsString).collect(Collectors.toSet())); } return BulkExportJobParameters; } - private BulkExportJobParameters buildPatientBulkExportOptions(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, List> theTypeFilter, IPrimitiveType theExportIdentifier, IIdType thePatientId, List> theTypePostFetchFilterUrl) { - BulkExportJobParameters BulkExportJobParameters = buildBulkExportJobParameters(theOutputFormat, theType, theSince, theTypeFilter, theExportIdentifier, ExportStyle.PATIENT, theTypePostFetchFilterUrl); + private BulkExportJobParameters buildPatientBulkExportOptions( + IPrimitiveType theOutputFormat, + IPrimitiveType theType, + IPrimitiveType theSince, + List> theTypeFilter, + IPrimitiveType theExportIdentifier, + IIdType thePatientId, + List> theTypePostFetchFilterUrl) { + BulkExportJobParameters BulkExportJobParameters = buildBulkExportJobParameters( + theOutputFormat, + theType, + theSince, + theTypeFilter, + theExportIdentifier, + ExportStyle.PATIENT, + theTypePostFetchFilterUrl); BulkExportJobParameters.setPatientIds(Collections.singleton(thePatientId.getValue())); return BulkExportJobParameters; } - private BulkExportJobParameters buildBulkExportJobParameters(IPrimitiveType theOutputFormat, IPrimitiveType theType, IPrimitiveType theSince, List> theTypeFilter, IPrimitiveType theExportIdentifier, BulkExportJobParameters.ExportStyle theExportStyle, List> theTypePostFetchFilterUrl) { + private BulkExportJobParameters buildBulkExportJobParameters( + IPrimitiveType theOutputFormat, + IPrimitiveType theType, + IPrimitiveType theSince, + List> theTypeFilter, + IPrimitiveType theExportIdentifier, + BulkExportJobParameters.ExportStyle 
theExportStyle, + List> theTypePostFetchFilterUrl) { String outputFormat = theOutputFormat != null ? theOutputFormat.getValueAsString() : Constants.CT_FHIR_NDJSON; Set resourceTypes = null; @@ -526,7 +708,8 @@ public class BulkDataExportProvider { if (serverBase == null) { throw new InternalErrorException(Msg.code(2136) + "Unable to get the server base."); } - String pollLocation = serverBase + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + theInstanceId; + String pollLocation = serverBase + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + + JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + theInstanceId; pollLocation = UrlUtil.sanitizeHeaderValue(pollLocation); HttpServletResponse response = theRequestDetails.getServletResponse(); @@ -548,10 +731,9 @@ public class BulkDataExportProvider { for (IPrimitiveType next : theTypeFilter) { String typeFilterString = next.getValueAsString(); - Arrays - .stream(typeFilterString.split(FARM_TO_TABLE_TYPE_FILTER_REGEX)) - .filter(StringUtils::isNotBlank) - .forEach(retVal::add); + Arrays.stream(typeFilterString.split(FARM_TO_TABLE_TYPE_FILTER_REGEX)) + .filter(StringUtils::isNotBlank) + .forEach(retVal::add); } return retVal; diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportAppCtx.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportAppCtx.java index 2c4b1a27fb8..1326534e3a5 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportAppCtx.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportAppCtx.java @@ -23,10 +23,10 @@ import ca.uhn.fhir.batch2.api.VoidModel; import ca.uhn.fhir.batch2.jobs.export.models.BulkExportBinaryFileId; import ca.uhn.fhir.batch2.jobs.export.models.ExpandedResourcesList; import ca.uhn.fhir.batch2.jobs.export.models.ResourceIdList; -import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters; import ca.uhn.fhir.batch2.model.JobDefinition; import ca.uhn.fhir.jpa.api.model.BulkExportJobResults; import ca.uhn.fhir.model.api.IModelJson; +import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters; import ca.uhn.fhir.util.Batch2JobDefinitionConstants; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -44,39 +44,32 @@ public class BulkExportAppCtx { builder.setJobDescription("FHIR Bulk Export"); builder.setJobDefinitionVersion(1); - JobDefinition def = builder.setParametersType(BulkExportJobParameters.class) - // validator - .setParametersValidator(bulkExportJobParametersValidator()) - .gatedExecution() - // first step - load in (all) ids and create id chunks of 1000 each - .addFirstStep( - "fetch-resources", - "Fetches resource PIDs for exporting", - ResourceIdList.class, - fetchResourceIdsStep() - ) - // expand out - fetch resources - .addIntermediateStep( - "expand-resources", - "Expand out resources", - ExpandedResourcesList.class, - expandResourcesStep() - ) - // write binaries and save to db - .addIntermediateStep( - WRITE_TO_BINARIES, - "Writes the expanded resources to the binaries and saves", - BulkExportBinaryFileId.class, - writeBinaryStep() - ) - // finalize the job (set to complete) - .addFinalReducerStep( - "create-report-step", - "Creates the output report from a bulk export job", - BulkExportJobResults.class, - createReportStep() - ) - .build(); + JobDefinition def = 
builder.setParametersType(BulkExportJobParameters.class) + // validator + .setParametersValidator(bulkExportJobParametersValidator()) + .gatedExecution() + // first step - load in (all) ids and create id chunks of 1000 each + .addFirstStep( + "fetch-resources", + "Fetches resource PIDs for exporting", + ResourceIdList.class, + fetchResourceIdsStep()) + // expand out - fetch resources + .addIntermediateStep( + "expand-resources", "Expand out resources", ExpandedResourcesList.class, expandResourcesStep()) + // write binaries and save to db + .addIntermediateStep( + WRITE_TO_BINARIES, + "Writes the expanded resources to the binaries and saves", + BulkExportBinaryFileId.class, + writeBinaryStep()) + // finalize the job (set to complete) + .addFinalReducerStep( + "create-report-step", + "Creates the output report from a bulk export job", + BulkExportJobResults.class, + createReportStep()) + .build(); return def; } diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportCreateReportStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportCreateReportStep.java index aea4759ae84..41a2f600f2a 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportCreateReportStep.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportCreateReportStep.java @@ -27,42 +27,48 @@ import ca.uhn.fhir.batch2.api.JobExecutionFailedException; import ca.uhn.fhir.batch2.api.RunOutcome; import ca.uhn.fhir.batch2.api.StepExecutionDetails; import ca.uhn.fhir.batch2.jobs.export.models.BulkExportBinaryFileId; -import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters; import ca.uhn.fhir.batch2.model.ChunkOutcome; import ca.uhn.fhir.batch2.model.JobInstance; import ca.uhn.fhir.jpa.api.model.BulkExportJobResults; +import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters; import org.slf4j.Logger; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import javax.annotation.Nonnull; import static org.slf4j.LoggerFactory.getLogger; -public class BulkExportCreateReportStep implements IReductionStepWorker { +public class BulkExportCreateReportStep + implements IReductionStepWorker { private static final Logger ourLog = getLogger(BulkExportCreateReportStep.class); private Map> myResourceToBinaryIds; @Nonnull @Override - public RunOutcome run(@Nonnull StepExecutionDetails theStepExecutionDetails, - @Nonnull IJobDataSink theDataSink) throws JobExecutionFailedException { + public RunOutcome run( + @Nonnull StepExecutionDetails theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException { BulkExportJobResults results = new BulkExportJobResults(); String requestUrl = getOriginatingRequestUrl(theStepExecutionDetails, results); results.setOriginalRequestUrl(requestUrl); if (myResourceToBinaryIds != null) { - ourLog.info("Bulk Export Report creation step for instance: {}", theStepExecutionDetails.getInstance().getInstanceId()); + ourLog.info( + "Bulk Export Report creation step for instance: {}", + theStepExecutionDetails.getInstance().getInstanceId()); results.setResourceTypeToBinaryIds(myResourceToBinaryIds); myResourceToBinaryIds = null; } else { - String msg = "Export complete, but no data to generate report for job instance: " + theStepExecutionDetails.getInstance().getInstanceId(); + String msg = "Export complete, but no data to generate report for 
job instance: " + + theStepExecutionDetails.getInstance().getInstanceId(); ourLog.warn(msg); results.setReportMsg(msg); @@ -75,8 +81,8 @@ public class BulkExportCreateReportStep implements IReductionStepWorker theChunkDetails) { + public ChunkOutcome consume( + ChunkExecutionDetails theChunkDetails) { BulkExportBinaryFileId fileId = theChunkDetails.getData(); if (myResourceToBinaryIds == null) { myResourceToBinaryIds = new HashMap<>(); @@ -89,7 +95,9 @@ public class BulkExportCreateReportStep implements IReductionStepWorker theStepExecutionDetails, BulkExportJobResults results) { + private static String getOriginatingRequestUrl( + @Nonnull StepExecutionDetails theStepExecutionDetails, + BulkExportJobResults results) { IJobInstance instance = theStepExecutionDetails.getInstance(); String url = ""; if (instance instanceof JobInstance) { diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidator.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidator.java index 7c1ff82abe7..caa330197a6 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidator.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/BulkExportJobParametersValidator.java @@ -20,31 +20,33 @@ package ca.uhn.fhir.batch2.jobs.export; import ca.uhn.fhir.batch2.api.IJobParametersValidator; -import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.binary.api.IBinaryStorageSvc; import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult; import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryResourceMatcher; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import org.apache.commons.lang3.StringUtils; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.isBlank; public class BulkExportJobParametersValidator implements IJobParametersValidator { /** @deprecated use BulkDataExportProvider.UNSUPPORTED_BINARY_TYPE instead */ - @Deprecated(since="6.3.10") + @Deprecated(since = "6.3.10") public static final String UNSUPPORTED_BINARY_TYPE = BulkDataExportProvider.UNSUPPORTED_BINARY_TYPE; + @Autowired private DaoRegistry myDaoRegistry; + @Autowired private InMemoryResourceMatcher myInMemoryResourceMatcher; @@ -84,11 +86,11 @@ public class BulkExportJobParametersValidator implements IJobParametersValidator BulkExportJobParameters.ExportStyle style = theParameters.getExportStyle(); if (style == null) { errorMsgs.add("Export style is required"); - } - else { + } else { switch (style) { case GROUP: - if (theParameters.getGroupId() == null || theParameters.getGroupId().isEmpty()) { + if (theParameters.getGroupId() == null + || theParameters.getGroupId().isEmpty()) { errorMsgs.add("Group export requires a group id, but none provided."); } break; @@ -102,7 +104,8 @@ public class BulkExportJobParametersValidator implements IJobParametersValidator // Validate post fetch filter URLs for (String next : theParameters.getPostFetchFilterUrls()) { if 
(!next.contains("?") || isBlank(next.substring(next.indexOf('?') + 1))) { - errorMsgs.add("Invalid post-fetch filter URL, must be in the format [resourceType]?[parameters]: " + next); + errorMsgs.add( + "Invalid post-fetch filter URL, must be in the format [resourceType]?[parameters]: " + next); continue; } String resourceType = next.substring(0, next.indexOf('?')); @@ -114,7 +117,8 @@ public class BulkExportJobParametersValidator implements IJobParametersValidator try { InMemoryMatchResult inMemoryMatchResult = myInMemoryResourceMatcher.canBeEvaluatedInMemory(next); if (!inMemoryMatchResult.supported()) { - errorMsgs.add("Invalid post-fetch filter URL, filter is not supported for in-memory matching \"" + next + "\". Reason: " + inMemoryMatchResult.getUnsupportedReason()); + errorMsgs.add("Invalid post-fetch filter URL, filter is not supported for in-memory matching \"" + + next + "\". Reason: " + inMemoryMatchResult.getUnsupportedReason()); } } catch (InvalidRequestException e) { errorMsgs.add("Invalid post-fetch filter URL. Reason: " + e.getMessage()); @@ -123,5 +127,4 @@ public class BulkExportJobParametersValidator implements IJobParametersValidator return errorMsgs; } - } diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourcesStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourcesStep.java index b0bf68a051c..f0b228993b3 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourcesStep.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/ExpandResourcesStep.java @@ -24,13 +24,12 @@ import ca.uhn.fhir.batch2.api.IJobStepWorker; import ca.uhn.fhir.batch2.api.JobExecutionFailedException; import ca.uhn.fhir.batch2.api.RunOutcome; import ca.uhn.fhir.batch2.api.StepExecutionDetails; -import ca.uhn.fhir.interceptor.api.HookParams; -import ca.uhn.fhir.interceptor.api.Pointcut; -import ca.uhn.fhir.interceptor.executor.InterceptorService; -import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters; import ca.uhn.fhir.batch2.jobs.export.models.ExpandedResourcesList; import ca.uhn.fhir.batch2.jobs.export.models.ResourceIdList; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.interceptor.api.HookParams; +import ca.uhn.fhir.interceptor.api.Pointcut; +import ca.uhn.fhir.interceptor.executor.InterceptorService; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; @@ -45,6 +44,7 @@ import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryResourceMatcher; import ca.uhn.fhir.parser.IParser; import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.api.server.SystemRequestDetails; +import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters; import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; import ca.uhn.fhir.rest.param.TokenOrListParam; import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationSvc; @@ -55,18 +55,19 @@ import org.slf4j.Logger; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import static ca.uhn.fhir.rest.api.Constants.PARAM_ID; import static 
org.slf4j.LoggerFactory.getLogger; -public class ExpandResourcesStep implements IJobStepWorker { +public class ExpandResourcesStep + implements IJobStepWorker { private static final Logger ourLog = getLogger(ExpandResourcesStep.class); @Autowired @@ -100,25 +101,29 @@ public class ExpandResourcesStep implements IJobStepWorker theStepExecutionDetails, - @Nonnull IJobDataSink theDataSink) throws JobExecutionFailedException { + public RunOutcome run( + @Nonnull StepExecutionDetails theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException { String instanceId = theStepExecutionDetails.getInstance().getInstanceId(); String chunkId = theStepExecutionDetails.getChunkId(); ResourceIdList idList = theStepExecutionDetails.getData(); BulkExportJobParameters parameters = theStepExecutionDetails.getParameters(); - ourLog.info("Bulk export instance[{}] chunk[{}] - About to expand {} resource IDs into their full resource bodies.", instanceId, chunkId, idList.getIds().size()); + ourLog.info( + "Bulk export instance[{}] chunk[{}] - About to expand {} resource IDs into their full resource bodies.", + instanceId, + chunkId, + idList.getIds().size()); // search the resources List allResources = fetchAllResources(idList, parameters.getPartitionId()); // Apply post-fetch filtering String resourceType = idList.getResourceType(); - List postFetchFilterUrls = parameters - .getPostFetchFilterUrls() - .stream() - .filter(t -> t.substring(0, t.indexOf('?')).equals(resourceType)) - .collect(Collectors.toList()); + List postFetchFilterUrls = parameters.getPostFetchFilterUrls().stream() + .filter(t -> t.substring(0, t.indexOf('?')).equals(resourceType)) + .collect(Collectors.toList()); if (!postFetchFilterUrls.isEmpty()) { applyPostFetchFiltering(allResources, postFetchFilterUrls, instanceId, chunkId); @@ -143,9 +148,10 @@ public class ExpandResourcesStep implements IJobStepWorker iter = allResources.iterator(); iter.hasNext(); ) { HookParams params = new HookParams() - .add(BulkExportJobParameters.class, theStepExecutionDetails.getParameters()) - .add(IBaseResource.class, iter.next()); - boolean outcome = myInterceptorService.callHooks(Pointcut.STORAGE_BULK_EXPORT_RESOURCE_INCLUSION, params); + .add(BulkExportJobParameters.class, theStepExecutionDetails.getParameters()) + .add(IBaseResource.class, iter.next()); + boolean outcome = + myInterceptorService.callHooks(Pointcut.STORAGE_BULK_EXPORT_RESOURCE_INCLUSION, params); if (!outcome) { iter.remove(); } @@ -163,17 +169,21 @@ public class ExpandResourcesStep implements IJobStepWorker theResources, List thePostFetchFilterUrls, String theInstanceId, String theChunkId) { + private void applyPostFetchFiltering( + List theResources, + List thePostFetchFilterUrls, + String theInstanceId, + String theChunkId) { int numRemoved = 0; for (Iterator iter = theResources.iterator(); iter.hasNext(); ) { boolean matched = applyPostFetchFilteringForSingleResource(thePostFetchFilterUrls, iter); @@ -185,11 +195,16 @@ public class ExpandResourcesStep implements IJobStepWorker 0) { - ourLog.info("Bulk export instance[{}] chunk[{}] - {} resources were filtered out because of post-fetch filter URLs", theInstanceId, theChunkId, numRemoved); + ourLog.info( + "Bulk export instance[{}] chunk[{}] - {} resources were filtered out because of post-fetch filter URLs", + theInstanceId, + theChunkId, + numRemoved); } } - private boolean applyPostFetchFilteringForSingleResource(List thePostFetchFilterUrls, Iterator iter) { + private boolean 
applyPostFetchFilteringForSingleResource( + List thePostFetchFilterUrls, Iterator iter) { IBaseResource nextResource = iter.next(); String nextResourceType = myFhirContext.getResourceType(nextResource); @@ -197,7 +212,8 @@ public class ExpandResourcesStep implements IJobStepWorker nextBatchOfPids = - allIds - .subList(0, batchSize) - .stream() + Set nextBatchOfPids = allIds.subList(0, batchSize).stream() .map(t -> myIdHelperService.newPidFromStringIdAndResourceName(t, resourceType)) .collect(Collectors.toSet()); allIds = allIds.subList(batchSize, allIds.size()); PersistentIdToForcedIdMap nextBatchOfResourceIds = myTransactionService - .withRequest(null) - .execute(() -> myIdHelperService.translatePidsToForcedIds(nextBatchOfPids)); + .withRequest(null) + .execute(() -> myIdHelperService.translatePidsToForcedIds(nextBatchOfPids)); TokenOrListParam idListParam = new TokenOrListParam(); for (IResourcePersistentId nextPid : nextBatchOfPids) { @@ -241,13 +254,11 @@ public class ExpandResourcesStep implements IJobStepWorker encodeToString(List theResources, BulkExportJobParameters theParameters) { + private ListMultimap encodeToString( + List theResources, BulkExportJobParameters theParameters) { IParser parser = getParser(theParameters); ListMultimap retVal = ArrayListMultimap.create(); diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/FetchResourceIdsStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/FetchResourceIdsStep.java index 4e591a90cf8..62626d7e424 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/FetchResourceIdsStep.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/FetchResourceIdsStep.java @@ -25,24 +25,24 @@ import ca.uhn.fhir.batch2.api.JobExecutionFailedException; import ca.uhn.fhir.batch2.api.RunOutcome; import ca.uhn.fhir.batch2.api.StepExecutionDetails; import ca.uhn.fhir.batch2.api.VoidModel; -import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters; import ca.uhn.fhir.batch2.jobs.export.models.ResourceIdList; import ca.uhn.fhir.batch2.jobs.models.BatchResourceId; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.bulk.export.api.IBulkExportProcessor; import ca.uhn.fhir.jpa.bulk.export.model.ExportPIDIteratorParameters; +import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters; import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; +import javax.annotation.Nonnull; public class FetchResourceIdsStep implements IFirstJobStepWorker { private static final Logger ourLog = LoggerFactory.getLogger(FetchResourceIdsStep.class); @@ -55,10 +55,14 @@ public class FetchResourceIdsStep implements IFirstJobStepWorker theStepExecutionDetails, - @Nonnull IJobDataSink theDataSink) throws JobExecutionFailedException { + public RunOutcome run( + @Nonnull StepExecutionDetails theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException { BulkExportJobParameters params = theStepExecutionDetails.getParameters(); - ourLog.info("Fetching resource IDs for bulk export job instance[{}]", theStepExecutionDetails.getInstance().getInstanceId()); + ourLog.info( + "Fetching 
resource IDs for bulk export job instance[{}]", + theStepExecutionDetails.getInstance().getInstanceId()); ExportPIDIteratorParameters providerParams = new ExportPIDIteratorParameters(); providerParams.setInstanceId(theStepExecutionDetails.getInstance().getInstanceId()); @@ -89,8 +93,12 @@ public class FetchResourceIdsStep implements IFirstJobStepWorker pidIterator = myBulkExportProcessor.getResourcePidIterator(providerParams); + ourLog.info( + "Running FetchResourceIdsStep for resource type: {} with params: {}", + resourceType, + providerParams); + Iterator pidIterator = + myBulkExportProcessor.getResourcePidIterator(providerParams); List idsToSubmit = new ArrayList<>(); if (!pidIterator.hasNext()) { @@ -138,10 +146,11 @@ public class FetchResourceIdsStep implements IFirstJobStepWorker theBatchResourceIds, - String theResourceType, - BulkExportJobParameters theParams, - IJobDataSink theDataSink) { + private void submitWorkChunk( + List theBatchResourceIds, + String theResourceType, + BulkExportJobParameters theParams, + IJobDataSink theDataSink) { ResourceIdList idList = new ResourceIdList(); idList.setIds(theBatchResourceIds); diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/WriteBinaryStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/WriteBinaryStep.java index 59071628dad..265bf9a0b80 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/WriteBinaryStep.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/WriteBinaryStep.java @@ -25,25 +25,23 @@ import ca.uhn.fhir.batch2.api.JobExecutionFailedException; import ca.uhn.fhir.batch2.api.RunOutcome; import ca.uhn.fhir.batch2.api.StepExecutionDetails; import ca.uhn.fhir.batch2.jobs.export.models.BulkExportBinaryFileId; -import ca.uhn.fhir.context.FhirVersionEnum; -import ca.uhn.fhir.jpa.util.RandomTextUtils; -import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters; import ca.uhn.fhir.batch2.jobs.export.models.ExpandedResourcesList; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; import ca.uhn.fhir.jpa.model.util.JpaConstants; +import ca.uhn.fhir.jpa.util.RandomTextUtils; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.server.SystemRequestDetails; +import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.util.BinaryUtil; import ca.uhn.fhir.util.FhirTerser; -import org.apache.commons.lang3.RandomStringUtils; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBaseBinary; import org.hl7.fhir.instance.model.api.IBaseExtension; import org.hl7.fhir.instance.model.api.IBaseHasExtensions; @@ -51,15 +49,16 @@ import org.hl7.fhir.instance.model.api.IIdType; import org.slf4j.Logger; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nonnull; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStreamWriter; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isNotBlank; import static org.slf4j.LoggerFactory.getLogger; -public class WriteBinaryStep 
implements IJobStepWorker { +public class WriteBinaryStep + implements IJobStepWorker { private static final Logger ourLog = getLogger(WriteBinaryStep.class); @Autowired @@ -70,11 +69,14 @@ public class WriteBinaryStep implements IJobStepWorker theStepExecutionDetails, - @Nonnull IJobDataSink theDataSink) throws JobExecutionFailedException { + public RunOutcome run( + @Nonnull StepExecutionDetails theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException { ExpandedResourcesList expandedResources = theStepExecutionDetails.getData(); - final int numResourcesProcessed = expandedResources.getStringifiedResources().size(); + final int numResourcesProcessed = + expandedResources.getStringifiedResources().size(); ourLog.info("Write binary step of Job Export"); ourLog.info("Writing {} resources to binary file", numResourcesProcessed); @@ -105,9 +107,9 @@ public class WriteBinaryStep implements IJobStepWorker theStepExecutionDetails, ExpandedResourcesList expandedResources, IBaseBinary binary) { + private void addMetadataExtensionsToBinary( + @Nonnull StepExecutionDetails theStepExecutionDetails, + ExpandedResourcesList expandedResources, + IBaseBinary binary) { // Note that this applies only to hl7.org structures, so these extensions will not be added // to DSTU2 structures if (binary.getMeta() instanceof IBaseHasExtensions) { IBaseHasExtensions meta = (IBaseHasExtensions) binary.getMeta(); - //export identifier, potentially null. + // export identifier, potentially null. String exportIdentifier = theStepExecutionDetails.getParameters().getExportIdentifier(); if (!StringUtils.isBlank(exportIdentifier)) { IBaseExtension exportIdentifierExtension = meta.addExtension(); @@ -188,17 +201,19 @@ public class WriteBinaryStep implements IJobStepWorker jobExtension = meta.addExtension(); jobExtension.setUrl(JpaConstants.BULK_META_EXTENSION_JOB_ID); - jobExtension.setValue(myFhirContext.newPrimitiveString(theStepExecutionDetails.getInstance().getInstanceId())); + jobExtension.setValue(myFhirContext.newPrimitiveString( + theStepExecutionDetails.getInstance().getInstanceId())); - //resource type + // resource type IBaseExtension typeExtension = meta.addExtension(); typeExtension.setUrl(JpaConstants.BULK_META_EXTENSION_RESOURCE_TYPE); typeExtension.setValue(myFhirContext.newPrimitiveString(expandedResources.getResourceType())); } else { - ourLog.warn("Could not attach metadata extensions to binary resource, as this binary metadata does not support extensions"); + ourLog.warn( + "Could not attach metadata extensions to binary resource, as this binary metadata does not support extensions"); } } diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/models/BulkExportBinaryFileId.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/models/BulkExportBinaryFileId.java index 9197adce3ca..8eae14cdcc8 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/models/BulkExportBinaryFileId.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/models/BulkExportBinaryFileId.java @@ -29,8 +29,7 @@ public class BulkExportBinaryFileId extends BulkExportJobBase { @JsonProperty("resourceType") private String myResourceType; - public BulkExportBinaryFileId() { - } + public BulkExportBinaryFileId() {} public String getBinaryId() { return myBinaryId; diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/models/BulkExportJobBase.java 
b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/models/BulkExportJobBase.java index 56d19d35cd7..57fe4c0fe95 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/models/BulkExportJobBase.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/export/models/BulkExportJobBase.java @@ -21,6 +21,4 @@ package ca.uhn.fhir.batch2.jobs.export.models; import ca.uhn.fhir.model.api.IModelJson; -public class BulkExportJobBase implements IModelJson { - -} +public class BulkExportJobBase implements IModelJson {} diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeAppCtx.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeAppCtx.java index ee67ce1a0fb..39624e7c350 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeAppCtx.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeAppCtx.java @@ -41,46 +41,54 @@ public class DeleteExpungeAppCtx { public static final String JOB_DELETE_EXPUNGE = "DELETE_EXPUNGE"; - @Bean public JobDefinition expungeJobDefinition( - IBatch2DaoSvc theBatch2DaoSvc, - HapiTransactionService theHapiTransactionService, - IDeleteExpungeSvc theDeleteExpungeSvc, - IIdHelperService theIdHelperService, - IRequestPartitionHelperSvc theRequestPartitionHelperSvc) { - return JobDefinition - .newBuilder() - .setJobDefinitionId(JOB_DELETE_EXPUNGE) - .setJobDescription("Expunge resources") - .setJobDefinitionVersion(1) - .setParametersType(DeleteExpungeJobParameters.class) - .setParametersValidator(expungeJobParametersValidator(theBatch2DaoSvc, theDeleteExpungeSvc, theRequestPartitionHelperSvc)) - .gatedExecution() - .addFirstStep( - "generate-ranges", - "Generate data ranges to expunge", - PartitionedUrlChunkRangeJson.class, - expungeGenerateRangeChunksStep()) - .addIntermediateStep( - "load-ids", - "Load IDs of resources to expunge", - ResourceIdListWorkChunkJson.class, - new LoadIdsStep(theBatch2DaoSvc)) - .addLastStep("expunge", - "Perform the resource expunge", - expungeStep(theHapiTransactionService, theDeleteExpungeSvc, theIdHelperService) - ) - .build(); + IBatch2DaoSvc theBatch2DaoSvc, + HapiTransactionService theHapiTransactionService, + IDeleteExpungeSvc theDeleteExpungeSvc, + IIdHelperService theIdHelperService, + IRequestPartitionHelperSvc theRequestPartitionHelperSvc) { + return JobDefinition.newBuilder() + .setJobDefinitionId(JOB_DELETE_EXPUNGE) + .setJobDescription("Expunge resources") + .setJobDefinitionVersion(1) + .setParametersType(DeleteExpungeJobParameters.class) + .setParametersValidator(expungeJobParametersValidator( + theBatch2DaoSvc, theDeleteExpungeSvc, theRequestPartitionHelperSvc)) + .gatedExecution() + .addFirstStep( + "generate-ranges", + "Generate data ranges to expunge", + PartitionedUrlChunkRangeJson.class, + expungeGenerateRangeChunksStep()) + .addIntermediateStep( + "load-ids", + "Load IDs of resources to expunge", + ResourceIdListWorkChunkJson.class, + new LoadIdsStep(theBatch2DaoSvc)) + .addLastStep( + "expunge", + "Perform the resource expunge", + expungeStep(theHapiTransactionService, theDeleteExpungeSvc, theIdHelperService)) + .build(); } @Bean - public DeleteExpungeJobParametersValidator expungeJobParametersValidator(IBatch2DaoSvc theBatch2DaoSvc, IDeleteExpungeSvc theDeleteExpungeSvc, IRequestPartitionHelperSvc theRequestPartitionHelperSvc) { - return new 
DeleteExpungeJobParametersValidator(new UrlListValidator(ProviderConstants.OPERATION_EXPUNGE, theBatch2DaoSvc), theDeleteExpungeSvc, theRequestPartitionHelperSvc); + public DeleteExpungeJobParametersValidator expungeJobParametersValidator( + IBatch2DaoSvc theBatch2DaoSvc, + IDeleteExpungeSvc theDeleteExpungeSvc, + IRequestPartitionHelperSvc theRequestPartitionHelperSvc) { + return new DeleteExpungeJobParametersValidator( + new UrlListValidator(ProviderConstants.OPERATION_EXPUNGE, theBatch2DaoSvc), + theDeleteExpungeSvc, + theRequestPartitionHelperSvc); } @Bean - public DeleteExpungeStep expungeStep(HapiTransactionService theHapiTransactionService, IDeleteExpungeSvc theDeleteExpungeSvc, IIdHelperService theIdHelperService) { + public DeleteExpungeStep expungeStep( + HapiTransactionService theHapiTransactionService, + IDeleteExpungeSvc theDeleteExpungeSvc, + IIdHelperService theIdHelperService) { return new DeleteExpungeStep(theHapiTransactionService, theDeleteExpungeSvc, theIdHelperService); } @@ -90,7 +98,8 @@ public class DeleteExpungeAppCtx { } @Bean - public DeleteExpungeProvider deleteExpungeProvider(FhirContext theFhirContext, IDeleteExpungeJobSubmitter theDeleteExpungeJobSubmitter) { + public DeleteExpungeProvider deleteExpungeProvider( + FhirContext theFhirContext, IDeleteExpungeJobSubmitter theDeleteExpungeJobSubmitter) { return new DeleteExpungeProvider(theFhirContext, theDeleteExpungeJobSubmitter); } } diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeJobParameters.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeJobParameters.java index d4a13c807e3..579bf35cbc4 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeJobParameters.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeJobParameters.java @@ -25,6 +25,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; public class DeleteExpungeJobParameters extends PartitionedUrlListJobParameters { @JsonProperty("cascade") private boolean myCascade; + @JsonProperty("cascadeMaxRounds") private Integer myCascadeMaxRounds; diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeJobParametersValidator.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeJobParametersValidator.java index 8bc58201101..8ac32866735 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeJobParametersValidator.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeJobParametersValidator.java @@ -27,16 +27,19 @@ import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.util.ValidateUtil; +import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.List; public class DeleteExpungeJobParametersValidator implements IJobParametersValidator { private final IUrlListValidator myUrlListValidator; private final IDeleteExpungeSvc myDeleteExpungeSvc; private final IRequestPartitionHelperSvc myRequestPartitionHelperSvc; - public DeleteExpungeJobParametersValidator(IUrlListValidator theUrlListValidator, IDeleteExpungeSvc theDeleteExpungeSvc, IRequestPartitionHelperSvc theRequestPartitionHelperSvc) { + public DeleteExpungeJobParametersValidator( + IUrlListValidator 
theUrlListValidator, + IDeleteExpungeSvc theDeleteExpungeSvc, + IRequestPartitionHelperSvc theRequestPartitionHelperSvc) { myUrlListValidator = theUrlListValidator; myDeleteExpungeSvc = theDeleteExpungeSvc; myRequestPartitionHelperSvc = theRequestPartitionHelperSvc; @@ -52,13 +55,17 @@ public class DeleteExpungeJobParametersValidator implements IJobParametersValida } // Verify that the user has access to all requested partitions - myRequestPartitionHelperSvc.validateHasPartitionPermissions(theRequestDetails, null, theParameters.getRequestPartitionId()); + myRequestPartitionHelperSvc.validateHasPartitionPermissions( + theRequestDetails, null, theParameters.getRequestPartitionId()); for (PartitionedUrl partitionedUrl : theParameters.getPartitionedUrls()) { String url = partitionedUrl.getUrl(); - ValidateUtil.isTrueOrThrowInvalidRequest(url.matches("[a-zA-Z]+\\?.*"), "Delete expunge URLs must be in the format [resourceType]?[parameters]"); + ValidateUtil.isTrueOrThrowInvalidRequest( + url.matches("[a-zA-Z]+\\?.*"), + "Delete expunge URLs must be in the format [resourceType]?[parameters]"); if (partitionedUrl.getRequestPartitionId() != null) { - myRequestPartitionHelperSvc.validateHasPartitionPermissions(theRequestDetails, null, partitionedUrl.getRequestPartitionId()); + myRequestPartitionHelperSvc.validateHasPartitionPermissions( + theRequestDetails, null, partitionedUrl.getRequestPartitionId()); } } return myUrlListValidator.validatePartitionedUrls(theParameters.getPartitionedUrls()); diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeJobSubmitterImpl.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeJobSubmitterImpl.java index 4d0590aec98..14117e28fe9 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeJobSubmitterImpl.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeJobSubmitterImpl.java @@ -51,48 +51,63 @@ import static ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeAppCtx.JOB_DELETE_EXP public class DeleteExpungeJobSubmitterImpl implements IDeleteExpungeJobSubmitter { @Autowired IJobCoordinator myJobCoordinator; + @Autowired FhirContext myFhirContext; + @Autowired MatchUrlService myMatchUrlService; + @Autowired IRequestPartitionHelperSvc myRequestPartitionHelperSvc; + @Autowired JpaStorageSettings myStorageSettings; + @Autowired IInterceptorBroadcaster myInterceptorBroadcaster; + @Autowired UrlPartitioner myUrlPartitioner; @Override @Transactional(propagation = Propagation.NEVER) - public String submitJob(Integer theBatchSize, List theUrlsToDeleteExpunge, boolean theCascade, Integer theCascadeMaxRounds, RequestDetails theRequestDetails) { + public String submitJob( + Integer theBatchSize, + List theUrlsToDeleteExpunge, + boolean theCascade, + Integer theCascadeMaxRounds, + RequestDetails theRequestDetails) { if (theBatchSize == null) { theBatchSize = myStorageSettings.getExpungeBatchSize(); } if (!myStorageSettings.canDeleteExpunge()) { - throw new ForbiddenOperationException(Msg.code(820) + "Delete Expunge not allowed: " + myStorageSettings.cannotDeleteExpungeReason()); + throw new ForbiddenOperationException( + Msg.code(820) + "Delete Expunge not allowed: " + myStorageSettings.cannotDeleteExpungeReason()); } for (String url : theUrlsToDeleteExpunge) { HookParams params = new HookParams() - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, 
theRequestDetails) - .add(String.class, url); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PRE_DELETE_EXPUNGE, params); + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(String.class, url); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PRE_DELETE_EXPUNGE, params); } DeleteExpungeJobParameters deleteExpungeJobParameters = new DeleteExpungeJobParameters(); // Set partition for each url since resource type can determine partition theUrlsToDeleteExpunge.stream() - .filter(StringUtils::isNotBlank) - .map(url -> myUrlPartitioner.partitionUrl(url, theRequestDetails)) - .forEach(deleteExpungeJobParameters::addPartitionedUrl); + .filter(StringUtils::isNotBlank) + .map(url -> myUrlPartitioner.partitionUrl(url, theRequestDetails)) + .forEach(deleteExpungeJobParameters::addPartitionedUrl); deleteExpungeJobParameters.setBatchSize(theBatchSize); - ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_DELETE_EXPUNGE); + ReadPartitionIdRequestDetails details = + ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_DELETE_EXPUNGE); // Also set toplevel partition in case there are no urls - RequestPartitionId requestPartition = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, details); + RequestPartitionId requestPartition = + myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, details); deleteExpungeJobParameters.setRequestPartitionId(requestPartition); deleteExpungeJobParameters.setCascade(theCascade); deleteExpungeJobParameters.setCascadeMaxRounds(theCascadeMaxRounds); diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeProvider.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeProvider.java index 83496f098c7..883ac4b55d6 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeProvider.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeProvider.java @@ -32,7 +32,6 @@ import org.apache.commons.lang3.StringUtils; import org.hl7.fhir.instance.model.api.IBaseParameters; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import java.math.BigDecimal; import java.util.List; import java.util.stream.Collectors; @@ -47,22 +46,34 @@ public class DeleteExpungeProvider { @Operation(name = ProviderConstants.OPERATION_DELETE_EXPUNGE, idempotent = false) public IBaseParameters deleteExpunge( - @OperationParam(name = ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, typeName = "string", min = 1) List> theUrlsToDeleteExpunge, - @OperationParam(name = ProviderConstants.OPERATION_DELETE_BATCH_SIZE, typeName = "integer", min = 0, max = 1) IPrimitiveType theBatchSize, - @OperationParam(name = ProviderConstants.OPERATION_DELETE_CASCADE, typeName = "boolean", min = 0, max = 1) IPrimitiveType theCascade, - @OperationParam(name = ProviderConstants.OPERATION_DELETE_CASCADE_MAX_ROUNDS, typeName = "integer", min = 0, max = 1) IPrimitiveType theCascadeMaxRounds, - RequestDetails theRequestDetails - ) { + @OperationParam(name = ProviderConstants.OPERATION_DELETE_EXPUNGE_URL, typeName = "string", min = 1) + List> theUrlsToDeleteExpunge, + @OperationParam( + name = 
ProviderConstants.OPERATION_DELETE_BATCH_SIZE, + typeName = "integer", + min = 0, + max = 1) + IPrimitiveType theBatchSize, + @OperationParam(name = ProviderConstants.OPERATION_DELETE_CASCADE, typeName = "boolean", min = 0, max = 1) + IPrimitiveType theCascade, + @OperationParam( + name = ProviderConstants.OPERATION_DELETE_CASCADE_MAX_ROUNDS, + typeName = "integer", + min = 0, + max = 1) + IPrimitiveType theCascadeMaxRounds, + RequestDetails theRequestDetails) { if (theUrlsToDeleteExpunge == null) { - throw new InvalidRequestException(Msg.code(2101) + "At least one `url` parameter to $delete-expunge must be provided."); + throw new InvalidRequestException( + Msg.code(2101) + "At least one `url` parameter to $delete-expunge must be provided."); } List urls = theUrlsToDeleteExpunge.stream() - .map(IPrimitiveType::getValue) - .filter(StringUtils::isNotBlank) - .collect(Collectors.toList()); + .map(IPrimitiveType::getValue) + .filter(StringUtils::isNotBlank) + .collect(Collectors.toList()); Integer batchSize = null; - if (theBatchSize != null && theBatchSize.getValue() !=null && theBatchSize.getValue() > 0) { + if (theBatchSize != null && theBatchSize.getValue() != null && theBatchSize.getValue() > 0) { batchSize = theBatchSize.getValue(); } @@ -76,10 +87,12 @@ public class DeleteExpungeProvider { cascadeMaxRounds = theCascadeMaxRounds.getValue(); } - String jobId = myDeleteExpungeJobSubmitter.submitJob(batchSize, urls, cascase, cascadeMaxRounds, theRequestDetails); + String jobId = + myDeleteExpungeJobSubmitter.submitJob(batchSize, urls, cascase, cascadeMaxRounds, theRequestDetails); IBaseParameters retval = ParametersUtil.newInstance(myFhirContext); - ParametersUtil.addParameterToParametersString(myFhirContext, retval, ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, jobId); + ParametersUtil.addParameterToParametersString( + myFhirContext, retval, ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, jobId); return retval; } } diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeStep.java index 5c94bf82e0f..44bff8ef4ab 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeStep.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeStep.java @@ -33,17 +33,21 @@ import org.slf4j.LoggerFactory; import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.support.TransactionCallback; -import javax.annotation.Nonnull; import java.util.List; +import javax.annotation.Nonnull; -public class DeleteExpungeStep implements IJobStepWorker { +public class DeleteExpungeStep + implements IJobStepWorker { private static final Logger ourLog = LoggerFactory.getLogger(DeleteExpungeStep.class); private final HapiTransactionService myHapiTransactionService; private final IDeleteExpungeSvc myDeleteExpungeSvc; private final IIdHelperService myIdHelperService; - public DeleteExpungeStep(HapiTransactionService theHapiTransactionService, IDeleteExpungeSvc theDeleteExpungeSvc, IIdHelperService theIdHelperService) { + public DeleteExpungeStep( + HapiTransactionService theHapiTransactionService, + IDeleteExpungeSvc theDeleteExpungeSvc, + IIdHelperService theIdHelperService) { myHapiTransactionService = theHapiTransactionService; myDeleteExpungeSvc = theDeleteExpungeSvc; myIdHelperService = theIdHelperService; @@ -51,25 +55,50 @@ public class 
DeleteExpungeStep implements IJobStepWorker theStepExecutionDetails, @Nonnull IJobDataSink theDataSink) throws JobExecutionFailedException { + public RunOutcome run( + @Nonnull + StepExecutionDetails + theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException { ResourceIdListWorkChunkJson data = theStepExecutionDetails.getData(); boolean cascade = theStepExecutionDetails.getParameters().isCascade(); Integer cascadeMaxRounds = theStepExecutionDetails.getParameters().getCascadeMaxRounds(); - return doDeleteExpunge(data, theDataSink, theStepExecutionDetails.getInstance().getInstanceId(), theStepExecutionDetails.getChunkId(), cascade, cascadeMaxRounds); + return doDeleteExpunge( + data, + theDataSink, + theStepExecutionDetails.getInstance().getInstanceId(), + theStepExecutionDetails.getChunkId(), + cascade, + cascadeMaxRounds); } @Nonnull - public RunOutcome doDeleteExpunge(ResourceIdListWorkChunkJson theData, IJobDataSink theDataSink, String theInstanceId, String theChunkId, boolean theCascade, Integer theCascadeMaxRounds) { + public RunOutcome doDeleteExpunge( + ResourceIdListWorkChunkJson theData, + IJobDataSink theDataSink, + String theInstanceId, + String theChunkId, + boolean theCascade, + Integer theCascadeMaxRounds) { RequestDetails requestDetails = new SystemRequestDetails(); TransactionDetails transactionDetails = new TransactionDetails(); - DeleteExpungeJob job = new DeleteExpungeJob(theData, requestDetails, transactionDetails, theDataSink, theInstanceId, theChunkId, theCascade, theCascadeMaxRounds); + DeleteExpungeJob job = new DeleteExpungeJob( + theData, + requestDetails, + transactionDetails, + theDataSink, + theInstanceId, + theChunkId, + theCascade, + theCascadeMaxRounds); myHapiTransactionService - .withRequest(requestDetails) - .withTransactionDetails(transactionDetails) - .withRequestPartitionId(theData.getRequestPartitionId()) - .execute(job); + .withRequest(requestDetails) + .withTransactionDetails(transactionDetails) + .withRequestPartitionId(theData.getRequestPartitionId()) + .execute(job); return new RunOutcome(job.getRecordCount()); } @@ -85,7 +114,15 @@ public class DeleteExpungeStep implements IJobStepWorker theDataSink, String theInstanceId, String theChunkId, boolean theCascade, Integer theCascadeMaxRounds) { + public DeleteExpungeJob( + ResourceIdListWorkChunkJson theData, + RequestDetails theRequestDetails, + TransactionDetails theTransactionDetails, + IJobDataSink theDataSink, + String theInstanceId, + String theChunkId, + boolean theCascade, + Integer theCascadeMaxRounds) { myData = theData; myRequestDetails = theRequestDetails; myTransactionDetails = theTransactionDetails; @@ -106,17 +143,22 @@ public class DeleteExpungeStep implements IJobStepWorker persistentIds = myData.getResourcePersistentIds(myIdHelperService); if (persistentIds.isEmpty()) { - ourLog.info("Starting delete expunge work chunk. There are no resources to delete expunge - Instance[{}] Chunk[{}]", myInstanceId, myChunkId); + ourLog.info( + "Starting delete expunge work chunk. 
There are no resources to delete expunge - Instance[{}] Chunk[{}]", + myInstanceId, + myChunkId); return null; } - ourLog.info("Starting delete expunge work chunk with {} resources - Instance[{}] Chunk[{}]", persistentIds.size(), myInstanceId, myChunkId); + ourLog.info( + "Starting delete expunge work chunk with {} resources - Instance[{}] Chunk[{}]", + persistentIds.size(), + myInstanceId, + myChunkId); myRecordCount = myDeleteExpungeSvc.deleteExpunge(persistentIds, myCascade, myCascadeMaxRounds); return null; } } - - } diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/importpull/BulkImportParameterValidator.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/importpull/BulkImportParameterValidator.java index 0ed6e386369..0a287f635eb 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/importpull/BulkImportParameterValidator.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/importpull/BulkImportParameterValidator.java @@ -27,10 +27,10 @@ import ca.uhn.fhir.rest.api.server.RequestDetails; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.slf4j.LoggerFactory.getLogger; @@ -45,7 +45,8 @@ public class BulkImportParameterValidator implements IJobParametersValidator validate(RequestDetails theRequestDetails, @Nonnull Batch2BulkImportPullJobParameters theParameters) { + public List validate( + RequestDetails theRequestDetails, @Nonnull Batch2BulkImportPullJobParameters theParameters) { ourLog.info("BulkImportPull parameter validation begin"); ArrayList errors = new ArrayList<>(); diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/importpull/BulkImportPullConfig.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/importpull/BulkImportPullConfig.java index 3b141e9476a..440dccd79d9 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/importpull/BulkImportPullConfig.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/importpull/BulkImportPullConfig.java @@ -45,32 +45,28 @@ public class BulkImportPullConfig { @Bean public JobDefinition bulkImportPullJobDefinition() { - return JobDefinition - .newBuilder() - .setJobDefinitionId(BULK_IMPORT_JOB_NAME) - .setJobDescription("Performs bulk import pull job") - .setJobDefinitionVersion(1) - .gatedExecution() - .setParametersType(Batch2BulkImportPullJobParameters.class) - .setParametersValidator(importParameterValidator()) - .addFirstStep( - "FetchPartitionedFilesStep", - "Reads an import file and extracts the resources", - BulkImportFilePartitionResult.class, - fetchPartitionedFilesStep() - ) - .addIntermediateStep( - "ReadInResourcesFromFileStep", - "Reads the import file to get the serialized bundles", - BulkImportRecord.class, - readInResourcesFromFileStep() - ) - .addLastStep( - "WriteBundleForImportStep", - "Parses the bundle from previous step and writes it to the dv", - writeBundleForImportStep() - ) - .build(); + return JobDefinition.newBuilder() + .setJobDefinitionId(BULK_IMPORT_JOB_NAME) + .setJobDescription("Performs bulk import pull job") + .setJobDefinitionVersion(1) + .gatedExecution() + .setParametersType(Batch2BulkImportPullJobParameters.class) + .setParametersValidator(importParameterValidator()) + 
.addFirstStep( + "FetchPartitionedFilesStep", + "Reads an import file and extracts the resources", + BulkImportFilePartitionResult.class, + fetchPartitionedFilesStep()) + .addIntermediateStep( + "ReadInResourcesFromFileStep", + "Reads the import file to get the serialized bundles", + BulkImportRecord.class, + readInResourcesFromFileStep()) + .addLastStep( + "WriteBundleForImportStep", + "Parses the bundle from previous step and writes it to the dv", + writeBundleForImportStep()) + .build(); } @Bean diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/importpull/FetchPartitionedFilesStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/importpull/FetchPartitionedFilesStep.java index f9beaa11371..e1e9b2a595e 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/importpull/FetchPartitionedFilesStep.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/importpull/FetchPartitionedFilesStep.java @@ -35,7 +35,8 @@ import javax.annotation.Nonnull; import static org.slf4j.LoggerFactory.getLogger; -public class FetchPartitionedFilesStep implements IFirstJobStepWorker { +public class FetchPartitionedFilesStep + implements IFirstJobStepWorker { private static final Logger ourLog = getLogger(FetchPartitionedFilesStep.class); private final IBulkDataImportSvc myBulkDataImportSvc; @@ -47,9 +48,9 @@ public class FetchPartitionedFilesStep implements IFirstJobStepWorker theStepExecutionDetails, - @Nonnull IJobDataSink theDataSink - ) throws JobExecutionFailedException { + @Nonnull StepExecutionDetails theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException { String jobId = theStepExecutionDetails.getParameters().getJobId(); ourLog.info("Start FetchPartitionedFilesStep for jobID {} ", jobId); @@ -68,7 +69,10 @@ public class FetchPartitionedFilesStep implements IFirstJobStepWorker { +public class ReadInResourcesFromFileStep + implements IJobStepWorker { private static final Logger ourLog = LoggerFactory.getLogger(ReadInResourcesFromFileStep.class); @@ -55,9 +56,11 @@ public class ReadInResourcesFromFileStep implements IJobStepWorker theStepExecutionDetails, - @Nonnull IJobDataSink theDataSink - ) throws JobExecutionFailedException { + @Nonnull + StepExecutionDetails + theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException { String jobId = theStepExecutionDetails.getParameters().getJobId(); int fileIndex = theStepExecutionDetails.getData().getFileIndex(); JobFileRowProcessingModeEnum mode = theStepExecutionDetails.getData().getProcessingMode(); @@ -93,9 +96,7 @@ public class ReadInResourcesFromFileStep implements IJobStepWorker { +public class WriteBundleForImportStep + implements ILastJobStepWorker { private static final Logger ourLog = LoggerFactory.getLogger(WriteBundleForImportStep.class); @@ -56,9 +57,9 @@ public class WriteBundleForImportStep implements ILastJobStepWorker theStepExecutionDetails, - @Nonnull IJobDataSink theDataSink - ) throws JobExecutionFailedException { + @Nonnull StepExecutionDetails theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException { BulkImportRecord record = theStepExecutionDetails.getData(); @@ -69,12 +70,7 @@ public class WriteBundleForImportStep implements ILastJobStepWorker myResourceTypeOrder; /** @@ -102,7 +106,6 @@ public class BulkDataImportProvider { myRequestPartitionHelperService = theRequestPartitionHelperSvc; } - /** * $import 
operation (Import by Manifest) *

    @@ -119,35 +122,42 @@ public class BulkDataImportProvider { */ @Operation(name = JpaConstants.OPERATION_IMPORT, idempotent = false, manualResponse = true) public void importByManifest( - ServletRequestDetails theRequestDetails, - @ResourceParam IBaseParameters theRequest, - HttpServletResponse theResponse) throws IOException { + ServletRequestDetails theRequestDetails, + @ResourceParam IBaseParameters theRequest, + HttpServletResponse theResponse) + throws IOException { validatePreferAsyncHeader(theRequestDetails, JpaConstants.OPERATION_IMPORT); BulkImportJobParameters jobParameters = new BulkImportJobParameters(); - String inputFormat = ParametersUtil.getNamedParameterValueAsString(myFhirCtx, theRequest, PARAM_INPUT_FORMAT).orElse(""); + String inputFormat = ParametersUtil.getNamedParameterValueAsString(myFhirCtx, theRequest, PARAM_INPUT_FORMAT) + .orElse(""); if (!Constants.CT_FHIR_NDJSON.equals(inputFormat)) { - throw new InvalidRequestException(Msg.code(2048) + "Input format must be \"" + Constants.CT_FHIR_NDJSON + "\""); + throw new InvalidRequestException( + Msg.code(2048) + "Input format must be \"" + Constants.CT_FHIR_NDJSON + "\""); } - Optional storageDetailOpt = ParametersUtil.getNamedParameter(myFhirCtx, theRequest, PARAM_STORAGE_DETAIL); + Optional storageDetailOpt = + ParametersUtil.getNamedParameter(myFhirCtx, theRequest, PARAM_STORAGE_DETAIL); if (storageDetailOpt.isPresent()) { IBase storageDetail = storageDetailOpt.get(); - String httpBasicCredential = ParametersUtil.getParameterPartValueAsString(myFhirCtx, storageDetail, PARAM_STORAGE_DETAIL_CREDENTIAL_HTTP_BASIC); + String httpBasicCredential = ParametersUtil.getParameterPartValueAsString( + myFhirCtx, storageDetail, PARAM_STORAGE_DETAIL_CREDENTIAL_HTTP_BASIC); if (isNotBlank(httpBasicCredential)) { jobParameters.setHttpBasicCredentials(httpBasicCredential); } - String maximumBatchResourceCount = ParametersUtil.getParameterPartValueAsString(myFhirCtx, storageDetail, PARAM_STORAGE_DETAIL_MAX_BATCH_RESOURCE_COUNT); + String maximumBatchResourceCount = ParametersUtil.getParameterPartValueAsString( + myFhirCtx, storageDetail, PARAM_STORAGE_DETAIL_MAX_BATCH_RESOURCE_COUNT); if (isNotBlank(maximumBatchResourceCount)) { jobParameters.setMaxBatchResourceCount(Integer.parseInt(maximumBatchResourceCount)); } } - RequestPartitionId partitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, null); + RequestPartitionId partitionId = + myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, null); if (partitionId != null && !partitionId.isAllPartitions()) { myRequestPartitionHelperService.validateHasPartitionPermissions(theRequestDetails, "Binary", partitionId); jobParameters.setPartitionId(partitionId); @@ -157,9 +167,12 @@ public class BulkDataImportProvider { // likely to result in conflict (e.g. 
Patients before Observations // since Observations can reference Patients but not vice versa) List> typeAndUrls = new ArrayList<>(); - for (IBase input : ParametersUtil.getNamedParameters(myFhirCtx, theRequest, BulkDataImportProvider.PARAM_INPUT)) { - String type = ParametersUtil.getParameterPartValueAsString(myFhirCtx, input, BulkDataImportProvider.PARAM_INPUT_TYPE); - String url = ParametersUtil.getParameterPartValueAsString(myFhirCtx, input, BulkDataImportProvider.PARAM_INPUT_URL); + for (IBase input : + ParametersUtil.getNamedParameters(myFhirCtx, theRequest, BulkDataImportProvider.PARAM_INPUT)) { + String type = ParametersUtil.getParameterPartValueAsString( + myFhirCtx, input, BulkDataImportProvider.PARAM_INPUT_TYPE); + String url = ParametersUtil.getParameterPartValueAsString( + myFhirCtx, input, BulkDataImportProvider.PARAM_INPUT_URL); ValidateUtil.isNotBlankOrThrowInvalidRequest(type, "Missing type for input"); ValidateUtil.isNotBlankOrThrowInvalidRequest(url, "Missing url for input"); Pair typeAndUrl = Pair.of(type, url); @@ -184,21 +197,19 @@ public class BulkDataImportProvider { IBaseOperationOutcome response = OperationOutcomeUtil.newInstance(myFhirCtx); OperationOutcomeUtil.addIssue( - myFhirCtx, - response, - "information", - "Bulk import job has been submitted with ID: " + jobId, - null, - "informational" - ); + myFhirCtx, + response, + "information", + "Bulk import job has been submitted with ID: " + jobId, + null, + "informational"); OperationOutcomeUtil.addIssue( - myFhirCtx, - response, - "information", - "Use the following URL to poll for job status: " + createPollLocationLink(theRequestDetails, jobId), - null, - "informational" - ); + myFhirCtx, + response, + "information", + "Use the following URL to poll for job status: " + createPollLocationLink(theRequestDetails, jobId), + null, + "informational"); theResponse.setStatus(202); theResponse.setContentType(Constants.CT_FHIR_JSON + Constants.CHARSET_UTF8_CTSUFFIX); @@ -212,28 +223,30 @@ public class BulkDataImportProvider { */ @Operation(name = JpaConstants.OPERATION_IMPORT_POLL_STATUS, manualResponse = true, idempotent = true) public void importPollStatus( - @OperationParam(name = JpaConstants.PARAM_IMPORT_POLL_STATUS_JOB_ID, typeName = "string", min = 0, max = 1) IPrimitiveType theJobId, - ServletRequestDetails theRequestDetails - ) throws IOException { + @OperationParam(name = JpaConstants.PARAM_IMPORT_POLL_STATUS_JOB_ID, typeName = "string", min = 0, max = 1) + IPrimitiveType theJobId, + ServletRequestDetails theRequestDetails) + throws IOException { HttpServletResponse response = theRequestDetails.getServletResponse(); theRequestDetails.getServer().addHeadersToResponse(response); JobInstance instance = myJobCoordinator.getInstance(theJobId.getValueAsString()); BulkImportJobParameters parameters = instance.getParameters(BulkImportJobParameters.class); if (parameters != null && parameters.getPartitionId() != null) { // Determine and validate permissions for partition (if needed) - RequestPartitionId partitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, null); + RequestPartitionId partitionId = + myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, null); myRequestPartitionHelperService.validateHasPartitionPermissions(theRequestDetails, "Binary", partitionId); if (!partitionId.equals(parameters.getPartitionId())) { - throw new InvalidRequestException(Msg.code(2310) + "Invalid partition in request for Job ID " + theJobId); + throw new 
InvalidRequestException( + Msg.code(2310) + "Invalid partition in request for Job ID " + theJobId); } } IBaseOperationOutcome oo; switch (instance.getStatus()) { case QUEUED: { response.setStatus(Constants.STATUS_HTTP_202_ACCEPTED); - String msg = "Job was created at " + renderTime(instance.getCreateTime()) + - " and is in " + instance.getStatus() + - " state."; + String msg = "Job was created at " + renderTime(instance.getCreateTime()) + " and is in " + + instance.getStatus() + " state."; response.addHeader(Constants.HEADER_X_PROGRESS, msg); response.addHeader(Constants.HEADER_RETRY_AFTER, "120"); streamOperationOutcomeResponse(response, msg, "information"); @@ -242,12 +255,12 @@ public class BulkDataImportProvider { case ERRORED: case IN_PROGRESS: { response.setStatus(Constants.STATUS_HTTP_202_ACCEPTED); - String msg = "Job was created at " + renderTime(instance.getCreateTime()) + - ", started at " + renderTime(instance.getStartTime()) + - " and is in " + instance.getStatus() + - " state. Current completion: " + - new DecimalFormat("0.0").format(100.0 * instance.getProgress()) + - "% and ETA is " + instance.getEstimatedTimeRemaining(); + String msg = "Job was created at " + renderTime(instance.getCreateTime()) + ", started at " + + renderTime(instance.getStartTime()) + " and is in " + + instance.getStatus() + " state. Current completion: " + + new DecimalFormat("0.0").format(100.0 * instance.getProgress()) + + "% and ETA is " + + instance.getEstimatedTimeRemaining(); response.addHeader(Constants.HEADER_X_PROGRESS, msg); response.addHeader(Constants.HEADER_RETRY_AFTER, "120"); streamOperationOutcomeResponse(response, msg, "information"); @@ -261,8 +274,8 @@ public class BulkDataImportProvider { } case FAILED: { response.setStatus(Constants.STATUS_HTTP_500_INTERNAL_ERROR); - String msg = "Job is in " + instance.getStatus() + " state with " + - instance.getErrorCount() + " error count. Last error: " + instance.getErrorMessage(); + String msg = "Job is in " + instance.getStatus() + " state with " + instance.getErrorCount() + + " error count. Last error: " + instance.getErrorMessage(); streamOperationOutcomeResponse(response, msg, "error"); break; } @@ -275,7 +288,8 @@ public class BulkDataImportProvider { } } - private void streamOperationOutcomeResponse(HttpServletResponse response, String theMessage, String theSeverity) throws IOException { + private void streamOperationOutcomeResponse(HttpServletResponse response, String theMessage, String theSeverity) + throws IOException { response.setContentType(Constants.CT_FHIR_JSON); IBaseOperationOutcome oo = OperationOutcomeUtil.newInstance(myFhirCtx); OperationOutcomeUtil.addIssue(myFhirCtx, oo, theSeverity, theMessage, null, null); @@ -298,7 +312,8 @@ public class BulkDataImportProvider { @Nonnull private String createPollLocationLink(ServletRequestDetails theRequestDetails, String theJobId) { String serverBase = StringUtils.removeEnd(theRequestDetails.getServerBaseForRequest(), "/"); - return serverBase + "/" + JpaConstants.OPERATION_IMPORT_POLL_STATUS + "?" + JpaConstants.PARAM_IMPORT_POLL_STATUS_JOB_ID + "=" + theJobId; + return serverBase + "/" + JpaConstants.OPERATION_IMPORT_POLL_STATUS + "?" 
+ + JpaConstants.PARAM_IMPORT_POLL_STATUS_JOB_ID + "=" + theJobId; } private synchronized List getResourceTypeOrder() { @@ -316,6 +331,4 @@ public class BulkDataImportProvider { } return new InstantType(theTime).getValueAsString(); } - - } diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/BulkImportAppCtx.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/BulkImportAppCtx.java index 7f91e8f1417..83546badcd4 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/BulkImportAppCtx.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/BulkImportAppCtx.java @@ -33,22 +33,14 @@ public class BulkImportAppCtx { @Bean public JobDefinition bulkImport2JobDefinition() { - return JobDefinition - .newBuilder() - .setJobDefinitionId(JOB_BULK_IMPORT_PULL) - .setJobDescription("FHIR Bulk Import using pull-based data source") - .setJobDefinitionVersion(1) - .setParametersType(BulkImportJobParameters.class) - .addFirstStep( - "fetch-files", - "Fetch files for import", - NdJsonFileJson.class, - bulkImport2FetchFiles()) - .addLastStep( - "process-files", - "Process files", - bulkImport2ConsumeFiles()) - .build(); + return JobDefinition.newBuilder() + .setJobDefinitionId(JOB_BULK_IMPORT_PULL) + .setJobDescription("FHIR Bulk Import using pull-based data source") + .setJobDefinitionVersion(1) + .setParametersType(BulkImportJobParameters.class) + .addFirstStep("fetch-files", "Fetch files for import", NdJsonFileJson.class, bulkImport2FetchFiles()) + .addLastStep("process-files", "Process files", bulkImport2ConsumeFiles()) + .build(); } @Bean diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/BulkImportFileServlet.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/BulkImportFileServlet.java index 7ed24659f79..c0579dd167d 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/BulkImportFileServlet.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/BulkImportFileServlet.java @@ -29,10 +29,6 @@ import org.apache.commons.io.input.ReaderInputStream; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.servlet.ServletException; -import javax.servlet.http.HttpServlet; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; @@ -42,6 +38,9 @@ import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.Map; import java.util.UUID; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; import static ca.uhn.fhir.rest.api.Constants.CHARSET_UTF8_CTSUFFIX; import static ca.uhn.fhir.rest.api.Constants.CT_FHIR_NDJSON; @@ -91,7 +90,8 @@ public class BulkImportFileServlet extends HttpServlet { throw new ResourceNotFoundException(Msg.code(2050) + "Missing or invalid index parameter"); } if (!myFileIds.containsKey(indexParam)) { - throw new ResourceNotFoundException(Msg.code(2051) + "Invalid index: " + UrlUtil.sanitizeUrlPart(indexParam)); + throw new ResourceNotFoundException( + Msg.code(2051) + "Invalid index: " + UrlUtil.sanitizeUrlPart(indexParam)); } ourLog.info("Serving Bulk Import NDJSON file index: {}", indexParam); @@ -113,10 +113,9 @@ public class BulkImportFileServlet extends HttpServlet { try (InputStream reader = 
supplier.get()) { IOUtils.copy(reader, theResponse.getOutputStream()); } - } - public String getHeaderContentType(){ + public String getHeaderContentType() { return DEFAULT_HEADER_CONTENT_TYPE; } @@ -156,8 +155,5 @@ public class BulkImportFileServlet extends HttpServlet { boolean isGzip(); InputStream get() throws IOException; - } - - } diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/BulkImportJobParameters.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/BulkImportJobParameters.java index 0063d34482d..22077e858ac 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/BulkImportJobParameters.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/BulkImportJobParameters.java @@ -24,13 +24,13 @@ import ca.uhn.fhir.model.api.IModelJson; import com.fasterxml.jackson.annotation.JsonProperty; import org.apache.commons.lang3.Validate; +import java.util.ArrayList; +import java.util.List; import javax.annotation.Nullable; import javax.validation.constraints.Min; import javax.validation.constraints.NotNull; import javax.validation.constraints.Pattern; import javax.validation.constraints.Size; -import java.util.ArrayList; -import java.util.List; /** * This class is the parameters model object for starting a diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/ConsumeFilesStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/ConsumeFilesStep.java index ce14173aeee..40edfd65ad7 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/ConsumeFilesStep.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/ConsumeFilesStep.java @@ -48,32 +48,39 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nonnull; import java.io.StringReader; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isNotBlank; public class ConsumeFilesStep implements ILastJobStepWorker { private static final Logger ourLog = LoggerFactory.getLogger(ConsumeFilesStep.class); + @Autowired private FhirContext myCtx; + @Autowired private DaoRegistry myDaoRegistry; + @Autowired private HapiTransactionService myHapiTransactionService; + @Autowired private IIdHelperService myIdHelperService; + @Autowired private IFhirSystemDao mySystemDao; @Nonnull @Override - public RunOutcome run(@Nonnull StepExecutionDetails theStepExecutionDetails, @Nonnull IJobDataSink theDataSink) { + public RunOutcome run( + @Nonnull StepExecutionDetails theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) { String ndjson = theStepExecutionDetails.getData().getNdJsonText(); String sourceName = theStepExecutionDetails.getData().getSourceName(); @@ -109,10 +116,16 @@ public class ConsumeFilesStep implements ILastJobStepWorker storeResourcesInsideTransaction(resources, requestDetails, transactionDetails)); + myHapiTransactionService.execute( + requestDetails, + transactionDetails, + tx -> storeResourcesInsideTransaction(resources, requestDetails, transactionDetails)); } - private Void storeResourcesInsideTransaction(List theResources, SystemRequestDetails theRequestDetails, TransactionDetails theTransactionDetails) { + private Void storeResourcesInsideTransaction( + List theResources, + 
SystemRequestDetails theRequestDetails, + TransactionDetails theTransactionDetails) { Map ids = new HashMap<>(); for (IBaseResource next : theResources) { if (!next.getIdElement().hasIdPart()) { @@ -127,7 +140,8 @@ public class ConsumeFilesStep implements ILastJobStepWorker idsList = new ArrayList<>(ids.keySet()); - List resolvedIds = myIdHelperService.resolveResourcePersistentIdsWithCache(theRequestDetails.getRequestPartitionId(), idsList, true); + List resolvedIds = myIdHelperService.resolveResourcePersistentIdsWithCache( + theRequestDetails.getRequestPartitionId(), idsList, true); for (IResourcePersistentId next : resolvedIds) { IIdType resId = next.getAssociatedResourceId(); theTransactionDetails.addResolvedResourceId(resId, next); @@ -146,7 +160,8 @@ public class ConsumeFilesStep implements ILastJobStepWorker void updateResource(RequestDetails theRequestDetails, TransactionDetails theTransactionDetails, T theResource) { + private void updateResource( + RequestDetails theRequestDetails, TransactionDetails theTransactionDetails, T theResource) { IFhirResourceDao dao = myDaoRegistry.getResourceDao(theResource); try { dao.update(theResource, null, true, false, theRequestDetails, theTransactionDetails); diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/FetchFilesStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/FetchFilesStep.java index d9c3755cfa5..d16285363d2 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/FetchFilesStep.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/imprt/FetchFilesStep.java @@ -27,7 +27,6 @@ import ca.uhn.fhir.batch2.api.StepExecutionDetails; import ca.uhn.fhir.batch2.api.VoidModel; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.api.Constants; -import ca.uhn.fhir.rest.api.EncodingEnum; import ca.uhn.fhir.rest.client.impl.HttpBasicAuthInterceptor; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.util.FileUtil; @@ -42,28 +41,35 @@ import org.apache.http.impl.client.HttpClientBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.List; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isNotBlank; public class FetchFilesStep implements IFirstJobStepWorker { private static final Logger ourLog = LoggerFactory.getLogger(FetchFilesStep.class); - private static final List ourValidContentTypes = Arrays.asList(Constants.CT_APP_NDJSON, Constants.CT_FHIR_NDJSON, Constants.CT_FHIR_JSON, Constants.CT_FHIR_JSON_NEW, Constants.CT_JSON, Constants.CT_TEXT); - private static final List ourValidNonNdJsonContentTypes = Arrays.asList(Constants.CT_FHIR_JSON, Constants.CT_FHIR_JSON_NEW, Constants.CT_JSON, Constants.CT_TEXT); + private static final List ourValidContentTypes = Arrays.asList( + Constants.CT_APP_NDJSON, + Constants.CT_FHIR_NDJSON, + Constants.CT_FHIR_JSON, + Constants.CT_FHIR_JSON_NEW, + Constants.CT_JSON, + Constants.CT_TEXT); + private static final List ourValidNonNdJsonContentTypes = + Arrays.asList(Constants.CT_FHIR_JSON, Constants.CT_FHIR_JSON_NEW, Constants.CT_JSON, Constants.CT_TEXT); @Nonnull @Override - public RunOutcome run(@Nonnull StepExecutionDetails theStepExecutionDetails, @Nonnull IJobDataSink theDataSink) { + public RunOutcome run( 
+ @Nonnull StepExecutionDetails theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) { - Integer maxBatchResourceCount = theStepExecutionDetails - .getParameters() - .getMaxBatchResourceCount(); + Integer maxBatchResourceCount = theStepExecutionDetails.getParameters().getMaxBatchResourceCount(); if (maxBatchResourceCount == null || maxBatchResourceCount <= 0) { maxBatchResourceCount = BulkImportAppCtx.PARAM_MAXIMUM_BATCH_SIZE_DEFAULT; } @@ -81,17 +87,27 @@ public class FetchFilesStep implements IFirstJobStepWorker= 400) { - throw new JobExecutionFailedException(Msg.code(2056) + "Received HTTP " + statusCode + " from URL: " + nextUrl); + throw new JobExecutionFailedException( + Msg.code(2056) + "Received HTTP " + statusCode + " from URL: " + nextUrl); } String contentType = response.getEntity().getContentType().getValue(); - Validate.isTrue(hasMatchingSubstring(contentType, ourValidContentTypes), "Received content type \"%s\" from URL: %s. This format is not one of the supported content type: %s", contentType, nextUrl, getContentTypesString()); + Validate.isTrue( + hasMatchingSubstring(contentType, ourValidContentTypes), + "Received content type \"%s\" from URL: %s. This format is not one of the supported content type: %s", + contentType, + nextUrl, + getContentTypesString()); if (hasMatchingSubstring(contentType, ourValidNonNdJsonContentTypes)) { - ourLog.info("Received non-NDJSON content type \"{}\" from URL: {}. It will be processed but it may not complete correctly if the actual data is not NDJSON.", contentType, nextUrl); + ourLog.info( + "Received non-NDJSON content type \"{}\" from URL: {}. It will be processed but it may not complete correctly if the actual data is not NDJSON.", + contentType, + nextUrl); } try (InputStream inputStream = response.getEntity().getContent()) { - try (LineIterator lineIterator = new LineIterator(new InputStreamReader(inputStream, StandardCharsets.UTF_8))) { + try (LineIterator lineIterator = + new LineIterator(new InputStreamReader(inputStream, StandardCharsets.UTF_8))) { int chunkCount = 0; int lineCount = 0; @@ -105,9 +121,16 @@ public class FetchFilesStep implements IFirstJobStepWorker= maxBatchResourceCount || charCount >= batchSizeChars || !lineIterator.hasNext()) { + if (lineCount >= maxBatchResourceCount + || charCount >= batchSizeChars + || !lineIterator.hasNext()) { - ourLog.info("Loaded chunk {} of {} NDJSON file with {} resources from URL: {}", chunkCount, FileUtil.formatFileSize(charCount), lineCount, nextUrl); + ourLog.info( + "Loaded chunk {} of {} NDJSON file with {} resources from URL: {}", + chunkCount, + FileUtil.formatFileSize(charCount), + lineCount, + nextUrl); NdJsonFileJson data = new NdJsonFileJson(); data.setNdJsonText(builder.toString()); @@ -118,15 +141,12 @@ public class FetchFilesStep implements IFirstJobStepWorker theStepExecutionDetails) { + private CloseableHttpClient newHttpClient( + StepExecutionDetails theStepExecutionDetails) { HttpClientBuilder builder = HttpClientBuilder.create(); String httpBasicCredentials = theStepExecutionDetails.getParameters().getHttpBasicCredentials(); if (isNotBlank(httpBasicCredentials)) { int colonIdx = httpBasicCredentials.indexOf(':'); if (colonIdx == -1) { - throw new JobExecutionFailedException(Msg.code(2055) + "Invalid credential parameter provided. Must be in the form \"username:password\"."); + throw new JobExecutionFailedException(Msg.code(2055) + + "Invalid credential parameter provided. 
Must be in the form \"username:password\"."); } String username = httpBasicCredentials.substring(0, colonIdx); String password = httpBasicCredentials.substring(colonIdx + 1); @@ -162,5 +184,4 @@ public class FetchFilesStep implements IFirstJobStepWorker params = typeDefinition - .getSearchParams() - .stream() - .filter(t -> t.getParamType() == RestSearchParameterTypeEnum.REFERENCE) - .collect(Collectors.toList()); + List params = typeDefinition.getSearchParams().stream() + .filter(t -> t.getParamType() == RestSearchParameterTypeEnum.REFERENCE) + .collect(Collectors.toList()); for (RuntimeSearchParam nextParam : params) { for (String targetType : nextParam.getTargets()) { int targetIndex = retVal.indexOf(targetType); if (targetIndex > i) { String nextParamName = nextParam.getName(); - String key = typeAtCurrentIndex + " " + nextParamName + " " + targetType + " " + i + " " + targetIndex; + String key = typeAtCurrentIndex + " " + nextParamName + " " + targetType + " " + i + " " + + targetIndex; if (!moves.add(key)) { continue; } - ourLog.debug("Resource[{}] at index[{}] has SP[{}] with target[{}] at index[{}] - moving to index[{}]", typeAtCurrentIndex, i, nextParamName, targetType, targetIndex, i); + ourLog.debug( + "Resource[{}] at index[{}] has SP[{}] with target[{}] at index[{}] - moving to index[{}]", + typeAtCurrentIndex, + i, + nextParamName, + targetType, + targetIndex, + i); retVal.set(targetIndex, typeAtCurrentIndex); retVal.set(i, targetType); @@ -99,7 +105,6 @@ public class ResourceOrderUtil { } } } - } ourLog.debug("Finished pass {} with {} changes", passCount, changeCount); @@ -113,56 +118,56 @@ public class ResourceOrderUtil { return retVal; } - -// public static List getResourceOrder(FhirContext theFhirContext) { -// LinkedList retVal = new LinkedList<>(theFhirContext.getResourceTypes()); -// Set moves = new HashSet<>(); -// StopWatch sw = new StopWatch(); -// -// for (int rep = 0; rep < retVal.size(); rep++) { -// ourLog.debug("Starting rep {}", rep); -// int changeCount = 0; -// -// for (int i = retVal.size() - 1; i >= 0; i--) { -// String typeAtCurrentIndex = retVal.get(i); -// -// RuntimeResourceDefinition typeDefinition = theFhirContext.getResourceDefinition(typeAtCurrentIndex); -// List params = typeDefinition -// .getSearchParams() -// .stream() -// .filter(t -> t.getParamType() == RestSearchParameterTypeEnum.REFERENCE) -// .collect(Collectors.toList()); -// for (RuntimeSearchParam nextParam : params) { -// for (String targetType : nextParam.getTargets()) { -// int targetIndex = retVal.indexOf(targetType); -// if (targetIndex > i) { -// -// String nextParamName = nextParam.getName(); -// String key = typeAtCurrentIndex + " " + nextParamName + " " + targetType + " " + i + " " + targetIndex; -// if (!moves.add(key)) { -// continue; -// } -// -// ourLog.debug("Resource[{}] at index[{}] has SP[{}] with target[{}] at index[{}] - moving to index[{}]", typeAtCurrentIndex, i, nextParamName, targetType, targetIndex, i); -// retVal.remove(targetIndex); -// retVal.add(i, targetType); -// i++; -// changeCount++; -// } -// } -// } -// -// } -// -// ourLog.debug("Finished pass {} with {} changes", rep, changeCount); -// if (changeCount == 0) { -// break; -// } -// } -// -// ourLog.info("Calculated optimal resource order in {}", sw); -// -// return retVal; -// } + // public static List getResourceOrder(FhirContext theFhirContext) { + // LinkedList retVal = new LinkedList<>(theFhirContext.getResourceTypes()); + // Set moves = new HashSet<>(); + // StopWatch sw = new StopWatch(); + // + 
// for (int rep = 0; rep < retVal.size(); rep++) { + // ourLog.debug("Starting rep {}", rep); + // int changeCount = 0; + // + // for (int i = retVal.size() - 1; i >= 0; i--) { + // String typeAtCurrentIndex = retVal.get(i); + // + // RuntimeResourceDefinition typeDefinition = theFhirContext.getResourceDefinition(typeAtCurrentIndex); + // List params = typeDefinition + // .getSearchParams() + // .stream() + // .filter(t -> t.getParamType() == RestSearchParameterTypeEnum.REFERENCE) + // .collect(Collectors.toList()); + // for (RuntimeSearchParam nextParam : params) { + // for (String targetType : nextParam.getTargets()) { + // int targetIndex = retVal.indexOf(targetType); + // if (targetIndex > i) { + // + // String nextParamName = nextParam.getName(); + // String key = typeAtCurrentIndex + " " + nextParamName + " " + targetType + " " + i + " " + targetIndex; + // if (!moves.add(key)) { + // continue; + // } + // + // ourLog.debug("Resource[{}] at index[{}] has SP[{}] with target[{}] at index[{}] - moving to index[{}]", + // typeAtCurrentIndex, i, nextParamName, targetType, targetIndex, i); + // retVal.remove(targetIndex); + // retVal.add(i, targetType); + // i++; + // changeCount++; + // } + // } + // } + // + // } + // + // ourLog.debug("Finished pass {} with {} changes", rep, changeCount); + // if (changeCount == 0) { + // break; + // } + // } + // + // ourLog.info("Calculated optimal resource order in {}", sw); + // + // return retVal; + // } } diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/models/BatchResourceId.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/models/BatchResourceId.java index 3f61d5b9288..63069f7e9b7 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/models/BatchResourceId.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/models/BatchResourceId.java @@ -29,6 +29,7 @@ public class BatchResourceId implements IModelJson { @JsonProperty("type") private String myResourceType; + @JsonProperty("id") private String myId; @@ -65,7 +66,10 @@ public class BatchResourceId implements IModelJson { BatchResourceId batchResourceId = (BatchResourceId) theO; - return new EqualsBuilder().append(myResourceType, batchResourceId.myResourceType).append(myId, batchResourceId.myId).isEquals(); + return new EqualsBuilder() + .append(myResourceType, batchResourceId.myResourceType) + .append(myId, batchResourceId.myId) + .isEquals(); } @Override diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexAppCtx.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexAppCtx.java index f165add9f65..059d57527e4 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexAppCtx.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexAppCtx.java @@ -41,29 +41,25 @@ public class ReindexAppCtx { @Bean public JobDefinition reindexJobDefinition(IBatch2DaoSvc theBatch2DaoSvc) { - return JobDefinition - .newBuilder() - .setJobDefinitionId(JOB_REINDEX) - .setJobDescription("Reindex resources") - .setJobDefinitionVersion(1) - .setParametersType(ReindexJobParameters.class) - .setParametersValidator(reindexJobParametersValidator(theBatch2DaoSvc)) - .gatedExecution() - .addFirstStep( - "generate-ranges", - "Generate data ranges to reindex", - PartitionedUrlChunkRangeJson.class, - reindexGenerateRangeChunksStep()) - .addIntermediateStep( - "load-ids", - 
"Load IDs of resources to reindex", - ResourceIdListWorkChunkJson.class, - new LoadIdsStep(theBatch2DaoSvc)) - .addLastStep("reindex", - "Perform the resource reindex", - reindexStep() - ) - .build(); + return JobDefinition.newBuilder() + .setJobDefinitionId(JOB_REINDEX) + .setJobDescription("Reindex resources") + .setJobDefinitionVersion(1) + .setParametersType(ReindexJobParameters.class) + .setParametersValidator(reindexJobParametersValidator(theBatch2DaoSvc)) + .gatedExecution() + .addFirstStep( + "generate-ranges", + "Generate data ranges to reindex", + PartitionedUrlChunkRangeJson.class, + reindexGenerateRangeChunksStep()) + .addIntermediateStep( + "load-ids", + "Load IDs of resources to reindex", + ResourceIdListWorkChunkJson.class, + new LoadIdsStep(theBatch2DaoSvc)) + .addLastStep("reindex", "Perform the resource reindex", reindexStep()) + .build(); } @Bean @@ -73,7 +69,8 @@ public class ReindexAppCtx { @Bean public ReindexJobParametersValidator reindexJobParametersValidator(IBatch2DaoSvc theBatch2DaoSvc) { - return new ReindexJobParametersValidator(new UrlListValidator(ProviderConstants.OPERATION_REINDEX, theBatch2DaoSvc)); + return new ReindexJobParametersValidator( + new UrlListValidator(ProviderConstants.OPERATION_REINDEX, theBatch2DaoSvc)); } @Bean @@ -81,11 +78,12 @@ public class ReindexAppCtx { return new ReindexStep(); } - - @Bean - public ReindexProvider reindexProvider(FhirContext theFhirContext, IJobCoordinator theJobCoordinator, IRequestPartitionHelperSvc theRequestPartitionHelperSvc, UrlPartitioner theUrlPartitioner) { + public ReindexProvider reindexProvider( + FhirContext theFhirContext, + IJobCoordinator theJobCoordinator, + IRequestPartitionHelperSvc theRequestPartitionHelperSvc, + UrlPartitioner theUrlPartitioner) { return new ReindexProvider(theFhirContext, theJobCoordinator, theRequestPartitionHelperSvc, theUrlPartitioner); } - } diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexChunkIds.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexChunkIds.java deleted file mode 100644 index 4e460818584..00000000000 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexChunkIds.java +++ /dev/null @@ -1,19 +0,0 @@ -/*- - * #%L - * hapi-fhir-storage-batch2-jobs - * %% - * Copyright (C) 2014 - 2023 Smile CDR, Inc. - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexGenerateRangeChunksStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexGenerateRangeChunksStep.java index f7fbee6eeb2..6def68e4997 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexGenerateRangeChunksStep.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexGenerateRangeChunksStep.java @@ -32,15 +32,21 @@ import org.slf4j.LoggerFactory; import javax.annotation.Nonnull; public class ReindexGenerateRangeChunksStep extends GenerateRangeChunksStep { - private static final Logger ourLog = LoggerFactory.getLogger(ReindexGenerateRangeChunksStep.class); + private static final Logger ourLog = LoggerFactory.getLogger(ReindexGenerateRangeChunksStep.class); - @Nonnull - @Override - public RunOutcome run(@Nonnull StepExecutionDetails theStepExecutionDetails, @Nonnull IJobDataSink theDataSink) throws JobExecutionFailedException { + @Nonnull + @Override + public RunOutcome run( + @Nonnull StepExecutionDetails theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException { - ReindexJobParameters parameters = theStepExecutionDetails.getParameters(); - ourLog.info("Beginning reindex job - OptimizeStorage[{}] - ReindexSearchParameters[{}]", parameters.getOptimizeStorage(), parameters.getReindexSearchParameters()); + ReindexJobParameters parameters = theStepExecutionDetails.getParameters(); + ourLog.info( + "Beginning reindex job - OptimizeStorage[{}] - ReindexSearchParameters[{}]", + parameters.getOptimizeStorage(), + parameters.getReindexSearchParameters()); - return super.run(theStepExecutionDetails, theDataSink); - } + return super.run(theStepExecutionDetails, theDataSink); + } } diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexJobParameters.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexJobParameters.java index 1312cb28e5b..9ba7802e8ed 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexJobParameters.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexJobParameters.java @@ -33,13 +33,24 @@ public class ReindexJobParameters extends PartitionedUrlListJobParameters { public static final String REINDEX_SEARCH_PARAMETERS = "reindexSearchParameters"; public static final String OPTIMISTIC_LOCK = "optimisticLock"; - @JsonProperty(value = OPTIMIZE_STORAGE, defaultValue = ReindexParameters.OPTIMIZE_STORAGE_DEFAULT_STRING, required = false) + @JsonProperty( + value = OPTIMIZE_STORAGE, + defaultValue = ReindexParameters.OPTIMIZE_STORAGE_DEFAULT_STRING, + required = false) @Nullable private ReindexParameters.OptimizeStorageModeEnum myOptimizeStorage; - @JsonProperty(value = REINDEX_SEARCH_PARAMETERS, defaultValue = ReindexParameters.REINDEX_SEARCH_PARAMETERS_DEFAULT_STRING, required = false) + + @JsonProperty( + value = REINDEX_SEARCH_PARAMETERS, + defaultValue = ReindexParameters.REINDEX_SEARCH_PARAMETERS_DEFAULT_STRING, + required = false) @Nullable private ReindexParameters.ReindexSearchParametersEnum myReindexSearchParameters; - @JsonProperty(value = OPTIMISTIC_LOCK, defaultValue = ReindexParameters.OPTIMISTIC_LOCK_DEFAULT + "", required = false) + + @JsonProperty( + value = OPTIMISTIC_LOCK, + defaultValue = ReindexParameters.OPTIMISTIC_LOCK_DEFAULT + "", + required = 
false) @Nullable private Boolean myOptimisticLock; @@ -65,11 +76,9 @@ public class ReindexJobParameters extends PartitionedUrlListJobParameters { return defaultIfNull(myReindexSearchParameters, ReindexParameters.REINDEX_SEARCH_PARAMETERS_DEFAULT); } - public ReindexJobParameters setReindexSearchParameters(ReindexParameters.ReindexSearchParametersEnum theReindexSearchParameters) { + public ReindexJobParameters setReindexSearchParameters( + ReindexParameters.ReindexSearchParametersEnum theReindexSearchParameters) { this.myReindexSearchParameters = theReindexSearchParameters; return this; } - - - } diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexJobParametersValidator.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexJobParametersValidator.java index 7a8b61b32c2..ce00a180973 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexJobParametersValidator.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexJobParametersValidator.java @@ -24,10 +24,10 @@ import ca.uhn.fhir.batch2.jobs.parameters.PartitionedUrl; import ca.uhn.fhir.batch2.jobs.parameters.UrlListValidator; import ca.uhn.fhir.rest.api.server.RequestDetails; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class ReindexJobParametersValidator implements IJobParametersValidator { diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexProvider.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexProvider.java index 5e9ea1c0a66..3f69485354a 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexProvider.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexProvider.java @@ -57,7 +57,11 @@ public class ReindexProvider { /** * Constructor */ - public ReindexProvider(FhirContext theFhirContext, IJobCoordinator theJobCoordinator, IRequestPartitionHelperSvc theRequestPartitionHelperSvc, UrlPartitioner theUrlPartitioner) { + public ReindexProvider( + FhirContext theFhirContext, + IJobCoordinator theJobCoordinator, + IRequestPartitionHelperSvc theRequestPartitionHelperSvc, + UrlPartitioner theUrlPartitioner) { myFhirContext = theFhirContext; myJobCoordinator = theJobCoordinator; myRequestPartitionHelperSvc = theRequestPartitionHelperSvc; @@ -66,20 +70,28 @@ public class ReindexProvider { @Operation(name = ProviderConstants.OPERATION_REINDEX, idempotent = false) public IBaseParameters reindex( - @Description("Optionally provides one ore more relative search parameter URLs (e.g. \"Patient?active=true\" or \"Observation?\") that will be reindexed. Note that the URL applies to the resources as they are currently indexed, so you should not use a search parameter that needs reindexing in the URL or some resources may be missed. 
If no URLs are provided, all resources of all types will be reindexed.") - @OperationParam(name = ProviderConstants.OPERATION_REINDEX_PARAM_URL, typeName = "string", min = 0, max = OperationParam.MAX_UNLIMITED) - List> theUrlsToReindex, - @Description("Should search parameters be reindexed (default: " + ReindexParameters.REINDEX_SEARCH_PARAMETERS_DEFAULT_STRING + ")") - @OperationParam(name = REINDEX_SEARCH_PARAMETERS, typeName = "code", min = 0, max = 1) - IPrimitiveType theReindexSearchParameters, - @Description("Should we attempt to optimize storage for resources (default: " + ReindexParameters.OPTIMIZE_STORAGE_DEFAULT_STRING + ")") - @OperationParam(name = OPTIMIZE_STORAGE, typeName = "code", min = 0, max = 1) - IPrimitiveType theOptimizeStorage, - @Description("Should we attempt to optimistically lock resources being reindexed in order to avoid concurrency issues (default: " + ReindexParameters.OPTIMISTIC_LOCK_DEFAULT + ")") - @OperationParam(name = ReindexJobParameters.OPTIMISTIC_LOCK, typeName = "boolean", min = 0, max = 1) - IPrimitiveType theOptimisticLock, - RequestDetails theRequestDetails - ) { + @Description( + "Optionally provides one ore more relative search parameter URLs (e.g. \"Patient?active=true\" or \"Observation?\") that will be reindexed. Note that the URL applies to the resources as they are currently indexed, so you should not use a search parameter that needs reindexing in the URL or some resources may be missed. If no URLs are provided, all resources of all types will be reindexed.") + @OperationParam( + name = ProviderConstants.OPERATION_REINDEX_PARAM_URL, + typeName = "string", + min = 0, + max = OperationParam.MAX_UNLIMITED) + List> theUrlsToReindex, + @Description("Should search parameters be reindexed (default: " + + ReindexParameters.REINDEX_SEARCH_PARAMETERS_DEFAULT_STRING + ")") + @OperationParam(name = REINDEX_SEARCH_PARAMETERS, typeName = "code", min = 0, max = 1) + IPrimitiveType theReindexSearchParameters, + @Description("Should we attempt to optimize storage for resources (default: " + + ReindexParameters.OPTIMIZE_STORAGE_DEFAULT_STRING + ")") + @OperationParam(name = OPTIMIZE_STORAGE, typeName = "code", min = 0, max = 1) + IPrimitiveType theOptimizeStorage, + @Description( + "Should we attempt to optimistically lock resources being reindexed in order to avoid concurrency issues (default: " + + ReindexParameters.OPTIMISTIC_LOCK_DEFAULT + ")") + @OperationParam(name = ReindexJobParameters.OPTIMISTIC_LOCK, typeName = "boolean", min = 0, max = 1) + IPrimitiveType theOptimisticLock, + RequestDetails theRequestDetails) { ReindexJobParameters params = new ReindexJobParameters(); @@ -87,7 +99,10 @@ public class ReindexProvider { String value = theReindexSearchParameters.getValue(); if (value != null) { value = Ascii.toUpperCase(value); - ValidateUtil.isTrueOrThrowInvalidRequest(EnumUtils.isValidEnum(ReindexParameters.ReindexSearchParametersEnum.class, value), "Invalid " + REINDEX_SEARCH_PARAMETERS + " value: " + UrlUtil.sanitizeUrlPart(theReindexSearchParameters.getValue())); + ValidateUtil.isTrueOrThrowInvalidRequest( + EnumUtils.isValidEnum(ReindexParameters.ReindexSearchParametersEnum.class, value), + "Invalid " + REINDEX_SEARCH_PARAMETERS + " value: " + + UrlUtil.sanitizeUrlPart(theReindexSearchParameters.getValue())); params.setReindexSearchParameters(ReindexParameters.ReindexSearchParametersEnum.valueOf(value)); } } @@ -95,7 +110,10 @@ public class ReindexProvider { String value = theOptimizeStorage.getValue(); if (value != null) { value = 
Ascii.toUpperCase(value); - ValidateUtil.isTrueOrThrowInvalidRequest(EnumUtils.isValidEnum(ReindexParameters.OptimizeStorageModeEnum.class, value), "Invalid " + OPTIMIZE_STORAGE + " value: " + UrlUtil.sanitizeUrlPart(theOptimizeStorage.getValue())); + ValidateUtil.isTrueOrThrowInvalidRequest( + EnumUtils.isValidEnum(ReindexParameters.OptimizeStorageModeEnum.class, value), + "Invalid " + OPTIMIZE_STORAGE + " value: " + + UrlUtil.sanitizeUrlPart(theOptimizeStorage.getValue())); params.setOptimizeStorage(ReindexParameters.OptimizeStorageModeEnum.valueOf(value)); } } @@ -105,14 +123,16 @@ public class ReindexProvider { if (theUrlsToReindex != null) { theUrlsToReindex.stream() - .map(IPrimitiveType::getValue) - .filter(StringUtils::isNotBlank) - .map(url -> myUrlPartitioner.partitionUrl(url, theRequestDetails)) - .forEach(params::addPartitionedUrl); + .map(IPrimitiveType::getValue) + .filter(StringUtils::isNotBlank) + .map(url -> myUrlPartitioner.partitionUrl(url, theRequestDetails)) + .forEach(params::addPartitionedUrl); } - ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_REINDEX); - RequestPartitionId requestPartition = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, details); + ReadPartitionIdRequestDetails details = + ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_REINDEX); + RequestPartitionId requestPartition = + myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, details); params.setRequestPartitionId(requestPartition); JobInstanceStartRequest request = new JobInstanceStartRequest(); @@ -121,9 +141,8 @@ public class ReindexProvider { Batch2JobStartResponse response = myJobCoordinator.startInstance(theRequestDetails, request); IBaseParameters retVal = ParametersUtil.newInstance(myFhirContext); - ParametersUtil.addParameterToParametersString(myFhirContext, retVal, ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, response.getInstanceId()); + ParametersUtil.addParameterToParametersString( + myFhirContext, retVal, ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, response.getInstanceId()); return retVal; } - - } diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexStep.java index f3601ce4034..cbfe007fb37 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexStep.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexStep.java @@ -46,46 +46,64 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.support.TransactionCallback; -import javax.annotation.Nonnull; import java.util.List; import java.util.concurrent.TimeUnit; +import javax.annotation.Nonnull; public class ReindexStep implements IJobStepWorker { public static final int REINDEX_MAX_RETRIES = 10; private static final Logger ourLog = LoggerFactory.getLogger(ReindexStep.class); + @Autowired private HapiTransactionService myHapiTransactionService; + @Autowired private IFhirSystemDao mySystemDao; + @Autowired private DaoRegistry myDaoRegistry; + @Autowired private IIdHelperService myIdHelperService; @Nonnull @Override - public RunOutcome run(@Nonnull StepExecutionDetails theStepExecutionDetails, @Nonnull IJobDataSink theDataSink) throws 
JobExecutionFailedException { + public RunOutcome run( + @Nonnull StepExecutionDetails theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException { ResourceIdListWorkChunkJson data = theStepExecutionDetails.getData(); ReindexJobParameters jobParameters = theStepExecutionDetails.getParameters(); - return doReindex(data, theDataSink, theStepExecutionDetails.getInstance().getInstanceId(), theStepExecutionDetails.getChunkId(), jobParameters); + return doReindex( + data, + theDataSink, + theStepExecutionDetails.getInstance().getInstanceId(), + theStepExecutionDetails.getChunkId(), + jobParameters); } @Nonnull - public RunOutcome doReindex(ResourceIdListWorkChunkJson data, IJobDataSink theDataSink, String theInstanceId, String theChunkId, ReindexJobParameters theJobParameters) { + public RunOutcome doReindex( + ResourceIdListWorkChunkJson data, + IJobDataSink theDataSink, + String theInstanceId, + String theChunkId, + ReindexJobParameters theJobParameters) { RequestDetails requestDetails = new SystemRequestDetails(); requestDetails.setRetry(true); requestDetails.setMaxRetries(REINDEX_MAX_RETRIES); TransactionDetails transactionDetails = new TransactionDetails(); - ReindexJob reindexJob = new ReindexJob(data, requestDetails, transactionDetails, theDataSink, theInstanceId, theChunkId, theJobParameters); + ReindexJob reindexJob = new ReindexJob( + data, requestDetails, transactionDetails, theDataSink, theInstanceId, theChunkId, theJobParameters); myHapiTransactionService - .withRequest(requestDetails) - .withTransactionDetails(transactionDetails) - .execute(reindexJob); + .withRequest(requestDetails) + .withTransactionDetails(transactionDetails) + .execute(reindexJob); return new RunOutcome(data.size()); } @@ -99,7 +117,14 @@ public class ReindexStep implements IJobStepWorker theDataSink, String theInstanceId, String theChunkId, ReindexJobParameters theJobParameters) { + public ReindexJob( + ResourceIdListWorkChunkJson theData, + RequestDetails theRequestDetails, + TransactionDetails theTransactionDetails, + IJobDataSink theDataSink, + String theInstanceId, + String theChunkId, + ReindexJobParameters theJobParameters) { myData = theData; myRequestDetails = theRequestDetails; myTransactionDetails = theTransactionDetails; @@ -115,19 +140,29 @@ public class ReindexStep implements IJobStepWorker persistentIds = myData.getResourcePersistentIds(myIdHelperService); - ourLog.info("Starting reindex work chunk with {} resources - Instance[{}] Chunk[{}]", persistentIds.size(), myInstanceId, myChunkId); + ourLog.info( + "Starting reindex work chunk with {} resources - Instance[{}] Chunk[{}]", + persistentIds.size(), + myInstanceId, + myChunkId); StopWatch sw = new StopWatch(); // Prefetch Resources from DB - boolean reindexSearchParameters = myJobParameters.getReindexSearchParameters() != ReindexParameters.ReindexSearchParametersEnum.NONE; + boolean reindexSearchParameters = + myJobParameters.getReindexSearchParameters() != ReindexParameters.ReindexSearchParametersEnum.NONE; mySystemDao.preFetchResources(persistentIds, reindexSearchParameters); - ourLog.info("Prefetched {} resources in {} - Instance[{}] Chunk[{}]", persistentIds.size(), sw, myInstanceId, myChunkId); + ourLog.info( + "Prefetched {} resources in {} - Instance[{}] Chunk[{}]", + persistentIds.size(), + sw, + myInstanceId, + myChunkId); ReindexParameters parameters = new ReindexParameters() - .setReindexSearchParameters(myJobParameters.getReindexSearchParameters()) - 
.setOptimizeStorage(myJobParameters.getOptimizeStorage()) - .setOptimisticLock(myJobParameters.getOptimisticLock()); + .setReindexSearchParameters(myJobParameters.getReindexSearchParameters()) + .setOptimizeStorage(myJobParameters.getOptimizeStorage()) + .setOptimisticLock(myJobParameters.getOptimisticLock()); // Reindex @@ -139,18 +174,27 @@ public class ReindexStep implements IJobStepWorker resourcePersistentId = persistentIds.get(i); try { - ReindexOutcome outcome = dao.reindex(resourcePersistentId, parameters, myRequestDetails, myTransactionDetails); + ReindexOutcome outcome = + dao.reindex(resourcePersistentId, parameters, myRequestDetails, myTransactionDetails); outcome.getWarnings().forEach(myDataSink::recoveredError); } catch (BaseServerResponseException | DataFormatException e) { - String resourceForcedId = myIdHelperService.translatePidIdToForcedIdWithCache(resourcePersistentId).orElse(resourcePersistentId.toString()); + String resourceForcedId = myIdHelperService + .translatePidIdToForcedIdWithCache(resourcePersistentId) + .orElse(resourcePersistentId.toString()); String resourceId = nextResourceType + "/" + resourceForcedId; ourLog.debug("Failure during reindexing {}", resourceId, e); myDataSink.recoveredError("Failure reindexing " + resourceId + ": " + e.getMessage()); } } - ourLog.info("Finished reindexing {} resources in {} - {}/sec - Instance[{}] Chunk[{}]", persistentIds.size(), sw, sw.formatThroughput(persistentIds.size(), TimeUnit.SECONDS), myInstanceId, myChunkId); + ourLog.info( + "Finished reindexing {} resources in {} - {}/sec - Instance[{}] Chunk[{}]", + persistentIds.size(), + sw, + sw.formatThroughput(persistentIds.size(), TimeUnit.SECONDS), + myInstanceId, + myChunkId); return null; } diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexWarningProcessor.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexWarningProcessor.java index 2440c9d0a19..63d32a34da1 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexWarningProcessor.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexWarningProcessor.java @@ -27,10 +27,14 @@ public class ReindexWarningProcessor implements IWarningProcessor { @Override public void recoverWarningMessage(String theErrorMessage) { - // save non-fatal error as warning, current only support unique search param reindexing error on existing duplicates - if (theErrorMessage.contains("Can not create resource") && theErrorMessage.contains("it would create a duplicate unique index matching query")) { - String searchParamName = theErrorMessage.substring(theErrorMessage.indexOf("SearchParameter"), theErrorMessage.length() - 1); - myRecoveredWarning = "Failed to reindex resource because unique search parameter " + searchParamName + " could not be enforced."; + // save non-fatal error as warning, current only support unique search param reindexing error on existing + // duplicates + if (theErrorMessage.contains("Can not create resource") + && theErrorMessage.contains("it would create a duplicate unique index matching query")) { + String searchParamName = + theErrorMessage.substring(theErrorMessage.indexOf("SearchParameter"), theErrorMessage.length() - 1); + myRecoveredWarning = "Failed to reindex resource because unique search parameter " + searchParamName + + " could not be enforced."; } } diff --git 
a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/TermCodeSystemJobConfig.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/TermCodeSystemJobConfig.java index 643fd641290..02a3c875a93 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/TermCodeSystemJobConfig.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/TermCodeSystemJobConfig.java @@ -39,19 +39,17 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - @Configuration public class TermCodeSystemJobConfig { /** * TermCodeSystem delete */ - public static final String TERM_CODE_SYSTEM_DELETE_JOB_NAME = "termCodeSystemDeleteJob"; + public static final String TERM_CODE_SYSTEM_DELETE_JOB_NAME = "termCodeSystemDeleteJob"; /** * TermCodeSystemVersion delete */ - public static final String TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME = "termCodeSystemVersionDeleteJob"; - + public static final String TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME = "termCodeSystemVersionDeleteJob"; @Autowired private ITermCodeSystemDeleteJobSvc myITermCodeSystemSvc; @@ -62,28 +60,25 @@ public class TermCodeSystemJobConfig { */ @Bean public JobDefinition termCodeSystemVersionDeleteJobDefinition() { - return JobDefinition - .newBuilder() - .setJobDefinitionId(TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME) - .setJobDescription("Term code system version job delete") - .setJobDefinitionVersion(1) - .gatedExecution() - .setParametersType(TermCodeSystemDeleteVersionJobParameters.class) - .setParametersValidator(deleteCodeSystemVersionPrameterValidator()) - .addFirstStep( - "DeleteCodeSystemVersionFirstStep", - "A first step for deleting code system versions; deletes the concepts for a provided code system version", - CodeSystemVersionPIDResult.class, - deleteCodeSystemVersionFirstStep() - ) - .addLastStep( - "DeleteCodeSystemVersionFinalStep", - "Deletes the code system version", - deleteCodeSystemVersionFinalStep() - ) - .completionHandler(deleteCodeSystemVersionCompletionHandler()) - .errorHandler(deleteCodeSystemVersionCompletionHandler()) - .build(); + return JobDefinition.newBuilder() + .setJobDefinitionId(TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME) + .setJobDescription("Term code system version job delete") + .setJobDefinitionVersion(1) + .gatedExecution() + .setParametersType(TermCodeSystemDeleteVersionJobParameters.class) + .setParametersValidator(deleteCodeSystemVersionPrameterValidator()) + .addFirstStep( + "DeleteCodeSystemVersionFirstStep", + "A first step for deleting code system versions; deletes the concepts for a provided code system version", + CodeSystemVersionPIDResult.class, + deleteCodeSystemVersionFirstStep()) + .addLastStep( + "DeleteCodeSystemVersionFinalStep", + "Deletes the code system version", + deleteCodeSystemVersionFinalStep()) + .completionHandler(deleteCodeSystemVersionCompletionHandler()) + .errorHandler(deleteCodeSystemVersionCompletionHandler()) + .build(); } /** @@ -92,45 +87,39 @@ public class TermCodeSystemJobConfig { */ @Bean public JobDefinition termCodeSystemDeleteJobDefinition() { - return JobDefinition - .newBuilder() - .setJobDefinitionId(TERM_CODE_SYSTEM_DELETE_JOB_NAME) - .setJobDescription("Term code system job delete") - .setJobDefinitionVersion(1) - .gatedExecution() - .setParametersType(TermCodeSystemDeleteJobParameters.class) - 
.setParametersValidator(codeSystemDeleteParameterValidator()) - .addFirstStep( - "FetchVersionsStep", - "Fetches all term code system version PIDs for given Code System PID", - CodeSystemVersionPIDResult.class, - readCodeSystemVersionsStep() - ) - .addIntermediateStep( - "DeleteCodeSystemConceptsByVersionPidStep", - "Deletes the concept links, concept properties, concept designations, and concepts associated with a given code system version PID", - CodeSystemVersionPIDResult.class, - deleteCodeSystemConceptsStep() - ) - .addIntermediateStep( - "DeleteCodeSystemVersionStep", - "Deletes the specified code system version", - CodeSystemVersionPIDResult.class, - deleteCodeSystemVersionsStep() - ) - .addFinalReducerStep( - "DeleteCodeSystemStep", - "Deletes the code system itself", - VoidModel.class, - deleteCodeSystemFinalStep() - ) - .completionHandler(deleteCodeSystemCompletionHandler()) - .errorHandler(deleteCodeSystemCompletionHandler()) - .build(); + return JobDefinition.newBuilder() + .setJobDefinitionId(TERM_CODE_SYSTEM_DELETE_JOB_NAME) + .setJobDescription("Term code system job delete") + .setJobDefinitionVersion(1) + .gatedExecution() + .setParametersType(TermCodeSystemDeleteJobParameters.class) + .setParametersValidator(codeSystemDeleteParameterValidator()) + .addFirstStep( + "FetchVersionsStep", + "Fetches all term code system version PIDs for given Code System PID", + CodeSystemVersionPIDResult.class, + readCodeSystemVersionsStep()) + .addIntermediateStep( + "DeleteCodeSystemConceptsByVersionPidStep", + "Deletes the concept links, concept properties, concept designations, and concepts associated with a given code system version PID", + CodeSystemVersionPIDResult.class, + deleteCodeSystemConceptsStep()) + .addIntermediateStep( + "DeleteCodeSystemVersionStep", + "Deletes the specified code system version", + CodeSystemVersionPIDResult.class, + deleteCodeSystemVersionsStep()) + .addFinalReducerStep( + "DeleteCodeSystemStep", + "Deletes the code system itself", + VoidModel.class, + deleteCodeSystemFinalStep()) + .completionHandler(deleteCodeSystemCompletionHandler()) + .errorHandler(deleteCodeSystemCompletionHandler()) + .build(); } /** delete codesystem job **/ - @Bean public TermCodeSystemDeleteJobParametersValidator codeSystemDeleteParameterValidator() { return new TermCodeSystemDeleteJobParametersValidator(); @@ -162,7 +151,6 @@ public class TermCodeSystemJobConfig { } /** Delete code system version job **/ - @Bean public DeleteCodeSystemVersionParameterValidator deleteCodeSystemVersionPrameterValidator() { return new DeleteCodeSystemVersionParameterValidator(); diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemdelete/DeleteCodeSystemCompletionHandler.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemdelete/DeleteCodeSystemCompletionHandler.java index 7607bb95e2c..83e0905f0cf 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemdelete/DeleteCodeSystemCompletionHandler.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemdelete/DeleteCodeSystemCompletionHandler.java @@ -24,8 +24,7 @@ import ca.uhn.fhir.batch2.api.JobCompletionDetails; import ca.uhn.fhir.jpa.term.api.ITermCodeSystemDeleteJobSvc; import ca.uhn.fhir.jpa.term.models.TermCodeSystemDeleteJobParameters; -public class DeleteCodeSystemCompletionHandler - implements IJobCompletionHandler { +public class 
DeleteCodeSystemCompletionHandler implements IJobCompletionHandler { private final ITermCodeSystemDeleteJobSvc myTermCodeSystemSvc; diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemdelete/DeleteCodeSystemConceptsByVersionStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemdelete/DeleteCodeSystemConceptsByVersionStep.java index abc7475a9e5..798d795f3d9 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemdelete/DeleteCodeSystemConceptsByVersionStep.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemdelete/DeleteCodeSystemConceptsByVersionStep.java @@ -30,20 +30,24 @@ import ca.uhn.fhir.jpa.term.models.TermCodeSystemDeleteJobParameters; import javax.annotation.Nonnull; -public class DeleteCodeSystemConceptsByVersionStep implements IJobStepWorker { +public class DeleteCodeSystemConceptsByVersionStep + implements IJobStepWorker< + TermCodeSystemDeleteJobParameters, CodeSystemVersionPIDResult, CodeSystemVersionPIDResult> { private final ITermCodeSystemDeleteJobSvc myITermCodeSystemSvc; - public DeleteCodeSystemConceptsByVersionStep (ITermCodeSystemDeleteJobSvc theCodeSystemDeleteJobSvc) { + public DeleteCodeSystemConceptsByVersionStep(ITermCodeSystemDeleteJobSvc theCodeSystemDeleteJobSvc) { myITermCodeSystemSvc = theCodeSystemDeleteJobSvc; } @Nonnull @Override public RunOutcome run( - @Nonnull StepExecutionDetails theStepExecutionDetails, - @Nonnull IJobDataSink theDataSink - ) throws JobExecutionFailedException { + @Nonnull + StepExecutionDetails + theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException { CodeSystemVersionPIDResult versionPidResult = theStepExecutionDetails.getData(); myITermCodeSystemSvc.deleteCodeSystemConceptsByCodeSystemVersionPid(versionPidResult.getCodeSystemVersionPID()); diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemdelete/DeleteCodeSystemStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemdelete/DeleteCodeSystemStep.java index ac9cd16e853..9627546b191 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemdelete/DeleteCodeSystemStep.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemdelete/DeleteCodeSystemStep.java @@ -33,7 +33,8 @@ import ca.uhn.fhir.jpa.term.models.TermCodeSystemDeleteJobParameters; import javax.annotation.Nonnull; -public class DeleteCodeSystemStep implements IReductionStepWorker { +public class DeleteCodeSystemStep + implements IReductionStepWorker { private final ITermCodeSystemDeleteJobSvc myITermCodeSystemSvc; @@ -44,9 +45,11 @@ public class DeleteCodeSystemStep implements IReductionStepWorker theStepExecutionDetails, - @Nonnull IJobDataSink theDataSink - ) throws JobExecutionFailedException { + @Nonnull + StepExecutionDetails + theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException { // final step long codeId = theStepExecutionDetails.getParameters().getTermPid(); myITermCodeSystemSvc.deleteCodeSystem(codeId); @@ -58,7 +61,8 @@ public class DeleteCodeSystemStep implements IReductionStepWorker theChunkDetails) { + public ChunkOutcome consume( + ChunkExecutionDetails theChunkDetails) { /* * A single code system can have multiple 
versions. * We don't want to call delete on all these systems diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemdelete/DeleteCodeSystemVersionStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemdelete/DeleteCodeSystemVersionStep.java index 2dd99624886..91e0bc8ea49 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemdelete/DeleteCodeSystemVersionStep.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemdelete/DeleteCodeSystemVersionStep.java @@ -30,7 +30,9 @@ import ca.uhn.fhir.jpa.term.models.TermCodeSystemDeleteJobParameters; import javax.annotation.Nonnull; -public class DeleteCodeSystemVersionStep implements IJobStepWorker { +public class DeleteCodeSystemVersionStep + implements IJobStepWorker< + TermCodeSystemDeleteJobParameters, CodeSystemVersionPIDResult, CodeSystemVersionPIDResult> { private final ITermCodeSystemDeleteJobSvc myITermCodeSystemSvc; @@ -41,9 +43,11 @@ public class DeleteCodeSystemVersionStep implements IJobStepWorker theStepExecutionDetails, - @Nonnull IJobDataSink theDataSink - ) throws JobExecutionFailedException { + @Nonnull + StepExecutionDetails + theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException { CodeSystemVersionPIDResult versionPidResult = theStepExecutionDetails.getData(); long versionId = versionPidResult.getCodeSystemVersionPID(); diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemdelete/ReadTermConceptVersionsStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemdelete/ReadTermConceptVersionsStep.java index 8f87b66d6ae..a8a5a309c21 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemdelete/ReadTermConceptVersionsStep.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemdelete/ReadTermConceptVersionsStep.java @@ -29,10 +29,11 @@ import ca.uhn.fhir.jpa.term.api.ITermCodeSystemDeleteJobSvc; import ca.uhn.fhir.jpa.term.models.CodeSystemVersionPIDResult; import ca.uhn.fhir.jpa.term.models.TermCodeSystemDeleteJobParameters; -import javax.annotation.Nonnull; import java.util.Iterator; +import javax.annotation.Nonnull; -public class ReadTermConceptVersionsStep implements IFirstJobStepWorker { +public class ReadTermConceptVersionsStep + implements IFirstJobStepWorker { private final ITermCodeSystemDeleteJobSvc myITermCodeSystemSvc; @@ -43,9 +44,9 @@ public class ReadTermConceptVersionsStep implements IFirstJobStepWorker theStepExecutionDetails, - @Nonnull IJobDataSink theDataSink - ) throws JobExecutionFailedException { + @Nonnull StepExecutionDetails theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException { TermCodeSystemDeleteJobParameters parameters = theStepExecutionDetails.getParameters(); long pid = parameters.getTermPid(); diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemdelete/TermCodeSystemDeleteJobParametersValidator.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemdelete/TermCodeSystemDeleteJobParametersValidator.java index a13e9e5812c..e9e33d7cf0c 100644 --- 
a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemdelete/TermCodeSystemDeleteJobParametersValidator.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemdelete/TermCodeSystemDeleteJobParametersValidator.java @@ -23,16 +23,18 @@ import ca.uhn.fhir.batch2.api.IJobParametersValidator; import ca.uhn.fhir.jpa.term.models.TermCodeSystemDeleteJobParameters; import ca.uhn.fhir.rest.api.server.RequestDetails; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; -public class TermCodeSystemDeleteJobParametersValidator implements IJobParametersValidator { +public class TermCodeSystemDeleteJobParametersValidator + implements IJobParametersValidator { @Nullable @Override - public List validate(RequestDetails theRequestDetails, @Nonnull TermCodeSystemDeleteJobParameters theParameters) { + public List validate( + RequestDetails theRequestDetails, @Nonnull TermCodeSystemDeleteJobParameters theParameters) { List errors = new ArrayList<>(); if (theParameters.getTermPid() <= 0) { errors.add("Invalid Term Code System PID " + theParameters.getTermPid()); diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemversiondelete/DeleteCodeSystemVersionCompletionHandler.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemversiondelete/DeleteCodeSystemVersionCompletionHandler.java index 0cc3a889191..226a4e1fa36 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemversiondelete/DeleteCodeSystemVersionCompletionHandler.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemversiondelete/DeleteCodeSystemVersionCompletionHandler.java @@ -24,7 +24,8 @@ import ca.uhn.fhir.batch2.api.JobCompletionDetails; import ca.uhn.fhir.jpa.term.api.ITermCodeSystemDeleteJobSvc; import ca.uhn.fhir.jpa.term.models.TermCodeSystemDeleteVersionJobParameters; -public class DeleteCodeSystemVersionCompletionHandler implements IJobCompletionHandler { +public class DeleteCodeSystemVersionCompletionHandler + implements IJobCompletionHandler { private final ITermCodeSystemDeleteJobSvc myTermCodeSystemSvc; diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemversiondelete/DeleteCodeSystemVersionFinalStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemversiondelete/DeleteCodeSystemVersionFinalStep.java index 5f7feb62d91..2785fa08189 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemversiondelete/DeleteCodeSystemVersionFinalStep.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemversiondelete/DeleteCodeSystemVersionFinalStep.java @@ -31,7 +31,8 @@ import ca.uhn.fhir.jpa.term.models.TermCodeSystemDeleteVersionJobParameters; import javax.annotation.Nonnull; -public class DeleteCodeSystemVersionFinalStep implements ILastJobStepWorker { +public class DeleteCodeSystemVersionFinalStep + implements ILastJobStepWorker { private final ITermCodeSystemDeleteJobSvc myTermCodeSystemSvc; @@ -42,9 +43,11 @@ public class DeleteCodeSystemVersionFinalStep implements ILastJobStepWorker theStepExecutionDetails, - @Nonnull IJobDataSink 
theDataSink - ) throws JobExecutionFailedException { + @Nonnull + StepExecutionDetails + theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException { long versionPid = theStepExecutionDetails.getParameters().getCodeSystemVersionPid(); myTermCodeSystemSvc.deleteCodeSystemVersion(versionPid); diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemversiondelete/DeleteCodeSystemVersionFirstStep.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemversiondelete/DeleteCodeSystemVersionFirstStep.java index ba71d516b38..55049ed5272 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemversiondelete/DeleteCodeSystemVersionFirstStep.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemversiondelete/DeleteCodeSystemVersionFirstStep.java @@ -31,7 +31,8 @@ import ca.uhn.fhir.jpa.term.models.TermCodeSystemDeleteVersionJobParameters; import javax.annotation.Nonnull; -public class DeleteCodeSystemVersionFirstStep implements IFirstJobStepWorker { +public class DeleteCodeSystemVersionFirstStep + implements IFirstJobStepWorker { private final ITermCodeSystemDeleteJobSvc myTermCodeSystemSvc; @@ -42,9 +43,9 @@ public class DeleteCodeSystemVersionFirstStep implements IFirstJobStepWorker theStepExecutionDetails, - @Nonnull IJobDataSink theDataSink - ) throws JobExecutionFailedException { + @Nonnull StepExecutionDetails theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException { long versionId = theStepExecutionDetails.getParameters().getCodeSystemVersionPid(); myTermCodeSystemSvc.deleteCodeSystemConceptsByCodeSystemVersionPid(versionId); diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemversiondelete/DeleteCodeSystemVersionParameterValidator.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemversiondelete/DeleteCodeSystemVersionParameterValidator.java index 09d47a97d57..7e7ad78d5cb 100644 --- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemversiondelete/DeleteCodeSystemVersionParameterValidator.java +++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/termcodesystem/codesystemversiondelete/DeleteCodeSystemVersionParameterValidator.java @@ -23,16 +23,18 @@ import ca.uhn.fhir.batch2.api.IJobParametersValidator; import ca.uhn.fhir.jpa.term.models.TermCodeSystemDeleteVersionJobParameters; import ca.uhn.fhir.rest.api.server.RequestDetails; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; -public class DeleteCodeSystemVersionParameterValidator implements IJobParametersValidator { +public class DeleteCodeSystemVersionParameterValidator + implements IJobParametersValidator { @Nullable @Override - public List validate(RequestDetails theRequestDetails, @Nonnull TermCodeSystemDeleteVersionJobParameters theParameters) { + public List validate( + RequestDetails theRequestDetails, @Nonnull TermCodeSystemDeleteVersionJobParameters theParameters) { ArrayList errors = new ArrayList<>(); long versionPID = theParameters.getCodeSystemVersionPid(); diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/ChunkExecutionDetails.java 
b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/ChunkExecutionDetails.java index 69fc2ebc55d..061e9307197 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/ChunkExecutionDetails.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/ChunkExecutionDetails.java @@ -30,10 +30,7 @@ public class ChunkExecutionDetails private final String myChunkId; - public ChunkExecutionDetails(IT theData, - PT theParameters, - String theInstanceId, - String theChunkId) { + public ChunkExecutionDetails(IT theData, PT theParameters, String theInstanceId, String theChunkId) { myData = theData; myParameters = theParameters; myInstanceId = theInstanceId; diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IFirstJobStepWorker.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IFirstJobStepWorker.java index 950565ec16d..9a9801e3f15 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IFirstJobStepWorker.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IFirstJobStepWorker.java @@ -21,5 +21,5 @@ package ca.uhn.fhir.batch2.api; import ca.uhn.fhir.model.api.IModelJson; -public interface IFirstJobStepWorker extends IJobStepWorker { -} +public interface IFirstJobStepWorker + extends IJobStepWorker {} diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobCompletionHandler.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobCompletionHandler.java index e580a7f8975..db5712a6545 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobCompletionHandler.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobCompletionHandler.java @@ -24,5 +24,4 @@ import ca.uhn.fhir.model.api.IModelJson; public interface IJobCompletionHandler { void jobComplete(JobCompletionDetails theDetails); - } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobCoordinator.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobCoordinator.java index a9246b75f90..46d38ff13b2 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobCoordinator.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobCoordinator.java @@ -29,10 +29,10 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import org.springframework.data.domain.Page; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.List; import java.util.Set; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; public interface IJobCoordinator { @@ -45,7 +45,8 @@ public interface IJobCoordinator { * @deprecated Use {@link #startInstance(RequestDetails, JobInstanceStartRequest)} */ @Deprecated(since = "6.8.0", forRemoval = true) - default Batch2JobStartResponse startInstance(JobInstanceStartRequest theStartRequest) throws InvalidRequestException { + default Batch2JobStartResponse startInstance(JobInstanceStartRequest theStartRequest) + throws InvalidRequestException { return startInstance(null, theStartRequest); } @@ -59,7 +60,8 @@ public interface IJobCoordinator { * @return Returns a unique ID for this job execution * @throws InvalidRequestException If the request is invalid (incorrect/missing parameters, etc) */ - Batch2JobStartResponse startInstance(RequestDetails theRequestDetails, JobInstanceStartRequest theStartRequest) throws InvalidRequestException; + 
Batch2JobStartResponse startInstance(RequestDetails theRequestDetails, JobInstanceStartRequest theStartRequest)
+			throws InvalidRequestException;
 
 	/**
 	 * Fetch details about a job instance
@@ -83,7 +85,8 @@ public interface IJobCoordinator {
 
 	JobOperationResultJson cancelInstance(String theInstanceId) throws ResourceNotFoundException;
 
-	List getInstancesbyJobDefinitionIdAndEndedStatus(String theJobDefinitionId, @Nullable Boolean theEnded, int theCount, int theStart);
+	List getInstancesbyJobDefinitionIdAndEndedStatus(
+			String theJobDefinitionId, @Nullable Boolean theEnded, int theCount, int theStart);
 
 	/**
 	 * Fetches all job instances tht meet the FetchRequest criteria
@@ -95,7 +98,8 @@ public interface IJobCoordinator {
 	/**
 	 * Fetches all job instances by job definition id and statuses
 	 */
-	List getJobInstancesByJobDefinitionIdAndStatuses(String theJobDefinitionId, Set theStatuses, int theCount, int theStart);
+	List getJobInstancesByJobDefinitionIdAndStatuses(
+			String theJobDefinitionId, Set theStatuses, int theCount, int theStart);
 
 	/**
 	 * Fetches all jobs by job definition id
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobDataSink.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobDataSink.java
index 181c295a4b6..6d6768b02c7 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobDataSink.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobDataSink.java
@@ -74,5 +74,4 @@ public interface IJobDataSink {
 	 * @param theWarningProcessor The processor for the warning.
 	 */
 	void setWarningProcessor(IWarningProcessor theWarningProcessor);
-
 }
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobMaintenanceService.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobMaintenanceService.java
index 606ee1b4458..c909671b36f 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobMaintenanceService.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobMaintenanceService.java
@@ -28,7 +28,7 @@ public interface IJobMaintenanceService {
 	 */
 	boolean triggerMaintenancePass();
 
-	void runMaintenancePass();
+	void runMaintenancePass();
 
 	/**
	 * Forces a second maintenance run.
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobParametersValidator.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobParametersValidator.java
index fe38c11b957..98ebb948adf 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobParametersValidator.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobParametersValidator.java
@@ -22,9 +22,9 @@ package ca.uhn.fhir.batch2.api;
 import ca.uhn.fhir.model.api.IModelJson;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 
+import java.util.List;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
-import java.util.List;
 
 /**
  * This interface can be used to validate the parameters
@@ -51,5 +51,4 @@ public interface IJobParametersValidator {
 	 */
 	@Nullable
 	List validate(RequestDetails theRequestDetails, @Nonnull T theParameters);
-
 }
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobPersistence.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobPersistence.java
index 047ce305e50..5f4c50e5b79 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobPersistence.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobPersistence.java
@@ -34,7 +34,6 @@ import org.springframework.data.domain.Pageable;
 import org.springframework.transaction.annotation.Propagation;
 import org.springframework.transaction.annotation.Transactional;
 
-import javax.annotation.Nonnull;
 import java.util.Collections;
 import java.util.Date;
 import java.util.Iterator;
@@ -42,6 +41,7 @@ import java.util.List;
 import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Stream;
+import javax.annotation.Nonnull;
 
 /**
  *
@@ -54,7 +54,6 @@ import java.util.stream.Stream;
 public interface IJobPersistence extends IWorkChunkPersistence {
 
 	Logger ourLog = LoggerFactory.getLogger(IJobPersistence.class);
-
 	/**
 	 * Store a new job instance. This will be called when a new job instance is being kicked off.
 	 *
@@ -71,7 +70,8 @@ public interface IJobPersistence extends IWorkChunkPersistence {
 	Optional fetchInstance(String theInstanceId);
 	// on implementations
 	@Transactional(propagation = Propagation.REQUIRES_NEW)
-	List fetchInstances(String theJobDefinitionId, Set theStatuses, Date theCutoff, Pageable thePageable);
+	List fetchInstances(
+			String theJobDefinitionId, Set theStatuses, Date theCutoff, Pageable thePageable);
 
 	/**
 	 * Fetches any existing jobs matching provided request parameters
@@ -93,7 +93,8 @@ public interface IJobPersistence extends IWorkChunkPersistence {
 	List fetchRecentInstances(int thePageSize, int thePageIndex);
 	// on implementations
 	@Transactional(propagation = Propagation.REQUIRES_NEW)
-	List fetchInstancesByJobDefinitionIdAndStatus(String theJobDefinitionId, Set theRequestedStatuses, int thePageSize, int thePageIndex);
+	List fetchInstancesByJobDefinitionIdAndStatus(
+			String theJobDefinitionId, Set theRequestedStatuses, int thePageSize, int thePageIndex);
 
 	/**
 	 * Fetch all job instances for a given job definition id
@@ -135,7 +136,7 @@ public interface IJobPersistence extends IWorkChunkPersistence {
 	 * Return true from the callback if the record write should continue, or false if
 	 * the change should be discarded.
 	 */
-	interface JobInstanceUpdateCallback {
+	interface JobInstanceUpdateCallback {
 		/**
 		 * Modify theInstance within a write-lock transaction.
 		 * @param theInstance a copy of the instance to modify.
@@ -176,7 +177,8 @@ public interface IJobPersistence extends IWorkChunkPersistence { void deleteChunksAndMarkInstanceAsChunksPurged(String theInstanceId); @Transactional(propagation = Propagation.MANDATORY) - boolean markInstanceAsStatusWhenStatusIn(String theInstance, StatusEnum theStatusEnum, Set thePriorStates); + boolean markInstanceAsStatusWhenStatusIn( + String theInstance, StatusEnum theStatusEnum, Set thePriorStates); /** * Marks an instance as cancelled @@ -208,9 +210,9 @@ public interface IJobPersistence extends IWorkChunkPersistence { @Override public String toString() { return new ToStringBuilder(this) - .append("jobInstanceId", jobInstanceId) - .append("workChunkId", workChunkId) - .toString(); + .append("jobInstanceId", jobInstanceId) + .append("workChunkId", workChunkId) + .toString(); } } @@ -232,13 +234,16 @@ public interface IJobPersistence extends IWorkChunkPersistence { instance.setStatus(StatusEnum.QUEUED); String instanceId = storeNewInstance(instance); - ourLog.info("Stored new {} job {} with status {}", theJobDefinition.getJobDefinitionId(), instanceId, instance.getStatus()); + ourLog.info( + "Stored new {} job {} with status {}", + theJobDefinition.getJobDefinitionId(), + instanceId, + instance.getStatus()); ourLog.debug("Job parameters: {}", instance.getParameters()); WorkChunkCreateEvent batchWorkChunk = WorkChunkCreateEvent.firstChunk(theJobDefinition, instanceId); String chunkId = onWorkChunkCreate(batchWorkChunk); return new CreateResult(instanceId, chunkId); - } /** @@ -248,7 +253,7 @@ public interface IJobPersistence extends IWorkChunkPersistence { */ @Transactional(propagation = Propagation.MANDATORY) default boolean onChunkDequeued(String theJobInstanceId) { - return markInstanceAsStatusWhenStatusIn(theJobInstanceId, StatusEnum.IN_PROGRESS, Collections.singleton(StatusEnum.QUEUED)); + return markInstanceAsStatusWhenStatusIn( + theJobInstanceId, StatusEnum.IN_PROGRESS, Collections.singleton(StatusEnum.QUEUED)); } - } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobStepWorker.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobStepWorker.java index 2284d29d76f..3ce89138632 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobStepWorker.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IJobStepWorker.java @@ -47,5 +47,6 @@ public interface IJobStepWorker theStepExecutionDetails, @Nonnull IJobDataSink theDataSink) throws JobExecutionFailedException; + RunOutcome run(@Nonnull StepExecutionDetails theStepExecutionDetails, @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException; } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/ILastJobStepWorker.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/ILastJobStepWorker.java index 9a290c51357..69cb53d26bd 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/ILastJobStepWorker.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/ILastJobStepWorker.java @@ -21,5 +21,5 @@ package ca.uhn.fhir.batch2.api; import ca.uhn.fhir.model.api.IModelJson; -public interface ILastJobStepWorker extends IJobStepWorker { -} +public interface ILastJobStepWorker + extends IJobStepWorker {} diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IReductionStepWorker.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IReductionStepWorker.java index 5a9d6e8797a..78e0519fc5c 100644 --- 
a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IReductionStepWorker.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IReductionStepWorker.java @@ -31,7 +31,7 @@ import javax.annotation.Nonnull; * @param Output Job Report Type */ public interface IReductionStepWorker - extends IJobStepWorker { + extends IJobStepWorker { /** * diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IWorkChunkPersistence.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IWorkChunkPersistence.java index d059fa74ef5..d29a476c214 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IWorkChunkPersistence.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/IWorkChunkPersistence.java @@ -94,7 +94,6 @@ public interface IWorkChunkPersistence { @Transactional(propagation = Propagation.REQUIRED) void onWorkChunkFailed(String theChunkId, String theErrorMessage); - /** * Report success and complete the chunk. * Transition to {@link WorkChunkStatusEnum#COMPLETED} @@ -113,7 +112,8 @@ public interface IWorkChunkPersistence { * @param theErrorMsg - error message (if status warrants it) */ @Transactional(propagation = Propagation.MANDATORY) - void markWorkChunksWithStatusAndWipeData(String theInstanceId, List theChunkIds, WorkChunkStatusEnum theStatus, String theErrorMsg); + void markWorkChunksWithStatusAndWipeData( + String theInstanceId, List theChunkIds, WorkChunkStatusEnum theStatus, String theErrorMsg); /** * Fetch all chunks for a given instance. @@ -124,7 +124,6 @@ public interface IWorkChunkPersistence { */ Iterator fetchAllWorkChunksIterator(String theInstanceId, boolean theWithData); - /** * Fetch all chunks with data for a given instance for a given step id * @@ -140,7 +139,6 @@ public interface IWorkChunkPersistence { * @param theStepId the step that is starting * @return the WorkChunk ids */ - List fetchAllChunkIdsForStepWithStatus(String theInstanceId, String theStepId, WorkChunkStatusEnum theStatusEnum); - - + List fetchAllChunkIdsForStepWithStatus( + String theInstanceId, String theStepId, WorkChunkStatusEnum theStatusEnum); } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/JobCompletionDetails.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/JobCompletionDetails.java index 0224178eb6b..ca3d04a259c 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/JobCompletionDetails.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/JobCompletionDetails.java @@ -54,5 +54,4 @@ public class JobCompletionDetails { public IJobInstance getInstance() { return myInstance; } - } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/JobOperationResultJson.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/JobOperationResultJson.java index 2f82c48a3ca..d320d11be7a 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/JobOperationResultJson.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/JobOperationResultJson.java @@ -25,8 +25,10 @@ import com.fasterxml.jackson.annotation.JsonProperty; public class JobOperationResultJson implements IModelJson { @JsonProperty("operation") private String myOperation; + @JsonProperty("success") private Boolean mySuccess; + @JsonProperty("message") private String myMessage; diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/ReductionStepExecutionDetails.java 
b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/ReductionStepExecutionDetails.java index db24e40c1cd..620a8414b5e 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/ReductionStepExecutionDetails.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/ReductionStepExecutionDetails.java @@ -33,11 +33,10 @@ import javax.annotation.Nullable; * @param - Output data type. Output will actually be a ListResult of these objects. */ public class ReductionStepExecutionDetails - extends StepExecutionDetails { + extends StepExecutionDetails { - public ReductionStepExecutionDetails(@Nonnull PT theParameters, - @Nullable IT theData, - @Nonnull JobInstance theInstance) { + public ReductionStepExecutionDetails( + @Nonnull PT theParameters, @Nullable IT theData, @Nonnull JobInstance theInstance) { // TODO KHS shouldn't the chunkId be null? super(theParameters, theData, theInstance, "VOID"); } @@ -49,7 +48,8 @@ public class ReductionStepExecutionDetails private final IJobInstance myInstance; private final String myChunkId; - public StepExecutionDetails(@Nonnull PT theParameters, - @Nullable IT theData, - @Nonnull JobInstance theInstance, - @Nonnull String theChunkId) { + public StepExecutionDetails( + @Nonnull PT theParameters, + @Nullable IT theData, + @Nonnull JobInstance theInstance, + @Nonnull String theChunkId) { Validate.notNull(theParameters); myParameters = theParameters; myData = theData; diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/VoidModel.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/VoidModel.java index ed6dcf67dca..e6df32d14a4 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/VoidModel.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/VoidModel.java @@ -24,5 +24,4 @@ import ca.uhn.fhir.model.api.IModelJson; /** * Used as the input type for the first step */ -public final class VoidModel implements IModelJson { -} +public final class VoidModel implements IModelJson {} diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/package-info.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/package-info.java index d7426d10cef..c2092b3b418 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/package-info.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/api/package-info.java @@ -48,4 +48,3 @@ * @since 6.0.0 */ package ca.uhn.fhir.batch2.api; - diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/config/BaseBatch2Config.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/config/BaseBatch2Config.java index 03482f90e36..071a89fe716 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/config/BaseBatch2Config.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/config/BaseBatch2Config.java @@ -49,6 +49,7 @@ public abstract class BaseBatch2Config { @Autowired private IJobPersistence myPersistence; + @Autowired private IChannelFactory myChannelFactory; @@ -68,56 +69,59 @@ public abstract class BaseBatch2Config { } @Bean - public IJobCoordinator batch2JobCoordinator(JobDefinitionRegistry theJobDefinitionRegistry, - BatchJobSender theBatchJobSender, - WorkChunkProcessor theExecutor, - IJobMaintenanceService theJobMaintenanceService, - IHapiTransactionService theTransactionService) { + public IJobCoordinator batch2JobCoordinator( + JobDefinitionRegistry theJobDefinitionRegistry, + BatchJobSender theBatchJobSender, 
+ WorkChunkProcessor theExecutor, + IJobMaintenanceService theJobMaintenanceService, + IHapiTransactionService theTransactionService) { return new JobCoordinatorImpl( - theBatchJobSender, - batch2ProcessingChannelReceiver(myChannelFactory), - myPersistence, - theJobDefinitionRegistry, - theExecutor, - theJobMaintenanceService, - theTransactionService); + theBatchJobSender, + batch2ProcessingChannelReceiver(myChannelFactory), + myPersistence, + theJobDefinitionRegistry, + theExecutor, + theJobMaintenanceService, + theTransactionService); } @Bean - public IReductionStepExecutorService reductionStepExecutorService(IJobPersistence theJobPersistence, - IHapiTransactionService theTransactionService, - JobDefinitionRegistry theJobDefinitionRegistry) { + public IReductionStepExecutorService reductionStepExecutorService( + IJobPersistence theJobPersistence, + IHapiTransactionService theTransactionService, + JobDefinitionRegistry theJobDefinitionRegistry) { return new ReductionStepExecutorServiceImpl(theJobPersistence, theTransactionService, theJobDefinitionRegistry); } @Bean - public IJobMaintenanceService batch2JobMaintenanceService(ISchedulerService theSchedulerService, - JobDefinitionRegistry theJobDefinitionRegistry, - JpaStorageSettings theStorageSettings, - BatchJobSender theBatchJobSender, - WorkChunkProcessor theExecutor, - IReductionStepExecutorService theReductionStepExecutorService - ) { - return new JobMaintenanceServiceImpl(theSchedulerService, - myPersistence, - theStorageSettings, - theJobDefinitionRegistry, - theBatchJobSender, - theExecutor, - theReductionStepExecutorService); + public IJobMaintenanceService batch2JobMaintenanceService( + ISchedulerService theSchedulerService, + JobDefinitionRegistry theJobDefinitionRegistry, + JpaStorageSettings theStorageSettings, + BatchJobSender theBatchJobSender, + WorkChunkProcessor theExecutor, + IReductionStepExecutorService theReductionStepExecutorService) { + return new JobMaintenanceServiceImpl( + theSchedulerService, + myPersistence, + theStorageSettings, + theJobDefinitionRegistry, + theBatchJobSender, + theExecutor, + theReductionStepExecutorService); } @Bean public IChannelProducer batch2ProcessingChannelProducer(IChannelFactory theChannelFactory) { - ChannelProducerSettings settings = new ChannelProducerSettings() - .setConcurrentConsumers(getConcurrentConsumers()); + ChannelProducerSettings settings = + new ChannelProducerSettings().setConcurrentConsumers(getConcurrentConsumers()); return theChannelFactory.getOrCreateProducer(CHANNEL_NAME, JobWorkNotificationJsonMessage.class, settings); } @Bean public IChannelReceiver batch2ProcessingChannelReceiver(IChannelFactory theChannelFactory) { - ChannelConsumerSettings settings = new ChannelConsumerSettings() - .setConcurrentConsumers(getConcurrentConsumers()); + ChannelConsumerSettings settings = + new ChannelConsumerSettings().setConcurrentConsumers(getConcurrentConsumers()); return theChannelFactory.getOrCreateReceiver(CHANNEL_NAME, JobWorkNotificationJsonMessage.class, settings); } @@ -132,5 +136,4 @@ public abstract class BaseBatch2Config { protected int getConcurrentConsumers() { return 4; } - } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/config/Batch2JobRegisterer.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/config/Batch2JobRegisterer.java index 68f845a4ef7..93e01067f17 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/config/Batch2JobRegisterer.java +++ 
b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/config/Batch2JobRegisterer.java @@ -38,10 +38,11 @@ public class Batch2JobRegisterer { @Autowired private ApplicationContext myApplicationContext; - // The timing of this call is sensitive. It needs to be called after all the job definition beans have been created - // but before any jobs are run. E.g. ValidationDataInitializerSvcImpl can start a REINDEX job, so we use an EventListener - // so we know all the JobDefinition beans have been created, but we use @Order(IHapiBootOrder.ADD_JOB_DEFINITIONS) to ensure it is called + // but before any jobs are run. E.g. ValidationDataInitializerSvcImpl can start a REINDEX job, so we use an + // EventListener + // so we know all the JobDefinition beans have been created, but we use @Order(IHapiBootOrder.ADD_JOB_DEFINITIONS) + // to ensure it is called // before any other EventListeners that might start a job. @EventListener(classes = ContextRefreshedEvent.class) @Order(IHapiBootOrder.ADD_JOB_DEFINITIONS) @@ -51,9 +52,11 @@ public class Batch2JobRegisterer { for (Map.Entry next : batchJobs.entrySet()) { JobDefinition jobDefinition = next.getValue(); - ourLog.info("Registering Batch2 Job Definition: {} / {}", jobDefinition.getJobDefinitionId(), jobDefinition.getJobDefinitionVersion()); + ourLog.info( + "Registering Batch2 Job Definition: {} / {}", + jobDefinition.getJobDefinitionId(), + jobDefinition.getJobDefinitionVersion()); jobRegistry.addJobDefinition(jobDefinition); } } - } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/BaseDataSink.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/BaseDataSink.java index 1a709663bed..c7a4d7aeeec 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/BaseDataSink.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/BaseDataSink.java @@ -27,7 +27,8 @@ import ca.uhn.fhir.model.api.IModelJson; import ca.uhn.fhir.util.Logs; import org.slf4j.Logger; -abstract class BaseDataSink implements IJobDataSink { +abstract class BaseDataSink + implements IJobDataSink { private static final Logger ourLog = Logs.getBatchTroubleshootingLog(); private final String myInstanceId; @@ -36,8 +37,7 @@ abstract class BaseDataSink theJobWorkCursor) { + protected BaseDataSink(String theInstanceId, JobWorkCursor theJobWorkCursor) { myInstanceId = theInstanceId; myJobWorkCursor = theJobWorkCursor; myJobDefinitionId = theJobWorkCursor.getJobDefinition().getJobDefinitionId(); diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/FinalStepDataSink.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/FinalStepDataSink.java index 6714a59130a..14335a5d2c1 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/FinalStepDataSink.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/FinalStepDataSink.java @@ -24,19 +24,22 @@ import ca.uhn.fhir.batch2.api.VoidModel; import ca.uhn.fhir.batch2.model.JobWorkCursor; import ca.uhn.fhir.batch2.model.WorkChunkData; import ca.uhn.fhir.i18n.Msg; -import ca.uhn.fhir.util.Logs; import ca.uhn.fhir.model.api.IModelJson; +import ca.uhn.fhir.util.Logs; import org.slf4j.Logger; import javax.annotation.Nonnull; -class FinalStepDataSink extends BaseDataSink { +class FinalStepDataSink extends BaseDataSink { private static final Logger ourLog = Logs.getBatchTroubleshootingLog(); /** * Constructor */ - FinalStepDataSink(@Nonnull String 
theJobDefinitionId, @Nonnull String theInstanceId, @Nonnull JobWorkCursor theJobWorkCursor) { + FinalStepDataSink( + @Nonnull String theJobDefinitionId, + @Nonnull String theInstanceId, + @Nonnull JobWorkCursor theJobWorkCursor) { super(theInstanceId, theJobWorkCursor); } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobCoordinatorImpl.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobCoordinatorImpl.java index 82f1f16271e..23cf995fa6d 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobCoordinatorImpl.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobCoordinatorImpl.java @@ -46,14 +46,14 @@ import org.springframework.messaging.MessageHandler; import org.springframework.transaction.support.TransactionSynchronizationAdapter; import org.springframework.transaction.support.TransactionSynchronizationManager; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import javax.annotation.PostConstruct; -import javax.annotation.PreDestroy; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import javax.annotation.PostConstruct; +import javax.annotation.PreDestroy; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -72,13 +72,14 @@ public class JobCoordinatorImpl implements IJobCoordinator { /** * Constructor */ - public JobCoordinatorImpl(@Nonnull BatchJobSender theBatchJobSender, - @Nonnull IChannelReceiver theWorkChannelReceiver, - @Nonnull IJobPersistence theJobPersistence, - @Nonnull JobDefinitionRegistry theJobDefinitionRegistry, - @Nonnull WorkChunkProcessor theExecutorSvc, - @Nonnull IJobMaintenanceService theJobMaintenanceService, - @Nonnull IHapiTransactionService theTransactionService) { + public JobCoordinatorImpl( + @Nonnull BatchJobSender theBatchJobSender, + @Nonnull IChannelReceiver theWorkChannelReceiver, + @Nonnull IJobPersistence theJobPersistence, + @Nonnull JobDefinitionRegistry theJobDefinitionRegistry, + @Nonnull WorkChunkProcessor theExecutorSvc, + @Nonnull IJobMaintenanceService theJobMaintenanceService, + @Nonnull IHapiTransactionService theTransactionService) { Validate.notNull(theJobPersistence); myJobPersistence = theJobPersistence; @@ -86,14 +87,21 @@ public class JobCoordinatorImpl implements IJobCoordinator { myWorkChannelReceiver = theWorkChannelReceiver; myJobDefinitionRegistry = theJobDefinitionRegistry; - myReceiverHandler = new WorkChannelMessageHandler(theJobPersistence, theJobDefinitionRegistry, theBatchJobSender, theExecutorSvc, theJobMaintenanceService, theTransactionService); + myReceiverHandler = new WorkChannelMessageHandler( + theJobPersistence, + theJobDefinitionRegistry, + theBatchJobSender, + theExecutorSvc, + theJobMaintenanceService, + theTransactionService); myJobQuerySvc = new JobQuerySvc(theJobPersistence, theJobDefinitionRegistry); myJobParameterJsonValidator = new JobParameterJsonValidator(); myTransactionService = theTransactionService; } @Override - public Batch2JobStartResponse startInstance(RequestDetails theRequestDetails, JobInstanceStartRequest theStartRequest) { + public Batch2JobStartResponse startInstance( + RequestDetails theRequestDetails, JobInstanceStartRequest theStartRequest) { String paramsString = theStartRequest.getParameters(); if (isBlank(paramsString)) { throw new InvalidRequestException(Msg.code(2065) + "No parameters supplied"); @@ -102,12 +110,14 
@@ public class JobCoordinatorImpl implements IJobCoordinator { // if cache - use that first if (theStartRequest.isUseCache()) { - FetchJobInstancesRequest request = new FetchJobInstancesRequest(theStartRequest.getJobDefinitionId(), theStartRequest.getParameters(), getStatesThatTriggerCache()); + FetchJobInstancesRequest request = new FetchJobInstancesRequest( + theStartRequest.getJobDefinitionId(), theStartRequest.getParameters(), getStatesThatTriggerCache()); List existing = myJobPersistence.fetchInstances(request, 0, 1000); if (!existing.isEmpty()) { // we'll look for completed ones first... otherwise, take any of the others - existing.sort((o1, o2) -> -(o1.getStatus().ordinal() - o2.getStatus().ordinal())); + existing.sort( + (o1, o2) -> -(o1.getStatus().ordinal() - o2.getStatus().ordinal())); JobInstance first = existing.stream().findFirst().orElseThrow(); @@ -115,22 +125,29 @@ public class JobCoordinatorImpl implements IJobCoordinator { response.setInstanceId(first.getInstanceId()); response.setUsesCachedResult(true); - ourLog.info("Reusing cached {} job with status {} and id {}", first.getJobDefinitionId(), first.getStatus(), first.getInstanceId()); + ourLog.info( + "Reusing cached {} job with status {} and id {}", + first.getJobDefinitionId(), + first.getStatus(), + first.getInstanceId()); return response; } } JobDefinition jobDefinition = myJobDefinitionRegistry - .getLatestJobDefinition(theStartRequest.getJobDefinitionId()).orElseThrow(() -> new IllegalArgumentException(Msg.code(2063) + "Unknown job definition ID: " + theStartRequest.getJobDefinitionId())); + .getLatestJobDefinition(theStartRequest.getJobDefinitionId()) + .orElseThrow(() -> new IllegalArgumentException( + Msg.code(2063) + "Unknown job definition ID: " + theStartRequest.getJobDefinitionId())); myJobParameterJsonValidator.validateJobParameters(theRequestDetails, theStartRequest, jobDefinition); - IJobPersistence.CreateResult instanceAndFirstChunk = - myTransactionService.withSystemRequest().execute(() -> - myJobPersistence.onCreateWithFirstChunk(jobDefinition, theStartRequest.getParameters())); + IJobPersistence.CreateResult instanceAndFirstChunk = myTransactionService + .withSystemRequest() + .execute(() -> myJobPersistence.onCreateWithFirstChunk(jobDefinition, theStartRequest.getParameters())); - JobWorkNotification workNotification = JobWorkNotification.firstStepNotification(jobDefinition, instanceAndFirstChunk.jobInstanceId, instanceAndFirstChunk.workChunkId); + JobWorkNotification workNotification = JobWorkNotification.firstStepNotification( + jobDefinition, instanceAndFirstChunk.jobInstanceId, instanceAndFirstChunk.workChunkId); sendBatchJobWorkNotificationAfterCommit(workNotification); Batch2JobStartResponse response = new Batch2JobStartResponse(); @@ -166,7 +183,7 @@ public class JobCoordinatorImpl implements IJobCoordinator { * Cache will be used if an identical job is QUEUED or IN_PROGRESS. Otherwise a new one will kickoff. 
*/ private StatusEnum[] getStatesThatTriggerCache() { - return new StatusEnum[]{StatusEnum.QUEUED, StatusEnum.IN_PROGRESS}; + return new StatusEnum[] {StatusEnum.QUEUED, StatusEnum.IN_PROGRESS}; } @Override @@ -186,18 +203,23 @@ public class JobCoordinatorImpl implements IJobCoordinator { } @Override - public List getInstancesbyJobDefinitionIdAndEndedStatus(String theJobDefinitionId, @Nullable Boolean theEnded, int theCount, int theStart) { - return myJobQuerySvc.getInstancesByJobDefinitionIdAndEndedStatus(theJobDefinitionId, theEnded, theCount, theStart); + public List getInstancesbyJobDefinitionIdAndEndedStatus( + String theJobDefinitionId, @Nullable Boolean theEnded, int theCount, int theStart) { + return myJobQuerySvc.getInstancesByJobDefinitionIdAndEndedStatus( + theJobDefinitionId, theEnded, theCount, theStart); } @Override - public List getJobInstancesByJobDefinitionIdAndStatuses(String theJobDefinitionId, Set theStatuses, int theCount, int theStart) { - return myJobQuerySvc.getInstancesByJobDefinitionAndStatuses(theJobDefinitionId, theStatuses, theCount, theStart); + public List getJobInstancesByJobDefinitionIdAndStatuses( + String theJobDefinitionId, Set theStatuses, int theCount, int theStart) { + return myJobQuerySvc.getInstancesByJobDefinitionAndStatuses( + theJobDefinitionId, theStatuses, theCount, theStart); } @Override public List getJobInstancesByJobDefinitionId(String theJobDefinitionId, int theCount, int theStart) { - return getJobInstancesByJobDefinitionIdAndStatuses(theJobDefinitionId, new HashSet<>(Arrays.asList(StatusEnum.values())), theCount, theStart); + return getJobInstancesByJobDefinitionIdAndStatuses( + theJobDefinitionId, new HashSet<>(Arrays.asList(StatusEnum.values())), theCount, theStart); } @Override @@ -205,7 +227,8 @@ public class JobCoordinatorImpl implements IJobCoordinator { return myJobQuerySvc.fetchAllInstances(theFetchRequest); } - // wipmb For 6.8 - Clarify this interface. We currently return a JobOperationResultJson, and don't throw ResourceNotFoundException + // wipmb For 6.8 - Clarify this interface. 
We currently return a JobOperationResultJson, and don't throw + // ResourceNotFoundException @Override public JobOperationResultJson cancelInstance(String theInstanceId) throws ResourceNotFoundException { return myJobPersistence.cancelInstance(theInstanceId); diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobDataSink.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobDataSink.java index b87b40aceea..26fc1c922c7 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobDataSink.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobDataSink.java @@ -28,16 +28,17 @@ import ca.uhn.fhir.batch2.model.JobWorkNotification; import ca.uhn.fhir.batch2.model.WorkChunkCreateEvent; import ca.uhn.fhir.batch2.model.WorkChunkData; import ca.uhn.fhir.i18n.Msg; -import ca.uhn.fhir.util.Logs; import ca.uhn.fhir.model.api.IModelJson; import ca.uhn.fhir.util.JsonUtil; +import ca.uhn.fhir.util.Logs; import org.slf4j.Logger; -import javax.annotation.Nonnull; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; +import javax.annotation.Nonnull; -class JobDataSink extends BaseDataSink { +class JobDataSink + extends BaseDataSink { private static final Logger ourLog = Logs.getBatchTroubleshootingLog(); private final BatchJobSender myBatchJobSender; @@ -49,11 +50,12 @@ class JobDataSink myLastChunkId = new AtomicReference<>(); private final boolean myGatedExecution; - JobDataSink(@Nonnull BatchJobSender theBatchJobSender, - @Nonnull IJobPersistence theJobPersistence, - @Nonnull JobDefinition theDefinition, - @Nonnull String theInstanceId, - @Nonnull JobWorkCursor theJobWorkCursor) { + JobDataSink( + @Nonnull BatchJobSender theBatchJobSender, + @Nonnull IJobPersistence theJobPersistence, + @Nonnull JobDefinition theDefinition, + @Nonnull String theInstanceId, + @Nonnull JobWorkCursor theJobWorkCursor) { super(theInstanceId, theJobWorkCursor); myBatchJobSender = theBatchJobSender; myJobPersistence = theJobPersistence; @@ -72,12 +74,14 @@ class JobDataSink the job parameter type for the definition * @return true if it did not already exist and was registered */ - public synchronized boolean addJobDefinitionIfNotRegistered(@Nonnull JobDefinition theDefinition) { - Optional> orig = getJobDefinition(theDefinition.getJobDefinitionId(), theDefinition.getJobDefinitionVersion()); + public synchronized boolean addJobDefinitionIfNotRegistered( + @Nonnull JobDefinition theDefinition) { + Optional> orig = + getJobDefinition(theDefinition.getJobDefinitionId(), theDefinition.getJobDefinitionVersion()); if (orig.isPresent()) { return false; } @@ -73,17 +75,24 @@ public class JobDefinitionRegistry { Set stepIds = new HashSet<>(); for (JobDefinitionStep next : theDefinition.getSteps()) { if (!stepIds.add(next.getStepId())) { - throw new ConfigurationException(Msg.code(2046) + "Duplicate step[" + next.getStepId() + "] in definition[" + jobDefinitionId + "] version: " + theDefinition.getJobDefinitionVersion()); + throw new ConfigurationException( + Msg.code(2046) + "Duplicate step[" + next.getStepId() + "] in definition[" + jobDefinitionId + + "] version: " + theDefinition.getJobDefinitionVersion()); } } - NavigableMap> versionMap = myJobDefinitions.computeIfAbsent(jobDefinitionId, t -> new TreeMap<>()); + NavigableMap> versionMap = + myJobDefinitions.computeIfAbsent(jobDefinitionId, t -> new TreeMap<>()); if 
(versionMap.containsKey(theDefinition.getJobDefinitionVersion())) { if (versionMap.get(theDefinition.getJobDefinitionVersion()) == theDefinition) { - ourLog.warn("job[{}] version: {} already registered. Not registering again.", jobDefinitionId, theDefinition.getJobDefinitionVersion()); + ourLog.warn( + "job[{}] version: {} already registered. Not registering again.", + jobDefinitionId, + theDefinition.getJobDefinitionVersion()); return; } - throw new ConfigurationException(Msg.code(2047) + "Multiple definitions for job[" + jobDefinitionId + "] version: " + theDefinition.getJobDefinitionVersion()); + throw new ConfigurationException(Msg.code(2047) + "Multiple definitions for job[" + jobDefinitionId + + "] version: " + theDefinition.getJobDefinitionVersion()); } versionMap.put(theDefinition.getJobDefinitionVersion(), theDefinition); } @@ -109,7 +118,8 @@ public class JobDefinitionRegistry { return Optional.of(versionMap.lastEntry().getValue()); } - public Optional> getJobDefinition(@Nonnull String theJobDefinitionId, int theJobDefinitionVersion) { + public Optional> getJobDefinition( + @Nonnull String theJobDefinitionId, int theJobDefinitionVersion) { NavigableMap> versionMap = myJobDefinitions.get(theJobDefinitionId); if (versionMap == null || versionMap.isEmpty()) { return Optional.empty(); @@ -123,7 +133,8 @@ public class JobDefinitionRegistry { public JobDefinition getJobDefinitionOrThrowException(String theJobDefinitionId, int theJobDefinitionVersion) { Optional> opt = getJobDefinition(theJobDefinitionId, theJobDefinitionVersion); if (opt.isEmpty()) { - String msg = "Unknown job definition ID[" + theJobDefinitionId + "] version[" + theJobDefinitionVersion + "]"; + String msg = + "Unknown job definition ID[" + theJobDefinitionId + "] version[" + theJobDefinitionVersion + "]"; ourLog.warn(msg); throw new JobExecutionFailedException(Msg.code(2043) + msg); } @@ -139,10 +150,7 @@ public class JobDefinitionRegistry { * @return a list of Job Definition Ids in alphabetical order */ public List getJobDefinitionIds() { - return myJobDefinitions.keySet() - .stream() - .sorted() - .collect(Collectors.toList()); + return myJobDefinitions.keySet().stream().sorted().collect(Collectors.toList()); } public boolean isEmpty() { @@ -151,10 +159,13 @@ public class JobDefinitionRegistry { @SuppressWarnings("unchecked") public JobDefinition getJobDefinitionOrThrowException(JobInstance theJobInstance) { - return (JobDefinition) getJobDefinitionOrThrowException(theJobInstance.getJobDefinitionId(), theJobInstance.getJobDefinitionVersion()); + return (JobDefinition) getJobDefinitionOrThrowException( + theJobInstance.getJobDefinitionId(), theJobInstance.getJobDefinitionVersion()); } public Collection getJobDefinitionVersions(String theDefinitionId) { - return myJobDefinitions.getOrDefault(theDefinitionId, ImmutableSortedMap.of()).keySet(); + return myJobDefinitions + .getOrDefault(theDefinitionId, ImmutableSortedMap.of()) + .keySet(); } } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobParameterJsonValidator.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobParameterJsonValidator.java index af02a2c1721..279d0d8d9e0 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobParameterJsonValidator.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobParameterJsonValidator.java @@ -27,28 +27,34 @@ import ca.uhn.fhir.model.api.IModelJson; import ca.uhn.fhir.rest.api.server.RequestDetails; 
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; +import java.util.Collections; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.validation.ConstraintViolation; import javax.validation.Validation; import javax.validation.Validator; import javax.validation.ValidatorFactory; -import java.util.Collections; -import java.util.List; -import java.util.Set; -import java.util.stream.Collectors; import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; class JobParameterJsonValidator { private final ValidatorFactory myValidatorFactory = Validation.buildDefaultValidatorFactory(); - void validateJobParameters(RequestDetails theRequestDetails, @Nonnull JobInstanceStartRequest theStartRequest, @Nonnull JobDefinition theJobDefinition) { + void validateJobParameters( + RequestDetails theRequestDetails, + @Nonnull JobInstanceStartRequest theStartRequest, + @Nonnull JobDefinition theJobDefinition) { // JSR 380 Validator validator = myValidatorFactory.getValidator(); PT parameters = theStartRequest.getParameters(theJobDefinition.getParametersType()); Set> constraintErrors = validator.validate(parameters); - List errorStrings = constraintErrors.stream().map(t -> t.getPropertyPath() + " - " + t.getMessage()).sorted().collect(Collectors.toList()); + List errorStrings = constraintErrors.stream() + .map(t -> t.getPropertyPath() + " - " + t.getMessage()) + .sorted() + .collect(Collectors.toList()); // Programmatic Validator IJobParametersValidator parametersValidator = theJobDefinition.getParametersValidator(); @@ -59,7 +65,8 @@ class JobParameterJsonValidator { } if (!errorStrings.isEmpty()) { - String message = "Failed to validate parameters for job of type " + theJobDefinition.getJobDefinitionId() + ": " + errorStrings.stream().map(t -> "\n * " + t).collect(Collectors.joining()); + String message = "Failed to validate parameters for job of type " + theJobDefinition.getJobDefinitionId() + + ": " + errorStrings.stream().map(t -> "\n * " + t).collect(Collectors.joining()); throw new InvalidRequestException(Msg.code(2039) + message); } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobQuerySvc.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobQuerySvc.java index 58f8b065573..d5507d66b22 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobQuerySvc.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobQuerySvc.java @@ -22,8 +22,8 @@ package ca.uhn.fhir.batch2.coordinator; import ca.uhn.fhir.batch2.api.IJobPersistence; import ca.uhn.fhir.batch2.model.JobDefinition; import ca.uhn.fhir.batch2.model.JobInstance; -import ca.uhn.fhir.batch2.models.JobInstanceFetchRequest; import ca.uhn.fhir.batch2.model.StatusEnum; +import ca.uhn.fhir.batch2.models.JobInstanceFetchRequest; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.IModelJson; import ca.uhn.fhir.model.api.annotation.PasswordField; @@ -34,12 +34,12 @@ import ca.uhn.fhir.util.UrlUtil; import com.fasterxml.jackson.annotation.JsonProperty; import org.springframework.data.domain.Page; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.lang.reflect.Field; import java.util.List; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Job Query services intended for end-users querying the status of jobs @@ -55,15 +55,17 @@ class 
JobQuerySvc { @Nonnull JobInstance fetchInstance(String theInstanceId) { - return myJobPersistence.fetchInstance(theInstanceId) - .map(this::massageInstanceForUserAccess) - .orElseThrow(() -> new ResourceNotFoundException(Msg.code(2040) + "Unknown instance ID: " + UrlUtil.escapeUrlParam(theInstanceId) + ". Please check if the input job ID is valid.")); + return myJobPersistence + .fetchInstance(theInstanceId) + .map(this::massageInstanceForUserAccess) + .orElseThrow(() -> new ResourceNotFoundException(Msg.code(2040) + "Unknown instance ID: " + + UrlUtil.escapeUrlParam(theInstanceId) + ". Please check if the input job ID is valid.")); } List fetchInstances(int thePageSize, int thePageIndex) { return myJobPersistence.fetchInstances(thePageSize, thePageIndex).stream() - .map(this::massageInstanceForUserAccess) - .collect(Collectors.toList()); + .map(this::massageInstanceForUserAccess) + .collect(Collectors.toList()); } public Page fetchAllInstances(JobInstanceFetchRequest theFetchRequest) { @@ -76,8 +78,8 @@ class JobQuerySvc { private List massageInstancesForUserAccess(List theFetchRecentInstances) { return theFetchRecentInstances.stream() - .map(this::massageInstanceForUserAccess) - .collect(Collectors.toList()); + .map(this::massageInstanceForUserAccess) + .collect(Collectors.toList()); } private JobInstance massageInstanceForUserAccess(JobInstance theInstance) { @@ -121,21 +123,24 @@ class JobQuerySvc { } } - public List getInstancesByJobDefinitionIdAndEndedStatus(String theJobDefinitionId, @Nullable Boolean theEnded, int theCount, int theStart) { - if (theEnded == null) { - return myJobPersistence.fetchInstancesByJobDefinitionId(theJobDefinitionId, theCount, theStart); - } + public List getInstancesByJobDefinitionIdAndEndedStatus( + String theJobDefinitionId, @Nullable Boolean theEnded, int theCount, int theStart) { + if (theEnded == null) { + return myJobPersistence.fetchInstancesByJobDefinitionId(theJobDefinitionId, theCount, theStart); + } - Set requestedStatus; - if (theEnded) { - requestedStatus = StatusEnum.getEndedStatuses(); - } else { - requestedStatus = StatusEnum.getNotEndedStatuses(); - } + Set requestedStatus; + if (theEnded) { + requestedStatus = StatusEnum.getEndedStatuses(); + } else { + requestedStatus = StatusEnum.getNotEndedStatuses(); + } return getInstancesByJobDefinitionAndStatuses(theJobDefinitionId, requestedStatus, theCount, theStart); - } + } - public List getInstancesByJobDefinitionAndStatuses(String theJobDefinitionId, Set theStatuses, int theCount, int theStart) { - return myJobPersistence.fetchInstancesByJobDefinitionIdAndStatus(theJobDefinitionId, theStatuses, theCount, theStart); - } + public List getInstancesByJobDefinitionAndStatuses( + String theJobDefinitionId, Set theStatuses, int theCount, int theStart) { + return myJobPersistence.fetchInstancesByJobDefinitionIdAndStatus( + theJobDefinitionId, theStatuses, theCount, theStart); + } } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobStepExecutor.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobStepExecutor.java index 42055edc03d..1347d0d4dd2 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobStepExecutor.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobStepExecutor.java @@ -31,8 +31,8 @@ import ca.uhn.fhir.model.api.IModelJson; import ca.uhn.fhir.util.Logs; import org.slf4j.Logger; -import javax.annotation.Nonnull; import java.util.Date; +import 
javax.annotation.Nonnull; public class JobStepExecutor { private static final Logger ourLog = Logs.getBatchTroubleshootingLog(); @@ -48,13 +48,14 @@ public class JobStepExecutor myCursor; - JobStepExecutor(@Nonnull IJobPersistence theJobPersistence, - @Nonnull JobInstance theInstance, - WorkChunk theWorkChunk, - @Nonnull JobWorkCursor theCursor, - @Nonnull WorkChunkProcessor theExecutor, - @Nonnull IJobMaintenanceService theJobMaintenanceService, - @Nonnull JobDefinitionRegistry theJobDefinitionRegistry) { + JobStepExecutor( + @Nonnull IJobPersistence theJobPersistence, + @Nonnull JobInstance theInstance, + WorkChunk theWorkChunk, + @Nonnull JobWorkCursor theCursor, + @Nonnull WorkChunkProcessor theExecutor, + @Nonnull IJobMaintenanceService theJobMaintenanceService, + @Nonnull JobDefinitionRegistry theJobDefinitionRegistry) { myJobPersistence = theJobPersistence; myDefinition = theCursor.jobDefinition; myInstance = theInstance; @@ -67,19 +68,18 @@ public class JobStepExecutor stepExecutorOutput = myJobExecutorSvc.doExecution( - myCursor, - myInstance, - myWorkChunk - ); + JobStepExecutorOutput stepExecutorOutput = + myJobExecutorSvc.doExecution(myCursor, myInstance, myWorkChunk); if (!stepExecutorOutput.isSuccessful()) { return; } if (stepExecutorOutput.getDataSink().firstStepProducedNothing()) { - ourLog.info("First step of job myInstance {} produced no work chunks, marking as completed and setting end date", myInstanceId); - myJobPersistence.updateInstance(myInstance.getInstanceId(), instance->{ + ourLog.info( + "First step of job myInstance {} produced no work chunks, marking as completed and setting end date", + myInstanceId); + myJobPersistence.updateInstance(myInstance.getInstanceId(), instance -> { instance.setEndTime(new Date()); myJobInstanceStatusUpdater.updateInstanceStatus(instance, StatusEnum.COMPLETED); return true; @@ -94,19 +94,27 @@ public class JobStepExecutor theDataSink) { if (theDataSink.getWorkChunkCount() <= 1) { - ourLog.debug("Gated job {} step {} produced exactly one chunk: Triggering a maintenance pass.", myDefinition.getJobDefinitionId(), myCursor.currentStep.getStepId()); + ourLog.debug( + "Gated job {} step {} produced exactly one chunk: Triggering a maintenance pass.", + myDefinition.getJobDefinitionId(), + myCursor.currentStep.getStepId()); // wipmb 6.8 either delete fast-tracking, or narrow this call to just this instance and step - // This runs full maintenance for EVERY job as each chunk completes in a fast tracked job. That's a LOT of work. + // This runs full maintenance for EVERY job as each chunk completes in a fast tracked job. That's a LOT of + // work. 
boolean success = myJobMaintenanceService.triggerMaintenancePass(); if (!success) { - myJobPersistence.updateInstance(myInstance.getInstanceId(), instance-> { + myJobPersistence.updateInstance(myInstance.getInstanceId(), instance -> { instance.setFastTracking(false); return true; }); } } else { - ourLog.debug("Gated job {} step {} produced {} chunks: Disabling fast tracking.", myDefinition.getJobDefinitionId(), myCursor.currentStep.getStepId(), theDataSink.getWorkChunkCount()); - myJobPersistence.updateInstance(myInstance.getInstanceId(), instance-> { + ourLog.debug( + "Gated job {} step {} produced {} chunks: Disabling fast tracking.", + myDefinition.getJobDefinitionId(), + myCursor.currentStep.getStepId(), + theDataSink.getWorkChunkCount()); + myJobPersistence.updateInstance(myInstance.getInstanceId(), instance -> { instance.setFastTracking(false); return true; }); diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobStepExecutorFactory.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobStepExecutorFactory.java index 69ddf12677b..867436cbee0 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobStepExecutorFactory.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/JobStepExecutorFactory.java @@ -36,11 +36,12 @@ public class JobStepExecutorFactory { private final IJobMaintenanceService myJobMaintenanceService; private final JobDefinitionRegistry myJobDefinitionRegistry; - public JobStepExecutorFactory(@Nonnull IJobPersistence theJobPersistence, - @Nonnull BatchJobSender theBatchJobSender, - @Nonnull WorkChunkProcessor theExecutorSvc, - @Nonnull IJobMaintenanceService theJobMaintenanceService, - @Nonnull JobDefinitionRegistry theJobDefinitionRegistry) { + public JobStepExecutorFactory( + @Nonnull IJobPersistence theJobPersistence, + @Nonnull BatchJobSender theBatchJobSender, + @Nonnull WorkChunkProcessor theExecutorSvc, + @Nonnull IJobMaintenanceService theJobMaintenanceService, + @Nonnull JobDefinitionRegistry theJobDefinitionRegistry) { myJobPersistence = theJobPersistence; myBatchJobSender = theBatchJobSender; myJobStepExecutorSvc = theExecutorSvc; @@ -48,7 +49,18 @@ public class JobStepExecutorFactory { myJobDefinitionRegistry = theJobDefinitionRegistry; } - public JobStepExecutor newJobStepExecutor(@Nonnull JobInstance theInstance, WorkChunk theWorkChunk, @Nonnull JobWorkCursor theCursor) { - return new JobStepExecutor<>(myJobPersistence, theInstance, theWorkChunk, theCursor, myJobStepExecutorSvc, myJobMaintenanceService, myJobDefinitionRegistry); + public + JobStepExecutor newJobStepExecutor( + @Nonnull JobInstance theInstance, + WorkChunk theWorkChunk, + @Nonnull JobWorkCursor theCursor) { + return new JobStepExecutor<>( + myJobPersistence, + theInstance, + theWorkChunk, + theCursor, + myJobStepExecutorSvc, + myJobMaintenanceService, + myJobDefinitionRegistry); } } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepChunkProcessingResponse.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepChunkProcessingResponse.java index 4b8f5aef992..05603c392c9 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepChunkProcessingResponse.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepChunkProcessingResponse.java @@ -31,7 +31,7 @@ public class ReductionStepChunkProcessingResponse { private List 
myFailedChunksIds; private boolean myIsSuccessful; - public ReductionStepChunkProcessingResponse(boolean theDefaultSuccessValue){ + public ReductionStepChunkProcessingResponse(boolean theDefaultSuccessValue) { mySuccessfulChunkIds = new ArrayList<>(); myFailedChunksIds = new ArrayList<>(); myIsSuccessful = theDefaultSuccessValue; @@ -41,11 +41,11 @@ public class ReductionStepChunkProcessingResponse { return mySuccessfulChunkIds; } - public boolean hasSuccessfulChunksIds(){ + public boolean hasSuccessfulChunksIds() { return !CollectionUtils.isEmpty(mySuccessfulChunkIds); } - public void addSuccessfulChunkId(WorkChunk theWorkChunk){ + public void addSuccessfulChunkId(WorkChunk theWorkChunk) { mySuccessfulChunkIds.add(theWorkChunk.getId()); } @@ -53,19 +53,19 @@ public class ReductionStepChunkProcessingResponse { return myFailedChunksIds; } - public boolean hasFailedChunkIds(){ + public boolean hasFailedChunkIds() { return !CollectionUtils.isEmpty(myFailedChunksIds); } - public void addFailedChunkId(WorkChunk theWorChunk){ + public void addFailedChunkId(WorkChunk theWorChunk) { myFailedChunksIds.add(theWorChunk.getId()); } - public boolean isSuccessful(){ + public boolean isSuccessful() { return myIsSuccessful; } - public void setSuccessful(boolean theSuccessValue){ + public void setSuccessful(boolean theSuccessValue) { myIsSuccessful = theSuccessValue; } } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepDataSink.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepDataSink.java index 639d8f9e4cb..faf5db40667 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepDataSink.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepDataSink.java @@ -37,16 +37,17 @@ import org.slf4j.Logger; import java.util.Date; public class ReductionStepDataSink - extends BaseDataSink { + extends BaseDataSink { private static final Logger ourLog = Logs.getBatchTroubleshootingLog(); private final IJobPersistence myJobPersistence; private final JobDefinitionRegistry myJobDefinitionRegistry; - public ReductionStepDataSink(String theInstanceId, - JobWorkCursor theJobWorkCursor, - IJobPersistence thePersistence, - JobDefinitionRegistry theJobDefinitionRegistry) { + public ReductionStepDataSink( + String theInstanceId, + JobWorkCursor theJobWorkCursor, + IJobPersistence thePersistence, + JobDefinitionRegistry theJobDefinitionRegistry) { super(theInstanceId, theJobWorkCursor); myJobPersistence = thePersistence; myJobDefinitionRegistry = theJobDefinitionRegistry; @@ -58,13 +59,16 @@ public class ReductionStepDataSink { Validate.validState( - StatusEnum.FINALIZE.equals(instance.getStatus()), - "Job %s must be in FINALIZE state. In %s", instanceId, instance.getStatus()); + StatusEnum.FINALIZE.equals(instance.getStatus()), + "Job %s must be in FINALIZE state. 
In %s", + instanceId, + instance.getStatus()); if (instance.getReport() != null) { // last in wins - so we won't throw @@ -92,10 +96,11 @@ public class ReductionStepDataSink JsonUtil.serialize(instance)) - .log("New instance state: {}"); + ourLog.info( + "Finalizing job instance {} with report length {} chars", + instance.getInstanceId(), + dataString.length()); + ourLog.atTrace().addArgument(() -> JsonUtil.serialize(instance)).log("New instance state: {}"); return true; }); diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepExecutorServiceImpl.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepExecutorServiceImpl.java index e9b7cdbda77..21d00887907 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepExecutorServiceImpl.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/ReductionStepExecutorServiceImpl.java @@ -49,7 +49,6 @@ import org.springframework.context.event.EventListener; import org.springframework.scheduling.concurrent.CustomizableThreadFactory; import org.springframework.transaction.annotation.Propagation; -import javax.annotation.Nonnull; import java.util.Collections; import java.util.EnumSet; import java.util.LinkedHashMap; @@ -62,6 +61,7 @@ import java.util.concurrent.Executors; import java.util.concurrent.Semaphore; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Stream; +import javax.annotation.Nonnull; import static ca.uhn.fhir.batch2.model.StatusEnum.ERRORED; import static ca.uhn.fhir.batch2.model.StatusEnum.FINALIZE; @@ -70,7 +70,8 @@ import static ca.uhn.fhir.batch2.model.StatusEnum.IN_PROGRESS; public class ReductionStepExecutorServiceImpl implements IReductionStepExecutorService, IHasScheduledJobs { public static final String SCHEDULED_JOB_ID = ReductionStepExecutorScheduledJob.class.getName(); private static final Logger ourLog = LoggerFactory.getLogger(ReductionStepExecutorServiceImpl.class); - private final Map myInstanceIdToJobWorkCursor = Collections.synchronizedMap(new LinkedHashMap<>()); + private final Map myInstanceIdToJobWorkCursor = + Collections.synchronizedMap(new LinkedHashMap<>()); private final ExecutorService myReducerExecutor; private final IJobPersistence myJobPersistence; private final IHapiTransactionService myTransactionService; @@ -79,11 +80,13 @@ public class ReductionStepExecutorServiceImpl implements IReductionStepExecutorS private final JobDefinitionRegistry myJobDefinitionRegistry; private Timer myHeartbeatTimer; - /** * Constructor */ - public ReductionStepExecutorServiceImpl(IJobPersistence theJobPersistence, IHapiTransactionService theTransactionService, JobDefinitionRegistry theJobDefinitionRegistry) { + public ReductionStepExecutorServiceImpl( + IJobPersistence theJobPersistence, + IHapiTransactionService theTransactionService, + JobDefinitionRegistry theJobDefinitionRegistry) { myJobPersistence = theJobPersistence; myTransactionService = theTransactionService; myJobDefinitionRegistry = theJobDefinitionRegistry; @@ -91,12 +94,12 @@ public class ReductionStepExecutorServiceImpl implements IReductionStepExecutorS myReducerExecutor = Executors.newSingleThreadExecutor(new CustomizableThreadFactory("batch2-reducer")); } - @EventListener(ContextRefreshedEvent.class) public void start() { if (myHeartbeatTimer == null) { myHeartbeatTimer = new Timer("batch2-reducer-heartbeat"); - myHeartbeatTimer.schedule(new HeartbeatTimerTask(), DateUtils.MILLIS_PER_MINUTE, 
DateUtils.MILLIS_PER_MINUTE); + myHeartbeatTimer.schedule( + new HeartbeatTimerTask(), DateUtils.MILLIS_PER_MINUTE, DateUtils.MILLIS_PER_MINUTE); } } @@ -119,7 +122,6 @@ public class ReductionStepExecutorServiceImpl implements IReductionStepExecutorS } } - @Override public void triggerReductionStep(String theInstanceId, JobWorkCursor theJobWorkCursor) { myInstanceIdToJobWorkCursor.putIfAbsent(theInstanceId, theJobWorkCursor); @@ -154,24 +156,30 @@ public class ReductionStepExecutorServiceImpl implements IReductionStepExecutorS } @VisibleForTesting - ReductionStepChunkProcessingResponse executeReductionStep(String theInstanceId, JobWorkCursor theJobWorkCursor) { + + ReductionStepChunkProcessingResponse executeReductionStep( + String theInstanceId, JobWorkCursor theJobWorkCursor) { JobDefinitionStep step = theJobWorkCursor.getCurrentStep(); // wipmb For 6.8 - this runs four tx. That's at least 2 too many // combine the fetch and the case statement. Use optional for the boolean. - JobInstance instance = executeInTransactionWithSynchronization(() -> - myJobPersistence.fetchInstance(theInstanceId).orElseThrow(() -> new InternalErrorException("Unknown instance: " + theInstanceId))); + JobInstance instance = executeInTransactionWithSynchronization(() -> myJobPersistence + .fetchInstance(theInstanceId) + .orElseThrow(() -> new InternalErrorException("Unknown instance: " + theInstanceId))); boolean shouldProceed = false; switch (instance.getStatus()) { case IN_PROGRESS: case ERRORED: // this will take a write lock on the JobInstance, preventing duplicates. - boolean changed = executeInTransactionWithSynchronization(() -> - myJobPersistence.markInstanceAsStatusWhenStatusIn(instance.getInstanceId(), FINALIZE, EnumSet.of(IN_PROGRESS, ERRORED))); + boolean changed = + executeInTransactionWithSynchronization(() -> myJobPersistence.markInstanceAsStatusWhenStatusIn( + instance.getInstanceId(), FINALIZE, EnumSet.of(IN_PROGRESS, ERRORED))); if (changed) { - ourLog.info("Job instance {} has been set to FINALIZE state - Beginning reducer step", instance.getInstanceId()); + ourLog.info( + "Job instance {} has been set to FINALIZE state - Beginning reducer step", + instance.getInstanceId()); shouldProceed = true; } break; @@ -185,17 +193,18 @@ public class ReductionStepExecutorServiceImpl implements IReductionStepExecutorS if (!shouldProceed) { ourLog.warn( - "JobInstance[{}] should not be finalized at this time. In memory status is {}. Reduction step will not rerun!" - + " This could be a long running reduction job resulting in the processed msg not being acknowledged," - + " or the result of a failed process or server restarting.", - instance.getInstanceId(), - instance.getStatus() - ); + "JobInstance[{}] should not be finalized at this time. In memory status is {}. Reduction step will not rerun!" 
+ + " This could be a long running reduction job resulting in the processed msg not being acknowledged," + + " or the result of a failed process or server restarting.", + instance.getInstanceId(), + instance.getStatus()); return new ReductionStepChunkProcessingResponse(false); } - PT parameters = instance.getParameters(theJobWorkCursor.getJobDefinition().getParametersType()); - IReductionStepWorker reductionStepWorker = (IReductionStepWorker) step.getJobStepWorker(); + PT parameters = + instance.getParameters(theJobWorkCursor.getJobDefinition().getParametersType()); + IReductionStepWorker reductionStepWorker = + (IReductionStepWorker) step.getJobStepWorker(); instance.setStatus(FINALIZE); @@ -204,19 +213,26 @@ public class ReductionStepExecutorServiceImpl implements IReductionStepExecutorS try { executeInTransactionWithSynchronization(() -> { - try (Stream chunkIterator = myJobPersistence.fetchAllWorkChunksForStepStream(instance.getInstanceId(), step.getStepId())) { + try (Stream chunkIterator = + myJobPersistence.fetchAllWorkChunksForStepStream(instance.getInstanceId(), step.getStepId())) { chunkIterator.forEach(chunk -> - processChunk(chunk, instance, parameters, reductionStepWorker, response, theJobWorkCursor)); + processChunk(chunk, instance, parameters, reductionStepWorker, response, theJobWorkCursor)); } return null; }); } finally { executeInTransactionWithSynchronization(() -> { - ourLog.info("Reduction step for instance[{}] produced {} successful and {} failed chunks", instance.getInstanceId(), response.getSuccessfulChunkIds().size(), response.getFailedChunksIds().size()); + ourLog.info( + "Reduction step for instance[{}] produced {} successful and {} failed chunks", + instance.getInstanceId(), + response.getSuccessfulChunkIds().size(), + response.getFailedChunksIds().size()); - ReductionStepDataSink dataSink = new ReductionStepDataSink<>(instance.getInstanceId(), theJobWorkCursor, myJobPersistence, myJobDefinitionRegistry); - StepExecutionDetails chunkDetails = new StepExecutionDetails<>(parameters, null, instance, "REDUCTION"); + ReductionStepDataSink dataSink = new ReductionStepDataSink<>( + instance.getInstanceId(), theJobWorkCursor, myJobPersistence, myJobDefinitionRegistry); + StepExecutionDetails chunkDetails = + new StepExecutionDetails<>(parameters, null, instance, "REDUCTION"); if (response.isSuccessful()) { reductionStepWorker.run(chunkDetails, dataSink); @@ -224,23 +240,24 @@ public class ReductionStepExecutorServiceImpl implements IReductionStepExecutorS if (response.hasSuccessfulChunksIds()) { // complete the steps without making a new work chunk - myJobPersistence.markWorkChunksWithStatusAndWipeData(instance.getInstanceId(), - response.getSuccessfulChunkIds(), - WorkChunkStatusEnum.COMPLETED, - null // error message - none - ); + myJobPersistence.markWorkChunksWithStatusAndWipeData( + instance.getInstanceId(), + response.getSuccessfulChunkIds(), + WorkChunkStatusEnum.COMPLETED, + null // error message - none + ); } if (response.hasFailedChunkIds()) { // mark any failed chunks as failed for aborting - myJobPersistence.markWorkChunksWithStatusAndWipeData(instance.getInstanceId(), - response.getFailedChunksIds(), - WorkChunkStatusEnum.FAILED, - "JOB ABORTED"); + myJobPersistence.markWorkChunksWithStatusAndWipeData( + instance.getInstanceId(), + response.getFailedChunksIds(), + WorkChunkStatusEnum.FAILED, + "JOB ABORTED"); } return null; }); - } // if no successful chunks, return false @@ -253,12 +270,11 @@ public class ReductionStepExecutorServiceImpl implements 
IReductionStepExecutorS private T executeInTransactionWithSynchronization(Callable runnable) { return myTransactionService - .withRequest(null) - .withPropagation(Propagation.REQUIRES_NEW) - .execute(runnable); + .withRequest(null) + .withPropagation(Propagation.REQUIRES_NEW) + .execute(runnable); } - @Override public void scheduleJobs(ISchedulerService theSchedulerService) { theSchedulerService.scheduleClusteredJob(10 * DateUtils.MILLIS_PER_SECOND, buildJobDefinition()); @@ -272,17 +288,21 @@ public class ReductionStepExecutorServiceImpl implements IReductionStepExecutorS return jobDefinition; } - private - void processChunk(WorkChunk theChunk, - JobInstance theInstance, - PT theParameters, - IReductionStepWorker theReductionStepWorker, - ReductionStepChunkProcessingResponse theResponseObject, - JobWorkCursor theJobWorkCursor) { + private void processChunk( + WorkChunk theChunk, + JobInstance theInstance, + PT theParameters, + IReductionStepWorker theReductionStepWorker, + ReductionStepChunkProcessingResponse theResponseObject, + JobWorkCursor theJobWorkCursor) { if (!theChunk.getStatus().isIncomplete()) { // This should never happen since jobs with reduction are required to be gated - ourLog.error("Unexpected chunk {} with status {} found while reducing {}. No chunks feeding into a reduction step should be complete.", theChunk.getId(), theChunk.getStatus(), theInstance); + ourLog.error( + "Unexpected chunk {} with status {} found while reducing {}. No chunks feeding into a reduction step should be complete.", + theChunk.getId(), + theChunk.getStatus(), + theInstance); return; } @@ -294,8 +314,10 @@ public class ReductionStepExecutorServiceImpl implements IReductionStepExecutorS // feed them into our reduction worker // this is the most likely area to throw, // as this is where db actions and processing is likely to happen - IT chunkData = theChunk.getData(theJobWorkCursor.getCurrentStep().getInputType()); - ChunkExecutionDetails chunkDetails = new ChunkExecutionDetails<>(chunkData, theParameters, theInstance.getInstanceId(), theChunk.getId()); + IT chunkData = + theChunk.getData(theJobWorkCursor.getCurrentStep().getInputType()); + ChunkExecutionDetails chunkDetails = new ChunkExecutionDetails<>( + chunkData, theParameters, theInstance.getInstanceId(), theChunk.getId()); ChunkOutcome outcome = theReductionStepWorker.consume(chunkDetails); @@ -314,10 +336,8 @@ public class ReductionStepExecutorServiceImpl implements IReductionStepExecutorS } } catch (Exception e) { String msg = String.format( - "Reduction step failed to execute chunk reduction for chunk %s with exception: %s.", - theChunk.getId(), - e.getMessage() - ); + "Reduction step failed to execute chunk reduction for chunk %s with exception: %s.", + theChunk.getId(), e.getMessage()); // we got a failure in a reduction ourLog.error(msg, e); theResponseObject.setSuccessful(false); @@ -343,6 +363,4 @@ public class ReductionStepExecutorServiceImpl implements IReductionStepExecutorS myTarget.reducerPass(); } } - - } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/StepExecutor.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/StepExecutor.java index 385d9b0cb79..3d7df4e9978 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/StepExecutor.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/StepExecutor.java @@ -46,10 +46,9 @@ public class StepExecutor { * Calls the worker execution step, and performs error handling 
logic for jobs that failed. */ boolean executeStep( - StepExecutionDetails theStepExecutionDetails, - IJobStepWorker theStepWorker, - BaseDataSink theDataSink - ) { + StepExecutionDetails theStepExecutionDetails, + IJobStepWorker theStepWorker, + BaseDataSink theDataSink) { String jobDefinitionId = theDataSink.getJobDefinitionId(); String targetStepId = theDataSink.getTargetStep().getStepId(); String chunkId = theStepExecutionDetails.getChunkId(); @@ -59,26 +58,37 @@ public class StepExecutor { outcome = theStepWorker.run(theStepExecutionDetails, theDataSink); Validate.notNull(outcome, "Step theWorker returned null: %s", theStepWorker.getClass()); } catch (JobExecutionFailedException e) { - ourLog.error("Unrecoverable failure executing job {} step {} chunk {}", - jobDefinitionId, - targetStepId, - chunkId, - e); + ourLog.error( + "Unrecoverable failure executing job {} step {} chunk {}", + jobDefinitionId, + targetStepId, + chunkId, + e); if (theStepExecutionDetails.hasAssociatedWorkChunk()) { myJobPersistence.onWorkChunkFailed(chunkId, e.toString()); } return false; } catch (Exception e) { if (theStepExecutionDetails.hasAssociatedWorkChunk()) { - ourLog.info("Temporary problem executing job {} step {}, marking chunk {} as retriable ERRORED", jobDefinitionId, targetStepId, chunkId); + ourLog.info( + "Temporary problem executing job {} step {}, marking chunk {} as retriable ERRORED", + jobDefinitionId, + targetStepId, + chunkId); WorkChunkErrorEvent parameters = new WorkChunkErrorEvent(chunkId, e.getMessage()); WorkChunkStatusEnum newStatus = myJobPersistence.onWorkChunkError(parameters); if (newStatus == WorkChunkStatusEnum.FAILED) { - ourLog.error("Exhausted retries: Failure executing job {} step {}, marking chunk {} as ERRORED", jobDefinitionId, targetStepId, chunkId, e); + ourLog.error( + "Exhausted retries: Failure executing job {} step {}, marking chunk {} as ERRORED", + jobDefinitionId, + targetStepId, + chunkId, + e); return false; } } else { - ourLog.error("Failure executing job {} step {}, no associated work chunk", jobDefinitionId, targetStepId, e); + ourLog.error( + "Failure executing job {} step {}, no associated work chunk", jobDefinitionId, targetStepId, e); } throw new JobStepFailedException(Msg.code(2041) + e.getMessage(), e); } catch (Throwable t) { @@ -92,7 +102,8 @@ public class StepExecutor { if (theStepExecutionDetails.hasAssociatedWorkChunk()) { int recordsProcessed = outcome.getRecordsProcessed(); int recoveredErrorCount = theDataSink.getRecoveredErrorCount(); - WorkChunkCompletionEvent event = new WorkChunkCompletionEvent(chunkId, recordsProcessed, recoveredErrorCount, theDataSink.getRecoveredWarning()); + WorkChunkCompletionEvent event = new WorkChunkCompletionEvent( + chunkId, recordsProcessed, recoveredErrorCount, theDataSink.getRecoveredWarning()); myJobPersistence.onWorkChunkCompletion(event); } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/WorkChannelMessageHandler.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/WorkChannelMessageHandler.java index 075560b93ff..21d5f2cba87 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/WorkChannelMessageHandler.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/WorkChannelMessageHandler.java @@ -35,9 +35,9 @@ import org.springframework.messaging.Message; import org.springframework.messaging.MessageHandler; import org.springframework.messaging.MessagingException; -import javax.annotation.Nonnull; 
import java.util.Optional; import java.util.function.Supplier; +import javax.annotation.Nonnull; /** * This handler receives batch work request messages and performs the batch work requested by the message @@ -49,16 +49,22 @@ class WorkChannelMessageHandler implements MessageHandler { private final JobStepExecutorFactory myJobStepExecutorFactory; private final IHapiTransactionService myHapiTransactionService; - WorkChannelMessageHandler(@Nonnull IJobPersistence theJobPersistence, - @Nonnull JobDefinitionRegistry theJobDefinitionRegistry, - @Nonnull BatchJobSender theBatchJobSender, - @Nonnull WorkChunkProcessor theExecutorSvc, - @Nonnull IJobMaintenanceService theJobMaintenanceService, - IHapiTransactionService theHapiTransactionService) { + WorkChannelMessageHandler( + @Nonnull IJobPersistence theJobPersistence, + @Nonnull JobDefinitionRegistry theJobDefinitionRegistry, + @Nonnull BatchJobSender theBatchJobSender, + @Nonnull WorkChunkProcessor theExecutorSvc, + @Nonnull IJobMaintenanceService theJobMaintenanceService, + IHapiTransactionService theHapiTransactionService) { myJobPersistence = theJobPersistence; myJobDefinitionRegistry = theJobDefinitionRegistry; myHapiTransactionService = theHapiTransactionService; - myJobStepExecutorFactory = new JobStepExecutorFactory(theJobPersistence, theBatchJobSender, theExecutorSvc, theJobMaintenanceService, theJobDefinitionRegistry); + myJobStepExecutorFactory = new JobStepExecutorFactory( + theJobPersistence, + theBatchJobSender, + theExecutorSvc, + theJobMaintenanceService, + theJobDefinitionRegistry); } @Override @@ -101,7 +107,8 @@ class WorkChannelMessageHandler implements MessageHandler { // Do not catch this exception - that will discard this chunk. // Failing to load a job definition probably means this is an old process during upgrade. // Retry those until this node is killed/restarted. - myJobDefinition = myJobDefinitionRegistry.getJobDefinitionOrThrowException(jobDefinitionId, jobDefinitionVersion); + myJobDefinition = + myJobDefinitionRegistry.getJobDefinitionOrThrowException(jobDefinitionId, jobDefinitionVersion); return Optional.of(this); } @@ -109,39 +116,52 @@ class WorkChannelMessageHandler implements MessageHandler { * Fetch the job instance including the job definition. */ Optional loadJobInstance() { - return myJobPersistence.fetchInstance(myWorkNotification.getInstanceId()) - .or(()->{ - ourLog.error("No instance {} exists for chunk notification {}", myWorkNotification.getInstanceId(), myWorkNotification); - return Optional.empty(); - }) - .map(instance->{ - myJobInstance = instance; - instance.setJobDefinition(myJobDefinition); - return this; - }); + return myJobPersistence + .fetchInstance(myWorkNotification.getInstanceId()) + .or(() -> { + ourLog.error( + "No instance {} exists for chunk notification {}", + myWorkNotification.getInstanceId(), + myWorkNotification); + return Optional.empty(); + }) + .map(instance -> { + myJobInstance = instance; + instance.setJobDefinition(myJobDefinition); + return this; + }); } /** * Load the chunk, and mark it as dequeued. */ Optional updateChunkStatusAndValidate() { - return myJobPersistence.onWorkChunkDequeue(myChunkId) - .or(()->{ - ourLog.error("Unable to find chunk with ID {} - Aborting. {}", myChunkId, myWorkNotification); - return Optional.empty(); - }) - .map(chunk->{ - myWorkChunk = chunk; - ourLog.debug("Worker picked up chunk. 
[chunkId={}, stepId={}, startTime={}]", myChunkId, myWorkChunk.getTargetStepId(), myWorkChunk.getStartTime()); - return this; - }); + return myJobPersistence + .onWorkChunkDequeue(myChunkId) + .or(() -> { + ourLog.error("Unable to find chunk with ID {} - Aborting. {}", myChunkId, myWorkNotification); + return Optional.empty(); + }) + .map(chunk -> { + myWorkChunk = chunk; + ourLog.debug( + "Worker picked up chunk. [chunkId={}, stepId={}, startTime={}]", + myChunkId, + myWorkChunk.getTargetStepId(), + myWorkChunk.getStartTime()); + return this; + }); } /** * Move QUEUED jobs to IN_PROGRESS, and make sure we are not already in final state. */ Optional updateAndValidateJobStatus() { - ourLog.trace("Check status {} of job {} for chunk {}", myJobInstance.getStatus(), myJobInstance.getInstanceId(), myChunkId); + ourLog.trace( + "Check status {} of job {} for chunk {}", + myJobInstance.getStatus(), + myJobInstance.getInstanceId(), + myChunkId); switch (myJobInstance.getStatus()) { case QUEUED: // Update the job as started. @@ -156,7 +176,10 @@ class WorkChannelMessageHandler implements MessageHandler { case COMPLETED: // this is an error, but we can't do much about it. - ourLog.error("Received chunk {}, but job instance is {}. Skipping.", myChunkId, myJobInstance.getStatus()); + ourLog.error( + "Received chunk {}, but job instance is {}. Skipping.", + myChunkId, + myJobInstance.getStatus()); return Optional.empty(); case CANCELLED: @@ -166,24 +189,29 @@ class WorkChannelMessageHandler implements MessageHandler { ourLog.info("Skipping chunk {} because job instance is {}", myChunkId, myJobInstance.getStatus()); return Optional.empty(); } - + return Optional.of(this); } - Optional buildCursor() { - myCursor = JobWorkCursor.fromJobDefinitionAndRequestedStepId(myJobDefinition, myWorkNotification.getTargetStepId()); + myCursor = JobWorkCursor.fromJobDefinitionAndRequestedStepId( + myJobDefinition, myWorkNotification.getTargetStepId()); if (!myWorkChunk.getTargetStepId().equals(myCursor.getCurrentStepId())) { - ourLog.error("Chunk {} has target step {} but expected {}", myChunkId, myWorkChunk.getTargetStepId(), myCursor.getCurrentStepId()); + ourLog.error( + "Chunk {} has target step {} but expected {}", + myChunkId, + myWorkChunk.getTargetStepId(), + myCursor.getCurrentStepId()); return Optional.empty(); } return Optional.of(this); } public Optional buildStepExecutor() { - this.myStepExector = myJobStepExecutorFactory.newJobStepExecutor(this.myJobInstance, this.myWorkChunk, this.myCursor); + this.myStepExector = + myJobStepExecutorFactory.newJobStepExecutor(this.myJobInstance, this.myWorkChunk, this.myCursor); return Optional.of(this); } @@ -197,54 +225,52 @@ class WorkChannelMessageHandler implements MessageHandler { // 1. Normal execution. We validate, load, update statuses, all in a tx. Then we process the chunk. // 2. Discard chunk. If some validation fails (e.g. no chunk with that id), we log and discard the chunk. // Probably a db rollback, with a stale queue. - // 3. Fail and retry. If we throw an exception out of here, Spring will put the queue message back, and redeliver later. + // 3. Fail and retry. If we throw an exception out of here, Spring will put the queue message back, and + // redeliver later. // // We use Optional chaining here to simplify all the cases where we short-circuit exit. // A step that returns an empty Optional means discard the chunk. // - executeInTxRollbackWhenEmpty(() -> ( - // Use a chain of Optional flatMap to handle all the setup short-circuit exits cleanly. 
- Optional.of(new MessageProcess(workNotification)) - // validate and load info - .flatMap(MessageProcess::validateChunkId) - // no job definition should be retried - we must be a stale process encountering a new job definition. - .flatMap(MessageProcess::loadJobDefinitionOrThrow) - .flatMap(MessageProcess::loadJobInstance) - // update statuses now in the db: QUEUED->IN_PROGRESS - .flatMap(MessageProcess::updateChunkStatusAndValidate) - .flatMap(MessageProcess::updateAndValidateJobStatus) - // ready to execute - .flatMap(MessageProcess::buildCursor) - .flatMap(MessageProcess::buildStepExecutor) - )) - .ifPresentOrElse( - // all the setup is happy and committed. Do the work. - process -> process.myStepExector.executeStep(), - // discard the chunk - () -> ourLog.debug("Discarding chunk notification {}", workNotification) - ); - + executeInTxRollbackWhenEmpty(() -> + ( + // Use a chain of Optional flatMap to handle all the setup short-circuit exits cleanly. + Optional.of(new MessageProcess(workNotification)) + // validate and load info + .flatMap(MessageProcess::validateChunkId) + // no job definition should be retried - we must be a stale process encountering a new + // job definition. + .flatMap(MessageProcess::loadJobDefinitionOrThrow) + .flatMap(MessageProcess::loadJobInstance) + // update statuses now in the db: QUEUED->IN_PROGRESS + .flatMap(MessageProcess::updateChunkStatusAndValidate) + .flatMap(MessageProcess::updateAndValidateJobStatus) + // ready to execute + .flatMap(MessageProcess::buildCursor) + .flatMap(MessageProcess::buildStepExecutor))) + .ifPresentOrElse( + // all the setup is happy and committed. Do the work. + process -> process.myStepExector.executeStep(), + // discard the chunk + () -> ourLog.debug("Discarding chunk notification {}", workNotification)); } /** * Run theCallback in TX, rolling back if the supplied Optional is empty. */ Optional executeInTxRollbackWhenEmpty(Supplier> theCallback) { - return myHapiTransactionService.withSystemRequest() - .execute(theTransactionStatus -> { + return myHapiTransactionService.withSystemRequest().execute(theTransactionStatus -> { - // run the processing - Optional setupProcessing = theCallback.get(); + // run the processing + Optional setupProcessing = theCallback.get(); - if (setupProcessing.isEmpty()) { - // If any setup failed, roll back the chunk and instance status changes. - ourLog.debug("WorkChunk setup failed - rollback tx"); - theTransactionStatus.setRollbackOnly(); - } - // else COMMIT the work. + if (setupProcessing.isEmpty()) { + // If any setup failed, roll back the chunk and instance status changes. + ourLog.debug("WorkChunk setup failed - rollback tx"); + theTransactionStatus.setRollbackOnly(); + } + // else COMMIT the work. 
-			return setupProcessing;
-		});
+			return setupProcessing;
+		});
 	}
-
 }
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/WorkChunkProcessor.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/WorkChunkProcessor.java
index fb05234939f..0c5dd4ba7c9 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/WorkChunkProcessor.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/coordinator/WorkChunkProcessor.java
@@ -34,8 +34,8 @@ import ca.uhn.fhir.util.Logs;
 import org.apache.commons.lang3.Validate;
 import org.slf4j.Logger;
-import javax.annotation.Nullable;
 import java.util.Optional;
+import javax.annotation.Nullable;
 import static org.apache.commons.lang3.StringUtils.isBlank;
@@ -73,12 +73,9 @@ public class WorkChunkProcessor {
	 * @param - Step output parameters Type
	 * @return - JobStepExecution output. Contains the datasink and whether or not the execution had succeeded.
	 */
-	public JobStepExecutorOutput
-	doExecution(
-		JobWorkCursor theCursor,
-		JobInstance theInstance,
-		@Nullable WorkChunk theWorkChunk
-	) {
+	public
+			JobStepExecutorOutput doExecution(
+					JobWorkCursor theCursor, JobInstance theInstance, @Nullable WorkChunk theWorkChunk) {
 		JobDefinitionStep step = theCursor.getCurrentStep();
 		JobDefinition jobDefinition = theCursor.getJobDefinition();
 		String instanceId = theInstance.getInstanceId();
@@ -92,7 +89,8 @@ public class WorkChunkProcessor {
 			// all other kinds of steps
 			Validate.notNull(theWorkChunk);
-			Optional> stepExecutionDetailsOpt = getExecutionDetailsForNonReductionStep(theWorkChunk, theInstance, inputType, parameters);
+			Optional> stepExecutionDetailsOpt =
+					getExecutionDetailsForNonReductionStep(theWorkChunk, theInstance, inputType, parameters);
 			if (!stepExecutionDetailsOpt.isPresent()) {
 				return new JobStepExecutorOutput<>(false, dataSink);
 			}
@@ -110,18 +108,18 @@ public class WorkChunkProcessor {
	 * Get the correct datasink for the cursor/job provided.
*/ @SuppressWarnings("unchecked") - protected BaseDataSink getDataSink( - JobWorkCursor theCursor, - JobDefinition theJobDefinition, - String theInstanceId - ) { + protected + BaseDataSink getDataSink( + JobWorkCursor theCursor, JobDefinition theJobDefinition, String theInstanceId) { BaseDataSink dataSink; assert !theCursor.isReductionStep(); if (theCursor.isFinalStep()) { - dataSink = (BaseDataSink) new FinalStepDataSink<>(theJobDefinition.getJobDefinitionId(), theInstanceId, theCursor.asFinalCursor()); + dataSink = (BaseDataSink) new FinalStepDataSink<>( + theJobDefinition.getJobDefinitionId(), theInstanceId, theCursor.asFinalCursor()); } else { - dataSink = new JobDataSink<>(myBatchJobSender, myJobPersistence, theJobDefinition, theInstanceId, theCursor); + dataSink = + new JobDataSink<>(myBatchJobSender, myJobPersistence, theJobDefinition, theInstanceId, theCursor); } return dataSink; } @@ -129,17 +127,18 @@ public class WorkChunkProcessor { /** * Construct execution details for non-reduction step */ - private Optional> getExecutionDetailsForNonReductionStep( - WorkChunk theWorkChunk, - JobInstance theInstance, - Class theInputType, - PT theParameters - ) { + private + Optional> getExecutionDetailsForNonReductionStep( + WorkChunk theWorkChunk, JobInstance theInstance, Class theInputType, PT theParameters) { IT inputData = null; if (!theInputType.equals(VoidModel.class)) { if (isBlank(theWorkChunk.getData())) { - ourLog.info("Ignoring chunk[{}] for step[{}] in status[{}] because it has no data", theWorkChunk.getId(), theWorkChunk.getTargetStepId(), theWorkChunk.getStatus()); + ourLog.info( + "Ignoring chunk[{}] for step[{}] in status[{}] because it has no data", + theWorkChunk.getId(), + theWorkChunk.getTargetStepId(), + theWorkChunk.getStatus()); return Optional.empty(); } inputData = theWorkChunk.getData(theInputType); diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/ChunkRangeJson.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/ChunkRangeJson.java index c2b24b07497..ec9fdf1ec87 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/ChunkRangeJson.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/ChunkRangeJson.java @@ -19,15 +19,15 @@ */ package ca.uhn.fhir.batch2.jobs.chunk; +import ca.uhn.fhir.model.api.IModelJson; import ca.uhn.fhir.rest.server.util.JsonDateDeserializer; import ca.uhn.fhir.rest.server.util.JsonDateSerializer; -import ca.uhn.fhir.model.api.IModelJson; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonSerialize; -import javax.annotation.Nonnull; import java.util.Date; +import javax.annotation.Nonnull; public class ChunkRangeJson implements IModelJson { @JsonSerialize(using = JsonDateSerializer.class) diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/ResourceIdListWorkChunkJson.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/ResourceIdListWorkChunkJson.java index 5d1889ae48c..6fe36f32a4d 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/ResourceIdListWorkChunkJson.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/ResourceIdListWorkChunkJson.java @@ -37,6 +37,7 @@ public class ResourceIdListWorkChunkJson implements IModelJson { @JsonProperty("requestPartitionId") private RequestPartitionId myRequestPartitionId; + 
@JsonProperty("ids")
	private List myTypedPids;
@@ -50,7 +51,8 @@ public class ResourceIdListWorkChunkJson implements IModelJson {
	/**
	 * Constructor
	 */
-	public ResourceIdListWorkChunkJson(Collection theTypedPids, RequestPartitionId theRequestPartitionId) {
+	public ResourceIdListWorkChunkJson(
+			Collection theTypedPids, RequestPartitionId theRequestPartitionId) {
		this();
		getTypedPids().addAll(theTypedPids);
		myRequestPartitionId = theRequestPartitionId;
@@ -70,8 +72,8 @@ public class ResourceIdListWorkChunkJson implements IModelJson {
	@Override
	public String toString() {
		return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
-			.append("ids", myTypedPids)
-			.toString();
+				.append("ids", myTypedPids)
+				.toString();
	}
	public List getResourcePersistentIds(IIdHelperService theIdHelperService) {
@@ -79,13 +81,12 @@ public class ResourceIdListWorkChunkJson implements IModelJson {
			return Collections.emptyList();
		}
-		return myTypedPids
-			.stream()
-			.map(t -> {
-				T retval = theIdHelperService.newPidFromStringIdAndResourceName(t.getPid(), t.getResourceType());
-				return retval;
-			})
-			.collect(Collectors.toList());
+		return myTypedPids.stream()
+				.map(t -> {
+					T retval = theIdHelperService.newPidFromStringIdAndResourceName(t.getPid(), t.getResourceType());
+					return retval;
+				})
+				.collect(Collectors.toList());
	}
	public int size() {
@@ -99,5 +100,4 @@ public class ResourceIdListWorkChunkJson implements IModelJson {
	public String getResourceType(int index) {
		return getTypedPids().get(index).getResourceType();
	}
-
 }
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/TypedPidJson.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/TypedPidJson.java
index 745c2e81b3f..032631ce788 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/TypedPidJson.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/TypedPidJson.java
@@ -29,11 +29,11 @@ public class TypedPidJson implements IModelJson {
	@JsonProperty("type")
	private String myResourceType;
+
	@JsonProperty("id")
	private String myPid;
-	public TypedPidJson() {
-	}
+	public TypedPidJson() {}
	public TypedPidJson(String theResourceType, String theId) {
		myResourceType = theResourceType;
@@ -78,7 +78,10 @@ public class TypedPidJson implements IModelJson {
		TypedPidJson id = (TypedPidJson) theO;
-		return new EqualsBuilder().append(myResourceType, id.myResourceType).append(myPid, id.myPid).isEquals();
+		return new EqualsBuilder()
+				.append(myResourceType, id.myResourceType)
+				.append(myPid, id.myPid)
+				.isEquals();
	}
	@Override
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/IUrlListValidator.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/IUrlListValidator.java
index feefea8b5f5..9b04957a2b6 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/IUrlListValidator.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/IUrlListValidator.java
@@ -19,9 +19,9 @@
 */
 package ca.uhn.fhir.batch2.jobs.parameters;
+import java.util.List;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
-import java.util.List;
 public interface IUrlListValidator {
	@Nullable
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/PartitionedJobParameters.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/PartitionedJobParameters.java
index 4cbbfb2fae4..097daa08a49 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/PartitionedJobParameters.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/PartitionedJobParameters.java @@ -29,6 +29,7 @@ public class PartitionedJobParameters implements IModelJson { @JsonProperty(value = "partitionId") @Nullable private RequestPartitionId myRequestPartitionId; + @JsonProperty("batchSize") @Nullable private Integer myBatchSize; diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/PartitionedUrl.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/PartitionedUrl.java index ab56e339366..5504f50c4b2 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/PartitionedUrl.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/PartitionedUrl.java @@ -37,8 +37,11 @@ public class PartitionedUrl implements IModelJson { } @JsonProperty("url") - @Pattern(regexp = "^[A-Z][A-Za-z0-9]+\\?.*", message = "If populated, URL must be a search URL in the form '{resourceType}?[params]'") + @Pattern( + regexp = "^[A-Z][A-Za-z0-9]+\\?.*", + message = "If populated, URL must be a search URL in the form '{resourceType}?[params]'") String myUrl; + @JsonProperty("requestPartitionId") RequestPartitionId myRequestPartitionId; diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/PartitionedUrlListJobParameters.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/PartitionedUrlListJobParameters.java index 04cd5e4fb4d..b5de120c791 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/PartitionedUrlListJobParameters.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/PartitionedUrlListJobParameters.java @@ -19,14 +19,13 @@ */ package ca.uhn.fhir.batch2.jobs.parameters; -import ca.uhn.fhir.interceptor.model.RequestPartitionId; import com.fasterxml.jackson.annotation.JsonProperty; import org.apache.commons.lang3.Validate; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class PartitionedUrlListJobParameters extends PartitionedJobParameters { @JsonProperty("partitionedUrl") diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/UrlListValidator.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/UrlListValidator.java index 6fa99946f71..83dbf5d59e3 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/UrlListValidator.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/UrlListValidator.java @@ -21,11 +21,11 @@ package ca.uhn.fhir.batch2.jobs.parameters; import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.Collections; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class UrlListValidator implements IUrlListValidator { private final String myOperationName; @@ -36,22 +36,23 @@ public class UrlListValidator implements IUrlListValidator { myBatch2DaoSvc = theBatch2DaoSvc; } - @Nullable @Override public List validateUrls(@Nonnull List theUrls) { - if (theUrls.isEmpty()) { - if (!myBatch2DaoSvc.isAllResourceTypeSupported()) 
{ - return Collections.singletonList("At least one type-specific search URL must be provided for " + myOperationName + " on this server"); - } + if (theUrls.isEmpty()) { + if (!myBatch2DaoSvc.isAllResourceTypeSupported()) { + return Collections.singletonList("At least one type-specific search URL must be provided for " + + myOperationName + " on this server"); } + } return Collections.emptyList(); } @Nullable @Override public List validatePartitionedUrls(@Nonnull List thePartitionedUrls) { - List urls = thePartitionedUrls.stream().map(PartitionedUrl::getUrl).collect(Collectors.toList()); + List urls = + thePartitionedUrls.stream().map(PartitionedUrl::getUrl).collect(Collectors.toList()); return validateUrls(urls); } } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/UrlPartitioner.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/UrlPartitioner.java index 37d4abcaf76..1c5caff0bef 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/UrlPartitioner.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/parameters/UrlPartitioner.java @@ -32,12 +32,16 @@ public class UrlPartitioner { public UrlPartitioner(MatchUrlService theMatchUrlService, IRequestPartitionHelperSvc theRequestPartitionHelperSvc) { myMatchUrlService = theMatchUrlService; myRequestPartitionHelperSvc = theRequestPartitionHelperSvc; - } public PartitionedUrl partitionUrl(String theUrl, RequestDetails theRequestDetails) { ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(theUrl); - RequestPartitionId requestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequestDetails, resourceSearch.getResourceName(), resourceSearch.getSearchParameterMap(), null); + RequestPartitionId requestPartitionId = + myRequestPartitionHelperSvc.determineReadPartitionForRequestForSearchType( + theRequestDetails, + resourceSearch.getResourceName(), + resourceSearch.getSearchParameterMap(), + null); PartitionedUrl retval = new PartitionedUrl(); retval.setUrl(theUrl); retval.setRequestPartitionId(requestPartitionId); diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/GenerateRangeChunksStep.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/GenerateRangeChunksStep.java index 053bc9f18ec..23e590f6009 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/GenerateRangeChunksStep.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/GenerateRangeChunksStep.java @@ -31,17 +31,21 @@ import ca.uhn.fhir.batch2.jobs.parameters.PartitionedUrlListJobParameters; import ca.uhn.fhir.util.Logs; import org.slf4j.Logger; -import javax.annotation.Nonnull; import java.util.Date; +import javax.annotation.Nonnull; import static ca.uhn.fhir.batch2.util.Batch2Constants.BATCH_START_DATE; -public class GenerateRangeChunksStep implements IFirstJobStepWorker { +public class GenerateRangeChunksStep + implements IFirstJobStepWorker { private static final Logger ourLog = Logs.getBatchTroubleshootingLog(); @Nonnull @Override - public RunOutcome run(@Nonnull StepExecutionDetails theStepExecutionDetails, @Nonnull IJobDataSink theDataSink) throws JobExecutionFailedException { + public RunOutcome run( + @Nonnull StepExecutionDetails theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException { PT params = theStepExecutionDetails.getParameters(); Date start = 
BATCH_START_DATE; @@ -66,5 +70,4 @@ public class GenerateRangeChunksStep return RunOutcome.SUCCESS; } - } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/IIdChunkProducer.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/IIdChunkProducer.java index 43e177802b9..ac16061c87c 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/IIdChunkProducer.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/IIdChunkProducer.java @@ -23,9 +23,9 @@ import ca.uhn.fhir.batch2.jobs.chunk.ChunkRangeJson; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.pid.IResourcePidList; +import java.util.Date; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.Date; /** * A service that produces pages of resource pids based on the data provided by a previous batch step. Typically the @@ -44,5 +44,10 @@ public interface IIdChunkProducer { * @param theData defines the query we are using * @return a list of Resource pids */ - IResourcePidList fetchResourceIdsPage(Date theNextStart, Date theEnd, @Nonnull Integer thePageSize, @Nullable RequestPartitionId theRequestPartitionId, IT theData); + IResourcePidList fetchResourceIdsPage( + Date theNextStart, + Date theEnd, + @Nonnull Integer thePageSize, + @Nullable RequestPartitionId theRequestPartitionId, + IT theData); } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/LoadIdsStep.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/LoadIdsStep.java index 109db92c3e6..abc285b51bd 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/LoadIdsStep.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/LoadIdsStep.java @@ -34,21 +34,28 @@ import javax.annotation.Nonnull; import static org.slf4j.LoggerFactory.getLogger; -public class LoadIdsStep implements IJobStepWorker { +public class LoadIdsStep + implements IJobStepWorker< + PartitionedUrlListJobParameters, PartitionedUrlChunkRangeJson, ResourceIdListWorkChunkJson> { private static final Logger ourLog = getLogger(LoadIdsStep.class); - - private final ResourceIdListStep myResourceIdListStep; + private final ResourceIdListStep + myResourceIdListStep; public LoadIdsStep(IBatch2DaoSvc theBatch2DaoSvc) { - IIdChunkProducer idChunkProducer = new PartitionedUrlListIdChunkProducer(theBatch2DaoSvc); + IIdChunkProducer idChunkProducer = + new PartitionedUrlListIdChunkProducer(theBatch2DaoSvc); myResourceIdListStep = new ResourceIdListStep<>(idChunkProducer); } @Nonnull @Override - public RunOutcome run(@Nonnull StepExecutionDetails theStepExecutionDetails, @Nonnull IJobDataSink theDataSink) throws JobExecutionFailedException { + public RunOutcome run( + @Nonnull + StepExecutionDetails + theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException { return myResourceIdListStep.run(theStepExecutionDetails, theDataSink); } - } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/PartitionedUrlListIdChunkProducer.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/PartitionedUrlListIdChunkProducer.java index 810619a543a..9e23825ab12 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/PartitionedUrlListIdChunkProducer.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/PartitionedUrlListIdChunkProducer.java @@ -27,9 +27,9 @@ 
import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc; import ca.uhn.fhir.util.Logs; import org.slf4j.Logger; +import java.util.Date; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.Date; public class PartitionedUrlListIdChunkProducer implements IIdChunkProducer { private static final Logger ourLog = Logs.getBatchTroubleshootingLog(); @@ -40,19 +40,29 @@ public class PartitionedUrlListIdChunkProducer implements IIdChunkProducer implements IJobStepWorker { +public class ResourceIdListStep + implements IJobStepWorker { private static final Logger ourLog = Logs.getBatchTroubleshootingLog(); public static final int DEFAULT_PAGE_SIZE = 20000; @@ -58,7 +59,10 @@ public class ResourceIdListStep theStepExecutionDetails, @Nonnull IJobDataSink theDataSink) throws JobExecutionFailedException { + public RunOutcome run( + @Nonnull StepExecutionDetails theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException { IT data = theStepExecutionDetails.getData(); Date start = data.getStart(); @@ -72,7 +76,8 @@ public class ResourceIdListStep idBuffer = new LinkedHashSet<>(); long previousLastTime = 0L; int totalIdsFound = 0; @@ -84,7 +89,8 @@ public class ResourceIdListStep theTypedPids, RequestPartitionId theRequestPartitionId, IJobDataSink theDataSink) { + private void submitWorkChunk( + Collection theTypedPids, + RequestPartitionId theRequestPartitionId, + IJobDataSink theDataSink) { if (theTypedPids.isEmpty()) { return; } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobChunkProgressAccumulator.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobChunkProgressAccumulator.java index 5d72c738e66..64709b227d6 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobChunkProgressAccumulator.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobChunkProgressAccumulator.java @@ -19,7 +19,6 @@ */ package ca.uhn.fhir.batch2.maintenance; - import ca.uhn.fhir.batch2.model.WorkChunk; import ca.uhn.fhir.batch2.model.WorkChunkStatusEnum; import ca.uhn.fhir.util.Logs; @@ -28,12 +27,12 @@ import com.google.common.collect.Multimap; import org.apache.commons.lang3.ArrayUtils; import org.slf4j.Logger; -import javax.annotation.Nonnull; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import static java.util.Collections.emptyList; import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; @@ -51,15 +50,19 @@ public class JobChunkProgressAccumulator { private final Multimap myInstanceIdToChunkStatuses = ArrayListMultimap.create(); int getTotalChunkCountForInstanceAndStep(String theInstanceId, String theStepId) { - return myInstanceIdToChunkStatuses.get(theInstanceId).stream().filter(chunkCount -> chunkCount.myStepId.equals(theStepId)).collect(Collectors.toList()).size(); + return myInstanceIdToChunkStatuses.get(theInstanceId).stream() + .filter(chunkCount -> chunkCount.myStepId.equals(theStepId)) + .collect(Collectors.toList()) + .size(); } - public List getChunkIdsWithStatus(String theInstanceId, String theStepId, WorkChunkStatusEnum... theStatuses) { + public List getChunkIdsWithStatus( + String theInstanceId, String theStepId, WorkChunkStatusEnum... 
theStatuses) { return getChunkStatuses(theInstanceId).stream() - .filter(t -> t.myStepId.equals(theStepId)) - .filter(t -> ArrayUtils.contains(theStatuses, t.myStatus)) - .map(t -> t.myChunkId) - .collect(Collectors.toList()); + .filter(t -> t.myStepId.equals(theStepId)) + .filter(t -> ArrayUtils.contains(theStatuses, t.myStatus)) + .map(t -> t.myChunkId) + .collect(Collectors.toList()); } @Nonnull @@ -75,8 +78,14 @@ public class JobChunkProgressAccumulator { // Note: If chunks are being written while we're executing, we may see the same chunk twice. This // check avoids adding it twice. if (myConsumedInstanceAndChunkIds.add(instanceId + " " + chunkId)) { - ourLog.debug("Adding chunk to accumulator. [chunkId={}, instanceId={}, status={}, step={}]", chunkId, instanceId, theChunk.getStatus(), theChunk.getTargetStepId()); - myInstanceIdToChunkStatuses.put(instanceId, new ChunkStatusCountValue(chunkId, theChunk.getTargetStepId(), theChunk.getStatus())); + ourLog.debug( + "Adding chunk to accumulator. [chunkId={}, instanceId={}, status={}, step={}]", + chunkId, + instanceId, + theChunk.getStatus(), + theChunk.getTargetStepId()); + myInstanceIdToChunkStatuses.put( + instanceId, new ChunkStatusCountValue(chunkId, theChunk.getTargetStepId(), theChunk.getStatus())); } } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobInstanceProcessor.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobInstanceProcessor.java index 3d95da06013..501e21be3b7 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobInstanceProcessor.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobInstanceProcessor.java @@ -52,19 +52,21 @@ public class JobInstanceProcessor { private final String myInstanceId; private final JobDefinitionRegistry myJobDefinitionegistry; - public JobInstanceProcessor(IJobPersistence theJobPersistence, - BatchJobSender theBatchJobSender, - String theInstanceId, - JobChunkProgressAccumulator theProgressAccumulator, - IReductionStepExecutorService theReductionStepExecutorService, - JobDefinitionRegistry theJobDefinitionRegistry) { + public JobInstanceProcessor( + IJobPersistence theJobPersistence, + BatchJobSender theBatchJobSender, + String theInstanceId, + JobChunkProgressAccumulator theProgressAccumulator, + IReductionStepExecutorService theReductionStepExecutorService, + JobDefinitionRegistry theJobDefinitionRegistry) { myJobPersistence = theJobPersistence; myBatchJobSender = theBatchJobSender; myInstanceId = theInstanceId; myProgressAccumulator = theProgressAccumulator; myReductionStepExecutorService = theReductionStepExecutorService; myJobDefinitionegistry = theJobDefinitionRegistry; - myJobInstanceProgressCalculator = new JobInstanceProgressCalculator(theJobPersistence, theProgressAccumulator, theJobDefinitionRegistry); + myJobInstanceProgressCalculator = + new JobInstanceProgressCalculator(theJobPersistence, theProgressAccumulator, theJobDefinitionRegistry); myJobInstanceStatusUpdater = new JobInstanceStatusUpdater(theJobDefinitionRegistry); } @@ -84,7 +86,7 @@ public class JobInstanceProcessor { } cleanupInstance(theInstance); triggerGatedExecutions(theInstance); - + ourLog.debug("Finished job processing: {} - {}", myInstanceId, stopWatch); } @@ -132,7 +134,8 @@ public class JobInstanceProcessor { break; case CANCELLED: purgeExpiredInstance(theInstance); - //wipmb For 6.8 - Are we deliberately not purging chunks for cancelled jobs? 
This is a very complicated way to say that. + // wipmb For 6.8 - Are we deliberately not purging chunks for cancelled jobs? This is a very + // complicated way to say that. return; } @@ -155,8 +158,10 @@ public class JobInstanceProcessor { private void triggerGatedExecutions(JobInstance theInstance) { if (!theInstance.isRunning()) { - ourLog.debug("JobInstance {} is not in a \"running\" state. Status {}", - theInstance.getInstanceId(), theInstance.getStatus()); + ourLog.debug( + "JobInstance {} is not in a \"running\" state. Status {}", + theInstance.getInstanceId(), + theInstance.getStatus()); return; } @@ -164,8 +169,10 @@ public class JobInstanceProcessor { return; } - JobDefinition jobDefinition = myJobDefinitionegistry.getJobDefinitionOrThrowException(theInstance); - JobWorkCursor jobWorkCursor = JobWorkCursor.fromJobDefinitionAndRequestedStepId(jobDefinition, theInstance.getCurrentGatedStepId()); + JobDefinition jobDefinition = + myJobDefinitionegistry.getJobDefinitionOrThrowException(theInstance); + JobWorkCursor jobWorkCursor = + JobWorkCursor.fromJobDefinitionAndRequestedStepId(jobDefinition, theInstance.getCurrentGatedStepId()); // final step if (jobWorkCursor.isFinalStep() && !jobWorkCursor.isReductionStep()) { @@ -178,31 +185,46 @@ public class JobInstanceProcessor { boolean shouldAdvance = myJobPersistence.canAdvanceInstanceToNextStep(instanceId, currentStepId); if (shouldAdvance) { String nextStepId = jobWorkCursor.nextStep.getStepId(); - ourLog.info("All processing is complete for gated execution of instance {} step {}. Proceeding to step {}", instanceId, currentStepId, nextStepId); + ourLog.info( + "All processing is complete for gated execution of instance {} step {}. Proceeding to step {}", + instanceId, + currentStepId, + nextStepId); if (jobWorkCursor.nextStep.isReductionStep()) { - JobWorkCursor nextJobWorkCursor = JobWorkCursor.fromJobDefinitionAndRequestedStepId(jobDefinition, jobWorkCursor.nextStep.getStepId()); + JobWorkCursor nextJobWorkCursor = JobWorkCursor.fromJobDefinitionAndRequestedStepId( + jobDefinition, jobWorkCursor.nextStep.getStepId()); myReductionStepExecutorService.triggerReductionStep(instanceId, nextJobWorkCursor); } else { // otherwise, continue processing as expected processChunksForNextSteps(theInstance, nextStepId); } } else { - ourLog.debug("Not ready to advance gated execution of instance {} from step {} to {}.", - instanceId, currentStepId, jobWorkCursor.nextStep.getStepId()); + ourLog.debug( + "Not ready to advance gated execution of instance {} from step {} to {}.", + instanceId, + currentStepId, + jobWorkCursor.nextStep.getStepId()); } } private void processChunksForNextSteps(JobInstance theInstance, String nextStepId) { String instanceId = theInstance.getInstanceId(); - List queuedChunksForNextStep = myProgressAccumulator.getChunkIdsWithStatus(instanceId, nextStepId, WorkChunkStatusEnum.QUEUED); + List queuedChunksForNextStep = + myProgressAccumulator.getChunkIdsWithStatus(instanceId, nextStepId, WorkChunkStatusEnum.QUEUED); int totalChunksForNextStep = myProgressAccumulator.getTotalChunkCountForInstanceAndStep(instanceId, nextStepId); if (totalChunksForNextStep != queuedChunksForNextStep.size()) { - ourLog.debug("Total ProgressAccumulator QUEUED chunk count does not match QUEUED chunk size! [instanceId={}, stepId={}, totalChunks={}, queuedChunks={}]", instanceId, nextStepId, totalChunksForNextStep, queuedChunksForNextStep.size()); + ourLog.debug( + "Total ProgressAccumulator QUEUED chunk count does not match QUEUED chunk size! 
[instanceId={}, stepId={}, totalChunks={}, queuedChunks={}]", + instanceId, + nextStepId, + totalChunksForNextStep, + queuedChunksForNextStep.size()); } // Note on sequence: we don't have XA transactions, and are talking to two stores (JPA + Queue) // Sequence: 1 - So we run the query to minimize the work overlapping. - List chunksToSubmit = myJobPersistence.fetchAllChunkIdsForStepWithStatus(instanceId, nextStepId, WorkChunkStatusEnum.QUEUED); + List chunksToSubmit = + myJobPersistence.fetchAllChunkIdsForStepWithStatus(instanceId, nextStepId, WorkChunkStatusEnum.QUEUED); // Sequence: 2 - update the job step so the workers will process them. boolean changed = myJobPersistence.updateInstance(instanceId, instance -> { if (instance.getCurrentGatedStepId().equals(nextStepId)) { @@ -226,7 +248,10 @@ public class JobInstanceProcessor { JobWorkNotification workNotification = new JobWorkNotification(theInstance, nextStepId, nextChunkId); myBatchJobSender.sendWorkChannelMessage(workNotification); } - ourLog.debug("Submitted a batch of chunks for processing. [chunkCount={}, instanceId={}, stepId={}]", chunksToSubmit.size(), instanceId, nextStepId); + ourLog.debug( + "Submitted a batch of chunks for processing. [chunkCount={}, instanceId={}, stepId={}]", + chunksToSubmit.size(), + instanceId, + nextStepId); } - } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobMaintenanceServiceImpl.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobMaintenanceServiceImpl.java index f41148fb1ec..7970ea69537 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobMaintenanceServiceImpl.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/maintenance/JobMaintenanceServiceImpl.java @@ -40,12 +40,12 @@ import org.quartz.JobExecutionContext; import org.slf4j.Logger; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nonnull; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; +import javax.annotation.Nonnull; /** * This class performs regular polls of the stored jobs in order to @@ -100,13 +100,14 @@ public class JobMaintenanceServiceImpl implements IJobMaintenanceService, IHasSc /** * Constructor */ - public JobMaintenanceServiceImpl(@Nonnull ISchedulerService theSchedulerService, - @Nonnull IJobPersistence theJobPersistence, - JpaStorageSettings theStorageSettings, - @Nonnull JobDefinitionRegistry theJobDefinitionRegistry, - @Nonnull BatchJobSender theBatchJobSender, - @Nonnull WorkChunkProcessor theExecutor, - @Nonnull IReductionStepExecutorService theReductionStepExecutorService) { + public JobMaintenanceServiceImpl( + @Nonnull ISchedulerService theSchedulerService, + @Nonnull IJobPersistence theJobPersistence, + JpaStorageSettings theStorageSettings, + @Nonnull JobDefinitionRegistry theJobDefinitionRegistry, + @Nonnull BatchJobSender theBatchJobSender, + @Nonnull WorkChunkProcessor theExecutor, + @Nonnull IReductionStepExecutorService theReductionStepExecutorService) { myStorageSettings = theStorageSettings; myReductionStepExecutorService = theReductionStepExecutorService; Validate.notNull(theSchedulerService); @@ -147,8 +148,8 @@ public class JobMaintenanceServiceImpl implements IJobMaintenanceService, IHasSc return false; } if (mySchedulerService.isClusteredSchedulingEnabled()) { - mySchedulerService.triggerClusteredJobImmediately(buildJobDefinition()); - return true; + 
mySchedulerService.triggerClusteredJobImmediately(buildJobDefinition()); + return true; } else { // We are probably running a unit test return runMaintenanceDirectlyWithTimeout(); @@ -157,14 +158,19 @@ public class JobMaintenanceServiceImpl implements IJobMaintenanceService, IHasSc private boolean runMaintenanceDirectlyWithTimeout() { if (getQueueLength() > 0) { - ourLog.debug("There are already {} threads waiting to run a maintenance pass. Ignoring request.", getQueueLength()); + ourLog.debug( + "There are already {} threads waiting to run a maintenance pass. Ignoring request.", + getQueueLength()); return false; } try { - ourLog.debug("There is no clustered scheduling service. Requesting semaphore to run maintenance pass directly."); - // Some unit test, esp. the Loinc terminology tests, depend on this maintenance pass being run shortly after it is requested - if (myRunMaintenanceSemaphore.tryAcquire(MAINTENANCE_TRIGGER_RUN_WITHOUT_SCHEDULER_TIMEOUT, TimeUnit.MINUTES)) { + ourLog.debug( + "There is no clustered scheduling service. Requesting semaphore to run maintenance pass directly."); + // Some unit test, esp. the Loinc terminology tests, depend on this maintenance pass being run shortly after + // it is requested + if (myRunMaintenanceSemaphore.tryAcquire( + MAINTENANCE_TRIGGER_RUN_WITHOUT_SCHEDULER_TIMEOUT, TimeUnit.MINUTES)) { ourLog.debug("Semaphore acquired. Starting maintenance pass."); doMaintenancePass(); } @@ -186,10 +192,7 @@ public class JobMaintenanceServiceImpl implements IJobMaintenanceService, IHasSc @VisibleForTesting public void forceMaintenancePass() { // to simulate a long running job! - ourLog.info( - "Forcing a maintenance pass run; semaphore at {}", - getQueueLength() - ); + ourLog.info("Forcing a maintenance pass run; semaphore at {}", getQueueLength()); doMaintenancePass(); } @@ -218,17 +221,29 @@ public class JobMaintenanceServiceImpl implements IJobMaintenanceService, IHasSc for (JobInstance instance : instances) { String instanceId = instance.getInstanceId(); - if (myJobDefinitionRegistry.getJobDefinition(instance.getJobDefinitionId(),instance.getJobDefinitionVersion()).isPresent()) { + if (myJobDefinitionRegistry + .getJobDefinition(instance.getJobDefinitionId(), instance.getJobDefinitionVersion()) + .isPresent()) { if (processedInstanceIds.add(instanceId)) { myJobDefinitionRegistry.setJobDefinition(instance); - JobInstanceProcessor jobInstanceProcessor = new JobInstanceProcessor(myJobPersistence, - myBatchJobSender, instanceId, progressAccumulator, myReductionStepExecutorService, myJobDefinitionRegistry); - ourLog.debug("Triggering maintenance process for instance {} in status {}", instanceId, instance.getStatus()); + JobInstanceProcessor jobInstanceProcessor = new JobInstanceProcessor( + myJobPersistence, + myBatchJobSender, + instanceId, + progressAccumulator, + myReductionStepExecutorService, + myJobDefinitionRegistry); + ourLog.debug( + "Triggering maintenance process for instance {} in status {}", + instanceId, + instance.getStatus()); jobInstanceProcessor.process(); } - } - else { - ourLog.warn("Job definition {} for instance {} is currently unavailable", instance.getJobDefinitionId(), instanceId); + } else { + ourLog.warn( + "Job definition {} for instance {} is currently unavailable", + instance.getJobDefinitionId(), + instanceId); } } @@ -256,5 +271,4 @@ public class JobMaintenanceServiceImpl implements IJobMaintenanceService, IHasSc myTarget.runMaintenancePass(); } } - } diff --git 
a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/FetchJobInstancesRequest.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/FetchJobInstancesRequest.java
index bfe33b6aaf1..a29e24754fe 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/FetchJobInstancesRequest.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/FetchJobInstancesRequest.java
@@ -19,27 +19,25 @@
 */
 package ca.uhn.fhir.batch2.model;
-import javax.annotation.Nonnull;
 import java.util.HashSet;
 import java.util.Set;
+import javax.annotation.Nonnull;
 public class FetchJobInstancesRequest {
-
+
	private final String myJobDefinition;
-
+
	private final String myParameters;
	private final Set myStatuses = new HashSet<>();
-	public FetchJobInstancesRequest(@Nonnull String theJobDefinition,
-								@Nonnull String theParameters) {
+	public FetchJobInstancesRequest(@Nonnull String theJobDefinition, @Nonnull String theParameters) {
		myJobDefinition = theJobDefinition;
		myParameters = theParameters;
	}
-	public FetchJobInstancesRequest(@Nonnull String theJobDefinition,
-								@Nonnull String theParameters,
-								StatusEnum... theStatuses) {
+	public FetchJobInstancesRequest(
+			@Nonnull String theJobDefinition, @Nonnull String theParameters, StatusEnum... theStatuses) {
		myJobDefinition = theJobDefinition;
		myParameters = theParameters;
		for (StatusEnum status : theStatuses) {
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobDefinition.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobDefinition.java
index 8d4b3b52ac5..8a7aea8190b 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobDefinition.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobDefinition.java
@@ -26,17 +26,17 @@ import ca.uhn.fhir.batch2.api.IReductionStepWorker;
 import ca.uhn.fhir.batch2.api.VoidModel;
 import ca.uhn.fhir.context.ConfigurationException;
 import ca.uhn.fhir.i18n.Msg;
-import ca.uhn.fhir.util.Logs;
 import ca.uhn.fhir.model.api.IModelJson;
+import ca.uhn.fhir.util.Logs;
 import org.apache.commons.lang3.Validate;
 import org.slf4j.Logger;
-import javax.annotation.Nonnull;
-import javax.annotation.Nullable;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.stream.Collectors;
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
 public class JobDefinition {
	private static final Logger ourLog = Logs.getBatchTroubleshootingLog();
@@ -56,7 +56,16 @@ public class JobDefinition {
	/**
	 * Constructor
	 */
-	private JobDefinition(String theJobDefinitionId, int theJobDefinitionVersion, String theJobDescription, Class theParametersType, List> theSteps, IJobParametersValidator theParametersValidator, boolean theGatedExecution, IJobCompletionHandler theCompletionHandler, IJobCompletionHandler theErrorHandler) {
+	private JobDefinition(
+			String theJobDefinitionId,
+			int theJobDefinitionVersion,
+			String theJobDescription,
+			Class theParametersType,
+			List> theSteps,
+			IJobParametersValidator theParametersValidator,
+			boolean theGatedExecution,
+			IJobCompletionHandler theCompletionHandler,
+			IJobCompletionHandler theErrorHandler) {
		Validate.isTrue(theJobDefinitionId.length() <= ID_MAX_LENGTH, "Maximum ID length is %d", ID_MAX_LENGTH);
		Validate.notBlank(theJobDefinitionId, "No job definition ID supplied");
		Validate.notBlank(theJobDescription, "No job description supplied");
@@ -150,8 +159,10 @@ public class JobDefinition {
	private String
myJobDescription; private Class myJobParametersType; private Class myNextInputType; + @Nullable private IJobParametersValidator myParametersValidator; + private boolean myGatedExecution; private IJobCompletionHandler myCompletionHandler; private IJobCompletionHandler myErrorHandler; @@ -160,7 +171,17 @@ public class JobDefinition { mySteps = new ArrayList<>(); } - Builder(List> theSteps, String theJobDefinitionId, int theJobDefinitionVersion, String theJobDescription, Class theJobParametersType, Class theNextInputType, @Nullable IJobParametersValidator theParametersValidator, boolean theGatedExecution, IJobCompletionHandler theCompletionHandler, IJobCompletionHandler theErrorHandler) { + Builder( + List> theSteps, + String theJobDefinitionId, + int theJobDefinitionVersion, + String theJobDescription, + Class theJobParametersType, + Class theNextInputType, + @Nullable IJobParametersValidator theParametersValidator, + boolean theGatedExecution, + IJobCompletionHandler theCompletionHandler, + IJobCompletionHandler theErrorHandler) { mySteps = theSteps; myJobDefinitionId = theJobDefinitionId; myJobDefinitionVersion = theJobDefinitionVersion; @@ -199,9 +220,24 @@ public class JobDefinition { * @param theStepDescription A description of this step * @param theStepWorker The worker that will actually perform this step */ - public Builder addFirstStep(String theStepId, String theStepDescription, Class theOutputType, IJobStepWorker theStepWorker) { - mySteps.add(new JobDefinitionStep<>(theStepId, theStepDescription, theStepWorker, VoidModel.class, theOutputType)); - return new Builder<>(mySteps, myJobDefinitionId, myJobDefinitionVersion, myJobDescription, myJobParametersType, theOutputType, myParametersValidator, myGatedExecution, myCompletionHandler, myErrorHandler); + public Builder addFirstStep( + String theStepId, + String theStepDescription, + Class theOutputType, + IJobStepWorker theStepWorker) { + mySteps.add(new JobDefinitionStep<>( + theStepId, theStepDescription, theStepWorker, VoidModel.class, theOutputType)); + return new Builder<>( + mySteps, + myJobDefinitionId, + myJobDefinitionVersion, + myJobDescription, + myJobParametersType, + theOutputType, + myParametersValidator, + myGatedExecution, + myCompletionHandler, + myErrorHandler); } /** @@ -213,9 +249,24 @@ public class JobDefinition { * @param theStepDescription A description of this step * @param theStepWorker The worker that will actually perform this step */ - public Builder addIntermediateStep(String theStepId, String theStepDescription, Class theOutputType, IJobStepWorker theStepWorker) { - mySteps.add(new JobDefinitionStep<>(theStepId, theStepDescription, theStepWorker, myNextInputType, theOutputType)); - return new Builder<>(mySteps, myJobDefinitionId, myJobDefinitionVersion, myJobDescription, myJobParametersType, theOutputType, myParametersValidator, myGatedExecution, myCompletionHandler, myErrorHandler); + public Builder addIntermediateStep( + String theStepId, + String theStepDescription, + Class theOutputType, + IJobStepWorker theStepWorker) { + mySteps.add(new JobDefinitionStep<>( + theStepId, theStepDescription, theStepWorker, myNextInputType, theOutputType)); + return new Builder<>( + mySteps, + myJobDefinitionId, + myJobDefinitionVersion, + myJobDescription, + myJobParametersType, + theOutputType, + myParametersValidator, + myGatedExecution, + myCompletionHandler, + myErrorHandler); } /** @@ -227,22 +278,59 @@ public class JobDefinition { * @param theStepDescription A description of this step * @param theStepWorker The 
worker that will actually perform this step */ - public Builder addLastStep(String theStepId, String theStepDescription, IJobStepWorker theStepWorker) { - mySteps.add(new JobDefinitionStep<>(theStepId, theStepDescription, theStepWorker, myNextInputType, VoidModel.class)); - return new Builder<>(mySteps, myJobDefinitionId, myJobDefinitionVersion, myJobDescription, myJobParametersType, VoidModel.class, myParametersValidator, myGatedExecution, myCompletionHandler, myErrorHandler); + public Builder addLastStep( + String theStepId, String theStepDescription, IJobStepWorker theStepWorker) { + mySteps.add(new JobDefinitionStep<>( + theStepId, theStepDescription, theStepWorker, myNextInputType, VoidModel.class)); + return new Builder<>( + mySteps, + myJobDefinitionId, + myJobDefinitionVersion, + myJobDescription, + myJobParametersType, + VoidModel.class, + myParametersValidator, + myGatedExecution, + myCompletionHandler, + myErrorHandler); } - public Builder addFinalReducerStep(String theStepId, String theStepDescription, Class theOutputType, IReductionStepWorker theStepWorker) { + public Builder addFinalReducerStep( + String theStepId, + String theStepDescription, + Class theOutputType, + IReductionStepWorker theStepWorker) { if (!myGatedExecution) { - throw new ConfigurationException(Msg.code(2106) + String.format("Job Definition %s has a reducer step but is not gated", myJobDefinitionId)); + throw new ConfigurationException(Msg.code(2106) + + String.format("Job Definition %s has a reducer step but is not gated", myJobDefinitionId)); } - mySteps.add(new JobDefinitionReductionStep(theStepId, theStepDescription, theStepWorker, myNextInputType, theOutputType)); - return new Builder(mySteps, myJobDefinitionId, myJobDefinitionVersion, myJobDescription, myJobParametersType, theOutputType, myParametersValidator, myGatedExecution, myCompletionHandler, myErrorHandler); + mySteps.add(new JobDefinitionReductionStep( + theStepId, theStepDescription, theStepWorker, myNextInputType, theOutputType)); + return new Builder( + mySteps, + myJobDefinitionId, + myJobDefinitionVersion, + myJobDescription, + myJobParametersType, + theOutputType, + myParametersValidator, + myGatedExecution, + myCompletionHandler, + myErrorHandler); } public JobDefinition build() { Validate.notNull(myJobParametersType, "No job parameters type was supplied"); - return new JobDefinition<>(myJobDefinitionId, myJobDefinitionVersion, myJobDescription, myJobParametersType, Collections.unmodifiableList(mySteps), myParametersValidator, myGatedExecution, myCompletionHandler, myErrorHandler); + return new JobDefinition<>( + myJobDefinitionId, + myJobDefinitionVersion, + myJobDescription, + myJobParametersType, + Collections.unmodifiableList(mySteps), + myParametersValidator, + myGatedExecution, + myCompletionHandler, + myErrorHandler); } public Builder setJobDescription(String theJobDescription) { @@ -275,7 +363,10 @@ public class JobDefinition { @SuppressWarnings("unchecked") public Builder setParametersType(@Nonnull Class theJobParametersType) { Validate.notNull(theJobParametersType, "theJobParametersType must not be null"); - Validate.isTrue(myJobParametersType == null, "Can not supply multiple parameters types, already have: %s", myJobParametersType); + Validate.isTrue( + myJobParametersType == null, + "Can not supply multiple parameters types, already have: %s", + myJobParametersType); myJobParametersType = (Class) theJobParametersType; return (Builder) this; } @@ -289,7 +380,10 @@ public class JobDefinition { */ public Builder 
setParametersValidator(@Nonnull IJobParametersValidator theParametersValidator) { Validate.notNull(theParametersValidator, "theParametersValidator must not be null"); - Validate.isTrue(myParametersValidator == null, "Can not supply multiple parameters validators. Already have: %s", myParametersValidator); + Validate.isTrue( + myParametersValidator == null, + "Can not supply multiple parameters validators. Already have: %s", + myParametersValidator); myParametersValidator = theParametersValidator; return this; } @@ -342,11 +436,9 @@ public class JobDefinition { myErrorHandler = theErrorHandler; return this; } - } public static Builder newBuilder() { return new Builder<>(); } - } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobDefinitionReductionStep.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobDefinitionReductionStep.java index 98a2e5591ff..93cd06ca461 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobDefinitionReductionStep.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobDefinitionReductionStep.java @@ -26,15 +26,20 @@ import ca.uhn.fhir.model.api.IModelJson; import javax.annotation.Nonnull; public class JobDefinitionReductionStep - extends JobDefinitionStep { + extends JobDefinitionStep { - - public JobDefinitionReductionStep(@Nonnull String theStepId, - @Nonnull String theStepDescription, - @Nonnull IReductionStepWorker theJobStepWorker, - @Nonnull Class theInputType, - @Nonnull Class theOutputType) { - super(theStepId, theStepDescription, (IJobStepWorker) theJobStepWorker, theInputType, theOutputType); + public JobDefinitionReductionStep( + @Nonnull String theStepId, + @Nonnull String theStepDescription, + @Nonnull IReductionStepWorker theJobStepWorker, + @Nonnull Class theInputType, + @Nonnull Class theOutputType) { + super( + theStepId, + theStepDescription, + (IJobStepWorker) theJobStepWorker, + theInputType, + theOutputType); } @Override diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobDefinitionStep.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobDefinitionStep.java index cbdf98bfb20..47b5e6a32a6 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobDefinitionStep.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobDefinitionStep.java @@ -36,11 +36,12 @@ public class JobDefinitionStep myOutputType; - public JobDefinitionStep(@Nonnull String theStepId, - @Nonnull String theStepDescription, - @Nonnull IJobStepWorker theJobStepWorker, - @Nonnull Class theInputType, - @Nonnull Class theOutputType) { + public JobDefinitionStep( + @Nonnull String theStepId, + @Nonnull String theStepDescription, + @Nonnull IJobStepWorker theJobStepWorker, + @Nonnull Class theInputType, + @Nonnull Class theOutputType) { Validate.notBlank(theStepId, "No step ID specified"); Validate.isTrue(theStepId.length() <= ID_MAX_LENGTH, "Maximum ID length is %d", ID_MAX_LENGTH); Validate.notBlank(theStepDescription); diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobInstance.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobInstance.java index 9b7d8207210..09fb8344dea 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobInstance.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobInstance.java @@ -20,9 +20,9 @@ package ca.uhn.fhir.batch2.model; import ca.uhn.fhir.batch2.api.IJobInstance; 
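For context, the JobDefinition.Builder API whose signatures are rewrapped in the JobDefinition.java hunks above is normally driven as a fluent chain: each addFirstStep/addIntermediateStep/addLastStep call returns a new Builder typed to that step's output. The sketch below is not part of this patch; the job id, step ids, parameter/chunk classes, worker lambdas, and the setJobDefinitionId/setJobDefinitionVersion calls are illustrative assumptions based on how this API is used elsewhere in the batch2 module.

// Minimal sketch (assumed example, not code from this changeset) of wiring a two-step job
// through the Builder methods shown above.
import ca.uhn.fhir.batch2.api.IJobStepWorker;
import ca.uhn.fhir.batch2.api.RunOutcome;
import ca.uhn.fhir.batch2.api.VoidModel;
import ca.uhn.fhir.batch2.model.JobDefinition;
import ca.uhn.fhir.model.api.IModelJson;

public class ExampleJobDefinitionFactory {

	// Hypothetical parameter and work-chunk models for the sketch.
	public static class ExampleParams implements IModelJson {}

	public static class ExampleChunk implements IModelJson {}

	public JobDefinition<ExampleParams> build() {
		// First step fans work out to the data sink; last step consumes it.
		IJobStepWorker<ExampleParams, VoidModel, ExampleChunk> firstWorker = (details, sink) -> {
			sink.accept(new ExampleChunk());
			return new RunOutcome(1);
		};
		IJobStepWorker<ExampleParams, ExampleChunk, VoidModel> lastWorker =
				(details, sink) -> new RunOutcome(1);

		return JobDefinition.newBuilder()
				.setJobDefinitionId("EXAMPLE_JOB")
				.setJobDefinitionVersion(1)
				.setJobDescription("Illustrative two-step job")
				.setParametersType(ExampleParams.class)
				.addFirstStep("generate", "Produce work chunks", ExampleChunk.class, firstWorker)
				.addLastStep("consume", "Process work chunks", lastWorker)
				.build();
	}
}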
+import ca.uhn.fhir.model.api.IModelJson; import ca.uhn.fhir.rest.server.util.JsonDateDeserializer; import ca.uhn.fhir.rest.server.util.JsonDateSerializer; -import ca.uhn.fhir.model.api.IModelJson; import ca.uhn.fhir.util.JsonUtil; import ca.uhn.fhir.util.Logs; import com.fasterxml.jackson.annotation.JsonProperty; @@ -97,16 +97,22 @@ public class JobInstance implements IModelJson, IJobInstance { @JsonProperty(value = "progress", access = JsonProperty.Access.READ_ONLY) private double myProgress; + @JsonProperty(value = "currentGatedStepId", access = JsonProperty.Access.READ_ONLY) private String myCurrentGatedStepId; + @JsonProperty(value = "errorMessage", access = JsonProperty.Access.READ_ONLY) private String myErrorMessage; + @JsonProperty(value = "errorCount", access = JsonProperty.Access.READ_ONLY) private int myErrorCount; + @JsonProperty(value = "estimatedCompletion", access = JsonProperty.Access.READ_ONLY) private String myEstimatedTimeRemaining; + @JsonProperty(value = "report", access = JsonProperty.Access.READ_WRITE) private String myReport; + @JsonProperty(value = "warningMessages", access = JsonProperty.Access.READ_ONLY) private String myWarningMessages; @@ -145,7 +151,6 @@ public class JobInstance implements IModelJson, IJobInstance { setWarningMessages(theJobInstance.getWarningMessages()); } - public String getJobDefinitionId() { return myJobDefinitionId; } @@ -346,6 +351,7 @@ public class JobInstance implements IModelJson, IJobInstance { myWarningMessages = theWarningMessages; return this; } + public void setJobDefinition(JobDefinition theJobDefinition) { setJobDefinitionId(theJobDefinition.getJobDefinitionId()); setJobDefinitionVersion(theJobDefinition.getJobDefinitionVersion()); @@ -372,25 +378,25 @@ public class JobInstance implements IModelJson, IJobInstance { @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("jobDefinitionId", getJobDefinitionId() + "/" + myJobDefinitionVersion) - .append("instanceId", myInstanceId) - .append("status", myStatus) - .append("myCancelled", myCancelled) - .append("createTime", myCreateTime) - .append("startTime", myStartTime) - .append("endTime", myEndTime) - .append("updateTime", myUpdateTime) - .append("combinedRecordsProcessed", myCombinedRecordsProcessed) - .append("combinedRecordsProcessedPerSecond", myCombinedRecordsProcessedPerSecond) - .append("totalElapsedMillis", myTotalElapsedMillis) - .append("workChunksPurged", myWorkChunksPurged) - .append("progress", myProgress) - .append("errorMessage", myErrorMessage) - .append("errorCount", myErrorCount) - .append("estimatedTimeRemaining", myEstimatedTimeRemaining) - .append("report", myReport) - .append("warningMessages", myWarningMessages) - .toString(); + .append("jobDefinitionId", getJobDefinitionId() + "/" + myJobDefinitionVersion) + .append("instanceId", myInstanceId) + .append("status", myStatus) + .append("myCancelled", myCancelled) + .append("createTime", myCreateTime) + .append("startTime", myStartTime) + .append("endTime", myEndTime) + .append("updateTime", myUpdateTime) + .append("combinedRecordsProcessed", myCombinedRecordsProcessed) + .append("combinedRecordsProcessedPerSecond", myCombinedRecordsProcessedPerSecond) + .append("totalElapsedMillis", myTotalElapsedMillis) + .append("workChunksPurged", myWorkChunksPurged) + .append("progress", myProgress) + .append("errorMessage", myErrorMessage) + .append("errorCount", myErrorCount) + .append("estimatedTimeRemaining", myEstimatedTimeRemaining) + .append("report", 
myReport) + .append("warningMessages", myWarningMessages) + .toString(); } /** @@ -420,9 +426,7 @@ public class JobInstance implements IModelJson, IJobInstance { } public boolean isFinished() { - return myStatus == StatusEnum.COMPLETED || - myStatus == StatusEnum.FAILED || - myStatus == StatusEnum.CANCELLED; + return myStatus == StatusEnum.COMPLETED || myStatus == StatusEnum.FAILED || myStatus == StatusEnum.CANCELLED; } public boolean hasGatedStep() { diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobInstanceStartRequest.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobInstanceStartRequest.java index 51d1f66e663..f097d1fc13c 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobInstanceStartRequest.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobInstanceStartRequest.java @@ -20,7 +20,6 @@ package ca.uhn.fhir.batch2.model; import ca.uhn.fhir.model.api.IModelJson; -import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters; import ca.uhn.fhir.util.JsonUtil; import com.fasterxml.jackson.annotation.JsonProperty; @@ -99,17 +98,15 @@ public class JobInstanceStartRequest implements IModelJson { return myUseCache; } - public void setUseCache(boolean theUseCache) { myUseCache = theUseCache; } @Override public String toString() { - return "JobInstanceStartRequest{" + - "myJobDefinitionId='" + myJobDefinitionId + '\'' + - ", myParameters='" + myParameters + '\'' + - ", myUseCache=" + myUseCache + - '}'; + return "JobInstanceStartRequest{" + "myJobDefinitionId='" + + myJobDefinitionId + '\'' + ", myParameters='" + + myParameters + '\'' + ", myUseCache=" + + myUseCache + '}'; } } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobWorkCursor.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobWorkCursor.java index b9f97d27457..ac59ea09169 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobWorkCursor.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobWorkCursor.java @@ -21,9 +21,9 @@ package ca.uhn.fhir.batch2.model; import ca.uhn.fhir.batch2.api.VoidModel; import ca.uhn.fhir.i18n.Msg; -import ca.uhn.fhir.util.Logs; import ca.uhn.fhir.model.api.IModelJson; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import ca.uhn.fhir.util.Logs; import org.apache.commons.lang3.Validate; import org.slf4j.Logger; @@ -44,7 +44,11 @@ public class JobWorkCursor currentStep; public final JobDefinitionStep nextStep; - public JobWorkCursor(JobDefinition theJobDefinition, boolean theIsFirstStep, JobDefinitionStep theCurrentStep, JobDefinitionStep theNextStep) { + public JobWorkCursor( + JobDefinition theJobDefinition, + boolean theIsFirstStep, + JobDefinitionStep theCurrentStep, + JobDefinitionStep theNextStep) { jobDefinition = theJobDefinition; isFirstStep = theIsFirstStep; currentStep = theCurrentStep; @@ -56,17 +60,19 @@ public class JobWorkCursor JobWorkCursor fromJobDefinitionAndRequestedStepId(JobDefinition theJobDefinition, String theRequestedStepId) { + public static JobWorkCursor fromJobDefinitionAndRequestedStepId( + JobDefinition theJobDefinition, String theRequestedStepId) { boolean isFirstStep = false; - JobDefinitionStep currentStep = null; - JobDefinitionStep nextStep = null; + JobDefinitionStep currentStep = null; + JobDefinitionStep nextStep = null; List> steps = theJobDefinition.getSteps(); for (int i = 0; i < steps.size(); i++) { @@ -84,7 +90,9 @@ public class 
JobWorkCursor asFinalCursor() { + public JobWorkCursor asFinalCursor() { Validate.isTrue(isFinalStep()); - return (JobWorkCursor)this; + return (JobWorkCursor) this; } public JobDefinition getJobDefinition() { diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobWorkNotification.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobWorkNotification.java index dc369ba06ed..55246403af3 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobWorkNotification.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobWorkNotification.java @@ -41,10 +41,14 @@ public class JobWorkNotification implements IModelJson { @JsonProperty(value = "instanceId") private String myInstanceId; - public JobWorkNotification() { - } + public JobWorkNotification() {} - public JobWorkNotification(@Nonnull String theJobDefinitionId, int jobDefinitionVersion, @Nonnull String theInstanceId, @Nonnull String theTargetStepId, @Nonnull String theChunkId) { + public JobWorkNotification( + @Nonnull String theJobDefinitionId, + int jobDefinitionVersion, + @Nonnull String theInstanceId, + @Nonnull String theTargetStepId, + @Nonnull String theChunkId) { setJobDefinitionId(theJobDefinitionId); setJobDefinitionVersion(jobDefinitionVersion); setChunkId(theChunkId); @@ -52,11 +56,17 @@ public class JobWorkNotification implements IModelJson { setTargetStepId(theTargetStepId); } - public JobWorkNotification(JobInstance theInstance, String theNextStepId, String theNextChunkId) { - this(theInstance.getJobDefinitionId(), theInstance.getJobDefinitionVersion(), theInstance.getInstanceId(), theNextStepId, theNextChunkId); - } + public JobWorkNotification(JobInstance theInstance, String theNextStepId, String theNextChunkId) { + this( + theInstance.getJobDefinitionId(), + theInstance.getJobDefinitionVersion(), + theInstance.getInstanceId(), + theNextStepId, + theNextChunkId); + } - public static JobWorkNotification firstStepNotification(JobDefinition theJobDefinition, String theInstanceId, String theChunkId) { + public static JobWorkNotification firstStepNotification( + JobDefinition theJobDefinition, String theInstanceId, String theChunkId) { String firstStepId = theJobDefinition.getFirstStepId(); String jobDefinitionId = theJobDefinition.getJobDefinitionId(); int jobDefinitionVersion = theJobDefinition.getJobDefinitionVersion(); @@ -105,6 +115,7 @@ public class JobWorkNotification implements IModelJson { @Override public String toString() { - return String.format("job[%s] instance[%s] step[%s] chunk[%s]", myJobDefinitionId, myInstanceId, myTargetStepId, myChunkId); + return String.format( + "job[%s] instance[%s] step[%s] chunk[%s]", myJobDefinitionId, myInstanceId, myTargetStepId, myChunkId); } } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobWorkNotificationJsonMessage.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobWorkNotificationJsonMessage.java index c325e83fe8c..f5413b63210 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobWorkNotificationJsonMessage.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/JobWorkNotificationJsonMessage.java @@ -52,5 +52,4 @@ public class JobWorkNotificationJsonMessage extends BaseJsonMessage> fromStates = new EnumMap<>(StatusEnum.class); EnumMap> toStates = new EnumMap<>(StatusEnum.class); - for (StatusEnum nextEnum: StatusEnum.values()) { + for (StatusEnum nextEnum : StatusEnum.values()) { 
fromStates.put(nextEnum, EnumSet.noneOf(StatusEnum.class)); toStates.put(nextEnum, EnumSet.noneOf(StatusEnum.class)); } - for (StatusEnum nextPriorEnum: StatusEnum.values()) { - for (StatusEnum nextNextEnum: StatusEnum.values()) { + for (StatusEnum nextPriorEnum : StatusEnum.values()) { + for (StatusEnum nextNextEnum : StatusEnum.values()) { if (isLegalStateTransition(nextPriorEnum, nextNextEnum)) { fromStates.get(nextNextEnum).add(nextPriorEnum); toStates.get(nextPriorEnum).add(nextNextEnum); @@ -194,10 +195,10 @@ public enum StatusEnum { break; case CANCELLED: // terminal state cannot transition - canTransition = false; + canTransition = false; break; case COMPLETED: - canTransition = false; + canTransition = false; break; case FAILED: canTransition = theNewStatus == FAILED; @@ -211,7 +212,10 @@ public enum StatusEnum { if (!canTransition) { // we have a bug? - ourLog.debug("Tried to execute an illegal state transition. [origStatus={}, newStatus={}]", theOrigStatus, theNewStatus); + ourLog.debug( + "Tried to execute an illegal state transition. [origStatus={}, newStatus={}]", + theOrigStatus, + theNewStatus); } return canTransition; } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunk.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunk.java index 5c26d391976..63aae8abc80 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunk.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunk.java @@ -19,9 +19,9 @@ */ package ca.uhn.fhir.batch2.model; +import ca.uhn.fhir.model.api.IModelJson; import ca.uhn.fhir.rest.server.util.JsonDateDeserializer; import ca.uhn.fhir.rest.server.util.JsonDateSerializer; -import ca.uhn.fhir.model.api.IModelJson; import ca.uhn.fhir.util.JsonUtil; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; @@ -45,7 +45,8 @@ public class WorkChunk implements IModelJson { private String myId; @JsonProperty("sequence") - // TODO MB danger - these repeat with a job or even a single step. They start at 0 for every parent chunk. Review after merge. + // TODO MB danger - these repeat with a job or even a single step. They start at 0 for every parent chunk. Review + // after merge. 
private int mySequence; @JsonProperty("status") @@ -85,6 +86,7 @@ public class WorkChunk implements IModelJson { @JsonSerialize(using = JsonDateSerializer.class) @JsonDeserialize(using = JsonDateDeserializer.class) private Date myUpdateTime; + @JsonProperty(value = "recordsProcessed", access = JsonProperty.Access.READ_ONLY) private Integer myRecordsProcessed; diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunkCompletionEvent.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunkCompletionEvent.java index 1f1100784a3..045ea97a382 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunkCompletionEvent.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunkCompletionEvent.java @@ -36,7 +36,8 @@ public class WorkChunkCompletionEvent extends BaseWorkChunkEvent { myRecoveredErrorCount = theRecoveredErrorCount; } - public WorkChunkCompletionEvent(String theChunkId, int theRecordsProcessed, int theRecoveredErrorCount, String theRecoveredWarningMessage) { + public WorkChunkCompletionEvent( + String theChunkId, int theRecordsProcessed, int theRecoveredErrorCount, String theRecoveredWarningMessage) { this(theChunkId, theRecordsProcessed, theRecoveredErrorCount); myRecoveredWarningMessage = theRecoveredWarningMessage; } @@ -61,11 +62,21 @@ public class WorkChunkCompletionEvent extends BaseWorkChunkEvent { WorkChunkCompletionEvent that = (WorkChunkCompletionEvent) theO; - return new EqualsBuilder().appendSuper(super.equals(theO)).append(myRecordsProcessed, that.myRecordsProcessed).append(myRecoveredErrorCount, that.myRecoveredErrorCount).append(myRecoveredWarningMessage, that.myRecoveredWarningMessage).isEquals(); + return new EqualsBuilder() + .appendSuper(super.equals(theO)) + .append(myRecordsProcessed, that.myRecordsProcessed) + .append(myRecoveredErrorCount, that.myRecoveredErrorCount) + .append(myRecoveredWarningMessage, that.myRecoveredWarningMessage) + .isEquals(); } @Override public int hashCode() { - return new HashCodeBuilder(17, 37).appendSuper(super.hashCode()).append(myRecordsProcessed).append(myRecoveredErrorCount).append(myRecoveredWarningMessage).toHashCode(); + return new HashCodeBuilder(17, 37) + .appendSuper(super.hashCode()) + .append(myRecordsProcessed) + .append(myRecoveredErrorCount) + .append(myRecoveredWarningMessage) + .toHashCode(); } } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunkCreateEvent.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunkCreateEvent.java index d0a06aa46a6..c381711c5db 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunkCreateEvent.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunkCreateEvent.java @@ -47,7 +47,13 @@ public class WorkChunkCreateEvent { * @param theInstanceId The instance ID associated with this chunk * @param theSerializedData The data. This will be in the form of a map where the values may be strings, lists, and other maps (i.e. 
JSON) */ - public WorkChunkCreateEvent(@Nonnull String theJobDefinitionId, int theJobDefinitionVersion, @Nonnull String theTargetStepId, @Nonnull String theInstanceId, int theSequence, @Nullable String theSerializedData) { + public WorkChunkCreateEvent( + @Nonnull String theJobDefinitionId, + int theJobDefinitionVersion, + @Nonnull String theTargetStepId, + @Nonnull String theInstanceId, + int theSequence, + @Nullable String theSerializedData) { jobDefinitionId = theJobDefinitionId; jobDefinitionVersion = theJobDefinitionVersion; targetStepId = theTargetStepId; @@ -60,7 +66,7 @@ public class WorkChunkCreateEvent { String firstStepId = theJobDefinition.getFirstStepId(); String jobDefinitionId = theJobDefinition.getJobDefinitionId(); int jobDefinitionVersion = theJobDefinition.getJobDefinitionVersion(); - return new WorkChunkCreateEvent(jobDefinitionId, jobDefinitionVersion, firstStepId, theInstanceId, 0, null); + return new WorkChunkCreateEvent(jobDefinitionId, jobDefinitionVersion, firstStepId, theInstanceId, 0, null); } @Override @@ -72,24 +78,24 @@ public class WorkChunkCreateEvent { WorkChunkCreateEvent that = (WorkChunkCreateEvent) theO; return new EqualsBuilder() - .append(jobDefinitionId, that.jobDefinitionId) - .append(jobDefinitionVersion, that.jobDefinitionVersion) - .append(targetStepId, that.targetStepId) - .append(instanceId, that.instanceId) - .append(sequence, that.sequence) - .append(serializedData, that.serializedData) - .isEquals(); + .append(jobDefinitionId, that.jobDefinitionId) + .append(jobDefinitionVersion, that.jobDefinitionVersion) + .append(targetStepId, that.targetStepId) + .append(instanceId, that.instanceId) + .append(sequence, that.sequence) + .append(serializedData, that.serializedData) + .isEquals(); } @Override public int hashCode() { return new HashCodeBuilder(17, 37) - .append(jobDefinitionId) - .append(jobDefinitionVersion) - .append(targetStepId) - .append(instanceId) - .append(sequence) - .append(serializedData) - .toHashCode(); + .append(jobDefinitionId) + .append(jobDefinitionVersion) + .append(targetStepId) + .append(instanceId) + .append(sequence) + .append(serializedData) + .toHashCode(); } } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunkData.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunkData.java index 89e1644cb90..d4c7e92358d 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunkData.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunkData.java @@ -35,5 +35,4 @@ public class WorkChunkData { public OT getData() { return myData; } - } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunkErrorEvent.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunkErrorEvent.java index 99dcb2c1cc9..7c948f12440 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunkErrorEvent.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunkErrorEvent.java @@ -19,7 +19,6 @@ */ package ca.uhn.fhir.batch2.model; - import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; @@ -58,18 +57,18 @@ public class WorkChunkErrorEvent extends BaseWorkChunkEvent { WorkChunkErrorEvent that = (WorkChunkErrorEvent) theO; return new EqualsBuilder() - .appendSuper(super.equals(theO)) - .append(myChunkId, that.myChunkId) - .append(myErrorMsg, that.myErrorMsg) - .isEquals(); + 
.appendSuper(super.equals(theO)) + .append(myChunkId, that.myChunkId) + .append(myErrorMsg, that.myErrorMsg) + .isEquals(); } @Override public int hashCode() { return new HashCodeBuilder(17, 37) - .appendSuper(super.hashCode()) - .append(myChunkId) - .append(myErrorMsg) - .toHashCode(); + .appendSuper(super.hashCode()) + .append(myChunkId) + .append(myErrorMsg) + .toHashCode(); } } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunkStatusEnum.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunkStatusEnum.java index b2621034f19..5e366a67593 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunkStatusEnum.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/model/WorkChunkStatusEnum.java @@ -32,22 +32,26 @@ import java.util.Set; */ public enum WorkChunkStatusEnum { // wipmb For 6.8 Add WAITING for gated, and READY for in db, but not yet sent to channel. - QUEUED, IN_PROGRESS, ERRORED, FAILED, COMPLETED; + QUEUED, + IN_PROGRESS, + ERRORED, + FAILED, + COMPLETED; private static final EnumMap> ourPriorStates; + static { ourPriorStates = new EnumMap<>(WorkChunkStatusEnum.class); - for (WorkChunkStatusEnum nextEnum: WorkChunkStatusEnum.values()) { + for (WorkChunkStatusEnum nextEnum : WorkChunkStatusEnum.values()) { ourPriorStates.put(nextEnum, EnumSet.noneOf(WorkChunkStatusEnum.class)); } - for (WorkChunkStatusEnum nextPriorEnum: WorkChunkStatusEnum.values()) { - for (WorkChunkStatusEnum nextEnum: nextPriorEnum.getNextStates()) { + for (WorkChunkStatusEnum nextPriorEnum : WorkChunkStatusEnum.values()) { + for (WorkChunkStatusEnum nextEnum : nextPriorEnum.getNextStates()) { ourPriorStates.get(nextEnum).add(nextPriorEnum); } } } - public boolean isIncomplete() { return (this != WorkChunkStatusEnum.COMPLETED); } @@ -60,7 +64,7 @@ public enum WorkChunkStatusEnum { return EnumSet.of(IN_PROGRESS, ERRORED, FAILED, COMPLETED); case ERRORED: return EnumSet.of(IN_PROGRESS, FAILED, COMPLETED); - // terminal states + // terminal states case FAILED: case COMPLETED: default: diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/progress/InstanceProgress.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/progress/InstanceProgress.java index a3dce8b671d..f9fa4165468 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/progress/InstanceProgress.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/progress/InstanceProgress.java @@ -67,8 +67,9 @@ public class InstanceProgress { } private void updateCompletionStatus(WorkChunk theChunk) { - //Update the status map first. - Map statusToCountMap = myStepToStatusCountMap.getOrDefault(theChunk.getTargetStepId(), new HashMap<>()); + // Update the status map first. 
+ Map statusToCountMap = + myStepToStatusCountMap.getOrDefault(theChunk.getTargetStepId(), new HashMap<>()); statusToCountMap.put(theChunk.getStatus(), statusToCountMap.getOrDefault(theChunk.getStatus(), 0) + 1); switch (theChunk.getStatus()) { @@ -94,13 +95,15 @@ public class InstanceProgress { } private void updateLatestEndTime(WorkChunk theChunk) { - if (theChunk.getEndTime() != null && (myLatestEndTime == null || myLatestEndTime.before(theChunk.getEndTime()))) { + if (theChunk.getEndTime() != null + && (myLatestEndTime == null || myLatestEndTime.before(theChunk.getEndTime()))) { myLatestEndTime = theChunk.getEndTime(); } } private void updateEarliestTime(WorkChunk theChunk) { - if (theChunk.getStartTime() != null && (myEarliestStartTime == null || myEarliestStartTime.after(theChunk.getStartTime()))) { + if (theChunk.getStartTime() != null + && (myEarliestStartTime == null || myEarliestStartTime.after(theChunk.getStartTime()))) { myEarliestStartTime = theChunk.getStartTime(); } } @@ -160,7 +163,8 @@ public class InstanceProgress { double throughput = StopWatch.getThroughput(myRecordsProcessed, elapsedTime, TimeUnit.SECONDS); theInstance.setCombinedRecordsProcessedPerSecond(throughput); - String estimatedTimeRemaining = StopWatch.formatEstimatedTimeRemaining(myCompleteChunkCount, getChunkCount(), elapsedTime); + String estimatedTimeRemaining = + StopWatch.formatEstimatedTimeRemaining(myCompleteChunkCount, getChunkCount(), elapsedTime); theInstance.setEstimatedTimeRemaining(estimatedTimeRemaining); } } @@ -172,8 +176,13 @@ public class InstanceProgress { } ourLog.trace("Updating status for instance with errors: {}", myErroredChunkCount); - ourLog.trace("Statistics for job {}: complete/in-progress/errored/failed chunk count {}/{}/{}/{}", - theInstance.getInstanceId(), myCompleteChunkCount, myIncompleteChunkCount, myErroredChunkCount, myFailedChunkCount); + ourLog.trace( + "Statistics for job {}: complete/in-progress/errored/failed chunk count {}/{}/{}/{}", + theInstance.getInstanceId(), + myCompleteChunkCount, + myIncompleteChunkCount, + myErroredChunkCount, + myFailedChunkCount); } private int getChunkCount() { @@ -196,18 +205,16 @@ public class InstanceProgress { @Override public String toString() { ToStringBuilder builder = new ToStringBuilder(this) - .append("myIncompleteChunkCount", myIncompleteChunkCount) - .append("myCompleteChunkCount", myCompleteChunkCount) - .append("myErroredChunkCount", myErroredChunkCount) - .append("myFailedChunkCount", myFailedChunkCount) - .append("myErrormessage", myErrormessage) - .append("myRecordsProcessed", myRecordsProcessed); + .append("myIncompleteChunkCount", myIncompleteChunkCount) + .append("myCompleteChunkCount", myCompleteChunkCount) + .append("myErroredChunkCount", myErroredChunkCount) + .append("myFailedChunkCount", myFailedChunkCount) + .append("myErrormessage", myErrormessage) + .append("myRecordsProcessed", myRecordsProcessed); builder.append("myStepToStatusCountMap", myStepToStatusCountMap); return builder.toString(); - - } public StatusEnum getNewStatus() { diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/progress/JobInstanceProgressCalculator.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/progress/JobInstanceProgressCalculator.java index 402c8f6f44c..1cfde264368 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/progress/JobInstanceProgressCalculator.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/progress/JobInstanceProgressCalculator.java @@ -22,14 +22,13 
@@ package ca.uhn.fhir.batch2.progress; import ca.uhn.fhir.batch2.api.IJobPersistence; import ca.uhn.fhir.batch2.coordinator.JobDefinitionRegistry; import ca.uhn.fhir.batch2.maintenance.JobChunkProgressAccumulator; -import ca.uhn.fhir.batch2.model.StatusEnum; import ca.uhn.fhir.batch2.model.WorkChunk; import ca.uhn.fhir.util.Logs; import ca.uhn.fhir.util.StopWatch; import org.slf4j.Logger; -import javax.annotation.Nonnull; import java.util.Iterator; +import javax.annotation.Nonnull; public class JobInstanceProgressCalculator { private static final Logger ourLog = Logs.getBatchTroubleshootingLog(); @@ -37,7 +36,10 @@ public class JobInstanceProgressCalculator { private final JobChunkProgressAccumulator myProgressAccumulator; private final JobInstanceStatusUpdater myJobInstanceStatusUpdater; - public JobInstanceProgressCalculator(IJobPersistence theJobPersistence, JobChunkProgressAccumulator theProgressAccumulator, JobDefinitionRegistry theJobDefinitionRegistry) { + public JobInstanceProgressCalculator( + IJobPersistence theJobPersistence, + JobChunkProgressAccumulator theProgressAccumulator, + JobDefinitionRegistry theJobDefinitionRegistry) { myJobPersistence = theJobPersistence; myProgressAccumulator = theProgressAccumulator; myJobInstanceStatusUpdater = new JobInstanceStatusUpdater(theJobDefinitionRegistry); @@ -49,13 +51,25 @@ public class JobInstanceProgressCalculator { InstanceProgress instanceProgress = calculateInstanceProgress(theInstanceId); - myJobPersistence.updateInstance(theInstanceId, currentInstance->{ + myJobPersistence.updateInstance(theInstanceId, currentInstance -> { instanceProgress.updateInstance(currentInstance); if (currentInstance.getCombinedRecordsProcessed() > 0) { - ourLog.info("Job {} of type {} has status {} - {} records processed ({}/sec) - ETA: {}", currentInstance.getInstanceId(), currentInstance.getJobDefinitionId(), currentInstance.getStatus(), currentInstance.getCombinedRecordsProcessed(), currentInstance.getCombinedRecordsProcessedPerSecond(), currentInstance.getEstimatedTimeRemaining()); + ourLog.info( + "Job {} of type {} has status {} - {} records processed ({}/sec) - ETA: {}", + currentInstance.getInstanceId(), + currentInstance.getJobDefinitionId(), + currentInstance.getStatus(), + currentInstance.getCombinedRecordsProcessed(), + currentInstance.getCombinedRecordsProcessedPerSecond(), + currentInstance.getEstimatedTimeRemaining()); } else { - ourLog.info("Job {} of type {} has status {} - {} records processed", currentInstance.getInstanceId(), currentInstance.getJobDefinitionId(), currentInstance.getStatus(), currentInstance.getCombinedRecordsProcessed()); + ourLog.info( + "Job {} of type {} has status {} - {} records processed", + currentInstance.getInstanceId(), + currentInstance.getJobDefinitionId(), + currentInstance.getStatus(), + currentInstance.getCombinedRecordsProcessed()); } ourLog.debug(instanceProgress.toString()); @@ -86,5 +100,4 @@ public class JobInstanceProgressCalculator { return instanceProgress; } - } diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/progress/JobInstanceStatusUpdater.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/progress/JobInstanceStatusUpdater.java index b391761fc90..29453f33238 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/progress/JobInstanceStatusUpdater.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/progress/JobInstanceStatusUpdater.java @@ -49,11 +49,21 @@ public class JobInstanceStatusUpdater { return false; } if 
(!StatusEnum.isLegalStateTransition(origStatus, theNewStatus)) { - ourLog.error("Ignoring illegal state transition for job instance {} of type {} from {} to {}", theJobInstance.getInstanceId(), theJobInstance.getJobDefinitionId(), origStatus, theNewStatus); + ourLog.error( + "Ignoring illegal state transition for job instance {} of type {} from {} to {}", + theJobInstance.getInstanceId(), + theJobInstance.getJobDefinitionId(), + origStatus, + theNewStatus); return false; } theJobInstance.setStatus(theNewStatus); - ourLog.debug("Updating job instance {} of type {} from {} to {}", theJobInstance.getInstanceId(), theJobInstance.getJobDefinitionId(), origStatus, theNewStatus); + ourLog.debug( + "Updating job instance {} of type {} from {} to {}", + theJobInstance.getInstanceId(), + theJobInstance.getJobDefinitionId(), + origStatus, + theNewStatus); handleStatusChange(theJobInstance); return true; @@ -80,7 +90,10 @@ public class JobInstanceStatusUpdater { } } - private void invokeCompletionHandler(JobInstance theJobInstance, JobDefinition theJobDefinition, IJobCompletionHandler theJobCompletionHandler) { + private void invokeCompletionHandler( + JobInstance theJobInstance, + JobDefinition theJobDefinition, + IJobCompletionHandler theJobCompletionHandler) { if (theJobCompletionHandler == null) { return; } @@ -88,5 +101,4 @@ public class JobInstanceStatusUpdater { JobCompletionDetails completionDetails = new JobCompletionDetails<>(jobParameters, theJobInstance); theJobCompletionHandler.jobComplete(completionDetails); } - } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/BundleIterable.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/BundleIterable.java index 23dc63bbeba..0716e87f71d 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/BundleIterable.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/BundleIterable.java @@ -23,21 +23,19 @@ import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.api.server.RequestDetails; import org.hl7.fhir.instance.model.api.IBaseResource; -import javax.annotation.concurrent.NotThreadSafe; import java.util.Iterator; import java.util.List; +import javax.annotation.concurrent.NotThreadSafe; /** * This class leverages IBundleProvider Iterable to provide an iterator for processing bundle search results into manageable paginated chunks. 
This helped to avoid loading large result sets into lists **/ - @NotThreadSafe public class BundleIterable implements Iterable { private final IBundleProvider sourceBundleProvider; private final RequestDetails requestDetails; - public BundleIterable(RequestDetails requestDetails, IBundleProvider bundleProvider) { this.sourceBundleProvider = bundleProvider; this.requestDetails = requestDetails; @@ -60,7 +58,6 @@ public class BundleIterable implements Iterable { private int currentResourceListIndex = 0; - public BundleIterator(RequestDetails requestDetails, IBundleProvider bundleProvider) { this.bundleProvider = bundleProvider; this.requestDetails = requestDetails; @@ -71,7 +68,7 @@ public class BundleIterable implements Iterable { this.currentResourceList = this.bundleProvider.getResources(offset, increment + offset); // next offset created offset += increment; - //restart counter on new chunk + // restart counter on new chunk currentResourceListIndex = 0; } @@ -94,7 +91,6 @@ public class BundleIterable implements Iterable { return this.hasNext(); } - @Override public IBaseResource next() { assert this.currentResourceListIndex < this.currentResourceList.size(); @@ -105,4 +101,3 @@ public class BundleIterable implements Iterable { } } } - diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/CodeCacheResourceChangeListener.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/CodeCacheResourceChangeListener.java index eba7f902b08..2ddc2d36457 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/CodeCacheResourceChangeListener.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/CodeCacheResourceChangeListener.java @@ -41,16 +41,16 @@ import java.util.function.Function; **/ public class CodeCacheResourceChangeListener implements IResourceChangeListener { - private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory - .getLogger(CodeCacheResourceChangeListener.class); + private static final org.slf4j.Logger ourLog = + org.slf4j.LoggerFactory.getLogger(CodeCacheResourceChangeListener.class); private final IFhirResourceDao myValueSetDao; private final Map> myGlobalCodeCache; private final Function myUrlFunction; private final Function myVersionFunction; - public CodeCacheResourceChangeListener(DaoRegistry theDaoRegistry, - Map> theGlobalCodeCache) { + public CodeCacheResourceChangeListener( + DaoRegistry theDaoRegistry, Map> theGlobalCodeCache) { this.myValueSetDao = theDaoRegistry.getResourceDao("ValueSet"); this.myGlobalCodeCache = theGlobalCodeCache; this.myUrlFunction = Reflections.getUrlFunction(myValueSetDao.getResourceType()); @@ -94,8 +94,9 @@ public class CodeCacheResourceChangeListener implements IResourceChangeListener // This happens when a Library is deleted entirely, so it's impossible to look up // name and version. catch (ResourceGoneException | ResourceNotFoundException e) { - ourLog.debug("Failed to locate resource {} to look up url and version. Clearing all codes from cache.", - theId.getValueAsString()); + ourLog.debug( + "Failed to locate resource {} to look up url and version. 
Clearing all codes from cache.", + theId.getValueAsString()); this.myGlobalCodeCache.clear(); return; } @@ -103,7 +104,6 @@ public class CodeCacheResourceChangeListener implements IResourceChangeListener String url = this.myUrlFunction.apply(valueSet); String version = this.myVersionFunction.apply(valueSet); - this.myGlobalCodeCache.remove(new VersionedIdentifier().withId(url) - .withVersion(version)); + this.myGlobalCodeCache.remove(new VersionedIdentifier().withId(url).withVersion(version)); } } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/CqlExceptionHandlingInterceptor.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/CqlExceptionHandlingInterceptor.java index 9e78e9eae3f..fae68b2bdb6 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/CqlExceptionHandlingInterceptor.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/CqlExceptionHandlingInterceptor.java @@ -27,10 +27,10 @@ import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; import org.opencds.cqf.cql.engine.exception.CqlException; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.lang.reflect.InvocationTargetException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; /** * This class represents clinical reasoning interceptor used for cql exception handling and logging @@ -39,8 +39,12 @@ import java.lang.reflect.InvocationTargetException; public class CqlExceptionHandlingInterceptor { @Hook(Pointcut.SERVER_HANDLE_EXCEPTION) - public boolean handleException(RequestDetails theRequestDetails, BaseServerResponseException theException, - HttpServletRequest theServletRequest, HttpServletResponse theServletResponse) throws IOException { + public boolean handleException( + RequestDetails theRequestDetails, + BaseServerResponseException theException, + HttpServletRequest theServletRequest, + HttpServletResponse theServletResponse) + throws IOException { CqlException cqlException = getCqlException(theException); if (cqlException == null) { @@ -84,7 +88,8 @@ public class CqlExceptionHandlingInterceptor { String message = theCqlException.getMessage(); if (theCqlException.getSourceLocator() != null) { - message += "\nat CQL source location: " + theCqlException.getSourceLocator().toString(); + message += "\nat CQL source location: " + + theCqlException.getSourceLocator().toString(); } if (theCqlException.getCause() != null) { @@ -98,7 +103,7 @@ public class CqlExceptionHandlingInterceptor { if (theException.getCause() instanceof CqlException) { return (CqlException) theException.getCause(); } else if (theException.getCause() instanceof InvocationTargetException) { - InvocationTargetException ite = (InvocationTargetException)theException.getCause(); + InvocationTargetException ite = (InvocationTargetException) theException.getCause(); if (ite.getCause() instanceof CqlException) { return (CqlException) ite.getCause(); } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/ElmCacheResourceChangeListener.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/ElmCacheResourceChangeListener.java index 4b0e55139dc..e23e3b00b3b 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/ElmCacheResourceChangeListener.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/ElmCacheResourceChangeListener.java @@ -41,16 +41,16 @@ 
import java.util.function.Function; **/ public class ElmCacheResourceChangeListener implements IResourceChangeListener { - private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory - .getLogger(ElmCacheResourceChangeListener.class); + private static final org.slf4j.Logger ourLog = + org.slf4j.LoggerFactory.getLogger(ElmCacheResourceChangeListener.class); private final IFhirResourceDao myLibraryDao; private final Map myGlobalLibraryCache; private final Function myNameFunction; private final Function myVersionFunction; - public ElmCacheResourceChangeListener(DaoRegistry theDaoRegistry, - Map theGlobalLibraryCache) { + public ElmCacheResourceChangeListener( + DaoRegistry theDaoRegistry, Map theGlobalLibraryCache) { this.myLibraryDao = theDaoRegistry.getResourceDao("Library"); this.myGlobalLibraryCache = theGlobalLibraryCache; this.myNameFunction = Reflections.getNameFunction(myLibraryDao.getResourceType()); @@ -90,8 +90,7 @@ public class ElmCacheResourceChangeListener implements IResourceChangeListener { IBaseResource library; try { library = this.myLibraryDao.read(theId); - } - catch (ResourceGoneException | ResourceNotFoundException e) { + } catch (ResourceGoneException | ResourceNotFoundException e) { // TODO: This needs to be smarter... the issue is that ELM is cached with // library name and version as the key since // that's the access path the CQL engine uses, but change notifications occur @@ -99,8 +98,9 @@ public class ElmCacheResourceChangeListener implements IResourceChangeListener { // necessarily tied to the resource name. In any event, if a unknown resource is // deleted, clear all libraries as a workaround. // One option is to maintain a cache with multiple indices. - ourLog.debug("Failed to locate resource {} to look up name and version. Clearing all libraries from cache.", - theId.getValueAsString()); + ourLog.debug( + "Failed to locate resource {} to look up name and version. 
Clearing all libraries from cache.", + theId.getValueAsString()); this.myGlobalLibraryCache.clear(); return; } @@ -108,7 +108,6 @@ public class ElmCacheResourceChangeListener implements IResourceChangeListener { String name = this.myNameFunction.apply(library); String version = this.myVersionFunction.apply(library); - this.myGlobalLibraryCache.remove(new VersionedIdentifier().withId(name) - .withVersion(version)); + this.myGlobalLibraryCache.remove(new VersionedIdentifier().withId(name).withVersion(version)); } } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/HapiFhirDal.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/HapiFhirDal.java index 2c958efc38f..c83f1fb2b9d 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/HapiFhirDal.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/HapiFhirDal.java @@ -37,7 +37,7 @@ public class HapiFhirDal implements FhirDal { protected final RequestDetails myRequestDetails; public HapiFhirDal(DaoRegistry theDaoRegistry) { - this(theDaoRegistry,null); + this(theDaoRegistry, null); } public HapiFhirDal(DaoRegistry theDaoRegistry, RequestDetails theRequestDetails) { @@ -63,23 +63,20 @@ public class HapiFhirDal implements FhirDal { @Override public void delete(IIdType theId) { this.myDaoRegistry.getResourceDao(theId.getResourceType()).delete(theId, myRequestDetails); - } // TODO: the search interfaces need some work @Override public Iterable search(String theResourceType) { - var b = this.myDaoRegistry.getResourceDao(theResourceType) - .search(new SearchParameterMap(), myRequestDetails); + var b = this.myDaoRegistry.getResourceDao(theResourceType).search(new SearchParameterMap(), myRequestDetails); return new BundleIterable(myRequestDetails, b); } @Override public Iterable searchByUrl(String theResourceType, String theUrl) { - var b = this.myDaoRegistry.getResourceDao(theResourceType) - .search(new SearchParameterMap().add("url", new UriParam(theUrl)), myRequestDetails); + var b = this.myDaoRegistry + .getResourceDao(theResourceType) + .search(new SearchParameterMap().add("url", new UriParam(theUrl)), myRequestDetails); return new BundleIterable(myRequestDetails, b); } - - } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/HapiFhirRetrieveProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/HapiFhirRetrieveProvider.java index d3200d6df73..fe8b8e89d20 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/HapiFhirRetrieveProvider.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/HapiFhirRetrieveProvider.java @@ -48,13 +48,12 @@ public class HapiFhirRetrieveProvider extends SearchParamFhirRetrieveProvider im private final DaoRegistry myDaoRegistry; private final RequestDetails myRequestDetails; - public HapiFhirRetrieveProvider(DaoRegistry theDaoRegistry, SearchParameterResolver theSearchParameterResolver) { this(theDaoRegistry, theSearchParameterResolver, new SystemRequestDetails()); } - public HapiFhirRetrieveProvider(DaoRegistry registry, SearchParameterResolver searchParameterResolver, - RequestDetails requestDetails) { + public HapiFhirRetrieveProvider( + DaoRegistry registry, SearchParameterResolver searchParameterResolver, RequestDetails requestDetails) { super(searchParameterResolver); this.myDaoRegistry = registry; this.myRequestDetails = requestDetails; @@ -70,7 +69,10 @@ public class HapiFhirRetrieveProvider extends SearchParamFhirRetrieveProvider im private final BiFunction> queryFunc; - public 
QueryIterable(String dataType, List queries, BiFunction> queryFunc) { + public QueryIterable( + String dataType, + List queries, + BiFunction> queryFunc) { this.dataType = dataType; this.queries = queries; this.queryFunc = queryFunc; @@ -85,7 +87,10 @@ public class HapiFhirRetrieveProvider extends SearchParamFhirRetrieveProvider im Iterator currentResult = null; - public QueryIterator(String dataType, List queries, BiFunction> queryFunc) { + public QueryIterator( + String dataType, + List queries, + BiFunction> queryFunc) { this.dataType = dataType; this.queries = queries; this.queryFunc = queryFunc; @@ -117,7 +122,7 @@ public class HapiFhirRetrieveProvider extends SearchParamFhirRetrieveProvider im if (index >= queries.size()) { return null; } - //extract next query result + // extract next query result var result = this.queryFunc.apply(dataType, queries.get(index)).iterator(); index++; return result; @@ -144,7 +149,6 @@ public class HapiFhirRetrieveProvider extends SearchParamFhirRetrieveProvider im for (Map.Entry>> entry : map.entrySet()) { hapiMap.put(entry.getKey(), entry.getValue()); - } } catch (Exception e) { diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/HapiLibrarySourceProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/HapiLibrarySourceProvider.java index 02ee27194b9..71007e09d93 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/HapiLibrarySourceProvider.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/HapiLibrarySourceProvider.java @@ -37,8 +37,7 @@ import java.util.ArrayList; * interface which is used for loading * library resources during CQL evaluation. */ -public class HapiLibrarySourceProvider - implements LibrarySourceProvider, IDaoRegistryUser { +public class HapiLibrarySourceProvider implements LibrarySourceProvider, IDaoRegistryUser { protected final DaoRegistry myDaoRegistry; protected final RequestDetails myRequestDetails; @@ -56,19 +55,17 @@ public class HapiLibrarySourceProvider return this.myDaoRegistry; } - @Override - public InputStream getLibraryContent(VersionedIdentifier theLibraryIdentifier, - LibraryContentType theLibraryContentType) { + public InputStream getLibraryContent( + VersionedIdentifier theLibraryIdentifier, LibraryContentType theLibraryContentType) { String name = theLibraryIdentifier.getId(); String version = theLibraryIdentifier.getVersion(); var libraries = search(getClass("Library"), Searches.byName(name), myRequestDetails); var libraryList = new ArrayList(); - for(var l:libraries){ + for (var l : libraries) { libraryList.add(l); } - IBaseResource library = Versions.selectByVersion(libraryList, version, - Libraries::getVersion); + IBaseResource library = Versions.selectByVersion(libraryList, version, Libraries::getVersion); if (library == null) { return null; diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/HapiTerminologyProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/HapiTerminologyProvider.java index d3f5383f9bb..620b970982b 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/HapiTerminologyProvider.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/HapiTerminologyProvider.java @@ -48,13 +48,15 @@ public class HapiTerminologyProvider implements TerminologyProvider { private final IValidationSupport myValidationSupport; private final Map> myGlobalCodeCache; - public HapiTerminologyProvider(IValidationSupport theValidationSupport, - Map> theGlobalCodeCache) { + public 
HapiTerminologyProvider( + IValidationSupport theValidationSupport, Map> theGlobalCodeCache) { this(theValidationSupport, theGlobalCodeCache, null); } - public HapiTerminologyProvider(IValidationSupport theValidationSupport, - Map> theGlobalCodeCache, RequestDetails theRequestDetails) { + public HapiTerminologyProvider( + IValidationSupport theValidationSupport, + Map> theGlobalCodeCache, + RequestDetails theRequestDetails) { myValidationSupport = theValidationSupport; myGlobalCodeCache = theGlobalCodeCache; } @@ -77,7 +79,8 @@ public class HapiTerminologyProvider implements TerminologyProvider { // This could possibly be refactored into a single call to the underlying HAPI // Terminology service. Need to think through that.., - VersionedIdentifier vsId = new VersionedIdentifier().withId(theValueSet.getId()).withVersion(theValueSet.getVersion()); + VersionedIdentifier vsId = + new VersionedIdentifier().withId(theValueSet.getId()).withVersion(theValueSet.getVersion()); if (this.myGlobalCodeCache.containsKey(vsId)) { return this.myGlobalCodeCache.get(vsId); @@ -87,13 +90,14 @@ public class HapiTerminologyProvider implements TerminologyProvider { valueSetExpansionOptions.setFailOnMissingCodeSystem(false); valueSetExpansionOptions.setCount(Integer.MAX_VALUE); - if (theValueSet.getVersion() != null && Canonicals.getUrl(theValueSet.getId()) != null - && Canonicals.getVersion(theValueSet.getId()) == null) { + if (theValueSet.getVersion() != null + && Canonicals.getUrl(theValueSet.getId()) != null + && Canonicals.getVersion(theValueSet.getId()) == null) { theValueSet.setId(theValueSet.getId() + "|" + theValueSet.getVersion()); } - IValidationSupport.ValueSetExpansionOutcome vs = - myValidationSupport.expandValueSet(new ValidationSupportContext(myValidationSupport), valueSetExpansionOptions, theValueSet.getId()); + IValidationSupport.ValueSetExpansionOutcome vs = myValidationSupport.expandValueSet( + new ValidationSupportContext(myValidationSupport), valueSetExpansionOptions, theValueSet.getId()); List codes = getCodes(vs.getValueSet()); this.myGlobalCodeCache.put(vsId, codes); @@ -104,8 +108,7 @@ public class HapiTerminologyProvider implements TerminologyProvider { public Code lookup(Code theCode, CodeSystemInfo theCodeSystem) throws ResourceNotFoundException { LookupCodeResult cs = myValidationSupport.lookupCode( - new ValidationSupportContext(myValidationSupport), theCodeSystem.getId(), theCode.getCode()); - + new ValidationSupportContext(myValidationSupport), theCodeSystem.getId(), theCode.getCode()); if (cs != null) { theCode.setDisplay(cs.getCodeDisplay()); @@ -132,14 +135,14 @@ public class HapiTerminologyProvider implements TerminologyProvider { case R5: return getCodesR5((org.hl7.fhir.r5.model.ValueSet) theValueSet); default: - throw new IllegalArgumentException(Msg.code(2225) + String.format("FHIR version %s is unsupported.", version.getFhirVersionString())); + throw new IllegalArgumentException(Msg.code(2225) + + String.format("FHIR version %s is unsupported.", version.getFhirVersionString())); } } protected List getCodesDstu2Hl7(org.hl7.fhir.dstu2.model.ValueSet theValueSet) { var codes = new ArrayList(); - for (var vse : theValueSet.getExpansion() - .getContains()) { + for (var vse : theValueSet.getExpansion().getContains()) { codes.add(new Code().withCode(vse.getCode()).withSystem(vse.getSystem())); } @@ -148,8 +151,7 @@ public class HapiTerminologyProvider implements TerminologyProvider { protected List getCodesDstu21(org.hl7.fhir.dstu2016may.model.ValueSet theValueSet) { var codes = 
new ArrayList(); - for (var vse : theValueSet.getExpansion() - .getContains()) { + for (var vse : theValueSet.getExpansion().getContains()) { codes.add(new Code().withCode(vse.getCode()).withSystem(vse.getSystem())); } @@ -158,8 +160,7 @@ public class HapiTerminologyProvider implements TerminologyProvider { protected List getCodesDstu3(org.hl7.fhir.dstu3.model.ValueSet theValueSet) { var codes = new ArrayList(); - for (var vse : theValueSet.getExpansion() - .getContains()) { + for (var vse : theValueSet.getExpansion().getContains()) { codes.add(new Code().withCode(vse.getCode()).withSystem(vse.getSystem())); } @@ -168,8 +169,7 @@ public class HapiTerminologyProvider implements TerminologyProvider { protected List getCodesR4(org.hl7.fhir.r4.model.ValueSet theValueSet) { var codes = new ArrayList(); - for (var vse : theValueSet.getExpansion() - .getContains()) { + for (var vse : theValueSet.getExpansion().getContains()) { codes.add(new Code().withCode(vse.getCode()).withSystem(vse.getSystem())); } @@ -178,19 +178,16 @@ public class HapiTerminologyProvider implements TerminologyProvider { protected List getCodesR4B(org.hl7.fhir.r4b.model.ValueSet theValueSet) { var codes = new ArrayList(); - for (var vse : theValueSet.getExpansion() - .getContains()) { + for (var vse : theValueSet.getExpansion().getContains()) { codes.add(new Code().withCode(vse.getCode()).withSystem(vse.getSystem())); } return codes; - } protected List getCodesR5(org.hl7.fhir.r5.model.ValueSet theValueSet) { var codes = new ArrayList(); - for (var vse : theValueSet.getExpansion() - .getContains()) { + for (var vse : theValueSet.getExpansion().getContains()) { codes.add(new Code().withCode(vse.getCode()).withSystem(vse.getSystem())); } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/IDaoRegistryUser.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/IDaoRegistryUser.java index e6295fbf513..c78929c9f4e 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/IDaoRegistryUser.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/IDaoRegistryUser.java @@ -62,7 +62,8 @@ public interface IDaoRegistryUser { */ @SuppressWarnings("unchecked") default Class getClass(String theResourceName) { - return (Class) getFhirContext().getResourceDefinition(theResourceName).getImplementingClass(); + return (Class) + getFhirContext().getResourceDefinition(theResourceName).getImplementingClass(); } /** @@ -126,8 +127,8 @@ public interface IDaoRegistryUser { default DaoMethodOutcome create(T theResource, RequestDetails requestDetails) { checkNotNull(theResource); - return ((IFhirResourceDao) getDaoRegistry().getResourceDao(theResource.fhirType())).create(theResource, - requestDetails); + return ((IFhirResourceDao) getDaoRegistry().getResourceDao(theResource.fhirType())) + .create(theResource, requestDetails); } /** @@ -155,8 +156,8 @@ public interface IDaoRegistryUser { default DaoMethodOutcome update(T theResource, RequestDetails requestDetails) { checkNotNull(theResource); - return ((IFhirResourceDao) getDaoRegistry().getResourceDao(theResource.fhirType())).update(theResource, - requestDetails); + return ((IFhirResourceDao) getDaoRegistry().getResourceDao(theResource.fhirType())) + .update(theResource, requestDetails); } /** @@ -225,8 +226,8 @@ public interface IDaoRegistryUser { * @param theSearchMap the Search Parameters * @return Bundle provider */ - default Iterable search(Class theResourceClass, - SearchParameterMap theSearchMap) { + default Iterable search( + Class 
theResourceClass, SearchParameterMap theSearchMap) { checkNotNull(theResourceClass); checkNotNull(theSearchMap); @@ -243,9 +244,8 @@ public interface IDaoRegistryUser { * @param theRequestDetails multi-tenancy information * @return Bundle provider */ - default Iterable search(Class theResourceClass, - SearchParameterMap theSearchMap, - RequestDetails theRequestDetails) { + default Iterable search( + Class theResourceClass, SearchParameterMap theSearchMap, RequestDetails theRequestDetails) { checkNotNull(theResourceClass); checkNotNull(theSearchMap); diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/PreExpandedValidationSupport.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/PreExpandedValidationSupport.java index 58ebe97567f..fe816778207 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/PreExpandedValidationSupport.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/PreExpandedValidationSupport.java @@ -33,7 +33,6 @@ import javax.annotation.Nullable; * This class provides an implementation of IValidationSupport * interface which is used for validation of terminology services. */ - public class PreExpandedValidationSupport implements IValidationSupport { private final FhirContext myFhirContext; @@ -42,16 +41,22 @@ public class PreExpandedValidationSupport implements IValidationSupport { } @Override - public ValueSetExpansionOutcome expandValueSet(ValidationSupportContext theValidationSupportContext, - @Nullable ValueSetExpansionOptions theExpansionOptions, @Nonnull IBaseResource theValueSetToExpand) { + public ValueSetExpansionOutcome expandValueSet( + ValidationSupportContext theValidationSupportContext, + @Nullable ValueSetExpansionOptions theExpansionOptions, + @Nonnull IBaseResource theValueSetToExpand) { Validate.notNull(theValueSetToExpand, "theValueSetToExpand must not be null or blank"); - if (!getFhirContext().getResourceDefinition("ValueSet").getChildByName("expansion").getAccessor() - .getValues(theValueSetToExpand).isEmpty()) { + if (!getFhirContext() + .getResourceDefinition("ValueSet") + .getChildByName("expansion") + .getAccessor() + .getValues(theValueSetToExpand) + .isEmpty()) { return new ValueSetExpansionOutcome(theValueSetToExpand); } else { - return IValidationSupport.super.expandValueSet(theValidationSupportContext, theExpansionOptions, - theValueSetToExpand); + return IValidationSupport.super.expandValueSet( + theValidationSupportContext, theExpansionOptions, theValueSetToExpand); } } @@ -59,5 +64,4 @@ public class PreExpandedValidationSupport implements IValidationSupport { public FhirContext getFhirContext() { return this.myFhirContext; } - } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/Searches.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/Searches.java index 0091129cc1e..e565ffe41b6 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/Searches.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/common/Searches.java @@ -48,8 +48,7 @@ public class Searches { /** * Constructor */ - private Searches() { - } + private Searches() {} /** * Creates and returns Parameter search results diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/BaseClinicalReasoningConfig.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/BaseClinicalReasoningConfig.java index ffc3a8725ed..d0f6a28d4e2 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/BaseClinicalReasoningConfig.java +++ 
b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/BaseClinicalReasoningConfig.java @@ -57,7 +57,6 @@ import org.opencds.cqf.cql.evaluator.CqlOptions; import org.opencds.cqf.cql.evaluator.builder.DataProviderComponents; import org.opencds.cqf.cql.evaluator.builder.EndpointInfo; import org.opencds.cqf.cql.evaluator.cql2elm.util.LibraryVersionSelector; -import org.opencds.cqf.cql.evaluator.engine.execution.CacheAwareLibraryLoaderDecorator; import org.opencds.cqf.cql.evaluator.engine.execution.TranslatingLibraryLoader; import org.opencds.cqf.cql.evaluator.engine.model.CachingModelResolverDecorator; import org.opencds.cqf.cql.evaluator.engine.retrieve.BundleRetrieveProvider; @@ -82,15 +81,18 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Executor; import java.util.concurrent.ForkJoinPool; - @Configuration @Import({AdapterConfiguration.class, BaseRepositoryConfig.class}) public abstract class BaseClinicalReasoningConfig { private static final Logger ourLogger = LoggerFactory.getLogger(BaseClinicalReasoningConfig.class); - @Bean - EvaluationSettings evaluationSettings(CqlOptions theCqlOptions, Map theGlobalModelCache, Map theGlobalLibraryCache) { + @Bean + EvaluationSettings evaluationSettings( + CqlOptions theCqlOptions, + Map theGlobalModelCache, + Map + theGlobalLibraryCache) { var evaluationSettings = new EvaluationSettings(); evaluationSettings.setCqlOptions(theCqlOptions); evaluationSettings.setModelCache(theGlobalModelCache); @@ -98,13 +100,17 @@ public abstract class BaseClinicalReasoningConfig { return evaluationSettings; } + @Bean CrProviderFactory cqlProviderFactory() { return new CrProviderFactory(); } @Bean - CrProviderLoader cqlProviderLoader(FhirContext theFhirContext, ResourceProviderFactory theResourceProviderFactory, CrProviderFactory theCqlProviderFactory) { + CrProviderLoader cqlProviderLoader( + FhirContext theFhirContext, + ResourceProviderFactory theResourceProviderFactory, + CrProviderFactory theCqlProviderFactory) { return new CrProviderLoader(theFhirContext, theResourceProviderFactory, theCqlProviderFactory); } @@ -139,15 +145,18 @@ public abstract class BaseClinicalReasoningConfig { } @Bean - public CqlTranslatorOptions cqlTranslatorOptions(FhirContext theFhirContext, CrProperties.CqlProperties theCqlProperties) { + public CqlTranslatorOptions cqlTranslatorOptions( + FhirContext theFhirContext, CrProperties.CqlProperties theCqlProperties) { CqlTranslatorOptions options = theCqlProperties.getCqlOptions().getCqlTranslatorOptions(); if (theFhirContext.getVersion().getVersion().isOlderThan(FhirVersionEnum.R4) - && (options.getCompatibilityLevel().equals("1.5") || options.getCompatibilityLevel().equals("1.4"))) { - ourLogger.warn("{} {} {}", - "This server is configured to use CQL version > 1.4 and FHIR version <= DSTU3.", - "Most available CQL content for DSTU3 and below is for CQL versions 1.3.", - "If your CQL content causes translation errors, try setting the CQL compatibility level to 1.3"); + && (options.getCompatibilityLevel().equals("1.5") + || options.getCompatibilityLevel().equals("1.4"))) { + ourLogger.warn( + "{} {} {}", + "This server is configured to use CQL version > 1.4 and FHIR version <= DSTU3.", + "Most available CQL content for DSTU3 and below is for CQL versions 1.3.", + "If your CQL content causes translation errors, try setting the CQL compatibility level to 1.3"); } return options; @@ -155,8 +164,7 @@ public abstract class BaseClinicalReasoningConfig { @Bean @Scope("prototype") - public ModelManager 
modelManager( - Map theGlobalModelCache) { + public ModelManager modelManager(Map theGlobalModelCache) { return new ModelManager(theGlobalModelCache); } @@ -171,10 +179,13 @@ public abstract class BaseClinicalReasoningConfig { } @Bean - IDataProviderFactory dataProviderFactory(ModelResolver theModelResolver, DaoRegistry theDaoRegistry, - SearchParameterResolver theSearchParameterResolver) { + IDataProviderFactory dataProviderFactory( + ModelResolver theModelResolver, + DaoRegistry theDaoRegistry, + SearchParameterResolver theSearchParameterResolver) { return (rd, t) -> { - HapiFhirRetrieveProvider provider = new HapiFhirRetrieveProvider(theDaoRegistry, theSearchParameterResolver, rd); + HapiFhirRetrieveProvider provider = + new HapiFhirRetrieveProvider(theDaoRegistry, theSearchParameterResolver, rd); if (t != null) { provider.setTerminologyProvider(t); provider.setExpandValueSets(true); @@ -186,7 +197,8 @@ public abstract class BaseClinicalReasoningConfig { } @Bean - org.opencds.cqf.cql.evaluator.builder.DataProviderFactory builderDataProviderFactory(FhirContext theFhirContext, ModelResolver theModelResolver) { + org.opencds.cqf.cql.evaluator.builder.DataProviderFactory builderDataProviderFactory( + FhirContext theFhirContext, ModelResolver theModelResolver) { return new org.opencds.cqf.cql.evaluator.builder.DataProviderFactory() { @Override public DataProviderComponents create(EndpointInfo theEndpointInfo) { @@ -196,25 +208,25 @@ public abstract class BaseClinicalReasoningConfig { @Override public DataProviderComponents create(IBaseBundle theDataBundle) { - return new DataProviderComponents(Constants.FHIR_MODEL_URI, theModelResolver, - new BundleRetrieveProvider(theFhirContext, theDataBundle)); + return new DataProviderComponents( + Constants.FHIR_MODEL_URI, + theModelResolver, + new BundleRetrieveProvider(theFhirContext, theDataBundle)); } }; - } @Bean - public HapiFhirRetrieveProvider fhirRetrieveProvider(DaoRegistry theDaoRegistry, - SearchParameterResolver theSearchParameterResolver) { + public HapiFhirRetrieveProvider fhirRetrieveProvider( + DaoRegistry theDaoRegistry, SearchParameterResolver theSearchParameterResolver) { return new HapiFhirRetrieveProvider(theDaoRegistry, theSearchParameterResolver); } @Bean public ITerminologyProviderFactory terminologyProviderFactory( - IValidationSupport theValidationSupport, - Map> theGlobalCodeCache) { - return rd -> new HapiTerminologyProvider(theValidationSupport, theGlobalCodeCache, - rd); + IValidationSupport theValidationSupport, + Map> theGlobalCodeCache) { + return rd -> new HapiTerminologyProvider(theValidationSupport, theGlobalCodeCache, rd); } @Bean @@ -225,10 +237,12 @@ public abstract class BaseClinicalReasoningConfig { @Bean @Scope("prototype") ILibraryLoaderFactory libraryLoaderFactory( - Map theGlobalLibraryCache, - ModelManager theModelManager, CqlTranslatorOptions theCqlTranslatorOptions, CrProperties.CqlProperties theCqlProperties) { + Map + theGlobalLibraryCache, + ModelManager theModelManager, + CqlTranslatorOptions theCqlTranslatorOptions, + CrProperties.CqlProperties theCqlProperties) { return lcp -> { - if (theCqlProperties.getCqlOptions().useEmbeddedLibraries()) { lcp.add(new FhirLibrarySourceProvider()); } @@ -239,7 +253,8 @@ public abstract class BaseClinicalReasoningConfig { // TODO: Use something like caffeine caching for this so that growth is limited. 
@Bean - public Map globalLibraryCache() { + public Map + globalLibraryCache() { return new ConcurrentHashMap<>(); } @@ -256,22 +271,27 @@ public abstract class BaseClinicalReasoningConfig { @Bean @Primary public ElmCacheResourceChangeListener elmCacheResourceChangeListener( - IResourceChangeListenerRegistry theResourceChangeListenerRegistry, DaoRegistry theDaoRegistry, - Map theGlobalLibraryCache) { - ElmCacheResourceChangeListener listener = new ElmCacheResourceChangeListener(theDaoRegistry, theGlobalLibraryCache); - theResourceChangeListenerRegistry.registerResourceResourceChangeListener("Library", - SearchParameterMap.newSynchronous(), listener, 1000); + IResourceChangeListenerRegistry theResourceChangeListenerRegistry, + DaoRegistry theDaoRegistry, + Map + theGlobalLibraryCache) { + ElmCacheResourceChangeListener listener = + new ElmCacheResourceChangeListener(theDaoRegistry, theGlobalLibraryCache); + theResourceChangeListenerRegistry.registerResourceResourceChangeListener( + "Library", SearchParameterMap.newSynchronous(), listener, 1000); return listener; } @Bean @Primary public CodeCacheResourceChangeListener codeCacheResourceChangeListener( - IResourceChangeListenerRegistry theResourceChangeListenerRegistry, DaoRegistry theDaoRegistry, - Map> theGlobalCodeCache) { - CodeCacheResourceChangeListener listener = new CodeCacheResourceChangeListener(theDaoRegistry, theGlobalCodeCache); - theResourceChangeListenerRegistry.registerResourceResourceChangeListener("ValueSet", - SearchParameterMap.newSynchronous(), listener, 1000); + IResourceChangeListenerRegistry theResourceChangeListenerRegistry, + DaoRegistry theDaoRegistry, + Map> theGlobalCodeCache) { + CodeCacheResourceChangeListener listener = + new CodeCacheResourceChangeListener(theDaoRegistry, theGlobalCodeCache); + theResourceChangeListenerRegistry.registerResourceResourceChangeListener( + "ValueSet", SearchParameterMap.newSynchronous(), listener, 1000); return listener; } @@ -283,7 +303,9 @@ public abstract class BaseClinicalReasoningConfig { case DSTU3: return new CachingModelResolverDecorator(new Dstu3FhirModelResolver()); default: - throw new IllegalStateException(Msg.code(2224) + "CQL support not yet implemented for this FHIR version. Please change versions or disable the CQL plugin."); + throw new IllegalStateException( + Msg.code(2224) + + "CQL support not yet implemented for this FHIR version. 
Please change versions or disable the CQL plugin."); } } @@ -295,17 +317,15 @@ public abstract class BaseClinicalReasoningConfig { @Bean public Executor cqlExecutor() { CqlForkJoinWorkerThreadFactory factory = new CqlForkJoinWorkerThreadFactory(); - ForkJoinPool myCommonPool = new ForkJoinPool(Math.min(32767, Runtime.getRuntime().availableProcessors()), - factory, - null, false); + ForkJoinPool myCommonPool = + new ForkJoinPool(Math.min(32767, Runtime.getRuntime().availableProcessors()), factory, null, false); - return new DelegatingSecurityContextExecutor(myCommonPool, - SecurityContextHolder.getContext()); + return new DelegatingSecurityContextExecutor(myCommonPool, SecurityContextHolder.getContext()); } @Bean - public PreExpandedValidationSupportLoader preExpandedValidationSupportLoader(ValidationSupportChain theSupportChain, - FhirContext theFhirContext) { + public PreExpandedValidationSupportLoader preExpandedValidationSupportLoader( + ValidationSupportChain theSupportChain, FhirContext theFhirContext) { return new PreExpandedValidationSupportLoader(theSupportChain, theFhirContext); } } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/BaseRepositoryConfig.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/BaseRepositoryConfig.java index 4b64e3ef696..5e0b6b7747e 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/BaseRepositoryConfig.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/BaseRepositoryConfig.java @@ -23,10 +23,8 @@ import ca.uhn.fhir.cr.common.IRepositoryFactory; import ca.uhn.fhir.cr.repo.HapiFhirRepository; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.rest.server.RestfulServer; -import org.opencds.cqf.cql.evaluator.spring.fhir.adapter.AdapterConfiguration; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Import; @Configuration public abstract class BaseRepositoryConfig { diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/CrDstu3Config.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/CrDstu3Config.java index 7847014dff7..9c09900c051 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/CrDstu3Config.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/CrDstu3Config.java @@ -35,7 +35,8 @@ import java.util.function.Function; public class CrDstu3Config { @Bean - public Function dstu3MeasureServiceFactory(ApplicationContext theApplicationContext) { + public Function dstu3MeasureServiceFactory( + ApplicationContext theApplicationContext) { return r -> { var ms = theApplicationContext.getBean(MeasureService.class); ms.setRequestDetails(r); @@ -53,5 +54,4 @@ public class CrDstu3Config { public MeasureOperationsProvider dstu3measureOperationsProvider() { return new MeasureOperationsProvider(); } - } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/CrProperties.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/CrProperties.java index 3306c3acd85..58c4d845b5a 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/CrProperties.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/CrProperties.java @@ -30,10 +30,10 @@ public class CrProperties { private MeasureProperties myMeasureProperties; private CqlProperties myCqlProperties = new CqlProperties(); - public CrProperties () { + public CrProperties() { this.myMeasureProperties = new MeasureProperties(); - }; - + 
} + ; public boolean isCqlEnabled() { return myCqlEnabled; @@ -67,10 +67,10 @@ public class CrProperties { public MeasureProperties() { myMeasureEvaluationOptions = MeasureEvaluationOptions.defaultOptions(); - }; + } + ; - - //care gaps + // care gaps public boolean getThreadedCareGapsEnabled() { return myThreadedCareGapsEnabled; } @@ -78,11 +78,12 @@ public class CrProperties { public void setThreadedCareGapsEnabled(boolean theThreadedCareGapsEnabled) { myThreadedCareGapsEnabled = theThreadedCareGapsEnabled; } + public boolean isThreadedCareGapsEnabled() { return myThreadedCareGapsEnabled; } - //report configuration + // report configuration public MeasureReportConfiguration getMeasureReportConfiguration() { return myMeasureReportConfiguration; } @@ -91,8 +92,7 @@ public class CrProperties { myMeasureReportConfiguration = theMeasureReport; } - - //measure evaluations + // measure evaluations public void setMeasureEvaluationOptions(MeasureEvaluationOptions theMeasureEvaluation) { myMeasureEvaluationOptions = theMeasureEvaluation; } @@ -141,10 +141,8 @@ public class CrProperties { myCareGapsCompositionSectionAuthor = theCareGapsCompositionSectionAuthor; } } - } - public static class CqlProperties { private boolean myCqlUseOfEmbeddedLibraries = true; @@ -152,7 +150,6 @@ public class CrProperties { private CqlEngineOptions myCqlRuntimeOptions = CqlEngineOptions.defaultOptions(); private CqlTranslatorOptions myCqlTranslatorOptions = CqlTranslatorOptions.defaultOptions(); - public boolean isCqlUseOfEmbeddedLibraries() { return myCqlUseOfEmbeddedLibraries; } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/CrProviderFactory.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/CrProviderFactory.java index d785ab2e6a0..2a73d73c6dd 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/CrProviderFactory.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/CrProviderFactory.java @@ -45,7 +45,8 @@ public class CrProviderFactory { case R4: return myApplicationContext.getBean(ca.uhn.fhir.cr.r4.measure.MeasureOperationsProvider.class); default: - throw new ConfigurationException(Msg.code(1654) + "CQL is not supported for FHIR version " + myFhirContext.getVersion().getVersion()); + throw new ConfigurationException(Msg.code(1654) + "CQL is not supported for FHIR version " + + myFhirContext.getVersion().getVersion()); } } } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/CrProviderLoader.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/CrProviderLoader.java index 24080337b28..74e356ff887 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/CrProviderLoader.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/CrProviderLoader.java @@ -39,7 +39,10 @@ public class CrProviderLoader { private final ResourceProviderFactory myResourceProviderFactory; private final CrProviderFactory myCqlProviderFactory; - public CrProviderLoader(FhirContext theFhirContext, ResourceProviderFactory theResourceProviderFactory, CrProviderFactory theCqlProviderFactory) { + public CrProviderLoader( + FhirContext theFhirContext, + ResourceProviderFactory theResourceProviderFactory, + CrProviderFactory theCqlProviderFactory) { myFhirContext = theFhirContext; myResourceProviderFactory = theResourceProviderFactory; myCqlProviderFactory = theCqlProviderFactory; @@ -54,7 +57,8 @@ public class CrProviderLoader { myResourceProviderFactory.addSupplier(() -> myCqlProviderFactory.getMeasureOperationsProvider()); 
break; default: - throw new ConfigurationException(Msg.code(1653) + "CQL not supported for FHIR version " + myFhirContext.getVersion().getVersion()); + throw new ConfigurationException(Msg.code(1653) + "CQL not supported for FHIR version " + + myFhirContext.getVersion().getVersion()); } } } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/CrR4Config.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/CrR4Config.java index 7c7aee0f540..9b1e2f0277e 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/CrR4Config.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/CrR4Config.java @@ -62,9 +62,11 @@ public class CrR4Config { } @Bean - public Function r4CareGapsServiceFactory(Function theR4MeasureServiceFactory, - CrProperties theCrProperties, - DaoRegistry theDaoRegistry, Executor cqlExecutor) { + public Function r4CareGapsServiceFactory( + Function theR4MeasureServiceFactory, + CrProperties theCrProperties, + DaoRegistry theDaoRegistry, + Executor cqlExecutor) { return r -> { var ms = theR4MeasureServiceFactory.apply(r); var cs = new CareGapsService(theCrProperties, ms, theDaoRegistry, cqlExecutor, r); @@ -73,18 +75,18 @@ public class CrR4Config { } @Bean - public CareGapsOperationProvider r4CareGapsProvider(Function theCareGapsServiceFunction){ + public CareGapsOperationProvider r4CareGapsProvider( + Function theCareGapsServiceFunction) { return new CareGapsOperationProvider(theCareGapsServiceFunction); } @Bean - public ISubmitDataService r4SubmitDataService(DaoRegistry theDaoRegistry){ + public ISubmitDataService r4SubmitDataService(DaoRegistry theDaoRegistry) { return requestDetails -> new SubmitDataService(theDaoRegistry, requestDetails); } @Bean - public SubmitDataProvider r4SubmitDataProvider(ISubmitDataService theSubmitDataService){ + public SubmitDataProvider r4SubmitDataProvider(ISubmitDataService theSubmitDataService) { return new SubmitDataProvider(theSubmitDataService); } - } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/PreExpandedValidationSupportLoader.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/PreExpandedValidationSupportLoader.java index 8c410c131e7..87f1936093e 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/PreExpandedValidationSupportLoader.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/config/PreExpandedValidationSupportLoader.java @@ -20,17 +20,15 @@ package ca.uhn.fhir.cr.config; import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.context.support.IValidationSupport; import ca.uhn.fhir.cr.common.PreExpandedValidationSupport; -import org.hl7.fhir.common.hapi.validation.support.BaseValidationSupportWrapper; import org.hl7.fhir.common.hapi.validation.support.ValidationSupportChain; /** * This class loads the validation of terminology services. 
*/ - public class PreExpandedValidationSupportLoader { - public PreExpandedValidationSupportLoader(ValidationSupportChain theValidationSupportChain, FhirContext theFhirContext) { + public PreExpandedValidationSupportLoader( + ValidationSupportChain theValidationSupportChain, FhirContext theFhirContext) { var preExpandedValidationSupport = new PreExpandedValidationSupport(theFhirContext); theValidationSupportChain.addValidationSupport(0, preExpandedValidationSupport); } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/constant/CareCapsConstants.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/constant/CareCapsConstants.java index c4789cbc952..f709b030785 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/constant/CareCapsConstants.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/constant/CareCapsConstants.java @@ -20,12 +20,18 @@ package ca.uhn.fhir.cr.constant; public class CareCapsConstants { - private CareCapsConstants(){} - public static final String CARE_GAPS_REPORT_PROFILE = "http://hl7.org/fhir/us/davinci-deqm/StructureDefinition/indv-measurereport-deqm"; - public static final String CARE_GAPS_BUNDLE_PROFILE = "http://hl7.org/fhir/us/davinci-deqm/StructureDefinition/gaps-bundle-deqm"; - public static final String CARE_GAPS_COMPOSITION_PROFILE = "http://hl7.org/fhir/us/davinci-deqm/StructureDefinition/gaps-composition-deqm"; - public static final String CARE_GAPS_DETECTED_ISSUE_PROFILE = "http://hl7.org/fhir/us/davinci-deqm/StructureDefinition/gaps-detectedissue-deqm"; - public static final String CARE_GAPS_GAP_STATUS_EXTENSION = "http://hl7.org/fhir/us/davinci-deqm/StructureDefinition/extension-gapStatus"; - public static final String CARE_GAPS_GAP_STATUS_SYSTEM = "http://hl7.org/fhir/us/davinci-deqm/CodeSystem/gaps-status"; + private CareCapsConstants() {} + public static final String CARE_GAPS_REPORT_PROFILE = + "http://hl7.org/fhir/us/davinci-deqm/StructureDefinition/indv-measurereport-deqm"; + public static final String CARE_GAPS_BUNDLE_PROFILE = + "http://hl7.org/fhir/us/davinci-deqm/StructureDefinition/gaps-bundle-deqm"; + public static final String CARE_GAPS_COMPOSITION_PROFILE = + "http://hl7.org/fhir/us/davinci-deqm/StructureDefinition/gaps-composition-deqm"; + public static final String CARE_GAPS_DETECTED_ISSUE_PROFILE = + "http://hl7.org/fhir/us/davinci-deqm/StructureDefinition/gaps-detectedissue-deqm"; + public static final String CARE_GAPS_GAP_STATUS_EXTENSION = + "http://hl7.org/fhir/us/davinci-deqm/StructureDefinition/extension-gapStatus"; + public static final String CARE_GAPS_GAP_STATUS_SYSTEM = + "http://hl7.org/fhir/us/davinci-deqm/CodeSystem/gaps-status"; } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/constant/HtmlConstants.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/constant/HtmlConstants.java index ac0363c68ca..446f961fc86 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/constant/HtmlConstants.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/constant/HtmlConstants.java @@ -21,7 +21,8 @@ package ca.uhn.fhir.cr.constant; public class HtmlConstants { - private HtmlConstants(){} + private HtmlConstants() {} + public static final String HTML_DIV_CONTENT = "

    %s
    "; public static final String HTML_PARAGRAPH_CONTENT = "
    %s
    "; public static final String HTML_DIV_PARAGRAPH_CONTENT = String.format(HTML_DIV_CONTENT, HTML_PARAGRAPH_CONTENT); diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/constant/MeasureReportConstants.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/constant/MeasureReportConstants.java index 5a71cdc2db4..bf980d17131 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/constant/MeasureReportConstants.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/constant/MeasureReportConstants.java @@ -26,15 +26,20 @@ public class MeasureReportConstants { private MeasureReportConstants() {} - public static final String MEASUREREPORT_IMPROVEMENT_NOTATION_SYSTEM = "http://terminology.hl7.org/CodeSystem/measure-improvement-notation"; - public static final String MEASUREREPORT_MEASURE_POPULATION_SYSTEM = "http://terminology.hl7.org/CodeSystem/measure-population"; - public static final String MEASUREREPORT_MEASURE_SUPPLEMENTALDATA_EXTENSION = "http://hl7.org/fhir/us/davinci-deqm/StructureDefinition/extension-supplementalData"; - public static final String MEASUREREPORT_SUPPLEMENTALDATA_SEARCHPARAMETER_URL = "http://hl7.org/fhir/us/davinci-deqm/SearchParameter/measurereport-supplemental-data"; - public static final String MEASUREREPORT_PRODUCT_LINE_EXT_URL = "http://hl7.org/fhir/us/cqframework/cqfmeasures/StructureDefinition/cqfm-productLine"; + public static final String MEASUREREPORT_IMPROVEMENT_NOTATION_SYSTEM = + "http://terminology.hl7.org/CodeSystem/measure-improvement-notation"; + public static final String MEASUREREPORT_MEASURE_POPULATION_SYSTEM = + "http://terminology.hl7.org/CodeSystem/measure-population"; + public static final String MEASUREREPORT_MEASURE_SUPPLEMENTALDATA_EXTENSION = + "http://hl7.org/fhir/us/davinci-deqm/StructureDefinition/extension-supplementalData"; + public static final String MEASUREREPORT_SUPPLEMENTALDATA_SEARCHPARAMETER_URL = + "http://hl7.org/fhir/us/davinci-deqm/SearchParameter/measurereport-supplemental-data"; + public static final String MEASUREREPORT_PRODUCT_LINE_EXT_URL = + "http://hl7.org/fhir/us/cqframework/cqfmeasures/StructureDefinition/cqfm-productLine"; public static final String MEASUREREPORT_SUPPLEMENTALDATA_SEARCHPARAMETER_VERSION = "0.1.0"; - public static final Date MEASUREREPORT_SUPPLEMENTALDATA_SEARCHPARAMETER_DEFINITION_DATE = Date.valueOf(LocalDate.of(2022, 7, 20)); + public static final Date MEASUREREPORT_SUPPLEMENTALDATA_SEARCHPARAMETER_DEFINITION_DATE = + Date.valueOf(LocalDate.of(2022, 7, 20)); public static final String COUNTRY_CODING_SYSTEM_CODE = "urn:iso:std:iso:3166"; public static final String US_COUNTRY_CODE = "US"; public static final String US_COUNTRY_DISPLAY = "United States of America"; - } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/activitydefinition/ActivityDefinitionOperationsProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/activitydefinition/ActivityDefinitionOperationsProvider.java index 5b783e3a9a0..cccd829b7f1 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/activitydefinition/ActivityDefinitionOperationsProvider.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/activitydefinition/ActivityDefinitionOperationsProvider.java @@ -21,7 +21,6 @@ package ca.uhn.fhir.cr.dstu3.activitydefinition; */ import ca.uhn.fhir.cr.common.IRepositoryFactory; -import ca.uhn.fhir.cr.config.CrDstu3Config; import ca.uhn.fhir.cr.dstu3.IActivityDefinitionProcessorFactory; import ca.uhn.fhir.rest.annotation.IdParam; import 
ca.uhn.fhir.rest.annotation.Operation; @@ -29,22 +28,21 @@ import ca.uhn.fhir.rest.annotation.OperationParam; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.provider.ProviderConstants; -import org.hl7.fhir.dstu3.model.StringType; -import org.hl7.fhir.exceptions.FHIRException; -import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.dstu3.model.ActivityDefinition; import org.hl7.fhir.dstu3.model.Endpoint; import org.hl7.fhir.dstu3.model.IdType; import org.hl7.fhir.dstu3.model.Parameters; +import org.hl7.fhir.dstu3.model.StringType; +import org.hl7.fhir.exceptions.FHIRException; +import org.hl7.fhir.instance.model.api.IBaseResource; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; -import java.util.function.Function; - @Component public class ActivityDefinitionOperationsProvider { @Autowired IRepositoryFactory myRepositoryFactory; + @Autowired IActivityDefinitionProcessorFactory myDstu3ActivityDefinitionServiceFactory; @@ -82,40 +80,43 @@ public class ActivityDefinitionOperationsProvider { * @return The resource that is the result of applying the definition */ @Operation(name = ProviderConstants.CR_OPERATION_APPLY, idempotent = true, type = ActivityDefinition.class) - public IBaseResource apply(@IdParam IdType theId, - @OperationParam(name = "canonical") String theCanonical, - @OperationParam(name = "activityDefinition") ActivityDefinition theActivityDefinition, - @OperationParam(name = "subject") String theSubject, - @OperationParam(name = "encounter") String theEncounter, - @OperationParam(name = "practitioner") String thePractitioner, - @OperationParam(name = "organization") String theOrganization, - @OperationParam(name = "userType") String theUserType, - @OperationParam(name = "userLanguage") String theUserLanguage, - @OperationParam(name = "userTaskContext") String theUserTaskContext, - @OperationParam(name = "setting") String theSetting, - @OperationParam(name = "settingContext") String theSettingContext, - @OperationParam(name = "parameters") Parameters theParameters, - @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, - @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, - @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, - RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { + public IBaseResource apply( + @IdParam IdType theId, + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "activityDefinition") ActivityDefinition theActivityDefinition, + @OperationParam(name = "subject") String theSubject, + @OperationParam(name = "encounter") String theEncounter, + @OperationParam(name = "practitioner") String thePractitioner, + @OperationParam(name = "organization") String theOrganization, + @OperationParam(name = "userType") String theUserType, + @OperationParam(name = "userLanguage") String theUserLanguage, + @OperationParam(name = "userTaskContext") String theUserTaskContext, + @OperationParam(name = "setting") String theSetting, + @OperationParam(name = "settingContext") String theSettingContext, + @OperationParam(name = "parameters") Parameters theParameters, + @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, + @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, + @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, + 
RequestDetails theRequestDetails) + throws InternalErrorException, FHIRException { return this.myDstu3ActivityDefinitionServiceFactory - .create(myRepositoryFactory.create(theRequestDetails)) - .apply(theId, - new StringType(theCanonical), - theActivityDefinition, - theSubject, - theEncounter, - thePractitioner, - theOrganization, - theUserType, - theUserLanguage, - theUserTaskContext, - theSetting, - theSettingContext, - theParameters, - theDataEndpoint, - theContentEndpoint, - theTerminologyEndpoint); + .create(myRepositoryFactory.create(theRequestDetails)) + .apply( + theId, + new StringType(theCanonical), + theActivityDefinition, + theSubject, + theEncounter, + thePractitioner, + theOrganization, + theUserType, + theUserLanguage, + theUserTaskContext, + theSetting, + theSettingContext, + theParameters, + theDataEndpoint, + theContentEndpoint, + theTerminologyEndpoint); } } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/measure/MeasureOperationsProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/measure/MeasureOperationsProvider.java index 5ab96c6460b..85cf3cefc4d 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/measure/MeasureOperationsProvider.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/measure/MeasureOperationsProvider.java @@ -66,29 +66,31 @@ public class MeasureOperationsProvider { * @return the calculated MeasureReport */ @Operation(name = ProviderConstants.CQL_EVALUATE_MEASURE, idempotent = true, type = Measure.class) - public MeasureReport evaluateMeasure(@IdParam IdType theId, - @OperationParam(name = "periodStart") String thePeriodStart, - @OperationParam(name = "periodEnd") String thePeriodEnd, - @OperationParam(name = "reportType") String theReportType, - @OperationParam(name = "patient") String thePatient, - @OperationParam(name = "practitioner") String thePractitioner, - @OperationParam(name = "lastReceivedOn") String theLastReceivedOn, - @OperationParam(name = "productLine") String theProductLine, - @OperationParam(name = "additionalData") Bundle theAdditionalData, - @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, - RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { + public MeasureReport evaluateMeasure( + @IdParam IdType theId, + @OperationParam(name = "periodStart") String thePeriodStart, + @OperationParam(name = "periodEnd") String thePeriodEnd, + @OperationParam(name = "reportType") String theReportType, + @OperationParam(name = "patient") String thePatient, + @OperationParam(name = "practitioner") String thePractitioner, + @OperationParam(name = "lastReceivedOn") String theLastReceivedOn, + @OperationParam(name = "productLine") String theProductLine, + @OperationParam(name = "additionalData") Bundle theAdditionalData, + @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, + RequestDetails theRequestDetails) + throws InternalErrorException, FHIRException { return this.myDstu3MeasureServiceFactory - .apply(theRequestDetails) - .evaluateMeasure( - theId, - thePeriodStart, - thePeriodEnd, - theReportType, - thePatient, - thePractitioner, - theLastReceivedOn, - theProductLine, - theAdditionalData, - theTerminologyEndpoint); + .apply(theRequestDetails) + .evaluateMeasure( + theId, + thePeriodStart, + thePeriodEnd, + theReportType, + thePatient, + thePractitioner, + theLastReceivedOn, + theProductLine, + theAdditionalData, + theTerminologyEndpoint); } } diff --git 
a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/measure/MeasureService.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/measure/MeasureService.java index 5b673dacabe..8b348f9ce1a 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/measure/MeasureService.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/measure/MeasureService.java @@ -65,43 +65,36 @@ import static ca.uhn.fhir.cr.constant.MeasureReportConstants.US_COUNTRY_DISPLAY; public class MeasureService implements IDaoRegistryUser { - public static final List CQI_CONTACT_DETAIL = Collections.singletonList( - new ContactDetail() - .addTelecom( - new ContactPoint() + public static final List CQI_CONTACT_DETAIL = Collections.singletonList(new ContactDetail() + .addTelecom(new ContactPoint() .setSystem(ContactPoint.ContactPointSystem.URL) .setValue("http://www.hl7.org/Special/committees/cqi/index.cfm"))); - public static final List US_JURISDICTION_CODING = Collections.singletonList( - new CodeableConcept() - .addCoding( - new Coding(COUNTRY_CODING_SYSTEM_CODE, US_COUNTRY_CODE, US_COUNTRY_DISPLAY))); + public static final List US_JURISDICTION_CODING = Collections.singletonList(new CodeableConcept() + .addCoding(new Coding(COUNTRY_CODING_SYSTEM_CODE, US_COUNTRY_CODE, US_COUNTRY_DISPLAY))); public static final SearchParameter SUPPLEMENTAL_DATA_SEARCHPARAMETER = (SearchParameter) new SearchParameter() - .setUrl(MEASUREREPORT_SUPPLEMENTALDATA_SEARCHPARAMETER_URL) - .setVersion(MEASUREREPORT_SUPPLEMENTALDATA_SEARCHPARAMETER_VERSION) - .setName("DEQMMeasureReportSupplementalData") - .setStatus(Enumerations.PublicationStatus.ACTIVE) - .setDate(MEASUREREPORT_SUPPLEMENTALDATA_SEARCHPARAMETER_DEFINITION_DATE) - .setPublisher("HL7 International - Clinical Quality Information Work Group") - .setContact(CQI_CONTACT_DETAIL) - .setDescription( - String.format( - "Returns resources (supplemental data) from references on extensions on the MeasureReport with urls matching %s.", - MEASUREREPORT_MEASURE_SUPPLEMENTALDATA_EXTENSION)) - .setJurisdiction(US_JURISDICTION_CODING) - .addBase("MeasureReport") - .setCode("supplemental-data") - .setType(Enumerations.SearchParamType.REFERENCE) - .setExpression( - String.format("MeasureReport.extension('%s').value", - MEASUREREPORT_MEASURE_SUPPLEMENTALDATA_EXTENSION)) - .setXpath( - String.format("f:MeasureReport/f:extension[@url='%s'].value", - MEASUREREPORT_MEASURE_SUPPLEMENTALDATA_EXTENSION)) - .setXpathUsage(SearchParameter.XPathUsageType.NORMAL) - .setTitle("Supplemental Data") - .setId("deqm-measurereport-supplemental-data"); + .setUrl(MEASUREREPORT_SUPPLEMENTALDATA_SEARCHPARAMETER_URL) + .setVersion(MEASUREREPORT_SUPPLEMENTALDATA_SEARCHPARAMETER_VERSION) + .setName("DEQMMeasureReportSupplementalData") + .setStatus(Enumerations.PublicationStatus.ACTIVE) + .setDate(MEASUREREPORT_SUPPLEMENTALDATA_SEARCHPARAMETER_DEFINITION_DATE) + .setPublisher("HL7 International - Clinical Quality Information Work Group") + .setContact(CQI_CONTACT_DETAIL) + .setDescription(String.format( + "Returns resources (supplemental data) from references on extensions on the MeasureReport with urls matching %s.", + MEASUREREPORT_MEASURE_SUPPLEMENTALDATA_EXTENSION)) + .setJurisdiction(US_JURISDICTION_CODING) + .addBase("MeasureReport") + .setCode("supplemental-data") + .setType(Enumerations.SearchParamType.REFERENCE) + .setExpression(String.format( + "MeasureReport.extension('%s').value", MEASUREREPORT_MEASURE_SUPPLEMENTALDATA_EXTENSION)) + .setXpath(String.format( + 
"f:MeasureReport/f:extension[@url='%s'].value", MEASUREREPORT_MEASURE_SUPPLEMENTALDATA_EXTENSION)) + .setXpathUsage(SearchParameter.XPathUsageType.NORMAL) + .setTitle("Supplemental Data") + .setId("deqm-measurereport-supplemental-data"); @Autowired protected ITerminologyProviderFactory myTerminologyProviderFactory; @@ -119,7 +112,8 @@ public class MeasureService implements IDaoRegistryUser { protected IFhirDalFactory myFhirDalFactory; @Autowired - protected Map myGlobalLibraryCache; + protected Map + myGlobalLibraryCache; @Autowired protected CqlOptions myCqlOptions; @@ -165,16 +159,17 @@ public class MeasureService implements IDaoRegistryUser { * @param theTerminologyEndpoint the endpoint of terminology services for your measure valuesets * @return the calculated MeasureReport */ - public MeasureReport evaluateMeasure(IdType theId, - String thePeriodStart, - String thePeriodEnd, - String theReportType, - String theSubject, - String thePractitioner, - String theLastReceivedOn, - String theProductLine, - Bundle theAdditionalData, - Endpoint theTerminologyEndpoint) { + public MeasureReport evaluateMeasure( + IdType theId, + String thePeriodStart, + String thePeriodEnd, + String theReportType, + String theSubject, + String thePractitioner, + String theLastReceivedOn, + String theProductLine, + Bundle theAdditionalData, + Endpoint theTerminologyEndpoint) { ensureSupplementalDataElementSearchParameter(); @@ -194,12 +189,31 @@ public class MeasureService implements IDaoRegistryUser { FhirDal fhirDal = this.myFhirDalFactory.create(myRequestDetails); var measureProcessor = new org.opencds.cqf.cql.evaluator.measure.dstu3.Dstu3MeasureProcessor( - null, this.myDataProviderFactory, null, null, null, terminologyProvider, libraryContentProvider, dataProvider, - fhirDal, myMeasureEvaluationOptions, myCqlOptions, - null); + null, + this.myDataProviderFactory, + null, + null, + null, + terminologyProvider, + libraryContentProvider, + dataProvider, + fhirDal, + myMeasureEvaluationOptions, + myCqlOptions, + null); - MeasureReport report = measureProcessor.evaluateMeasure(measure.getUrl(), thePeriodStart, thePeriodEnd, theReportType, - theSubject, null, theLastReceivedOn, null, null, null, theAdditionalData); + MeasureReport report = measureProcessor.evaluateMeasure( + measure.getUrl(), + thePeriodStart, + thePeriodEnd, + theReportType, + theSubject, + null, + theLastReceivedOn, + null, + null, + null, + theAdditionalData); if (theProductLine != null) { Extension ext = new Extension(); @@ -217,10 +231,10 @@ public class MeasureService implements IDaoRegistryUser { } protected void ensureSupplementalDataElementSearchParameter() { - //create a transaction bundle + // create a transaction bundle BundleBuilder builder = new BundleBuilder(getFhirContext()); - //set the request to be condition on code == supplemental data + // set the request to be condition on code == supplemental data builder.addTransactionCreateEntry(SUPPLEMENTAL_DATA_SEARCHPARAMETER).conditional("code=supplemental-data"); transaction(builder.getBundle(), this.myRequestDetails); } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/plandefinition/PlanDefinitionOperationsProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/plandefinition/PlanDefinitionOperationsProvider.java index 64aaae6e9f8..a4c3e646e07 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/plandefinition/PlanDefinitionOperationsProvider.java +++ 
b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/plandefinition/PlanDefinitionOperationsProvider.java @@ -21,7 +21,6 @@ package ca.uhn.fhir.cr.dstu3.plandefinition; */ import ca.uhn.fhir.cr.common.IRepositoryFactory; -import ca.uhn.fhir.cr.config.CrDstu3Config; import ca.uhn.fhir.cr.dstu3.IPlanDefinitionProcessorFactory; import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.Operation; @@ -29,23 +28,22 @@ import ca.uhn.fhir.rest.annotation.OperationParam; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.provider.ProviderConstants; -import org.hl7.fhir.dstu3.model.StringType; -import org.hl7.fhir.exceptions.FHIRException; -import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.dstu3.model.Bundle; import org.hl7.fhir.dstu3.model.Endpoint; import org.hl7.fhir.dstu3.model.IdType; import org.hl7.fhir.dstu3.model.Parameters; import org.hl7.fhir.dstu3.model.PlanDefinition; +import org.hl7.fhir.dstu3.model.StringType; +import org.hl7.fhir.exceptions.FHIRException; +import org.hl7.fhir.instance.model.api.IBaseResource; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; -import java.util.function.Function; - @Component public class PlanDefinitionOperationsProvider { @Autowired IRepositoryFactory myRepositoryFactory; + @Autowired IPlanDefinitionProcessorFactory myDstu3PlanDefinitionServiceFactory; @@ -85,44 +83,47 @@ public class PlanDefinitionOperationsProvider { * @return The CarePlan that is the result of applying the plan definition */ @Operation(name = ProviderConstants.CR_OPERATION_APPLY, idempotent = true, type = PlanDefinition.class) - public IBaseResource apply(@IdParam IdType theId, - @OperationParam(name = "canonical") String theCanonical, - @OperationParam(name = "planDefinition") PlanDefinition thePlanDefinition, - @OperationParam(name = "subject") String theSubject, - @OperationParam(name = "encounter") String theEncounter, - @OperationParam(name = "practitioner") String thePractitioner, - @OperationParam(name = "organization") String theOrganization, - @OperationParam(name = "userType") String theUserType, - @OperationParam(name = "userLanguage") String theUserLanguage, - @OperationParam(name = "userTaskContext") String theUserTaskContext, - @OperationParam(name = "setting") String theSetting, - @OperationParam(name = "settingContext") String theSettingContext, - @OperationParam(name = "parameters") Parameters theParameters, - @OperationParam(name = "data") Bundle theData, - @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, - @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, - @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, - RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { + public IBaseResource apply( + @IdParam IdType theId, + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "planDefinition") PlanDefinition thePlanDefinition, + @OperationParam(name = "subject") String theSubject, + @OperationParam(name = "encounter") String theEncounter, + @OperationParam(name = "practitioner") String thePractitioner, + @OperationParam(name = "organization") String theOrganization, + @OperationParam(name = "userType") String theUserType, + @OperationParam(name = "userLanguage") String theUserLanguage, + @OperationParam(name = "userTaskContext") String 
theUserTaskContext, + @OperationParam(name = "setting") String theSetting, + @OperationParam(name = "settingContext") String theSettingContext, + @OperationParam(name = "parameters") Parameters theParameters, + @OperationParam(name = "data") Bundle theData, + @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, + @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, + @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, + RequestDetails theRequestDetails) + throws InternalErrorException, FHIRException { return this.myDstu3PlanDefinitionServiceFactory - .create(myRepositoryFactory.create(theRequestDetails)) - .apply(theId, - new StringType(theCanonical), - thePlanDefinition, - theSubject, - theEncounter, - thePractitioner, - theOrganization, - theUserType, - theUserLanguage, - theUserTaskContext, - theSetting, - theSettingContext, - theParameters, - true, - theData, - null, - theDataEndpoint, - theContentEndpoint, - theTerminologyEndpoint); + .create(myRepositoryFactory.create(theRequestDetails)) + .apply( + theId, + new StringType(theCanonical), + thePlanDefinition, + theSubject, + theEncounter, + thePractitioner, + theOrganization, + theUserType, + theUserLanguage, + theUserTaskContext, + theSetting, + theSettingContext, + theParameters, + true, + theData, + null, + theDataEndpoint, + theContentEndpoint, + theTerminologyEndpoint); } } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/questionnaire/QuestionnaireOperationsProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/questionnaire/QuestionnaireOperationsProvider.java index cfefaa28835..bd98654b8a8 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/questionnaire/QuestionnaireOperationsProvider.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/questionnaire/QuestionnaireOperationsProvider.java @@ -21,7 +21,6 @@ package ca.uhn.fhir.cr.dstu3.questionnaire; */ import ca.uhn.fhir.cr.common.IRepositoryFactory; -import ca.uhn.fhir.cr.config.CrDstu3Config; import ca.uhn.fhir.cr.dstu3.IQuestionnaireProcessorFactory; import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.Operation; @@ -42,6 +41,7 @@ import org.springframework.beans.factory.annotation.Autowired; public class QuestionnaireOperationsProvider { @Autowired IRepositoryFactory myRepositoryFactory; + @Autowired IQuestionnaireProcessorFactory myDstu3QuestionnaireServiceFactory; @@ -69,27 +69,30 @@ public class QuestionnaireOperationsProvider { * @return The partially (or fully)-populated set of answers for the specified Questionnaire. 
*/ @Operation(name = ProviderConstants.CR_OPERATION_PREPOPULATE, idempotent = true, type = Questionnaire.class) - public Questionnaire prepopulate(@IdParam IdType theId, - @OperationParam(name = "canonical") String theCanonical, - @OperationParam(name = "questionnaire") Questionnaire theQuestionnaire, - @OperationParam(name = "subject") String theSubject, - @OperationParam(name = "parameters") Parameters theParameters, - @OperationParam(name = "bundle") Bundle theBundle, - @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, - @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, - @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, - RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { + public Questionnaire prepopulate( + @IdParam IdType theId, + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "questionnaire") Questionnaire theQuestionnaire, + @OperationParam(name = "subject") String theSubject, + @OperationParam(name = "parameters") Parameters theParameters, + @OperationParam(name = "bundle") Bundle theBundle, + @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, + @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, + @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, + RequestDetails theRequestDetails) + throws InternalErrorException, FHIRException { return this.myDstu3QuestionnaireServiceFactory - .create(myRepositoryFactory.create(theRequestDetails)) - .prePopulate(theId, - new StringType(theCanonical), - theQuestionnaire, - theSubject, - theParameters, - theBundle, - theDataEndpoint, - theContentEndpoint, - theTerminologyEndpoint); + .create(myRepositoryFactory.create(theRequestDetails)) + .prePopulate( + theId, + new StringType(theCanonical), + theQuestionnaire, + theSubject, + theParameters, + theBundle, + theDataEndpoint, + theContentEndpoint, + theTerminologyEndpoint); } /** @@ -114,27 +117,30 @@ public class QuestionnaireOperationsProvider { * @return The partially (or fully)-populated set of answers for the specified Questionnaire. 
*/
 @Operation(name = ProviderConstants.CR_OPERATION_POPULATE, idempotent = true, type = Questionnaire.class)
- public QuestionnaireResponse populate(@IdParam IdType theId,
- @OperationParam(name = "canonical") String theCanonical,
- @OperationParam(name = "questionnaire") Questionnaire theQuestionnaire,
- @OperationParam(name = "subject") String theSubject,
- @OperationParam(name = "parameters") Parameters theParameters,
- @OperationParam(name = "bundle") Bundle theBundle,
- @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint,
- @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint,
- @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint,
- RequestDetails theRequestDetails) throws InternalErrorException, FHIRException {
+ public QuestionnaireResponse populate(
+ @IdParam IdType theId,
+ @OperationParam(name = "canonical") String theCanonical,
+ @OperationParam(name = "questionnaire") Questionnaire theQuestionnaire,
+ @OperationParam(name = "subject") String theSubject,
+ @OperationParam(name = "parameters") Parameters theParameters,
+ @OperationParam(name = "bundle") Bundle theBundle,
+ @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint,
+ @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint,
+ @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint,
+ RequestDetails theRequestDetails)
+ throws InternalErrorException, FHIRException {
 return (QuestionnaireResponse) this.myDstu3QuestionnaireServiceFactory
- .create(myRepositoryFactory.create(theRequestDetails))
- .populate(theId,
- new StringType(theCanonical),
- theQuestionnaire,
- theSubject,
- theParameters,
- theBundle,
- theDataEndpoint,
- theContentEndpoint,
- theTerminologyEndpoint);
+ .create(myRepositoryFactory.create(theRequestDetails))
+ .populate(
+ theId,
+ new StringType(theCanonical),
+ theQuestionnaire,
+ theSubject,
+ theParameters,
+ theBundle,
+ theDataEndpoint,
+ theContentEndpoint,
+ theTerminologyEndpoint);
 }
 /**
@@ -148,12 +154,13 @@ public class QuestionnaireOperationsProvider {
 * @return A Bundle containing the Questionnaire and all related Library, CodeSystem and ValueSet resources
 */
 @Operation(name = ProviderConstants.CR_OPERATION_PACKAGE, idempotent = true, type = Questionnaire.class)
- public Bundle packageQuestionnaire(@IdParam IdType theId,
- @OperationParam(name = "canonical") String theCanonical,
- RequestDetails theRequestDetails) {
+ public Bundle packageQuestionnaire(
+ @IdParam IdType theId,
+ @OperationParam(name = "canonical") String theCanonical,
+ RequestDetails theRequestDetails) {
 return (Bundle) this.myDstu3QuestionnaireServiceFactory
- .create(myRepositoryFactory.create(theRequestDetails))
- .packageQuestionnaire(theId, new StringType(theCanonical), null, false);
+ .create(myRepositoryFactory.create(theRequestDetails))
+ .packageQuestionnaire(theId, new StringType(theCanonical), null, false);
 }
 }
diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/questionnaireresponse/QuestionnaireResponseOperationsProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/questionnaireresponse/QuestionnaireResponseOperationsProvider.java
index c88e012024e..ab21ffa266c 100644
--- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/questionnaireresponse/QuestionnaireResponseOperationsProvider.java
+++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/dstu3/questionnaireresponse/QuestionnaireResponseOperationsProvider.java
@@ -21,7 +21,6 @@ package ca.uhn.fhir.cr.dstu3.questionnaireresponse;
 */
 import ca.uhn.fhir.cr.common.IRepositoryFactory;
-import ca.uhn.fhir.cr.config.CrDstu3Config;
 import ca.uhn.fhir.cr.dstu3.IQuestionnaireResponseProcessorFactory;
 import ca.uhn.fhir.rest.annotation.IdParam;
 import ca.uhn.fhir.rest.annotation.Operation;
@@ -38,6 +37,7 @@ import org.springframework.beans.factory.annotation.Autowired;
 public class QuestionnaireResponseOperationsProvider {
 @Autowired
 IRepositoryFactory myRepositoryFactory;
+
 @Autowired
 IQuestionnaireResponseProcessorFactory myDstu3QuestionnaireResponseServiceFactory;
@@ -54,10 +54,13 @@ public class QuestionnaireResponseOperationsProvider {
 * @return The resulting FHIR resource produced after extracting data. This will either be a single resource or a Transaction Bundle that contains multiple resources.
 */
 @Operation(name = ProviderConstants.CR_OPERATION_EXTRACT, idempotent = true, type = QuestionnaireResponse.class)
- public IBaseBundle extract(@IdParam IdType theId, @ResourceParam QuestionnaireResponse theQuestionnaireResponse,
- RequestDetails theRequestDetails) throws InternalErrorException, FHIRException {
+ public IBaseBundle extract(
+ @IdParam IdType theId,
+ @ResourceParam QuestionnaireResponse theQuestionnaireResponse,
+ RequestDetails theRequestDetails)
+ throws InternalErrorException, FHIRException {
 return this.myDstu3QuestionnaireResponseServiceFactory
- .create(myRepositoryFactory.create(theRequestDetails))
- .extract(theId, theQuestionnaireResponse, null, null, null);
+ .create(myRepositoryFactory.create(theRequestDetails))
+ .extract(theId, theQuestionnaireResponse, null, null, null);
 }
 }
diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/enumeration/CareGapsStatusCode.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/enumeration/CareGapsStatusCode.java
index 1cc35b4fab1..ea18e008c9b 100644
--- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/enumeration/CareGapsStatusCode.java
+++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/enumeration/CareGapsStatusCode.java
@@ -22,7 +22,9 @@ package ca.uhn.fhir.cr.enumeration;
 import ca.uhn.fhir.i18n.Msg;
 public enum CareGapsStatusCode {
- OPEN_GAP("open-gap"), CLOSED_GAP("closed-gap"), NOT_APPLICABLE("not-applicable");
+ OPEN_GAP("open-gap"),
+ CLOSED_GAP("closed-gap"),
+ NOT_APPLICABLE("not-applicable");
 private final String myValue;
diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/activitydefinition/ActivityDefinitionOperationsProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/activitydefinition/ActivityDefinitionOperationsProvider.java
index e2212c2f653..474907438f3 100644
--- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/activitydefinition/ActivityDefinitionOperationsProvider.java
+++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/activitydefinition/ActivityDefinitionOperationsProvider.java
@@ -42,6 +42,7 @@ import org.springframework.stereotype.Component;
 public class ActivityDefinitionOperationsProvider {
 @Autowired
 IRepositoryFactory myRepositoryFactory;
+
 @Autowired
 IActivityDefinitionProcessorFactory myR4ActivityDefinitionProcessorFactory;
@@ -79,77 +80,83 @@ public class ActivityDefinitionOperationsProvider {
 * @return The resource that is the result of applying the definition
 */
 @Operation(name = ProviderConstants.CR_OPERATION_APPLY, idempotent = true, type = ActivityDefinition.class)
- public IBaseResource apply(@IdParam IdType theId,
- @OperationParam(name = "canonical") String theCanonical,
- @OperationParam(name =
"activityDefinition") ActivityDefinition theActivityDefinition, - @OperationParam(name = "subject") String theSubject, - @OperationParam(name = "encounter") String theEncounter, - @OperationParam(name = "practitioner") String thePractitioner, - @OperationParam(name = "organization") String theOrganization, - @OperationParam(name = "userType") String theUserType, - @OperationParam(name = "userLanguage") String theUserLanguage, - @OperationParam(name = "userTaskContext") String theUserTaskContext, - @OperationParam(name = "setting") String theSetting, - @OperationParam(name = "settingContext") String theSettingContext, - @OperationParam(name = "parameters") Parameters theParameters, - @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, - @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, - @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, - RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { + public IBaseResource apply( + @IdParam IdType theId, + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "activityDefinition") ActivityDefinition theActivityDefinition, + @OperationParam(name = "subject") String theSubject, + @OperationParam(name = "encounter") String theEncounter, + @OperationParam(name = "practitioner") String thePractitioner, + @OperationParam(name = "organization") String theOrganization, + @OperationParam(name = "userType") String theUserType, + @OperationParam(name = "userLanguage") String theUserLanguage, + @OperationParam(name = "userTaskContext") String theUserTaskContext, + @OperationParam(name = "setting") String theSetting, + @OperationParam(name = "settingContext") String theSettingContext, + @OperationParam(name = "parameters") Parameters theParameters, + @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, + @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, + @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, + RequestDetails theRequestDetails) + throws InternalErrorException, FHIRException { return myR4ActivityDefinitionProcessorFactory - .create(myRepositoryFactory.create(theRequestDetails)) - .apply(theId, - new CanonicalType(theCanonical), - theActivityDefinition, - theSubject, - theEncounter, - thePractitioner, - theOrganization, - theUserType, - theUserLanguage, - theUserTaskContext, - theSetting, - theSettingContext, - theParameters, - theDataEndpoint, - theContentEndpoint, - theTerminologyEndpoint); + .create(myRepositoryFactory.create(theRequestDetails)) + .apply( + theId, + new CanonicalType(theCanonical), + theActivityDefinition, + theSubject, + theEncounter, + thePractitioner, + theOrganization, + theUserType, + theUserLanguage, + theUserTaskContext, + theSetting, + theSettingContext, + theParameters, + theDataEndpoint, + theContentEndpoint, + theTerminologyEndpoint); } @Operation(name = ProviderConstants.CR_OPERATION_APPLY, idempotent = true, type = ActivityDefinition.class) - public IBaseResource apply(@OperationParam(name = "canonical") String theCanonical, - @OperationParam(name = "activityDefinition") ActivityDefinition theActivityDefinition, - @OperationParam(name = "subject") String theSubject, - @OperationParam(name = "encounter") String theEncounter, - @OperationParam(name = "practitioner") String thePractitioner, - @OperationParam(name = "organization") String theOrganization, - @OperationParam(name = "userType") String theUserType, - @OperationParam(name = 
"userLanguage") String theUserLanguage, - @OperationParam(name = "userTaskContext") String theUserTaskContext, - @OperationParam(name = "setting") String theSetting, - @OperationParam(name = "settingContext") String theSettingContext, - @OperationParam(name = "parameters") Parameters theParameters, - @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, - @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, - @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, - RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { + public IBaseResource apply( + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "activityDefinition") ActivityDefinition theActivityDefinition, + @OperationParam(name = "subject") String theSubject, + @OperationParam(name = "encounter") String theEncounter, + @OperationParam(name = "practitioner") String thePractitioner, + @OperationParam(name = "organization") String theOrganization, + @OperationParam(name = "userType") String theUserType, + @OperationParam(name = "userLanguage") String theUserLanguage, + @OperationParam(name = "userTaskContext") String theUserTaskContext, + @OperationParam(name = "setting") String theSetting, + @OperationParam(name = "settingContext") String theSettingContext, + @OperationParam(name = "parameters") Parameters theParameters, + @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, + @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, + @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, + RequestDetails theRequestDetails) + throws InternalErrorException, FHIRException { return myR4ActivityDefinitionProcessorFactory - .create(myRepositoryFactory.create(theRequestDetails)) - .apply(null, - new CanonicalType(theCanonical), - theActivityDefinition, - theSubject, - theEncounter, - thePractitioner, - theOrganization, - theUserType, - theUserLanguage, - theUserTaskContext, - theSetting, - theSettingContext, - theParameters, - theDataEndpoint, - theContentEndpoint, - theTerminologyEndpoint); + .create(myRepositoryFactory.create(theRequestDetails)) + .apply( + null, + new CanonicalType(theCanonical), + theActivityDefinition, + theSubject, + theEncounter, + thePractitioner, + theOrganization, + theUserType, + theUserLanguage, + theUserTaskContext, + theSetting, + theSettingContext, + theParameters, + theDataEndpoint, + theContentEndpoint, + theTerminologyEndpoint); } } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/CareGapsOperationProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/CareGapsOperationProvider.java index 367e31a359e..231037b27ac 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/CareGapsOperationProvider.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/CareGapsOperationProvider.java @@ -91,25 +91,28 @@ public class CareGapsOperationProvider { * clinical organization) participates in * @return Parameters of bundles of Care Gap Measure Reports */ - @Description(shortDefinition = "$care-gaps operation", value = "Implements the $care-gaps operation found in the Da Vinci DEQM FHIR Implementation Guide which is an extension of the $care-gaps operation found in the FHIR Clinical Reasoning Module.") + @Description( + shortDefinition = "$care-gaps operation", + value = + "Implements the $care-gaps operation found in the Da Vinci DEQM FHIR Implementation Guide which is an extension 
of the $care-gaps operation found in the FHIR Clinical Reasoning Module.") @Operation(name = "$care-gaps", idempotent = true, type = Measure.class) public Parameters careGapsReport( - RequestDetails theRequestDetails, - @OperationParam(name = "periodStart", typeName = "date") IPrimitiveType thePeriodStart, - @OperationParam(name = "periodEnd", typeName = "date") IPrimitiveType thePeriodEnd, - @OperationParam(name = "topic") List theTopic, - @OperationParam(name = "subject") String theSubject, - @OperationParam(name = "practitioner") String thePractitioner, - @OperationParam(name = "organization") String theOrganization, - @OperationParam(name = "status") List theStatus, - @OperationParam(name = "measureId") List theMeasureId, - @OperationParam(name = "measureIdentifier") List theMeasureIdentifier, - @OperationParam(name = "measureUrl") List theMeasureUrl, - @OperationParam(name = "program") List theProgram) { + RequestDetails theRequestDetails, + @OperationParam(name = "periodStart", typeName = "date") IPrimitiveType thePeriodStart, + @OperationParam(name = "periodEnd", typeName = "date") IPrimitiveType thePeriodEnd, + @OperationParam(name = "topic") List theTopic, + @OperationParam(name = "subject") String theSubject, + @OperationParam(name = "practitioner") String thePractitioner, + @OperationParam(name = "organization") String theOrganization, + @OperationParam(name = "status") List theStatus, + @OperationParam(name = "measureId") List theMeasureId, + @OperationParam(name = "measureIdentifier") List theMeasureIdentifier, + @OperationParam(name = "measureUrl") List theMeasureUrl, + @OperationParam(name = "program") List theProgram) { return myCareGapsServiceFunction - .apply(theRequestDetails) - .getCareGapsReport( + .apply(theRequestDetails) + .getCareGapsReport( thePeriodStart, thePeriodEnd, theTopic, @@ -120,7 +123,6 @@ public class CareGapsOperationProvider { theMeasureId, theMeasureIdentifier, theMeasureUrl, - theProgram - ); + theProgram); } } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/CareGapsService.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/CareGapsService.java index c2f4fd28793..142fad26cf7 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/CareGapsService.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/CareGapsService.java @@ -92,12 +92,13 @@ public class CareGapsService implements IDaoRegistryUser { private static final Logger ourLog = LoggerFactory.getLogger(CareGapsService.class); public static final Map CARE_GAPS_CODES = ofEntries( - new AbstractMap.SimpleEntry<>("http://loinc.org/96315-7", - new CodeableConceptSettings().add( - "http://loinc.org", "96315-7", "Gaps in care report")), - new AbstractMap.SimpleEntry<>("http://terminology.hl7.org/CodeSystem/v3-ActCode/CAREGAP", - new CodeableConceptSettings().add( - "http://terminology.hl7.org/CodeSystem/v3-ActCode", "CAREGAP", "Care Gaps"))); + new AbstractMap.SimpleEntry<>( + "http://loinc.org/96315-7", + new CodeableConceptSettings().add("http://loinc.org", "96315-7", "Gaps in care report")), + new AbstractMap.SimpleEntry<>( + "http://terminology.hl7.org/CodeSystem/v3-ActCode/CAREGAP", + new CodeableConceptSettings() + .add("http://terminology.hl7.org/CodeSystem/v3-ActCode", "CAREGAP", "Care Gaps"))); private RequestDetails myRequestDetails; @@ -111,11 +112,12 @@ public class CareGapsService implements IDaoRegistryUser { private final Map myConfiguredResources = new HashMap<>(); - public CareGapsService(CrProperties 
theCrProperties, - MeasureService theMeasureService, - DaoRegistry theDaoRegistry, - Executor theExecutor, - RequestDetails theRequestDetails){ + public CareGapsService( + CrProperties theCrProperties, + MeasureService theMeasureService, + DaoRegistry theDaoRegistry, + Executor theExecutor, + RequestDetails theRequestDetails) { this.myDaoRegistry = theDaoRegistry; this.myCrProperties = theCrProperties; this.myR4MeasureService = theMeasureService; @@ -139,79 +141,119 @@ public class CareGapsService implements IDaoRegistryUser { * @return Parameters that includes zero to many document bundles that * include Care Gap Measure Reports will be returned. */ - public Parameters getCareGapsReport(IPrimitiveType thePeriodStart, - IPrimitiveType thePeriodEnd, - List theTopic, - String theSubject, - String thePractitioner, - String theOrganization, - List theStatuses, - List theMeasureIds, - List theMeasureIdentifiers, - List theMeasureUrls, - List thePrograms) { + public Parameters getCareGapsReport( + IPrimitiveType thePeriodStart, + IPrimitiveType thePeriodEnd, + List theTopic, + String theSubject, + String thePractitioner, + String theOrganization, + List theStatuses, + List theMeasureIds, + List theMeasureIdentifiers, + List theMeasureUrls, + List thePrograms) { validateConfiguration(); - List measures = ensureMeasures(getMeasures(theMeasureIds, theMeasureIdentifiers, theMeasureUrls, myRequestDetails)); + List measures = + ensureMeasures(getMeasures(theMeasureIds, theMeasureIdentifiers, theMeasureUrls, myRequestDetails)); List patients; if (!Strings.isNullOrEmpty(theSubject)) { patients = getPatientListFromSubject(theSubject); } else { - throw new NotImplementedOperationException(Msg.code(2275) + "Only the subject parameter has been implemented."); + throw new NotImplementedOperationException( + Msg.code(2275) + "Only the subject parameter has been implemented."); } List> futures = new ArrayList<>(); Parameters result = initializeResult(); if (myCrProperties.getMeasureProperties().getThreadedCareGapsEnabled()) { - patients - .forEach( - patient -> { - Parameters.ParametersParameterComponent patientReports = patientReports(myRequestDetails, - thePeriodStart.getValueAsString(), thePeriodEnd.getValueAsString(), patient, theStatuses, measures, - theOrganization); - futures.add(CompletableFuture.supplyAsync(() -> patientReports, myCqlExecutor)); - }); + patients.forEach(patient -> { + Parameters.ParametersParameterComponent patientReports = patientReports( + myRequestDetails, + thePeriodStart.getValueAsString(), + thePeriodEnd.getValueAsString(), + patient, + theStatuses, + measures, + theOrganization); + futures.add(CompletableFuture.supplyAsync(() -> patientReports, myCqlExecutor)); + }); futures.forEach(x -> result.addParameter(x.join())); } else { - patients.forEach( - patient -> { - Parameters.ParametersParameterComponent patientReports = patientReports(myRequestDetails, - thePeriodStart.getValueAsString(), thePeriodEnd.getValueAsString(), patient, theStatuses, measures, + patients.forEach(patient -> { + Parameters.ParametersParameterComponent patientReports = patientReports( + myRequestDetails, + thePeriodStart.getValueAsString(), + thePeriodEnd.getValueAsString(), + patient, + theStatuses, + measures, theOrganization); - if (patientReports != null) { - result.addParameter(patientReports); - } - }); + if (patientReports != null) { + result.addParameter(patientReports); + } + }); } return result; } public void validateConfiguration() { - checkNotNull(myCrProperties.getMeasureProperties(), - "The 
measure_report setting properties are required for the $care-gaps operation."); - checkNotNull(myCrProperties.getMeasureProperties().getMeasureReportConfiguration(), - "The measure_report setting is required for the $care-gaps operation."); - checkArgument(!Strings.isNullOrEmpty(myCrProperties.getMeasureProperties().getMeasureReportConfiguration().getCareGapsReporter()), - "The measure_report.care_gaps_reporter setting is required for the $care-gaps operation."); - checkArgument(!Strings.isNullOrEmpty(myCrProperties.getMeasureProperties().getMeasureReportConfiguration().getCareGapsCompositionSectionAuthor()), - "The measure_report.care_gaps_composition_section_author setting is required for the $care-gaps operation."); + checkNotNull( + myCrProperties.getMeasureProperties(), + "The measure_report setting properties are required for the $care-gaps operation."); + checkNotNull( + myCrProperties.getMeasureProperties().getMeasureReportConfiguration(), + "The measure_report setting is required for the $care-gaps operation."); + checkArgument( + !Strings.isNullOrEmpty(myCrProperties + .getMeasureProperties() + .getMeasureReportConfiguration() + .getCareGapsReporter()), + "The measure_report.care_gaps_reporter setting is required for the $care-gaps operation."); + checkArgument( + !Strings.isNullOrEmpty(myCrProperties + .getMeasureProperties() + .getMeasureReportConfiguration() + .getCareGapsCompositionSectionAuthor()), + "The measure_report.care_gaps_composition_section_author setting is required for the $care-gaps operation."); - Resource configuredReporter = addConfiguredResource(Organization.class, - myCrProperties.getMeasureProperties().getMeasureReportConfiguration().getCareGapsReporter(), "care_gaps_reporter"); - Resource configuredAuthor = addConfiguredResource(Organization.class, - myCrProperties.getMeasureProperties().getMeasureReportConfiguration().getCareGapsCompositionSectionAuthor(), - "care_gaps_composition_section_author"); + Resource configuredReporter = addConfiguredResource( + Organization.class, + myCrProperties + .getMeasureProperties() + .getMeasureReportConfiguration() + .getCareGapsReporter(), + "care_gaps_reporter"); + Resource configuredAuthor = addConfiguredResource( + Organization.class, + myCrProperties + .getMeasureProperties() + .getMeasureReportConfiguration() + .getCareGapsCompositionSectionAuthor(), + "care_gaps_composition_section_author"); - checkNotNull(configuredReporter, String.format( - "The %s Resource is configured as the measure_report.care_gaps_reporter but the Resource could not be read.", - myCrProperties.getMeasureProperties().getMeasureReportConfiguration().getCareGapsReporter())); - checkNotNull(configuredAuthor, String.format( - "The %s Resource is configured as the measure_report.care_gaps_composition_section_author but the Resource could not be read.", - myCrProperties.getMeasureProperties().getMeasureReportConfiguration().getCareGapsCompositionSectionAuthor())); + checkNotNull( + configuredReporter, + String.format( + "The %s Resource is configured as the measure_report.care_gaps_reporter but the Resource could not be read.", + myCrProperties + .getMeasureProperties() + .getMeasureReportConfiguration() + .getCareGapsReporter())); + checkNotNull( + configuredAuthor, + String.format( + "The %s Resource is configured as the measure_report.care_gaps_composition_section_author but the Resource could not be read.", + myCrProperties + .getMeasureProperties() + .getMeasureReportConfiguration() + .getCareGapsCompositionSectionAuthor())); } + List 
getPatientListFromSubject(String theSubject) { if (theSubject.startsWith("Patient/")) { return Collections.singletonList(validatePatientExists(theSubject)); @@ -255,8 +297,11 @@ public class CareGapsService implements IDaoRegistryUser { return patient; } - List getMeasures(List theMeasureIds, List theMeasureIdentifiers, - List theMeasureCanonicals, RequestDetails theRequestDetails) { + List getMeasures( + List theMeasureIds, + List theMeasureIdentifiers, + List theMeasureCanonicals, + RequestDetails theRequestDetails) { boolean hasMeasureIds = theMeasureIds != null && !theMeasureIds.isEmpty(); boolean hasMeasureIdentifiers = theMeasureIdentifiers != null && !theMeasureIdentifiers.isEmpty(); boolean hasMeasureUrls = theMeasureCanonicals != null && !theMeasureCanonicals.isEmpty(); @@ -271,14 +316,16 @@ public class CareGapsService implements IDaoRegistryUser { populateMeasures(measureList, measureSearchResults); } - if(hasMeasureUrls) { - measureSearchResults = search(Measure.class, Searches.byCanonicals(theMeasureCanonicals), theRequestDetails); + if (hasMeasureUrls) { + measureSearchResults = + search(Measure.class, Searches.byCanonicals(theMeasureCanonicals), theRequestDetails); populateMeasures(measureList, measureSearchResults); } // TODO: implement searching by measure identifiers if (hasMeasureIdentifiers) { - throw new NotImplementedOperationException(Msg.code(2278) + "Measure identifiers have not yet been implemented."); + throw new NotImplementedOperationException( + Msg.code(2278) + "Measure identifiers have not yet been implemented."); } Map result = new HashMap<>(); @@ -288,21 +335,21 @@ public class CareGapsService implements IDaoRegistryUser { } private void populateMeasures(List measureList, Iterable measureSearchResults) { - if(measureSearchResults != null){ + if (measureSearchResults != null) { Iterator measures = measureSearchResults.iterator(); - while(measures.hasNext()){ - measureList.add((Measure)measures.next()); + while (measures.hasNext()) { + measureList.add((Measure) measures.next()); } } } private T addConfiguredResource(Class theResourceClass, String theId, String theKey) { - //T resource = repo.search(theResourceClass, Searches.byId(theId)).firstOrNull(); + // T resource = repo.search(theResourceClass, Searches.byId(theId)).firstOrNull(); Iterable resourceResult = search(theResourceClass, Searches.byId(theId), myRequestDetails); T resource = null; - if(resourceResult != null){ + if (resourceResult != null) { Iterator resources = resourceResult.iterator(); - while(resources.hasNext()){ + while (resources.hasNext()) { resource = (T) resources.next(); break; } @@ -327,35 +374,67 @@ public class CareGapsService implements IDaoRegistryUser { return theMeasures; } - private Parameters.ParametersParameterComponent patientReports(RequestDetails theRequestDetails, String thePeriodStart, - String thePeriodEnd, Patient thePatient, List theStatuses, List theMeasures, String theOrganization) { + private Parameters.ParametersParameterComponent patientReports( + RequestDetails theRequestDetails, + String thePeriodStart, + String thePeriodEnd, + Patient thePatient, + List theStatuses, + List theMeasures, + String theOrganization) { // TODO: add organization to report, if it exists. 
Composition composition = getComposition(thePatient); List detectedIssues = new ArrayList<>(); Map evalPlusSDE = new HashMap<>(); - List reports = getReports(theRequestDetails, thePeriodStart, thePeriodEnd, thePatient, theStatuses, theMeasures, - composition, detectedIssues, evalPlusSDE); + List reports = getReports( + theRequestDetails, + thePeriodStart, + thePeriodEnd, + thePatient, + theStatuses, + theMeasures, + composition, + detectedIssues, + evalPlusSDE); if (reports.isEmpty()) { return null; } - return initializePatientParameter(thePatient).setResource( - addBundleEntries(theRequestDetails.getFhirServerBase(), composition, detectedIssues, reports, evalPlusSDE)); + return initializePatientParameter(thePatient) + .setResource(addBundleEntries( + theRequestDetails.getFhirServerBase(), composition, detectedIssues, reports, evalPlusSDE)); } - private List getReports(RequestDetails theRequestDetails, String thePeriodStart, String thePeriodEnd, - Patient thePatient, List theStatuses, List theMeasures, Composition theComposition, - List theDetectedIssues, Map theEvalPlusSDEs) { + private List getReports( + RequestDetails theRequestDetails, + String thePeriodStart, + String thePeriodEnd, + Patient thePatient, + List theStatuses, + List theMeasures, + Composition theComposition, + List theDetectedIssues, + Map theEvalPlusSDEs) { List reports = new ArrayList<>(); MeasureReport report; for (Measure measure : theMeasures) { - report = myR4MeasureService.evaluateMeasure(measure.getIdElement(), thePeriodStart, - thePeriodEnd, "patient", Ids.simple(thePatient), null, null, null, null, null); - if (!report.hasGroup()) { - ourLog.info("Report does not include a group so skipping.\nSubject: {}\nMeasure: {}", + report = myR4MeasureService.evaluateMeasure( + measure.getIdElement(), + thePeriodStart, + thePeriodEnd, + "patient", Ids.simple(thePatient), - Ids.simplePart(measure)); + null, + null, + null, + null, + null); + if (!report.hasGroup()) { + ourLog.info( + "Report does not include a group so skipping.\nSubject: {}\nMeasure: {}", + Ids.simple(thePatient), + Ids.simplePart(measure)); continue; } @@ -382,7 +461,11 @@ public class CareGapsService implements IDaoRegistryUser { IIdType id = Ids.newId(MeasureReport.class, UUID.randomUUID().toString()); theMeasureReport.setId(id); } - Reference reporter = new Reference().setReference(myCrProperties.getMeasureProperties().getMeasureReportConfiguration().getCareGapsReporter()); + Reference reporter = new Reference() + .setReference(myCrProperties + .getMeasureProperties() + .getMeasureReportConfiguration() + .getCareGapsReporter()); // TODO: figure out what this extension is for // reporter.addExtension(new // Extension().setUrl(CARE_GAPS_MEASUREREPORT_REPORTER_EXTENSION)); @@ -395,20 +478,26 @@ public class CareGapsService implements IDaoRegistryUser { } private Parameters.ParametersParameterComponent initializePatientParameter(Patient thePatient) { - Parameters.ParametersParameterComponent patientParameter = Resources - .newBackboneElement(Parameters.ParametersParameterComponent.class) - .setName("return"); + Parameters.ParametersParameterComponent patientParameter = Resources.newBackboneElement( + Parameters.ParametersParameterComponent.class) + .setName("return"); patientParameter.setId("subject-" + Ids.simplePart(thePatient)); return patientParameter; } - private Bundle addBundleEntries(String theServerBase, Composition theComposition, List theDetectedIssues, - List theMeasureReports, Map theEvalPlusSDEs) { + private Bundle addBundleEntries( + String 
theServerBase, + Composition theComposition, + List theDetectedIssues, + List theMeasureReports, + Map theEvalPlusSDEs) { Bundle reportBundle = getBundle(); reportBundle.addEntry(getBundleEntry(theServerBase, theComposition)); theMeasureReports.forEach(report -> reportBundle.addEntry(getBundleEntry(theServerBase, report))); theDetectedIssues.forEach(detectedIssue -> reportBundle.addEntry(getBundleEntry(theServerBase, detectedIssue))); - myConfiguredResources.values().forEach(resource -> reportBundle.addEntry(getBundleEntry(theServerBase, resource))); + myConfiguredResources + .values() + .forEach(resource -> reportBundle.addEntry(getBundleEntry(theServerBase, resource))); theEvalPlusSDEs.values().forEach(resource -> reportBundle.addEntry(getBundleEntry(theServerBase, resource))); return reportBundle; } @@ -417,14 +506,14 @@ public class CareGapsService implements IDaoRegistryUser { Pair inNumerator = new MutablePair<>("numerator", false); theMeasureReport.getGroup().forEach(group -> group.getPopulation().forEach(population -> { if (population.hasCode() - && population.getCode().hasCoding(MEASUREREPORT_MEASURE_POPULATION_SYSTEM, inNumerator.getKey()) - && population.getCount() == 1) { + && population.getCode().hasCoding(MEASUREREPORT_MEASURE_POPULATION_SYSTEM, inNumerator.getKey()) + && population.getCount() == 1) { inNumerator.setValue(true); } })); - boolean isPositive = theMeasure.getImprovementNotation().hasCoding(MEASUREREPORT_IMPROVEMENT_NOTATION_SYSTEM, - "increase"); + boolean isPositive = + theMeasure.getImprovementNotation().hasCoding(MEASUREREPORT_IMPROVEMENT_NOTATION_SYSTEM, "increase"); if ((isPositive && !inNumerator.getValue()) || (!isPositive && inNumerator.getValue())) { return CareGapsStatusCode.OPEN_GAP; @@ -434,56 +523,66 @@ public class CareGapsService implements IDaoRegistryUser { } private Bundle.BundleEntryComponent getBundleEntry(String theServerBase, Resource theResource) { - return new Bundle.BundleEntryComponent().setResource(theResource) - .setFullUrl(getFullUrl(theServerBase, theResource)); + return new Bundle.BundleEntryComponent() + .setResource(theResource) + .setFullUrl(getFullUrl(theServerBase, theResource)); } - private Composition.SectionComponent getSection(Measure theMeasure, MeasureReport theMeasureReport, DetectedIssue theDetectedIssue, - CareGapsStatusCode theGapStatus) { - String narrative = String.format(HTML_DIV_PARAGRAPH_CONTENT, - theGapStatus == CareGapsStatusCode.CLOSED_GAP ? "No detected issues." - : String.format("Issues detected. See %s for details.", Ids.simple(theDetectedIssue))); + private Composition.SectionComponent getSection( + Measure theMeasure, + MeasureReport theMeasureReport, + DetectedIssue theDetectedIssue, + CareGapsStatusCode theGapStatus) { + String narrative = String.format( + HTML_DIV_PARAGRAPH_CONTENT, + theGapStatus == CareGapsStatusCode.CLOSED_GAP + ? "No detected issues." + : String.format("Issues detected. See %s for details.", Ids.simple(theDetectedIssue))); return new CompositionSectionComponentBuilder<>(Composition.SectionComponent.class) - .withTitle(theMeasure.hasTitle() ? theMeasure.getTitle() : theMeasure.getUrl()) - .withFocus(Ids.simple(theMeasureReport)) - .withText(new NarrativeSettings(narrative)) - .withEntry(Ids.simple(theDetectedIssue)) - .build(); + .withTitle(theMeasure.hasTitle() ? 
theMeasure.getTitle() : theMeasure.getUrl()) + .withFocus(Ids.simple(theMeasureReport)) + .withText(new NarrativeSettings(narrative)) + .withEntry(Ids.simple(theDetectedIssue)) + .build(); } private Bundle getBundle() { return new BundleBuilder<>(Bundle.class) - .withProfile(CARE_GAPS_BUNDLE_PROFILE) - .withType(Bundle.BundleType.DOCUMENT.toString()) - .build(); + .withProfile(CARE_GAPS_BUNDLE_PROFILE) + .withType(Bundle.BundleType.DOCUMENT.toString()) + .build(); } private Composition getComposition(Patient thePatient) { return new CompositionBuilder<>(Composition.class) - .withProfile(CARE_GAPS_COMPOSITION_PROFILE) - .withType(CARE_GAPS_CODES.get("http://loinc.org/96315-7")) - .withStatus(Composition.CompositionStatus.FINAL.toString()) - .withTitle("Care Gap Report for " + Ids.simplePart(thePatient)) - .withSubject(Ids.simple(thePatient)) - .withAuthor(Ids.simple(myConfiguredResources.get("care_gaps_composition_section_author"))) - // .withCustodian(organization) // TODO: Optional: identifies the organization - // who is responsible for ongoing maintenance of and accessing to this gaps in - // care report. Add as a setting and optionally read if it's there. - .build(); + .withProfile(CARE_GAPS_COMPOSITION_PROFILE) + .withType(CARE_GAPS_CODES.get("http://loinc.org/96315-7")) + .withStatus(Composition.CompositionStatus.FINAL.toString()) + .withTitle("Care Gap Report for " + Ids.simplePart(thePatient)) + .withSubject(Ids.simple(thePatient)) + .withAuthor(Ids.simple(myConfiguredResources.get("care_gaps_composition_section_author"))) + // .withCustodian(organization) // TODO: Optional: identifies the organization + // who is responsible for ongoing maintenance of and accessing to this gaps in + // care report. Add as a setting and optionally read if it's there. 
+ .build(); } - private DetectedIssue getDetectedIssue(Patient thePatient, MeasureReport theMeasureReport, CareGapsStatusCode theCareGapStatusCode) { + private DetectedIssue getDetectedIssue( + Patient thePatient, MeasureReport theMeasureReport, CareGapsStatusCode theCareGapStatusCode) { return new DetectedIssueBuilder<>(DetectedIssue.class) - .withProfile(CARE_GAPS_DETECTED_ISSUE_PROFILE) - .withStatus(DetectedIssue.DetectedIssueStatus.FINAL.toString()) - .withCode(CARE_GAPS_CODES.get("http://terminology.hl7.org/CodeSystem/v3-ActCode/CAREGAP")) - .withPatient(Ids.simple(thePatient)) - .withEvidenceDetail(Ids.simple(theMeasureReport)) - .withModifierExtension(new ImmutablePair<>( - CARE_GAPS_GAP_STATUS_EXTENSION, - new CodeableConceptSettings().add(CARE_GAPS_GAP_STATUS_SYSTEM, theCareGapStatusCode.toString(), - theCareGapStatusCode.toDisplayString()))) - .build(); + .withProfile(CARE_GAPS_DETECTED_ISSUE_PROFILE) + .withStatus(DetectedIssue.DetectedIssueStatus.FINAL.toString()) + .withCode(CARE_GAPS_CODES.get("http://terminology.hl7.org/CodeSystem/v3-ActCode/CAREGAP")) + .withPatient(Ids.simple(thePatient)) + .withEvidenceDetail(Ids.simple(theMeasureReport)) + .withModifierExtension(new ImmutablePair<>( + CARE_GAPS_GAP_STATUS_EXTENSION, + new CodeableConceptSettings() + .add( + CARE_GAPS_GAP_STATUS_SYSTEM, + theCareGapStatusCode.toString(), + theCareGapStatusCode.toDisplayString()))) + .build(); } protected void populateEvaluatedResources(MeasureReport theMeasureReport, Map theResources) { @@ -505,9 +604,11 @@ public class CareGapsService implements IDaoRegistryUser { for (Extension extension : theMeasureReport.getExtension()) { if (extension.hasUrl() && extension.getUrl().equals(MEASUREREPORT_MEASURE_SUPPLEMENTALDATA_EXTENSION)) { Reference sdeRef = extension.hasValue() && extension.getValue() instanceof Reference - ? (Reference) extension.getValue() - : null; - if (sdeRef != null && sdeRef.hasReference() && !sdeRef.getReference().startsWith("#")) { + ? (Reference) extension.getValue() + : null; + if (sdeRef != null + && sdeRef.hasReference() + && !sdeRef.getReference().startsWith("#")) { IdType sdeId = new IdType(sdeRef.getReference()); if (!theResources.containsKey(Ids.simple(sdeId))) { theResources.put(Ids.simple(sdeId), read(sdeId)); @@ -517,19 +618,21 @@ public class CareGapsService implements IDaoRegistryUser { } } } + private Parameters initializeResult() { return newResource(Parameters.class, "care-gaps-report-" + UUID.randomUUID()); } public static String getFullUrl(String theServerAddress, IBaseResource theResource) { - checkArgument(theResource.getIdElement().hasIdPart(), - "Cannot generate a fullUrl because the resource does not have an id."); + checkArgument( + theResource.getIdElement().hasIdPart(), + "Cannot generate a fullUrl because the resource does not have an id."); return getFullUrl(theServerAddress, theResource.fhirType(), Ids.simplePart(theResource)); } public static String getFullUrl(String theServerAddress, String theFhirType, String theElementId) { - return String.format("%s%s/%s", theServerAddress + (theServerAddress.endsWith("/") ? "" : "/"), theFhirType, - theElementId); + return String.format( + "%s%s/%s", theServerAddress + (theServerAddress.endsWith("/") ? 
"" : "/"), theFhirType, theElementId); } @Override @@ -540,5 +643,4 @@ public class CareGapsService implements IDaoRegistryUser { public CrProperties getCrProperties() { return myCrProperties; } - } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/ISubmitDataService.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/ISubmitDataService.java index 999c3933f3f..18dff29865b 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/ISubmitDataService.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/ISubmitDataService.java @@ -23,5 +23,4 @@ import ca.uhn.fhir.rest.api.server.RequestDetails; import java.util.function.Function; -public interface ISubmitDataService extends Function { -} +public interface ISubmitDataService extends Function {} diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/MeasureOperationsProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/MeasureOperationsProvider.java index b9ec350c50b..33a2984d26a 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/MeasureOperationsProvider.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/MeasureOperationsProvider.java @@ -32,7 +32,6 @@ import org.hl7.fhir.r4.model.IdType; import org.hl7.fhir.r4.model.Measure; import org.hl7.fhir.r4.model.MeasureReport; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; import java.util.function.Function; @@ -64,29 +63,31 @@ public class MeasureOperationsProvider { * @return the calculated MeasureReport */ @Operation(name = ProviderConstants.CQL_EVALUATE_MEASURE, idempotent = true, type = Measure.class) - public MeasureReport evaluateMeasure(@IdParam IdType theId, - @OperationParam(name = "periodStart") String thePeriodStart, - @OperationParam(name = "periodEnd") String thePeriodEnd, - @OperationParam(name = "reportType") String theReportType, - @OperationParam(name = "subject") String theSubject, - @OperationParam(name = "practitioner") String thePractitioner, - @OperationParam(name = "lastReceivedOn") String theLastReceivedOn, - @OperationParam(name = "productLine") String theProductLine, - @OperationParam(name = "additionalData") Bundle theAdditionalData, - @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, - RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { + public MeasureReport evaluateMeasure( + @IdParam IdType theId, + @OperationParam(name = "periodStart") String thePeriodStart, + @OperationParam(name = "periodEnd") String thePeriodEnd, + @OperationParam(name = "reportType") String theReportType, + @OperationParam(name = "subject") String theSubject, + @OperationParam(name = "practitioner") String thePractitioner, + @OperationParam(name = "lastReceivedOn") String theLastReceivedOn, + @OperationParam(name = "productLine") String theProductLine, + @OperationParam(name = "additionalData") Bundle theAdditionalData, + @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, + RequestDetails theRequestDetails) + throws InternalErrorException, FHIRException { return this.myR4MeasureServiceFactory - .apply(theRequestDetails) - .evaluateMeasure( - theId, - thePeriodStart, - thePeriodEnd, - theReportType, - theSubject, - thePractitioner, - theLastReceivedOn, - theProductLine, - theAdditionalData, - theTerminologyEndpoint); + .apply(theRequestDetails) + .evaluateMeasure( + theId, + thePeriodStart, + 
thePeriodEnd,
+ theReportType,
+ theSubject,
+ thePractitioner,
+ theLastReceivedOn,
+ theProductLine,
+ theAdditionalData,
+ theTerminologyEndpoint);
 }
 }
diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/MeasureService.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/MeasureService.java
index 0d29c97768a..0d83c70c525 100644
--- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/MeasureService.java
+++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/MeasureService.java
@@ -76,43 +76,36 @@ public class MeasureService implements IDaoRegistryUser {
 private Logger ourLogger = LoggerFactory.getLogger(MeasureService.class);
- public static final List CQI_CONTACTDETAIL = Collections.singletonList(
- new ContactDetail()
- .addTelecom(
- new ContactPoint()
+ public static final List CQI_CONTACTDETAIL = Collections.singletonList(new ContactDetail()
+ .addTelecom(new ContactPoint()
 .setSystem(ContactPoint.ContactPointSystem.URL)
 .setValue("http://www.hl7.org/Special/committees/cqi/index.cfm")));
- public static final List US_JURISDICTION_CODING = Collections.singletonList(
- new CodeableConcept()
- .addCoding(
- new Coding(COUNTRY_CODING_SYSTEM_CODE, US_COUNTRY_CODE, US_COUNTRY_DISPLAY)));
+ public static final List US_JURISDICTION_CODING = Collections.singletonList(new CodeableConcept()
+ .addCoding(new Coding(COUNTRY_CODING_SYSTEM_CODE, US_COUNTRY_CODE, US_COUNTRY_DISPLAY)));
 public static final SearchParameter SUPPLEMENTAL_DATA_SEARCHPARAMETER = (SearchParameter) new SearchParameter()
- .setUrl(MEASUREREPORT_SUPPLEMENTALDATA_SEARCHPARAMETER_URL)
- .setVersion(MEASUREREPORT_SUPPLEMENTALDATA_SEARCHPARAMETER_VERSION)
- .setName("DEQMMeasureReportSupplementalData")
- .setStatus(Enumerations.PublicationStatus.ACTIVE)
- .setDate(MEASUREREPORT_SUPPLEMENTALDATA_SEARCHPARAMETER_DEFINITION_DATE)
- .setPublisher("HL7 International - Clinical Quality Information Work Group")
- .setContact(CQI_CONTACTDETAIL)
- .setDescription(
- String.format(
- "Returns resources (supplemental data) from references on extensions on the MeasureReport with urls matching %s.",
- MEASUREREPORT_MEASURE_SUPPLEMENTALDATA_EXTENSION))
- .setJurisdiction(US_JURISDICTION_CODING)
- .addBase("MeasureReport")
- .setCode("supplemental-data")
- .setType(Enumerations.SearchParamType.REFERENCE)
- .setExpression(
- String.format("MeasureReport.extension('%s').value",
- MEASUREREPORT_MEASURE_SUPPLEMENTALDATA_EXTENSION))
- .setXpath(
- String.format("f:MeasureReport/f:extension[@url='%s'].value",
- MEASUREREPORT_MEASURE_SUPPLEMENTALDATA_EXTENSION))
- .setXpathUsage(SearchParameter.XPathUsageType.NORMAL)
- .setTitle("Supplemental Data")
- .setId("deqm-measurereport-supplemental-data");
+ .setUrl(MEASUREREPORT_SUPPLEMENTALDATA_SEARCHPARAMETER_URL)
+ .setVersion(MEASUREREPORT_SUPPLEMENTALDATA_SEARCHPARAMETER_VERSION)
+ .setName("DEQMMeasureReportSupplementalData")
+ .setStatus(Enumerations.PublicationStatus.ACTIVE)
+ .setDate(MEASUREREPORT_SUPPLEMENTALDATA_SEARCHPARAMETER_DEFINITION_DATE)
+ .setPublisher("HL7 International - Clinical Quality Information Work Group")
+ .setContact(CQI_CONTACTDETAIL)
+ .setDescription(String.format(
+ "MeasureReport.extension('%s').value", MEASUREREPORT_MEASURE_SUPPLEMENTALDATA_EXTENSION)) + .setXpath(String.format( + "f:MeasureReport/f:extension[@url='%s'].value", MEASUREREPORT_MEASURE_SUPPLEMENTALDATA_EXTENSION)) + .setXpathUsage(SearchParameter.XPathUsageType.NORMAL) + .setTitle("Supplemental Data") + .setId("deqm-measurereport-supplemental-data"); @Autowired protected ITerminologyProviderFactory myTerminologyProviderFactory; @@ -130,7 +123,8 @@ public class MeasureService implements IDaoRegistryUser { protected IFhirDalFactory myFhirDalFactory; @Autowired - protected Map myGlobalLibraryCache; + protected Map + myGlobalLibraryCache; @Autowired protected CqlOptions myCqlOptions; @@ -177,16 +171,17 @@ public class MeasureService implements IDaoRegistryUser { * @param theTerminologyEndpoint the endpoint of terminology services for your measure valuesets * @return the calculated MeasureReport */ - public MeasureReport evaluateMeasure(IdType theId, - String thePeriodStart, - String thePeriodEnd, - String theReportType, - String theSubject, - String thePractitioner, - String theLastReceivedOn, - String theProductLine, - Bundle theAdditionalData, - Endpoint theTerminologyEndpoint) { + public MeasureReport evaluateMeasure( + IdType theId, + String thePeriodStart, + String thePeriodEnd, + String theReportType, + String theSubject, + String thePractitioner, + String theLastReceivedOn, + String theProductLine, + Bundle theAdditionalData, + Endpoint theTerminologyEndpoint) { ensureSupplementalDataElementSearchParameter(); @@ -205,23 +200,62 @@ public class MeasureService implements IDaoRegistryUser { LibrarySourceProvider libraryContentProvider = this.myLibraryContentProviderFactory.create(myRequestDetails); FhirDal fhirDal = this.myFhirDalFactory.create(myRequestDetails); - org.opencds.cqf.cql.evaluator.measure.r4.R4MeasureProcessor measureProcessor = new org.opencds.cqf.cql.evaluator.measure.r4.R4MeasureProcessor( - null, this.myDataProviderFactory, null, null, null, terminologyProvider, libraryContentProvider, dataProvider, - fhirDal, myMeasureEvaluationOptions, myCqlOptions, - null); + org.opencds.cqf.cql.evaluator.measure.r4.R4MeasureProcessor measureProcessor = + new org.opencds.cqf.cql.evaluator.measure.r4.R4MeasureProcessor( + null, + this.myDataProviderFactory, + null, + null, + null, + terminologyProvider, + libraryContentProvider, + dataProvider, + fhirDal, + myMeasureEvaluationOptions, + myCqlOptions, + null); MeasureReport measureReport = null; if (StringUtils.isBlank(theSubject) && StringUtils.isNotBlank(thePractitioner)) { List subjectIds = getPractitionerPatients(thePractitioner, myRequestDetails); - measureReport = measureProcessor.evaluateMeasure(measure.getUrl(), thePeriodStart, thePeriodEnd, theReportType, - subjectIds, theLastReceivedOn, null, null, null, theAdditionalData); + measureReport = measureProcessor.evaluateMeasure( + measure.getUrl(), + thePeriodStart, + thePeriodEnd, + theReportType, + subjectIds, + theLastReceivedOn, + null, + null, + null, + theAdditionalData); } else if (StringUtils.isNotBlank(theSubject)) { - measureReport = measureProcessor.evaluateMeasure(measure.getUrl(), thePeriodStart, thePeriodEnd, theReportType, - theSubject, null, theLastReceivedOn, null, null, null, theAdditionalData); + measureReport = measureProcessor.evaluateMeasure( + measure.getUrl(), + thePeriodStart, + thePeriodEnd, + theReportType, + theSubject, + null, + theLastReceivedOn, + null, + null, + null, + theAdditionalData); } else if (StringUtils.isBlank(theSubject) && 
StringUtils.isBlank(thePractitioner)) { - measureReport = measureProcessor.evaluateMeasure(measure.getUrl(), thePeriodStart, thePeriodEnd, theReportType, - null, null, theLastReceivedOn, null, null, null, theAdditionalData); + measureReport = measureProcessor.evaluateMeasure( + measure.getUrl(), + thePeriodStart, + thePeriodEnd, + theReportType, + null, + null, + theLastReceivedOn, + null, + null, + null, + theAdditionalData); } addProductLineExtension(measureReport, theProductLine); @@ -231,12 +265,18 @@ public class MeasureService implements IDaoRegistryUser { private List getPractitionerPatients(String thePractitioner, RequestDetails theRequestDetails) { SearchParameterMap map = SearchParameterMap.newSynchronous(); - map.add("general-practitioner", new ReferenceParam( - thePractitioner.startsWith("Practitioner/") ? thePractitioner : "Practitioner/" + thePractitioner)); + map.add( + "general-practitioner", + new ReferenceParam( + thePractitioner.startsWith("Practitioner/") + ? thePractitioner + : "Practitioner/" + thePractitioner)); List patients = new ArrayList<>(); - IBundleProvider patientProvider = myDaoRegistry.getResourceDao("Patient").search(map, theRequestDetails); + IBundleProvider patientProvider = + myDaoRegistry.getResourceDao("Patient").search(map, theRequestDetails); List patientList = patientProvider.getAllResources(); - patientList.forEach(x -> patients.add(x.getIdElement().getResourceType() + "/" + x.getIdElement().getIdPart())); + patientList.forEach(x -> patients.add( + x.getIdElement().getResourceType() + "/" + x.getIdElement().getIdPart())); return patients; } @@ -255,10 +295,10 @@ public class MeasureService implements IDaoRegistryUser { } protected void ensureSupplementalDataElementSearchParameter() { - //create a transaction bundle + // create a transaction bundle BundleBuilder builder = new BundleBuilder(getFhirContext()); - //set the request to be condition on code == supplemental data + // set the request to be condition on code == supplemental data builder.addTransactionCreateEntry(SUPPLEMENTAL_DATA_SEARCHPARAMETER).conditional("code=supplemental-data"); transaction(builder.getBundle(), this.myRequestDetails); } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/SubmitDataProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/SubmitDataProvider.java index 1a630aa8cfb..d3b825e37df 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/SubmitDataProvider.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/SubmitDataProvider.java @@ -31,7 +31,6 @@ import org.hl7.fhir.r4.model.Measure; import org.hl7.fhir.r4.model.MeasureReport; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; import java.util.List; import java.util.function.Function; @@ -52,15 +51,15 @@ public class SubmitDataProvider { * Reasoning Module per the * Da * Vinci DEQM FHIR Implementation Guide. - * - * + * + * * The submitted MeasureReport and Resources will be saved to the local server. * A Bundle reporting the result of the transaction will be returned. - * + * * Usage: * URL: [base]/Measure/$submit-data * URL: [base]/Measure/[id]/$submit-data - * + * * @param theRequestDetails generally auto-populated by the HAPI server * framework. 
* @param theId the Id of the Measure to submit data for @@ -68,13 +67,16 @@ public class SubmitDataProvider { * @param theResources the resources to be submitted * @return Bundle the transaction result */ - @Description(shortDefinition = "$submit-data", value = "Implements the $submit-data operation found in the FHIR Clinical Reasoning Module per the Da Vinci DEQM FHIR Implementation Guide.") + @Description( + shortDefinition = "$submit-data", + value = + "Implements the $submit-data operation found in the FHIR Clinical Reasoning Module per the Da Vinci DEQM FHIR Implementation Guide.") @Operation(name = "$submit-data", type = Measure.class) - public Bundle submitData(RequestDetails theRequestDetails, + public Bundle submitData( + RequestDetails theRequestDetails, @IdParam IdType theId, @OperationParam(name = "measureReport", min = 1, max = 1) MeasureReport theReport, @OperationParam(name = "resource") List theResources) { - return mySubmitDataServiceFunction.apply(theRequestDetails) - .submitData(theId, theReport, theResources); + return mySubmitDataServiceFunction.apply(theRequestDetails).submitData(theId, theReport, theResources); } } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/SubmitDataService.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/SubmitDataService.java index 28603c4abf5..b3004f0a1ce 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/SubmitDataService.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/measure/SubmitDataService.java @@ -31,14 +31,14 @@ import org.slf4j.LoggerFactory; import java.util.List; -public class SubmitDataService{ +public class SubmitDataService { private static final Logger ourLogger = LoggerFactory.getLogger(SubmitDataService.class); private final DaoRegistry myDaoRegistry; private final RequestDetails myRequestDetails; - public SubmitDataService(DaoRegistry theDaoRegistry, RequestDetails theRequestDetails){ + public SubmitDataService(DaoRegistry theDaoRegistry, RequestDetails theRequestDetails) { this.myDaoRegistry = theDaoRegistry; this.myRequestDetails = theRequestDetails; } @@ -59,9 +59,8 @@ public class SubmitDataService{ * StructureDefinition from URL or must it be stored in Ruler?) 
*/ - Bundle transactionBundle = new Bundle() - .setType(Bundle.BundleType.TRANSACTION) - .addEntry(createEntry(theReport)); + Bundle transactionBundle = + new Bundle().setType(Bundle.BundleType.TRANSACTION).addEntry(createEntry(theReport)); if (theResources != null) { for (IBaseResource res : theResources) { @@ -81,20 +80,17 @@ public class SubmitDataService{ private Bundle.BundleEntryComponent createEntry(IBaseResource theResource) { return new Bundle.BundleEntryComponent() - .setResource((Resource) theResource) - .setRequest(createRequest(theResource)); + .setResource((Resource) theResource) + .setRequest(createRequest(theResource)); } private Bundle.BundleEntryRequestComponent createRequest(IBaseResource theResource) { Bundle.BundleEntryRequestComponent request = new Bundle.BundleEntryRequestComponent(); if (theResource.getIdElement().hasValue()) { - request - .setMethod(Bundle.HTTPVerb.PUT) - .setUrl(theResource.getIdElement().getValue()); + request.setMethod(Bundle.HTTPVerb.PUT) + .setUrl(theResource.getIdElement().getValue()); } else { - request - .setMethod(Bundle.HTTPVerb.POST) - .setUrl(theResource.fhirType()); + request.setMethod(Bundle.HTTPVerb.POST).setUrl(theResource.fhirType()); } return request; diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/plandefinition/PlanDefinitionOperationsProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/plandefinition/PlanDefinitionOperationsProvider.java index 24510f6db86..a59fc333cfd 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/plandefinition/PlanDefinitionOperationsProvider.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/plandefinition/PlanDefinitionOperationsProvider.java @@ -39,6 +39,7 @@ import org.springframework.stereotype.Component; public class PlanDefinitionOperationsProvider { @Autowired IRepositoryFactory myRepositoryFactory; + @Autowired IPlanDefinitionProcessorFactory myR4PlanDefinitionProcessorFactory; @@ -78,86 +79,92 @@ public class PlanDefinitionOperationsProvider { * @return The CarePlan that is the result of applying the plan definition */ @Operation(name = ProviderConstants.CR_OPERATION_APPLY, idempotent = true, type = PlanDefinition.class) - public IBaseResource apply(@IdParam IdType theId, - @OperationParam(name = "canonical") String theCanonical, - @OperationParam(name = "planDefinition") PlanDefinition thePlanDefinition, - @OperationParam(name = "subject") String theSubject, - @OperationParam(name = "encounter") String theEncounter, - @OperationParam(name = "practitioner") String thePractitioner, - @OperationParam(name = "organization") String theOrganization, - @OperationParam(name = "userType") String theUserType, - @OperationParam(name = "userLanguage") String theUserLanguage, - @OperationParam(name = "userTaskContext") String theUserTaskContext, - @OperationParam(name = "setting") String theSetting, - @OperationParam(name = "settingContext") String theSettingContext, - @OperationParam(name = "parameters") Parameters theParameters, - @OperationParam(name = "data") Bundle theData, - @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, - @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, - @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, - RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { + public IBaseResource apply( + @IdParam IdType theId, + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "planDefinition") PlanDefinition 
thePlanDefinition, + @OperationParam(name = "subject") String theSubject, + @OperationParam(name = "encounter") String theEncounter, + @OperationParam(name = "practitioner") String thePractitioner, + @OperationParam(name = "organization") String theOrganization, + @OperationParam(name = "userType") String theUserType, + @OperationParam(name = "userLanguage") String theUserLanguage, + @OperationParam(name = "userTaskContext") String theUserTaskContext, + @OperationParam(name = "setting") String theSetting, + @OperationParam(name = "settingContext") String theSettingContext, + @OperationParam(name = "parameters") Parameters theParameters, + @OperationParam(name = "data") Bundle theData, + @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, + @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, + @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, + RequestDetails theRequestDetails) + throws InternalErrorException, FHIRException { return myR4PlanDefinitionProcessorFactory - .create(myRepositoryFactory.create(theRequestDetails)) - .apply(theId, - new CanonicalType(theCanonical), - thePlanDefinition, - theSubject, - theEncounter, - thePractitioner, - theOrganization, - theUserType, - theUserLanguage, - theUserTaskContext, - theSetting, - theSettingContext, - theParameters, - true, - theData, - null, - theDataEndpoint, - theContentEndpoint, - theTerminologyEndpoint); + .create(myRepositoryFactory.create(theRequestDetails)) + .apply( + theId, + new CanonicalType(theCanonical), + thePlanDefinition, + theSubject, + theEncounter, + thePractitioner, + theOrganization, + theUserType, + theUserLanguage, + theUserTaskContext, + theSetting, + theSettingContext, + theParameters, + true, + theData, + null, + theDataEndpoint, + theContentEndpoint, + theTerminologyEndpoint); } @Operation(name = ProviderConstants.CR_OPERATION_APPLY, idempotent = true, type = PlanDefinition.class) - public IBaseResource apply(@OperationParam(name = "canonical") String theCanonical, - @OperationParam(name = "planDefinition") PlanDefinition thePlanDefinition, - @OperationParam(name = "subject") String theSubject, - @OperationParam(name = "encounter") String theEncounter, - @OperationParam(name = "practitioner") String thePractitioner, - @OperationParam(name = "organization") String theOrganization, - @OperationParam(name = "userType") String theUserType, - @OperationParam(name = "userLanguage") String theUserLanguage, - @OperationParam(name = "userTaskContext") String theUserTaskContext, - @OperationParam(name = "setting") String theSetting, - @OperationParam(name = "settingContext") String theSettingContext, - @OperationParam(name = "parameters") Parameters theParameters, - @OperationParam(name = "data") Bundle theData, - @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, - @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, - @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, - RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { + public IBaseResource apply( + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "planDefinition") PlanDefinition thePlanDefinition, + @OperationParam(name = "subject") String theSubject, + @OperationParam(name = "encounter") String theEncounter, + @OperationParam(name = "practitioner") String thePractitioner, + @OperationParam(name = "organization") String theOrganization, + @OperationParam(name = "userType") String 
theUserType, + @OperationParam(name = "userLanguage") String theUserLanguage, + @OperationParam(name = "userTaskContext") String theUserTaskContext, + @OperationParam(name = "setting") String theSetting, + @OperationParam(name = "settingContext") String theSettingContext, + @OperationParam(name = "parameters") Parameters theParameters, + @OperationParam(name = "data") Bundle theData, + @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, + @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, + @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, + RequestDetails theRequestDetails) + throws InternalErrorException, FHIRException { return myR4PlanDefinitionProcessorFactory - .create(myRepositoryFactory.create(theRequestDetails)) - .apply(null, - new CanonicalType(theCanonical), - thePlanDefinition, - theSubject, - theEncounter, - thePractitioner, - theOrganization, - theUserType, - theUserLanguage, - theUserTaskContext, - theSetting, - theSettingContext, - theParameters, - true, - theData, - null, - theDataEndpoint, - theContentEndpoint, - theTerminologyEndpoint); + .create(myRepositoryFactory.create(theRequestDetails)) + .apply( + null, + new CanonicalType(theCanonical), + thePlanDefinition, + theSubject, + theEncounter, + thePractitioner, + theOrganization, + theUserType, + theUserLanguage, + theUserTaskContext, + theSetting, + theSettingContext, + theParameters, + true, + theData, + null, + theDataEndpoint, + theContentEndpoint, + theTerminologyEndpoint); } /** @@ -196,105 +203,119 @@ public class PlanDefinitionOperationsProvider { * @return The Bundle that is the result of applying the plan definition */ @Operation(name = ProviderConstants.CR_OPERATION_R5_APPLY, idempotent = true, type = PlanDefinition.class) - public IBaseResource applyR5(@IdParam IdType theId, - @OperationParam(name = "canonical") String theCanonical, - @OperationParam(name = "planDefinition") PlanDefinition thePlanDefinition, - @OperationParam(name = "subject") String theSubject, - @OperationParam(name = "encounter") String theEncounter, - @OperationParam(name = "practitioner") String thePractitioner, - @OperationParam(name = "organization") String theOrganization, - @OperationParam(name = "userType") String theUserType, - @OperationParam(name = "userLanguage") String theUserLanguage, - @OperationParam(name = "userTaskContext") String theUserTaskContext, - @OperationParam(name = "setting") String theSetting, - @OperationParam(name = "settingContext") String theSettingContext, - @OperationParam(name = "parameters") Parameters theParameters, - @OperationParam(name = "data") Bundle theData, - @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, - @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, - @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, - RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { + public IBaseResource applyR5( + @IdParam IdType theId, + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "planDefinition") PlanDefinition thePlanDefinition, + @OperationParam(name = "subject") String theSubject, + @OperationParam(name = "encounter") String theEncounter, + @OperationParam(name = "practitioner") String thePractitioner, + @OperationParam(name = "organization") String theOrganization, + @OperationParam(name = "userType") String theUserType, + @OperationParam(name = "userLanguage") String theUserLanguage, + @OperationParam(name = 
"userTaskContext") String theUserTaskContext, + @OperationParam(name = "setting") String theSetting, + @OperationParam(name = "settingContext") String theSettingContext, + @OperationParam(name = "parameters") Parameters theParameters, + @OperationParam(name = "data") Bundle theData, + @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, + @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, + @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, + RequestDetails theRequestDetails) + throws InternalErrorException, FHIRException { return myR4PlanDefinitionProcessorFactory - .create(myRepositoryFactory.create(theRequestDetails)) - .applyR5(theId, - new CanonicalType(theCanonical), - thePlanDefinition, - theSubject, - theEncounter, - thePractitioner, - theOrganization, - theUserType, - theUserLanguage, - theUserTaskContext, - theSetting, - theSettingContext, - theParameters, - true, - theData, - null, - theDataEndpoint, - theContentEndpoint, - theTerminologyEndpoint); + .create(myRepositoryFactory.create(theRequestDetails)) + .applyR5( + theId, + new CanonicalType(theCanonical), + thePlanDefinition, + theSubject, + theEncounter, + thePractitioner, + theOrganization, + theUserType, + theUserLanguage, + theUserTaskContext, + theSetting, + theSettingContext, + theParameters, + true, + theData, + null, + theDataEndpoint, + theContentEndpoint, + theTerminologyEndpoint); } @Operation(name = ProviderConstants.CR_OPERATION_R5_APPLY, idempotent = true, type = PlanDefinition.class) - public IBaseResource applyR5(@OperationParam(name = "canonical") String theCanonical, - @OperationParam(name = "planDefinition") PlanDefinition thePlanDefinition, - @OperationParam(name = "subject") String theSubject, - @OperationParam(name = "encounter") String theEncounter, - @OperationParam(name = "practitioner") String thePractitioner, - @OperationParam(name = "organization") String theOrganization, - @OperationParam(name = "userType") String theUserType, - @OperationParam(name = "userLanguage") String theUserLanguage, - @OperationParam(name = "userTaskContext") String theUserTaskContext, - @OperationParam(name = "setting") String theSetting, - @OperationParam(name = "settingContext") String theSettingContext, - @OperationParam(name = "parameters") Parameters theParameters, - @OperationParam(name = "data") Bundle theData, - @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, - @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, - @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, - RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { + public IBaseResource applyR5( + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "planDefinition") PlanDefinition thePlanDefinition, + @OperationParam(name = "subject") String theSubject, + @OperationParam(name = "encounter") String theEncounter, + @OperationParam(name = "practitioner") String thePractitioner, + @OperationParam(name = "organization") String theOrganization, + @OperationParam(name = "userType") String theUserType, + @OperationParam(name = "userLanguage") String theUserLanguage, + @OperationParam(name = "userTaskContext") String theUserTaskContext, + @OperationParam(name = "setting") String theSetting, + @OperationParam(name = "settingContext") String theSettingContext, + @OperationParam(name = "parameters") Parameters theParameters, + @OperationParam(name = "data") Bundle theData, + 
@OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, + @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, + @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, + RequestDetails theRequestDetails) + throws InternalErrorException, FHIRException { return myR4PlanDefinitionProcessorFactory - .create(myRepositoryFactory.create(theRequestDetails)) - .applyR5(null, - new CanonicalType(theCanonical), - thePlanDefinition, - theSubject, - theEncounter, - thePractitioner, - theOrganization, - theUserType, - theUserLanguage, - theUserTaskContext, - theSetting, - theSettingContext, - theParameters, - true, - theData, - null, - theDataEndpoint, - theContentEndpoint, - theTerminologyEndpoint); + .create(myRepositoryFactory.create(theRequestDetails)) + .applyR5( + null, + new CanonicalType(theCanonical), + thePlanDefinition, + theSubject, + theEncounter, + thePractitioner, + theOrganization, + theUserType, + theUserLanguage, + theUserTaskContext, + theSetting, + theSettingContext, + theParameters, + true, + theData, + null, + theDataEndpoint, + theContentEndpoint, + theTerminologyEndpoint); } @Operation(name = ProviderConstants.CR_OPERATION_PACKAGE, idempotent = true, type = PlanDefinition.class) - public IBaseBundle packagePlanDefinition(@IdParam IdType theId, - @OperationParam(name = "canonical") String theCanonical, - @OperationParam(name = "usePut") String theIsPut, - RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { + public IBaseBundle packagePlanDefinition( + @IdParam IdType theId, + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "usePut") String theIsPut, + RequestDetails theRequestDetails) + throws InternalErrorException, FHIRException { return myR4PlanDefinitionProcessorFactory - .create(myRepositoryFactory.create(theRequestDetails)) - .packagePlanDefinition(theId, new CanonicalType(theCanonical), null, Boolean.parseBoolean(theIsPut)); + .create(myRepositoryFactory.create(theRequestDetails)) + .packagePlanDefinition(theId, new CanonicalType(theCanonical), null, Boolean.parseBoolean(theIsPut)); } @Operation(name = ProviderConstants.CR_OPERATION_PACKAGE, idempotent = true, type = PlanDefinition.class) - public IBaseBundle packagePlanDefinition(@OperationParam(name = "id") String theId, - @OperationParam(name = "canonical") String theCanonical, - @OperationParam(name = "usePut") String theIsPut, - RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { + public IBaseBundle packagePlanDefinition( + @OperationParam(name = "id") String theId, + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "usePut") String theIsPut, + RequestDetails theRequestDetails) + throws InternalErrorException, FHIRException { return myR4PlanDefinitionProcessorFactory - .create(myRepositoryFactory.create(theRequestDetails)) - .packagePlanDefinition(new IdType("PlanDefinition", theId), new CanonicalType(theCanonical), null, Boolean.parseBoolean(theIsPut)); + .create(myRepositoryFactory.create(theRequestDetails)) + .packagePlanDefinition( + new IdType("PlanDefinition", theId), + new CanonicalType(theCanonical), + null, + Boolean.parseBoolean(theIsPut)); } } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/questionnaire/QuestionnaireOperationsProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/questionnaire/QuestionnaireOperationsProvider.java index 83cabe022d4..2c58e037a11 100644 --- 
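For orientation, the $apply providers reformatted above are typically driven from a client call like the one sketched here. It is illustrative only; the server URL, PlanDefinition id, and subject reference are hypothetical values, not anything introduced by this change.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.CarePlan;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Parameters;

public class PlanDefinitionApplyClientSketch {

    public static CarePlan apply() {
        IGenericClient client = FhirContext.forR4()
                .newRestfulGenericClient("http://localhost:8080/fhir"); // placeholder base URL

        Parameters inParams = new Parameters();
        inParams.addParameter("subject", "Patient/example"); // placeholder subject reference

        // Invokes [base]/PlanDefinition/[id]/$apply and returns the generated CarePlan
        return client.operation()
                .onInstance(new IdType("PlanDefinition", "example-plan")) // placeholder id
                .named("$apply")
                .withParameters(inParams)
                .returnResourceType(CarePlan.class)
                .execute();
    }
}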
a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/questionnaire/QuestionnaireOperationsProvider.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/questionnaire/QuestionnaireOperationsProvider.java @@ -29,7 +29,6 @@ import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.provider.ProviderConstants; import org.hl7.fhir.exceptions.FHIRException; -import org.hl7.fhir.r4.model.BooleanType; import org.hl7.fhir.r4.model.Bundle; import org.hl7.fhir.r4.model.CanonicalType; import org.hl7.fhir.r4.model.Endpoint; @@ -42,6 +41,7 @@ import org.springframework.beans.factory.annotation.Autowired; public class QuestionnaireOperationsProvider { @Autowired IRepositoryFactory myRepositoryFactory; + @Autowired IQuestionnaireProcessorFactory myR4QuestionnaireProcessorFactory; @@ -69,50 +69,56 @@ public class QuestionnaireOperationsProvider { * @return The partially (or fully)-populated set of answers for the specified Questionnaire. */ @Operation(name = ProviderConstants.CR_OPERATION_PREPOPULATE, idempotent = true, type = Questionnaire.class) - public Questionnaire prepopulate(@IdParam IdType theId, - @OperationParam(name = "canonical") String theCanonical, - @OperationParam(name = "questionnaire") Questionnaire theQuestionnaire, - @OperationParam(name = "subject") String theSubject, - @OperationParam(name = "parameters") Parameters theParameters, - @OperationParam(name = "bundle") Bundle theBundle, - @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, - @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, - @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, - RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { + public Questionnaire prepopulate( + @IdParam IdType theId, + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "questionnaire") Questionnaire theQuestionnaire, + @OperationParam(name = "subject") String theSubject, + @OperationParam(name = "parameters") Parameters theParameters, + @OperationParam(name = "bundle") Bundle theBundle, + @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, + @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, + @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, + RequestDetails theRequestDetails) + throws InternalErrorException, FHIRException { return myR4QuestionnaireProcessorFactory - .create(myRepositoryFactory.create(theRequestDetails)) - .prePopulate(theId, - new CanonicalType(theCanonical), - theQuestionnaire, - theSubject, - theParameters, - theBundle, - theDataEndpoint, - theContentEndpoint, - theTerminologyEndpoint); + .create(myRepositoryFactory.create(theRequestDetails)) + .prePopulate( + theId, + new CanonicalType(theCanonical), + theQuestionnaire, + theSubject, + theParameters, + theBundle, + theDataEndpoint, + theContentEndpoint, + theTerminologyEndpoint); } @Operation(name = ProviderConstants.CR_OPERATION_PREPOPULATE, idempotent = true, type = Questionnaire.class) - public Questionnaire prepopulate(@OperationParam(name = "canonical") String theCanonical, - @OperationParam(name = "questionnaire") Questionnaire theQuestionnaire, - @OperationParam(name = "subject") String theSubject, - @OperationParam(name = "parameters") Parameters theParameters, - @OperationParam(name = "bundle") Bundle theBundle, - @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, - 
@OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, - @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, - RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { + public Questionnaire prepopulate( + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "questionnaire") Questionnaire theQuestionnaire, + @OperationParam(name = "subject") String theSubject, + @OperationParam(name = "parameters") Parameters theParameters, + @OperationParam(name = "bundle") Bundle theBundle, + @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, + @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, + @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, + RequestDetails theRequestDetails) + throws InternalErrorException, FHIRException { return myR4QuestionnaireProcessorFactory - .create(myRepositoryFactory.create(theRequestDetails)) - .prePopulate(null, - new CanonicalType(theCanonical), - theQuestionnaire, - theSubject, - theParameters, - theBundle, - theDataEndpoint, - theContentEndpoint, - theTerminologyEndpoint); + .create(myRepositoryFactory.create(theRequestDetails)) + .prePopulate( + null, + new CanonicalType(theCanonical), + theQuestionnaire, + theSubject, + theParameters, + theBundle, + theDataEndpoint, + theContentEndpoint, + theTerminologyEndpoint); } /** @@ -137,50 +143,56 @@ public class QuestionnaireOperationsProvider { * @return The partially (or fully)-populated set of answers for the specified Questionnaire. */ @Operation(name = ProviderConstants.CR_OPERATION_POPULATE, idempotent = true, type = Questionnaire.class) - public QuestionnaireResponse populate(@IdParam IdType theId, - @OperationParam(name = "canonical") String theCanonical, - @OperationParam(name = "questionnaire") Questionnaire theQuestionnaire, - @OperationParam(name = "subject") String theSubject, - @OperationParam(name = "parameters") Parameters theParameters, - @OperationParam(name = "bundle") Bundle theBundle, - @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, - @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, - @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, - RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { + public QuestionnaireResponse populate( + @IdParam IdType theId, + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "questionnaire") Questionnaire theQuestionnaire, + @OperationParam(name = "subject") String theSubject, + @OperationParam(name = "parameters") Parameters theParameters, + @OperationParam(name = "bundle") Bundle theBundle, + @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, + @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, + @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, + RequestDetails theRequestDetails) + throws InternalErrorException, FHIRException { return (QuestionnaireResponse) myR4QuestionnaireProcessorFactory - .create(myRepositoryFactory.create(theRequestDetails)) - .populate(theId, - new CanonicalType(theCanonical), - theQuestionnaire, - theSubject, - theParameters, - theBundle, - theDataEndpoint, - theContentEndpoint, - theTerminologyEndpoint); + .create(myRepositoryFactory.create(theRequestDetails)) + .populate( + theId, + new CanonicalType(theCanonical), + theQuestionnaire, + theSubject, + theParameters, + theBundle, + theDataEndpoint, + 
theContentEndpoint, + theTerminologyEndpoint); } @Operation(name = ProviderConstants.CR_OPERATION_POPULATE, idempotent = true, type = Questionnaire.class) - public QuestionnaireResponse populate(@OperationParam(name = "canonical") String theCanonical, - @OperationParam(name = "questionnaire") Questionnaire theQuestionnaire, - @OperationParam(name = "subject") String theSubject, - @OperationParam(name = "parameters") Parameters theParameters, - @OperationParam(name = "bundle") Bundle theBundle, - @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, - @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, - @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, - RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { + public QuestionnaireResponse populate( + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "questionnaire") Questionnaire theQuestionnaire, + @OperationParam(name = "subject") String theSubject, + @OperationParam(name = "parameters") Parameters theParameters, + @OperationParam(name = "bundle") Bundle theBundle, + @OperationParam(name = "dataEndpoint") Endpoint theDataEndpoint, + @OperationParam(name = "contentEndpoint") Endpoint theContentEndpoint, + @OperationParam(name = "terminologyEndpoint") Endpoint theTerminologyEndpoint, + RequestDetails theRequestDetails) + throws InternalErrorException, FHIRException { return (QuestionnaireResponse) myR4QuestionnaireProcessorFactory - .create(myRepositoryFactory.create(theRequestDetails)) - .populate(null, - new CanonicalType(theCanonical), - theQuestionnaire, - theSubject, - theParameters, - theBundle, - theDataEndpoint, - theContentEndpoint, - theTerminologyEndpoint); + .create(myRepositoryFactory.create(theRequestDetails)) + .populate( + null, + new CanonicalType(theCanonical), + theQuestionnaire, + theSubject, + theParameters, + theBundle, + theDataEndpoint, + theContentEndpoint, + theTerminologyEndpoint); } /** @@ -195,21 +207,23 @@ public class QuestionnaireOperationsProvider { * @return A Bundle containing the Questionnaire and all related Library, CodeSystem and ValueSet resources */ @Operation(name = ProviderConstants.CR_OPERATION_PACKAGE, idempotent = true, type = Questionnaire.class) - public Bundle packageQuestionnaire(@IdParam IdType theId, - @OperationParam(name = "canonical") String theCanonical, - @OperationParam(name = "usePut") String theIsPut, - RequestDetails theRequestDetails) { + public Bundle packageQuestionnaire( + @IdParam IdType theId, + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "usePut") String theIsPut, + RequestDetails theRequestDetails) { return (Bundle) myR4QuestionnaireProcessorFactory - .create(myRepositoryFactory.create(theRequestDetails)) - .packageQuestionnaire(theId, new CanonicalType(theCanonical), null, Boolean.parseBoolean(theIsPut)); + .create(myRepositoryFactory.create(theRequestDetails)) + .packageQuestionnaire(theId, new CanonicalType(theCanonical), null, Boolean.parseBoolean(theIsPut)); } @Operation(name = ProviderConstants.CR_OPERATION_PACKAGE, idempotent = true, type = Questionnaire.class) - public Bundle packageQuestionnaire(@OperationParam(name = "canonical") String theCanonical, - @OperationParam(name = "usePut") String theIsPut, - RequestDetails theRequestDetails) { + public Bundle packageQuestionnaire( + @OperationParam(name = "canonical") String theCanonical, + @OperationParam(name = "usePut") String theIsPut, + RequestDetails 
theRequestDetails) { return (Bundle) myR4QuestionnaireProcessorFactory - .create(myRepositoryFactory.create(theRequestDetails)) - .packageQuestionnaire(null, new CanonicalType(theCanonical), null, Boolean.parseBoolean(theIsPut)); + .create(myRepositoryFactory.create(theRequestDetails)) + .packageQuestionnaire(null, new CanonicalType(theCanonical), null, Boolean.parseBoolean(theIsPut)); } } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/questionnaireresponse/QuestionnaireResponseOperationsProvider.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/questionnaireresponse/QuestionnaireResponseOperationsProvider.java index f869f8b009e..98dba932bff 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/questionnaireresponse/QuestionnaireResponseOperationsProvider.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/r4/questionnaireresponse/QuestionnaireResponseOperationsProvider.java @@ -37,6 +37,7 @@ import org.springframework.beans.factory.annotation.Autowired; public class QuestionnaireResponseOperationsProvider { @Autowired IRepositoryFactory myRepositoryFactory; + @Autowired IQuestionnaireResponseProcessorFactory myR4QuestionnaireResponseProcessorFactory; @@ -53,18 +54,22 @@ public class QuestionnaireResponseOperationsProvider { * @return The resulting FHIR resource produced after extracting data. This will either be a single resource or a Transaction Bundle that contains multiple resources. */ @Operation(name = ProviderConstants.CR_OPERATION_EXTRACT, idempotent = true, type = QuestionnaireResponse.class) - public IBaseBundle extract(@IdParam IdType theId, @ResourceParam QuestionnaireResponse theQuestionnaireResponse, - RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { + public IBaseBundle extract( + @IdParam IdType theId, + @ResourceParam QuestionnaireResponse theQuestionnaireResponse, + RequestDetails theRequestDetails) + throws InternalErrorException, FHIRException { return myR4QuestionnaireResponseProcessorFactory - .create(myRepositoryFactory.create(theRequestDetails)) - .extract(theId, theQuestionnaireResponse, null, null, null); + .create(myRepositoryFactory.create(theRequestDetails)) + .extract(theId, theQuestionnaireResponse, null, null, null); } @Operation(name = ProviderConstants.CR_OPERATION_EXTRACT, idempotent = true, type = QuestionnaireResponse.class) - public IBaseBundle extract(@ResourceParam QuestionnaireResponse theQuestionnaireResponse, - RequestDetails theRequestDetails) throws InternalErrorException, FHIRException { + public IBaseBundle extract( + @ResourceParam QuestionnaireResponse theQuestionnaireResponse, RequestDetails theRequestDetails) + throws InternalErrorException, FHIRException { return myR4QuestionnaireResponseProcessorFactory - .create(myRepositoryFactory.create(theRequestDetails)) - .extract(null, theQuestionnaireResponse, null, null, null); + .create(myRepositoryFactory.create(theRequestDetails)) + .extract(null, theQuestionnaireResponse, null, null, null); } } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/repo/BundleProviderUtil.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/repo/BundleProviderUtil.java index 538a0c0dc14..bdf8f35cde4 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/repo/BundleProviderUtil.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/repo/BundleProviderUtil.java @@ -20,12 +20,6 @@ package ca.uhn.fhir.cr.repo; * #L% */ -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; 
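Similarly, the Questionnaire $populate provider reformatted above can be exercised from a generic client. The sketch below is an assumption-laden illustration (server URL, Questionnaire id, and subject are placeholders), not part of this changeset.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.QuestionnaireResponse;

public class QuestionnairePopulateClientSketch {

    public static QuestionnaireResponse populate() {
        IGenericClient client = FhirContext.forR4()
                .newRestfulGenericClient("http://localhost:8080/fhir"); // placeholder base URL

        Parameters inParams = new Parameters();
        inParams.addParameter("subject", "Patient/example"); // placeholder subject

        // [base]/Questionnaire/[id]/$populate returns a partially (or fully) populated QuestionnaireResponse
        return client.operation()
                .onInstance(new IdType("Questionnaire", "example-questionnaire")) // placeholder id
                .named("$populate")
                .withParameters(inParams)
                .returnResourceType(QuestionnaireResponse.class)
                .execute();
    }
}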
-import java.util.Objects; -import java.util.Set; - import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.Include; import ca.uhn.fhir.model.valueset.BundleTypeEnum; @@ -45,6 +39,12 @@ import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; import org.hl7.fhir.instance.model.api.IBaseResource; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.Set; + import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -58,10 +58,17 @@ public class BundleProviderUtil { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseResourceReturningMethodBinding.class); - public static IBaseResource createBundleFromBundleProvider(IRestfulServer theServer, - RequestDetails theRequest, Integer theLimit, String theLinkSelf, Set theIncludes, - IBundleProvider theResult, int theOffset, BundleTypeEnum theBundleType, - EncodingEnum theLinkEncoding, String theSearchId) { + public static IBaseResource createBundleFromBundleProvider( + IRestfulServer theServer, + RequestDetails theRequest, + Integer theLimit, + String theLinkSelf, + Set theIncludes, + IBundleProvider theResult, + int theOffset, + BundleTypeEnum theBundleType, + EncodingEnum theLinkEncoding, + String theSearchId) { IVersionSpecificBundleFactory bundleFactory = theServer.getFhirContext().newBundleFactory(); final Integer offset; Integer limit = theLimit; @@ -69,8 +76,8 @@ public class BundleProviderUtil { if (theResult.getCurrentPageOffset() != null) { offset = theResult.getCurrentPageOffset(); limit = theResult.getCurrentPageSize(); - Validate.notNull(limit, - "IBundleProvider returned a non-null offset, but did not return a non-null page size"); + Validate.notNull( + limit, "IBundleProvider returned a non-null offset, but did not return a non-null page size"); } else { offset = RestfulServerUtils.tryToExtractNamedParameter(theRequest, Constants.PARAM_OFFSET); } @@ -165,33 +172,43 @@ public class BundleProviderUtil { if (next.getIdElement() == null || next.getIdElement().isEmpty()) { if (!(next instanceof IBaseOperationOutcome)) { throw new InternalErrorException(Msg.code(2311) - + "Server method returned resource of type[" + next.getClass().getSimpleName() + + "Server method returned resource of type[" + + next.getClass().getSimpleName() + "] with no ID specified (IResource#setId(IdDt) must be called)"); } } } - BundleLinks links = new BundleLinks(theRequest.getFhirServerBase(), theIncludes, - RestfulServerUtils.prettyPrintResponse(theServer, theRequest), theBundleType); + BundleLinks links = new BundleLinks( + theRequest.getFhirServerBase(), + theIncludes, + RestfulServerUtils.prettyPrintResponse(theServer, theRequest), + theBundleType); links.setSelf(theLinkSelf); if (theResult.getCurrentPageOffset() != null) { if (isNotBlank(theResult.getNextPageId())) { - links.setNext(RestfulServerUtils.createOffsetPagingLink(links, - theRequest.getRequestPath(), theRequest.getTenantId(), offset + limit, limit, + links.setNext(RestfulServerUtils.createOffsetPagingLink( + links, + theRequest.getRequestPath(), + theRequest.getTenantId(), + offset + limit, + limit, theRequest.getParameters())); } if (isNotBlank(theResult.getPreviousPageId())) { - links.setNext(RestfulServerUtils.createOffsetPagingLink(links, - theRequest.getRequestPath(), theRequest.getTenantId(), - 
Math.max(offset - limit, 0), limit, theRequest.getParameters())); + links.setNext(RestfulServerUtils.createOffsetPagingLink( + links, + theRequest.getRequestPath(), + theRequest.getTenantId(), + Math.max(offset - limit, 0), + limit, + theRequest.getParameters())); } - } - if (offset != null - || (!theServer.canStoreSearchResults() && !isEverythingOperation(theRequest))) { + if (offset != null || (!theServer.canStoreSearchResults() && !isEverythingOperation(theRequest))) { // Paging without caching // We're doing offset pages int requestedToReturn = numToReturn; @@ -202,28 +219,35 @@ public class BundleProviderUtil { } if (numTotalResults == null || requestedToReturn < numTotalResults) { if (!resourceList.isEmpty()) { - links.setNext( - RestfulServerUtils.createOffsetPagingLink(links, theRequest.getRequestPath(), - theRequest.getTenantId(), defaultIfNull(offset, 0) + numToReturn, - numToReturn, theRequest.getParameters())); + links.setNext(RestfulServerUtils.createOffsetPagingLink( + links, + theRequest.getRequestPath(), + theRequest.getTenantId(), + defaultIfNull(offset, 0) + numToReturn, + numToReturn, + theRequest.getParameters())); } } if (offset != null && offset > 0) { int start = Math.max(0, theOffset - pageSize); - links.setPrev( - RestfulServerUtils.createOffsetPagingLink(links, theRequest.getRequestPath(), - theRequest.getTenantId(), start, pageSize, theRequest.getParameters())); + links.setPrev(RestfulServerUtils.createOffsetPagingLink( + links, + theRequest.getRequestPath(), + theRequest.getTenantId(), + start, + pageSize, + theRequest.getParameters())); } } else if (isNotBlank(theResult.getCurrentPageId())) { // We're doing named pages searchId = theResult.getUuid(); if (isNotBlank(theResult.getNextPageId())) { - links.setNext(RestfulServerUtils.createPagingLink(links, theRequest, searchId, - theResult.getNextPageId(), theRequest.getParameters())); + links.setNext(RestfulServerUtils.createPagingLink( + links, theRequest, searchId, theResult.getNextPageId(), theRequest.getParameters())); } if (isNotBlank(theResult.getPreviousPageId())) { - links.setPrev(RestfulServerUtils.createPagingLink(links, theRequest, searchId, - theResult.getPreviousPageId(), theRequest.getParameters())); + links.setPrev(RestfulServerUtils.createPagingLink( + links, theRequest, searchId, theResult.getPreviousPageId(), theRequest.getParameters())); } } else if (searchId != null) { /* @@ -234,30 +258,37 @@ public class BundleProviderUtil { */ if (resourceList.size() > 0) { if (numTotalResults == null || theOffset + numToReturn < numTotalResults) { - links.setNext((RestfulServerUtils.createPagingLink(links, theRequest, searchId, - theOffset + numToReturn, numToReturn, theRequest.getParameters()))); + links.setNext((RestfulServerUtils.createPagingLink( + links, + theRequest, + searchId, + theOffset + numToReturn, + numToReturn, + theRequest.getParameters()))); } if (theOffset > 0) { int start = Math.max(0, theOffset - pageSize); - links.setPrev(RestfulServerUtils.createPagingLink(links, theRequest, searchId, start, - pageSize, theRequest.getParameters())); + links.setPrev(RestfulServerUtils.createPagingLink( + links, theRequest, searchId, start, pageSize, theRequest.getParameters())); } } } - bundleFactory.addRootPropertiesToBundle(theResult.getUuid(), links, theResult.size(), - theResult.getPublished()); - bundleFactory.addResourcesToBundle(new ArrayList<>(resourceList), theBundleType, - links.serverBase, theServer.getBundleInclusionRule(), theIncludes); + 
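To make the offset arithmetic in the paging-link code above concrete: with an offset of 20 and a page size of 10, the next page starts at 30 and the previous page at max(20 - 10, 0) = 10. A tiny self-contained sketch of the same computation, provided for illustration only:

public class OffsetPagingSketch {

    // Mirrors the next/previous offset computation used when building the paging links above.
    static int nextOffset(int theCurrentOffset, int thePageSize) {
        return theCurrentOffset + thePageSize;
    }

    static int previousOffset(int theCurrentOffset, int thePageSize) {
        return Math.max(theCurrentOffset - thePageSize, 0);
    }

    public static void main(String[] theArgs) {
        System.out.println(nextOffset(20, 10));     // 30
        System.out.println(previousOffset(20, 10)); // 10
        System.out.println(previousOffset(5, 10));  // 0, clamped at the start of the result set
    }
}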
bundleFactory.addRootPropertiesToBundle(theResult.getUuid(), links, theResult.size(), theResult.getPublished()); + bundleFactory.addResourcesToBundle( + new ArrayList<>(resourceList), + theBundleType, + links.serverBase, + theServer.getBundleInclusionRule(), + theIncludes); return bundleFactory.getResourceBundle(); - } private static boolean isEverythingOperation(RequestDetails theRequest) { return (theRequest.getRestOperationType() == RestOperationTypeEnum.EXTENDED_OPERATION_TYPE - || theRequest - .getRestOperationType() == RestOperationTypeEnum.EXTENDED_OPERATION_INSTANCE) - && theRequest.getOperation() != null && theRequest.getOperation().equals("$everything"); + || theRequest.getRestOperationType() == RestOperationTypeEnum.EXTENDED_OPERATION_INSTANCE) + && theRequest.getOperation() != null + && theRequest.getOperation().equals("$everything"); } } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/repo/HapiFhirRepository.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/repo/HapiFhirRepository.java index abe04c3a71e..d61973a8f2c 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/repo/HapiFhirRepository.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/repo/HapiFhirRepository.java @@ -20,22 +20,6 @@ package ca.uhn.fhir.cr.repo; * #L% */ -import static ca.uhn.fhir.cr.repo.RequestDetailsCloner.startWith; -import static org.apache.commons.lang3.StringUtils.isNotBlank; - -import java.io.IOException; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.hl7.fhir.instance.model.api.IBaseBundle; -import org.hl7.fhir.instance.model.api.IBaseConformance; -import org.hl7.fhir.instance.model.api.IBaseParameters; -import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.instance.model.api.IIdType; -import org.opencds.cqf.fhir.api.Repository; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; @@ -53,50 +37,63 @@ import ca.uhn.fhir.rest.server.exceptions.NotImplementedOperationException; import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException; import ca.uhn.fhir.rest.server.method.PageMethodBinding; import ca.uhn.fhir.util.UrlUtil; +import org.hl7.fhir.instance.model.api.IBaseBundle; +import org.hl7.fhir.instance.model.api.IBaseConformance; +import org.hl7.fhir.instance.model.api.IBaseParameters; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; +import org.opencds.cqf.fhir.api.Repository; + +import java.io.IOException; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static ca.uhn.fhir.cr.repo.RequestDetailsCloner.startWith; +import static org.apache.commons.lang3.StringUtils.isNotBlank; /** * This class leverages DaoRegistry from Hapi-fhir to implement CRUD FHIR API operations constrained to provide only the operations necessary for the cql-evaluator modules to function. 
**/ public class HapiFhirRepository implements Repository { - private static final org.slf4j.Logger ourLog = - org.slf4j.LoggerFactory.getLogger(HapiFhirRepository.class); + private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(HapiFhirRepository.class); private final DaoRegistry myDaoRegistry; private final RequestDetails myRequestDetails; private final RestfulServer myRestfulServer; - public HapiFhirRepository(DaoRegistry theDaoRegistry, RequestDetails theRequestDetails, - RestfulServer theRestfulServer) { + public HapiFhirRepository( + DaoRegistry theDaoRegistry, RequestDetails theRequestDetails, RestfulServer theRestfulServer) { myDaoRegistry = theDaoRegistry; myRequestDetails = theRequestDetails; myRestfulServer = theRestfulServer; } @Override - public T read(Class theResourceType, I theId, - Map theHeaders) { + public T read( + Class theResourceType, I theId, Map theHeaders) { var details = startWith(myRequestDetails).addHeaders(theHeaders).create(); return myDaoRegistry.getResourceDao(theResourceType).read(theId, details); } @Override - public MethodOutcome create(T theResource, - Map theHeaders) { + public MethodOutcome create(T theResource, Map theHeaders) { var details = startWith(myRequestDetails).addHeaders(theHeaders).create(); return myDaoRegistry.getResourceDao(theResource).create(theResource, details); } @Override - public MethodOutcome patch(I theId, - P thePatchParameters, Map theHeaders) { + public MethodOutcome patch( + I theId, P thePatchParameters, Map theHeaders) { var details = startWith(myRequestDetails).addHeaders(theHeaders).create(); // TODO: conditional url, patch type, patch body? - return myDaoRegistry.getResourceDao(theId.getResourceType()).patch(theId, null, null, - null, thePatchParameters, details); + return myDaoRegistry + .getResourceDao(theId.getResourceType()) + .patch(theId, null, null, null, thePatchParameters, details); } @Override - public MethodOutcome update(T theResource, - Map theHeaders) { + public MethodOutcome update(T theResource, Map theHeaders) { var details = startWith(myRequestDetails).addHeaders(theHeaders).create(); return myDaoRegistry.getResourceDao(theResource).update(theResource, details); @@ -111,15 +108,17 @@ public class HapiFhirRepository implements Repository { } @Override - public B search(Class theBundleType, - Class theResourceType, Map> theSearchParameters, + public B search( + Class theBundleType, + Class theResourceType, + Map> theSearchParameters, Map theHeaders) { var details = startWith(myRequestDetails).addHeaders(theHeaders).create(); SearchConverter converter = new SearchConverter(); converter.convertParameters(theSearchParameters, fhirContext()); details.setParameters(converter.resultParameters); - var bundleProvider = myDaoRegistry.getResourceDao(theResourceType) - .search(converter.searchParameterMap, details); + var bundleProvider = + myDaoRegistry.getResourceDao(theResourceType).search(converter.searchParameterMap, details); if (bundleProvider == null) { return null; @@ -128,11 +127,10 @@ public class HapiFhirRepository implements Repository { return createBundle(details, bundleProvider, null); } - private B createBundle(RequestDetails theRequestDetails, - IBundleProvider theBundleProvider, String thePagingAction) { + private B createBundle( + RequestDetails theRequestDetails, IBundleProvider theBundleProvider, String thePagingAction) { var count = RestfulServerUtils.extractCountParameter(theRequestDetails); - var linkSelf = 
RestfulServerUtils.createLinkSelf(theRequestDetails.getFhirServerBase(), - theRequestDetails); + var linkSelf = RestfulServerUtils.createLinkSelf(theRequestDetails.getFhirServerBase(), theRequestDetails); Set includes = new HashSet<>(); var reqIncludes = theRequestDetails.getParameters().get(Constants.PARAM_INCLUDE); @@ -142,8 +140,7 @@ public class HapiFhirRepository implements Repository { } } - var offset = RestfulServerUtils.tryToExtractNamedParameter(theRequestDetails, - Constants.PARAM_PAGINGOFFSET); + var offset = RestfulServerUtils.tryToExtractNamedParameter(theRequestDetails, Constants.PARAM_PAGINGOFFSET); if (offset == null || offset < 0) { offset = 0; } @@ -162,20 +159,29 @@ public class HapiFhirRepository implements Repository { var responseEncoding = RestfulServerUtils.determineResponseEncodingNoDefault( theRequestDetails, myRestfulServer.getDefaultResponseEncoding()); - var linkEncoding = theRequestDetails.getParameters().containsKey(Constants.PARAM_FORMAT) - && responseEncoding != null ? responseEncoding.getEncoding() : null; + var linkEncoding = + theRequestDetails.getParameters().containsKey(Constants.PARAM_FORMAT) && responseEncoding != null + ? responseEncoding.getEncoding() + : null; - return (B) BundleProviderUtil.createBundleFromBundleProvider(myRestfulServer, - theRequestDetails, count, linkSelf, includes, theBundleProvider, start, bundleType, - linkEncoding, thePagingAction); + return (B) BundleProviderUtil.createBundleFromBundleProvider( + myRestfulServer, + theRequestDetails, + count, + linkSelf, + includes, + theBundleProvider, + start, + bundleType, + linkEncoding, + thePagingAction); } // TODO: The main use case for this is paging through Bundles, but I suppose that technically // we ought to handle any old link. Maybe this is also an escape hatch for "custom non-FHIR // repository action"? 
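Putting HapiFhirRepository in context, the cql-evaluator modules consume it through the org.opencds.cqf.fhir.api.Repository interface. A minimal usage sketch follows, assuming a DaoRegistry, RequestDetails, and RestfulServer are already available from the surrounding server; the resource id is a placeholder.

import ca.uhn.fhir.cr.repo.HapiFhirRepository;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.RestfulServer;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Patient;
import org.opencds.cqf.fhir.api.Repository;

import java.util.Collections;

public class HapiFhirRepositorySketch {

    public static Patient readPatient(
            DaoRegistry theDaoRegistry, RequestDetails theRequestDetails, RestfulServer theServer) {
        Repository repository = new HapiFhirRepository(theDaoRegistry, theRequestDetails, theServer);

        // Read through the repository facade; an empty header map keeps the sketch minimal.
        return repository.read(Patient.class, new IdType("Patient", "example"), Collections.emptyMap());
    }
}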
@Override - public B link(Class theBundleType, String theUrl, - Map theHeaders) { + public B link(Class theBundleType, String theUrl, Map theHeaders) { var details = startWith(myRequestDetails).addHeaders(theHeaders).create(); var urlParts = UrlUtil.parseUrl(theUrl); details.setCompleteUrl(theUrl); @@ -209,20 +215,20 @@ public class HapiFhirRepository implements Repository { return createBundle(details, bundleProvider, thePagingAction); } - private void validateHaveBundleProvider(String thePagingAction, - IBundleProvider theBundleProvider) { + private void validateHaveBundleProvider(String thePagingAction, IBundleProvider theBundleProvider) { // Return an HTTP 410 if the search is not known if (theBundleProvider == null) { ourLog.info("Client requested unknown paging ID[{}]", thePagingAction); - String msg = fhirContext().getLocalizer().getMessage(PageMethodBinding.class, - "unknownSearchId", thePagingAction); + String msg = fhirContext() + .getLocalizer() + .getMessage(PageMethodBinding.class, "unknownSearchId", thePagingAction); throw new ResourceGoneException(Msg.code(2313) + msg); } } @Override - public C capabilities(Class theCapabilityStatementType, - Map theHeaders) { + public C capabilities( + Class theCapabilityStatementType, Map theHeaders) { var method = myRestfulServer.getServerConformanceMethod(); if (method == null) { return null; @@ -238,70 +244,94 @@ public class HapiFhirRepository implements Repository { } @Override - public R invoke(String theName, - P theParameters, Class theReturnType, Map theHeaders) { - var details = startWith(myRequestDetails).addHeaders(theHeaders).setOperation(theName) - .setParameters(theParameters).create(); + public R invoke( + String theName, P theParameters, Class theReturnType, Map theHeaders) { + var details = startWith(myRequestDetails) + .addHeaders(theHeaders) + .setOperation(theName) + .setParameters(theParameters) + .create(); return invoke(details); } @Override - public
    MethodOutcome invoke(String theName, P theParameters, - Map theHeaders) { - var details = startWith(myRequestDetails).addHeaders(theHeaders).setOperation(theName) - .setParameters(theParameters).create(); + public
    MethodOutcome invoke( + String theName, P theParameters, Map theHeaders) { + var details = startWith(myRequestDetails) + .addHeaders(theHeaders) + .setOperation(theName) + .setParameters(theParameters) + .create(); return invoke(details); } @Override public R invoke( - Class theResourceType, String theName, P theParameters, Class theReturnType, + Class theResourceType, + String theName, + P theParameters, + Class theReturnType, Map theHeaders) { - var details = startWith(myRequestDetails).addHeaders(theHeaders).setOperation(theName) - .setResourceType(theResourceType.getSimpleName()).setParameters(theParameters).create(); + var details = startWith(myRequestDetails) + .addHeaders(theHeaders) + .setOperation(theName) + .setResourceType(theResourceType.getSimpleName()) + .setParameters(theParameters) + .create(); return invoke(details); } @Override public
    MethodOutcome invoke( - Class theResourceType, String theName, P theParameters, - Map theHeaders) { - var details = startWith(myRequestDetails).addHeaders(theHeaders).setOperation(theName) - .setResourceType(theResourceType.getSimpleName()).setParameters(theParameters).create(); - - return invoke(details); - } - - @Override - public R invoke(I theId, - String theName, P theParameters, Class theReturnType, Map theHeaders) { - var details = startWith(myRequestDetails).addHeaders(theHeaders).setOperation(theName) - .setResourceType(theId.getResourceType()).setId(theId).setParameters(theParameters) + Class theResourceType, String theName, P theParameters, Map theHeaders) { + var details = startWith(myRequestDetails) + .addHeaders(theHeaders) + .setOperation(theName) + .setResourceType(theResourceType.getSimpleName()) + .setParameters(theParameters) .create(); return invoke(details); } @Override - public
    MethodOutcome invoke(I theId, - String theName, P theParameters, Map theHeaders) { - var details = startWith(myRequestDetails).addHeaders(theHeaders).setOperation(theName) - .setResourceType(theId.getResourceType()).setId(theId).setParameters(theParameters) + public R invoke( + I theId, String theName, P theParameters, Class theReturnType, Map theHeaders) { + var details = startWith(myRequestDetails) + .addHeaders(theHeaders) + .setOperation(theName) + .setResourceType(theId.getResourceType()) + .setId(theId) + .setParameters(theParameters) .create(); return invoke(details); } - + + @Override + public
    MethodOutcome invoke( + I theId, String theName, P theParameters, Map theHeaders) { + var details = startWith(myRequestDetails) + .addHeaders(theHeaders) + .setOperation(theName) + .setResourceType(theId.getResourceType()) + .setId(theId) + .setParameters(theParameters) + .create(); + + return invoke(details); + } + private void notImplemented() { throw new NotImplementedOperationException(Msg.code(2314) + "history not yet implemented"); } @Override - public B history(P theParameters, - Class theReturnBundleType, Map theHeaders) { + public B history( + P theParameters, Class theReturnBundleType, Map theHeaders) { notImplemented(); return null; @@ -309,16 +339,15 @@ public class HapiFhirRepository implements Repository { @Override public B history( - Class theResourceType, P theParameters, Class theReturnBundleType, - Map theHeaders) { + Class theResourceType, P theParameters, Class theReturnBundleType, Map theHeaders) { notImplemented(); return null; } @Override - public B history(I theId, - P theParameters, Class theReturnBundleType, Map theHeaders) { + public B history( + I theId, P theParameters, Class theReturnBundleType, Map theHeaders) { notImplemented(); return null; @@ -331,8 +360,8 @@ public class HapiFhirRepository implements Repository { protected R invoke(RequestDetails theDetails) { try { - return (R) myRestfulServer.determineResourceMethod(theDetails, null) - .invokeServer(myRestfulServer, theDetails); + return (R) + myRestfulServer.determineResourceMethod(theDetails, null).invokeServer(myRestfulServer, theDetails); } catch (IOException e) { throw new RuntimeException(Msg.code(2315) + e); } diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/repo/RequestDetailsCloner.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/repo/RequestDetailsCloner.java index 559c611da9b..6d6e68049d1 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/repo/RequestDetailsCloner.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/repo/RequestDetailsCloner.java @@ -1,11 +1,3 @@ -package ca.uhn.fhir.cr.repo; - -import java.util.HashMap; -import java.util.Map; - -import org.hl7.fhir.instance.model.api.IBaseParameters; -import org.hl7.fhir.instance.model.api.IIdType; - /*- * #%L * HAPI FHIR - Clinical Reasoning @@ -25,10 +17,16 @@ import org.hl7.fhir.instance.model.api.IIdType; * limitations under the License. 
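For reference, the startWith(...) builder used throughout HapiFhirRepository above follows the pattern sketched here. The sketch assumes it is compiled in the same package as RequestDetailsCloner (the builder may be package-scoped), and the operation name and resource type are hypothetical.

package ca.uhn.fhir.cr.repo; // sketch placed alongside RequestDetailsCloner in case the builder is package-scoped

import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.model.api.IBaseParameters;

import java.util.Map;

class RequestDetailsClonerSketch {

    static RequestDetails cloneForOperation(
            RequestDetails theSourceDetails, Map<String, String> theHeaders, IBaseParameters theParameters) {
        return RequestDetailsCloner.startWith(theSourceDetails)
                .addHeaders(theHeaders)            // carry caller-supplied headers into the clone
                .setOperation("$apply")            // hypothetical operation name
                .setResourceType("PlanDefinition") // hypothetical resource type
                .setParameters(theParameters)
                .create();
    }
}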
* #L% */ +package ca.uhn.fhir.cr.repo; import ca.uhn.fhir.rest.api.RequestTypeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.SystemRequestDetails; +import org.hl7.fhir.instance.model.api.IBaseParameters; +import org.hl7.fhir.instance.model.api.IIdType; + +import java.util.HashMap; +import java.util.Map; /** * This class produces partial clones of RequestDetails, the intent being to reuse the context of a diff --git a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/repo/SearchConverter.java b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/repo/SearchConverter.java index 46b9e828357..cc4a8a87e7c 100644 --- a/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/repo/SearchConverter.java +++ b/hapi-fhir-storage-cr/src/main/java/ca/uhn/fhir/cr/repo/SearchConverter.java @@ -20,42 +20,50 @@ package ca.uhn.fhir.cr.repo; * #L% */ -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import javax.annotation.Nonnull; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.model.api.IQueryParameterAnd; import ca.uhn.fhir.model.api.IQueryParameterOr; import ca.uhn.fhir.model.api.IQueryParameterType; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import javax.annotation.Nonnull; + /** * The IGenericClient API represents searches with OrLists, while the FhirRepository API uses nested * lists. This class (will eventually) convert between them */ public class SearchConverter { // hardcoded list from FHIR specs: https://www.hl7.org/fhir/search.html - private final List searchResultParameters = Arrays.asList("_sort", "_count", "_include", - "_revinclude", "_summary", "_total", "_elements", "_contained", "_containedType"); + private final List searchResultParameters = Arrays.asList( + "_sort", + "_count", + "_include", + "_revinclude", + "_summary", + "_total", + "_elements", + "_contained", + "_containedType"); public final Map> separatedSearchParameters = new HashMap<>(); public final Map> separatedResultParameters = new HashMap<>(); public final SearchParameterMap searchParameterMap = new SearchParameterMap(); public final Map resultParameters = new HashMap<>(); - void convertParameters(Map> theParameters, - FhirContext theFhirContext) { - if (theParameters == null) { return; } + void convertParameters(Map> theParameters, FhirContext theFhirContext) { + if (theParameters == null) { + return; + } separateParameterTypes(theParameters); convertToSearchParameterMap(separatedSearchParameters); convertToStringMap(separatedResultParameters, theFhirContext); } - public void convertToStringMap(@Nonnull Map> theParameters, - @Nonnull FhirContext theFhirContext) { + public void convertToStringMap( + @Nonnull Map> theParameters, @Nonnull FhirContext theFhirContext) { for (var entry : theParameters.entrySet()) { String[] values = new String[entry.getValue().size()]; for (int i = 0; i < entry.getValue().size(); i++) { @@ -86,8 +94,7 @@ public class SearchConverter { } } - public void separateParameterTypes( - @Nonnull Map> theParameters) { + public void separateParameterTypes(@Nonnull Map> theParameters) { for (var entry : theParameters.entrySet()) { if (isSearchResultParameter(entry.getKey())) { separatedResultParameters.put(entry.getKey(), entry.getValue()); diff --git a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSubmitterInterceptorLoader.java 
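To illustrate what SearchConverter separates, the sketch below feeds it one real search parameter and one search result parameter, based on the separation logic shown above. It is an assumption-laden example: the erased map value type is taken to be List<IQueryParameterType>, matching the Repository search API, and the parameter values are placeholders.

import ca.uhn.fhir.cr.repo.SearchConverter;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.param.NumberParam;
import ca.uhn.fhir.rest.param.TokenParam;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class SearchConverterSketch {

    public static void main(String[] theArgs) {
        Map<String, List<IQueryParameterType>> parameters = new HashMap<>();
        parameters.put("code", List.of(new TokenParam("http://loinc.org", "1234-5"))); // a search parameter
        parameters.put("_count", List.of(new NumberParam(10)));                        // a search result parameter

        SearchConverter converter = new SearchConverter();
        converter.separateParameterTypes(parameters);

        System.out.println(converter.separatedSearchParameters.keySet()); // [code]
        System.out.println(converter.separatedResultParameters.keySet()); // [_count]
    }
}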
b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSubmitterInterceptorLoader.java index e3c8f86868e..db674267fe0 100644 --- a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSubmitterInterceptorLoader.java +++ b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/interceptor/MdmSubmitterInterceptorLoader.java @@ -37,14 +37,19 @@ public class MdmSubmitterInterceptorLoader { @Autowired private IMdmSettings myMdmSettings; + @Autowired JpaStorageSettings myStorageSettings; + @Autowired private IMdmStorageInterceptor myIMdmStorageInterceptor; + @Autowired private MdmSearchExpandingInterceptor myMdmSearchExpandingInterceptorInterceptor; + @Autowired private IInterceptorService myInterceptorService; + @Autowired private SubscriptionSubmitInterceptorLoader mySubscriptionSubmitInterceptorLoader; @@ -58,7 +63,8 @@ public class MdmSubmitterInterceptorLoader { myInterceptorService.registerInterceptor(myIMdmStorageInterceptor); myInterceptorService.registerInterceptor(myMdmSearchExpandingInterceptorInterceptor); ourLog.info("MDM interceptor registered"); - // We need to call SubscriptionSubmitInterceptorLoader.start() again in case there were no subscription types the first time it was called. + // We need to call SubscriptionSubmitInterceptorLoader.start() again in case there were no subscription types + // the first time it was called. mySubscriptionSubmitInterceptorLoader.start(); } } diff --git a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/LoadGoldenIdsStep.java b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/LoadGoldenIdsStep.java index 04f8275a06d..60d4bb98ade 100644 --- a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/LoadGoldenIdsStep.java +++ b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/LoadGoldenIdsStep.java @@ -32,7 +32,8 @@ import ca.uhn.fhir.mdm.batch2.clear.MdmClearJobParameters; import javax.annotation.Nonnull; -public class LoadGoldenIdsStep implements IJobStepWorker { +public class LoadGoldenIdsStep + implements IJobStepWorker { private final ResourceIdListStep myResourceIdListStep; public LoadGoldenIdsStep(IGoldenResourceSearchSvc theGoldenResourceSearchSvc) { @@ -43,7 +44,10 @@ public class LoadGoldenIdsStep implements IJobStepWorker theStepExecutionDetails, @Nonnull IJobDataSink theDataSink) throws JobExecutionFailedException { + public RunOutcome run( + @Nonnull StepExecutionDetails theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException { return myResourceIdListStep.run(theStepExecutionDetails, theDataSink); } } diff --git a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/MdmBatch2Config.java b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/MdmBatch2Config.java index 25824b207d4..ddca6b3b5ef 100644 --- a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/MdmBatch2Config.java +++ b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/MdmBatch2Config.java @@ -34,15 +34,13 @@ import static ca.uhn.fhir.mdm.batch2.clear.MdmClearAppCtx.MDM_CLEAR_JOB_BEAN_NAM import static ca.uhn.fhir.mdm.batch2.submit.MdmSubmitAppCtx.MDM_SUBMIT_JOB_BEAN_NAME; @Configuration -@Import({ - MdmClearAppCtx.class, - MdmSubmitAppCtx.class -}) +@Import({MdmClearAppCtx.class, MdmSubmitAppCtx.class}) public class MdmBatch2Config { @Bean - MdmJobDefinitionLoader mdmJobDefinitionLoader(JobDefinitionRegistry theJobDefinitionRegistry, - @Qualifier(MDM_CLEAR_JOB_BEAN_NAME) JobDefinition theClearJobDefinition, - 
@Qualifier(MDM_SUBMIT_JOB_BEAN_NAME) JobDefinition theSubmitJobDefinition) { + MdmJobDefinitionLoader mdmJobDefinitionLoader( + JobDefinitionRegistry theJobDefinitionRegistry, + @Qualifier(MDM_CLEAR_JOB_BEAN_NAME) JobDefinition theClearJobDefinition, + @Qualifier(MDM_SUBMIT_JOB_BEAN_NAME) JobDefinition theSubmitJobDefinition) { return new MdmJobDefinitionLoader(theJobDefinitionRegistry, theClearJobDefinition, theSubmitJobDefinition); } } diff --git a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/MdmGenerateRangeChunksStep.java b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/MdmGenerateRangeChunksStep.java index 10f96281e6b..9b4b2002e63 100644 --- a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/MdmGenerateRangeChunksStep.java +++ b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/MdmGenerateRangeChunksStep.java @@ -30,15 +30,18 @@ import ca.uhn.fhir.mdm.batch2.clear.MdmClearJobParameters; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; import java.util.Date; +import javax.annotation.Nonnull; public class MdmGenerateRangeChunksStep implements IFirstJobStepWorker { private static final Logger ourLog = LoggerFactory.getLogger(MdmGenerateRangeChunksStep.class); @Nonnull @Override - public RunOutcome run(@Nonnull StepExecutionDetails theStepExecutionDetails, @Nonnull IJobDataSink theDataSink) throws JobExecutionFailedException { + public RunOutcome run( + @Nonnull StepExecutionDetails theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException { MdmClearJobParameters params = theStepExecutionDetails.getParameters(); Date start = Batch2Constants.BATCH_START_DATE; @@ -55,5 +58,4 @@ public class MdmGenerateRangeChunksStep implements IFirstJobStepWorker { private static final Logger ourLog = LoggerFactory.getLogger(MdmIdChunkProducer.class); @@ -38,11 +38,21 @@ public class MdmIdChunkProducer implements IIdChunkProducer { } @Override - public IResourcePidList fetchResourceIdsPage(Date theNextStart, Date theEnd, @Nonnull Integer thePageSize, RequestPartitionId theRequestPartitionId, MdmChunkRangeJson theData) { + public IResourcePidList fetchResourceIdsPage( + Date theNextStart, + Date theEnd, + @Nonnull Integer thePageSize, + RequestPartitionId theRequestPartitionId, + MdmChunkRangeJson theData) { String resourceType = theData.getResourceType(); - ourLog.info("Fetching golden resource ID chunk for resource type {} - Range {} - {}", resourceType, theNextStart, theEnd); + ourLog.info( + "Fetching golden resource ID chunk for resource type {} - Range {} - {}", + resourceType, + theNextStart, + theEnd); - return myGoldenResourceSearchSvc.fetchGoldenResourceIdsPage(theNextStart, theEnd, thePageSize, theRequestPartitionId, resourceType); + return myGoldenResourceSearchSvc.fetchGoldenResourceIdsPage( + theNextStart, theEnd, thePageSize, theRequestPartitionId, resourceType); } } diff --git a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/MdmJobDefinitionLoader.java b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/MdmJobDefinitionLoader.java index 7fd07bfa4a0..26ca412b7b0 100644 --- a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/MdmJobDefinitionLoader.java +++ b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/MdmJobDefinitionLoader.java @@ -25,9 +25,10 @@ import ca.uhn.fhir.mdm.batch2.clear.MdmClearJobParameters; import ca.uhn.fhir.mdm.batch2.submit.MdmSubmitJobParameters; public class MdmJobDefinitionLoader { - public 
MdmJobDefinitionLoader(JobDefinitionRegistry theJobDefinitionRegistry, - JobDefinition theClearJobDefinition, - JobDefinition theSubmitJobDefinition) { + public MdmJobDefinitionLoader( + JobDefinitionRegistry theJobDefinitionRegistry, + JobDefinition theClearJobDefinition, + JobDefinition theSubmitJobDefinition) { theJobDefinitionRegistry.addJobDefinitionIfNotRegistered(theClearJobDefinition); theJobDefinitionRegistry.addJobDefinitionIfNotRegistered(theSubmitJobDefinition); diff --git a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/clear/MdmClearAppCtx.java b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/clear/MdmClearAppCtx.java index be5007f7053..ea1008ae5af 100644 --- a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/clear/MdmClearAppCtx.java +++ b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/clear/MdmClearAppCtx.java @@ -36,34 +36,35 @@ public class MdmClearAppCtx { public static final String MDM_CLEAR_JOB_BEAN_NAME = "mdmClearJobDefinition"; @Bean(name = MDM_CLEAR_JOB_BEAN_NAME) - public JobDefinition mdmClearJobDefinition(DaoRegistry theDaoRegistry, IGoldenResourceSearchSvc theGoldenResourceSearchSvc, IMdmSettings theMdmSettings) { - return JobDefinition - .newBuilder() - .setJobDefinitionId(JOB_MDM_CLEAR) - .setJobDescription("Clear mdm links and golden resrouces") - .setJobDefinitionVersion(1) - .setParametersType(MdmClearJobParameters.class) - .setParametersValidator(MdmJobParametersValidator(theDaoRegistry, theMdmSettings)) - .gatedExecution() - .addFirstStep( - "generate-ranges", - "Generate date ranges to Mdm Clear", - MdmChunkRangeJson.class, - mdmGenerateRangeChunksStep()) - .addIntermediateStep( - "find-golden-resource-ids", - "Load ids of golden resources to be cleared", - ResourceIdListWorkChunkJson.class, - loadGoldenIdsStep(theGoldenResourceSearchSvc)) - .addLastStep("remove-golden-resources-and-links", - "Remove golden resources and mdm links", - mdmClearStep() - ) - .build(); + public JobDefinition mdmClearJobDefinition( + DaoRegistry theDaoRegistry, + IGoldenResourceSearchSvc theGoldenResourceSearchSvc, + IMdmSettings theMdmSettings) { + return JobDefinition.newBuilder() + .setJobDefinitionId(JOB_MDM_CLEAR) + .setJobDescription("Clear mdm links and golden resrouces") + .setJobDefinitionVersion(1) + .setParametersType(MdmClearJobParameters.class) + .setParametersValidator(MdmJobParametersValidator(theDaoRegistry, theMdmSettings)) + .gatedExecution() + .addFirstStep( + "generate-ranges", + "Generate date ranges to Mdm Clear", + MdmChunkRangeJson.class, + mdmGenerateRangeChunksStep()) + .addIntermediateStep( + "find-golden-resource-ids", + "Load ids of golden resources to be cleared", + ResourceIdListWorkChunkJson.class, + loadGoldenIdsStep(theGoldenResourceSearchSvc)) + .addLastStep( + "remove-golden-resources-and-links", "Remove golden resources and mdm links", mdmClearStep()) + .build(); } @Bean - public MdmClearJobParametersValidator MdmJobParametersValidator(DaoRegistry theDaoRegistry, IMdmSettings theMdmSettings) { + public MdmClearJobParametersValidator MdmJobParametersValidator( + DaoRegistry theDaoRegistry, IMdmSettings theMdmSettings) { return new MdmClearJobParametersValidator(theDaoRegistry, theMdmSettings); } @@ -81,5 +82,4 @@ public class MdmClearAppCtx { public LoadGoldenIdsStep loadGoldenIdsStep(IGoldenResourceSearchSvc theGoldenResourceSearchSvc) { return new LoadGoldenIdsStep(theGoldenResourceSearchSvc); } - } diff --git 
a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/clear/MdmClearJobParameters.java b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/clear/MdmClearJobParameters.java index 97916a03fb7..fed4b7d3615 100644 --- a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/clear/MdmClearJobParameters.java +++ b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/clear/MdmClearJobParameters.java @@ -23,15 +23,16 @@ import ca.uhn.fhir.batch2.jobs.parameters.PartitionedJobParameters; import com.fasterxml.jackson.annotation.JsonProperty; import org.apache.commons.lang3.Validate; -import javax.annotation.Nonnull; -import javax.validation.constraints.Pattern; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nonnull; +import javax.validation.constraints.Pattern; public class MdmClearJobParameters extends PartitionedJobParameters { @JsonProperty("resourceType") @Nonnull - private List<@Pattern(regexp = "^[A-Z][A-Za-z]+$", message = "If populated, must be a valid resource type'") String> myResourceNames; + private List<@Pattern(regexp = "^[A-Z][A-Za-z]+$", message = "If populated, must be a valid resource type'") String> + myResourceNames; public List getResourceNames() { if (myResourceNames == null) { diff --git a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/clear/MdmClearJobParametersValidator.java b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/clear/MdmClearJobParametersValidator.java index 4d353c2545e..2c805ea87c4 100644 --- a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/clear/MdmClearJobParametersValidator.java +++ b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/clear/MdmClearJobParametersValidator.java @@ -24,11 +24,11 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.mdm.api.IMdmSettings; import ca.uhn.fhir.rest.api.server.RequestDetails; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class MdmClearJobParametersValidator implements IJobParametersValidator { @@ -47,7 +47,8 @@ public class MdmClearJobParametersValidator implements IJobParametersValidator retval = new ArrayList<>(); - if (theParameters.getResourceNames() == null || theParameters.getResourceNames().isEmpty()) { + if (theParameters.getResourceNames() == null + || theParameters.getResourceNames().isEmpty()) { retval.add("Mdm Clear Job Parameters must define at least one resource type"); } else { for (String resourceType : theParameters.getResourceNames()) { @@ -62,5 +63,4 @@ public class MdmClearJobParametersValidator implements IJobParametersValidator { @@ -56,8 +56,10 @@ public class MdmClearStep implements IJobStepWorker theStepExecutionDetails, @Nonnull IJobDataSink theDataSink) throws JobExecutionFailedException { + public RunOutcome run( + @Nonnull StepExecutionDetails theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException { SystemRequestDetails requestDetails = new SystemRequestDetails(); requestDetails.setRetry(true); requestDetails.setMaxRetries(100); - requestDetails.setRequestPartitionId(theStepExecutionDetails.getParameters().getRequestPartitionId()); + requestDetails.setRequestPartitionId( + theStepExecutionDetails.getParameters().getRequestPartitionId()); TransactionDetails transactionDetails = new TransactionDetails(); - myHapiTransactionService - .execute(requestDetails, 
transactionDetails, buildJob(requestDetails, transactionDetails, theStepExecutionDetails)); + myHapiTransactionService.execute( + requestDetails, + transactionDetails, + buildJob(requestDetails, transactionDetails, theStepExecutionDetails)); return new RunOutcome(theStepExecutionDetails.getData().size()); } - MdmClearJob buildJob(RequestDetails requestDetails, TransactionDetails transactionDetails, StepExecutionDetails theStepExecutionDetails) { + MdmClearJob buildJob( + RequestDetails requestDetails, + TransactionDetails transactionDetails, + StepExecutionDetails theStepExecutionDetails) { return new MdmClearJob(requestDetails, transactionDetails, theStepExecutionDetails); } @@ -89,7 +100,10 @@ public class MdmClearStep implements IJobStepWorker theStepExecutionDetails) { + public MdmClearJob( + RequestDetails theRequestDetails, + TransactionDetails theTransactionDetails, + StepExecutionDetails theStepExecutionDetails) { myRequestDetails = theRequestDetails; myTransactionDetails = theTransactionDetails; myData = theStepExecutionDetails.getData(); @@ -120,7 +134,11 @@ public class MdmClearStep implements IJobStepWorker thePersistentIds) { - ourLog.info("Starting mdm clear work chunk with {} resources - Instance[{}] Chunk[{}]", thePersistentIds.size(), myInstanceId, myChunkId); + ourLog.info( + "Starting mdm clear work chunk with {} resources - Instance[{}] Chunk[{}]", + thePersistentIds.size(), + myInstanceId, + myChunkId); StopWatch sw = new StopWatch(); myMdmLinkSvc.deleteLinksWithAnyReferenceToPids(thePersistentIds); @@ -128,15 +146,22 @@ public class MdmClearStep implements IJobStepWorker { +public class MdmInflateAndSubmitResourcesStep + implements IJobStepWorker { private static final Logger ourLog = Logs.getBatchTroubleshootingLog(); @Autowired @@ -51,29 +51,35 @@ public class MdmInflateAndSubmitResourcesStep implements IJobStepWorker myIdHelperService; @Nonnull @Override - public RunOutcome run(@Nonnull StepExecutionDetails theStepExecutionDetails, - @Nonnull IJobDataSink theDataSink) throws JobExecutionFailedException { + public RunOutcome run( + @Nonnull StepExecutionDetails theStepExecutionDetails, + @Nonnull IJobDataSink theDataSink) + throws JobExecutionFailedException { ResourceIdListWorkChunkJson idList = theStepExecutionDetails.getData(); ourLog.info("Final Step for $mdm-submit - Expand and submit resources"); - ourLog.info("About to expand {} resource IDs into their full resource bodies.", idList.getResourcePersistentIds(myIdHelperService).size()); + ourLog.info( + "About to expand {} resource IDs into their full resource bodies.", + idList.getResourcePersistentIds(myIdHelperService).size()); - //Inflate the resources by PID + // Inflate the resources by PID List allResources = fetchAllResources(idList.getResourcePersistentIds(myIdHelperService)); - //Replace the terminology + // Replace the terminology if (myResponseTerminologyTranslationSvc != null) { myResponseTerminologyTranslationSvc.processResourcesForTerminologyTranslation(allResources); } - //Submit + // Submit for (IBaseResource nextResource : allResources) { myMdmChannelSubmitterSvc.submitResourceToMdmChannel(nextResource); } @@ -87,7 +93,8 @@ public class MdmInflateAndSubmitResourcesStep implements IJobStepWorker dao = myDaoRegistry.getResourceDao(id.getResourceType()); - // This should be a query, but we have PIDs, and we don't have a _pid search param. TODO GGG, figure out how to make this search by pid. + // This should be a query, but we have PIDs, and we don't have a _pid search param. 
TODO GGG, figure out how + // to make this search by pid. try { resources.add(dao.readByPid(id)); } catch (ResourceNotFoundException e) { diff --git a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/submit/MdmSubmitAppCtx.java b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/submit/MdmSubmitAppCtx.java index 8734a75ed6a..974c3c5b24a 100644 --- a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/submit/MdmSubmitAppCtx.java +++ b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/submit/MdmSubmitAppCtx.java @@ -35,41 +35,43 @@ import org.springframework.context.annotation.Configuration; public class MdmSubmitAppCtx { public static final String MDM_SUBMIT_JOB_BEAN_NAME = "mdmSubmitJobDefinition"; - public static String MDM_SUBMIT_JOB= "MDM_SUBMIT"; + public static String MDM_SUBMIT_JOB = "MDM_SUBMIT"; @Bean public GenerateRangeChunksStep submitGenerateRangeChunksStep() { return new GenerateRangeChunksStep(); } - @Bean(name = MDM_SUBMIT_JOB_BEAN_NAME) - public JobDefinition mdmSubmitJobDefinition(IBatch2DaoSvc theBatch2DaoSvc, MatchUrlService theMatchUrlService, FhirContext theFhirContext, IMdmSettings theMdmSettings) { + public JobDefinition mdmSubmitJobDefinition( + IBatch2DaoSvc theBatch2DaoSvc, + MatchUrlService theMatchUrlService, + FhirContext theFhirContext, + IMdmSettings theMdmSettings) { return JobDefinition.newBuilder() - .setJobDefinitionId(MDM_SUBMIT_JOB) - .setJobDescription("MDM Batch Submission") - .setJobDefinitionVersion(1) - .setParametersType(MdmSubmitJobParameters.class) - .setParametersValidator(mdmSubmitJobParametersValidator(theMatchUrlService, theFhirContext, theMdmSettings)) - .addFirstStep( - "generate-ranges", - "generate data ranges to submit to mdm", - PartitionedUrlChunkRangeJson.class, - submitGenerateRangeChunksStep()) - .addIntermediateStep( - "load-ids", - "Load the IDs", - ResourceIdListWorkChunkJson.class, - new LoadIdsStep(theBatch2DaoSvc)) - .addLastStep( - "inflate-and-submit-resources", - "Inflate and Submit resources", - mdmInflateAndSubmitResourcesStep()) - .build(); + .setJobDefinitionId(MDM_SUBMIT_JOB) + .setJobDescription("MDM Batch Submission") + .setJobDefinitionVersion(1) + .setParametersType(MdmSubmitJobParameters.class) + .setParametersValidator( + mdmSubmitJobParametersValidator(theMatchUrlService, theFhirContext, theMdmSettings)) + .addFirstStep( + "generate-ranges", + "generate data ranges to submit to mdm", + PartitionedUrlChunkRangeJson.class, + submitGenerateRangeChunksStep()) + .addIntermediateStep( + "load-ids", "Load the IDs", ResourceIdListWorkChunkJson.class, new LoadIdsStep(theBatch2DaoSvc)) + .addLastStep( + "inflate-and-submit-resources", + "Inflate and Submit resources", + mdmInflateAndSubmitResourcesStep()) + .build(); } @Bean - public MdmSubmitJobParametersValidator mdmSubmitJobParametersValidator(MatchUrlService theMatchUrlService, FhirContext theFhirContext, IMdmSettings theMdmSettings) { + public MdmSubmitJobParametersValidator mdmSubmitJobParametersValidator( + MatchUrlService theMatchUrlService, FhirContext theFhirContext, IMdmSettings theMdmSettings) { return new MdmSubmitJobParametersValidator(theMdmSettings, theMatchUrlService, theFhirContext); } @@ -77,5 +79,4 @@ public class MdmSubmitAppCtx { public MdmInflateAndSubmitResourcesStep mdmInflateAndSubmitResourcesStep() { return new MdmInflateAndSubmitResourcesStep(); } - } diff --git a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/submit/MdmSubmitJobParameters.java 
b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/submit/MdmSubmitJobParameters.java index 3196409324a..7131f9e7578 100644 --- a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/submit/MdmSubmitJobParameters.java +++ b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/submit/MdmSubmitJobParameters.java @@ -21,6 +21,4 @@ package ca.uhn.fhir.mdm.batch2.submit; import ca.uhn.fhir.batch2.jobs.parameters.PartitionedUrlListJobParameters; -public class MdmSubmitJobParameters extends PartitionedUrlListJobParameters { - -} +public class MdmSubmitJobParameters extends PartitionedUrlListJobParameters {} diff --git a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/submit/MdmSubmitJobParametersValidator.java b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/submit/MdmSubmitJobParametersValidator.java index 95ba19a634c..d67d3f03f87 100644 --- a/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/submit/MdmSubmitJobParametersValidator.java +++ b/hapi-fhir-storage-mdm/src/main/java/ca/uhn/fhir/mdm/batch2/submit/MdmSubmitJobParametersValidator.java @@ -28,10 +28,9 @@ import ca.uhn.fhir.mdm.api.IMdmSettings; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.List; - +import javax.annotation.Nonnull; public class MdmSubmitJobParametersValidator implements IJobParametersValidator { @@ -39,7 +38,8 @@ public class MdmSubmitJobParametersValidator implements IJobParametersValidator< private MatchUrlService myMatchUrlService; private FhirContext myFhirContext; - public MdmSubmitJobParametersValidator(IMdmSettings theMdmSettings, MatchUrlService theMatchUrlService, FhirContext theFhirContext) { + public MdmSubmitJobParametersValidator( + IMdmSettings theMdmSettings, MatchUrlService theMatchUrlService, FhirContext theFhirContext) { myMdmSettings = theMdmSettings; myMatchUrlService = theMatchUrlService; myFhirContext = theFhirContext; @@ -59,11 +59,17 @@ public class MdmSubmitJobParametersValidator implements IJobParametersValidator< return errorMsgs; } - private void validateAllSearchParametersApplyToResourceType(List errorMsgs, PartitionedUrl partitionedUrl, String resourceType, RuntimeResourceDefinition resourceDefinition) { + private void validateAllSearchParametersApplyToResourceType( + List errorMsgs, + PartitionedUrl partitionedUrl, + String resourceType, + RuntimeResourceDefinition resourceDefinition) { try { myMatchUrlService.translateMatchUrl(partitionedUrl.getUrl(), resourceDefinition); } catch (MatchUrlService.UnrecognizedSearchParameterException e) { - String errorMsg = String.format("Search parameter %s is not recognized for resource type %s. Source error is %s", e.getParamName(), resourceType, e.getMessage()); + String errorMsg = String.format( + "Search parameter %s is not recognized for resource type %s. 
Source error is %s", + e.getParamName(), resourceType, e.getMessage()); errorMsgs.add(errorMsg); } catch (InvalidRequestException e) { errorMsgs.add("Invalid request detected: " + e.getMessage()); diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/batch2/models/JobInstanceFetchRequest.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/batch2/models/JobInstanceFetchRequest.java index 500d265be04..ebe03bf0bac 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/batch2/models/JobInstanceFetchRequest.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/batch2/models/JobInstanceFetchRequest.java @@ -61,7 +61,11 @@ public class JobInstanceFetchRequest { mySort = theSort; } - public String getJobStatus() { return myJobStatus; } + public String getJobStatus() { + return myJobStatus; + } - public void setJobStatus(String theJobStatus) { myJobStatus = theJobStatus; } + public void setJobStatus(String theJobStatus) { + myJobStatus = theJobStatus; + } } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/cache/BaseResourceCacheSynchronizer.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/cache/BaseResourceCacheSynchronizer.java index d67f5808c7a..dc05cb761ee 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/cache/BaseResourceCacheSynchronizer.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/cache/BaseResourceCacheSynchronizer.java @@ -42,13 +42,13 @@ import org.springframework.context.event.ContextClosedEvent; import org.springframework.context.event.ContextRefreshedEvent; import org.springframework.context.event.EventListener; -import javax.annotation.Nonnull; -import javax.annotation.PostConstruct; -import javax.annotation.PreDestroy; import java.util.Collection; import java.util.List; import java.util.concurrent.Semaphore; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.PostConstruct; +import javax.annotation.PreDestroy; public abstract class BaseResourceCacheSynchronizer implements IResourceChangeListener { private static final Logger ourLog = LoggerFactory.getLogger(BaseResourceCacheSynchronizer.class); @@ -58,8 +58,10 @@ public abstract class BaseResourceCacheSynchronizer implements IResourceChangeLi @Autowired protected ISearchParamRegistry mySearchParamRegistry; + @Autowired private IResourceChangeListenerRegistry myResourceChangeListenerRegistry; + @Autowired DaoRegistry myDaoRegistry; @@ -82,7 +84,9 @@ public abstract class BaseResourceCacheSynchronizer implements IResourceChangeLi mySearchParameterMap = getSearchParameterMap(); mySystemRequestDetails = SystemRequestDetails.forAllPartitions(); - IResourceChangeListenerCache resourceCache = myResourceChangeListenerRegistry.registerResourceResourceChangeListener(myResourceName, mySearchParameterMap, this, REFRESH_INTERVAL); + IResourceChangeListenerCache resourceCache = + myResourceChangeListenerRegistry.registerResourceResourceChangeListener( + myResourceName, mySearchParameterMap, this, REFRESH_INTERVAL); resourceCache.forceRefresh(); } @@ -145,7 +149,11 @@ public abstract class BaseResourceCacheSynchronizer implements IResourceChangeLi Integer resourceCount = resourceBundleList.size(); assert resourceCount != null; if (resourceCount >= SubscriptionConstants.MAX_SUBSCRIPTION_RESULTS) { - ourLog.error("Currently over {} {}s. Some {}s have not been loaded.", SubscriptionConstants.MAX_SUBSCRIPTION_RESULTS, myResourceName, myResourceName); + ourLog.error( + "Currently over {} {}s. 
Some {}s have not been loaded.", + SubscriptionConstants.MAX_SUBSCRIPTION_RESULTS, + myResourceName, + myResourceName); } List resourceList = resourceBundleList.getResources(0, resourceCount); @@ -174,19 +182,20 @@ public abstract class BaseResourceCacheSynchronizer implements IResourceChangeLi return myDaoRegistry.getResourceDao(myResourceName); } - - - - @Override public void handleInit(Collection theResourceIds) { if (!resourceDaoExists()) { - ourLog.warn("The resource type {} is enabled on this server, but there is no {} DAO configured.", myResourceName, myResourceName); + ourLog.warn( + "The resource type {} is enabled on this server, but there is no {} DAO configured.", + myResourceName, + myResourceName); return; } IFhirResourceDao resourceDao = getResourceDao(); SystemRequestDetails systemRequestDetails = SystemRequestDetails.forAllPartitions(); - List resourceList = theResourceIds.stream().map(n -> resourceDao.read(n, systemRequestDetails)).collect(Collectors.toList()); + List resourceList = theResourceIds.stream() + .map(n -> resourceDao.read(n, systemRequestDetails)) + .collect(Collectors.toList()); handleInit(resourceList); } @@ -194,7 +203,8 @@ public abstract class BaseResourceCacheSynchronizer implements IResourceChangeLi @Override public void handleChange(IResourceChangeEvent theResourceChangeEvent) { - // For now ignore the contents of theResourceChangeEvent. In the future, consider updating the registry based on + // For now ignore the contents of theResourceChangeEvent. In the future, consider updating the registry based + // on // known resources that have been created, updated & deleted syncDatabaseToCache(); } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/exception/TokenParamFormatInvalidRequestException.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/exception/TokenParamFormatInvalidRequestException.java index b75ef905e45..1895479d4db 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/exception/TokenParamFormatInvalidRequestException.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/exception/TokenParamFormatInvalidRequestException.java @@ -24,9 +24,7 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; public class TokenParamFormatInvalidRequestException extends InvalidRequestException { public TokenParamFormatInvalidRequestException(String theCode, String theParamName, String theTokenValue) { - super(theCode + "Missing " + theParamName + - " parameter (must supply a value/code and not just a system): " + theTokenValue); + super(theCode + "Missing " + theParamName + " parameter (must supply a value/code and not just a system): " + + theTokenValue); } - - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/IDaoRegistry.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/IDaoRegistry.java index 0e657c027cc..509dd160f56 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/IDaoRegistry.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/IDaoRegistry.java @@ -22,5 +22,4 @@ package ca.uhn.fhir.jpa.api; public interface IDaoRegistry { boolean isResourceTypeSupported(String theResourceType); - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java index 570f89e7194..1101e08db96 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java @@ 
-36,14 +36,14 @@ import org.hl7.fhir.r4.model.Bundle; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Set; import java.util.TreeSet; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; @SuppressWarnings("JavadocLinkAsPlainText") public class JpaStorageSettings extends StorageSettings { @@ -67,25 +67,28 @@ public class JpaStorageSettings extends StorageSettings { * */ @SuppressWarnings("WeakerAccess") - public static final Set DEFAULT_BUNDLE_TYPES_ALLOWED_FOR_STORAGE = Collections.unmodifiableSet(new TreeSet<>(Sets.newHashSet( - Bundle.BundleType.COLLECTION.toCode(), - Bundle.BundleType.DOCUMENT.toCode(), - Bundle.BundleType.MESSAGE.toCode() - ))); + public static final Set DEFAULT_BUNDLE_TYPES_ALLOWED_FOR_STORAGE = + Collections.unmodifiableSet(new TreeSet<>(Sets.newHashSet( + Bundle.BundleType.COLLECTION.toCode(), + Bundle.BundleType.DOCUMENT.toCode(), + Bundle.BundleType.MESSAGE.toCode()))); // update setter javadoc if default changes public static final int DEFAULT_MAX_EXPANSION_SIZE = 1000; - public static final HistoryCountModeEnum DEFAULT_HISTORY_COUNT_MODE = HistoryCountModeEnum.CACHED_ONLY_WITHOUT_OFFSET; + public static final HistoryCountModeEnum DEFAULT_HISTORY_COUNT_MODE = + HistoryCountModeEnum.CACHED_ONLY_WITHOUT_OFFSET; /** * This constant applies to task enablement, e.g. {@link #setEnableTaskStaleSearchCleanup(boolean)}. *

    * By default, all are enabled. */ public static final boolean DEFAULT_ENABLE_TASKS = true; + public static final int DEFAULT_MAXIMUM_INCLUDES_TO_LOAD_PER_PAGE = 1000; /** * @since 5.5.0 */ public static final TagStorageModeEnum DEFAULT_TAG_STORAGE_MODE = TagStorageModeEnum.VERSIONED; + public static final int DEFAULT_EXPUNGE_BATCH_SIZE = 800; public static final int DEFAULT_BUNDLE_BATCH_QUEUE_CAPACITY = 200; @@ -96,6 +99,7 @@ public class JpaStorageSettings extends StorageSettings { * @see #setMaximumSearchResultCountInTransaction(Integer) */ private static final Integer DEFAULT_MAXIMUM_SEARCH_RESULT_COUNT_IN_TRANSACTION = null; + private static final Logger ourLog = LoggerFactory.getLogger(JpaStorageSettings.class); private static final int DEFAULT_REINDEX_BATCH_SIZE = 800; private static final int DEFAULT_MAXIMUM_DELETE_CONFLICT_COUNT = 60; @@ -113,7 +117,8 @@ public class JpaStorageSettings extends StorageSettings { * update setter javadoc if default changes */ @Nonnull - private final Long myTranslationCachesExpireAfterWriteInMinutes = DEFAULT_TRANSLATION_CACHES_EXPIRE_AFTER_WRITE_IN_MINUTES; + private final Long myTranslationCachesExpireAfterWriteInMinutes = + DEFAULT_TRANSLATION_CACHES_EXPIRE_AFTER_WRITE_IN_MINUTES; /** * @since 5.5.0 */ @@ -123,11 +128,13 @@ public class JpaStorageSettings extends StorageSettings { * update setter javadoc if default changes */ private boolean myAllowInlineMatchUrlReferences = true; + private boolean myAllowMultipleDelete; /** * update setter javadoc if default changes */ private int myDeferIndexingForCodesystemsOfSize = 100; + private boolean myDeleteStaleSearches = true; private boolean myEnforceReferentialIntegrityOnDelete = true; private boolean myUniqueIndexesEnabled = true; @@ -144,6 +151,7 @@ public class JpaStorageSettings extends StorageSettings { * update setter javadoc if default changes */ private Integer myFetchSizeDefaultMaximum = null; + private int myMaximumExpansionSize = DEFAULT_MAX_EXPANSION_SIZE; private Integer myMaximumSearchResultCountInTransaction = DEFAULT_MAXIMUM_SEARCH_RESULT_COUNT_IN_TRANSACTION; private ResourceEncodingEnum myResourceEncoding = ResourceEncodingEnum.JSONC; @@ -151,6 +159,7 @@ public class JpaStorageSettings extends StorageSettings { * update setter javadoc if default changes */ private Integer myResourceMetaCountHardLimit = 1000; + private Long myReuseCachedSearchResultsForMillis = DEFAULT_REUSE_CACHED_SEARCH_RESULTS_FOR_MILLIS; private boolean mySchedulingDisabled; private boolean mySuppressUpdatesWithNoChange = true; @@ -175,7 +184,8 @@ public class JpaStorageSettings extends StorageSettings { private boolean myEnforceReferenceTargetTypes = true; private ClientIdStrategyEnum myResourceClientIdStrategy = ClientIdStrategyEnum.ALPHANUMERIC; private boolean myFilterParameterEnabled = false; - private StoreMetaSourceInformationEnum myStoreMetaSourceInformation = StoreMetaSourceInformationEnum.SOURCE_URI_AND_REQUEST_ID; + private StoreMetaSourceInformationEnum myStoreMetaSourceInformation = + StoreMetaSourceInformationEnum.SOURCE_URI_AND_REQUEST_ID; private HistoryCountModeEnum myHistoryCountMode = DEFAULT_HISTORY_COUNT_MODE; private int myInternalSynchronousSearchSize = DEFAULT_INTERNAL_SYNCHRONOUS_SEARCH_SIZE; /** @@ -238,6 +248,7 @@ public class JpaStorageSettings extends StorageSettings { * @since 5.5.0 */ private boolean myEnableTaskBulkExportJobExecution; + private boolean myAccountForDateIndexNulls; /** * @since 5.6.0 @@ -339,7 +350,6 @@ public class JpaStorageSettings extends StorageSettings { if 
(HapiSystemProperties.isUnitTestModeEnabled()) { setJobFastTrackingEnabled(true); } - } /** @@ -1282,8 +1292,10 @@ public class JpaStorageSettings extends StorageSettings { * * @since 4.2.0 */ - public void setPopulateIdentifierInAutoCreatedPlaceholderReferenceTargets(boolean thePopulateIdentifierInAutoCreatedPlaceholderReferenceTargets) { - myPopulateIdentifierInAutoCreatedPlaceholderReferenceTargets = thePopulateIdentifierInAutoCreatedPlaceholderReferenceTargets; + public void setPopulateIdentifierInAutoCreatedPlaceholderReferenceTargets( + boolean thePopulateIdentifierInAutoCreatedPlaceholderReferenceTargets) { + myPopulateIdentifierInAutoCreatedPlaceholderReferenceTargets = + thePopulateIdentifierInAutoCreatedPlaceholderReferenceTargets; } /** @@ -1470,7 +1482,8 @@ public class JpaStorageSettings extends StorageSettings { * SearchParameter resource is added or changed. This should generally * be true (which is the default) */ - public void setMarkResourcesForReindexingUponSearchParameterChange(boolean theMarkResourcesForReindexingUponSearchParameterChange) { + public void setMarkResourcesForReindexingUponSearchParameterChange( + boolean theMarkResourcesForReindexingUponSearchParameterChange) { myMarkResourcesForReindexingUponSearchParameterChange = theMarkResourcesForReindexingUponSearchParameterChange; } @@ -1506,7 +1519,6 @@ public class JpaStorageSettings extends StorageSettings { */ public void setSuppressUpdatesWithNoChange(boolean theSuppressUpdatesWithNoChange) { mySuppressUpdatesWithNoChange = theSuppressUpdatesWithNoChange; - } /** @@ -1791,7 +1803,8 @@ public class JpaStorageSettings extends StorageSettings { */ public void setPreExpandValueSetsMaxCount(int thePreExpandValueSetsMaxCount) { myPreExpandValueSetsMaxCount = thePreExpandValueSetsMaxCount; - setPreExpandValueSetsDefaultCount(Math.min(getPreExpandValueSetsDefaultCount(), getPreExpandValueSetsMaxCount())); + setPreExpandValueSetsDefaultCount( + Math.min(getPreExpandValueSetsDefaultCount(), getPreExpandValueSetsMaxCount())); } /** @@ -1895,7 +1908,6 @@ public class JpaStorageSettings extends StorageSettings { myInternalSynchronousSearchSize = theInternalSynchronousSearchSize; } - /** * If this is enabled (this is the default), this server will attempt to activate and run Bulk Import * batch jobs. Otherwise, this server will not. @@ -2138,7 +2150,6 @@ public class JpaStorageSettings extends StorageSettings { return myAllowAutoInflateBinaries; } - /** * This setting indicates whether binaries are allowed to be automatically inflated from external storage during requests. * Default is true. @@ -2304,7 +2315,6 @@ public class JpaStorageSettings extends StorageSettings { return myNonResourceDbHistoryEnabled; } - public void setNonResourceDbHistoryEnabled(boolean theNonResourceDbHistoryEnabled) { myNonResourceDbHistoryEnabled = theNonResourceDbHistoryEnabled; } @@ -2332,8 +2342,6 @@ public class JpaStorageSettings extends StorageSettings { } } - - /** * This enum provides allowable options for {@link #setResourceServerIdStrategy(IdStrategyEnum)} */ @@ -2400,6 +2408,5 @@ public class JpaStorageSettings extends StorageSettings { * of the resource. 
*/ INLINE - } } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/DaoRegistry.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/DaoRegistry.java index b73c957a571..82bbd2cfe7c 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/DaoRegistry.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/DaoRegistry.java @@ -19,9 +19,9 @@ */ package ca.uhn.fhir.jpa.api.dao; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.RuntimeResourceDefinition; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.api.IDaoRegistry; import ca.uhn.fhir.model.dstu2.valueset.ResourceTypeEnum; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; @@ -32,7 +32,6 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; -import javax.annotation.Nullable; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -42,12 +41,14 @@ import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nullable; public class DaoRegistry implements ApplicationContextAware, IDaoRegistry { private ApplicationContext myAppCtx; @Autowired private FhirContext myContext; + private volatile Map> myResourceNameToResourceDao; private volatile IFhirSystemDao mySystemDao; private Set mySupportedResourceTypes; @@ -74,7 +75,6 @@ public class DaoRegistry implements ApplicationContextAware, IDaoRegistry { } mySupportedResourceTypes = supportedResourceTypes; myResourceNameToResourceDao = null; - } @Override @@ -97,12 +97,11 @@ public class DaoRegistry implements ApplicationContextAware, IDaoRegistry { public IFhirResourceDao getResourceDao(String theResourceName) { IFhirResourceDao retVal = getResourceDaoOrNull(theResourceName); if (retVal == null) { - List supportedResourceTypes = myResourceNameToResourceDao - .keySet() - .stream() - .sorted() - .collect(Collectors.toList()); - throw new InvalidRequestException(Msg.code(572) + "Unable to process request, this server does not know how to handle resources of type " + theResourceName + " - Can handle: " + supportedResourceTypes); + List supportedResourceTypes = + myResourceNameToResourceDao.keySet().stream().sorted().collect(Collectors.toList()); + throw new InvalidRequestException(Msg.code(572) + + "Unable to process request, this server does not know how to handle resources of type " + + theResourceName + " - Can handle: " + supportedResourceTypes); } return retVal; } @@ -114,7 +113,8 @@ public class DaoRegistry implements ApplicationContextAware, IDaoRegistry { public IFhirResourceDao getResourceDao(Class theResourceType) { IFhirResourceDao retVal = getResourceDaoIfExists(theResourceType); - Validate.notNull(retVal, "No DAO exists for resource type %s - Have: %s", theResourceType, myResourceNameToResourceDao); + Validate.notNull( + retVal, "No DAO exists for resource type %s - Have: %s", theResourceType, myResourceNameToResourceDao); return retVal; } @@ -191,13 +191,13 @@ public class DaoRegistry implements ApplicationContextAware, IDaoRegistry { public IFhirResourceDao getDaoOrThrowException(Class theClass) { IFhirResourceDao retVal = getResourceDao(theClass); if (retVal == null) { - List supportedResourceNames = myResourceNameToResourceDao - .keySet() - .stream() - .map(t -> myContext.getResourceType(t)) - .sorted() - .collect(Collectors.toList()); - throw new 
InvalidRequestException(Msg.code(573) + "Unable to process request, this server does not know how to handle resources of type " + myContext.getResourceType(theClass) + " - Can handle: " + supportedResourceNames); + List supportedResourceNames = myResourceNameToResourceDao.keySet().stream() + .map(t -> myContext.getResourceType(t)) + .sorted() + .collect(Collectors.toList()); + throw new InvalidRequestException(Msg.code(573) + + "Unable to process request, this server does not know how to handle resources of type " + + myContext.getResourceType(theClass) + " - Can handle: " + supportedResourceNames); } return retVal; } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IDao.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IDao.java index 5fb009fdf56..cfb5f51714d 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IDao.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IDao.java @@ -34,6 +34,4 @@ public interface IDao { MetadataKeyCurrentlyReindexing CURRENTLY_REINDEXING = new MetadataKeyCurrentlyReindexing("CURRENTLY_REINDEXING"); FhirContext getContext(); - - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDao.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDao.java index de5d967a315..6f08db9be85 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDao.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDao.java @@ -47,13 +47,13 @@ import org.hl7.fhir.instance.model.api.IBaseParameters; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import javax.servlet.http.HttpServletResponse; import java.util.Collection; import java.util.Date; import java.util.List; import java.util.Map; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import javax.servlet.http.HttpServletResponse; /** * Note that this interface is not considered a stable interface. While it is possible to build applications @@ -85,7 +85,12 @@ public interface IFhirResourceDao extends IDao { * won't be indexed and searches won't work. 
* @param theRequestDetails The request details including permissions and partitioning information */ - DaoMethodOutcome create(T theResource, String theIfNoneExist, boolean thePerformIndexing, RequestDetails theRequestDetails, @Nonnull TransactionDetails theTransactionDetails); + DaoMethodOutcome create( + T theResource, + String theIfNoneExist, + boolean thePerformIndexing, + RequestDetails theRequestDetails, + @Nonnull TransactionDetails theTransactionDetails); DaoMethodOutcome create(T theResource, String theIfNoneExist, RequestDetails theRequestDetails); @@ -101,7 +106,11 @@ public interface IFhirResourceDao extends IDao { * This method does not throw an exception if there are delete conflicts, but populates them * in the provided list */ - DaoMethodOutcome delete(IIdType theResource, DeleteConflictList theDeleteConflictsListToPopulate, RequestDetails theRequestDetails, @Nonnull TransactionDetails theTransactionDetails); + DaoMethodOutcome delete( + IIdType theResource, + DeleteConflictList theDeleteConflictsListToPopulate, + RequestDetails theRequestDetails, + @Nonnull TransactionDetails theTransactionDetails); /** * This method throws an exception if there are delete conflicts @@ -114,7 +123,11 @@ public interface IFhirResourceDao extends IDao { * * @since 6.8.0 */ - DeleteMethodOutcome deleteByUrl(String theUrl, DeleteConflictList theDeleteConflictsListToPopulate, RequestDetails theRequestDetails, @Nonnull TransactionDetails theTransactionDetails); + DeleteMethodOutcome deleteByUrl( + String theUrl, + DeleteConflictList theDeleteConflictsListToPopulate, + RequestDetails theRequestDetails, + @Nonnull TransactionDetails theTransactionDetails); /** * This method throws an exception if there are delete conflicts @@ -124,7 +137,11 @@ public interface IFhirResourceDao extends IDao { /** * @deprecated Deprecated in 6.8.0 - Use and implement {@link #deletePidList(String, Collection, DeleteConflictList, RequestDetails, TransactionDetails)} */ - default
DeleteMethodOutcome deletePidList(String theUrl, Collection theResourceIds, DeleteConflictList theDeleteConflicts, RequestDetails theRequest) { + default DeleteMethodOutcome deletePidList( + String theUrl, + Collection theResourceIds, + DeleteConflictList theDeleteConflicts, + RequestDetails theRequest) { return deletePidList(theUrl, theResourceIds, theDeleteConflicts, theRequest, new TransactionDetails()); } @@ -143,7 +160,12 @@ public interface IFhirResourceDao extends IDao { * @return response back to the client * @since 6.8.0 */ - DeleteMethodOutcome deletePidList(String theUrl, Collection theResourceIds, DeleteConflictList theDeleteConflicts, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails); + DeleteMethodOutcome deletePidList( + String theUrl, + Collection theResourceIds, + DeleteConflictList theDeleteConflicts, + RequestDetails theRequestDetails, + TransactionDetails theTransactionDetails); ExpungeOutcome expunge(ExpungeOptions theExpungeOptions, RequestDetails theRequestDetails); @@ -151,7 +173,8 @@ public interface IFhirResourceDao extends IDao {
void expunge(Collection
    theResourceIds, RequestDetails theRequest); - ExpungeOutcome forceExpungeInExistingTransaction(IIdType theId, ExpungeOptions theExpungeOptions, RequestDetails theRequest); + ExpungeOutcome forceExpungeInExistingTransaction( + IIdType theId, ExpungeOptions theExpungeOptions, RequestDetails theRequest); @Nonnull Class getResourceType(); @@ -162,9 +185,13 @@ public interface IFhirResourceDao extends IDao { * @deprecated Use {@link #history(IIdType, HistorySearchDateRangeParam, RequestDetails)} instead */ @Deprecated(since = "6.2") - IBundleProvider history(IIdType theId, Date theSince, Date theUntil, Integer theOffset, RequestDetails theRequestDetails); + IBundleProvider history( + IIdType theId, Date theSince, Date theUntil, Integer theOffset, RequestDetails theRequestDetails); - IBundleProvider history(IIdType theId, HistorySearchDateRangeParam theHistorySearchDateRangeParam, RequestDetails theRequestDetails); + IBundleProvider history( + IIdType theId, + HistorySearchDateRangeParam theHistorySearchDateRangeParam, + RequestDetails theRequestDetails); /** * Not supported in DSTU1! @@ -197,12 +224,26 @@ public interface IFhirResourceDao extends IDao { /** * Opens a new transaction and performs a patch operation */ - DaoMethodOutcome patch(IIdType theId, String theConditionalUrl, PatchTypeEnum thePatchType, String thePatchBody, IBaseParameters theFhirPatchBody, RequestDetails theRequestDetails); + DaoMethodOutcome patch( + IIdType theId, + String theConditionalUrl, + PatchTypeEnum thePatchType, + String thePatchBody, + IBaseParameters theFhirPatchBody, + RequestDetails theRequestDetails); /** * Execute a patch operation within the existing database transaction */ - DaoMethodOutcome patchInTransaction(IIdType theId, String theConditionalUrl, boolean thePerformIndexing, PatchTypeEnum thePatchType, String thePatchBody, IBaseParameters theFhirPatchBody, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails); + DaoMethodOutcome patchInTransaction( + IIdType theId, + String theConditionalUrl, + boolean thePerformIndexing, + PatchTypeEnum thePatchType, + String thePatchBody, + IBaseParameters theFhirPatchBody, + RequestDetails theRequestDetails, + TransactionDetails theTransactionDetails); /** * Read a resource - Note that this variant of the method does not take in a {@link RequestDetails} and @@ -272,9 +313,14 @@ public interface IFhirResourceDao extends IDao { * @param theResourcePersistentId The ID * @return */ - ReindexOutcome reindex(IResourcePersistentId theResourcePersistentId, ReindexParameters theReindexParameters, RequestDetails theRequest, TransactionDetails theTransactionDetails); + ReindexOutcome reindex( + IResourcePersistentId theResourcePersistentId, + ReindexParameters theReindexParameters, + RequestDetails theRequest, + TransactionDetails theTransactionDetails); - void removeTag(IIdType theId, TagTypeEnum theTagType, String theSystem, String theCode, RequestDetails theRequestDetails); + void removeTag( + IIdType theId, TagTypeEnum theTagType, String theSystem, String theCode, RequestDetails theRequestDetails); void removeTag(IIdType theId, TagTypeEnum theTagType, String theSystem, String theCode); @@ -285,12 +331,14 @@ public interface IFhirResourceDao extends IDao { IBundleProvider search(SearchParameterMap theParams, RequestDetails theRequestDetails); - IBundleProvider search(SearchParameterMap theParams, RequestDetails theRequestDetails, HttpServletResponse theServletResponse); + IBundleProvider search( + SearchParameterMap theParams, RequestDetails 
theRequestDetails, HttpServletResponse theServletResponse); /** * Search for IDs for processing a match URLs, etc. */ - default List searchForIds(SearchParameterMap theParams, RequestDetails theRequest) { + default List searchForIds( + SearchParameterMap theParams, RequestDetails theRequest) { return searchForIds(theParams, theRequest, null); } @@ -301,11 +349,13 @@ public interface IFhirResourceDao extends IDao { * create/update, this is the resource being searched for * @since 5.5.0 */ - default List searchForIds(SearchParameterMap theParams, RequestDetails theRequest, @Nullable IBaseResource theConditionalOperationTargetOrNull) { + default List searchForIds( + SearchParameterMap theParams, + RequestDetails theRequest, + @Nullable IBaseResource theConditionalOperationTargetOrNull) { return searchForIds(theParams, theRequest); } - /** * Takes a map of incoming raw search parameters and translates/parses them into * appropriate {@link IQueryParameterType} instances of the appropriate type @@ -338,7 +388,8 @@ public interface IFhirResourceDao extends IDao { * won't be indexed and searches won't work. * @param theRequestDetails The request details including permissions and partitioning information */ - DaoMethodOutcome update(T theResource, String theMatchUrl, boolean thePerformIndexing, RequestDetails theRequestDetails); + DaoMethodOutcome update( + T theResource, String theMatchUrl, boolean thePerformIndexing, RequestDetails theRequestDetails); DaoMethodOutcome update(T theResource, String theMatchUrl, RequestDetails theRequestDetails); @@ -346,7 +397,13 @@ public interface IFhirResourceDao extends IDao { * @param theForceUpdateVersion Create a new version with the same contents as the current version even if the content hasn't changed (this is mostly useful for * resources mapping to external content such as external code systems) */ - DaoMethodOutcome update(T theResource, String theMatchUrl, boolean thePerformIndexing, boolean theForceUpdateVersion, RequestDetails theRequestDetails, @Nonnull TransactionDetails theTransactionDetails); + DaoMethodOutcome update( + T theResource, + String theMatchUrl, + boolean thePerformIndexing, + boolean theForceUpdateVersion, + RequestDetails theRequestDetails, + @Nonnull TransactionDetails theTransactionDetails); /** * Not supported in DSTU1! 
@@ -354,8 +411,14 @@ public interface IFhirResourceDao extends IDao { * @param theRequestDetails The request details including permissions and partitioning information * @return MethodOutcome even if the resource fails validation it should still successfully return with a response status of 200 */ - - MethodOutcome validate(T theResource, IIdType theId, String theRawResource, EncodingEnum theEncoding, ValidationModeEnum theMode, String theProfile, RequestDetails theRequestDetails); + MethodOutcome validate( + T theResource, + IIdType theId, + String theRawResource, + EncodingEnum theEncoding, + ValidationModeEnum theMode, + String theProfile, + RequestDetails theRequestDetails); RuntimeResourceDefinition validateCriteriaAndReturnResourceDefinition(String criteria); @@ -365,5 +428,4 @@ public interface IFhirResourceDao extends IDao { default String getCurrentVersionId(IIdType theReferenceElement) { return read(theReferenceElement.toVersionless()).getIdElement().getVersionIdPart(); } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoCodeSystem.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoCodeSystem.java index d75ec26f68e..bf323ac8226 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoCodeSystem.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoCodeSystem.java @@ -32,8 +32,8 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.hl7.fhir.r4.model.codesystems.ConceptSubsumptionOutcome; import org.springframework.transaction.annotation.Transactional; -import javax.annotation.Nonnull; import java.util.List; +import javax.annotation.Nonnull; public interface IFhirResourceDaoCodeSystem extends IFhirResourceDao { @@ -41,15 +41,38 @@ public interface IFhirResourceDaoCodeSystem extends IFh @Transactional @Nonnull - IValidationSupport.LookupCodeResult lookupCode(IPrimitiveType theCode, IPrimitiveType theSystem, IBaseCoding theCoding, RequestDetails theRequestDetails); + IValidationSupport.LookupCodeResult lookupCode( + IPrimitiveType theCode, + IPrimitiveType theSystem, + IBaseCoding theCoding, + RequestDetails theRequestDetails); @Nonnull - IValidationSupport.LookupCodeResult lookupCode(IPrimitiveType theCode, IPrimitiveType theSystem, IBaseCoding theCoding, IPrimitiveType theDisplayLanguage, RequestDetails theRequestDetails); + IValidationSupport.LookupCodeResult lookupCode( + IPrimitiveType theCode, + IPrimitiveType theSystem, + IBaseCoding theCoding, + IPrimitiveType theDisplayLanguage, + RequestDetails theRequestDetails); - SubsumesResult subsumes(IPrimitiveType theCodeA, IPrimitiveType theCodeB, IPrimitiveType theSystem, IBaseCoding theCodingA, IBaseCoding theCodingB, RequestDetails theRequestDetails); + SubsumesResult subsumes( + IPrimitiveType theCodeA, + IPrimitiveType theCodeB, + IPrimitiveType theSystem, + IBaseCoding theCodingA, + IBaseCoding theCodingB, + RequestDetails theRequestDetails); @Nonnull - IValidationSupport.CodeValidationResult validateCode(IIdType theCodeSystemId, IPrimitiveType theCodeSystemUrl, IPrimitiveType theVersion, IPrimitiveType theCode, IPrimitiveType theDisplay, IBaseCoding theCoding, IBaseDatatype theCodeableConcept, RequestDetails theRequestDetails); + IValidationSupport.CodeValidationResult validateCode( + IIdType theCodeSystemId, + IPrimitiveType theCodeSystemUrl, + IPrimitiveType theVersion, + IPrimitiveType theCode, + IPrimitiveType theDisplay, + IBaseCoding theCoding, + IBaseDatatype theCodeableConcept, + 
RequestDetails theRequestDetails); class SubsumesResult { @@ -67,13 +90,12 @@ public interface IFhirResourceDaoCodeSystem extends IFh public IBaseParameters toParameters(FhirContext theFhirContext) { IBaseParameters retVal = ParametersUtil.newInstance(theFhirContext); - IPrimitiveType outcomeValue = (IPrimitiveType) theFhirContext.getElementDefinition("code").newInstance(); + IPrimitiveType outcomeValue = (IPrimitiveType) + theFhirContext.getElementDefinition("code").newInstance(); outcomeValue.setValueAsString(getOutcome().toCode()); ParametersUtil.addParameterToParameters(theFhirContext, retVal, "outcome", outcomeValue); return retVal; } } - - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoComposition.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoComposition.java index eda1f36d0cf..a269de9bec5 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoComposition.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoComposition.java @@ -31,6 +31,12 @@ import javax.servlet.http.HttpServletRequest; public interface IFhirResourceDaoComposition extends IFhirResourceDao { - IBundleProvider getDocumentForComposition(HttpServletRequest theServletRequest, IIdType theId, IPrimitiveType theCount, IPrimitiveType theOffset, DateRangeParam theLastUpdate, SortSpec theSort, RequestDetails theRequestDetails); - + IBundleProvider getDocumentForComposition( + HttpServletRequest theServletRequest, + IIdType theId, + IPrimitiveType theCount, + IPrimitiveType theOffset, + DateRangeParam theLastUpdate, + SortSpec theSort, + RequestDetails theRequestDetails); } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoEncounter.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoEncounter.java index dedecbfe208..f97b47dacad 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoEncounter.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoEncounter.java @@ -30,8 +30,18 @@ import javax.servlet.http.HttpServletRequest; public interface IFhirResourceDaoEncounter extends IFhirResourceDao { - IBundleProvider encounterInstanceEverything(HttpServletRequest theServletRequest, IIdType theId, IPrimitiveType theCount, IPrimitiveType theOffset, DateRangeParam theLastUpdate, SortSpec theSort); - - IBundleProvider encounterTypeEverything(HttpServletRequest theServletRequest, IPrimitiveType theCount, IPrimitiveType theOffset, DateRangeParam theLastUpdated, SortSpec theSortSpec); + IBundleProvider encounterInstanceEverything( + HttpServletRequest theServletRequest, + IIdType theId, + IPrimitiveType theCount, + IPrimitiveType theOffset, + DateRangeParam theLastUpdate, + SortSpec theSort); + IBundleProvider encounterTypeEverything( + HttpServletRequest theServletRequest, + IPrimitiveType theCount, + IPrimitiveType theOffset, + DateRangeParam theLastUpdated, + SortSpec theSortSpec); } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoObservation.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoObservation.java index 9abdd6c4b5f..82fbbf9135d 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoObservation.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoObservation.java @@ -38,6 +38,6 @@ public interface IFhirResourceDaoObservation extends IF * @param 
theServletResponse * @return */ - IBundleProvider observationsLastN(SearchParameterMap paramMap, RequestDetails theRequestDetails, HttpServletResponse theServletResponse); - + IBundleProvider observationsLastN( + SearchParameterMap paramMap, RequestDetails theRequestDetails, HttpServletResponse theServletResponse); } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoPatient.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoPatient.java index 7e7b3604c7b..1a14ce9ad5a 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoPatient.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoPatient.java @@ -29,15 +29,15 @@ import javax.servlet.http.HttpServletRequest; public interface IFhirResourceDaoPatient extends IFhirResourceDao { - IBundleProvider patientInstanceEverything(HttpServletRequest theServletRequest, - RequestDetails theRequestDetails, - PatientEverythingParameters theQueryParams, - IIdType theId); - - IBundleProvider patientTypeEverything(HttpServletRequest theServletRequest, - RequestDetails theRequestDetails, - PatientEverythingParameters theQueryParams, - TokenOrListParam theId); - + IBundleProvider patientInstanceEverything( + HttpServletRequest theServletRequest, + RequestDetails theRequestDetails, + PatientEverythingParameters theQueryParams, + IIdType theId); + IBundleProvider patientTypeEverything( + HttpServletRequest theServletRequest, + RequestDetails theRequestDetails, + PatientEverythingParameters theQueryParams, + TokenOrListParam theId); } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoSearchParameter.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoSearchParameter.java index d47049c885f..39792b8b684 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoSearchParameter.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoSearchParameter.java @@ -21,6 +21,4 @@ package ca.uhn.fhir.jpa.api.dao; import org.hl7.fhir.instance.model.api.IBaseResource; -public interface IFhirResourceDaoSearchParameter extends IFhirResourceDao { - -} +public interface IFhirResourceDaoSearchParameter extends IFhirResourceDao {} diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoStructureDefinition.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoStructureDefinition.java index 0eec42e4049..d1af5edaf8f 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoStructureDefinition.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoStructureDefinition.java @@ -24,5 +24,4 @@ import org.hl7.fhir.instance.model.api.IBaseResource; public interface IFhirResourceDaoStructureDefinition extends IFhirResourceDao { T generateSnapshot(T theInput, String theUrl, String theWebUrl, String theName); - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoSubscription.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoSubscription.java index 89c892c34f9..894d27fc581 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoSubscription.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoSubscription.java @@ -26,6 +26,6 @@ import org.hl7.fhir.instance.model.api.IIdType; public interface IFhirResourceDaoSubscription extends 
IFhirResourceDao { - Long getSubscriptionTablePidForSubscriptionResource(IIdType theId, RequestDetails theRequest, TransactionDetails theTransactionDetails); - + Long getSubscriptionTablePidForSubscriptionResource( + IIdType theId, RequestDetails theRequest, TransactionDetails theTransactionDetails); } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoValueSet.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoValueSet.java index 7953a9da81c..735366eaab3 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoValueSet.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDaoValueSet.java @@ -34,9 +34,29 @@ public interface IFhirResourceDaoValueSet extends IFhir T expand(T theSource, ValueSetExpansionOptions theOptions); - T expand(IIdType theId, T theValueSet, IPrimitiveType theUrl, IPrimitiveType theValueSetVersion, IPrimitiveType theFilter, IPrimitiveType theContext, IPrimitiveType theContextDirection, IPrimitiveType theOffset, IPrimitiveType theCount, IPrimitiveType theDisplayLanguage, IPrimitiveType theIncludeHierarchy, RequestDetails theRequestDetails); + T expand( + IIdType theId, + T theValueSet, + IPrimitiveType theUrl, + IPrimitiveType theValueSetVersion, + IPrimitiveType theFilter, + IPrimitiveType theContext, + IPrimitiveType theContextDirection, + IPrimitiveType theOffset, + IPrimitiveType theCount, + IPrimitiveType theDisplayLanguage, + IPrimitiveType theIncludeHierarchy, + RequestDetails theRequestDetails); T expandByIdentifier(String theUri, ValueSetExpansionOptions theOptions); - IValidationSupport.CodeValidationResult validateCode(IPrimitiveType theValueSetIdentifier, IIdType theId, IPrimitiveType theCode, IPrimitiveType theSystem, IPrimitiveType theDisplay, IBaseCoding theCoding, IBaseDatatype theCodeableConcept, RequestDetails theRequestDetails); + IValidationSupport.CodeValidationResult validateCode( + IPrimitiveType theValueSetIdentifier, + IIdType theId, + IPrimitiveType theCode, + IPrimitiveType theSystem, + IPrimitiveType theDisplay, + IBaseCoding theCoding, + IBaseDatatype theCodeableConcept, + RequestDetails theRequestDetails); } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirSystemDao.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirSystemDao.java index c823088380e..9c4b2713ba1 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirSystemDao.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirSystemDao.java @@ -28,10 +28,10 @@ import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; import org.hl7.fhir.instance.model.api.IBaseBundle; import org.springframework.transaction.annotation.Transactional; -import javax.annotation.Nullable; import java.util.Date; import java.util.List; import java.util.Map; +import javax.annotation.Nullable; /** * Note that this interface is not considered a stable interface. While it is possible to build applications @@ -54,7 +54,6 @@ public interface IFhirSystemDao extends IDao { @Nullable Map getResourceCountsFromCache(); - IBundleProvider history(Date theDate, Date theUntil, Integer theOffset, RequestDetails theRequestDetails); /** @@ -91,7 +90,8 @@ public interface IFhirSystemDao extends IDao { * * @param thePreFetchIndexes Should resource indexes be loaded */ - default
void preFetchResources(List theResolvedIds, boolean thePreFetchIndexes) { + default void preFetchResources( + List
    theResolvedIds, boolean thePreFetchIndexes) { // nothing by default } } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IJpaDao.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IJpaDao.java index 2f9bad0ec21..599284af3b0 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IJpaDao.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IJpaDao.java @@ -32,25 +32,25 @@ import java.util.Date; public interface IJpaDao { @SuppressWarnings("unchecked") IBasePersistedResource updateEntity( - RequestDetails theRequest, - IBaseResource theResource, - IBasePersistedResource theEntity, - Date theDeletedTimestampOrNull, - boolean thePerformIndexing, - boolean theUpdateVersion, - TransactionDetails theTransactionDetails, - boolean theForceUpdate, - boolean theCreateNewHistoryEntry); + RequestDetails theRequest, + IBaseResource theResource, + IBasePersistedResource theEntity, + Date theDeletedTimestampOrNull, + boolean thePerformIndexing, + boolean theUpdateVersion, + TransactionDetails theTransactionDetails, + boolean theForceUpdate, + boolean theCreateNewHistoryEntry); DaoMethodOutcome updateInternal( - RequestDetails theRequestDetails, - T theResource, - String theMatchUrl, - boolean thePerformIndexing, - boolean theForceUpdateVersion, - IBasePersistedResource theEntity, - IIdType theResourceId, - IBaseResource theOldResource, - RestOperationTypeEnum theOperationType, - TransactionDetails theTransactionDetails); + RequestDetails theRequestDetails, + T theResource, + String theMatchUrl, + boolean thePerformIndexing, + boolean theForceUpdateVersion, + IBasePersistedResource theEntity, + IIdType theResourceId, + IBaseResource theOldResource, + RestOperationTypeEnum theOperationType, + TransactionDetails theTransactionDetails); } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/MetadataKeyCurrentlyReindexing.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/MetadataKeyCurrentlyReindexing.java index ef1e44f0340..908faf57ba2 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/MetadataKeyCurrentlyReindexing.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/MetadataKeyCurrentlyReindexing.java @@ -27,5 +27,4 @@ public final class MetadataKeyCurrentlyReindexing extends ResourceMetadataKeyEnu MetadataKeyCurrentlyReindexing(String theValue) { super(theValue, Boolean.class); } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/MetadataKeyResourcePid.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/MetadataKeyResourcePid.java index 3901a75abe4..f978913430f 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/MetadataKeyResourcePid.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/MetadataKeyResourcePid.java @@ -27,5 +27,4 @@ public final class MetadataKeyResourcePid extends ResourceMetadataKeyEnum MetadataKeyResourcePid(String theValue) { super(theValue, Long.class); } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/PatientEverythingParameters.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/PatientEverythingParameters.java index 79fc7647817..05fa55f4392 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/PatientEverythingParameters.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/PatientEverythingParameters.java @@ -26,28 +26,40 @@ import ca.uhn.fhir.rest.param.StringAndListParam; import org.hl7.fhir.instance.model.api.IPrimitiveType; public 
final class PatientEverythingParameters { - @Description(formalDefinition="Results from this method are returned across multiple pages. This parameter controls the size of those pages.") + @Description( + formalDefinition = + "Results from this method are returned across multiple pages. This parameter controls the size of those pages.") private IPrimitiveType myCount; - @Description(formalDefinition="Results from this method are returned across multiple pages. This parameter controls the offset when fetching a page.") + @Description( + formalDefinition = + "Results from this method are returned across multiple pages. This parameter controls the offset when fetching a page.") private IPrimitiveType myOffset; - @Description(shortDefinition="Only return resources which were last updated as specified by the given range") + @Description(shortDefinition = "Only return resources which were last updated as specified by the given range") private DateRangeParam myLastUpdated; - @Description(shortDefinition="The order in which to sort the results by") + @Description(shortDefinition = "The order in which to sort the results by") private SortSpec mySort; - @Description(shortDefinition="Filter the resources to return only resources matching the given _content filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources linked to by the matched resources)") + @Description( + shortDefinition = + "Filter the resources to return only resources matching the given _content filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources linked to by the matched resources)") private StringAndListParam myContent; - @Description(shortDefinition="Filter the resources to return only resources matching the given _text filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources linked to by the matched resources)") + @Description( + shortDefinition = + "Filter the resources to return only resources matching the given _text filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources linked to by the matched resources)") private StringAndListParam myNarrative; - @Description(shortDefinition = "Filter the resources to return only resources matching the given _filter filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources linked to by the matched resources)") + @Description( + shortDefinition = + "Filter the resources to return only resources matching the given _filter filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources linked to by the matched resources)") private StringAndListParam myFilter; - @Description(shortDefinition = "Filter the resources to return only resources matching the given _type filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources linked to by the matched resources)") + @Description( + shortDefinition = + "Filter the resources to return only resources matching the given _type filter (note that this filter is applied only to results which link to the given patient, not to the patient itself or to supporting resources 
linked to by the matched resources)") private StringAndListParam myTypes; public IPrimitiveType getCount() { diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/ReindexOutcome.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/ReindexOutcome.java index 99c1223a1b0..3d060098aa0 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/ReindexOutcome.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/ReindexOutcome.java @@ -39,5 +39,4 @@ public class ReindexOutcome { } myWarnings.add(theWarning); } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/ReindexParameters.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/ReindexParameters.java index b61b3373e9b..0cd518db3c2 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/ReindexParameters.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/ReindexParameters.java @@ -56,7 +56,6 @@ public class ReindexParameters { return this; } - public enum ReindexSearchParametersEnum { ALL, NONE @@ -67,5 +66,4 @@ public class ReindexParameters { CURRENT_VERSION, ALL_VERSIONS } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/BulkExportJobResults.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/BulkExportJobResults.java index 908682f1276..e07e56214fb 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/BulkExportJobResults.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/BulkExportJobResults.java @@ -37,8 +37,7 @@ public class BulkExportJobResults implements IModelJson { @JsonProperty("originalRequestUrl") private String myOriginalRequestUrl; - public BulkExportJobResults() { - } + public BulkExportJobResults() {} public Map> getResourceTypeToBinaryIds() { if (myResourceTypeToBinaryIds == null) { diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/DaoMethodOutcome.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/DaoMethodOutcome.java index f6e18ccb069..4d2bb709c9d 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/DaoMethodOutcome.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/DaoMethodOutcome.java @@ -111,5 +111,4 @@ public class DaoMethodOutcome extends MethodOutcome { myResourcePersistentId = theResourcePersistentId; return this; } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/DeleteConflict.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/DeleteConflict.java index 81a008626e8..4c186657c7d 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/DeleteConflict.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/DeleteConflict.java @@ -50,10 +50,9 @@ public class DeleteConflict { @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("sourceId", mySourceId) - .append("sourcePath", mySourcePath) - .append("targetId", myTargetId) - .toString(); + .append("sourceId", mySourceId) + .append("sourcePath", mySourcePath) + .append("targetId", myTargetId) + .toString(); } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/DeleteConflictList.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/DeleteConflictList.java index a03ba4893ca..825a4cc69e7 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/DeleteConflictList.java +++ 
b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/DeleteConflictList.java @@ -53,11 +53,11 @@ public class DeleteConflictList implements Iterable { myResourceIdsToIgnoreConflict = theParentList.myResourceIdsToIgnoreConflict; } - public boolean isResourceIdMarkedForDeletion(IIdType theIdType) { Validate.notNull(theIdType); Validate.notBlank(theIdType.toUnqualifiedVersionless().getValue()); - return myResourceIdsMarkedForDeletion.contains(theIdType.toUnqualifiedVersionless().getValue()); + return myResourceIdsMarkedForDeletion.contains( + theIdType.toUnqualifiedVersionless().getValue()); } public void setResourceIdMarkedForDeletion(IIdType theIdType) { @@ -69,7 +69,8 @@ public class DeleteConflictList implements Iterable { public boolean isResourceIdToIgnoreConflict(IIdType theIdType) { Validate.notNull(theIdType); Validate.notBlank(theIdType.toUnqualifiedVersionless().getValue()); - return myResourceIdsToIgnoreConflict.contains(theIdType.toUnqualifiedVersionless().getValue()); + return myResourceIdsToIgnoreConflict.contains( + theIdType.toUnqualifiedVersionless().getValue()); } public void setResourceIdToIgnoreConflict(IIdType theIdType) { diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/DeleteMethodOutcome.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/DeleteMethodOutcome.java index 1719f126097..78b4432da07 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/DeleteMethodOutcome.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/DeleteMethodOutcome.java @@ -32,13 +32,14 @@ import java.util.List; public class DeleteMethodOutcome extends MethodOutcome { private List myDeletedEntities; + @Deprecated private long myExpungedResourcesCount; + @Deprecated private long myExpungedEntitiesCount; - public DeleteMethodOutcome() { - } + public DeleteMethodOutcome() {} public DeleteMethodOutcome(IBaseOperationOutcome theBaseOperationOutcome) { super(theBaseOperationOutcome); diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/ExpungeOptions.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/ExpungeOptions.java index c05ebe5d6a5..47857646f23 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/ExpungeOptions.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/ExpungeOptions.java @@ -31,11 +31,11 @@ public class ExpungeOptions { @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("limit", myLimit) - .append("oldVersions", myExpungeOldVersions) - .append("deletedResources", myExpungeDeletedResources) - .append("everything", myExpungeEverything) - .toString(); + .append("limit", myLimit) + .append("oldVersions", myExpungeOldVersions) + .append("deletedResources", myExpungeDeletedResources) + .append("everything", myExpungeEverything) + .toString(); } /** diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/HistoryCountModeEnum.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/HistoryCountModeEnum.java index ff9428cbcdd..663706e6bf1 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/HistoryCountModeEnum.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/HistoryCountModeEnum.java @@ -37,5 +37,4 @@ public enum HistoryCountModeEnum { * Do not include a count in history responses */ COUNT_DISABLED - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/LazyDaoMethodOutcome.java 
b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/LazyDaoMethodOutcome.java index b5636e6d5a2..4245319956d 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/LazyDaoMethodOutcome.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/LazyDaoMethodOutcome.java @@ -98,7 +98,6 @@ public class LazyDaoMethodOutcome extends DaoMethodOutcome { myIdSupplier = theIdSupplier; } - public static class EntityAndResource { private final IBasePersistedResource myEntity; private final IBaseResource myResource; @@ -116,5 +115,4 @@ public class LazyDaoMethodOutcome extends DaoMethodOutcome { return myResource; } } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/PersistentIdToForcedIdMap.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/PersistentIdToForcedIdMap.java index 356374bef13..0d187d57eff 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/PersistentIdToForcedIdMap.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/PersistentIdToForcedIdMap.java @@ -29,27 +29,30 @@ import java.util.stream.Collectors; public class PersistentIdToForcedIdMap
    { private final Map> myResourcePersistentIdOptionalMap; - public PersistentIdToForcedIdMap(Map> theResourcePersistentIdOptionalMap){ + public PersistentIdToForcedIdMap(Map> theResourcePersistentIdOptionalMap) { myResourcePersistentIdOptionalMap = theResourcePersistentIdOptionalMap; } public Set getResolvedResourceIds() { return myResourcePersistentIdOptionalMap.entrySet().stream() - .map(this::getResolvedPid) - .collect(Collectors.toSet()); + .map(this::getResolvedPid) + .collect(Collectors.toSet()); } private String getResolvedPid(Map.Entry> entry) { - //If the result of the translation is an empty optional, it means there is no forced id, and we can use the PID as the resource ID. - return entry.getValue().isPresent() ? entry.getValue().get() : entry.getKey().toString(); + // If the result of the translation is an empty optional, it means there is no forced id, and we can use the PID + // as the resource ID. + return entry.getValue().isPresent() + ? entry.getValue().get() + : entry.getKey().toString(); } public Optional get(P theResourcePersistentId) { return myResourcePersistentIdOptionalMap.get(theResourcePersistentId); } - public Map> getResourcePersistentIdOptionalMap(){ + public Map> getResourcePersistentIdOptionalMap() { return myResourcePersistentIdOptionalMap; } } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/TranslationQuery.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/TranslationQuery.java index 6f6bf0fa310..c66f33e5d6c 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/TranslationQuery.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/TranslationQuery.java @@ -130,30 +130,30 @@ public class TranslationQuery { TranslationQuery that = (TranslationQuery) o; return new EqualsBuilder() - .append(getCoding().getCode(), that.getCoding().getCode()) - .append(getCoding().getSystem(), that.getCoding().getSystem()) - .append(getCoding().getVersion(), that.getCoding().getVersion()) - .append(getResourceId(), that.getResourceId()) - .append(getUrl(), that.getUrl()) - .append(getConceptMapVersion(), that.getConceptMapVersion()) - .append(getSource(), that.getSource()) - .append(getTarget(), that.getTarget()) - .append(getTargetSystem(), that.getTargetSystem()) - .isEquals(); + .append(getCoding().getCode(), that.getCoding().getCode()) + .append(getCoding().getSystem(), that.getCoding().getSystem()) + .append(getCoding().getVersion(), that.getCoding().getVersion()) + .append(getResourceId(), that.getResourceId()) + .append(getUrl(), that.getUrl()) + .append(getConceptMapVersion(), that.getConceptMapVersion()) + .append(getSource(), that.getSource()) + .append(getTarget(), that.getTarget()) + .append(getTargetSystem(), that.getTargetSystem()) + .isEquals(); } @Override public int hashCode() { return new HashCodeBuilder(17, 37) - .append(getCoding().getCode()) - .append(getCoding().getSystem()) - .append(getCoding().getVersion()) - .append(getResourceId()) - .append(getUrl()) - .append(getConceptMapVersion()) - .append(getSource()) - .append(getTarget()) - .append(getTargetSystem()) - .toHashCode(); + .append(getCoding().getCode()) + .append(getCoding().getSystem()) + .append(getCoding().getVersion()) + .append(getResourceId()) + .append(getUrl()) + .append(getConceptMapVersion()) + .append(getSource()) + .append(getTarget()) + .append(getTargetSystem()) + .toHashCode(); } } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/TranslationRequest.java 
b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/TranslationRequest.java index 0446ddb30a6..97e307eea3e 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/TranslationRequest.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/TranslationRequest.java @@ -209,25 +209,24 @@ public class TranslationRequest { public IValidationSupport.TranslateCodeRequest asTranslateCodeRequest() { return new IValidationSupport.TranslateCodeRequest( - Collections.unmodifiableList(this.getCodeableConcept().getCoding()), - this.getTargetSystem(), - this.getUrl(), - this.getConceptMapVersion(), - this.getSource(), - this.getTarget(), - this.getResourceId(), - this.getReverseAsBoolean() - ); + Collections.unmodifiableList(this.getCodeableConcept().getCoding()), + this.getTargetSystem(), + this.getUrl(), + this.getConceptMapVersion(), + this.getSource(), + this.getTarget(), + this.getResourceId(), + this.getReverseAsBoolean()); } public static TranslationRequest fromTranslateCodeRequest(IValidationSupport.TranslateCodeRequest theRequest) { CodeableConcept sourceCodeableConcept = new CodeableConcept(); for (IBaseCoding aCoding : theRequest.getCodings()) { sourceCodeableConcept - .addCoding() - .setSystem(aCoding.getSystem()) - .setCode(aCoding.getCode()) - .setVersion(((Coding) aCoding).getVersion()); + .addCoding() + .setSystem(aCoding.getSystem()) + .setCode(aCoding.getCode()) + .setVersion(((Coding) aCoding).getVersion()); } TranslationRequest translationRequest = new TranslationRequest(); @@ -240,6 +239,5 @@ public class TranslationRequest { translationRequest.setResourceId(theRequest.getResourceId()); translationRequest.setReverse(theRequest.isReverse()); return translationRequest; - } } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/WarmCacheEntry.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/WarmCacheEntry.java index 03e37e33fdb..ac5e829e9af 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/WarmCacheEntry.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/WarmCacheEntry.java @@ -46,17 +46,14 @@ public class WarmCacheEntry { WarmCacheEntry that = (WarmCacheEntry) theO; return new EqualsBuilder() - .append(myPeriodMillis, that.myPeriodMillis) - .append(myUrl, that.myUrl) - .isEquals(); + .append(myPeriodMillis, that.myPeriodMillis) + .append(myUrl, that.myUrl) + .isEquals(); } @Override public int hashCode() { - return new HashCodeBuilder(17, 37) - .append(myPeriodMillis) - .append(myUrl) - .toHashCode(); + return new HashCodeBuilder(17, 37).append(myPeriodMillis).append(myUrl).toHashCode(); } public long getPeriodMillis() { @@ -76,5 +73,4 @@ public class WarmCacheEntry { myUrl = theUrl; return this; } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/pid/BaseResourcePidList.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/pid/BaseResourcePidList.java index 6bb077d0316..79489a0a295 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/pid/BaseResourcePidList.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/pid/BaseResourcePidList.java @@ -22,19 +22,21 @@ package ca.uhn.fhir.jpa.api.pid; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; +import java.util.*; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.*; -abstract public class BaseResourcePidList implements IResourcePidList { +public abstract class BaseResourcePidList 
implements IResourcePidList { final List myIds = new ArrayList<>(); @Nullable final Date myLastDate; + private final RequestPartitionId myRequestPartitionId; - BaseResourcePidList(Collection theIds, Date theLastDate, RequestPartitionId theRequestPartitionId) { + BaseResourcePidList( + Collection theIds, Date theLastDate, RequestPartitionId theRequestPartitionId) { myIds.addAll(theIds); myLastDate = theLastDate; myRequestPartitionId = theRequestPartitionId; @@ -84,4 +86,3 @@ abstract public class BaseResourcePidList implements IResourcePidList { return myIds.toString(); } } - diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/pid/EmptyResourcePidList.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/pid/EmptyResourcePidList.java index af46cbeb4ea..2881a0d0d78 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/pid/EmptyResourcePidList.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/pid/EmptyResourcePidList.java @@ -22,12 +22,11 @@ package ca.uhn.fhir.jpa.api.pid; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; -import org.apache.commons.lang3.builder.ToStringBuilder; -import javax.annotation.Nonnull; import java.util.Collections; import java.util.Date; import java.util.List; +import javax.annotation.Nonnull; /** * An empty resource pid list @@ -56,7 +55,8 @@ public class EmptyResourcePidList implements IResourcePidList { @Override public String getResourceType(int i) { - throw new ArrayIndexOutOfBoundsException(Msg.code(2095) + "Attempting to get resource type from an empty resource pid list"); + throw new ArrayIndexOutOfBoundsException( + Msg.code(2095) + "Attempting to get resource type from an empty resource pid list"); } @Override diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/pid/HomogeneousResourcePidList.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/pid/HomogeneousResourcePidList.java index 4e1b52a3650..5e3a857c37e 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/pid/HomogeneousResourcePidList.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/pid/HomogeneousResourcePidList.java @@ -22,9 +22,9 @@ package ca.uhn.fhir.jpa.api.pid; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; -import javax.annotation.Nonnull; import java.util.Collection; import java.util.Date; +import javax.annotation.Nonnull; /** * A resource pid list where all pids have the same resource type @@ -33,14 +33,18 @@ public class HomogeneousResourcePidList extends BaseResourcePidList { @Nonnull final String myResourceType; - public HomogeneousResourcePidList(String theResourceType, Collection theIds, Date theLastDate, RequestPartitionId theRequestPartitionId) { + public HomogeneousResourcePidList( + String theResourceType, + Collection theIds, + Date theLastDate, + RequestPartitionId theRequestPartitionId) { super(theIds, theLastDate, theRequestPartitionId); myResourceType = theResourceType; } @Override public String getResourceType(int i) { - return getResourceType(); + return getResourceType(); } public String getResourceType() { diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/pid/IResourcePidList.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/pid/IResourcePidList.java index a7c104a3da5..d57a4e3c20d 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/pid/IResourcePidList.java +++ 
b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/pid/IResourcePidList.java @@ -22,9 +22,9 @@ package ca.uhn.fhir.jpa.api.pid; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; -import javax.annotation.Nonnull; import java.util.Date; import java.util.List; +import javax.annotation.Nonnull; /** * List of IResourcePersistentId along with a resource type each id diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/pid/MixedResourcePidList.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/pid/MixedResourcePidList.java index d78038b50b7..f14e52cad4c 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/pid/MixedResourcePidList.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/pid/MixedResourcePidList.java @@ -22,10 +22,10 @@ package ca.uhn.fhir.jpa.api.pid; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; -import javax.annotation.Nonnull; import java.util.Collection; import java.util.Date; import java.util.List; +import javax.annotation.Nonnull; /** * A resource pid list where the pids can have different resource types @@ -34,7 +34,11 @@ public class MixedResourcePidList extends BaseResourcePidList { @Nonnull final List myResourceTypes; - public MixedResourcePidList(List theResourceTypes, Collection theIds, Date theLastDate, RequestPartitionId theRequestPartitionId) { + public MixedResourcePidList( + List theResourceTypes, + Collection theIds, + Date theLastDate, + RequestPartitionId theRequestPartitionId) { super(theIds, theLastDate, theRequestPartitionId); myResourceTypes = theResourceTypes; } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/pid/ResourcePidListBuilder.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/pid/ResourcePidListBuilder.java index 6f102d8a31f..3f7707e0d29 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/pid/ResourcePidListBuilder.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/pid/ResourcePidListBuilder.java @@ -81,7 +81,7 @@ public class ResourcePidListBuilder { if (theCurrentEndDate == null) { endDate = theChunk.getLastDate(); } else if (theChunk.getLastDate().after(endDate) - && theChunk.getLastDate().before(thePassedInEndDate)) { + && theChunk.getLastDate().before(thePassedInEndDate)) { endDate = theChunk.getLastDate(); } return endDate; diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IBatch2DaoSvc.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IBatch2DaoSvc.java index 81d4329024d..07d366fd2d1 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IBatch2DaoSvc.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IBatch2DaoSvc.java @@ -22,9 +22,9 @@ package ca.uhn.fhir.jpa.api.svc; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.pid.IResourcePidList; +import java.util.Date; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.Date; public interface IBatch2DaoSvc { @@ -42,6 +42,10 @@ public interface IBatch2DaoSvc { * @param theRequestPartitionId The request partition ID (may be null on nonpartitioned systems) * @param theUrl The search URL, or null to return IDs for all resources across all resource types. Null will only be supplied if {@link #isAllResourceTypeSupported()} returns true. 
*/ - IResourcePidList fetchResourceIdsPage(Date theStart, Date theEnd, @Nonnull Integer thePageSize, @Nullable RequestPartitionId theRequestPartitionId, @Nullable String theUrl); - + IResourcePidList fetchResourceIdsPage( + Date theStart, + Date theEnd, + @Nonnull Integer thePageSize, + @Nullable RequestPartitionId theRequestPartitionId, + @Nullable String theUrl); } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IDeleteExpungeSvc.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IDeleteExpungeSvc.java index b8d94a105fb..33f53ed7c6f 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IDeleteExpungeSvc.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IDeleteExpungeSvc.java @@ -28,6 +28,4 @@ public interface IDeleteExpungeSvc> { int deleteExpunge(List thePersistentIds, boolean theCascade, Integer theCascadeMaxRounds); boolean isCascadeSupported(); - - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IGoldenResourceSearchSvc.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IGoldenResourceSearchSvc.java index d2b9fe538ed..94548ae03d8 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IGoldenResourceSearchSvc.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IGoldenResourceSearchSvc.java @@ -22,9 +22,9 @@ package ca.uhn.fhir.jpa.api.svc; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.pid.IResourcePidList; +import java.util.Date; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.Date; public interface IGoldenResourceSearchSvc { @@ -37,6 +37,10 @@ public interface IGoldenResourceSearchSvc { * @param theRequestPartitionId The request partition ID (may be null on nonpartitioned systems) * @param theResourceType the type of resource. 
*/ - IResourcePidList fetchGoldenResourceIdsPage(Date theStart, Date theEnd, @Nonnull Integer thePageSize, @Nullable RequestPartitionId theRequestPartitionId, @Nullable String theResourceType); - + IResourcePidList fetchGoldenResourceIdsPage( + Date theStart, + Date theEnd, + @Nonnull Integer thePageSize, + @Nullable RequestPartitionId theRequestPartitionId, + @Nullable String theResourceType); } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IIdHelperService.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IIdHelperService.java index ec66a3f5ca2..7d48096cb60 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IIdHelperService.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IIdHelperService.java @@ -30,13 +30,13 @@ import org.hl7.fhir.instance.model.api.IAnyResource; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.Date; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * This interface is used to translate between {@link IResourcePersistentId} @@ -53,7 +53,8 @@ public interface IIdHelperService { * @param theOnlyForcedIds If true, resources which are not existing forced IDs will not be resolved */ @Nonnull - List resolveResourcePersistentIdsWithCache(@Nonnull RequestPartitionId theRequestPartitionId, List theIds, boolean theOnlyForcedIds); + List resolveResourcePersistentIdsWithCache( + @Nonnull RequestPartitionId theRequestPartitionId, List theIds, boolean theOnlyForcedIds); /** * Given a resource type and ID, determines the internal persistent ID for the resource. @@ -61,7 +62,8 @@ public interface IIdHelperService { * @throws ResourceNotFoundException If the ID can not be found */ @Nonnull - T resolveResourcePersistentIds(@Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theId); + T resolveResourcePersistentIds( + @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theId); /** * Given a resource type and ID, determines the internal persistent ID for a resource. @@ -70,7 +72,11 @@ public interface IIdHelperService { * @throws ResourceNotFoundException If the ID can not be found */ @Nonnull - T resolveResourcePersistentIds(@Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theId, boolean theExcludeDeleted); + T resolveResourcePersistentIds( + @Nonnull RequestPartitionId theRequestPartitionId, + String theResourceType, + String theId, + boolean theExcludeDeleted); /** * Returns a mapping of Id -> IResourcePersistentId. @@ -78,7 +84,8 @@ public interface IIdHelperService { * (and no map will be returned) */ @Nonnull - Map resolveResourcePersistentIds(@Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, List theIds); + Map resolveResourcePersistentIds( + @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, List theIds); /** * Returns a mapping of Id -> IResourcePersistentId. @@ -86,7 +93,11 @@ public interface IIdHelperService { * Optionally filters out deleted resources. 
*/ @Nonnull - Map resolveResourcePersistentIds(@Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, List theIds, boolean theExcludeDeleted); + Map resolveResourcePersistentIds( + @Nonnull RequestPartitionId theRequestPartitionId, + String theResourceType, + List theIds, + boolean theExcludeDeleted); /** * Given a persistent ID, returns the associated resource ID @@ -101,7 +112,9 @@ public interface IIdHelperService { * @throws ResourceNotFoundException If the ID can not be found */ @Nonnull - IResourceLookup resolveResourceIdentity(@Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theResourceId) throws ResourceNotFoundException; + IResourceLookup resolveResourceIdentity( + @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theResourceId) + throws ResourceNotFoundException; /** * Given a forced ID, convert it to it's Long value. Since you are allowed to use string IDs for resources, we need to @@ -111,7 +124,12 @@ public interface IIdHelperService { * @throws ResourceNotFoundException If the ID can not be found */ @Nonnull - IResourceLookup resolveResourceIdentity(@Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theResourceId, boolean theExcludeDeleted) throws ResourceNotFoundException; + IResourceLookup resolveResourceIdentity( + @Nonnull RequestPartitionId theRequestPartitionId, + String theResourceType, + String theResourceId, + boolean theExcludeDeleted) + throws ResourceNotFoundException; /** * Returns true if the given resource ID should be stored in a forced ID. Under default config @@ -138,7 +156,12 @@ public interface IIdHelperService { /** * Pre-cache a PID-to-Resource-ID mapping for later retrieval by {@link #translatePidsToForcedIds(Set)} and related methods */ - void addResolvedPidToForcedId(T theResourcePersistentId, @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, @Nullable String theForcedId, @Nullable Date theDeletedAt); + void addResolvedPidToForcedId( + T theResourcePersistentId, + @Nonnull RequestPartitionId theRequestPartitionId, + String theResourceType, + @Nullable String theForcedId, + @Nullable Date theDeletedAt); @Nonnull List getPidsOrThrowException(RequestPartitionId theRequestPartitionId, List theIds); diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/ISearchCoordinatorSvc.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/ISearchCoordinatorSvc.java index 9d4776c62ba..a15b0876289 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/ISearchCoordinatorSvc.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/ISearchCoordinatorSvc.java @@ -27,22 +27,33 @@ import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; -import javax.annotation.Nullable; import java.util.List; import java.util.Optional; +import javax.annotation.Nullable; public interface ISearchCoordinatorSvc { void cancelAllActiveSearches(); - List getResources(String theUuid, int theFrom, int theTo, @Nullable RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId); + List getResources( + String theUuid, + int theFrom, + int theTo, + @Nullable RequestDetails theRequestDetails, + RequestPartitionId theRequestPartitionId); - IBundleProvider registerSearch(IFhirResourceDao theCallingDao, SearchParameterMap theParams, String theResourceType, CacheControlDirective 
theCacheControlDirective, @Nullable RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId); + IBundleProvider registerSearch( + IFhirResourceDao theCallingDao, + SearchParameterMap theParams, + String theResourceType, + CacheControlDirective theCacheControlDirective, + @Nullable RequestDetails theRequestDetails, + RequestPartitionId theRequestPartitionId); /** * Fetch the total number of search results for the given currently executing search, if one is currently executing and * the total is known. Will return empty otherwise */ - Optional getSearchTotal(String theUuid, @Nullable RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId); - + Optional getSearchTotal( + String theUuid, @Nullable RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId); } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/ISearchSvc.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/ISearchSvc.java index d830c2d23e7..a5783326c74 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/ISearchSvc.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/ISearchSvc.java @@ -25,6 +25,6 @@ import ca.uhn.fhir.rest.api.server.IBundleProvider; public interface ISearchSvc { - IBundleProvider executeQuery(String theResourceType, SearchParameterMap theSearchParameterMap, RequestPartitionId theRequestPartitionId); - + IBundleProvider executeQuery( + String theResourceType, SearchParameterMap theSearchParameterMap, RequestPartitionId theRequestPartitionId); } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/IBinaryStorageSvc.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/IBinaryStorageSvc.java index 048de32701b..399f2bc588e 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/IBinaryStorageSvc.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/IBinaryStorageSvc.java @@ -24,10 +24,10 @@ import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import org.hl7.fhir.instance.model.api.IBaseBinary; import org.hl7.fhir.instance.model.api.IIdType; -import javax.annotation.Nonnull; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import javax.annotation.Nonnull; public interface IBinaryStorageSvc { @@ -43,11 +43,11 @@ public interface IBinaryStorageSvc { * @param theNewBlobId the blob ID to validate * @return true if the blob ID is valid, false otherwise. */ - default boolean isValidBlobId(String theNewBlobId) { - return true;//default method here as we don't want to break existing implementations + default boolean isValidBlobId(String theNewBlobId) { + return true; // default method here as we don't want to break existing implementations } - /** + /** * Sets the maximum number of bytes that can be stored in a single binary * file by this service. 
The default is {@link Long#MAX_VALUE} * @@ -98,8 +98,9 @@ public interface IBinaryStorageSvc { */ @Deprecated(since = "6.6.0", forRemoval = true) @Nonnull - default StoredDetails storeBlob(IIdType theResourceId, String theBlobIdOrNull, String theContentType, - InputStream theInputStream) throws IOException { + default StoredDetails storeBlob( + IIdType theResourceId, String theBlobIdOrNull, String theContentType, InputStream theInputStream) + throws IOException { return storeBlob(theResourceId, theBlobIdOrNull, theContentType, theInputStream, new ServletRequestDetails()); } @@ -114,8 +115,13 @@ public interface IBinaryStorageSvc { * @return Returns details about the stored data */ @Nonnull - StoredDetails storeBlob(IIdType theResourceId, String theBlobIdOrNull, String theContentType, - InputStream theInputStream, RequestDetails theRequestDetails) throws IOException; + StoredDetails storeBlob( + IIdType theResourceId, + String theBlobIdOrNull, + String theContentType, + InputStream theInputStream, + RequestDetails theRequestDetails) + throws IOException; StoredDetails fetchBlobDetails(IIdType theResourceId, String theBlobId) throws IOException; diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/IBinaryTarget.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/IBinaryTarget.java index 30aa9bf57be..d71760e3ad8 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/IBinaryTarget.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/IBinaryTarget.java @@ -47,14 +47,12 @@ public interface IBinaryTarget { @SuppressWarnings("unchecked") default Optional getAttachmentId() { - return getTarget() - .getExtension() - .stream() - .filter(t -> HapiExtensions.EXT_EXTERNALIZED_BINARY_ID.equals(t.getUrl())) - .filter(t -> t.getValue() instanceof IPrimitiveType) - .map(t -> (IPrimitiveType) t.getValue()) - .map(t -> t.getValue()) - .filter(t -> isNotBlank(t)) - .findFirst(); + return getTarget().getExtension().stream() + .filter(t -> HapiExtensions.EXT_EXTERNALIZED_BINARY_ID.equals(t.getUrl())) + .filter(t -> t.getValue() instanceof IPrimitiveType) + .map(t -> (IPrimitiveType) t.getValue()) + .map(t -> t.getValue()) + .filter(t -> isNotBlank(t)) + .findFirst(); } } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/StoredDetails.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/StoredDetails.java index e760237d5cf..bbca43085e0 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/StoredDetails.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/api/StoredDetails.java @@ -19,28 +19,32 @@ */ package ca.uhn.fhir.jpa.binary.api; +import ca.uhn.fhir.model.api.IModelJson; import ca.uhn.fhir.rest.server.util.JsonDateDeserializer; import ca.uhn.fhir.rest.server.util.JsonDateSerializer; -import ca.uhn.fhir.model.api.IModelJson; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonSerialize; import com.google.common.hash.HashingInputStream; import org.apache.commons.lang3.builder.ToStringBuilder; -import javax.annotation.Nonnull; import java.util.Date; +import javax.annotation.Nonnull; public class StoredDetails implements IModelJson { @JsonProperty("blobId") private String myBlobId; + @JsonProperty("bytes") private long myBytes; + @JsonProperty("contentType") private String myContentType; + @JsonProperty("hash") private String myHash; + @JsonProperty("published") 
@JsonSerialize(using = JsonDateSerializer.class) @JsonDeserialize(using = JsonDateDeserializer.class) @@ -57,7 +61,12 @@ public class StoredDetails implements IModelJson { /** * Constructor */ - public StoredDetails(@Nonnull String theBlobId, long theBytes, @Nonnull String theContentType, HashingInputStream theIs, Date thePublished) { + public StoredDetails( + @Nonnull String theBlobId, + long theBytes, + @Nonnull String theContentType, + HashingInputStream theIs, + Date thePublished) { myBlobId = theBlobId; myBytes = theBytes; myContentType = theContentType; @@ -68,12 +77,12 @@ public class StoredDetails implements IModelJson { @Override public String toString() { return new ToStringBuilder(this) - .append("blobId", myBlobId) - .append("bytes", myBytes) - .append("contentType", myContentType) - .append("hash", myHash) - .append("published", myPublished) - .toString(); + .append("blobId", myBlobId) + .append("bytes", myBytes) + .append("contentType", myContentType) + .append("hash", myHash) + .append("published", myPublished) + .toString(); } public String getHash() { @@ -122,5 +131,4 @@ public class StoredDetails implements IModelJson { myBytes = theBytes; return this; } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/interceptor/BinaryStorageInterceptor.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/interceptor/BinaryStorageInterceptor.java index 9bdfac35a10..76c5a32833a 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/interceptor/BinaryStorageInterceptor.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/interceptor/BinaryStorageInterceptor.java @@ -55,7 +55,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nonnull; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; @@ -66,6 +65,7 @@ import java.util.Optional; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import static ca.uhn.fhir.util.HapiExtensions.EXT_EXTERNALIZED_BINARY_ID; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -74,14 +74,18 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; public class BinaryStorageInterceptor> { private static final Logger ourLog = LoggerFactory.getLogger(BinaryStorageInterceptor.class); + @Autowired private IBinaryStorageSvc myBinaryStorageSvc; + private final FhirContext myCtx; + @Autowired private BinaryAccessProvider myBinaryAccessProvider; @Autowired private IInterceptorBroadcaster myInterceptorBroadcaster; + private Class myBinaryType; private String myDeferredListKey; private long myAutoInflateBinariesMaximumBytes = 10 * FileUtils.ONE_MB; @@ -93,7 +97,6 @@ public class BinaryStorageInterceptor> { assert base64Binary != null; myBinaryType = (Class) base64Binary.getImplementingClass(); myDeferredListKey = getClass().getName() + "_" + hashCode() + "_DEFERRED_LIST"; - } /** @@ -115,31 +118,44 @@ public class BinaryStorageInterceptor> { @Hook(Pointcut.STORAGE_PRESTORAGE_EXPUNGE_RESOURCE) public void expungeResource(AtomicInteger theCounter, IBaseResource theResource) { - List binaryElements = myCtx.newTerser().getAllPopulatedChildElementsOfType(theResource, myBinaryType); + List binaryElements = + myCtx.newTerser().getAllPopulatedChildElementsOfType(theResource, myBinaryType); - List attachmentIds = binaryElements - .stream() - .flatMap(t -> ((IBaseHasExtensions) 
t).getExtension().stream()) - .filter(t -> HapiExtensions.EXT_EXTERNALIZED_BINARY_ID.equals(t.getUrl())) - .map(t -> ((IPrimitiveType) t.getValue()).getValueAsString()) - .collect(Collectors.toList()); + List attachmentIds = binaryElements.stream() + .flatMap(t -> ((IBaseHasExtensions) t).getExtension().stream()) + .filter(t -> HapiExtensions.EXT_EXTERNALIZED_BINARY_ID.equals(t.getUrl())) + .map(t -> ((IPrimitiveType) t.getValue()).getValueAsString()) + .collect(Collectors.toList()); for (String next : attachmentIds) { myBinaryStorageSvc.expungeBlob(theResource.getIdElement(), next); theCounter.incrementAndGet(); - ourLog.info("Deleting binary blob {} because resource {} is being expunged", next, theResource.getIdElement().getValue()); + ourLog.info( + "Deleting binary blob {} because resource {} is being expunged", + next, + theResource.getIdElement().getValue()); } - } @Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED) - public void extractLargeBinariesBeforeCreate(RequestDetails theRequestDetails, TransactionDetails theTransactionDetails, IBaseResource theResource, Pointcut thePointcut) throws IOException { + public void extractLargeBinariesBeforeCreate( + RequestDetails theRequestDetails, + TransactionDetails theTransactionDetails, + IBaseResource theResource, + Pointcut thePointcut) + throws IOException { extractLargeBinaries(theRequestDetails, theTransactionDetails, theResource, thePointcut); } @Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED) - public void extractLargeBinariesBeforeUpdate(RequestDetails theRequestDetails, TransactionDetails theTransactionDetails, IBaseResource thePreviousResource, IBaseResource theResource, Pointcut thePointcut) throws IOException { + public void extractLargeBinariesBeforeUpdate( + RequestDetails theRequestDetails, + TransactionDetails theTransactionDetails, + IBaseResource thePreviousResource, + IBaseResource theResource, + Pointcut thePointcut) + throws IOException { blockIllegalExternalBinaryIds(thePreviousResource, theResource); extractLargeBinaries(theRequestDetails, theTransactionDetails, theResource, thePointcut); } @@ -152,18 +168,18 @@ public class BinaryStorageInterceptor> { private void blockIllegalExternalBinaryIds(IBaseResource thePreviousResource, IBaseResource theResource) { Set existingBinaryIds = new HashSet<>(); if (thePreviousResource != null) { - List base64fields = myCtx.newTerser().getAllPopulatedChildElementsOfType(thePreviousResource, myBinaryType); + List base64fields = + myCtx.newTerser().getAllPopulatedChildElementsOfType(thePreviousResource, myBinaryType); for (IPrimitiveType nextBase64 : base64fields) { if (nextBase64 instanceof IBaseHasExtensions) { ((IBaseHasExtensions) nextBase64) - .getExtension() - .stream() - .filter(t -> t.getUserData(JpaConstants.EXTENSION_EXT_SYSTEMDEFINED) == null) - .filter(t -> EXT_EXTERNALIZED_BINARY_ID.equals(t.getUrl())) - .map(t -> (IPrimitiveType) t.getValue()) - .map(IPrimitiveType::getValueAsString) - .filter(StringUtils::isNotBlank) - .forEach(existingBinaryIds::add); + .getExtension().stream() + .filter(t -> t.getUserData(JpaConstants.EXTENSION_EXT_SYSTEMDEFINED) == null) + .filter(t -> EXT_EXTERNALIZED_BINARY_ID.equals(t.getUrl())) + .map(t -> (IPrimitiveType) t.getValue()) + .map(IPrimitiveType::getValueAsString) + .filter(StringUtils::isNotBlank) + .forEach(existingBinaryIds::add); } } } @@ -172,26 +188,34 @@ public class BinaryStorageInterceptor> { for (IPrimitiveType nextBase64 : base64fields) { if (nextBase64 instanceof IBaseHasExtensions) { Optional 
hasExternalizedBinaryReference = ((IBaseHasExtensions) nextBase64) - .getExtension() - .stream() - .filter(t -> t.getUserData(JpaConstants.EXTENSION_EXT_SYSTEMDEFINED) == null) - .filter(t -> t.getUrl().equals(EXT_EXTERNALIZED_BINARY_ID)) - .map(t -> (IPrimitiveType) t.getValue()) - .map(IPrimitiveType::getValueAsString) - .filter(StringUtils::isNotBlank) - .filter(t -> !existingBinaryIds.contains(t)) - .findFirst(); + .getExtension().stream() + .filter(t -> t.getUserData(JpaConstants.EXTENSION_EXT_SYSTEMDEFINED) == null) + .filter(t -> t.getUrl().equals(EXT_EXTERNALIZED_BINARY_ID)) + .map(t -> (IPrimitiveType) t.getValue()) + .map(IPrimitiveType::getValueAsString) + .filter(StringUtils::isNotBlank) + .filter(t -> !existingBinaryIds.contains(t)) + .findFirst(); if (hasExternalizedBinaryReference.isPresent()) { - String msg = myCtx.getLocalizer().getMessage(BinaryStorageInterceptor.class, "externalizedBinaryStorageExtensionFoundInRequestBody", EXT_EXTERNALIZED_BINARY_ID, hasExternalizedBinaryReference.get()); + String msg = myCtx.getLocalizer() + .getMessage( + BinaryStorageInterceptor.class, + "externalizedBinaryStorageExtensionFoundInRequestBody", + EXT_EXTERNALIZED_BINARY_ID, + hasExternalizedBinaryReference.get()); throw new InvalidRequestException(Msg.code(1329) + msg); } } } - } - private void extractLargeBinaries(RequestDetails theRequestDetails, TransactionDetails theTransactionDetails, IBaseResource theResource, Pointcut thePointcut) throws IOException { + private void extractLargeBinaries( + RequestDetails theRequestDetails, + TransactionDetails theTransactionDetails, + IBaseResource theResource, + Pointcut thePointcut) + throws IOException { IIdType resourceId = theResource.getIdElement(); if (!resourceId.hasResourceType() && resourceId.hasIdPart()) { @@ -206,13 +230,15 @@ public class BinaryStorageInterceptor> { long nextPayloadLength = data.length; String nextContentType = nextTarget.getContentType(); - boolean shouldStoreBlob = myBinaryStorageSvc.shouldStoreBlob(nextPayloadLength, resourceId, nextContentType); + boolean shouldStoreBlob = + myBinaryStorageSvc.shouldStoreBlob(nextPayloadLength, resourceId, nextContentType); if (shouldStoreBlob) { String newBlobId; if (resourceId.hasIdPart()) { ByteArrayInputStream inputStream = new ByteArrayInputStream(data); - StoredDetails storedDetails = myBinaryStorageSvc.storeBlob(resourceId, null, nextContentType, inputStream, theRequestDetails); + StoredDetails storedDetails = myBinaryStorageSvc.storeBlob( + resourceId, null, nextContentType, inputStream, theRequestDetails); newBlobId = storedDetails.getBlobId(); } else { assert thePointcut == Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED : thePointcut.name(); @@ -223,12 +249,16 @@ public class BinaryStorageInterceptor> { newBlobId = prefix + newBlobId; } if (myBinaryStorageSvc.isValidBlobId(newBlobId)) { - List deferredBinaryTargets = getOrCreateDeferredBinaryStorageMap(theTransactionDetails); - DeferredBinaryTarget newDeferredBinaryTarget = new DeferredBinaryTarget(newBlobId, nextTarget, data); + List deferredBinaryTargets = + getOrCreateDeferredBinaryStorageMap(theTransactionDetails); + DeferredBinaryTarget newDeferredBinaryTarget = + new DeferredBinaryTarget(newBlobId, nextTarget, data); deferredBinaryTargets.add(newDeferredBinaryTarget); newDeferredBinaryTarget.setBlobIdPrefixHookApplied(true); } else { - throw new InternalErrorException(Msg.code(2341) + "Invalid blob ID for backing storage service.[blobId=" + newBlobId + ",service=" + myBinaryStorageSvc.getClass().getName() +"]"); + 
throw new InternalErrorException(Msg.code(2341) + + "Invalid blob ID for backing storage service.[blobId=" + newBlobId + ",service=" + + myBinaryStorageSvc.getClass().getName() + "]"); } } @@ -243,17 +273,18 @@ public class BinaryStorageInterceptor> { * @return A string, which will be used to prefix the blob ID. May be null. */ private String invokeAssignBlobPrefix(RequestDetails theRequest, IBaseResource theResource) { - if ( ! CompositeInterceptorBroadcaster.hasHooks(Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX, myInterceptorBroadcaster, theRequest)) { + if (!CompositeInterceptorBroadcaster.hasHooks( + Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX, myInterceptorBroadcaster, theRequest)) { return null; } - HookParams params = new HookParams() - .add(RequestDetails.class, theRequest) - .add(IBaseResource.class, theResource); + HookParams params = + new HookParams().add(RequestDetails.class, theRequest).add(IBaseResource.class, theResource); BaseBinaryStorageSvcImpl.setBlobIdPrefixApplied(theRequest); - return (String) CompositeInterceptorBroadcaster.doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX, params); + return (String) CompositeInterceptorBroadcaster.doCallHooksAndReturnObject( + myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX, params); } @Nonnull @@ -262,7 +293,9 @@ public class BinaryStorageInterceptor> { } @Hook(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED) - public void storeLargeBinariesBeforeCreatePersistence(TransactionDetails theTransactionDetails, IBaseResource theResource, Pointcut thePoincut) throws IOException { + public void storeLargeBinariesBeforeCreatePersistence( + TransactionDetails theTransactionDetails, IBaseResource theResource, Pointcut thePoincut) + throws IOException { if (theTransactionDetails == null) { return; } @@ -303,21 +336,29 @@ public class BinaryStorageInterceptor> { for (IBaseResource nextResource : theDetails) { if (nextResource == null) { - ourLog.warn("Received a null resource during STORAGE_PRESHOW_RESOURCES. This is a bug and should be reported. Skipping resource."); + ourLog.warn( + "Received a null resource during STORAGE_PRESHOW_RESOURCES. This is a bug and should be reported. 
Skipping resource."); continue; } cumulativeInflatedBytes = inflateBinariesInResource(cumulativeInflatedBytes, nextResource); inflatedResourceCount += 1; if (cumulativeInflatedBytes >= myAutoInflateBinariesMaximumBytes) { - ourLog.debug("Exiting binary data inflation early.[byteCount={}, resourcesInflated={}, resourcesSkipped={}]", cumulativeInflatedBytes, inflatedResourceCount, theDetails.size() - inflatedResourceCount); + ourLog.debug( + "Exiting binary data inflation early.[byteCount={}, resourcesInflated={}, resourcesSkipped={}]", + cumulativeInflatedBytes, + inflatedResourceCount, + theDetails.size() - inflatedResourceCount); return; } } - ourLog.debug("Exiting binary data inflation having inflated everything.[byteCount={}, resourcesInflated={}, resourcesSkipped=0]", cumulativeInflatedBytes, inflatedResourceCount); + ourLog.debug( + "Exiting binary data inflation having inflated everything.[byteCount={}, resourcesInflated={}, resourcesSkipped=0]", + cumulativeInflatedBytes, + inflatedResourceCount); } - - private long inflateBinariesInResource(long theCumulativeInflatedBytes, IBaseResource theResource) throws IOException { + private long inflateBinariesInResource(long theCumulativeInflatedBytes, IBaseResource theResource) + throws IOException { IIdType resourceId = theResource.getIdElement(); List attachments = recursivelyScanResourceForBinaryData(theResource); for (IBinaryTarget nextTarget : attachments) { @@ -345,7 +386,11 @@ public class BinaryStorageInterceptor> { List binaryTargets = new ArrayList<>(); myCtx.newTerser().visit(theResource, new IModelVisitor2() { @Override - public boolean acceptElement(IBase theElement, List theContainingElementPath, List theChildDefinitionPath, List> theElementDefinitionPath) { + public boolean acceptElement( + IBase theElement, + List theContainingElementPath, + List theChildDefinitionPath, + List> theElementDefinitionPath) { if (theElement.getClass().equals(myBinaryType)) { IBase parent = theContainingElementPath.get(theContainingElementPath.size() - 2); @@ -372,7 +417,6 @@ public class BinaryStorageInterceptor> { private final InputStream myDataStream; private boolean myBlobIdPrefixHookApplied; - private DeferredBinaryTarget(String theBlobId, IBinaryTarget theBinaryTarget, byte[] theData) { myBlobId = theBlobId; myBinaryTarget = theBinaryTarget; @@ -391,12 +435,12 @@ public class BinaryStorageInterceptor> { return myDataStream; } - boolean isBlobIdPrefixHookApplied() { return myBlobIdPrefixHookApplied; } + boolean isBlobIdPrefixHookApplied() { + return myBlobIdPrefixHookApplied; + } void setBlobIdPrefixHookApplied(boolean theBlobIdPrefixHookApplied) { myBlobIdPrefixHookApplied = theBlobIdPrefixHookApplied; } } - - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/provider/BinaryAccessProvider.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/provider/BinaryAccessProvider.java index d9dfb37635e..52cce62c41a 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/provider/BinaryAccessProvider.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/provider/BinaryAccessProvider.java @@ -19,9 +19,9 @@ */ package ca.uhn.fhir.jpa.binary.provider; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.BaseRuntimeElementDefinition; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; @@ -56,12 +56,12 @@ import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nonnull; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; import java.io.ByteArrayInputStream; import java.io.IOException; import java.util.Optional; +import javax.annotation.Nonnull; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; import static ca.uhn.fhir.util.UrlUtil.sanitizeUrlPart; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -74,10 +74,13 @@ import static org.apache.commons.lang3.StringUtils.isBlank; public class BinaryAccessProvider { private static final Logger ourLog = LoggerFactory.getLogger(BinaryAccessProvider.class); + @Autowired private FhirContext myCtx; + @Autowired private DaoRegistry myDaoRegistry; + @Autowired(required = false) private IBinaryStorageSvc myBinaryStorageSvc; @@ -86,13 +89,18 @@ public class BinaryAccessProvider { /** * $binary-access-read */ - @Operation(name = JpaConstants.OPERATION_BINARY_ACCESS_READ, global = true, manualResponse = true, idempotent = true) + @Operation( + name = JpaConstants.OPERATION_BINARY_ACCESS_READ, + global = true, + manualResponse = true, + idempotent = true) public void binaryAccessRead( - @IdParam IIdType theResourceId, - @OperationParam(name = "path", min = 1, max = 1) IPrimitiveType thePath, - ServletRequestDetails theRequestDetails, - HttpServletRequest theServletRequest, - HttpServletResponse theServletResponse) throws IOException { + @IdParam IIdType theResourceId, + @OperationParam(name = "path", min = 1, max = 1) IPrimitiveType thePath, + ServletRequestDetails theRequestDetails, + HttpServletRequest theServletRequest, + HttpServletResponse theServletResponse) + throws IOException { String path = validateResourceTypeAndPath(theResourceId, thePath); IFhirResourceDao dao = getDaoForRequest(theResourceId); @@ -101,7 +109,7 @@ public class BinaryAccessProvider { IBinaryTarget target = findAttachmentForRequest(resource, path, theRequestDetails); Optional attachmentId = target.getAttachmentId(); - //for unit test only + // for unit test only if (addTargetAttachmentIdForTest) { attachmentId = Optional.of("1"); } @@ -127,7 +135,8 @@ public class BinaryAccessProvider { theServletResponse.addHeader(Constants.HEADER_CACHE_CONTROL, Constants.CACHE_CONTROL_PRIVATE); theServletResponse.addHeader(Constants.HEADER_ETAG, '"' + blobDetails.getHash() + '"'); - theServletResponse.addHeader(Constants.HEADER_LAST_MODIFIED, DateUtils.formatDate(blobDetails.getPublished())); + theServletResponse.addHeader( + Constants.HEADER_LAST_MODIFIED, DateUtils.formatDate(blobDetails.getPublished())); myBinaryStorageSvc.writeBlob(theResourceId, blobId, theServletResponse.getOutputStream()); theServletResponse.getOutputStream().close(); @@ -138,7 +147,12 @@ public class BinaryAccessProvider { byte[] data = target.getData(); if (data == null) { - String msg = myCtx.getLocalizer().getMessage(BinaryAccessProvider.class, "noAttachmentDataPresent", sanitizeUrlPart(theResourceId), sanitizeUrlPart(thePath)); + String msg = myCtx.getLocalizer() + .getMessage( + BinaryAccessProvider.class, + "noAttachmentDataPresent", + sanitizeUrlPart(theResourceId), + sanitizeUrlPart(thePath)); throw new InvalidRequestException(Msg.code(1332) + msg); } @@ -151,7 +165,6 @@ public class BinaryAccessProvider { theServletResponse.getOutputStream().write(data); theServletResponse.getOutputStream().close(); - } } @@ -159,13 +172,18 @@ public class BinaryAccessProvider 
{ * $binary-access-write */ @SuppressWarnings("unchecked") - @Operation(name = JpaConstants.OPERATION_BINARY_ACCESS_WRITE, global = true, manualRequest = true, idempotent = false) + @Operation( + name = JpaConstants.OPERATION_BINARY_ACCESS_WRITE, + global = true, + manualRequest = true, + idempotent = false) public IBaseResource binaryAccessWrite( - @IdParam IIdType theResourceId, - @OperationParam(name = "path", min = 1, max = 1) IPrimitiveType thePath, - ServletRequestDetails theRequestDetails, - HttpServletRequest theServletRequest, - HttpServletResponse theServletResponse) throws IOException { + @IdParam IIdType theResourceId, + @OperationParam(name = "path", min = 1, max = 1) IPrimitiveType thePath, + ServletRequestDetails theRequestDetails, + HttpServletRequest theServletRequest, + HttpServletResponse theServletResponse) + throws IOException { String path = validateResourceTypeAndPath(theResourceId, thePath); IFhirResourceDao dao = getDaoForRequest(theResourceId); @@ -178,7 +196,8 @@ public class BinaryAccessProvider { throw new InvalidRequestException(Msg.code(1333) + "No content-target supplied"); } if (EncodingEnum.forContentTypeStrict(requestContentType) != null) { - throw new InvalidRequestException(Msg.code(1334) + "This operation is for binary content, got: " + requestContentType); + throw new InvalidRequestException( + Msg.code(1334) + "This operation is for binary content, got: " + requestContentType); } long size = theServletRequest.getContentLength(); @@ -189,10 +208,13 @@ public class BinaryAccessProvider { if (size > 0 && myBinaryStorageSvc != null) { if (bytes == null || bytes.length == 0) { - throw new IllegalStateException(Msg.code(2073) + "Input stream is empty! Ensure that you are uploading data, and if so, ensure that no interceptors are in use that may be consuming the input stream"); + throw new IllegalStateException( + Msg.code(2073) + + "Input stream is empty! 
Ensure that you are uploading data, and if so, ensure that no interceptors are in use that may be consuming the input stream"); } if (myBinaryStorageSvc.shouldStoreBlob(size, theResourceId, requestContentType)) { - StoredDetails storedDetails = myBinaryStorageSvc.storeBlob(theResourceId, null, requestContentType, new ByteArrayInputStream(bytes), theRequestDetails); + StoredDetails storedDetails = myBinaryStorageSvc.storeBlob( + theResourceId, null, requestContentType, new ByteArrayInputStream(bytes), theRequestDetails); size = storedDetails.getBytes(); blobId = storedDetails.getBlobId(); Validate.notBlank(blobId, "BinaryStorageSvc returned a null blob ID"); // should not happen @@ -219,25 +241,29 @@ public class BinaryAccessProvider { public void replaceDataWithExtension(IBinaryTarget theTarget, String theBlobId) { theTarget - .getTarget() - .getExtension() - .removeIf(t -> HapiExtensions.EXT_EXTERNALIZED_BINARY_ID.equals(t.getUrl())); + .getTarget() + .getExtension() + .removeIf(t -> HapiExtensions.EXT_EXTERNALIZED_BINARY_ID.equals(t.getUrl())); theTarget.setData(null); IBaseExtension ext = theTarget.getTarget().addExtension(); ext.setUrl(HapiExtensions.EXT_EXTERNALIZED_BINARY_ID); ext.setUserData(JpaConstants.EXTENSION_EXT_SYSTEMDEFINED, Boolean.TRUE); - IPrimitiveType blobIdString = (IPrimitiveType) myCtx.getElementDefinition("string").newInstance(); + IPrimitiveType blobIdString = + (IPrimitiveType) myCtx.getElementDefinition("string").newInstance(); blobIdString.setValueAsString(theBlobId); ext.setValue(blobIdString); } @Nonnull - private IBinaryTarget findAttachmentForRequest(IBaseResource theResource, String thePath, ServletRequestDetails theRequestDetails) { + private IBinaryTarget findAttachmentForRequest( + IBaseResource theResource, String thePath, ServletRequestDetails theRequestDetails) { Optional type = myCtx.newFluentPath().evaluateFirst(theResource, thePath, IBase.class); String resType = this.myCtx.getResourceType(theResource); if (type.isEmpty()) { - String msg = this.myCtx.getLocalizer().getMessageSanitized(BinaryAccessProvider.class, "unknownPath", resType, thePath); + String msg = this.myCtx + .getLocalizer() + .getMessageSanitized(BinaryAccessProvider.class, "unknownPath", resType, thePath); throw new InvalidRequestException(Msg.code(1335) + msg); } IBase element = type.get(); @@ -246,12 +272,13 @@ public class BinaryAccessProvider { if (binaryTarget.isEmpty()) { BaseRuntimeElementDefinition def2 = myCtx.getElementDefinition(element.getClass()); - String msg = this.myCtx.getLocalizer().getMessageSanitized(BinaryAccessProvider.class, "unknownType", resType, thePath, def2.getName()); + String msg = this.myCtx + .getLocalizer() + .getMessageSanitized(BinaryAccessProvider.class, "unknownType", resType, thePath, def2.getName()); throw new InvalidRequestException(Msg.code(1336) + msg); } else { return binaryTarget.get(); } - } public Optional toBinaryTarget(IBase theElement) { @@ -269,7 +296,8 @@ public class BinaryAccessProvider { @Override public String getContentType() { - return AttachmentUtil.getOrCreateContentType(BinaryAccessProvider.this.myCtx, attachment).getValueAsString(); + return AttachmentUtil.getOrCreateContentType(BinaryAccessProvider.this.myCtx, attachment) + .getValueAsString(); } @Override @@ -288,13 +316,10 @@ public class BinaryAccessProvider { AttachmentUtil.setContentType(BinaryAccessProvider.this.myCtx, attachment, theContentType); } - @Override public void setData(byte[] theBytes) { AttachmentUtil.setData(myCtx, attachment, theBytes); } - - }; } @@ 
-327,20 +352,19 @@ public class BinaryAccessProvider { binary.setContentType(theContentType); } - @Override public void setData(byte[] theBytes) { binary.setContent(theBytes); } - - }; } return Optional.ofNullable(binaryTarget); } - private String validateResourceTypeAndPath(@IdParam IIdType theResourceId, @OperationParam(name = "path", min = 1, max = 1) IPrimitiveType thePath) { + private String validateResourceTypeAndPath( + @IdParam IIdType theResourceId, + @OperationParam(name = "path", min = 1, max = 1) IPrimitiveType thePath) { if (isBlank(theResourceId.getResourceType())) { throw new InvalidRequestException(Msg.code(1337) + "No resource type specified"); } @@ -362,12 +386,12 @@ public class BinaryAccessProvider { String resourceType = theResourceId.getResourceType(); IFhirResourceDao dao = myDaoRegistry.getResourceDao(resourceType); if (dao == null) { - throw new InvalidRequestException(Msg.code(1340) + "Unknown/unsupported resource type: " + sanitizeUrlPart(resourceType)); + throw new InvalidRequestException( + Msg.code(1340) + "Unknown/unsupported resource type: " + sanitizeUrlPart(resourceType)); } return dao; } - @VisibleForTesting public void setDaoRegistryForUnitTest(DaoRegistry theDaoRegistry) { myDaoRegistry = theDaoRegistry; diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/svc/BaseBinaryStorageSvcImpl.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/svc/BaseBinaryStorageSvcImpl.java index 6848f1bbdf8..4d1cb164ee2 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/svc/BaseBinaryStorageSvcImpl.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/svc/BaseBinaryStorageSvcImpl.java @@ -47,12 +47,11 @@ import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.io.IOException; import java.io.InputStream; -import java.security.SecureRandom; import java.util.Optional; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -66,10 +65,10 @@ public abstract class BaseBinaryStorageSvcImpl implements IBinaryStorageSvc { @Autowired private FhirContext myFhirContext; + @Autowired private IInterceptorBroadcaster myInterceptorBroadcaster; - public BaseBinaryStorageSvcImpl() { super(); } @@ -128,20 +127,25 @@ public abstract class BaseBinaryStorageSvcImpl implements IBinaryStorageSvc { public long getByteCount() { long retVal = super.getByteCount(); if (retVal > getMaximumBinarySize()) { - throw new PayloadTooLargeException(Msg.code(1343) + "Binary size exceeds maximum: " + getMaximumBinarySize()); + throw new PayloadTooLargeException( + Msg.code(1343) + "Binary size exceeds maximum: " + getMaximumBinarySize()); } return retVal; } }; } - @Deprecated(since = "6.6.0 - Maintained for interface backwards compatibility. Note that invokes interceptor pointcut with empty parameters", forRemoval = true) + @Deprecated( + since = + "6.6.0 - Maintained for interface backwards compatibility. Note that invokes interceptor pointcut with empty parameters", + forRemoval = true) protected String provideIdForNewBlob(String theBlobIdOrNull) { return isNotBlank(theBlobIdOrNull) ? 
theBlobIdOrNull : newBlobId(); } @Nonnull - protected String provideIdForNewBlob(String theBlobIdOrNull, byte[] theBytes, RequestDetails theRequestDetails, String theContentType) { + protected String provideIdForNewBlob( + String theBlobIdOrNull, byte[] theBytes, RequestDetails theRequestDetails, String theContentType) { String blobId = isNotBlank(theBlobIdOrNull) ? theBlobIdOrNull : newBlobId(); // make sure another pointcut didn't already apply a prefix to the blobId @@ -169,18 +173,16 @@ public abstract class BaseBinaryStorageSvcImpl implements IBinaryStorageSvc { @Nullable private String callBlobIdPointcut(byte[] theBytes, RequestDetails theRequestDetails, String theContentType) { // Interceptor call: STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX - IBaseBinary binary = BinaryUtil.newBinary(myFhirContext) - .setContent(theBytes) - .setContentType(theContentType); + IBaseBinary binary = + BinaryUtil.newBinary(myFhirContext).setContent(theBytes).setContentType(theContentType); - HookParams hookParams = new HookParams() - .add(RequestDetails.class, theRequestDetails) - .add(IBaseResource.class, binary); + HookParams hookParams = + new HookParams().add(RequestDetails.class, theRequestDetails).add(IBaseResource.class, binary); setBlobIdPrefixApplied(theRequestDetails); return (String) CompositeInterceptorBroadcaster.doCallHooksAndReturnObject( - myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX, hookParams); + myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX, hookParams); } @Override @@ -192,7 +194,8 @@ public abstract class BaseBinaryStorageSvcImpl implements IBinaryStorageSvc { if (attachmentId.isPresent()) { value = fetchBlob(theBaseBinary.getIdElement(), attachmentId.get()); } else { - throw new InternalErrorException(Msg.code(1344) + "Unable to load binary blob data for " + theBaseBinary.getIdElement()); + throw new InternalErrorException( + Msg.code(1344) + "Unable to load binary blob data for " + theBaseBinary.getIdElement()); } } return value; @@ -200,15 +203,13 @@ public abstract class BaseBinaryStorageSvcImpl implements IBinaryStorageSvc { @SuppressWarnings("unchecked") private Optional getAttachmentId(IBaseHasExtensions theBaseBinary) { - return theBaseBinary - .getExtension() - .stream() - .filter(t -> HapiExtensions.EXT_EXTERNALIZED_BINARY_ID.equals(t.getUrl())) - .filter(t -> t.getValue() instanceof IPrimitiveType) - .map(t -> (IPrimitiveType) t.getValue()) - .map(IPrimitiveType::getValue) - .filter(StringUtils::isNotBlank) - .findFirst(); + return theBaseBinary.getExtension().stream() + .filter(t -> HapiExtensions.EXT_EXTERNALIZED_BINARY_ID.equals(t.getUrl())) + .filter(t -> t.getValue() instanceof IPrimitiveType) + .map(t -> (IPrimitiveType) t.getValue()) + .map(IPrimitiveType::getValue) + .filter(StringUtils::isNotBlank) + .findFirst(); } @VisibleForTesting diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/svc/NullBinaryStorageSvcImpl.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/svc/NullBinaryStorageSvcImpl.java index 7495e10ffd1..3509be83924 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/svc/NullBinaryStorageSvcImpl.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binary/svc/NullBinaryStorageSvcImpl.java @@ -26,9 +26,9 @@ import ca.uhn.fhir.rest.api.server.RequestDetails; import org.hl7.fhir.instance.model.api.IBaseBinary; import org.hl7.fhir.instance.model.api.IIdType; -import javax.annotation.Nonnull; import java.io.InputStream; 
import java.io.OutputStream; +import javax.annotation.Nonnull; public class NullBinaryStorageSvcImpl implements IBinaryStorageSvc { @@ -69,8 +69,12 @@ public class NullBinaryStorageSvcImpl implements IBinaryStorageSvc { @Nonnull @Override - public StoredDetails storeBlob(IIdType theResourceId, String theBlobIdOrNull, String theContentType, - InputStream theInputStream, RequestDetails theRequestDetails) { + public StoredDetails storeBlob( + IIdType theResourceId, + String theBlobIdOrNull, + String theContentType, + InputStream theInputStream, + RequestDetails theRequestDetails) { throw new UnsupportedOperationException(Msg.code(1346)); } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binstore/FilesystemBinaryStorageSvcImpl.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binstore/FilesystemBinaryStorageSvcImpl.java index 7d14a5942f1..3d1ed467240 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binstore/FilesystemBinaryStorageSvcImpl.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binstore/FilesystemBinaryStorageSvcImpl.java @@ -39,8 +39,6 @@ import org.hl7.fhir.instance.model.api.IIdType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; @@ -52,6 +50,8 @@ import java.io.InputStreamReader; import java.io.OutputStream; import java.io.Reader; import java.util.Date; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class FilesystemBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl { @@ -83,13 +83,17 @@ public class FilesystemBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl { @Override public boolean isValidBlobId(String theNewBlobId) { return !StringUtils.containsAny(theNewBlobId, '\\', '/', '|', '.'); - } @Nonnull @Override - public StoredDetails storeBlob(IIdType theResourceId, String theBlobIdOrNull, String theContentType, - InputStream theInputStream, RequestDetails theRequestDetails) throws IOException { + public StoredDetails storeBlob( + IIdType theResourceId, + String theBlobIdOrNull, + String theContentType, + InputStream theInputStream, + RequestDetails theRequestDetails) + throws IOException { String id = super.provideIdForNewBlob(theBlobIdOrNull, null, theRequestDetails, theContentType); File storagePath = getStoragePath(id, true); @@ -112,7 +116,11 @@ public class FilesystemBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl { myJsonSerializer.writeValue(writer, details); } - ourLog.info("Stored binary blob with {} bytes and ContentType {} for resource {}", count, theContentType, theResourceId); + ourLog.info( + "Stored binary blob with {} bytes and ContentType {} for resource {}", + count, + theContentType, + theResourceId); return details; } @@ -186,10 +194,10 @@ public class FilesystemBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl { if (inputStream != null) { return IOUtils.toByteArray(inputStream, details.getBytes()); } - } - throw new ResourceNotFoundException(Msg.code(1327) + "Unknown blob ID: " + theBlobId + " for resource ID " + theResourceId); + throw new ResourceNotFoundException( + Msg.code(1327) + "Unknown blob ID: " + theBlobId + " for resource ID " + theResourceId); } private void delete(File theStorageFile, String theBlobId) { diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binstore/MemoryBinaryStorageSvcImpl.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binstore/MemoryBinaryStorageSvcImpl.java 
index a9959084abd..a5f6dd3d4af 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binstore/MemoryBinaryStorageSvcImpl.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/binstore/MemoryBinaryStorageSvcImpl.java @@ -28,12 +28,12 @@ import org.apache.commons.io.IOUtils; import org.apache.commons.io.input.CountingInputStream; import org.hl7.fhir.instance.model.api.IIdType; -import javax.annotation.Nonnull; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Date; import java.util.concurrent.ConcurrentHashMap; +import javax.annotation.Nonnull; /** * Purely in-memory implementation of binary storage service. This is really @@ -54,8 +54,13 @@ public class MemoryBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl impleme @Nonnull @Override - public StoredDetails storeBlob(IIdType theResourceId, String theBlobIdOrNull, String theContentType, - InputStream theInputStream, RequestDetails theRequestDetails) throws IOException { + public StoredDetails storeBlob( + IIdType theResourceId, + String theBlobIdOrNull, + String theContentType, + InputStream theInputStream, + RequestDetails theRequestDetails) + throws IOException { HashingInputStream hashingIs = createHashingInputStream(theInputStream); CountingInputStream countingIs = createCountingInputStream(hashingIs); @@ -65,7 +70,8 @@ public class MemoryBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl impleme String key = toKey(theResourceId, id); theInputStream.close(); myDataMap.put(key, bytes); - StoredDetails storedDetails = new StoredDetails(id, countingIs.getByteCount(), theContentType, hashingIs, new Date()); + StoredDetails storedDetails = + new StoredDetails(id, countingIs.getByteCount(), theContentType, hashingIs, new Date()); myDetailsMap.put(key, storedDetails); return storedDetails; } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BaseResourceToFileWriter.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BaseResourceToFileWriter.java deleted file mode 100644 index 66eacfb9686..00000000000 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BaseResourceToFileWriter.java +++ /dev/null @@ -1,19 +0,0 @@ -/*- - * #%L - * HAPI FHIR Storage api - * %% - * Copyright (C) 2014 - 2023 Smile CDR, Inc. - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/BulkExportResponseJson.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/BulkExportResponseJson.java index edb6022cb67..8bc512d3618 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/BulkExportResponseJson.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/BulkExportResponseJson.java @@ -19,10 +19,9 @@ */ package ca.uhn.fhir.jpa.bulk.export.model; - +import ca.uhn.fhir.model.api.IModelJson; import ca.uhn.fhir.rest.server.util.JsonDateDeserializer; import ca.uhn.fhir.rest.server.util.JsonDateSerializer; -import ca.uhn.fhir.model.api.IModelJson; import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; @@ -34,7 +33,12 @@ import java.util.Date; import java.util.List; @JsonInclude(JsonInclude.Include.NON_DEFAULT) -@JsonAutoDetect(creatorVisibility = JsonAutoDetect.Visibility.NONE, fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE) +@JsonAutoDetect( + creatorVisibility = JsonAutoDetect.Visibility.NONE, + fieldVisibility = JsonAutoDetect.Visibility.NONE, + getterVisibility = JsonAutoDetect.Visibility.NONE, + isGetterVisibility = JsonAutoDetect.Visibility.NONE, + setterVisibility = JsonAutoDetect.Visibility.NONE) public class BulkExportResponseJson { @JsonProperty("transactionTime") @@ -44,8 +48,10 @@ public class BulkExportResponseJson { @JsonProperty("request") private String myRequest; + @JsonProperty("requiresAccessToken") private Boolean myRequiresAccessToken; + @JsonProperty("output") private List myOutput; @@ -118,6 +124,7 @@ public class BulkExportResponseJson { @JsonProperty("type") private String myType; + @JsonProperty("url") private String myUrl; @@ -138,7 +145,5 @@ public class BulkExportResponseJson { myUrl = theUrl; return this; } - } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/ExportPIDIteratorParameters.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/ExportPIDIteratorParameters.java index a797086fca9..ff1bfb540a0 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/ExportPIDIteratorParameters.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/model/ExportPIDIteratorParameters.java @@ -55,6 +55,7 @@ public class ExportPIDIteratorParameters { * of results/status). 
*/ private String myInstanceId; + private String myChunkId; /** * The export style @@ -185,5 +186,4 @@ public class ExportPIDIteratorParameters { public String toString() { return ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE); } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkExportHelperService.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkExportHelperService.java index ca7105dd64e..db544446f05 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkExportHelperService.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkExportHelperService.java @@ -29,7 +29,6 @@ import org.hl7.fhir.instance.model.api.IIdType; import org.slf4j.Logger; import org.springframework.beans.factory.annotation.Autowired; -import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.List; @@ -51,20 +50,23 @@ public class BulkExportHelperService { * * The input boolean theConsiderSince determines whether to consider the lastUpdated date in the search parameter map. */ - public List createSearchParameterMapsForResourceType(RuntimeResourceDefinition theDef, ExportPIDIteratorParameters theParams, boolean theConsiderSince) { + public List createSearchParameterMapsForResourceType( + RuntimeResourceDefinition theDef, ExportPIDIteratorParameters theParams, boolean theConsiderSince) { String resourceType = theDef.getName(); List typeFilters = theParams.getFilters(); List spMaps = null; spMaps = typeFilters.stream() - .filter(typeFilter -> typeFilter.startsWith(resourceType + "?")) - .map(filter -> buildSearchParameterMapForTypeFilter(filter, theDef, theParams.getStartDate())) - .collect(Collectors.toList()); + .filter(typeFilter -> typeFilter.startsWith(resourceType + "?")) + .map(filter -> buildSearchParameterMapForTypeFilter(filter, theDef, theParams.getStartDate())) + .collect(Collectors.toList()); typeFilters.stream().filter(filter -> !filter.contains("?")).forEach(filter -> { - ourLog.warn("Found a strange _typeFilter that we could not process: {}. _typeFilters should follow the format ResourceType?searchparameter=value .", filter); + ourLog.warn( + "Found a strange _typeFilter that we could not process: {}. _typeFilters should follow the format ResourceType?searchparameter=value .", + filter); }); - //None of the _typeFilters applied to the current resource type, so just make a simple one. + // None of the _typeFilters applied to the current resource type, so just make a simple one. 
if (spMaps.isEmpty()) { SearchParameterMap defaultMap = new SearchParameterMap(); if (theConsiderSince) { @@ -76,7 +78,8 @@ public class BulkExportHelperService { return spMaps; } - private SearchParameterMap buildSearchParameterMapForTypeFilter(String theFilter, RuntimeResourceDefinition theDef, Date theSinceDate) { + private SearchParameterMap buildSearchParameterMapForTypeFilter( + String theFilter, RuntimeResourceDefinition theDef, Date theSinceDate) { SearchParameterMap searchParameterMap = myMatchUrlService.translateMatchUrl(theFilter, theDef); enhanceSearchParameterMapWithCommonParameters(searchParameterMap, theSinceDate); return searchParameterMap; diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/api/IBulkDataImportSvc.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/api/IBulkDataImportSvc.java index 87c01f4750f..d7f2d6e14b8 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/api/IBulkDataImportSvc.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/api/IBulkDataImportSvc.java @@ -24,44 +24,44 @@ import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson; import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson; import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum; -import javax.annotation.Nonnull; import java.util.Date; import java.util.List; +import javax.annotation.Nonnull; public interface IBulkDataImportSvc { - class JobInfo { - private BulkImportJobStatusEnum myStatus; - private Date myStatusTime; - private String myStatusMessage; + class JobInfo { + private BulkImportJobStatusEnum myStatus; + private Date myStatusTime; + private String myStatusMessage; - public Date getStatusTime() { - return myStatusTime; - } + public Date getStatusTime() { + return myStatusTime; + } - public JobInfo setStatusTime(Date theStatusTime) { - myStatusTime = theStatusTime; - return this; - } + public JobInfo setStatusTime(Date theStatusTime) { + myStatusTime = theStatusTime; + return this; + } - public BulkImportJobStatusEnum getStatus() { - return myStatus; - } + public BulkImportJobStatusEnum getStatus() { + return myStatus; + } - public JobInfo setStatus(BulkImportJobStatusEnum theStatus) { - myStatus = theStatus; - return this; - } + public JobInfo setStatus(BulkImportJobStatusEnum theStatus) { + myStatus = theStatus; + return this; + } - public String getStatusMessage() { - return myStatusMessage; - } + public String getStatusMessage() { + return myStatusMessage; + } - public JobInfo setStatusMessage(String theStatusMessage) { - myStatusMessage = theStatusMessage; - return this; - } - } + public JobInfo setStatusMessage(String theStatusMessage) { + myStatusMessage = theStatusMessage; + return this; + } + } /** * Create a new job in {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#STAGING STAGING} state (meaning it won't yet be worked on and can be added to) @@ -103,10 +103,10 @@ public interface IBulkDataImportSvc { */ void setJobToStatus(String theJobId, BulkImportJobStatusEnum theStatus, String theStatusMessage); - /** - * Gets the job status for the given job. - */ - JobInfo getJobStatus(String theJobId); + /** + * Gets the job status for the given job. 
+ */ + JobInfo getJobStatus(String theJobId); /** * Gets the number of files available for a given Job ID diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/ActivateJobResult.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/ActivateJobResult.java index 35b129ba98e..da558d26693 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/ActivateJobResult.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/ActivateJobResult.java @@ -34,8 +34,7 @@ public class ActivateJobResult { @Nullable public final String jobId; - public ActivateJobResult(boolean theIsActivated, - @Nullable String theJobId) { + public ActivateJobResult(boolean theIsActivated, @Nullable String theJobId) { isActivated = theIsActivated; jobId = theJobId; } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobFileJson.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobFileJson.java index b732905cdf4..ae15690d7a3 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobFileJson.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobFileJson.java @@ -26,8 +26,10 @@ public class BulkImportJobFileJson implements IModelJson { @JsonProperty("tenantName") private String myTenantName; + @JsonProperty("contents") private String myContents; + @JsonProperty("description") private String myDescription; @@ -56,5 +58,4 @@ public class BulkImportJobFileJson implements IModelJson { myContents = theContents; return this; } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobJson.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobJson.java index 6beb9d93a8e..71d8b779f7a 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobJson.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobJson.java @@ -26,10 +26,13 @@ public class BulkImportJobJson implements IModelJson { @JsonProperty("processingMode") private JobFileRowProcessingModeEnum myProcessingMode; + @JsonProperty("jobDescription") private String myJobDescription; + @JsonProperty("fileCount") private int myFileCount; + @JsonProperty("batchSize") private int myBatchSize; diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobStatusEnum.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobStatusEnum.java index cba8e4230ac..467ba6bdb99 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobStatusEnum.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobStatusEnum.java @@ -23,11 +23,9 @@ import com.fasterxml.jackson.annotation.JsonFormat; @JsonFormat(shape = JsonFormat.Shape.STRING) public enum BulkImportJobStatusEnum { - STAGING, READY, RUNNING, COMPLETE, ERROR - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/JobFileRowProcessingModeEnum.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/JobFileRowProcessingModeEnum.java index 4cfb1938103..58daca124fb 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/JobFileRowProcessingModeEnum.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/JobFileRowProcessingModeEnum.java @@ -27,7 +27,5 @@ public enum JobFileRowProcessingModeEnum { /** * 
Sorting OK */ - FHIR_TRANSACTION - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseStorageDao.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseStorageDao.java index a4a41ddee74..72f0d4a8f1b 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseStorageDao.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseStorageDao.java @@ -76,8 +76,6 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.Collection; import java.util.Collections; import java.util.IdentityHashMap; @@ -85,6 +83,8 @@ import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.Supplier; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.defaultString; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -102,12 +102,16 @@ public abstract class BaseStorageDao { @Autowired protected ISearchParamRegistry mySearchParamRegistry; + @Autowired protected FhirContext myFhirContext; + @Autowired protected DaoRegistry myDaoRegistry; + @Autowired protected IResourceVersionSvc myResourceVersionSvc; + @Autowired protected JpaStorageSettings myStorageSettings; @@ -132,7 +136,11 @@ public abstract class BaseStorageDao { * @param theResource The resource that is about to be stored * @since 5.3.0 */ - protected void preProcessResourceForStorage(IBaseResource theResource, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails, boolean thePerformIndexing) { + protected void preProcessResourceForStorage( + IBaseResource theResource, + RequestDetails theRequestDetails, + TransactionDetails theTransactionDetails, + boolean thePerformIndexing) { verifyResourceTypeIsAppropriateForDao(theResource); @@ -145,7 +153,6 @@ public abstract class BaseStorageDao { } performAutoVersioning(theResource, thePerformIndexing); - } /** @@ -154,7 +161,11 @@ public abstract class BaseStorageDao { private void verifyResourceTypeIsAppropriateForDao(IBaseResource theResource) { String type = getContext().getResourceType(theResource); if (getResourceName() != null && !getResourceName().equals(type)) { - throw new InvalidRequestException(Msg.code(520) + getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "incorrectResourceType", type, getResourceName())); + throw new InvalidRequestException(Msg.code(520) + + getContext() + .getLocalizer() + .getMessageSanitized( + BaseStorageDao.class, "incorrectResourceType", type, getResourceName())); } } @@ -164,7 +175,13 @@ public abstract class BaseStorageDao { private void verifyResourceIdIsValid(IBaseResource theResource) { if (theResource.getIdElement().hasIdPart()) { if (!theResource.getIdElement().isIdPartValid()) { - throw new InvalidRequestException(Msg.code(521) + getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "failedToCreateWithInvalidId", theResource.getIdElement().getIdPart())); + throw new InvalidRequestException(Msg.code(521) + + getContext() + .getLocalizer() + .getMessageSanitized( + BaseStorageDao.class, + "failedToCreateWithInvalidId", + theResource.getIdElement().getIdPart())); } } } @@ -178,7 +195,12 @@ public abstract class BaseStorageDao { String bundleType = BundleUtil.getBundleType(getContext(), (IBaseBundle) theResource); bundleType = 
defaultString(bundleType); if (!allowedBundleTypes.contains(bundleType)) { - String message = myFhirContext.getLocalizer().getMessage(BaseStorageDao.class, "invalidBundleTypeForStorage", (isNotBlank(bundleType) ? bundleType : "(missing)")); + String message = myFhirContext + .getLocalizer() + .getMessage( + BaseStorageDao.class, + "invalidBundleTypeForStorage", + (isNotBlank(bundleType) ? bundleType : "(missing)")); throw new UnprocessableEntityException(Msg.code(522) + message); } } @@ -218,14 +240,14 @@ public abstract class BaseStorageDao { */ private void performAutoVersioning(IBaseResource theResource, boolean thePerformIndexing) { if (thePerformIndexing) { - Set referencesToVersion = extractReferencesToAutoVersion(myFhirContext, myStorageSettings, theResource); + Set referencesToVersion = + extractReferencesToAutoVersion(myFhirContext, myStorageSettings, theResource); for (IBaseReference nextReference : referencesToVersion) { IIdType referenceElement = nextReference.getReferenceElement(); if (!referenceElement.hasBaseUrl()) { - ResourcePersistentIdMap resourceVersionMap = myResourceVersionSvc.getLatestVersionIdsForResourceIds(RequestPartitionId.allPartitions(), - Collections.singletonList(referenceElement) - ); + ResourcePersistentIdMap resourceVersionMap = myResourceVersionSvc.getLatestVersionIdsForResourceIds( + RequestPartitionId.allPartitions(), Collections.singletonList(referenceElement)); // 3 cases: // 1) there exists a resource in the db with some version (use this version) @@ -235,7 +257,9 @@ public abstract class BaseStorageDao { if (resourceVersionMap.containsKey(referenceElement)) { // the resource exists... latest id // will be the value in the IResourcePersistentId - version = resourceVersionMap.getResourcePersistentId(referenceElement).getVersion(); + version = resourceVersionMap + .getResourcePersistentId(referenceElement) + .getVersion(); } else if (myStorageSettings.isAutoCreatePlaceholderReferenceTargets()) { // if idToPID doesn't contain object // but autcreateplaceholders is on @@ -247,14 +271,20 @@ public abstract class BaseStorageDao { // we throw throw new ResourceNotFoundException(Msg.code(523) + referenceElement); } - String newTargetReference = referenceElement.withVersion(version.toString()).getValue(); + String newTargetReference = + referenceElement.withVersion(version.toString()).getValue(); nextReference.setReference(newTargetReference); } } } } - protected DaoMethodOutcome toMethodOutcome(RequestDetails theRequest, @Nonnull final IBasePersistedResource theEntity, @Nonnull IBaseResource theResource, @Nullable String theMatchUrl, @Nonnull RestOperationTypeEnum theOperationType) { + protected DaoMethodOutcome toMethodOutcome( + RequestDetails theRequest, + @Nonnull final IBasePersistedResource theEntity, + @Nonnull IBaseResource theResource, + @Nullable String theMatchUrl, + @Nonnull RestOperationTypeEnum theOperationType) { DaoMethodOutcome outcome = new DaoMethodOutcome(); IResourcePersistentId persistentId = theEntity.getPersistentId(); @@ -291,10 +321,11 @@ public abstract class BaseStorageDao { if (outcome.getResource() != null) { SimplePreResourceAccessDetails accessDetails = new SimplePreResourceAccessDetails(outcome.getResource()); HookParams params = new HookParams() - .add(IPreResourceAccessDetails.class, accessDetails) - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest); - CompositeInterceptorBroadcaster.doCallHooks(getInterceptorBroadcaster(), theRequest, Pointcut.STORAGE_PREACCESS_RESOURCES, 
params); + .add(IPreResourceAccessDetails.class, accessDetails) + .add(RequestDetails.class, theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest); + CompositeInterceptorBroadcaster.doCallHooks( + getInterceptorBroadcaster(), theRequest, Pointcut.STORAGE_PREACCESS_RESOURCES, params); if (accessDetails.isDontReturnResourceAtIndex(0)) { outcome.setResource(null); } @@ -308,10 +339,11 @@ public abstract class BaseStorageDao { if (outcome.getResource() != null) { SimplePreResourceShowDetails showDetails = new SimplePreResourceShowDetails(outcome.getResource()); HookParams params = new HookParams() - .add(IPreResourceShowDetails.class, showDetails) - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest); - CompositeInterceptorBroadcaster.doCallHooks(getInterceptorBroadcaster(), theRequest, Pointcut.STORAGE_PRESHOW_RESOURCES, params); + .add(IPreResourceShowDetails.class, showDetails) + .add(RequestDetails.class, theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest); + CompositeInterceptorBroadcaster.doCallHooks( + getInterceptorBroadcaster(), theRequest, Pointcut.STORAGE_PRESHOW_RESOURCES, params); outcome.setResource(showDetails.getResource(0)); } }); @@ -319,7 +351,11 @@ public abstract class BaseStorageDao { return outcome; } - protected DaoMethodOutcome toMethodOutcomeLazy(RequestDetails theRequest, IResourcePersistentId theResourcePersistentId, @Nonnull final Supplier theEntity, Supplier theIdSupplier) { + protected DaoMethodOutcome toMethodOutcomeLazy( + RequestDetails theRequest, + IResourcePersistentId theResourcePersistentId, + @Nonnull final Supplier theEntity, + Supplier theIdSupplier) { LazyDaoMethodOutcome outcome = new LazyDaoMethodOutcome(theResourcePersistentId); outcome.setEntitySupplier(theEntity); @@ -327,12 +363,14 @@ public abstract class BaseStorageDao { outcome.setEntitySupplierUseCallback(() -> { // Interceptor broadcast: STORAGE_PREACCESS_RESOURCES if (outcome.getResource() != null) { - SimplePreResourceAccessDetails accessDetails = new SimplePreResourceAccessDetails(outcome.getResource()); + SimplePreResourceAccessDetails accessDetails = + new SimplePreResourceAccessDetails(outcome.getResource()); HookParams params = new HookParams() - .add(IPreResourceAccessDetails.class, accessDetails) - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest); - CompositeInterceptorBroadcaster.doCallHooks(getInterceptorBroadcaster(), theRequest, Pointcut.STORAGE_PREACCESS_RESOURCES, params); + .add(IPreResourceAccessDetails.class, accessDetails) + .add(RequestDetails.class, theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest); + CompositeInterceptorBroadcaster.doCallHooks( + getInterceptorBroadcaster(), theRequest, Pointcut.STORAGE_PREACCESS_RESOURCES, params); if (accessDetails.isDontReturnResourceAtIndex(0)) { outcome.setResource(null); } @@ -346,10 +384,11 @@ public abstract class BaseStorageDao { if (outcome.getResource() != null) { SimplePreResourceShowDetails showDetails = new SimplePreResourceShowDetails(outcome.getResource()); HookParams params = new HookParams() - .add(IPreResourceShowDetails.class, showDetails) - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest); - CompositeInterceptorBroadcaster.doCallHooks(getInterceptorBroadcaster(), theRequest, Pointcut.STORAGE_PRESHOW_RESOURCES, params); + .add(IPreResourceShowDetails.class, showDetails) + .add(RequestDetails.class, 
theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest); + CompositeInterceptorBroadcaster.doCallHooks( + getInterceptorBroadcaster(), theRequest, Pointcut.STORAGE_PRESHOW_RESOURCES, params); outcome.setResource(showDetails.getResource(0)); } }); @@ -358,11 +397,16 @@ public abstract class BaseStorageDao { return outcome; } - protected void doCallHooks(TransactionDetails theTransactionDetails, RequestDetails theRequestDetails, Pointcut thePointcut, HookParams theParams) { + protected void doCallHooks( + TransactionDetails theTransactionDetails, + RequestDetails theRequestDetails, + Pointcut thePointcut, + HookParams theParams) { if (theTransactionDetails.isAcceptingDeferredInterceptorBroadcasts(thePointcut)) { theTransactionDetails.addDeferredInterceptorBroadcast(thePointcut, theParams); } else { - CompositeInterceptorBroadcaster.doCallHooks(getInterceptorBroadcaster(), theRequestDetails, thePointcut, theParams); + CompositeInterceptorBroadcaster.doCallHooks( + getInterceptorBroadcaster(), theRequestDetails, thePointcut, theParams); } } @@ -376,7 +420,8 @@ public abstract class BaseStorageDao { return createInfoOperationOutcome(theMessage, null); } - public IBaseOperationOutcome createInfoOperationOutcome(String theMessage, @Nullable StorageResponseCodeEnum theStorageResponseCode) { + public IBaseOperationOutcome createInfoOperationOutcome( + String theMessage, @Nullable StorageResponseCodeEnum theStorageResponseCode) { return createOperationOutcome(OO_SEVERITY_INFO, theMessage, "informational", theStorageResponseCode); } @@ -384,7 +429,11 @@ public abstract class BaseStorageDao { return createOperationOutcome(theSeverity, theMessage, theCode, null); } - protected IBaseOperationOutcome createOperationOutcome(String theSeverity, String theMessage, String theCode, @Nullable StorageResponseCodeEnum theStorageResponseCode) { + protected IBaseOperationOutcome createOperationOutcome( + String theSeverity, + String theMessage, + String theCode, + @Nullable StorageResponseCodeEnum theStorageResponseCode) { IBaseOperationOutcome oo = OperationOutcomeUtil.newInstance(getContext()); String detailSystem = null; String detailCode = null; @@ -394,7 +443,8 @@ public abstract class BaseStorageDao { detailCode = theStorageResponseCode.getCode(); detailDescription = theStorageResponseCode.getDisplay(); } - OperationOutcomeUtil.addIssue(getContext(), oo, theSeverity, theMessage, null, theCode, detailSystem, detailCode, detailDescription); + OperationOutcomeUtil.addIssue( + getContext(), oo, theSeverity, theMessage, null, theCode, detailSystem, detailCode, detailDescription); return oo; } @@ -405,7 +455,8 @@ public abstract class BaseStorageDao { * * @param theResourceId - the id of the object being deleted. 
Eg: Patient/123 */ - protected DaoMethodOutcome createMethodOutcomeForResourceId(String theResourceId, String theMessageKey, StorageResponseCodeEnum theStorageResponseCode) { + protected DaoMethodOutcome createMethodOutcomeForResourceId( + String theResourceId, String theMessageKey, StorageResponseCodeEnum theStorageResponseCode) { DaoMethodOutcome outcome = new DaoMethodOutcome(); IIdType id = getContext().getVersion().newIdType(); @@ -460,26 +511,39 @@ public abstract class BaseStorageDao { QualifierDetails qualifiedParamName = QualifierDetails.extractQualifiersFromParameterName(nextParamName); RuntimeSearchParam param = searchParams.get(qualifiedParamName.getParamName()); if (param == null) { - Collection validNames = mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(getResourceName()); - String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "invalidSearchParameter", qualifiedParamName.getParamName(), getResourceName(), validNames); + Collection validNames = + mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(getResourceName()); + String msg = getContext() + .getLocalizer() + .getMessageSanitized( + BaseStorageDao.class, + "invalidSearchParameter", + qualifiedParamName.getParamName(), + getResourceName(), + validNames); throw new InvalidRequestException(Msg.code(524) + msg); } // Should not be null since the check above would have caught it - RuntimeSearchParam paramDef = mySearchParamRegistry.getActiveSearchParam(getResourceName(), qualifiedParamName.getParamName()); + RuntimeSearchParam paramDef = + mySearchParamRegistry.getActiveSearchParam(getResourceName(), qualifiedParamName.getParamName()); for (String nextValue : theSource.get(nextParamName)) { - QualifiedParamList qualifiedParam = QualifiedParamList.splitQueryStringByCommasIgnoreEscape(qualifiedParamName.getWholeQualifier(), nextValue); + QualifiedParamList qualifiedParam = QualifiedParamList.splitQueryStringByCommasIgnoreEscape( + qualifiedParamName.getWholeQualifier(), nextValue); List paramList = Collections.singletonList(qualifiedParam); - IQueryParameterAnd parsedParam = JpaParamUtil.parseQueryParams(mySearchParamRegistry, getContext(), paramDef, nextParamName, paramList); + IQueryParameterAnd parsedParam = JpaParamUtil.parseQueryParams( + mySearchParamRegistry, getContext(), paramDef, nextParamName, paramList); theTarget.add(qualifiedParamName.getParamName(), parsedParam); } - } } - - protected void populateOperationOutcomeForUpdate(@Nullable StopWatch theItemStopwatch, DaoMethodOutcome theMethodOutcome, String theMatchUrl, RestOperationTypeEnum theOperationType) { + protected void populateOperationOutcomeForUpdate( + @Nullable StopWatch theItemStopwatch, + DaoMethodOutcome theMethodOutcome, + String theMatchUrl, + RestOperationTypeEnum theOperationType) { String msg; StorageResponseCodeEnum outcome; @@ -488,18 +552,37 @@ public abstract class BaseStorageDao { if (theMatchUrl != null) { if (theMethodOutcome.isNop()) { outcome = StorageResponseCodeEnum.SUCCESSFUL_CONDITIONAL_PATCH_NO_CHANGE; - msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulPatchConditionalNoChange", theMethodOutcome.getId(), UrlUtil.sanitizeUrlPart(theMatchUrl), theMethodOutcome.getId()); + msg = getContext() + .getLocalizer() + .getMessageSanitized( + BaseStorageDao.class, + "successfulPatchConditionalNoChange", + theMethodOutcome.getId(), + UrlUtil.sanitizeUrlPart(theMatchUrl), + theMethodOutcome.getId()); } else { outcome = 
StorageResponseCodeEnum.SUCCESSFUL_CONDITIONAL_PATCH; - msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulPatchConditional", theMethodOutcome.getId(), UrlUtil.sanitizeUrlPart(theMatchUrl), theMethodOutcome.getId()); + msg = getContext() + .getLocalizer() + .getMessageSanitized( + BaseStorageDao.class, + "successfulPatchConditional", + theMethodOutcome.getId(), + UrlUtil.sanitizeUrlPart(theMatchUrl), + theMethodOutcome.getId()); } } else { if (theMethodOutcome.isNop()) { outcome = StorageResponseCodeEnum.SUCCESSFUL_PATCH_NO_CHANGE; - msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulPatchNoChange", theMethodOutcome.getId()); + msg = getContext() + .getLocalizer() + .getMessageSanitized( + BaseStorageDao.class, "successfulPatchNoChange", theMethodOutcome.getId()); } else { outcome = StorageResponseCodeEnum.SUCCESSFUL_PATCH; - msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulPatch", theMethodOutcome.getId()); + msg = getContext() + .getLocalizer() + .getMessageSanitized(BaseStorageDao.class, "successfulPatch", theMethodOutcome.getId()); } } @@ -507,23 +590,46 @@ public abstract class BaseStorageDao { if (theMatchUrl == null) { outcome = StorageResponseCodeEnum.SUCCESSFUL_CREATE; - msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulCreate", theMethodOutcome.getId()); + msg = getContext() + .getLocalizer() + .getMessageSanitized(BaseStorageDao.class, "successfulCreate", theMethodOutcome.getId()); } else if (theMethodOutcome.isNop()) { outcome = StorageResponseCodeEnum.SUCCESSFUL_CREATE_WITH_CONDITIONAL_MATCH; - msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulCreateConditionalWithMatch", theMethodOutcome.getId(), UrlUtil.sanitizeUrlPart(theMatchUrl)); + msg = getContext() + .getLocalizer() + .getMessageSanitized( + BaseStorageDao.class, + "successfulCreateConditionalWithMatch", + theMethodOutcome.getId(), + UrlUtil.sanitizeUrlPart(theMatchUrl)); } else { outcome = StorageResponseCodeEnum.SUCCESSFUL_CREATE_NO_CONDITIONAL_MATCH; - msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulCreateConditionalNoMatch", theMethodOutcome.getId(), UrlUtil.sanitizeUrlPart(theMatchUrl)); + msg = getContext() + .getLocalizer() + .getMessageSanitized( + BaseStorageDao.class, + "successfulCreateConditionalNoMatch", + theMethodOutcome.getId(), + UrlUtil.sanitizeUrlPart(theMatchUrl)); } } else if (theMethodOutcome.isNop()) { if (theMatchUrl != null) { outcome = StorageResponseCodeEnum.SUCCESSFUL_UPDATE_WITH_CONDITIONAL_MATCH_NO_CHANGE; - msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulUpdateConditionalNoChangeWithMatch", theMethodOutcome.getId(), theMatchUrl); + msg = getContext() + .getLocalizer() + .getMessageSanitized( + BaseStorageDao.class, + "successfulUpdateConditionalNoChangeWithMatch", + theMethodOutcome.getId(), + theMatchUrl); } else { outcome = StorageResponseCodeEnum.SUCCESSFUL_UPDATE_NO_CHANGE; - msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulUpdateNoChange", theMethodOutcome.getId()); + msg = getContext() + .getLocalizer() + .getMessageSanitized( + BaseStorageDao.class, "successfulUpdateNoChange", theMethodOutcome.getId()); } } else { @@ -531,23 +637,40 @@ public abstract class BaseStorageDao { if (theMatchUrl != null) { if (theMethodOutcome.getCreated() == Boolean.TRUE) { outcome = 
StorageResponseCodeEnum.SUCCESSFUL_UPDATE_NO_CONDITIONAL_MATCH; - msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulUpdateConditionalNoMatch", theMethodOutcome.getId()); + msg = getContext() + .getLocalizer() + .getMessageSanitized( + BaseStorageDao.class, + "successfulUpdateConditionalNoMatch", + theMethodOutcome.getId()); } else { outcome = StorageResponseCodeEnum.SUCCESSFUL_UPDATE_WITH_CONDITIONAL_MATCH; - msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulUpdateConditionalWithMatch", theMethodOutcome.getId(), theMatchUrl); + msg = getContext() + .getLocalizer() + .getMessageSanitized( + BaseStorageDao.class, + "successfulUpdateConditionalWithMatch", + theMethodOutcome.getId(), + theMatchUrl); } } else if (theMethodOutcome.getCreated() == Boolean.TRUE) { outcome = StorageResponseCodeEnum.SUCCESSFUL_UPDATE_AS_CREATE; - msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulUpdateAsCreate", theMethodOutcome.getId()); + msg = getContext() + .getLocalizer() + .getMessageSanitized( + BaseStorageDao.class, "successfulUpdateAsCreate", theMethodOutcome.getId()); } else { outcome = StorageResponseCodeEnum.SUCCESSFUL_UPDATE; - msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulUpdate", theMethodOutcome.getId()); + msg = getContext() + .getLocalizer() + .getMessageSanitized(BaseStorageDao.class, "successfulUpdate", theMethodOutcome.getId()); } - } if (theItemStopwatch != null) { - String msgSuffix = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulTimingSuffix", theItemStopwatch.getMillis()); + String msgSuffix = getContext() + .getLocalizer() + .getMessageSanitized(BaseStorageDao.class, "successfulTimingSuffix", theItemStopwatch.getMillis()); msg = msg + " " + msgSuffix; } @@ -559,12 +682,14 @@ public abstract class BaseStorageDao { * @see StorageSettings#getAutoVersionReferenceAtPaths() */ @Nonnull - public static Set extractReferencesToAutoVersion(FhirContext theFhirContext, StorageSettings theStorageSettings, IBaseResource theResource) { + public static Set extractReferencesToAutoVersion( + FhirContext theFhirContext, StorageSettings theStorageSettings, IBaseResource theResource) { Map references = Collections.emptyMap(); if (!theStorageSettings.getAutoVersionReferenceAtPaths().isEmpty()) { String resourceName = theFhirContext.getResourceType(theResource); for (String nextPath : theStorageSettings.getAutoVersionReferenceAtPathsByResourceType(resourceName)) { - List nextReferences = theFhirContext.newTerser().getValues(theResource, nextPath, IBaseReference.class); + List nextReferences = + theFhirContext.newTerser().getValues(theResource, nextPath, IBaseReference.class); for (IBaseReference next : nextReferences) { if (next.getReferenceElement().hasVersionIdPart()) { continue; @@ -590,5 +715,4 @@ public abstract class BaseStorageDao { theRequestDetails.getUserData().put(PROCESSING_SUB_REQUEST, Boolean.TRUE); } } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseStorageResourceDao.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseStorageResourceDao.java index 0e4dc81f461..46d2c2fb925 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseStorageResourceDao.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseStorageResourceDao.java @@ -30,7 +30,6 @@ import ca.uhn.fhir.jpa.patch.FhirPatch; import ca.uhn.fhir.jpa.patch.JsonPatchUtils; import 
ca.uhn.fhir.jpa.patch.XmlPatchUtils; import ca.uhn.fhir.parser.StrictErrorHandler; -import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.DeleteCascadeModeEnum; import ca.uhn.fhir.rest.api.PatchTypeEnum; import ca.uhn.fhir.rest.api.RestOperationTypeEnum; @@ -51,14 +50,15 @@ import org.hl7.fhir.instance.model.api.IIdType; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.support.TransactionSynchronizationManager; -import javax.annotation.Nonnull; import java.util.Collections; import java.util.List; import java.util.Set; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isNotBlank; -public abstract class BaseStorageResourceDao extends BaseStorageDao implements IFhirResourceDao, IJpaDao { +public abstract class BaseStorageResourceDao extends BaseStorageDao + implements IFhirResourceDao, IJpaDao { public static final StrictErrorHandler STRICT_ERROR_HANDLER = new StrictErrorHandler(); @Autowired @@ -74,29 +74,65 @@ public abstract class BaseStorageResourceDao extends Ba protected abstract IDeleteExpungeJobSubmitter getDeleteExpungeJobSubmitter(); @Override - public DaoMethodOutcome patch(IIdType theId, String theConditionalUrl, PatchTypeEnum thePatchType, String thePatchBody, IBaseParameters theFhirPatchBody, RequestDetails theRequestDetails) { + public DaoMethodOutcome patch( + IIdType theId, + String theConditionalUrl, + PatchTypeEnum thePatchType, + String thePatchBody, + IBaseParameters theFhirPatchBody, + RequestDetails theRequestDetails) { TransactionDetails transactionDetails = new TransactionDetails(); - return getTransactionService().execute(theRequestDetails, transactionDetails, tx -> patchInTransaction(theId, theConditionalUrl, true, thePatchType, thePatchBody, theFhirPatchBody, theRequestDetails, transactionDetails)); + return getTransactionService() + .execute( + theRequestDetails, + transactionDetails, + tx -> patchInTransaction( + theId, + theConditionalUrl, + true, + thePatchType, + thePatchBody, + theFhirPatchBody, + theRequestDetails, + transactionDetails)); } @Override - public DaoMethodOutcome patchInTransaction(IIdType theId, String theConditionalUrl, boolean thePerformIndexing, PatchTypeEnum thePatchType, String thePatchBody, IBaseParameters theFhirPatchBody, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) { + public DaoMethodOutcome patchInTransaction( + IIdType theId, + String theConditionalUrl, + boolean thePerformIndexing, + PatchTypeEnum thePatchType, + String thePatchBody, + IBaseParameters theFhirPatchBody, + RequestDetails theRequestDetails, + TransactionDetails theTransactionDetails) { assert TransactionSynchronizationManager.isActualTransactionActive(); IBasePersistedResource entityToUpdate; IIdType resourceId; if (isNotBlank(theConditionalUrl)) { - Set match = getMatchResourceUrlService().processMatchUrl(theConditionalUrl, getResourceType(), theTransactionDetails, theRequestDetails); + Set match = getMatchResourceUrlService() + .processMatchUrl(theConditionalUrl, getResourceType(), theTransactionDetails, theRequestDetails); if (match.size() > 1) { - String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "transactionOperationWithMultipleMatchFailure", "PATCH", theConditionalUrl, match.size()); + String msg = getContext() + .getLocalizer() + .getMessageSanitized( + BaseStorageDao.class, + "transactionOperationWithMultipleMatchFailure", + "PATCH", + theConditionalUrl, + match.size()); throw new 
PreconditionFailedException(Msg.code(972) + msg); } else if (match.size() == 1) { IResourcePersistentId pid = match.iterator().next(); entityToUpdate = readEntityLatestVersion(pid, theRequestDetails, theTransactionDetails); resourceId = entityToUpdate.getIdDt(); } else { - String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "invalidMatchUrlNoMatches", theConditionalUrl); + String msg = getContext() + .getLocalizer() + .getMessageSanitized(BaseStorageDao.class, "invalidMatchUrlNoMatches", theConditionalUrl); throw new ResourceNotFoundException(Msg.code(973) + msg); } @@ -105,7 +141,8 @@ public abstract class BaseStorageResourceDao extends Ba entityToUpdate = readEntityLatestVersion(theId, theRequestDetails, theTransactionDetails); if (theId.hasVersionIdPart()) { if (theId.getVersionIdPartAsLong() != entityToUpdate.getVersion()) { - throw new ResourceVersionConflictException(Msg.code(974) + "Version " + theId.getVersionIdPart() + " is not the most recent version of this resource, unable to apply patch"); + throw new ResourceVersionConflictException(Msg.code(974) + "Version " + theId.getVersionIdPart() + + " is not the most recent version of this resource, unable to apply patch"); } } } @@ -118,7 +155,8 @@ public abstract class BaseStorageResourceDao extends Ba IBaseResource resourceToUpdate = getStorageResourceParser().toResource(entityToUpdate, false); if (resourceToUpdate == null) { - // If this is null, we are presumably in a FHIR transaction bundle with both a create and a patch on the same + // If this is null, we are presumably in a FHIR transaction bundle with both a create and a patch on the + // same // resource. This is weird but not impossible. resourceToUpdate = theTransactionDetails.getResolvedResource(resourceId); } @@ -142,11 +180,23 @@ public abstract class BaseStorageResourceDao extends Ba @SuppressWarnings("unchecked") T destinationCasted = (T) destination; - myFhirContext.newJsonParser().setParserErrorHandler(STRICT_ERROR_HANDLER).encodeResourceToString(destinationCasted); + myFhirContext + .newJsonParser() + .setParserErrorHandler(STRICT_ERROR_HANDLER) + .encodeResourceToString(destinationCasted); preProcessResourceForStorage(destinationCasted, theRequestDetails, theTransactionDetails, true); - return doUpdateForUpdateOrPatch(theRequestDetails, resourceId, theConditionalUrl, thePerformIndexing, false, destinationCasted, entityToUpdate, RestOperationTypeEnum.PATCH, theTransactionDetails); + return doUpdateForUpdateOrPatch( + theRequestDetails, + resourceId, + theConditionalUrl, + thePerformIndexing, + false, + destinationCasted, + entityToUpdate, + RestOperationTypeEnum.PATCH, + theTransactionDetails); } @Override @@ -157,18 +207,34 @@ public abstract class BaseStorageResourceDao extends Ba @Nonnull protected abstract String getResourceName(); - protected abstract IBasePersistedResource readEntityLatestVersion(IResourcePersistentId thePersistentId, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails); + protected abstract IBasePersistedResource readEntityLatestVersion( + IResourcePersistentId thePersistentId, + RequestDetails theRequestDetails, + TransactionDetails theTransactionDetails); - protected abstract IBasePersistedResource readEntityLatestVersion(IIdType theId, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails); + protected abstract IBasePersistedResource readEntityLatestVersion( + IIdType theId, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails); - - protected 
DaoMethodOutcome doUpdateForUpdateOrPatch(RequestDetails theRequest, IIdType theResourceId, String theMatchUrl, boolean thePerformIndexing, boolean theForceUpdateVersion, T theResource, IBasePersistedResource theEntity, RestOperationTypeEnum theOperationType, TransactionDetails theTransactionDetails) { - if (theResourceId.hasVersionIdPart() && Long.parseLong(theResourceId.getVersionIdPart()) != theEntity.getVersion()) { - throw new ResourceVersionConflictException(Msg.code(989) + "Trying to update " + theResourceId + " but this is not the current version"); + protected DaoMethodOutcome doUpdateForUpdateOrPatch( + RequestDetails theRequest, + IIdType theResourceId, + String theMatchUrl, + boolean thePerformIndexing, + boolean theForceUpdateVersion, + T theResource, + IBasePersistedResource theEntity, + RestOperationTypeEnum theOperationType, + TransactionDetails theTransactionDetails) { + if (theResourceId.hasVersionIdPart() + && Long.parseLong(theResourceId.getVersionIdPart()) != theEntity.getVersion()) { + throw new ResourceVersionConflictException( + Msg.code(989) + "Trying to update " + theResourceId + " but this is not the current version"); } if (theResourceId.hasResourceType() && !theResourceId.getResourceType().equals(getResourceName())) { - throw new UnprocessableEntityException(Msg.code(990) + "Invalid resource ID[" + theEntity.getIdDt().toUnqualifiedVersionless() + "] of type[" + theEntity.getResourceType() + "] - Does not match expected [" + getResourceName() + "]"); + throw new UnprocessableEntityException(Msg.code(990) + "Invalid resource ID[" + + theEntity.getIdDt().toUnqualifiedVersionless() + "] of type[" + theEntity.getResourceType() + + "] - Does not match expected [" + getResourceName() + "]"); } IBaseResource oldResource; @@ -200,12 +266,15 @@ public abstract class BaseStorageResourceDao extends Ba */ if (!thePerformIndexing) { theResource.setId(theEntity.getIdDt().getValue()); - DaoMethodOutcome outcome = toMethodOutcome(theRequest, theEntity, theResource, theMatchUrl, theOperationType).setCreated(wasDeleted); + DaoMethodOutcome outcome = toMethodOutcome( + theRequest, theEntity, theResource, theMatchUrl, theOperationType) + .setCreated(wasDeleted); outcome.setPreviousResource(oldResource); if (!outcome.isNop()) { // Technically this may not end up being right since we might not increment if the // contents turn out to be the same - outcome.setId(outcome.getId().withVersion(Long.toString(outcome.getId().getVersionIdPartAsLong() + 1))); + outcome.setId(outcome.getId() + .withVersion(Long.toString(outcome.getId().getVersionIdPartAsLong() + 1))); } return outcome; } @@ -213,21 +282,35 @@ public abstract class BaseStorageResourceDao extends Ba /* * Otherwise, we're not in a transaction */ - return updateInternal(theRequest, theResource, theMatchUrl, thePerformIndexing, theForceUpdateVersion, theEntity, theResourceId, oldResource, theOperationType, theTransactionDetails); + return updateInternal( + theRequest, + theResource, + theMatchUrl, + thePerformIndexing, + theForceUpdateVersion, + theEntity, + theResourceId, + oldResource, + theOperationType, + theTransactionDetails); } public static void validateResourceType(IBasePersistedResource theEntity, String theResourceName) { if (!theResourceName.equals(theEntity.getResourceType())) { - throw new ResourceNotFoundException(Msg.code(935) + "Resource with ID " + theEntity.getIdDt().getIdPart() + " exists but it is not of type " + theResourceName + ", found resource of type " + theEntity.getResourceType()); + throw new 
ResourceNotFoundException(Msg.code(935) + "Resource with ID " + + theEntity.getIdDt().getIdPart() + " exists but it is not of type " + theResourceName + + ", found resource of type " + theEntity.getResourceType()); } } protected DeleteMethodOutcome deleteExpunge(String theUrl, RequestDetails theRequest) { if (!getStorageSettings().canDeleteExpunge()) { - throw new MethodNotAllowedException(Msg.code(963) + "_expunge is not enabled on this server: " + getStorageSettings().cannotDeleteExpungeReason()); + throw new MethodNotAllowedException(Msg.code(963) + "_expunge is not enabled on this server: " + + getStorageSettings().cannotDeleteExpungeReason()); } - RestfulServerUtils.DeleteCascadeDetails cascadeDelete = RestfulServerUtils.extractDeleteCascadeParameter(theRequest); + RestfulServerUtils.DeleteCascadeDetails cascadeDelete = + RestfulServerUtils.extractDeleteCascadeParameter(theRequest); boolean cascade = false; Integer cascadeMaxRounds = null; if (cascadeDelete.getMode() == DeleteCascadeModeEnum.DELETE) { @@ -235,14 +318,21 @@ public abstract class BaseStorageResourceDao extends Ba cascadeMaxRounds = cascadeDelete.getMaxRounds(); if (cascadeMaxRounds == null) { cascadeMaxRounds = myStorageSettings.getMaximumDeleteConflictQueryCount(); - } else if (myStorageSettings.getMaximumDeleteConflictQueryCount() != null && myStorageSettings.getMaximumDeleteConflictQueryCount() < cascadeMaxRounds) { + } else if (myStorageSettings.getMaximumDeleteConflictQueryCount() != null + && myStorageSettings.getMaximumDeleteConflictQueryCount() < cascadeMaxRounds) { cascadeMaxRounds = myStorageSettings.getMaximumDeleteConflictQueryCount(); } } List urlsToDeleteExpunge = Collections.singletonList(theUrl); try { - String jobId = getDeleteExpungeJobSubmitter().submitJob(getStorageSettings().getExpungeBatchSize(), urlsToDeleteExpunge, cascade, cascadeMaxRounds, theRequest); + String jobId = getDeleteExpungeJobSubmitter() + .submitJob( + getStorageSettings().getExpungeBatchSize(), + urlsToDeleteExpunge, + cascade, + cascadeMaxRounds, + theRequest); return new DeleteMethodOutcome(createInfoOperationOutcome("Delete job submitted with id " + jobId)); } catch (InvalidRequestException e) { throw new InvalidRequestException(Msg.code(965) + "Invalid Delete Expunge Request: " + e.getMessage(), e); diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java index ee4ed9b6621..3604cd2cd8d 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java @@ -105,7 +105,6 @@ import org.springframework.transaction.TransactionDefinition; import org.springframework.transaction.support.TransactionCallback; import org.springframework.transaction.support.TransactionTemplate; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Collection; import java.util.Comparator; @@ -125,6 +124,7 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.regex.Pattern; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import static ca.uhn.fhir.util.StringUtil.toUtf8String; import static java.util.Objects.isNull; @@ -139,24 +139,32 @@ public abstract class BaseTransactionProcessor { public static final Pattern UNQUALIFIED_MATCH_URL_START = Pattern.compile("^[a-zA-Z0-9_-]+="); public static final Pattern 
INVALID_PLACEHOLDER_PATTERN = Pattern.compile("[a-zA-Z]+:.*"); private static final Logger ourLog = LoggerFactory.getLogger(BaseTransactionProcessor.class); + @Autowired private PlatformTransactionManager myTxManager; + @Autowired private FhirContext myContext; @Autowired @SuppressWarnings("rawtypes") private ITransactionProcessorVersionAdapter myVersionAdapter; + @Autowired private DaoRegistry myDaoRegistry; + @Autowired private IInterceptorBroadcaster myInterceptorBroadcaster; + @Autowired private IHapiTransactionService myHapiTransactionService; + @Autowired private StorageSettings myStorageSettings; + @Autowired private InMemoryResourceMatcher myInMemoryResourceMatcher; + @Autowired private SearchParamMatcher mySearchParamMatcher; @@ -184,12 +192,16 @@ public abstract class BaseTransactionProcessor { private TaskExecutor getTaskExecutor() { if (myExecutor == null) { - myExecutor = myThreadPoolFactory.newThreadPool(myStorageSettings.getBundleBatchPoolSize(), myStorageSettings.getBundleBatchMaxPoolSize(), "bundle-batch-"); + myExecutor = myThreadPoolFactory.newThreadPool( + myStorageSettings.getBundleBatchPoolSize(), + myStorageSettings.getBundleBatchMaxPoolSize(), + "bundle-batch-"); } return myExecutor; } - public BUNDLE transaction(RequestDetails theRequestDetails, BUNDLE theRequest, boolean theNestedMode) { + public BUNDLE transaction( + RequestDetails theRequestDetails, BUNDLE theRequest, boolean theNestedMode) { String actionName = "Transaction"; IBaseBundle response = processTransactionAsSubRequest(theRequestDetails, theRequest, actionName, theNestedMode); @@ -208,15 +220,19 @@ public abstract class BaseTransactionProcessor { String transactionType = myVersionAdapter.getBundleType(theRequest); if (!org.hl7.fhir.r4.model.Bundle.BundleType.COLLECTION.toCode().equals(transactionType)) { - throw new InvalidRequestException(Msg.code(526) + "Can not process collection Bundle of type: " + transactionType); + throw new InvalidRequestException( + Msg.code(526) + "Can not process collection Bundle of type: " + transactionType); } - ourLog.info("Beginning storing collection with {} resources", myVersionAdapter.getEntries(theRequest).size()); + ourLog.info( + "Beginning storing collection with {} resources", + myVersionAdapter.getEntries(theRequest).size()); TransactionTemplate txTemplate = new TransactionTemplate(myTxManager); txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW); - IBaseBundle resp = myVersionAdapter.createBundle(org.hl7.fhir.r4.model.Bundle.BundleType.BATCHRESPONSE.toCode()); + IBaseBundle resp = + myVersionAdapter.createBundle(org.hl7.fhir.r4.model.Bundle.BundleType.BATCHRESPONSE.toCode()); List resources = new ArrayList<>(); for (final Object nextRequestEntry : myVersionAdapter.getEntries(theRequest)) { @@ -229,7 +245,8 @@ public abstract class BaseTransactionProcessor { IBase entry = myVersionAdapter.addEntry(transactionBundle); myVersionAdapter.setResource(entry, next); myVersionAdapter.setRequestVerb(entry, "PUT"); - myVersionAdapter.setRequestUrl(entry, next.getIdElement().toUnqualifiedVersionless().getValue()); + myVersionAdapter.setRequestUrl( + entry, next.getIdElement().toUnqualifiedVersionless().getValue()); } transaction(theRequestDetails, transactionBundle, false); @@ -243,10 +260,15 @@ public abstract class BaseTransactionProcessor { } @SuppressWarnings("unchecked") - private void handleTransactionCreateOrUpdateOutcome(IdSubstitutionMap idSubstitutions, Map idToPersistedOutcome, - IIdType nextResourceId, DaoMethodOutcome outcome, - 
IBase newEntry, String theResourceType, - IBaseResource theRes, RequestDetails theRequestDetails) { + private void handleTransactionCreateOrUpdateOutcome( + IdSubstitutionMap idSubstitutions, + Map idToPersistedOutcome, + IIdType nextResourceId, + DaoMethodOutcome outcome, + IBase newEntry, + String theResourceType, + IBaseResource theRes, + RequestDetails theRequestDetails) { IIdType newId = outcome.getId().toUnqualified(); IIdType resourceId = isPlaceholder(nextResourceId) ? nextResourceId : nextResourceId.toUnqualifiedVersionless(); if (newId.equals(resourceId) == false) { @@ -284,7 +306,8 @@ public abstract class BaseTransactionProcessor { if (theRequestDetails != null) { String prefer = theRequestDetails.getHeader(Constants.HEADER_PREFER); - PreferReturnEnum preferReturn = RestfulServerUtils.parsePreferHeader(null, prefer).getReturn(); + PreferReturnEnum preferReturn = + RestfulServerUtils.parsePreferHeader(null, prefer).getReturn(); if (preferReturn != null) { if (preferReturn == PreferReturnEnum.REPRESENTATION) { if (outcome.getResource() != null) { @@ -294,7 +317,6 @@ public abstract class BaseTransactionProcessor { } } } - } /** @@ -302,8 +324,9 @@ public abstract class BaseTransactionProcessor { * Will store whatever outcome is sent, unless the key already exists, then we only replace an instance if we find that the instance * we are replacing with is non-lazy. This allows us to evaluate later more easily, as we _know_ we need access to these. */ - private void populateIdToPersistedOutcomeMap(Map idToPersistedOutcome, IIdType newId, DaoMethodOutcome outcome) { - //Prefer real method outcomes over lazy ones. + private void populateIdToPersistedOutcomeMap( + Map idToPersistedOutcome, IIdType newId, DaoMethodOutcome outcome) { + // Prefer real method outcomes over lazy ones. 
if (idToPersistedOutcome.containsKey(newId)) { if (!(outcome instanceof LazyDaoMethodOutcome)) { idToPersistedOutcome.put(newId, outcome); @@ -317,17 +340,20 @@ public abstract class BaseTransactionProcessor { return theRes.getMeta().getLastUpdated(); } - private IBaseBundle processTransactionAsSubRequest(RequestDetails theRequestDetails, IBaseBundle theRequest, String theActionName, boolean theNestedMode) { + private IBaseBundle processTransactionAsSubRequest( + RequestDetails theRequestDetails, IBaseBundle theRequest, String theActionName, boolean theNestedMode) { BaseStorageDao.markRequestAsProcessingSubRequest(theRequestDetails); try { // Interceptor call: STORAGE_TRANSACTION_PROCESSING - if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.STORAGE_TRANSACTION_PROCESSING, myInterceptorBroadcaster, theRequestDetails)) { + if (CompositeInterceptorBroadcaster.hasHooks( + Pointcut.STORAGE_TRANSACTION_PROCESSING, myInterceptorBroadcaster, theRequestDetails)) { HookParams params = new HookParams() - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequest) - .add(IBaseBundle.class, theRequest); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_TRANSACTION_PROCESSING, params); + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequest) + .add(IBaseBundle.class, theRequest); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_TRANSACTION_PROCESSING, params); } return processTransaction(theRequestDetails, theRequest, theActionName, theNestedMode); @@ -342,20 +368,24 @@ public abstract class BaseTransactionProcessor { } private IBaseBundle batch(final RequestDetails theRequestDetails, IBaseBundle theRequest, boolean theNestedMode) { - ourLog.info("Beginning batch with {} resources", myVersionAdapter.getEntries(theRequest).size()); + ourLog.info( + "Beginning batch with {} resources", + myVersionAdapter.getEntries(theRequest).size()); long start = System.currentTimeMillis(); TransactionTemplate txTemplate = new TransactionTemplate(myTxManager); txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW); - IBaseBundle response = myVersionAdapter.createBundle(org.hl7.fhir.r4.model.Bundle.BundleType.BATCHRESPONSE.toCode()); + IBaseBundle response = + myVersionAdapter.createBundle(org.hl7.fhir.r4.model.Bundle.BundleType.BATCHRESPONSE.toCode()); Map responseMap = new ConcurrentHashMap<>(); List requestEntries = myVersionAdapter.getEntries(theRequest); int requestEntriesSize = requestEntries.size(); - // Now, run all non-gets sequentially, and all gets are submitted to the executor to run (potentially) in parallel + // Now, run all non-gets sequentially, and all gets are submitted to the executor to run (potentially) in + // parallel // The result is kept in the map to save the original position List getCalls = new ArrayList<>(); List nonGetCalls = new ArrayList<>(); @@ -363,16 +393,19 @@ public abstract class BaseTransactionProcessor { CountDownLatch completionLatch = new CountDownLatch(requestEntriesSize); for (int i = 0; i < requestEntriesSize; i++) { IBase nextRequestEntry = requestEntries.get(i); - RetriableBundleTask retriableBundleTask = new RetriableBundleTask(completionLatch, theRequestDetails, responseMap, i, nextRequestEntry, theNestedMode); - if (myVersionAdapter.getEntryRequestVerb(myContext, nextRequestEntry).equalsIgnoreCase("GET")) { + 
RetriableBundleTask retriableBundleTask = new RetriableBundleTask( + completionLatch, theRequestDetails, responseMap, i, nextRequestEntry, theNestedMode); + if (myVersionAdapter + .getEntryRequestVerb(myContext, nextRequestEntry) + .equalsIgnoreCase("GET")) { getCalls.add(retriableBundleTask); } else { nonGetCalls.add(retriableBundleTask); } } - //Execute all non-gets on calling thread. + // Execute all non-gets on calling thread. nonGetCalls.forEach(RetriableBundleTask::run); - //Execute all gets (potentially in a pool) + // Execute all gets (potentially in a pool) if (myStorageSettings.getBundleBatchPoolSize() == 1) { getCalls.forEach(RetriableBundleTask::run); } else { @@ -392,7 +425,8 @@ public abstract class BaseTransactionProcessor { if (caughtEx.getException() != null) { IBase nextEntry = myVersionAdapter.addEntry(response); populateEntryWithOperationOutcome(caughtEx.getException(), nextEntry); - myVersionAdapter.setResponseStatus(nextEntry, toStatusString(caughtEx.getException().getStatusCode())); + myVersionAdapter.setResponseStatus( + nextEntry, toStatusString(caughtEx.getException().getStatusCode())); } } else { myVersionAdapter.addEntry(response, (IBase) nextResponseEntry); @@ -410,8 +444,11 @@ public abstract class BaseTransactionProcessor { myHapiTransactionService = theHapiTransactionService; } - private IBaseBundle processTransaction(final RequestDetails theRequestDetails, final IBaseBundle theRequest, - final String theActionName, boolean theNestedMode) { + private IBaseBundle processTransaction( + final RequestDetails theRequestDetails, + final IBaseBundle theRequest, + final String theActionName, + boolean theNestedMode) { validateDependencies(); String transactionType = myVersionAdapter.getBundleType(theRequest); @@ -421,21 +458,25 @@ public abstract class BaseTransactionProcessor { } if (transactionType == null) { - String message = "Transaction Bundle did not specify valid Bundle.type, assuming " + Bundle.BundleType.TRANSACTION.toCode(); + String message = "Transaction Bundle did not specify valid Bundle.type, assuming " + + Bundle.BundleType.TRANSACTION.toCode(); ourLog.warn(message); transactionType = org.hl7.fhir.r4.model.Bundle.BundleType.TRANSACTION.toCode(); } if (!org.hl7.fhir.r4.model.Bundle.BundleType.TRANSACTION.toCode().equals(transactionType)) { - throw new InvalidRequestException(Msg.code(527) + "Unable to process transaction where incoming Bundle.type = " + transactionType); + throw new InvalidRequestException( + Msg.code(527) + "Unable to process transaction where incoming Bundle.type = " + transactionType); } List requestEntries = myVersionAdapter.getEntries(theRequest); int numberOfEntries = requestEntries.size(); - if (myStorageSettings.getMaximumTransactionBundleSize() != null && numberOfEntries > myStorageSettings.getMaximumTransactionBundleSize()) { - throw new PayloadTooLargeException(Msg.code(528) + "Transaction Bundle Too large. Transaction bundle contains " + - numberOfEntries + - " which exceedes the maximum permitted transaction bundle size of " + myStorageSettings.getMaximumTransactionBundleSize()); + if (myStorageSettings.getMaximumTransactionBundleSize() != null + && numberOfEntries > myStorageSettings.getMaximumTransactionBundleSize()) { + throw new PayloadTooLargeException( + Msg.code(528) + "Transaction Bundle Too large. 
Transaction bundle contains " + numberOfEntries + + " which exceedes the maximum permitted transaction bundle size of " + + myStorageSettings.getMaximumTransactionBundleSize()); } ourLog.debug("Beginning {} with {} resources", theActionName, numberOfEntries); @@ -449,7 +490,10 @@ public abstract class BaseTransactionProcessor { IBase nextReqEntry = requestEntries.get(i); String verb = myVersionAdapter.getEntryRequestVerb(myContext, nextReqEntry); if (verb == null || !isValidVerb(verb)) { - throw new InvalidRequestException(Msg.code(529) + myContext.getLocalizer().getMessage(BaseStorageDao.class, "transactionEntryHasInvalidVerb", verb, i)); + throw new InvalidRequestException(Msg.code(529) + + myContext + .getLocalizer() + .getMessage(BaseStorageDao.class, "transactionEntryHasInvalidVerb", verb, i)); } } @@ -463,7 +507,8 @@ public abstract class BaseTransactionProcessor { * are saved in a two-phase way in order to deal with interdependencies, and * we want the GET processing to use the final indexing state */ - final IBaseBundle response = myVersionAdapter.createBundle(org.hl7.fhir.r4.model.Bundle.BundleType.TRANSACTIONRESPONSE.toCode()); + final IBaseBundle response = + myVersionAdapter.createBundle(org.hl7.fhir.r4.model.Bundle.BundleType.TRANSACTIONRESPONSE.toCode()); List getEntries = new ArrayList<>(); final IdentityHashMap originalRequestOrder = new IdentityHashMap<>(); for (int i = 0; i < requestEntries.size(); i++) { @@ -490,36 +535,46 @@ public abstract class BaseTransactionProcessor { requestEntries.sort(new TransactionSorter(placeholderIds)); // perform all writes - prepareThenExecuteTransactionWriteOperations(theRequestDetails, theActionName, - transactionDetails, transactionStopWatch, - response, originalRequestOrder, requestEntries); + prepareThenExecuteTransactionWriteOperations( + theRequestDetails, + theActionName, + transactionDetails, + transactionStopWatch, + response, + originalRequestOrder, + requestEntries); // perform all gets // (we do these last so that the gets happen on the final state of the DB; // see above note) - doTransactionReadOperations(theRequestDetails, response, - getEntries, originalRequestOrder, - transactionStopWatch, theNestedMode); + doTransactionReadOperations( + theRequestDetails, response, getEntries, originalRequestOrder, transactionStopWatch, theNestedMode); // Interceptor broadcast: JPA_PERFTRACE_INFO - if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO, myInterceptorBroadcaster, theRequestDetails)) { + if (CompositeInterceptorBroadcaster.hasHooks( + Pointcut.JPA_PERFTRACE_INFO, myInterceptorBroadcaster, theRequestDetails)) { String taskDurations = transactionStopWatch.formatTaskDurations(); StorageProcessingMessage message = new StorageProcessingMessage(); message.setMessage("Transaction timing:\n" + taskDurations); HookParams params = new HookParams() - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - .add(StorageProcessingMessage.class, message); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_INFO, params); + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(StorageProcessingMessage.class, message); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_INFO, params); } return response; } @SuppressWarnings("unchecked") - private void 
doTransactionReadOperations(final RequestDetails theRequestDetails, IBaseBundle theResponse, - List theGetEntries, IdentityHashMap theOriginalRequestOrder, - StopWatch theTransactionStopWatch, boolean theNestedMode) { + private void doTransactionReadOperations( + final RequestDetails theRequestDetails, + IBaseBundle theResponse, + List theGetEntries, + IdentityHashMap theOriginalRequestOrder, + StopWatch theTransactionStopWatch, + boolean theNestedMode) { if (theGetEntries.size() > 0) { theTransactionStopWatch.startTask("Process " + theGetEntries.size() + " GET entries"); @@ -528,22 +583,26 @@ public abstract class BaseTransactionProcessor { */ for (IBase nextReqEntry : theGetEntries) { if (theNestedMode) { - throw new InvalidRequestException(Msg.code(530) + "Can not invoke read operation on nested transaction"); + throw new InvalidRequestException( + Msg.code(530) + "Can not invoke read operation on nested transaction"); } if (!(theRequestDetails instanceof ServletRequestDetails)) { - throw new MethodNotAllowedException(Msg.code(531) + "Can not call transaction GET methods from this context"); + throw new MethodNotAllowedException( + Msg.code(531) + "Can not call transaction GET methods from this context"); } ServletRequestDetails srd = (ServletRequestDetails) theRequestDetails; Integer originalOrder = theOriginalRequestOrder.get(nextReqEntry); - IBase nextRespEntry = (IBase) myVersionAdapter.getEntries(theResponse).get(originalOrder); + IBase nextRespEntry = + (IBase) myVersionAdapter.getEntries(theResponse).get(originalOrder); ArrayListMultimap paramValues = ArrayListMultimap.create(); String transactionUrl = extractTransactionUrlOrThrowException(nextReqEntry, "GET"); - ServletSubRequestDetails requestDetails = ServletRequestUtil.getServletSubRequestDetails(srd, transactionUrl, paramValues); + ServletSubRequestDetails requestDetails = + ServletRequestUtil.getServletSubRequestDetails(srd, transactionUrl, paramValues); String url = requestDetails.getRequestPath(); @@ -553,13 +612,16 @@ public abstract class BaseTransactionProcessor { } if (isNotBlank(myVersionAdapter.getEntryRequestIfMatch(nextReqEntry))) { - requestDetails.addHeader(Constants.HEADER_IF_MATCH, myVersionAdapter.getEntryRequestIfMatch(nextReqEntry)); + requestDetails.addHeader( + Constants.HEADER_IF_MATCH, myVersionAdapter.getEntryRequestIfMatch(nextReqEntry)); } if (isNotBlank(myVersionAdapter.getEntryRequestIfNoneExist(nextReqEntry))) { - requestDetails.addHeader(Constants.HEADER_IF_NONE_EXIST, myVersionAdapter.getEntryRequestIfNoneExist(nextReqEntry)); + requestDetails.addHeader( + Constants.HEADER_IF_NONE_EXIST, myVersionAdapter.getEntryRequestIfNoneExist(nextReqEntry)); } if (isNotBlank(myVersionAdapter.getEntryRequestIfNoneMatch(nextReqEntry))) { - requestDetails.addHeader(Constants.HEADER_IF_NONE_MATCH, myVersionAdapter.getEntryRequestIfNoneMatch(nextReqEntry)); + requestDetails.addHeader( + Constants.HEADER_IF_NONE_MATCH, myVersionAdapter.getEntryRequestIfNoneMatch(nextReqEntry)); } Validate.isTrue(method instanceof BaseResourceReturningMethodBinding, "Unable to handle GET {}", url); @@ -568,13 +630,15 @@ public abstract class BaseTransactionProcessor { requestDetails.setRestOperationType(methodBinding.getRestOperationType()); IBaseResource resource = methodBinding.doInvokeServer(srd.getServer(), requestDetails); - if (paramValues.containsKey(Constants.PARAM_SUMMARY) || paramValues.containsKey(Constants.PARAM_CONTENT)) { + if (paramValues.containsKey(Constants.PARAM_SUMMARY) + || 
paramValues.containsKey(Constants.PARAM_CONTENT)) { resource = filterNestedBundle(requestDetails, resource); } myVersionAdapter.setResource(nextRespEntry, resource); myVersionAdapter.setResponseStatus(nextRespEntry, toStatusString(Constants.STATUS_HTTP_200_OK)); } catch (NotModifiedException e) { - myVersionAdapter.setResponseStatus(nextRespEntry, toStatusString(Constants.STATUS_HTTP_304_NOT_MODIFIED)); + myVersionAdapter.setResponseStatus( + nextRespEntry, toStatusString(Constants.STATUS_HTTP_304_NOT_MODIFIED)); } catch (BaseServerResponseException e) { ourLog.info("Failure processing transaction GET {}: {}", url, e.toString()); myVersionAdapter.setResponseStatus(nextRespEntry, toStatusString(e.getStatusCode())); @@ -595,15 +659,23 @@ public abstract class BaseTransactionProcessor { * database connections. */ @SuppressWarnings("unchecked") - private void prepareThenExecuteTransactionWriteOperations(RequestDetails theRequestDetails, String theActionName, - TransactionDetails theTransactionDetails, StopWatch theTransactionStopWatch, - IBaseBundle theResponse, IdentityHashMap theOriginalRequestOrder, - List theEntries) { + private void prepareThenExecuteTransactionWriteOperations( + RequestDetails theRequestDetails, + String theActionName, + TransactionDetails theTransactionDetails, + StopWatch theTransactionStopWatch, + IBaseBundle theResponse, + IdentityHashMap theOriginalRequestOrder, + List theEntries) { TransactionWriteOperationsDetails writeOperationsDetails = null; if (haveWriteOperationsHooks(theRequestDetails)) { writeOperationsDetails = buildWriteOperationsDetails(theEntries); - callWriteOperationsHook(Pointcut.STORAGE_TRANSACTION_WRITE_OPERATIONS_PRE, theRequestDetails, theTransactionDetails, writeOperationsDetails); + callWriteOperationsHook( + Pointcut.STORAGE_TRANSACTION_WRITE_OPERATIONS_PRE, + theRequestDetails, + theTransactionDetails, + writeOperationsDetails); } TransactionCallback txCallback = status -> { @@ -611,11 +683,17 @@ public abstract class BaseTransactionProcessor { final IdSubstitutionMap idSubstitutions = new IdSubstitutionMap(); final Map idToPersistedOutcome = new HashMap<>(); - EntriesToProcessMap retVal = doTransactionWriteOperations(theRequestDetails, theActionName, - theTransactionDetails, allIds, - idSubstitutions, idToPersistedOutcome, - theResponse, theOriginalRequestOrder, - theEntries, theTransactionStopWatch); + EntriesToProcessMap retVal = doTransactionWriteOperations( + theRequestDetails, + theActionName, + theTransactionDetails, + allIds, + idSubstitutions, + idToPersistedOutcome, + theResponse, + theOriginalRequestOrder, + theEntries, + theTransactionStopWatch); theTransactionStopWatch.startTask("Commit writes to database"); return retVal; @@ -624,12 +702,16 @@ public abstract class BaseTransactionProcessor { try { entriesToProcess = myHapiTransactionService - .withRequest(theRequestDetails) - .withTransactionDetails(theTransactionDetails) - .execute(txCallback); + .withRequest(theRequestDetails) + .withTransactionDetails(theTransactionDetails) + .execute(txCallback); } finally { if (haveWriteOperationsHooks(theRequestDetails)) { - callWriteOperationsHook(Pointcut.STORAGE_TRANSACTION_WRITE_OPERATIONS_POST, theRequestDetails, theTransactionDetails, writeOperationsDetails); + callWriteOperationsHook( + Pointcut.STORAGE_TRANSACTION_WRITE_OPERATIONS_POST, + theRequestDetails, + theTransactionDetails, + writeOperationsDetails); } } @@ -644,14 +726,22 @@ public abstract class BaseTransactionProcessor { } private boolean 
haveWriteOperationsHooks(RequestDetails theRequestDetails) { - return CompositeInterceptorBroadcaster.hasHooks(Pointcut.STORAGE_TRANSACTION_WRITE_OPERATIONS_PRE, myInterceptorBroadcaster, theRequestDetails) || - CompositeInterceptorBroadcaster.hasHooks(Pointcut.STORAGE_TRANSACTION_WRITE_OPERATIONS_POST, myInterceptorBroadcaster, theRequestDetails); + return CompositeInterceptorBroadcaster.hasHooks( + Pointcut.STORAGE_TRANSACTION_WRITE_OPERATIONS_PRE, myInterceptorBroadcaster, theRequestDetails) + || CompositeInterceptorBroadcaster.hasHooks( + Pointcut.STORAGE_TRANSACTION_WRITE_OPERATIONS_POST, + myInterceptorBroadcaster, + theRequestDetails); } - private void callWriteOperationsHook(Pointcut thePointcut, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails, TransactionWriteOperationsDetails theWriteOperationsDetails) { + private void callWriteOperationsHook( + Pointcut thePointcut, + RequestDetails theRequestDetails, + TransactionDetails theTransactionDetails, + TransactionWriteOperationsDetails theWriteOperationsDetails) { HookParams params = new HookParams() - .add(TransactionDetails.class, theTransactionDetails) - .add(TransactionWriteOperationsDetails.class, theWriteOperationsDetails); + .add(TransactionDetails.class, theTransactionDetails) + .add(TransactionWriteOperationsDetails.class, theWriteOperationsDetails); CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, thePointcut, params); } @@ -660,7 +750,7 @@ public abstract class BaseTransactionProcessor { TransactionWriteOperationsDetails writeOperationsDetails; List updateRequestUrls = new ArrayList<>(); List conditionalCreateRequestUrls = new ArrayList<>(); - //Extract + // Extract for (IBase nextEntry : theEntries) { String method = myVersionAdapter.getEntryRequestVerb(myContext, nextEntry); if ("PUT".equals(method)) { @@ -717,7 +807,8 @@ public abstract class BaseTransactionProcessor { * Searches for duplicate conditional creates and consolidates them. */ @SuppressWarnings("unchecked") - private void consolidateDuplicateConditionals(RequestDetails theRequestDetails, String theActionName, List theEntries) { + private void consolidateDuplicateConditionals( + RequestDetails theRequestDetails, String theActionName, List theEntries) { final Set keysWithNoFullUrl = new HashSet<>(); final HashMap keyToUuid = new HashMap<>(); @@ -744,7 +835,7 @@ public abstract class BaseTransactionProcessor { } break; - // Conditional CREATE + // Conditional CREATE case "POST": conditionalUrl = ifNoneExist; if (isNotBlank(ifNoneExist)) { @@ -767,24 +858,32 @@ public abstract class BaseTransactionProcessor { if (isBlank(entryFullUrl)) { if (isNotBlank(conditionalUrl)) { if (!keysWithNoFullUrl.add(key)) { - throw new InvalidRequestException( - Msg.code(2008) + "Unable to process " + theActionName + " - Request contains multiple anonymous entries (Bundle.entry.fullUrl not populated) with conditional URL: \"" + UrlUtil.sanitizeUrlPart(conditionalUrl) + "\". Does transaction request contain duplicates?"); + throw new InvalidRequestException(Msg.code(2008) + "Unable to process " + theActionName + + " - Request contains multiple anonymous entries (Bundle.entry.fullUrl not populated) with conditional URL: \"" + + UrlUtil.sanitizeUrlPart(conditionalUrl) + + "\". 
Does transaction request contain duplicates?"); } } } else { if (!keyToUuid.containsKey(key)) { keyToUuid.put(key, entryFullUrl); } else { - String msg = "Discarding transaction bundle entry " + originalIndex + " as it contained a duplicate conditional " + verb; + String msg = "Discarding transaction bundle entry " + originalIndex + + " as it contained a duplicate conditional " + verb; ourLog.info(msg); // Interceptor broadcast: JPA_PERFTRACE_INFO - if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_WARNING, myInterceptorBroadcaster, theRequestDetails)) { + if (CompositeInterceptorBroadcaster.hasHooks( + Pointcut.JPA_PERFTRACE_WARNING, myInterceptorBroadcaster, theRequestDetails)) { StorageProcessingMessage message = new StorageProcessingMessage().setMessage(msg); HookParams params = new HookParams() - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) - .add(StorageProcessingMessage.class, message); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_INFO, params); + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails) + .add(StorageProcessingMessage.class, message); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, + theRequestDetails, + Pointcut.JPA_PERFTRACE_INFO, + params); } theEntries.remove(index); @@ -802,17 +901,20 @@ public abstract class BaseTransactionProcessor { * Iterates over all entries, and if it finds any which have references which match the fullUrl of the entry that was consolidated out * replace them with our new consolidated UUID */ - private void replaceReferencesInEntriesWithConsolidatedUUID(List theEntries, String theEntryFullUrl, String existingUuid) { + private void replaceReferencesInEntriesWithConsolidatedUUID( + List theEntries, String theEntryFullUrl, String existingUuid) { for (IBase nextEntry : theEntries) { @SuppressWarnings("unchecked") IBaseResource nextResource = myVersionAdapter.getResource(nextEntry); if (nextResource != null) { - for (IBaseReference nextReference : myContext.newTerser().getAllPopulatedChildElementsOfType(nextResource, IBaseReference.class)) { + for (IBaseReference nextReference : + myContext.newTerser().getAllPopulatedChildElementsOfType(nextResource, IBaseReference.class)) { // We're interested in any references directly to the placeholder ID, but also // references that have a resource target that has the placeholder ID. 
String nextReferenceId = nextReference.getReferenceElement().getValue(); if (isBlank(nextReferenceId) && nextReference.getResource() != null) { - nextReferenceId = nextReference.getResource().getIdElement().getValue(); + nextReferenceId = + nextReference.getResource().getIdElement().getValue(); } if (theEntryFullUrl.equals(nextReferenceId)) { nextReference.setReference(existingUuid); @@ -831,9 +933,8 @@ public abstract class BaseTransactionProcessor { * @param theAllIds - set of all IIdType values * @return */ - private IIdType getNextResourceIdFromBaseResource(IBaseResource theBaseResource, - IBase theNextReqEntry, - Set theAllIds) { + private IIdType getNextResourceIdFromBaseResource( + IBaseResource theBaseResource, IBase theNextReqEntry, Set theAllIds) { IIdType nextResourceId = null; if (theBaseResource != null) { nextResourceId = theBaseResource.getIdElement(); @@ -852,8 +953,12 @@ public abstract class BaseTransactionProcessor { if (nextResourceId.hasIdPart() && !isPlaceholder(nextResourceId)) { int colonIndex = nextResourceId.getIdPart().indexOf(':'); if (colonIndex != -1) { - if (INVALID_PLACEHOLDER_PATTERN.matcher(nextResourceId.getIdPart()).matches()) { - throw new InvalidRequestException(Msg.code(533) + "Invalid placeholder ID found: " + nextResourceId.getIdPart() + " - Must be of the form 'urn:uuid:[uuid]' or 'urn:oid:[oid]'"); + if (INVALID_PLACEHOLDER_PATTERN + .matcher(nextResourceId.getIdPart()) + .matches()) { + throw new InvalidRequestException( + Msg.code(533) + "Invalid placeholder ID found: " + nextResourceId.getIdPart() + + " - Must be of the form 'urn:uuid:[uuid]' or 'urn:oid:[oid]'"); } } } @@ -868,15 +973,26 @@ public abstract class BaseTransactionProcessor { */ if (isPlaceholder(nextResourceId)) { if (!theAllIds.add(nextResourceId)) { - throw new InvalidRequestException(Msg.code(534) + myContext.getLocalizer().getMessage(BaseStorageDao.class, "transactionContainsMultipleWithDuplicateId", nextResourceId)); + throw new InvalidRequestException(Msg.code(534) + + myContext + .getLocalizer() + .getMessage( + BaseStorageDao.class, + "transactionContainsMultipleWithDuplicateId", + nextResourceId)); } } else if (nextResourceId.hasResourceType() && nextResourceId.hasIdPart()) { IIdType nextId = nextResourceId.toUnqualifiedVersionless(); if (!theAllIds.add(nextId)) { - throw new InvalidRequestException(Msg.code(535) + myContext.getLocalizer().getMessage(BaseStorageDao.class, "transactionContainsMultipleWithDuplicateId", nextId)); + throw new InvalidRequestException(Msg.code(535) + + myContext + .getLocalizer() + .getMessage( + BaseStorageDao.class, + "transactionContainsMultipleWithDuplicateId", + nextId)); } } - } return nextResourceId; @@ -886,18 +1002,23 @@ public abstract class BaseTransactionProcessor { * After pre-hooks have been called */ @SuppressWarnings({"unchecked", "rawtypes"}) - protected EntriesToProcessMap doTransactionWriteOperations(final RequestDetails theRequest, String theActionName, - TransactionDetails theTransactionDetails, Set theAllIds, - IdSubstitutionMap theIdSubstitutions, Map theIdToPersistedOutcome, - IBaseBundle theResponse, IdentityHashMap theOriginalRequestOrder, - List theEntries, StopWatch theTransactionStopWatch) { + protected EntriesToProcessMap doTransactionWriteOperations( + final RequestDetails theRequest, + String theActionName, + TransactionDetails theTransactionDetails, + Set theAllIds, + IdSubstitutionMap theIdSubstitutions, + Map theIdToPersistedOutcome, + IBaseBundle theResponse, + IdentityHashMap theOriginalRequestOrder, + List 
theEntries, + StopWatch theTransactionStopWatch) { // During a transaction, we don't execute hooks, instead, we execute them all post-transaction. theTransactionDetails.beginAcceptingDeferredInterceptorBroadcasts( - Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED, - Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, - Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED - ); + Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED, + Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, + Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED); try { Set deletedResources = new HashSet<>(); DeleteConflictList deleteConflicts = new DeleteConflictList(); @@ -930,9 +1051,11 @@ public abstract class BaseTransactionProcessor { String verb = myVersionAdapter.getEntryRequestVerb(myContext, nextReqEntry); String resourceType = res != null ? myContext.getResourceType(res) : null; Integer order = theOriginalRequestOrder.get(nextReqEntry); - IBase nextRespEntry = (IBase) myVersionAdapter.getEntries(theResponse).get(order); + IBase nextRespEntry = + (IBase) myVersionAdapter.getEntries(theResponse).get(order); - theTransactionStopWatch.startTask("Bundle.entry[" + i + "]: " + verb + " " + defaultString(resourceType)); + theTransactionStopWatch.startTask( + "Bundle.entry[" + i + "]: " + verb + " " + defaultString(resourceType)); if (res != null) { String previousResourceId = res.getIdElement().getValue(); @@ -966,7 +1089,15 @@ public abstract class BaseTransactionProcessor { res.setId(outcome.getId()); if (nextResourceId != null) { - handleTransactionCreateOrUpdateOutcome(theIdSubstitutions, theIdToPersistedOutcome, nextResourceId, outcome, nextRespEntry, resourceType, res, theRequest); + handleTransactionCreateOrUpdateOutcome( + theIdSubstitutions, + theIdToPersistedOutcome, + nextResourceId, + outcome, + nextRespEntry, + resourceType, + res, + theRequest); } entriesToProcess.put(nextRespEntry, outcome.getId(), nextRespEntry); theTransactionDetails.addResolvedResource(outcome.getId(), outcome::getResource); @@ -989,7 +1120,8 @@ public abstract class BaseTransactionProcessor { if (parts.getResourceId() != null) { IIdType deleteId = newIdType(parts.getResourceType(), parts.getResourceId()); if (!deletedResources.contains(deleteId.getValueAsString())) { - DaoMethodOutcome outcome = dao.delete(deleteId, deleteConflicts, theRequest, theTransactionDetails); + DaoMethodOutcome outcome = + dao.delete(deleteId, deleteConflicts, theRequest, theTransactionDetails); if (outcome.getEntity() != null) { deletedResources.add(deleteId.getValueAsString()); entriesToProcess.put(nextRespEntry, outcome.getId(), nextRespEntry); @@ -999,11 +1131,14 @@ public abstract class BaseTransactionProcessor { } else { String matchUrl = parts.getResourceType() + '?' 
+ parts.getParams(); matchUrl = performIdSubstitutionsInMatchUrl(theIdSubstitutions, matchUrl); - DeleteMethodOutcome deleteOutcome = dao.deleteByUrl(matchUrl, deleteConflicts, theRequest, theTransactionDetails); + DeleteMethodOutcome deleteOutcome = + dao.deleteByUrl(matchUrl, deleteConflicts, theRequest, theTransactionDetails); setConditionalUrlToBeValidatedLater(conditionalUrlToIdMap, matchUrl, deleteOutcome.getId()); List allDeleted = deleteOutcome.getDeletedEntities(); for (IBasePersistedResource deleted : allDeleted) { - deletedResources.add(deleted.getIdDt().toUnqualifiedVersionless().getValueAsString()); + deletedResources.add(deleted.getIdDt() + .toUnqualifiedVersionless() + .getValueAsString()); } if (allDeleted.isEmpty()) { status = Constants.STATUS_HTTP_204_NO_CONTENT; @@ -1029,7 +1164,8 @@ public abstract class BaseTransactionProcessor { if (isNotBlank(parts.getResourceId())) { String version = null; if (isNotBlank(myVersionAdapter.getEntryRequestIfMatch(nextReqEntry))) { - version = ParameterUtil.parseETagValue(myVersionAdapter.getEntryRequestIfMatch(nextReqEntry)); + version = ParameterUtil.parseETagValue( + myVersionAdapter.getEntryRequestIfMatch(nextReqEntry)); } res.setId(newIdType(parts.getResourceType(), parts.getResourceId(), version)); outcome = resourceDao.update(res, null, false, false, theRequest, theTransactionDetails); @@ -1044,7 +1180,8 @@ public abstract class BaseTransactionProcessor { matchUrl = parts.getResourceType(); } matchUrl = performIdSubstitutionsInMatchUrl(theIdSubstitutions, matchUrl); - outcome = resourceDao.update(res, matchUrl, false, false, theRequest, theTransactionDetails); + outcome = + resourceDao.update(res, matchUrl, false, false, theRequest, theTransactionDetails); setConditionalUrlToBeValidatedLater(conditionalUrlToIdMap, matchUrl, outcome.getId()); if (Boolean.TRUE.equals(outcome.getCreated())) { conditionalRequestUrls.put(matchUrl, res.getClass()); @@ -1052,7 +1189,8 @@ public abstract class BaseTransactionProcessor { } if (outcome.getCreated() == Boolean.FALSE - || (outcome.getCreated() == Boolean.TRUE && outcome.getId().getVersionIdPartAsLong() > 1)) { + || (outcome.getCreated() == Boolean.TRUE + && outcome.getId().getVersionIdPartAsLong() > 1)) { updatedEntities.add(outcome.getEntity()); if (outcome.getResource() != null) { updatedResources.add(outcome.getResource()); @@ -1060,8 +1198,15 @@ public abstract class BaseTransactionProcessor { } theTransactionDetails.addResolvedResource(outcome.getId(), outcome::getResource); - handleTransactionCreateOrUpdateOutcome(theIdSubstitutions, theIdToPersistedOutcome, nextResourceId, - outcome, nextRespEntry, resourceType, res, theRequest); + handleTransactionCreateOrUpdateOutcome( + theIdSubstitutions, + theIdToPersistedOutcome, + nextResourceId, + outcome, + nextRespEntry, + resourceType, + res, + theRequest); entriesToProcess.put(nextRespEntry, outcome.getId(), nextRespEntry); break; } @@ -1085,9 +1230,14 @@ public abstract class BaseTransactionProcessor { patchBody = toUtf8String(binary.getContent()); } contentType = binary.getContentType(); - patchType = PatchTypeEnum.forContentTypeOrThrowInvalidRequestException(myContext, contentType); - if (patchType == PatchTypeEnum.FHIR_PATCH_JSON || patchType == PatchTypeEnum.FHIR_PATCH_XML) { - String msg = myContext.getLocalizer().getMessage(BaseTransactionProcessor.class, "fhirPatchShouldNotUseBinaryResource"); + patchType = + PatchTypeEnum.forContentTypeOrThrowInvalidRequestException(myContext, contentType); + if (patchType == 
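// [Editor's illustrative sketch, not part of this patch] The conditional update above derives a
// version from the entry's If-Match ETag via ParameterUtil.parseETagValue. The helper below is a
// simplified stand-in (an assumption, not the HAPI implementation) showing how W/"3" reduces to
// the bare version id used for the versioned update.
class ETagExample {
    static String parseETagValue(String ifMatch) {
        String value = ifMatch.trim();
        if (value.startsWith("W/")) {
            value = value.substring(2); // drop the weak-validator prefix
        }
        if (value.length() >= 2 && value.startsWith("\"") && value.endsWith("\"")) {
            value = value.substring(1, value.length() - 1); // strip surrounding quotes
        }
        return value;
    }

    public static void main(String[] args) {
        System.out.println(parseETagValue("W/\"3\"")); // prints: 3
    }
}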
PatchTypeEnum.FHIR_PATCH_JSON + || patchType == PatchTypeEnum.FHIR_PATCH_XML) { + String msg = myContext + .getLocalizer() + .getMessage( + BaseTransactionProcessor.class, "fhirPatchShouldNotUseBinaryResource"); throw new InvalidRequestException(Msg.code(536) + msg); } } else if (res instanceof IBaseParameters) { @@ -1097,7 +1247,9 @@ public abstract class BaseTransactionProcessor { if (patchBodyParameters == null) { if (isBlank(patchBody)) { - String msg = myContext.getLocalizer().getMessage(BaseTransactionProcessor.class, "missingPatchBody"); + String msg = myContext + .getLocalizer() + .getMessage(BaseTransactionProcessor.class, "missingPatchBody"); throw new InvalidRequestException(Msg.code(537) + msg); } } @@ -1116,14 +1268,30 @@ public abstract class BaseTransactionProcessor { } } - DaoMethodOutcome outcome = dao.patchInTransaction(patchId, conditionalUrl, false, patchType, patchBody, patchBodyParameters, theRequest, theTransactionDetails); + DaoMethodOutcome outcome = dao.patchInTransaction( + patchId, + conditionalUrl, + false, + patchType, + patchBody, + patchBodyParameters, + theRequest, + theTransactionDetails); setConditionalUrlToBeValidatedLater(conditionalUrlToIdMap, matchUrl, outcome.getId()); updatedEntities.add(outcome.getEntity()); if (outcome.getResource() != null) { updatedResources.add(outcome.getResource()); } if (nextResourceId != null) { - handleTransactionCreateOrUpdateOutcome(theIdSubstitutions, theIdToPersistedOutcome, nextResourceId, outcome, nextRespEntry, resourceType, res, theRequest); + handleTransactionCreateOrUpdateOutcome( + theIdSubstitutions, + theIdToPersistedOutcome, + nextResourceId, + outcome, + nextRespEntry, + resourceType, + res, + theRequest); } entriesToProcess.put(nextRespEntry, outcome.getId(), nextRespEntry); @@ -1132,8 +1300,8 @@ public abstract class BaseTransactionProcessor { case "GET": break; default: - throw new InvalidRequestException(Msg.code(538) + "Unable to handle verb in transaction: " + verb); - + throw new InvalidRequestException( + Msg.code(538) + "Unable to handle verb in transaction: " + verb); } theTransactionStopWatch.endCurrentTask(); @@ -1148,17 +1316,23 @@ public abstract class BaseTransactionProcessor { checkForDeleteConflicts(deleteConflicts, deletedResources, updatedResources); theIdToPersistedOutcome.entrySet().forEach(idAndOutcome -> { - theTransactionDetails.addResolvedResourceId(idAndOutcome.getKey(), idAndOutcome.getValue().getPersistentId()); + theTransactionDetails.addResolvedResourceId( + idAndOutcome.getKey(), idAndOutcome.getValue().getPersistentId()); }); /* * Perform ID substitutions and then index each resource we have saved */ - resolveReferencesThenSaveAndIndexResources(theRequest, theTransactionDetails, - theIdSubstitutions, theIdToPersistedOutcome, - theTransactionStopWatch, entriesToProcess, - nonUpdatedEntities, updatedEntities); + resolveReferencesThenSaveAndIndexResources( + theRequest, + theTransactionDetails, + theIdSubstitutions, + theIdToPersistedOutcome, + theTransactionStopWatch, + entriesToProcess, + nonUpdatedEntities, + updatedEntities); theTransactionStopWatch.endCurrentTask(); @@ -1177,12 +1351,14 @@ public abstract class BaseTransactionProcessor { theTransactionStopWatch.startTask("Check for conflicts in conditional resources"); } if (!myStorageSettings.isMassIngestionMode()) { - validateNoDuplicates(theRequest, theActionName, conditionalRequestUrls, theIdToPersistedOutcome.values()); + validateNoDuplicates( + theRequest, theActionName, conditionalRequestUrls, 
theIdToPersistedOutcome.values()); } theTransactionStopWatch.endCurrentTask(); if (conditionalUrlToIdMap.size() > 0) { - theTransactionStopWatch.startTask("Check that all conditionally created/updated entities actually match their conditionals."); + theTransactionStopWatch.startTask( + "Check that all conditionally created/updated entities actually match their conditionals."); } if (!myStorageSettings.isMassIngestionMode()) { @@ -1193,29 +1369,34 @@ public abstract class BaseTransactionProcessor { for (IIdType next : theAllIds) { IIdType replacement = theIdSubstitutions.getForSource(next); if (replacement != null && !replacement.equals(next)) { - ourLog.debug("Placeholder resource ID \"{}\" was replaced with permanent ID \"{}\"", next, replacement); + ourLog.debug( + "Placeholder resource ID \"{}\" was replaced with permanent ID \"{}\"", next, replacement); } } - ListMultimap deferredBroadcastEvents = theTransactionDetails.endAcceptingDeferredInterceptorBroadcasts(); + ListMultimap deferredBroadcastEvents = + theTransactionDetails.endAcceptingDeferredInterceptorBroadcasts(); for (Map.Entry nextEntry : deferredBroadcastEvents.entries()) { Pointcut nextPointcut = nextEntry.getKey(); HookParams nextParams = nextEntry.getValue(); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, nextPointcut, nextParams); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, theRequest, nextPointcut, nextParams); } - DeferredInterceptorBroadcasts deferredInterceptorBroadcasts = new DeferredInterceptorBroadcasts(deferredBroadcastEvents); + DeferredInterceptorBroadcasts deferredInterceptorBroadcasts = + new DeferredInterceptorBroadcasts(deferredBroadcastEvents); HookParams params = new HookParams() - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest) - .add(DeferredInterceptorBroadcasts.class, deferredInterceptorBroadcasts) - .add(TransactionDetails.class, theTransactionDetails) - .add(IBaseBundle.class, theResponse); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_TRANSACTION_PROCESSED, params); + .add(RequestDetails.class, theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest) + .add(DeferredInterceptorBroadcasts.class, deferredInterceptorBroadcasts) + .add(TransactionDetails.class, theTransactionDetails) + .add(IBaseBundle.class, theResponse); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_TRANSACTION_PROCESSED, params); theTransactionDetails.deferredBroadcastProcessingFinished(); - //finishedCallingDeferredInterceptorBroadcasts + // finishedCallingDeferredInterceptorBroadcasts return entriesToProcess; @@ -1236,7 +1417,8 @@ public abstract class BaseTransactionProcessor { if (myContext.getVersion().getVersion().isOlderThan(FhirVersionEnum.R4)) { return false; } - if (theTransactionDetails.hasResolvedResourceId(theId) && !theTransactionDetails.isResolvedResourceIdEmpty(theId)) { + if (theTransactionDetails.hasResolvedResourceId(theId) + && !theTransactionDetails.isResolvedResourceIdEmpty(theId)) { return false; } if (theId != null && theId.getValue() != null) { @@ -1245,15 +1427,19 @@ public abstract class BaseTransactionProcessor { return true; } - private boolean shouldSwapBinaryToActualResource(IBaseResource theResource, String theResourceType, IIdType theNextResourceId) { - if ("Binary".equalsIgnoreCase(theResourceType) && theNextResourceId.getResourceType() != null && 
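// [Editor's illustrative sketch, not part of this patch] This hunk defers interceptor hook
// invocations during the transaction and replays them once the write phase is finished
// (begin/endAcceptingDeferredInterceptorBroadcasts). The minimal queue below shows the same
// defer-then-flush shape with plain Java; DeferredEvents and Event are hypothetical names.
import java.util.ArrayList;
import java.util.List;

class DeferredEvents {
    record Event(String pointcut, Object params) {}

    private final List<Event> deferred = new ArrayList<>();
    private boolean accepting;

    void beginAccepting() {
        accepting = true;
    }

    void raise(String pointcut, Object params) {
        if (accepting) {
            deferred.add(new Event(pointcut, params)); // queued instead of broadcast immediately
        }
    }

    List<Event> endAccepting() {
        accepting = false;
        return List.copyOf(deferred); // the caller replays these after the transaction work is done
    }
}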
!theNextResourceId.getResourceType().equalsIgnoreCase("Binary")) { + private boolean shouldSwapBinaryToActualResource( + IBaseResource theResource, String theResourceType, IIdType theNextResourceId) { + if ("Binary".equalsIgnoreCase(theResourceType) + && theNextResourceId.getResourceType() != null + && !theNextResourceId.getResourceType().equalsIgnoreCase("Binary")) { return true; } else { return false; } } - private void setConditionalUrlToBeValidatedLater(Map theConditionalUrlToIdMap, String theMatchUrl, IIdType theId) { + private void setConditionalUrlToBeValidatedLater( + Map theConditionalUrlToIdMap, String theMatchUrl, IIdType theId) { if (!StringUtils.isBlank(theMatchUrl)) { theConditionalUrlToIdMap.put(theMatchUrl, theId); } @@ -1263,25 +1449,37 @@ public abstract class BaseTransactionProcessor { * After transaction processing and resolution of indexes and references, we want to validate that the resources that were stored _actually_ * match the conditional URLs that they were brought in on. */ - private void validateAllInsertsMatchTheirConditionalUrls(Map theIdToPersistedOutcome, Map conditionalUrlToIdMap, RequestDetails theRequest) { + private void validateAllInsertsMatchTheirConditionalUrls( + Map theIdToPersistedOutcome, + Map conditionalUrlToIdMap, + RequestDetails theRequest) { conditionalUrlToIdMap.entrySet().stream() - .filter(entry -> entry.getKey() != null) - .forEach(entry -> { - String matchUrl = entry.getKey(); - IIdType value = entry.getValue(); - DaoMethodOutcome daoMethodOutcome = theIdToPersistedOutcome.get(value); - if (daoMethodOutcome != null && !daoMethodOutcome.isNop() && daoMethodOutcome.getResource() != null) { - InMemoryMatchResult match = mySearchParamMatcher.match(matchUrl, daoMethodOutcome.getResource(), theRequest); - if (ourLog.isDebugEnabled()) { - ourLog.debug("Checking conditional URL [{}] against resource with ID [{}]: Supported?:[{}], Matched?:[{}]", matchUrl, value, match.supported(), match.matched()); - } - if (match.supported()) { - if (!match.matched()) { - throw new PreconditionFailedException(Msg.code(539) + "Invalid conditional URL \"" + matchUrl + "\". The given resource is not matched by this URL."); + .filter(entry -> entry.getKey() != null) + .forEach(entry -> { + String matchUrl = entry.getKey(); + IIdType value = entry.getValue(); + DaoMethodOutcome daoMethodOutcome = theIdToPersistedOutcome.get(value); + if (daoMethodOutcome != null + && !daoMethodOutcome.isNop() + && daoMethodOutcome.getResource() != null) { + InMemoryMatchResult match = + mySearchParamMatcher.match(matchUrl, daoMethodOutcome.getResource(), theRequest); + if (ourLog.isDebugEnabled()) { + ourLog.debug( + "Checking conditional URL [{}] against resource with ID [{}]: Supported?:[{}], Matched?:[{}]", + matchUrl, + value, + match.supported(), + match.matched()); + } + if (match.supported()) { + if (!match.matched()) { + throw new PreconditionFailedException(Msg.code(539) + "Invalid conditional URL \"" + + matchUrl + "\". 
The given resource is not matched by this URL."); + } } } - } - }); + }); } /** @@ -1291,9 +1489,10 @@ public abstract class BaseTransactionProcessor { * @param theDeletedResources - set of deleted resources * @param theUpdatedResources - list of updated resources */ - private void checkForDeleteConflicts(DeleteConflictList theDeleteConflicts, - Set theDeletedResources, - List theUpdatedResources) { + private void checkForDeleteConflicts( + DeleteConflictList theDeleteConflicts, + Set theDeletedResources, + List theUpdatedResources) { for (Iterator iter = theDeleteConflicts.iterator(); iter.hasNext(); ) { DeleteConflict nextDeleteConflict = iter.next(); @@ -1302,7 +1501,8 @@ public abstract class BaseTransactionProcessor { * Resource/B has a reference to it. We'll ignore that conflict though * if it turns out we're also deleting Resource/B in this transaction. */ - if (theDeletedResources.contains(nextDeleteConflict.getSourceId().toUnqualifiedVersionless().getValue())) { + if (theDeletedResources.contains( + nextDeleteConflict.getSourceId().toUnqualifiedVersionless().getValue())) { iter.remove(); continue; } @@ -1313,17 +1513,22 @@ public abstract class BaseTransactionProcessor { * in this transaction, and the updated version of it has no references * to Resource/A any more. */ - String sourceId = nextDeleteConflict.getSourceId().toUnqualifiedVersionless().getValue(); - String targetId = nextDeleteConflict.getTargetId().toUnqualifiedVersionless().getValue(); - Optional updatedSource = theUpdatedResources - .stream() - .filter(t -> sourceId.equals(t.getIdElement().toUnqualifiedVersionless().getValue())) - .findFirst(); + String sourceId = + nextDeleteConflict.getSourceId().toUnqualifiedVersionless().getValue(); + String targetId = + nextDeleteConflict.getTargetId().toUnqualifiedVersionless().getValue(); + Optional updatedSource = theUpdatedResources.stream() + .filter(t -> sourceId.equals( + t.getIdElement().toUnqualifiedVersionless().getValue())) + .findFirst(); if (updatedSource.isPresent()) { - List referencesInSource = myContext.newTerser().getAllResourceReferences(updatedSource.get()); - boolean sourceStillReferencesTarget = referencesInSource - .stream() - .anyMatch(t -> targetId.equals(t.getResourceReference().getReferenceElement().toUnqualifiedVersionless().getValue())); + List referencesInSource = + myContext.newTerser().getAllResourceReferences(updatedSource.get()); + boolean sourceStillReferencesTarget = referencesInSource.stream() + .anyMatch(t -> targetId.equals(t.getResourceReference() + .getReferenceElement() + .toUnqualifiedVersionless() + .getValue())); if (!sourceStillReferencesTarget) { iter.remove(); } @@ -1354,10 +1559,15 @@ public abstract class BaseTransactionProcessor { * pass because it's too complex to try and insert the auto-versioned references and still * account for NOPs, so we block NOPs in that pass. 
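// [Editor's illustrative sketch, not part of this patch] checkForDeleteConflicts above drops a
// conflict when the referencing resource is itself deleted in the same transaction, or when its
// updated version no longer points at the delete target. The plain-Java filter below mirrors
// those two rules; Conflict and the map/set shapes are hypothetical simplifications.
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;

class DeleteConflictFilterExample {
    record Conflict(String sourceId, String targetId) {}

    static List<Conflict> filter(
            List<Conflict> conflicts,
            Set<String> deletedIds,
            Map<String, Set<String>> updatedSourceReferences) {
        List<Conflict> remaining = new ArrayList<>();
        for (Conflict c : conflicts) {
            if (deletedIds.contains(c.sourceId())) {
                continue; // rule 1: the referencing source is also being deleted
            }
            Set<String> refs = updatedSourceReferences.get(c.sourceId());
            if (refs != null && !refs.contains(c.targetId())) {
                continue; // rule 2: the updated source no longer references the target
            }
            remaining.add(c); // still a genuine conflict
        }
        return remaining;
    }
}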
*/ - private void resolveReferencesThenSaveAndIndexResources(RequestDetails theRequest, TransactionDetails theTransactionDetails, - IdSubstitutionMap theIdSubstitutions, Map theIdToPersistedOutcome, - StopWatch theTransactionStopWatch, EntriesToProcessMap entriesToProcess, - Set nonUpdatedEntities, Set updatedEntities) { + private void resolveReferencesThenSaveAndIndexResources( + RequestDetails theRequest, + TransactionDetails theTransactionDetails, + IdSubstitutionMap theIdSubstitutions, + Map theIdToPersistedOutcome, + StopWatch theTransactionStopWatch, + EntriesToProcessMap entriesToProcess, + Set nonUpdatedEntities, + Set updatedEntities) { FhirTerser terser = myContext.newTerser(); theTransactionStopWatch.startTask("Index " + theIdToPersistedOutcome.size() + " resources"); IdentityHashMap> deferredIndexesForAutoVersioning = null; @@ -1365,7 +1575,10 @@ public abstract class BaseTransactionProcessor { for (DaoMethodOutcome nextOutcome : theIdToPersistedOutcome.values()) { if (i++ % 250 == 0) { - ourLog.debug("Have indexed {} entities out of {} in transaction", i, theIdToPersistedOutcome.values().size()); + ourLog.debug( + "Have indexed {} entities out of {} in transaction", + i, + theIdToPersistedOutcome.values().size()); } if (nextOutcome.isNop()) { @@ -1377,15 +1590,22 @@ public abstract class BaseTransactionProcessor { continue; } - Set referencesToAutoVersion = BaseStorageDao.extractReferencesToAutoVersion(myContext, myStorageSettings, nextResource); + Set referencesToAutoVersion = + BaseStorageDao.extractReferencesToAutoVersion(myContext, myStorageSettings, nextResource); if (referencesToAutoVersion.isEmpty()) { // no references to autoversion - we can do the resolve and save now - resolveReferencesThenSaveAndIndexResource(theRequest, theTransactionDetails, - theIdSubstitutions, theIdToPersistedOutcome, - entriesToProcess, nonUpdatedEntities, - updatedEntities, terser, - nextOutcome, nextResource, - referencesToAutoVersion); // this is empty + resolveReferencesThenSaveAndIndexResource( + theRequest, + theTransactionDetails, + theIdSubstitutions, + theIdToPersistedOutcome, + entriesToProcess, + nonUpdatedEntities, + updatedEntities, + terser, + nextOutcome, + nextResource, + referencesToAutoVersion); // this is empty } else { // we have autoversioned things to defer until later if (deferredIndexesForAutoVersioning == null) { @@ -1397,28 +1617,40 @@ public abstract class BaseTransactionProcessor { // If we have any resources we'll be auto-versioning, index these next if (deferredIndexesForAutoVersioning != null) { - for (Map.Entry> nextEntry : deferredIndexesForAutoVersioning.entrySet()) { + for (Map.Entry> nextEntry : + deferredIndexesForAutoVersioning.entrySet()) { DaoMethodOutcome nextOutcome = nextEntry.getKey(); Set referencesToAutoVersion = nextEntry.getValue(); IBaseResource nextResource = nextOutcome.getResource(); - - resolveReferencesThenSaveAndIndexResource(theRequest, theTransactionDetails, - theIdSubstitutions, theIdToPersistedOutcome, - entriesToProcess, nonUpdatedEntities, - updatedEntities, terser, - nextOutcome, nextResource, - referencesToAutoVersion); + resolveReferencesThenSaveAndIndexResource( + theRequest, + theTransactionDetails, + theIdSubstitutions, + theIdToPersistedOutcome, + entriesToProcess, + nonUpdatedEntities, + updatedEntities, + terser, + nextOutcome, + nextResource, + referencesToAutoVersion); } } } - private void resolveReferencesThenSaveAndIndexResource(RequestDetails theRequest, TransactionDetails theTransactionDetails, - IdSubstitutionMap 
theIdSubstitutions, Map theIdToPersistedOutcome, - EntriesToProcessMap entriesToProcess, Set nonUpdatedEntities, - Set updatedEntities, FhirTerser terser, - DaoMethodOutcome theDaoMethodOutcome, IBaseResource theResource, - Set theReferencesToAutoVersion) { + private void resolveReferencesThenSaveAndIndexResource( + RequestDetails theRequest, + TransactionDetails theTransactionDetails, + IdSubstitutionMap theIdSubstitutions, + Map theIdToPersistedOutcome, + EntriesToProcessMap entriesToProcess, + Set nonUpdatedEntities, + Set updatedEntities, + FhirTerser terser, + DaoMethodOutcome theDaoMethodOutcome, + IBaseResource theResource, + Set theReferencesToAutoVersion) { // References List allRefs = terser.getAllResourceReferences(theResource); for (ResourceReferenceInfo nextRef : allRefs) { @@ -1434,7 +1666,8 @@ public abstract class BaseTransactionProcessor { } else if (theIdSubstitutions.containsTarget(targetId)) { newId = targetId; } else { - throw new InternalErrorException(Msg.code(540) + "References by resource with no reference ID are not supported in DAO layer"); + throw new InternalErrorException(Msg.code(540) + + "References by resource with no reference ID are not supported in DAO layer"); } } else { continue; @@ -1457,19 +1690,24 @@ public abstract class BaseTransactionProcessor { } } } else if (nextId.getValue().startsWith("urn:")) { - throw new InvalidRequestException(Msg.code(541) + "Unable to satisfy placeholder ID " + nextId.getValue() + " found in element named '" + nextRef.getName() + "' within resource of type: " + theResource.getIdElement().getResourceType()); + throw new InvalidRequestException( + Msg.code(541) + "Unable to satisfy placeholder ID " + nextId.getValue() + + " found in element named '" + nextRef.getName() + "' within resource of type: " + + theResource.getIdElement().getResourceType()); } else { // get a map of // existing ids -> PID (for resources that exist in the DB) // should this be allPartitions? 
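// [Editor's illustrative sketch, not part of this patch] The reference loop above rewrites
// references whose placeholder target id was replaced by a server-assigned id earlier in the
// transaction. The helper below shows the substitution idea with plain strings; the real code
// works on IBaseReference/IIdType and also handles the "no reference id" error case.
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

class ReferenceRewriteExample {
    static List<String> rewrite(List<String> references, Map<String, String> substitutions) {
        List<String> out = new ArrayList<>();
        for (String ref : references) {
            // Substituted ids (e.g. a urn:uuid placeholder mapped to Patient/123) are swapped in;
            // anything without a known substitution passes through unchanged.
            out.add(substitutions.getOrDefault(ref, ref));
        }
        return out;
    }
}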
- ResourcePersistentIdMap resourceVersionMap = myResourceVersionSvc.getLatestVersionIdsForResourceIds(RequestPartitionId.allPartitions(), - theReferencesToAutoVersion.stream() - .map(IBaseReference::getReferenceElement).collect(Collectors.toList())); + ResourcePersistentIdMap resourceVersionMap = myResourceVersionSvc.getLatestVersionIdsForResourceIds( + RequestPartitionId.allPartitions(), + theReferencesToAutoVersion.stream() + .map(IBaseReference::getReferenceElement) + .collect(Collectors.toList())); for (IBaseReference baseRef : theReferencesToAutoVersion) { IIdType id = baseRef.getReferenceElement(); if (!resourceVersionMap.containsKey(id) - && myStorageSettings.isAutoCreatePlaceholderReferenceTargets()) { + && myStorageSettings.isAutoCreatePlaceholderReferenceTargets()) { // not in the db, but autocreateplaceholders is true // so the version we'll set is "1" (since it will be // created later) @@ -1478,8 +1716,7 @@ public abstract class BaseTransactionProcessor { } else { // we will add the looked up info to the transaction // for later - theTransactionDetails.addResolvedResourceId(id, - resourceVersionMap.getResourcePersistentId(id)); + theTransactionDetails.addResolvedResourceId(id, resourceVersionMap.getResourcePersistentId(id)); } } @@ -1496,7 +1733,8 @@ public abstract class BaseTransactionProcessor { } // URIs - Class> uriType = (Class>) myContext.getElementDefinition("uri").getImplementingClass(); + Class> uriType = (Class>) + myContext.getElementDefinition("uri").getImplementingClass(); List> allUris = terser.getAllPopulatedChildElementsOfType(theResource, uriType); for (IPrimitiveType nextRef : allUris) { if (nextRef instanceof IIdType) { @@ -1527,11 +1765,30 @@ public abstract class BaseTransactionProcessor { boolean forceUpdateVersion = !theReferencesToAutoVersion.isEmpty(); String matchUrl = theDaoMethodOutcome.getMatchUrl(); RestOperationTypeEnum operationType = theDaoMethodOutcome.getOperationType(); - DaoMethodOutcome daoMethodOutcome = jpaDao.updateInternal(theRequest, theResource, matchUrl, true, forceUpdateVersion, theDaoMethodOutcome.getEntity(), theResource.getIdElement(), theDaoMethodOutcome.getPreviousResource(), operationType, theTransactionDetails); + DaoMethodOutcome daoMethodOutcome = jpaDao.updateInternal( + theRequest, + theResource, + matchUrl, + true, + forceUpdateVersion, + theDaoMethodOutcome.getEntity(), + theResource.getIdElement(), + theDaoMethodOutcome.getPreviousResource(), + operationType, + theTransactionDetails); updateOutcome = daoMethodOutcome.getEntity(); theDaoMethodOutcome = daoMethodOutcome; } else if (!nonUpdatedEntities.contains(theDaoMethodOutcome.getId())) { - updateOutcome = jpaDao.updateEntity(theRequest, theResource, theDaoMethodOutcome.getEntity(), deletedTimestampOrNull, true, false, theTransactionDetails, false, true); + updateOutcome = jpaDao.updateEntity( + theRequest, + theResource, + theDaoMethodOutcome.getEntity(), + deletedTimestampOrNull, + true, + false, + theTransactionDetails, + false, + true); } // Make sure we reflect the actual final version for the resource. 
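// [Editor's illustrative sketch, not part of this patch] For references that must carry a
// version, the code above looks up the latest version ids in bulk and falls back to version 1
// for targets that will be auto-created as placeholders. The method below sketches that decision
// with plain types; versionFor, the map shape, and the "/_history/" formatting are assumptions,
// not the HAPI API.
import java.util.Map;
import java.util.Optional;

class AutoVersionExample {
    static Optional<String> versionFor(
            String referencedId, Map<String, Long> latestVersionById, boolean autoCreatePlaceholders) {
        Long latest = latestVersionById.get(referencedId);
        if (latest != null) {
            return Optional.of(referencedId + "/_history/" + latest); // use the known latest version
        }
        // Not in the database yet: if placeholder creation is on, it will be created at version 1.
        return autoCreatePlaceholders
                ? Optional.of(referencedId + "/_history/1")
                : Optional.empty();
    }
}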
@@ -1554,30 +1811,39 @@ public abstract class BaseTransactionProcessor { IBase responseEntry = entriesToProcess.getResponseBundleEntryWithVersionlessComparison(newId); myVersionAdapter.setResponseOutcome(responseEntry, theDaoMethodOutcome.getOperationOutcome()); } - } } - private void addRollbackReferenceRestore(TransactionDetails theTransactionDetails, IBaseReference resourceReference) { + private void addRollbackReferenceRestore( + TransactionDetails theTransactionDetails, IBaseReference resourceReference) { String existingValue = resourceReference.getReferenceElement().getValue(); theTransactionDetails.addRollbackUndoAction(() -> resourceReference.setReference(existingValue)); } - private void validateNoDuplicates(RequestDetails theRequest, String theActionName, Map> conditionalRequestUrls, Collection thePersistedOutcomes) { + private void validateNoDuplicates( + RequestDetails theRequest, + String theActionName, + Map> conditionalRequestUrls, + Collection thePersistedOutcomes) { - IdentityHashMap resourceToIndexedParams = new IdentityHashMap<>(thePersistedOutcomes.size()); - thePersistedOutcomes - .stream() - .filter(t -> !t.isNop()) - .filter(t -> t.getEntity() instanceof ResourceTable)//N.B. GGG: This validation never occurs for mongo, as nothing is a ResourceTable. - .filter(t -> t.getEntity().getDeleted() == null) - .filter(t -> t.getResource() != null) - .forEach(t -> resourceToIndexedParams.put(t.getResource(), new ResourceIndexedSearchParams((ResourceTable) t.getEntity()))); + IdentityHashMap resourceToIndexedParams = + new IdentityHashMap<>(thePersistedOutcomes.size()); + thePersistedOutcomes.stream() + .filter(t -> !t.isNop()) + .filter(t -> t.getEntity() + instanceof ResourceTable) // N.B. GGG: This validation never occurs for mongo, as nothing is a + // ResourceTable. + .filter(t -> t.getEntity().getDeleted() == null) + .filter(t -> t.getResource() != null) + .forEach(t -> resourceToIndexedParams.put( + t.getResource(), new ResourceIndexedSearchParams((ResourceTable) t.getEntity()))); for (Map.Entry> nextEntry : conditionalRequestUrls.entrySet()) { String matchUrl = nextEntry.getKey(); if (isNotBlank(matchUrl)) { - if (matchUrl.startsWith("?") || (!matchUrl.contains("?") && UNQUALIFIED_MATCH_URL_START.matcher(matchUrl).find())) { + if (matchUrl.startsWith("?") + || (!matchUrl.contains("?") + && UNQUALIFIED_MATCH_URL_START.matcher(matchUrl).find())) { StringBuilder b = new StringBuilder(); b.append(myContext.getResourceType(nextEntry.getValue())); if (!matchUrl.startsWith("?")) { @@ -1592,7 +1858,8 @@ public abstract class BaseTransactionProcessor { } int counter = 0; - for (Map.Entry entries : resourceToIndexedParams.entrySet()) { + for (Map.Entry entries : + resourceToIndexedParams.entrySet()) { ResourceIndexedSearchParams indexedParams = entries.getValue(); IBaseResource resource = entries.getKey(); @@ -1601,10 +1868,14 @@ public abstract class BaseTransactionProcessor { continue; } - if (myInMemoryResourceMatcher.match(matchUrl, resource, indexedParams, theRequest).matched()) { + if (myInMemoryResourceMatcher + .match(matchUrl, resource, indexedParams, theRequest) + .matched()) { counter++; if (counter > 1) { - throw new InvalidRequestException(Msg.code(542) + "Unable to process " + theActionName + " - Request would cause multiple resources to match URL: \"" + matchUrl + "\". 
Does transaction request contain duplicates?"); + throw new InvalidRequestException(Msg.code(542) + "Unable to process " + theActionName + + " - Request would cause multiple resources to match URL: \"" + matchUrl + + "\". Does transaction request contain duplicates?"); } } } @@ -1616,7 +1887,9 @@ public abstract class BaseTransactionProcessor { private void validateResourcePresent(IBaseResource theResource, Integer theOrder, String theVerb) { if (theResource == null) { - String msg = myContext.getLocalizer().getMessage(BaseTransactionProcessor.class, "missingMandatoryResource", theVerb, theOrder); + String msg = myContext + .getLocalizer() + .getMessage(BaseTransactionProcessor.class, "missingMandatoryResource", theVerb, theOrder); throw new InvalidRequestException(Msg.code(543) + msg); } } @@ -1640,7 +1913,9 @@ public abstract class BaseTransactionProcessor { if (dao == null) { Set types = new TreeSet<>(myDaoRegistry.getRegisteredDaoTypes()); String type = myContext.getResourceType(theClass); - String msg = myContext.getLocalizer().getMessage(BaseTransactionProcessor.class, "unsupportedResourceType", type, types.toString()); + String msg = myContext + .getLocalizer() + .getMessage(BaseTransactionProcessor.class, "unsupportedResourceType", type, types.toString()); throw new InvalidRequestException(Msg.code(544) + msg); } return dao; @@ -1666,7 +1941,8 @@ public abstract class BaseTransactionProcessor { if (!isValidResourceTypeUrl(url)) { ourLog.debug("Invalid url. Should begin with a resource type: {}", url); - String msg = myContext.getLocalizer().getMessage(BaseStorageDao.class, "transactionInvalidUrl", theVerb, url); + String msg = + myContext.getLocalizer().getMessage(BaseStorageDao.class, "transactionInvalidUrl", theVerb, url); throw new InvalidRequestException(Msg.code(2006) + msg); } return url; @@ -1709,7 +1985,8 @@ public abstract class BaseTransactionProcessor { private String extractTransactionUrlOrThrowException(IBase nextEntry, String verb) { String url = myVersionAdapter.getEntryRequestUrl(nextEntry); if (isBlank(url)) { - throw new InvalidRequestException(Msg.code(545) + myContext.getLocalizer().getMessage(BaseStorageDao.class, "transactionMissingUrl", verb)); + throw new InvalidRequestException(Msg.code(545) + + myContext.getLocalizer().getMessage(BaseStorageDao.class, "transactionMissingUrl", verb)); } return url; } @@ -1719,7 +1996,8 @@ public abstract class BaseTransactionProcessor { try { resType = myContext.getResourceDefinition(theParts.getResourceType()); } catch (DataFormatException e) { - String msg = myContext.getLocalizer().getMessage(BaseStorageDao.class, "transactionInvalidUrl", theVerb, theUrl); + String msg = + myContext.getLocalizer().getMessage(BaseStorageDao.class, "transactionInvalidUrl", theVerb, theUrl); throw new InvalidRequestException(Msg.code(546) + msg); } IFhirResourceDao dao = null; @@ -1727,7 +2005,8 @@ public abstract class BaseTransactionProcessor { dao = myDaoRegistry.getResourceDao(resType.getImplementingClass()); } if (dao == null) { - String msg = myContext.getLocalizer().getMessage(BaseStorageDao.class, "transactionInvalidUrl", theVerb, theUrl); + String msg = + myContext.getLocalizer().getMessage(BaseStorageDao.class, "transactionInvalidUrl", theVerb, theUrl); throw new InvalidRequestException(Msg.code(547) + msg); } @@ -1762,7 +2041,7 @@ public abstract class BaseTransactionProcessor { * Process any PATCH interactions * Process any GET interactions */ - //@formatter:off + // @formatter:off public class TransactionSorter implements 
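// [Editor's illustrative sketch, not part of this patch] validateNoDuplicates, completed just
// above, fails the transaction when more than one resource written in it would satisfy the same
// conditional URL. The generic check below captures that rule; the predicate stands in for the
// in-memory matcher and IllegalArgumentException stands in for InvalidRequestException.
import java.util.List;
import java.util.function.Predicate;

class DuplicateMatchExample {
    static <T> void assertAtMostOneMatch(String matchUrl, List<T> resources, Predicate<T> matchesUrl) {
        long count = resources.stream().filter(matchesUrl).count();
        if (count > 1) {
            throw new IllegalArgumentException(
                    "Request would cause multiple resources to match URL: \"" + matchUrl + "\"");
        }
    }
}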
Comparator { private final Set myPlaceholderIds; @@ -1842,7 +2121,6 @@ public abstract class BaseTransactionProcessor { } return o1; } - } public class RetriableBundleTask implements Runnable { @@ -1855,7 +2133,13 @@ public abstract class BaseTransactionProcessor { private final boolean myNestedMode; private BaseServerResponseException myLastSeenException; - protected RetriableBundleTask(CountDownLatch theCompletedLatch, RequestDetails theRequestDetails, Map theResponseMap, int theResponseOrder, IBase theNextReqEntry, boolean theNestedMode) { + protected RetriableBundleTask( + CountDownLatch theCompletedLatch, + RequestDetails theRequestDetails, + Map theResponseMap, + int theResponseOrder, + IBase theNextReqEntry, + boolean theNestedMode) { this.myCompletedLatch = theCompletedLatch; this.myRequestDetails = theRequestDetails; this.myNextReqEntry = theNextReqEntry; @@ -1866,19 +2150,23 @@ public abstract class BaseTransactionProcessor { } private void processBatchEntry() { - IBaseBundle subRequestBundle = myVersionAdapter.createBundle(org.hl7.fhir.r4.model.Bundle.BundleType.TRANSACTION.toCode()); + IBaseBundle subRequestBundle = + myVersionAdapter.createBundle(org.hl7.fhir.r4.model.Bundle.BundleType.TRANSACTION.toCode()); myVersionAdapter.addEntry(subRequestBundle, myNextReqEntry); - IBaseBundle nextResponseBundle = processTransactionAsSubRequest(myRequestDetails, subRequestBundle, "Batch sub-request", myNestedMode); + IBaseBundle nextResponseBundle = processTransactionAsSubRequest( + myRequestDetails, subRequestBundle, "Batch sub-request", myNestedMode); - IBase subResponseEntry = (IBase) myVersionAdapter.getEntries(nextResponseBundle).get(0); + IBase subResponseEntry = + (IBase) myVersionAdapter.getEntries(nextResponseBundle).get(0); myResponseMap.put(myResponseOrder, subResponseEntry); /* * If the individual entry didn't have a resource in its response, bring the sub-transaction's OperationOutcome across so the client can see it */ if (myVersionAdapter.getResource(subResponseEntry) == null) { - IBase nextResponseBundleFirstEntry = (IBase) myVersionAdapter.getEntries(nextResponseBundle).get(0); + IBase nextResponseBundleFirstEntry = + (IBase) myVersionAdapter.getEntries(nextResponseBundle).get(0); myResponseMap.put(myResponseOrder, nextResponseBundleFirstEntry); } } @@ -1890,12 +2178,13 @@ public abstract class BaseTransactionProcessor { processBatchEntry(); return true; } catch (BaseServerResponseException e) { - //If we catch a known and structured exception from HAPI, just fail. + // If we catch a known and structured exception from HAPI, just fail. myLastSeenException = e; return false; } catch (Throwable t) { myLastSeenException = new InternalErrorException(t); - //If we have caught a non-tag-storage failure we are unfamiliar with, or we have exceeded max attempts, exit. + // If we have caught a non-tag-storage failure we are unfamiliar with, or we have exceeded max + // attempts, exit. 
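// [Editor's illustrative sketch, not part of this patch] The TransactionSorter shown above
// orders bundle entries so deletes run first and reads run last, per the FHIR transaction
// processing rules. The comparator below shows an equivalent ranking over plain verb strings;
// the exact numeric ranks are an assumption for illustration.
import java.util.Comparator;
import java.util.List;

class VerbOrderExample {
    static int rank(String verb) {
        switch (verb) {
            case "DELETE": return 1;
            case "POST": return 2;
            case "PUT":
            case "PATCH": return 3;
            case "GET":
            case "HEAD": return 4;
            default: return 5;
        }
    }

    static final Comparator<String> BY_VERB = Comparator.comparingInt(VerbOrderExample::rank);

    public static void main(String[] args) {
        System.out.println(List.of("GET", "POST", "DELETE").stream().sorted(BY_VERB).toList());
        // prints: [DELETE, POST, GET]
    }
}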
if (!DaoFailureUtil.isTagStorageFailure(t) || attempt >= maxAttempts) { ourLog.error("Failure during BATCH sub transaction processing", t); return false; @@ -1921,7 +2210,6 @@ public abstract class BaseTransactionProcessor { caughtEx.setException(myLastSeenException); myResponseMap.put(myResponseOrder, caughtEx); } - } private static class ServerResponseExceptionHolder { @@ -1990,7 +2278,9 @@ public abstract class BaseTransactionProcessor { } else { replacementValue = replacement.getValue(); } - matchUrl = matchUrl.substring(0, equalsIdx + 1) + replacementValue + matchUrl.substring(endIdx); + matchUrl = matchUrl.substring(0, equalsIdx + 1) + + replacementValue + + matchUrl.substring(endIdx); searchFrom = equalsIdx + 1 + replacementValue.length(); } else { searchFrom = endIdx; @@ -2006,7 +2296,6 @@ public abstract class BaseTransactionProcessor { startIdx = matchUrl.indexOf('&', searchFrom); } - } return matchUrl; } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/GZipUtil.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/GZipUtil.java index 8e9907ca0dc..d9ccc5cb5fb 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/GZipUtil.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/GZipUtil.java @@ -54,5 +54,4 @@ public class GZipUtil { throw new DataFormatException(Msg.code(517) + "Compress contents", e); } } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/IResultIterator.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/IResultIterator.java index 313ac21f8fc..cf45484a38f 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/IResultIterator.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/IResultIterator.java @@ -32,5 +32,4 @@ public interface IResultIterator extends Iterat int getNonSkippedCount(); Collection getNextResultBatch(long theBatchSize); - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/ISearchBuilder.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/ISearchBuilder.java index 732cd58a3fa..42b1a668c2d 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/ISearchBuilder.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/ISearchBuilder.java @@ -30,42 +30,62 @@ import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; import ca.uhn.fhir.rest.param.DateRangeParam; import org.hl7.fhir.instance.model.api.IBaseResource; -import javax.annotation.Nonnull; -import javax.persistence.EntityManager; import java.util.Collection; import java.util.List; import java.util.Set; +import javax.annotation.Nonnull; +import javax.persistence.EntityManager; public interface ISearchBuilder { String SEARCH_BUILDER_BEAN_NAME = "SearchBuilder"; - IResultIterator createQuery(SearchParameterMap theParams, SearchRuntimeDetails theSearchRuntime, RequestDetails theRequest, @Nonnull RequestPartitionId theRequestPartitionId); + IResultIterator createQuery( + SearchParameterMap theParams, + SearchRuntimeDetails theSearchRuntime, + RequestDetails theRequest, + @Nonnull RequestPartitionId theRequestPartitionId); - Long createCountQuery(SearchParameterMap theParams, String theSearchUuid, RequestDetails theRequest, RequestPartitionId theRequestPartitionId); + Long createCountQuery( + SearchParameterMap theParams, + String theSearchUuid, + RequestDetails theRequest, + RequestPartitionId theRequestPartitionId); void setMaxResultsToFetch(Integer theMaxResultsToFetch); - void loadResourcesByPid(Collection thePids, Collection theIncludedPids, List 
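// [Editor's illustrative sketch, not part of this patch] RetriableBundleTask retries a failed
// batch entry only when the failure is considered retriable (a tag-storage collision) and only
// up to a fixed number of attempts. The generic helper below shows that retry shape; it assumes
// maxAttempts >= 1 and RuntimeException-based failures.
import java.util.function.Predicate;
import java.util.function.Supplier;

class RetryExample {
    static <T> T runWithRetry(Supplier<T> task, Predicate<RuntimeException> isRetriable, int maxAttempts) {
        RuntimeException lastFailure = null;
        for (int attempt = 1; attempt <= maxAttempts; attempt++) {
            try {
                return task.get();
            } catch (RuntimeException e) {
                lastFailure = e;
                if (!isRetriable.test(e)) {
                    break; // unfamiliar failure: give up immediately
                }
            }
        }
        throw lastFailure; // non-retriable failure or attempts exhausted
    }
}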
theResourceListToPopulate, boolean theForHistoryOperation, RequestDetails theDetails); + void loadResourcesByPid( + Collection thePids, + Collection theIncludedPids, + List theResourceListToPopulate, + boolean theForHistoryOperation, + RequestDetails theDetails); /** * Use the loadIncludes that takes a parameters object instead. */ @Deprecated - Set loadIncludes(FhirContext theContext, EntityManager theEntityManager, Collection theMatches, Collection theRevIncludes, boolean theReverseMode, - DateRangeParam theLastUpdated, String theSearchIdOrDescription, RequestDetails theRequest, Integer theMaxCount); + Set loadIncludes( + FhirContext theContext, + EntityManager theEntityManager, + Collection theMatches, + Collection theRevIncludes, + boolean theReverseMode, + DateRangeParam theLastUpdated, + String theSearchIdOrDescription, + RequestDetails theRequest, + Integer theMaxCount); default Set loadIncludes(SearchBuilderLoadIncludesParameters theParameters) { return this.loadIncludes( - theParameters.getFhirContext(), - theParameters.getEntityManager(), - theParameters.getMatches(), - theParameters.getIncludeFilters(), - theParameters.isReverseMode(), - theParameters.getLastUpdated(), - theParameters.getSearchIdOrDescription(), - theParameters.getRequestDetails(), - theParameters.getMaxCount() - ); + theParameters.getFhirContext(), + theParameters.getEntityManager(), + theParameters.getMatches(), + theParameters.getIncludeFilters(), + theParameters.isReverseMode(), + theParameters.getLastUpdated(), + theParameters.getSearchIdOrDescription(), + theParameters.getRequestDetails(), + theParameters.getMaxCount()); } /** @@ -74,5 +94,4 @@ public interface ISearchBuilder { void setFetchSize(int theFetchSize); void setPreviouslyAddedResourcePids(List thePreviouslyAddedResourcePids); - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/IStorageResourceParser.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/IStorageResourceParser.java index d75e34c839e..ad7f012917f 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/IStorageResourceParser.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/IStorageResourceParser.java @@ -36,5 +36,4 @@ public interface IStorageResourceParser { // metadata but there's no reason to not always just add that, and this would // simplify this interface IBaseResource toResource(IBasePersistedResource theEntity, boolean theForHistoryOperation); - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/ITransactionProcessorVersionAdapter.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/ITransactionProcessorVersionAdapter.java index ad5cda39f0c..e75c73d107d 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/ITransactionProcessorVersionAdapter.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/ITransactionProcessorVersionAdapter.java @@ -76,5 +76,4 @@ public interface ITransactionProcessorVersionAdapter myMap = new HashMap<>(); - private final Multimap myReverseMap = MultimapBuilder.hashKeys().arrayListValues().build(); - + private final Multimap myReverseMap = + MultimapBuilder.hashKeys().arrayListValues().build(); public boolean containsSource(IIdType theId) { if (theId.isLocal()) { @@ -63,7 +63,6 @@ public class IdSubstitutionMap { return null; } - public IIdType getForSource(String theId) { Entry target = myMap.get(new Entry(theId)); if (target != null) { @@ -74,11 +73,9 @@ public class IdSubstitutionMap { } public List> entrySet() { - return myMap - .entrySet() - .stream() - 
.map(t->Pair.of(t.getKey().myId, t.getValue().myId)) - .collect(Collectors.toList()); + return myMap.entrySet().stream() + .map(t -> Pair.of(t.getKey().myId, t.getValue().myId)) + .collect(Collectors.toList()); } public void put(IIdType theSource, IIdType theTarget) { @@ -90,7 +87,6 @@ public class IdSubstitutionMap { return myMap.isEmpty(); } - private static class Entry { private final String myUnversionedId; @@ -122,7 +118,6 @@ public class IdSubstitutionMap { public int hashCode() { return myUnversionedId.hashCode(); } - } static String toVersionlessValue(IIdType theId) { diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/MatchResourceUrlService.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/MatchResourceUrlService.java index e4d674d6edd..c04eadb174c 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/MatchResourceUrlService.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/MatchResourceUrlService.java @@ -48,7 +48,6 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; -import javax.annotation.Nullable; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -57,6 +56,7 @@ import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nullable; @Service public class MatchResourceUrlService { @@ -65,34 +65,49 @@ public class MatchResourceUrlService { @Autowired private DaoRegistry myDaoRegistry; + @Autowired private FhirContext myContext; + @Autowired private MatchUrlService myMatchUrlService; + @Autowired private JpaStorageSettings myStorageSettings; + @Autowired private IInterceptorBroadcaster myInterceptorBroadcaster; + @Autowired private MemoryCacheService myMemoryCacheService; /** * Note that this will only return a maximum of 2 results!! */ - public Set processMatchUrl(String theMatchUrl, Class theResourceType, TransactionDetails theTransactionDetails, RequestDetails theRequest) { + public Set processMatchUrl( + String theMatchUrl, + Class theResourceType, + TransactionDetails theTransactionDetails, + RequestDetails theRequest) { return processMatchUrl(theMatchUrl, theResourceType, theTransactionDetails, theRequest, null); } /** * Note that this will only return a maximum of 2 results!! */ - public Set processMatchUrl(String theMatchUrl, Class theResourceType, TransactionDetails theTransactionDetails, RequestDetails theRequest, IBaseResource theConditionalOperationTargetOrNull) { + public Set processMatchUrl( + String theMatchUrl, + Class theResourceType, + TransactionDetails theTransactionDetails, + RequestDetails theRequest, + IBaseResource theConditionalOperationTargetOrNull) { Set retVal = null; String resourceType = myContext.getResourceType(theResourceType); String matchUrl = massageForStorage(resourceType, theMatchUrl); - T resolvedInTransaction = (T) theTransactionDetails.getResolvedMatchUrls().get(matchUrl); + T resolvedInTransaction = + (T) theTransactionDetails.getResolvedMatchUrls().get(matchUrl); if (resolvedInTransaction != null) { // If the resource has previously been looked up within the transaction, there's no need to re-authorize it. 
if (resolvedInTransaction == TransactionDetails.NOT_FOUND) { @@ -111,7 +126,8 @@ public class MatchResourceUrlService { RuntimeResourceDefinition resourceDef = myContext.getResourceDefinition(theResourceType); SearchParameterMap paramMap = myMatchUrlService.translateMatchUrl(matchUrl, resourceDef); if (paramMap.isEmpty() && paramMap.getLastUpdated() == null) { - throw new InvalidRequestException(Msg.code(518) + "Invalid match URL[" + matchUrl + "] - URL has no search parameters"); + throw new InvalidRequestException( + Msg.code(518) + "Invalid match URL[" + matchUrl + "] - URL has no search parameters"); } paramMap.setLoadSynchronousUpTo(2); @@ -119,7 +135,8 @@ public class MatchResourceUrlService { } // Interceptor broadcast: STORAGE_PRESHOW_RESOURCES - if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.STORAGE_PRESHOW_RESOURCES, myInterceptorBroadcaster, theRequest)) { + if (CompositeInterceptorBroadcaster.hasHooks( + Pointcut.STORAGE_PRESHOW_RESOURCES, myInterceptorBroadcaster, theRequest)) { Map resourceToPidMap = new HashMap<>(); IFhirResourceDao dao = getResourceDao(theResourceType); @@ -130,22 +147,25 @@ public class MatchResourceUrlService { SimplePreResourceShowDetails accessDetails = new SimplePreResourceShowDetails(resourceToPidMap.keySet()); HookParams params = new HookParams() - .add(IPreResourceShowDetails.class, accessDetails) - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest); + .add(IPreResourceShowDetails.class, accessDetails) + .add(RequestDetails.class, theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest); try { - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRESHOW_RESOURCES, params); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRESHOW_RESOURCES, params); - retVal = accessDetails.toList() - .stream() - .map(resourceToPidMap::get) - .filter(Objects::nonNull) - .collect(Collectors.toSet()); + retVal = accessDetails.toList().stream() + .map(resourceToPidMap::get) + .filter(Objects::nonNull) + .collect(Collectors.toSet()); } catch (ForbiddenOperationException e) { // If the search matches a resource that the user does not have authorization for, // we want to treat it the same as if the search matched no resources, in order not to leak information. 
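// [Editor's illustrative sketch, not part of this patch] processMatchUrl above first consults
// the match URLs already resolved inside the current transaction, so the same conditional URL is
// searched (and authorised) only once per transaction. A minimal memoising wrapper; the Function
// stands in for the real search and Long stands in for the persistent id type.
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

class MatchUrlCacheExample {
    private final Map<String, Long> resolvedInTransaction = new HashMap<>();

    Long resolve(String matchUrl, Function<String, Long> search) {
        return resolvedInTransaction.computeIfAbsent(matchUrl, search);
    }
}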
- ourLog.warn("Inline match URL [" + matchUrl + "] specified a resource the user is not authorized to access.", e); + ourLog.warn( + "Inline match URL [" + matchUrl + + "] specified a resource the user is not authorized to access.", + e); retVal = new HashSet<>(); } } @@ -191,38 +211,49 @@ public class MatchResourceUrlService { return existing; } - public Set search(SearchParameterMap theParamMap, Class theResourceType, RequestDetails theRequest, @Nullable IBaseResource theConditionalOperationTargetOrNull) { + public Set search( + SearchParameterMap theParamMap, + Class theResourceType, + RequestDetails theRequest, + @Nullable IBaseResource theConditionalOperationTargetOrNull) { StopWatch sw = new StopWatch(); IFhirResourceDao dao = getResourceDao(theResourceType); List retVal = dao.searchForIds(theParamMap, theRequest, theConditionalOperationTargetOrNull); // Interceptor broadcast: JPA_PERFTRACE_INFO - if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO, myInterceptorBroadcaster, theRequest)) { + if (CompositeInterceptorBroadcaster.hasHooks( + Pointcut.JPA_PERFTRACE_INFO, myInterceptorBroadcaster, theRequest)) { StorageProcessingMessage message = new StorageProcessingMessage(); message.setMessage("Processed conditional resource URL with " + retVal.size() + " result(s) in " + sw); HookParams params = new HookParams() - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest) - .add(StorageProcessingMessage.class, message); - CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.JPA_PERFTRACE_INFO, params); + .add(RequestDetails.class, theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest) + .add(StorageProcessingMessage.class, message); + CompositeInterceptorBroadcaster.doCallHooks( + myInterceptorBroadcaster, theRequest, Pointcut.JPA_PERFTRACE_INFO, params); } return new HashSet<>(retVal); } - - public void matchUrlResolved(TransactionDetails theTransactionDetails, String theResourceType, String theMatchUrl, T theResourcePersistentId) { + public void matchUrlResolved( + TransactionDetails theTransactionDetails, + String theResourceType, + String theMatchUrl, + T theResourcePersistentId) { Validate.notBlank(theMatchUrl); Validate.notNull(theResourcePersistentId); String matchUrl = massageForStorage(theResourceType, theMatchUrl); theTransactionDetails.addResolvedMatchUrl(myContext, matchUrl, theResourcePersistentId); if (myStorageSettings.isMatchUrlCacheEnabled()) { - myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.MATCH_URL, matchUrl, theResourcePersistentId); + myMemoryCacheService.putAfterCommit( + MemoryCacheService.CacheEnum.MATCH_URL, matchUrl, theResourcePersistentId); } } - public void unresolveMatchUrl(TransactionDetails theTransactionDetails, String theResourceType, String theMatchUrl) { + public void unresolveMatchUrl( + TransactionDetails theTransactionDetails, String theResourceType, String theMatchUrl) { Validate.notBlank(theMatchUrl); String matchUrl = massageForStorage(theResourceType, theMatchUrl); theTransactionDetails.removeResolvedMatchUrl(matchUrl); diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilderFactory.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilderFactory.java index 2684cbcb0cd..e6f289e23bb 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilderFactory.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilderFactory.java @@ -30,8 +30,9 @@ 
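// [Editor's illustrative sketch, not part of this patch] As the catch block above explains, a
// conditional URL that matches a resource the caller is not authorised to see must behave
// exactly like a URL that matches nothing, so authorisation failures cannot be probed. A minimal
// version of that guard (SecurityException stands in for ForbiddenOperationException):
import java.util.Collections;
import java.util.Set;
import java.util.function.Supplier;

class NoLeakMatchExample {
    static Set<Long> resolveOrEmpty(Supplier<Set<Long>> search) {
        try {
            return search.get();
        } catch (SecurityException e) {
            return Collections.emptySet(); // indistinguishable from "no match"
        }
    }
}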
public class SearchBuilderFactory> { @Autowired private ApplicationContext myApplicationContext; - public ISearchBuilder newSearchBuilder(IDao theDao, String theResourceName, Class theResourceType) { - return (ISearchBuilder) myApplicationContext.getBean(ISearchBuilder.SEARCH_BUILDER_BEAN_NAME, theDao, theResourceName, theResourceType); + public ISearchBuilder newSearchBuilder( + IDao theDao, String theResourceName, Class theResourceType) { + return (ISearchBuilder) myApplicationContext.getBean( + ISearchBuilder.SEARCH_BUILDER_BEAN_NAME, theDao, theResourceName, theResourceType); } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/ThreadPoolFactory.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/ThreadPoolFactory.java index 1401a653a80..c9d1f37612f 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/ThreadPoolFactory.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/ThreadPoolFactory.java @@ -27,7 +27,8 @@ import org.springframework.core.task.AsyncTaskExecutor; */ public class ThreadPoolFactory { - public AsyncTaskExecutor newThreadPool(Integer theBundleBatchPoolSize, Integer theBundleBatchMaxPoolSize, String theThreadPrefix) { + public AsyncTaskExecutor newThreadPool( + Integer theBundleBatchPoolSize, Integer theBundleBatchMaxPoolSize, String theThreadPrefix) { return ThreadPoolUtil.newThreadPool(theBundleBatchPoolSize, theBundleBatchMaxPoolSize, theThreadPrefix); } } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/TransactionProcessorVersionAdapterDstu3.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/TransactionProcessorVersionAdapterDstu3.java index 5cd6c81ad0d..b81c977a432 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/TransactionProcessorVersionAdapterDstu3.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/TransactionProcessorVersionAdapterDstu3.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.jpa.dao.dstu3; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.dao.ITransactionProcessorVersionAdapter; import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; @@ -37,7 +37,8 @@ import java.util.List; import static org.apache.commons.lang3.StringUtils.isBlank; -public class TransactionProcessorVersionAdapterDstu3 implements ITransactionProcessorVersionAdapter { +public class TransactionProcessorVersionAdapterDstu3 + implements ITransactionProcessorVersionAdapter { @Override public void setResponseStatus(Bundle.BundleEntryComponent theBundleEntry, String theStatus) { theBundleEntry.getResponse().setStatus(theStatus); @@ -67,12 +68,13 @@ public class TransactionProcessorVersionAdapterDstu3 implements ITransactionProc } @Override - public void populateEntryWithOperationOutcome(BaseServerResponseException theCaughtEx, Bundle.BundleEntryComponent theEntry) { + public void populateEntryWithOperationOutcome( + BaseServerResponseException theCaughtEx, Bundle.BundleEntryComponent theEntry) { OperationOutcome oo = new OperationOutcome(); oo.addIssue() - .setSeverity(OperationOutcome.IssueSeverity.ERROR) - .setDiagnostics(theCaughtEx.getMessage()) - .setCode(OperationOutcome.IssueType.EXCEPTION); + .setSeverity(OperationOutcome.IssueSeverity.ERROR) + .setDiagnostics(theCaughtEx.getMessage()) + .setCode(OperationOutcome.IssueType.EXCEPTION); theEntry.getResponse().setOutcome(oo); } @@ -184,5 +186,4 @@ public class 
@@ -184,5 +186,4 @@ public class TransactionProcessorVersionAdapterDstu3 implements ITransactionProc
 public void setRequestUrl(Bundle.BundleEntryComponent theEntry, String theUrl) {
 theEntry.getRequest().setUrl(theUrl);
 }
-
 }
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeOperation.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeOperation.java
index a61a3d9dc3c..0a8ceb54240 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeOperation.java
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeOperation.java
@@ -44,6 +44,7 @@ public class ExpungeOperation implements Callable {
 @Autowired
 private IResourceExpungeService myExpungeDaoService;
+
 @Autowired
 private JpaStorageSettings myStorageSettings;
@@ -52,10 +53,15 @@ public class ExpungeOperation implements Callable {
 private final ExpungeOptions myExpungeOptions;
 private final RequestDetails myRequestDetails;
 private final AtomicInteger myRemainingCount;
+
 @Autowired
 private HapiTransactionService myTxService;
- public ExpungeOperation(String theResourceName, IResourcePersistentId theResourceId, ExpungeOptions theExpungeOptions, RequestDetails theRequestDetails) {
+ public ExpungeOperation(
+ String theResourceName,
+ IResourcePersistentId theResourceId,
+ ExpungeOptions theExpungeOptions,
+ RequestDetails theRequestDetails) {
 myResourceName = theResourceName;
 myResourceId = theResourceId;
 myExpungeOptions = theExpungeOptions;
@@ -65,7 +71,8 @@ public class ExpungeOperation implements Callable {
 @Override
 public ExpungeOutcome call() {
- if (myExpungeOptions.isExpungeDeletedResources() && (myResourceId == null || myResourceId.getVersion() == null)) {
+ if (myExpungeOptions.isExpungeDeletedResources()
+ && (myResourceId == null || myResourceId.getVersion() == null)) {
 expungeDeletedResources();
 if (expungeLimitReached()) {
 return expungeOutcome();
@@ -94,13 +101,15 @@ public class ExpungeOperation implements Callable {
 }
 private List findHistoricalVersionsOfDeletedResources() {
- List retVal = myExpungeDaoService.findHistoricalVersionsOfDeletedResources(myResourceName, myResourceId, myRemainingCount.get());
+ List retVal = myExpungeDaoService.findHistoricalVersionsOfDeletedResources(
+ myResourceName, myResourceId, myRemainingCount.get());
 ourLog.debug("Found {} historical versions", retVal.size());
 return retVal;
 }
 private List findHistoricalVersionsOfNonDeletedResources() {
- return myExpungeDaoService.findHistoricalVersionsOfNonDeletedResources(myResourceName, myResourceId, myRemainingCount.get());
+ return myExpungeDaoService.findHistoricalVersionsOfNonDeletedResources(
+ myResourceName, myResourceId, myRemainingCount.get());
 }
 private boolean expungeLimitReached() {
@@ -114,23 +123,40 @@ public class ExpungeOperation implements Callable {
 private void expungeOldVersions() {
 List historicalIds = findHistoricalVersionsOfNonDeletedResources();
- getPartitionRunner().runInPartitionedThreads(historicalIds, partition -> myExpungeDaoService.expungeHistoricalVersions(myRequestDetails, partition, myRemainingCount));
+ getPartitionRunner()
+ .runInPartitionedThreads(
+ historicalIds,
+ partition -> myExpungeDaoService.expungeHistoricalVersions(
+ myRequestDetails, partition, myRemainingCount));
 }
 private PartitionRunner getPartitionRunner() {
- return new PartitionRunner(PROCESS_NAME, THREAD_PREFIX, myStorageSettings.getExpungeBatchSize(), myStorageSettings.getExpungeThreadCount(), myTxService, myRequestDetails);
+ return new PartitionRunner(
+ PROCESS_NAME,
THREAD_PREFIX, + myStorageSettings.getExpungeBatchSize(), + myStorageSettings.getExpungeThreadCount(), + myTxService, + myRequestDetails); } private void deleteCurrentVersionsOfDeletedResources(List theResourceIds) { - getPartitionRunner().runInPartitionedThreads(theResourceIds, partition -> myExpungeDaoService.expungeCurrentVersionOfResources(myRequestDetails, partition, myRemainingCount)); + getPartitionRunner() + .runInPartitionedThreads( + theResourceIds, + partition -> myExpungeDaoService.expungeCurrentVersionOfResources( + myRequestDetails, partition, myRemainingCount)); } private void deleteHistoricalVersions(List theResourceIds) { - getPartitionRunner().runInPartitionedThreads(theResourceIds, partition -> myExpungeDaoService.expungeHistoricalVersionsOfIds(myRequestDetails, partition, myRemainingCount)); + getPartitionRunner() + .runInPartitionedThreads( + theResourceIds, + partition -> myExpungeDaoService.expungeHistoricalVersionsOfIds( + myRequestDetails, partition, myRemainingCount)); } private ExpungeOutcome expungeOutcome() { return new ExpungeOutcome().setDeletedCount(myExpungeOptions.getLimit() - myRemainingCount.get()); } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeService.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeService.java index d653c77cb6e..7ff1b2092d9 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeService.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeService.java @@ -37,24 +37,44 @@ public class ExpungeService { @Autowired private IExpungeEverythingService myExpungeEverythingService; + @Autowired private IResourceExpungeService myExpungeDaoService; + @Autowired private ApplicationContext myApplicationContext; - protected ExpungeOperation getExpungeOperation(String theResourceName, IResourcePersistentId theResourceId, ExpungeOptions theExpungeOptions, RequestDetails theRequestDetails) { - return myApplicationContext.getBean(ExpungeOperation.class, theResourceName, theResourceId, theExpungeOptions, theRequestDetails); + protected ExpungeOperation getExpungeOperation( + String theResourceName, + IResourcePersistentId theResourceId, + ExpungeOptions theExpungeOptions, + RequestDetails theRequestDetails) { + return myApplicationContext.getBean( + ExpungeOperation.class, theResourceName, theResourceId, theExpungeOptions, theRequestDetails); } - public ExpungeOutcome expunge(String theResourceName, IResourcePersistentId theResourceId, ExpungeOptions theExpungeOptions, RequestDetails theRequest) { - ourLog.info("Expunge: ResourceName[{}] Id[{}] Version[{}] Options[{}]", theResourceName, theResourceId != null ? theResourceId.getId() : null, theResourceId != null ? theResourceId.getVersion() : null, theExpungeOptions); - ExpungeOperation expungeOperation = getExpungeOperation(theResourceName, theResourceId, theExpungeOptions, theRequest); + public ExpungeOutcome expunge( + String theResourceName, + IResourcePersistentId theResourceId, + ExpungeOptions theExpungeOptions, + RequestDetails theRequest) { + ourLog.info( + "Expunge: ResourceName[{}] Id[{}] Version[{}] Options[{}]", + theResourceName, + theResourceId != null ? theResourceId.getId() : null, + theResourceId != null ? 
theResourceId.getVersion() : null, + theExpungeOptions); + ExpungeOperation expungeOperation = + getExpungeOperation(theResourceName, theResourceId, theExpungeOptions, theRequest); if (theExpungeOptions.getLimit() < 1) { - throw new InvalidRequestException(Msg.code(1087) + "Expunge limit may not be less than 1. Received expunge limit " + theExpungeOptions.getLimit() + "."); + throw new InvalidRequestException( + Msg.code(1087) + "Expunge limit may not be less than 1. Received expunge limit " + + theExpungeOptions.getLimit() + "."); } - if (theResourceName == null && (theResourceId == null || (theResourceId.getId() == null && theResourceId.getVersion() == null))) { + if (theResourceName == null + && (theResourceId == null || (theResourceId.getId() == null && theResourceId.getVersion() == null))) { if (theExpungeOptions.isExpungeEverything()) { myExpungeEverythingService.expungeEverything(theRequest); return new ExpungeOutcome().setDeletedCount(myExpungeEverythingService.getExpungeDeletedEntityCount()); diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/expunge/IResourceExpungeService.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/expunge/IResourceExpungeService.java index bb0101a3231..2f999919d3a 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/expunge/IResourceExpungeService.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/expunge/IResourceExpungeService.java @@ -30,11 +30,14 @@ public interface IResourceExpungeService { List findHistoricalVersionsOfNonDeletedResources(String theResourceName, T theResourceId, int theI); - void expungeHistoricalVersions(RequestDetails theRequestDetails, List thePartition, AtomicInteger theRemainingCount); + void expungeHistoricalVersions( + RequestDetails theRequestDetails, List thePartition, AtomicInteger theRemainingCount); - void expungeCurrentVersionOfResources(RequestDetails theRequestDetails, List theResourceIds, AtomicInteger theRemainingCount); + void expungeCurrentVersionOfResources( + RequestDetails theRequestDetails, List theResourceIds, AtomicInteger theRemainingCount); - void expungeHistoricalVersionsOfIds(RequestDetails theRequestDetails, List theResourceIds, AtomicInteger theRemainingCount); + void expungeHistoricalVersionsOfIds( + RequestDetails theRequestDetails, List theResourceIds, AtomicInteger theRemainingCount); void deleteAllSearchParams(T theResourceId); } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/expunge/PartitionRunner.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/expunge/PartitionRunner.java index 2a5d80597b9..125ea40189c 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/expunge/PartitionRunner.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/expunge/PartitionRunner.java @@ -30,7 +30,6 @@ import org.apache.commons.lang3.concurrent.BasicThreadFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.List; import java.util.concurrent.Callable; @@ -44,6 +43,7 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.stream.Collectors; +import javax.annotation.Nullable; public class PartitionRunner { private static final Logger ourLog = LoggerFactory.getLogger(PartitionRunner.class); @@ -67,7 +67,13 @@ public class PartitionRunner { * Constructor - Use this constructor and provide a {@link RequestDetails} and {@link HapiTransactionService} if * 
you want each individual callable task to be performed in a managed transaction. */ - public PartitionRunner(String theProcessName, String theThreadPrefix, int theBatchSize, int theThreadCount, @Nullable HapiTransactionService theTransactionService, @Nullable RequestDetails theRequestDetails) { + public PartitionRunner( + String theProcessName, + String theThreadPrefix, + int theBatchSize, + int theThreadCount, + @Nullable HapiTransactionService theTransactionService, + @Nullable RequestDetails theRequestDetails) { myProcessName = theProcessName; myThreadPrefix = theThreadPrefix; myBatchSize = theBatchSize; @@ -76,7 +82,8 @@ public class PartitionRunner { myRequestDetails = theRequestDetails; } - public void runInPartitionedThreads(List theResourceIds, Consumer> partitionConsumer) { + public void runInPartitionedThreads( + List theResourceIds, Consumer> partitionConsumer) { List> runnableTasks = buildCallableTasks(theResourceIds, partitionConsumer); if (runnableTasks.size() == 0) { @@ -85,14 +92,13 @@ public class PartitionRunner { if (myTransactionService != null) { // Wrap each Callable task in an invocation to HapiTransactionService#execute - runnableTasks = runnableTasks - .stream() - .map(t -> (Callable) () -> { - return myTransactionService - .withRequest(myRequestDetails) - .execute(t); - }) - .collect(Collectors.toList()); + runnableTasks = runnableTasks.stream() + .map(t -> (Callable) () -> { + return myTransactionService + .withRequest(myRequestDetails) + .execute(t); + }) + .collect(Collectors.toList()); } if (runnableTasks.size() == 1) { @@ -107,10 +113,9 @@ public class PartitionRunner { ExecutorService executorService = buildExecutor(runnableTasks.size()); try { - List> futures = runnableTasks - .stream() - .map(t -> executorService.submit(() -> t.call())) - .collect(Collectors.toList()); + List> futures = runnableTasks.stream() + .map(t -> executorService.submit(() -> t.call())) + .collect(Collectors.toList()); // wait for all the threads to finish for (Future future : futures) { future.get(); @@ -126,7 +131,8 @@ public class PartitionRunner { } } - private List> buildCallableTasks(List theResourceIds, Consumer> partitionConsumer) { + private List> buildCallableTasks( + List theResourceIds, Consumer> partitionConsumer) { List> retval = new ArrayList<>(); if (myBatchSize > theResourceIds.size()) { @@ -157,28 +163,31 @@ public class PartitionRunner { ourLog.info(myProcessName + " with {} threads", threadCount); LinkedBlockingQueue executorQueue = new LinkedBlockingQueue<>(MAX_POOL_SIZE); BasicThreadFactory threadFactory = new BasicThreadFactory.Builder() - .namingPattern(myThreadPrefix + "-%d") - .daemon(false) - .priority(Thread.NORM_PRIORITY) - .build(); + .namingPattern(myThreadPrefix + "-%d") + .daemon(false) + .priority(Thread.NORM_PRIORITY) + .build(); RejectedExecutionHandler rejectedExecutionHandler = (theRunnable, theExecutor) -> { - ourLog.info("Note: " + myThreadPrefix + " executor queue is full ({} elements), waiting for a slot to become available!", executorQueue.size()); + ourLog.info( + "Note: " + myThreadPrefix + + " executor queue is full ({} elements), waiting for a slot to become available!", + executorQueue.size()); StopWatch sw = new StopWatch(); try { executorQueue.put(theRunnable); } catch (InterruptedException e) { - throw new RejectedExecutionException(Msg.code(1086) + "Task " + theRunnable.toString() + - " rejected from " + e); + throw new RejectedExecutionException( + Msg.code(1086) + "Task " + theRunnable.toString() + " rejected from " + e); } 
ourLog.info("Slot become available after {}ms", sw.getMillis()); }; return new ThreadPoolExecutor( - threadCount, - MAX_POOL_SIZE, - 0L, - TimeUnit.MILLISECONDS, - executorQueue, - threadFactory, - rejectedExecutionHandler); + threadCount, + MAX_POOL_SIZE, + 0L, + TimeUnit.MILLISECONDS, + executorQueue, + threadFactory, + rejectedExecutionHandler); } } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/index/DaoResourceLinkResolver.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/index/DaoResourceLinkResolver.java index f85f715eb65..a2df3b9a415 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/index/DaoResourceLinkResolver.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/index/DaoResourceLinkResolver.java @@ -57,30 +57,41 @@ import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.nio.charset.StandardCharsets; import java.util.Date; import java.util.List; import java.util.Optional; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DaoResourceLinkResolver implements IResourceLinkResolver { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(DaoResourceLinkResolver.class); + @Autowired private JpaStorageSettings myStorageSettings; + @Autowired private FhirContext myContext; + @Autowired private IIdHelperService myIdHelperService; + @Autowired private DaoRegistry myDaoRegistry; + @Autowired private ISearchParamRegistry mySearchParamRegistry; + @Autowired private IHapiTransactionService myTransactionService; @Override - public IResourceLookup findTargetResource(@Nonnull RequestPartitionId theRequestPartitionId, String theSourceResourceName, PathAndRef thePathAndRef, RequestDetails theRequest, TransactionDetails theTransactionDetails) { + public IResourceLookup findTargetResource( + @Nonnull RequestPartitionId theRequestPartitionId, + String theSourceResourceName, + PathAndRef thePathAndRef, + RequestDetails theRequest, + TransactionDetails theTransactionDetails) { IBaseReference targetReference = thePathAndRef.getRef(); String sourcePath = thePathAndRef.getPath(); @@ -94,12 +105,15 @@ public class DaoResourceLinkResolver implements RuntimeResourceDefinition resourceDef = myContext.getResourceDefinition(resourceType); Class type = resourceDef.getImplementingClass(); - RuntimeSearchParam searchParam = mySearchParamRegistry.getActiveSearchParam(theSourceResourceName, thePathAndRef.getSearchParamName()); + RuntimeSearchParam searchParam = + mySearchParamRegistry.getActiveSearchParam(theSourceResourceName, thePathAndRef.getSearchParamName()); T persistentId = null; if (theTransactionDetails != null) { T resolvedResourceId = (T) theTransactionDetails.getResolvedResourceId(targetResourceId); - if (resolvedResourceId != null && resolvedResourceId.getId() != null && resolvedResourceId.getAssociatedResourceId() != null) { + if (resolvedResourceId != null + && resolvedResourceId.getId() != null + && resolvedResourceId.getAssociatedResourceId() != null) { persistentId = resolvedResourceId; } } @@ -108,14 +122,16 @@ public class DaoResourceLinkResolver implements String idPart = targetResourceId.getIdPart(); try { if (persistentId == null) { - resolvedResource = myIdHelperService.resolveResourceIdentity(theRequestPartitionId, resourceType, idPart); + resolvedResource = + 
myIdHelperService.resolveResourceIdentity(theRequestPartitionId, resourceType, idPart); ourLog.trace("Translated {}/{} to resource PID {}", type, idPart, resolvedResource); } else { resolvedResource = new ResourceLookupPersistentIdWrapper(persistentId); } } catch (ResourceNotFoundException e) { - Optional createdTableOpt = createPlaceholderTargetIfConfiguredToDoSo(type, targetReference, idPart, theRequest, theTransactionDetails); + Optional createdTableOpt = createPlaceholderTargetIfConfiguredToDoSo( + type, targetReference, idPart, theRequest, theTransactionDetails); if (!createdTableOpt.isPresent()) { if (myStorageSettings.isEnforceReferentialIntegrityOnWrite() == false) { @@ -124,25 +140,35 @@ public class DaoResourceLinkResolver implements RuntimeResourceDefinition missingResourceDef = myContext.getResourceDefinition(type); String resName = missingResourceDef.getName(); - throw new InvalidRequestException(Msg.code(1094) + "Resource " + resName + "/" + idPart + " not found, specified in path: " + sourcePath); - + throw new InvalidRequestException(Msg.code(1094) + "Resource " + resName + "/" + idPart + + " not found, specified in path: " + sourcePath); } resolvedResource = createdTableOpt.get(); } - ourLog.trace("Resolved resource of type {} as PID: {}", resolvedResource.getResourceType(), resolvedResource.getPersistentId()); + ourLog.trace( + "Resolved resource of type {} as PID: {}", + resolvedResource.getResourceType(), + resolvedResource.getPersistentId()); if (!resourceType.equals(resolvedResource.getResourceType())) { - ourLog.error("Resource with PID {} was of type {} and wanted {}", resolvedResource.getPersistentId(), resourceType, resolvedResource.getResourceType()); - throw new UnprocessableEntityException(Msg.code(1095) + "Resource contains reference to unknown resource ID " + targetResourceId.getValue()); + ourLog.error( + "Resource with PID {} was of type {} and wanted {}", + resolvedResource.getPersistentId(), + resourceType, + resolvedResource.getResourceType()); + throw new UnprocessableEntityException(Msg.code(1095) + + "Resource contains reference to unknown resource ID " + targetResourceId.getValue()); } if (resolvedResource.getDeleted() != null) { String resName = resolvedResource.getResourceType(); - throw new InvalidRequestException(Msg.code(1096) + "Resource " + resName + "/" + idPart + " is deleted, specified in path: " + sourcePath); + throw new InvalidRequestException(Msg.code(1096) + "Resource " + resName + "/" + idPart + + " is deleted, specified in path: " + sourcePath); } if (persistentId == null) { - persistentId = myIdHelperService.newPid(resolvedResource.getPersistentId().getId()); + persistentId = + myIdHelperService.newPid(resolvedResource.getPersistentId().getId()); persistentId.setAssociatedResourceId(targetResourceId); if (theTransactionDetails != null) { theTransactionDetails.addResolvedResourceId(targetResourceId, persistentId); @@ -158,22 +184,32 @@ public class DaoResourceLinkResolver implements @Nullable @Override - public IBaseResource loadTargetResource(@Nonnull RequestPartitionId theRequestPartitionId, String theSourceResourceName, PathAndRef thePathAndRef, RequestDetails theRequest, TransactionDetails theTransactionDetails) { + public IBaseResource loadTargetResource( + @Nonnull RequestPartitionId theRequestPartitionId, + String theSourceResourceName, + PathAndRef thePathAndRef, + RequestDetails theRequest, + TransactionDetails theTransactionDetails) { return myTransactionService - .withRequest(theRequest) - 
.withTransactionDetails(theTransactionDetails) - .withRequestPartitionId(theRequestPartitionId) - .execute(()->{ - IIdType targetId = thePathAndRef.getRef().getReferenceElement(); - IFhirResourceDao dao = myDaoRegistry.getResourceDao(targetId.getResourceType()); - return dao.read(targetId, theRequest); - }); + .withRequest(theRequest) + .withTransactionDetails(theTransactionDetails) + .withRequestPartitionId(theRequestPartitionId) + .execute(() -> { + IIdType targetId = thePathAndRef.getRef().getReferenceElement(); + IFhirResourceDao dao = myDaoRegistry.getResourceDao(targetId.getResourceType()); + return dao.read(targetId, theRequest); + }); } /** * @param theIdToAssignToPlaceholder If specified, the placeholder resource created will be given a specific ID */ - public Optional createPlaceholderTargetIfConfiguredToDoSo(Class theType, IBaseReference theReference, @Nullable String theIdToAssignToPlaceholder, RequestDetails theRequest, TransactionDetails theTransactionDetails) { + public Optional createPlaceholderTargetIfConfiguredToDoSo( + Class theType, + IBaseReference theReference, + @Nullable String theIdToAssignToPlaceholder, + RequestDetails theRequest, + TransactionDetails theTransactionDetails) { IBasePersistedResource valueOf = null; if (myStorageSettings.isAutoCreatePlaceholderReferenceTargets()) { @@ -186,7 +222,9 @@ public class DaoResourceLinkResolver implements tryToAddPlaceholderExtensionToResource(newResource); IFhirResourceDao placeholderResourceDao = myDaoRegistry.getResourceDao(theType); - ourLog.debug("Automatically creating empty placeholder resource: {}", newResource.getIdElement().getValue()); + ourLog.debug( + "Automatically creating empty placeholder resource: {}", + newResource.getIdElement().getValue()); if (myStorageSettings.isPopulateIdentifierInAutoCreatedPlaceholderReferenceTargets()) { tryToCopyIdentifierFromReferenceToTargetResource(theReference, missingResourceDef, newResource); @@ -220,9 +258,11 @@ public class DaoResourceLinkResolver implements } } - private void tryToCopyIdentifierFromReferenceToTargetResource(IBaseReference theSourceReference, RuntimeResourceDefinition theTargetResourceDef, T theTargetResource) { -// boolean referenceHasIdentifier = theSourceReference.hasIdentifier(); - CanonicalIdentifier referenceMatchUrlIdentifier = extractIdentifierFromUrl(theSourceReference.getReferenceElement().getValue()); + private void tryToCopyIdentifierFromReferenceToTargetResource( + IBaseReference theSourceReference, RuntimeResourceDefinition theTargetResourceDef, T theTargetResource) { + // boolean referenceHasIdentifier = theSourceReference.hasIdentifier(); + CanonicalIdentifier referenceMatchUrlIdentifier = extractIdentifierFromUrl( + theSourceReference.getReferenceElement().getValue()); CanonicalIdentifier referenceIdentifier = extractIdentifierReference(theSourceReference); if (referenceIdentifier == null && referenceMatchUrlIdentifier != null) { @@ -234,12 +274,14 @@ public class DaoResourceLinkResolver implements addSubjectIdentifierToTargetResource(theSourceReference, theTargetResourceDef, theTargetResource); } else { addSubjectIdentifierToTargetResource(theSourceReference, theTargetResourceDef, theTargetResource); - addMatchUrlIdentifierToTargetResource(theTargetResourceDef, theTargetResource, referenceMatchUrlIdentifier); + addMatchUrlIdentifierToTargetResource( + theTargetResourceDef, theTargetResource, referenceMatchUrlIdentifier); } } } - private void addSubjectIdentifierToTargetResource(IBaseReference theSourceReference, 
RuntimeResourceDefinition theTargetResourceDef, T theTargetResource) { + private void addSubjectIdentifierToTargetResource( + IBaseReference theSourceReference, RuntimeResourceDefinition theTargetResourceDef, T theTargetResource) { BaseRuntimeChildDefinition targetIdentifier = theTargetResourceDef.getChildByName("identifier"); if (targetIdentifier != null) { BaseRuntimeElementDefinition identifierElement = targetIdentifier.getChildByName("identifier"); @@ -247,20 +289,32 @@ public class DaoResourceLinkResolver implements boolean targetHasIdentifierElement = identifierElementName.equals("Identifier"); if (targetHasIdentifierElement) { - BaseRuntimeElementCompositeDefinition referenceElement = (BaseRuntimeElementCompositeDefinition) myContext.getElementDefinition(theSourceReference.getClass()); + BaseRuntimeElementCompositeDefinition referenceElement = (BaseRuntimeElementCompositeDefinition) + myContext.getElementDefinition(theSourceReference.getClass()); BaseRuntimeChildDefinition referenceIdentifierChild = referenceElement.getChildByName("identifier"); - Optional identifierOpt = referenceIdentifierChild.getAccessor().getFirstValueOrNull(theSourceReference); - identifierOpt.ifPresent(theIBase -> targetIdentifier.getMutator().addValue(theTargetResource, theIBase)); + Optional identifierOpt = + referenceIdentifierChild.getAccessor().getFirstValueOrNull(theSourceReference); + identifierOpt.ifPresent( + theIBase -> targetIdentifier.getMutator().addValue(theTargetResource, theIBase)); } } } - private void addMatchUrlIdentifierToTargetResource(RuntimeResourceDefinition theTargetResourceDef, T theTargetResource, CanonicalIdentifier referenceMatchUrlIdentifier) { + private void addMatchUrlIdentifierToTargetResource( + RuntimeResourceDefinition theTargetResourceDef, + T theTargetResource, + CanonicalIdentifier referenceMatchUrlIdentifier) { BaseRuntimeChildDefinition identifierDefinition = theTargetResourceDef.getChildByName("identifier"); - IBase identifierIBase = identifierDefinition.getChildByName("identifier").newInstance(identifierDefinition.getInstanceConstructorArguments()); - IBase systemIBase = TerserUtil.newElement(myContext, "uri", referenceMatchUrlIdentifier.getSystemElement().getValueAsString()); - IBase valueIBase = TerserUtil.newElement(myContext, "string", referenceMatchUrlIdentifier.getValueElement().getValueAsString()); - //Set system in the IBase Identifier + IBase identifierIBase = identifierDefinition + .getChildByName("identifier") + .newInstance(identifierDefinition.getInstanceConstructorArguments()); + IBase systemIBase = TerserUtil.newElement( + myContext, "uri", referenceMatchUrlIdentifier.getSystemElement().getValueAsString()); + IBase valueIBase = TerserUtil.newElement( + myContext, + "string", + referenceMatchUrlIdentifier.getValueElement().getValueAsString()); + // Set system in the IBase Identifier BaseRuntimeElementDefinition elementDefinition = myContext.getElementDefinition(identifierIBase.getClass()); @@ -270,18 +324,21 @@ public class DaoResourceLinkResolver implements BaseRuntimeChildDefinition valueDefinition = elementDefinition.getChildByName("value"); valueDefinition.getMutator().setValue(identifierIBase, valueIBase); - //Set Value in the IBase identifier + // Set Value in the IBase identifier identifierDefinition.getMutator().addValue(theTargetResource, identifierIBase); } private CanonicalIdentifier extractIdentifierReference(IBaseReference theSourceReference) { - Optional identifier = myContext.newFhirPath().evaluateFirst(theSourceReference, "identifier", 
IBase.class); + Optional identifier = + myContext.newFhirPath().evaluateFirst(theSourceReference, "identifier", IBase.class); if (!identifier.isPresent()) { return null; } else { CanonicalIdentifier canonicalIdentifier = new CanonicalIdentifier(); - Optional system = myContext.newFhirPath().evaluateFirst(identifier.get(), "system", IPrimitiveType.class); - Optional value = myContext.newFhirPath().evaluateFirst(identifier.get(), "value", IPrimitiveType.class); + Optional system = + myContext.newFhirPath().evaluateFirst(identifier.get(), "system", IPrimitiveType.class); + Optional value = + myContext.newFhirPath().evaluateFirst(identifier.get(), "value", IPrimitiveType.class); system.ifPresent(theIPrimitiveType -> canonicalIdentifier.setSystem(theIPrimitiveType.getValueAsString())); value.ifPresent(theIPrimitiveType -> canonicalIdentifier.setValue(theIPrimitiveType.getValueAsString())); @@ -302,8 +359,10 @@ public class DaoResourceLinkResolver implements return null; } - List params = URLEncodedUtils.parse(theValue.substring(identifierIndex), StandardCharsets.UTF_8, '&', ';'); - Optional idOptional = params.stream().filter(p -> p.getName().equals("identifier")).findFirst(); + List params = + URLEncodedUtils.parse(theValue.substring(identifierIndex), StandardCharsets.UTF_8, '&', ';'); + Optional idOptional = + params.stream().filter(p -> p.getName().equals("identifier")).findFirst(); if (!idOptional.isPresent()) { return null; } @@ -312,7 +371,8 @@ public class DaoResourceLinkResolver implements String identifierString = id.getValue(); String[] split = identifierString.split("\\|"); if (split.length != 2) { - throw new IllegalArgumentException(Msg.code(1097) + "Can't create a placeholder reference with identifier " + theValue + ". It is not a valid identifier"); + throw new IllegalArgumentException(Msg.code(1097) + "Can't create a placeholder reference with identifier " + + theValue + ". 
It is not a valid identifier"); } CanonicalIdentifier identifier = new CanonicalIdentifier(); diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/r4/TransactionProcessorVersionAdapterR4.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/r4/TransactionProcessorVersionAdapterR4.java index ffc07301640..0c07565777f 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/r4/TransactionProcessorVersionAdapterR4.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/r4/TransactionProcessorVersionAdapterR4.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.jpa.dao.r4; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.dao.ITransactionProcessorVersionAdapter; import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; @@ -34,7 +34,8 @@ import org.hl7.fhir.r4.model.Resource; import java.util.Date; import java.util.List; -public class TransactionProcessorVersionAdapterR4 implements ITransactionProcessorVersionAdapter { +public class TransactionProcessorVersionAdapterR4 + implements ITransactionProcessorVersionAdapter { @Override public void setResponseStatus(Bundle.BundleEntryComponent theBundleEntry, String theStatus) { theBundleEntry.getResponse().setStatus(theStatus); @@ -64,12 +65,13 @@ public class TransactionProcessorVersionAdapterR4 implements ITransactionProcess } @Override - public void populateEntryWithOperationOutcome(BaseServerResponseException theCaughtEx, Bundle.BundleEntryComponent theEntry) { + public void populateEntryWithOperationOutcome( + BaseServerResponseException theCaughtEx, Bundle.BundleEntryComponent theEntry) { OperationOutcome oo = new OperationOutcome(); oo.addIssue() - .setSeverity(OperationOutcome.IssueSeverity.ERROR) - .setDiagnostics(theCaughtEx.getMessage()) - .setCode(OperationOutcome.IssueType.EXCEPTION); + .setSeverity(OperationOutcome.IssueSeverity.ERROR) + .setDiagnostics(theCaughtEx.getMessage()) + .setCode(OperationOutcome.IssueType.EXCEPTION); theEntry.getResponse().setOutcome(oo); } @@ -114,7 +116,6 @@ public class TransactionProcessorVersionAdapterR4 implements ITransactionProcess return theEntry.getFullUrl(); } - @Override public void setFullUrl(Bundle.BundleEntryComponent theEntry, String theFullUrl) { theEntry.setFullUrl(theFullUrl); @@ -169,5 +170,4 @@ public class TransactionProcessorVersionAdapterR4 implements ITransactionProcess public void setRequestUrl(Bundle.BundleEntryComponent theEntry, String theUrl) { theEntry.getRequest().setUrl(theUrl); } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/tx/HapiTransactionService.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/tx/HapiTransactionService.java index 7a37a8d5e17..c7c0cf45cba 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/tx/HapiTransactionService.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/tx/HapiTransactionService.java @@ -56,28 +56,35 @@ import org.springframework.transaction.support.TransactionCallbackWithoutResult; import org.springframework.transaction.support.TransactionSynchronizationManager; import org.springframework.transaction.support.TransactionTemplate; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.Objects; import java.util.concurrent.Callable; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * @see IHapiTransactionService for an explanation of this class */ public class 
HapiTransactionService implements IHapiTransactionService { - public static final String XACT_USERDATA_KEY_RESOLVED_TAG_DEFINITIONS = HapiTransactionService.class.getName() + "_RESOLVED_TAG_DEFINITIONS"; - public static final String XACT_USERDATA_KEY_EXISTING_SEARCH_PARAMS = HapiTransactionService.class.getName() + "_EXISTING_SEARCH_PARAMS"; + public static final String XACT_USERDATA_KEY_RESOLVED_TAG_DEFINITIONS = + HapiTransactionService.class.getName() + "_RESOLVED_TAG_DEFINITIONS"; + public static final String XACT_USERDATA_KEY_EXISTING_SEARCH_PARAMS = + HapiTransactionService.class.getName() + "_EXISTING_SEARCH_PARAMS"; private static final Logger ourLog = LoggerFactory.getLogger(HapiTransactionService.class); private static final ThreadLocal ourRequestPartitionThreadLocal = new ThreadLocal<>(); + @Autowired protected IInterceptorBroadcaster myInterceptorBroadcaster; + @Autowired protected PlatformTransactionManager myTransactionManager; + @Autowired protected IRequestPartitionHelperSvc myRequestPartitionHelperSvc; + @Autowired protected PartitionSettings myPartitionSettings; + private Propagation myTransactionPropagationWhenChangingPartitions = Propagation.REQUIRED; @VisibleForTesting @@ -99,7 +106,10 @@ public class HapiTransactionService implements IHapiTransactionService { * @deprecated Use {@link #withRequest(RequestDetails)} with fluent call instead */ @Deprecated - public T execute(@Nullable RequestDetails theRequestDetails, @Nullable TransactionDetails theTransactionDetails, @Nonnull TransactionCallback theCallback) { + public T execute( + @Nullable RequestDetails theRequestDetails, + @Nullable TransactionDetails theTransactionDetails, + @Nonnull TransactionCallback theCallback) { return execute(theRequestDetails, theTransactionDetails, theCallback, null); } @@ -107,7 +117,12 @@ public class HapiTransactionService implements IHapiTransactionService { * @deprecated Use {@link #withRequest(RequestDetails)} with fluent call instead */ @Deprecated - public void execute(@Nullable RequestDetails theRequestDetails, @Nullable TransactionDetails theTransactionDetails, @Nonnull Propagation thePropagation, @Nonnull Isolation theIsolation, @Nonnull Runnable theCallback) { + public void execute( + @Nullable RequestDetails theRequestDetails, + @Nullable TransactionDetails theTransactionDetails, + @Nonnull Propagation thePropagation, + @Nonnull Isolation theIsolation, + @Nonnull Runnable theCallback) { TransactionCallbackWithoutResult callback = new TransactionCallbackWithoutResult() { @Override protected void doInTransactionWithoutResult(TransactionStatus status) { @@ -122,7 +137,12 @@ public class HapiTransactionService implements IHapiTransactionService { */ @Deprecated @Override - public T withRequest(@Nullable RequestDetails theRequestDetails, @Nullable TransactionDetails theTransactionDetails, @Nonnull Propagation thePropagation, @Nonnull Isolation theIsolation, @Nonnull ICallable theCallback) { + public T withRequest( + @Nullable RequestDetails theRequestDetails, + @Nullable TransactionDetails theTransactionDetails, + @Nonnull Propagation thePropagation, + @Nonnull Isolation theIsolation, + @Nonnull ICallable theCallback) { TransactionCallback callback = tx -> theCallback.call(); return execute(theRequestDetails, theTransactionDetails, callback, null, thePropagation, theIsolation); @@ -132,7 +152,11 @@ public class HapiTransactionService implements IHapiTransactionService { * @deprecated Use {@link #withRequest(RequestDetails)} with fluent call instead */ @Deprecated - public T 
execute(@Nullable RequestDetails theRequestDetails, @Nullable TransactionDetails theTransactionDetails, @Nonnull TransactionCallback theCallback, @Nullable Runnable theOnRollback) { + public T execute( + @Nullable RequestDetails theRequestDetails, + @Nullable TransactionDetails theTransactionDetails, + @Nonnull TransactionCallback theCallback, + @Nullable Runnable theOnRollback) { return execute(theRequestDetails, theTransactionDetails, theCallback, theOnRollback, null, null); } @@ -141,27 +165,40 @@ public class HapiTransactionService implements IHapiTransactionService { * @deprecated Use {@link #withRequest(RequestDetails)} with fluent call instead */ @Deprecated - public T execute(@Nullable RequestDetails theRequestDetails, @Nullable TransactionDetails theTransactionDetails, @Nonnull TransactionCallback theCallback, @Nullable Runnable theOnRollback, @Nullable Propagation thePropagation, @Nullable Isolation theIsolation) { + public T execute( + @Nullable RequestDetails theRequestDetails, + @Nullable TransactionDetails theTransactionDetails, + @Nonnull TransactionCallback theCallback, + @Nullable Runnable theOnRollback, + @Nullable Propagation thePropagation, + @Nullable Isolation theIsolation) { return withRequest(theRequestDetails) - .withTransactionDetails(theTransactionDetails) - .withPropagation(thePropagation) - .withIsolation(theIsolation) - .onRollback(theOnRollback) - .execute(theCallback); + .withTransactionDetails(theTransactionDetails) + .withPropagation(thePropagation) + .withIsolation(theIsolation) + .onRollback(theOnRollback) + .execute(theCallback); } /** * @deprecated Use {@link #withRequest(RequestDetails)} with fluent call instead */ @Deprecated - public T execute(@Nullable RequestDetails theRequestDetails, @Nullable TransactionDetails theTransactionDetails, @Nonnull TransactionCallback theCallback, @Nullable Runnable theOnRollback, @Nonnull Propagation thePropagation, @Nonnull Isolation theIsolation, RequestPartitionId theRequestPartitionId) { + public T execute( + @Nullable RequestDetails theRequestDetails, + @Nullable TransactionDetails theTransactionDetails, + @Nonnull TransactionCallback theCallback, + @Nullable Runnable theOnRollback, + @Nonnull Propagation thePropagation, + @Nonnull Isolation theIsolation, + RequestPartitionId theRequestPartitionId) { return withRequest(theRequestDetails) - .withTransactionDetails(theTransactionDetails) - .withPropagation(thePropagation) - .withIsolation(theIsolation) - .withRequestPartitionId(theRequestPartitionId) - .onRollback(theOnRollback) - .execute(theCallback); + .withTransactionDetails(theTransactionDetails) + .withPropagation(thePropagation) + .withIsolation(theIsolation) + .withRequestPartitionId(theRequestPartitionId) + .onRollback(theOnRollback) + .execute(theCallback); } public boolean isCustomIsolationSupported() { @@ -188,7 +225,8 @@ public class HapiTransactionService implements IHapiTransactionService { if (theExecutionBuilder.myRequestPartitionId != null) { requestPartitionId = theExecutionBuilder.myRequestPartitionId; } else if (theExecutionBuilder.myRequestDetails != null) { - requestPartitionId = myRequestPartitionHelperSvc.determineGenericPartitionForRequest(theExecutionBuilder.myRequestDetails); + requestPartitionId = myRequestPartitionHelperSvc.determineGenericPartitionForRequest( + theExecutionBuilder.myRequestDetails); } else { requestPartitionId = null; } @@ -208,20 +246,29 @@ public class HapiTransactionService implements IHapiTransactionService { return executeInExistingTransaction(theCallback); } } 
else if (myTransactionPropagationWhenChangingPartitions == Propagation.REQUIRES_NEW) { - return executeInNewTransactionForPartitionChange(theExecutionBuilder, theCallback, requestPartitionId, previousRequestPartitionId); + return executeInNewTransactionForPartitionChange( + theExecutionBuilder, theCallback, requestPartitionId, previousRequestPartitionId); } return doExecuteInTransaction(theExecutionBuilder, theCallback, requestPartitionId, previousRequestPartitionId); } @Nullable - private T executeInNewTransactionForPartitionChange(ExecutionBuilder theExecutionBuilder, TransactionCallback theCallback, RequestPartitionId requestPartitionId, RequestPartitionId previousRequestPartitionId) { + private T executeInNewTransactionForPartitionChange( + ExecutionBuilder theExecutionBuilder, + TransactionCallback theCallback, + RequestPartitionId requestPartitionId, + RequestPartitionId previousRequestPartitionId) { theExecutionBuilder.myPropagation = myTransactionPropagationWhenChangingPartitions; return doExecuteInTransaction(theExecutionBuilder, theCallback, requestPartitionId, previousRequestPartitionId); } @Nullable - private T doExecuteInTransaction(ExecutionBuilder theExecutionBuilder, TransactionCallback theCallback, RequestPartitionId requestPartitionId, RequestPartitionId previousRequestPartitionId) { + private T doExecuteInTransaction( + ExecutionBuilder theExecutionBuilder, + TransactionCallback theCallback, + RequestPartitionId requestPartitionId, + RequestPartitionId previousRequestPartitionId) { try { for (int i = 0; ; i++) { try { @@ -229,10 +276,11 @@ public class HapiTransactionService implements IHapiTransactionService { return doExecuteCallback(theExecutionBuilder, theCallback); } catch (Exception e) { - if (!(ExceptionUtils.indexOfThrowable(e, ResourceVersionConflictException.class) != -1 || - ExceptionUtils.indexOfThrowable(e, DataIntegrityViolationException.class) != -1 || - ExceptionUtils.indexOfThrowable(e, ConstraintViolationException.class) != -1 || - ExceptionUtils.indexOfThrowable(e, ObjectOptimisticLockingFailureException.class) != -1)) { + if (!(ExceptionUtils.indexOfThrowable(e, ResourceVersionConflictException.class) != -1 + || ExceptionUtils.indexOfThrowable(e, DataIntegrityViolationException.class) != -1 + || ExceptionUtils.indexOfThrowable(e, ConstraintViolationException.class) != -1 + || ExceptionUtils.indexOfThrowable(e, ObjectOptimisticLockingFailureException.class) + != -1)) { ourLog.debug("Unexpected transaction exception. 
Will not be retried.", e); throw e; } else { @@ -258,14 +306,16 @@ public class HapiTransactionService implements IHapiTransactionService { if (maxRetries == 0) { HookParams params = new HookParams() - .add(RequestDetails.class, theExecutionBuilder.myRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theExecutionBuilder.myRequestDetails); - ResourceVersionConflictResolutionStrategy conflictResolutionStrategy = (ResourceVersionConflictResolutionStrategy) CompositeInterceptorBroadcaster.doCallHooksAndReturnObject( - myInterceptorBroadcaster, - theExecutionBuilder.myRequestDetails, - Pointcut.STORAGE_VERSION_CONFLICT, - params - ); + .add(RequestDetails.class, theExecutionBuilder.myRequestDetails) + .addIfMatchesType( + ServletRequestDetails.class, theExecutionBuilder.myRequestDetails); + ResourceVersionConflictResolutionStrategy conflictResolutionStrategy = + (ResourceVersionConflictResolutionStrategy) + CompositeInterceptorBroadcaster.doCallHooksAndReturnObject( + myInterceptorBroadcaster, + theExecutionBuilder.myRequestDetails, + Pointcut.STORAGE_VERSION_CONFLICT, + params); if (conflictResolutionStrategy != null && conflictResolutionStrategy.isRetry()) { maxRetries = conflictResolutionStrategy.getMaxRetries(); } @@ -273,17 +323,24 @@ public class HapiTransactionService implements IHapiTransactionService { if (i < maxRetries) { if (theExecutionBuilder.myTransactionDetails != null) { - theExecutionBuilder.myTransactionDetails.getRollbackUndoActions().forEach(Runnable::run); + theExecutionBuilder + .myTransactionDetails + .getRollbackUndoActions() + .forEach(Runnable::run); theExecutionBuilder.myTransactionDetails.clearRollbackUndoActions(); theExecutionBuilder.myTransactionDetails.clearResolvedItems(); - theExecutionBuilder.myTransactionDetails.clearUserData(XACT_USERDATA_KEY_RESOLVED_TAG_DEFINITIONS); - theExecutionBuilder.myTransactionDetails.clearUserData(XACT_USERDATA_KEY_EXISTING_SEARCH_PARAMS); + theExecutionBuilder.myTransactionDetails.clearUserData( + XACT_USERDATA_KEY_RESOLVED_TAG_DEFINITIONS); + theExecutionBuilder.myTransactionDetails.clearUserData( + XACT_USERDATA_KEY_EXISTING_SEARCH_PARAMS); } double sleepAmount = (250.0d * i) * Math.random(); long sleepAmountLong = (long) sleepAmount; TestUtil.sleepAtLeast(sleepAmountLong, false); - ourLog.info("About to start a transaction retry due to conflict or constraint error. Sleeping {}ms first.", sleepAmountLong); + ourLog.info( + "About to start a transaction retry due to conflict or constraint error. 
Sleeping {}ms first.", + sleepAmountLong); continue; } @@ -293,7 +350,8 @@ public class HapiTransactionService implements IHapiTransactionService { } if (maxRetries > 0) { - String msg = "Max retries (" + maxRetries + ") exceeded for version conflict: " + e.getMessage(); + String msg = + "Max retries (" + maxRetries + ") exceeded for version conflict: " + e.getMessage(); ourLog.info(msg, maxRetries); throw new ResourceVersionConflictException(Msg.code(549) + msg); } @@ -309,7 +367,8 @@ public class HapiTransactionService implements IHapiTransactionService { } } - public void setTransactionPropagationWhenChangingPartitions(Propagation theTransactionPropagationWhenChangingPartitions) { + public void setTransactionPropagationWhenChangingPartitions( + Propagation theTransactionPropagationWhenChangingPartitions) { Validate.notNull(theTransactionPropagationWhenChangingPartitions); myTransactionPropagationWhenChangingPartitions = theTransactionPropagationWhenChangingPartitions; } @@ -323,7 +382,9 @@ public class HapiTransactionService implements IHapiTransactionService { txTemplate.setPropagationBehavior(theExecutionBuilder.myPropagation.value()); } - if (isCustomIsolationSupported() && theExecutionBuilder.myIsolation != null && theExecutionBuilder.myIsolation != Isolation.DEFAULT) { + if (isCustomIsolationSupported() + && theExecutionBuilder.myIsolation != null + && theExecutionBuilder.myIsolation != Isolation.DEFAULT) { txTemplate.setIsolationLevel(theExecutionBuilder.myIsolation.value()); } @@ -417,7 +478,6 @@ public class HapiTransactionService implements IHapiTransactionService { return doExecute(this, callback); } - } /** @@ -438,8 +498,9 @@ public class HapiTransactionService implements IHapiTransactionService { */ private static boolean canReuseExistingTransaction(ExecutionBuilder theExecutionBuilder) { return TransactionSynchronizationManager.isActualTransactionActive() - && (!TransactionSynchronizationManager.isCurrentTransactionReadOnly() || theExecutionBuilder.myReadOnly) - && (theExecutionBuilder.myPropagation == null || theExecutionBuilder.myPropagation == Propagation.REQUIRED); + && (!TransactionSynchronizationManager.isCurrentTransactionReadOnly() || theExecutionBuilder.myReadOnly) + && (theExecutionBuilder.myPropagation == null + || theExecutionBuilder.myPropagation == Propagation.REQUIRED); } @Nullable @@ -480,13 +541,17 @@ public class HapiTransactionService implements IHapiTransactionService { * Throws an {@link IllegalArgumentException} if a transaction is active */ public static void noTransactionAllowed() { - Validate.isTrue(!TransactionSynchronizationManager.isActualTransactionActive(), "Transaction must not be active but found an active transaction"); + Validate.isTrue( + !TransactionSynchronizationManager.isActualTransactionActive(), + "Transaction must not be active but found an active transaction"); } /** * Throws an {@link IllegalArgumentException} if no transaction is active */ public static void requireTransaction() { - Validate.isTrue(TransactionSynchronizationManager.isActualTransactionActive(), "Transaction required here but no active transaction found"); + Validate.isTrue( + TransactionSynchronizationManager.isActualTransactionActive(), + "Transaction required here but no active transaction found"); } } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/tx/IHapiTransactionService.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/tx/IHapiTransactionService.java index f4362d7577c..6a3ca18f3b3 100644 --- 
a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/tx/IHapiTransactionService.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/tx/IHapiTransactionService.java @@ -27,9 +27,9 @@ import org.springframework.transaction.annotation.Isolation; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.support.TransactionCallback; +import java.util.concurrent.Callable; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.concurrent.Callable; /** * This class is used to execute code within the context of a database transaction, @@ -66,16 +66,19 @@ public interface IHapiTransactionService { * @since 6.6.0 */ default IExecutionBuilder withSystemRequestOnPartition(RequestPartitionId theRequestPartitionId) { - return withSystemRequest() - .withRequestPartitionId(theRequestPartitionId); + return withSystemRequest().withRequestPartitionId(theRequestPartitionId); } /** * @deprecated It is highly recommended to use {@link #withRequest(RequestDetails)} instead of this method, for increased visibility. */ @Deprecated - T withRequest(@Nullable RequestDetails theRequestDetails, @Nullable TransactionDetails theTransactionDetails, @Nonnull Propagation thePropagation, @Nonnull Isolation theIsolation, @Nonnull ICallable theCallback); - + T withRequest( + @Nullable RequestDetails theRequestDetails, + @Nullable TransactionDetails theTransactionDetails, + @Nonnull Propagation thePropagation, + @Nonnull Isolation theIsolation, + @Nonnull ICallable theCallback); interface IExecutionBuilder { diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/validation/SearchParameterDaoValidator.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/validation/SearchParameterDaoValidator.java index af06c73c38d..f0d75caea5b 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/validation/SearchParameterDaoValidator.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/validation/SearchParameterDaoValidator.java @@ -56,7 +56,10 @@ public class SearchParameterDaoValidator { private final JpaStorageSettings myStorageSettings; private final ISearchParamRegistry mySearchParamRegistry; - public SearchParameterDaoValidator(FhirContext theContext, JpaStorageSettings theStorageSettings, ISearchParamRegistry theSearchParamRegistry) { + public SearchParameterDaoValidator( + FhirContext theContext, + JpaStorageSettings theStorageSettings, + ISearchParamRegistry theSearchParamRegistry) { myFhirContext = theContext; myStorageSettings = theStorageSettings; mySearchParamRegistry = theSearchParamRegistry; @@ -70,12 +73,15 @@ public class SearchParameterDaoValidator { if (myStorageSettings.isDefaultSearchParamsCanBeOverridden() == false) { for (IPrimitiveType nextBaseType : searchParameter.getBase()) { String nextBase = nextBaseType.getValueAsString(); - RuntimeSearchParam existingSearchParam = mySearchParamRegistry.getActiveSearchParam(nextBase, searchParameter.getCode()); + RuntimeSearchParam existingSearchParam = + mySearchParamRegistry.getActiveSearchParam(nextBase, searchParameter.getCode()); if (existingSearchParam != null) { boolean isBuiltIn = existingSearchParam.getId() == null; isBuiltIn |= existingSearchParam.getUri().startsWith("http://hl7.org/fhir/SearchParameter/"); if (isBuiltIn) { - throw new UnprocessableEntityException(Msg.code(1111) + "Can not override built-in search parameter " + nextBase + ":" + searchParameter.getCode() + " because overriding is disabled on this server"); + throw new 
UnprocessableEntityException( + Msg.code(1111) + "Can not override built-in search parameter " + nextBase + ":" + + searchParameter.getCode() + " because overriding is disabled on this server"); } } } @@ -118,14 +124,15 @@ public class SearchParameterDaoValidator { } private boolean isCompositeSp(SearchParameter theSearchParameter) { - return theSearchParameter.getType() != null && theSearchParameter.getType().equals(Enumerations.SearchParamType.COMPOSITE); + return theSearchParameter.getType() != null + && theSearchParameter.getType().equals(Enumerations.SearchParamType.COMPOSITE); } private boolean isCompositeWithoutBase(SearchParameter searchParameter) { - return - ElementUtil.isEmpty(searchParameter.getBase()) && - ElementUtil.isEmpty(searchParameter.getExtensionsByUrl(HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE)) && - !isCompositeSp(searchParameter); + return ElementUtil.isEmpty(searchParameter.getBase()) + && ElementUtil.isEmpty( + searchParameter.getExtensionsByUrl(HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE)) + && !isCompositeSp(searchParameter); } private boolean isCompositeWithoutExpression(SearchParameter searchParameter) { @@ -143,11 +150,13 @@ public class SearchParameterDaoValidator { private void maybeValidateCompositeSpForUniqueIndexing(SearchParameter theSearchParameter) { if (isCompositeSpForUniqueIndexing(theSearchParameter)) { if (!theSearchParameter.hasComponent()) { - throw new UnprocessableEntityException(Msg.code(1115) + "SearchParameter is marked as unique but has no components"); + throw new UnprocessableEntityException( + Msg.code(1115) + "SearchParameter is marked as unique but has no components"); } for (SearchParameter.SearchParameterComponentComponent next : theSearchParameter.getComponent()) { if (isBlank(next.getDefinition())) { - throw new UnprocessableEntityException(Msg.code(1116) + "SearchParameter is marked as unique but is missing component.definition"); + throw new UnprocessableEntityException( + Msg.code(1116) + "SearchParameter is marked as unique but is missing component.definition"); } } } @@ -165,12 +174,14 @@ public class SearchParameterDaoValidator { boolean isResourceOfTypeComposite = theSearchParameter.getType() == Enumerations.SearchParamType.COMPOSITE; boolean isResourceOfTypeSpecial = theSearchParameter.getType() == Enumerations.SearchParamType.SPECIAL; - boolean expressionHasPath = REGEX_SP_EXPRESSION_HAS_PATH.matcher(expression).matches(); + boolean expressionHasPath = + REGEX_SP_EXPRESSION_HAS_PATH.matcher(expression).matches(); boolean isUnique = hasAnyExtensionUniqueSetTo(theSearchParameter, true); if (!isUnique && !isResourceOfTypeComposite && !isResourceOfTypeSpecial && !expressionHasPath) { - throw new UnprocessableEntityException(Msg.code(1120) + "SearchParameter.expression value \"" + expression + "\" is invalid due to missing/incorrect path"); + throw new UnprocessableEntityException(Msg.code(1120) + "SearchParameter.expression value \"" + expression + + "\" is invalid due to missing/incorrect path"); } } @@ -180,7 +191,9 @@ public class SearchParameterDaoValidator { try { myFhirContext.newFhirPath().parse(expression); } catch (Exception exception) { - throw new UnprocessableEntityException(Msg.code(1121) + "Invalid FHIRPath format for SearchParameter.expression \"" + expression + "\": " + exception.getMessage()); + throw new UnprocessableEntityException( + Msg.code(1121) + "Invalid FHIRPath format for SearchParameter.expression \"" + expression + "\": " + + exception.getMessage()); } } @@ -191,10 
+204,8 @@ public class SearchParameterDaoValidator { private boolean hasAnyExtensionUniqueSetTo(SearchParameter theSearchParameter, boolean theValue) { String theValueAsString = Boolean.toString(theValue); - return theSearchParameter - .getExtensionsByUrl(HapiExtensions.EXT_SP_UNIQUE) - .stream() - .anyMatch(t -> theValueAsString.equals(t.getValueAsPrimitive().getValueAsString())); + return theSearchParameter.getExtensionsByUrl(HapiExtensions.EXT_SP_UNIQUE).stream() + .anyMatch(t -> theValueAsString.equals(t.getValueAsPrimitive().getValueAsString())); } private void maybeValidateCompositeWithComponent(SearchParameter theSearchParameter) { @@ -205,20 +216,25 @@ public class SearchParameterDaoValidator { private void validateCompositeSearchParameterComponents(SearchParameter theSearchParameter) { theSearchParameter.getComponent().stream() - .filter(SearchParameter.SearchParameterComponentComponent::hasDefinition) - .map(SearchParameter.SearchParameterComponentComponent::getDefinition) - .filter(Objects::nonNull) - .map(mySearchParamRegistry::getActiveSearchParamByUrl) - .filter(Objects::nonNull) - .forEach(theRuntimeSp -> validateComponentSpTypeAgainstWhiteList(theRuntimeSp, getAllowedSearchParameterTypes(theSearchParameter))); + .filter(SearchParameter.SearchParameterComponentComponent::hasDefinition) + .map(SearchParameter.SearchParameterComponentComponent::getDefinition) + .filter(Objects::nonNull) + .map(mySearchParamRegistry::getActiveSearchParamByUrl) + .filter(Objects::nonNull) + .forEach(theRuntimeSp -> validateComponentSpTypeAgainstWhiteList( + theRuntimeSp, getAllowedSearchParameterTypes(theSearchParameter))); } - private void validateComponentSpTypeAgainstWhiteList(RuntimeSearchParam theRuntimeSearchParam, - Collection theAllowedSearchParamTypes) { + private void validateComponentSpTypeAgainstWhiteList( + RuntimeSearchParam theRuntimeSearchParam, + Collection theAllowedSearchParamTypes) { if (!theAllowedSearchParamTypes.contains(theRuntimeSearchParam.getParamType())) { - throw new UnprocessableEntityException(String.format("%sInvalid component search parameter type: %s in component.definition: %s, supported types: %s", - Msg.code(2347), theRuntimeSearchParam.getParamType().name(), theRuntimeSearchParam.getUri(), - theAllowedSearchParamTypes.stream().map(Enum::name).collect(Collectors.joining(", ")))); + throw new UnprocessableEntityException(String.format( + "%sInvalid component search parameter type: %s in component.definition: %s, supported types: %s", + Msg.code(2347), + theRuntimeSearchParam.getParamType().name(), + theRuntimeSearchParam.getUri(), + theAllowedSearchParamTypes.stream().map(Enum::name).collect(Collectors.joining(", ")))); } } @@ -240,12 +256,13 @@ public class SearchParameterDaoValidator { // combo unique search parameter if (hasAnyExtensionUniqueSetTo(theSearchParameter, true)) { return Set.of(STRING, TOKEN, DATE, QUANTITY, URI, NUMBER, REFERENCE); - // combo non-unique search parameter or composite Search Parameter with HSearch indexing - } else if (hasAnyExtensionUniqueSetTo(theSearchParameter, false) || // combo non-unique search parameter - myStorageSettings.isAdvancedHSearchIndexing()) { // composite Search Parameter with HSearch indexing + // combo non-unique search parameter or composite Search Parameter with HSearch indexing + } else if (hasAnyExtensionUniqueSetTo(theSearchParameter, false) + || // combo non-unique search parameter + myStorageSettings.isAdvancedHSearchIndexing()) { // composite Search Parameter with HSearch indexing return 
Set.of(STRING, TOKEN, DATE, QUANTITY, URI, NUMBER); } else { // composite Search Parameter (JPA only) - return Set.of(STRING, TOKEN, DATE, QUANTITY); + return Set.of(STRING, TOKEN, DATE, QUANTITY); } } } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/delete/DeleteConflictUtil.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/delete/DeleteConflictUtil.java index cb5a4964d46..75a40f0a003 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/delete/DeleteConflictUtil.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/delete/DeleteConflictUtil.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.jpa.delete; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.api.model.DeleteConflict; import ca.uhn.fhir.jpa.api.model.DeleteConflictList; import ca.uhn.fhir.jpa.dao.BaseStorageDao; @@ -29,10 +29,10 @@ import ca.uhn.fhir.util.OperationOutcomeUtil; import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; public final class DeleteConflictUtil { - private DeleteConflictUtil() { - } + private DeleteConflictUtil() {} - public static void validateDeleteConflictsEmptyOrThrowException(FhirContext theFhirContext, DeleteConflictList theDeleteConflicts) { + public static void validateDeleteConflictsEmptyOrThrowException( + FhirContext theFhirContext, DeleteConflictList theDeleteConflicts) { IBaseOperationOutcome oo = null; String firstMsg = null; @@ -42,18 +42,19 @@ public final class DeleteConflictUtil { continue; } - String msg = "Unable to delete " + - next.getTargetId().toUnqualifiedVersionless().getValue() + - " because at least one resource has a reference to this resource. First reference found was resource " + - next.getSourceId().toUnqualifiedVersionless().getValue() + - " in path " + - next.getSourcePath(); + String msg = "Unable to delete " + + next.getTargetId().toUnqualifiedVersionless().getValue() + + " because at least one resource has a reference to this resource. 
First reference found was resource " + + next.getSourceId().toUnqualifiedVersionless().getValue() + + " in path " + + next.getSourcePath(); if (firstMsg == null) { firstMsg = msg; oo = OperationOutcomeUtil.newInstance(theFhirContext); } - OperationOutcomeUtil.addIssue(theFhirContext, oo, BaseStorageDao.OO_SEVERITY_ERROR, msg, null, "processing"); + OperationOutcomeUtil.addIssue( + theFhirContext, oo, BaseStorageDao.OO_SEVERITY_ERROR, msg, null, "processing"); } if (firstMsg == null) { diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/graphql/DaoRegistryGraphQLStorageServices.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/graphql/DaoRegistryGraphQLStorageServices.java index 33c02a6528f..529aef2a592 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/graphql/DaoRegistryGraphQLStorageServices.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/graphql/DaoRegistryGraphQLStorageServices.java @@ -65,8 +65,6 @@ import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.utilities.graphql.Argument; import org.hl7.fhir.utilities.graphql.IGraphQLStorageServices; import org.hl7.fhir.utilities.graphql.Value; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; @@ -87,16 +85,22 @@ public class DaoRegistryGraphQLStorageServices implements IGraphQLStorageService static final String SEARCH_OFFSET_PARAM = "search-offset"; private static final int MAX_SEARCH_SIZE = 500; + @Autowired private FhirContext myContext; + @Autowired private DaoRegistry myDaoRegistry; + @Autowired private ISearchParamRegistry mySearchParamRegistry; + @Autowired protected ISearchCoordinatorSvc mySearchCoordinatorSvc; + @Autowired private IRequestPartitionHelperSvc myPartitionHelperSvc; + @Autowired private IPagingProvider myPagingProvider; @@ -119,8 +123,8 @@ public class DaoRegistryGraphQLStorageServices implements IGraphQLStorageService private SearchParameterMap buildSearchParams(String theType, List theSearchParams) { List resourceSearchParam = theSearchParams.stream() - .filter(it -> !PARAM_COUNT.equals(it.getName())) - .collect(Collectors.toList()); + .filter(it -> !PARAM_COUNT.equals(it.getName())) + .collect(Collectors.toList()); FhirContext fhirContext = myContext; RuntimeResourceDefinition typeDef = fhirContext.getResourceDefinition(theType); @@ -140,9 +144,15 @@ public class DaoRegistryGraphQLStorageServices implements IGraphQLStorageService RuntimeSearchParam searchParam = searchParams.get(searchParamName); if (searchParam == null) { Set graphqlArguments = searchParams.getSearchParamNames().stream() - .map(this::searchParamToGraphqlArgument) - .collect(Collectors.toSet()); - String msg = myContext.getLocalizer().getMessageSanitized(DaoRegistryGraphQLStorageServices.class, "invalidGraphqlArgument", nextArgument.getName(), new TreeSet<>(graphqlArguments)); + .map(this::searchParamToGraphqlArgument) + .collect(Collectors.toSet()); + String msg = myContext + .getLocalizer() + .getMessageSanitized( + DaoRegistryGraphQLStorageServices.class, + "invalidGraphqlArgument", + nextArgument.getName(), + new TreeSet<>(graphqlArguments)); throw new InvalidRequestException(Msg.code(1275) + msg); } @@ -204,7 +214,9 @@ public class DaoRegistryGraphQLStorageServices implements IGraphQLStorageService case URI: case HAS: default: - throw new InvalidRequestException(Msg.code(1276) + String.format("%s 
parameters are not yet supported in GraphQL", searchParam.getParamType())); + throw new InvalidRequestException(Msg.code(1276) + + String.format( + "%s parameters are not yet supported in GraphQL", searchParam.getParamType())); } params.add(searchParamName, queryParam); @@ -215,17 +227,19 @@ public class DaoRegistryGraphQLStorageServices implements IGraphQLStorageService @Transactional(propagation = Propagation.NEVER) @Override - public void listResources(Object theAppInfo, String theType, List theSearchParams, List theMatches) throws FHIRException { + public void listResources( + Object theAppInfo, String theType, List theSearchParams, List theMatches) + throws FHIRException { SearchParameterMap params = buildSearchParams(theType, theSearchParams); params.setLoadSynchronousUpTo(MAX_SEARCH_SIZE); RequestDetails requestDetails = (RequestDetails) theAppInfo; IBundleProvider response = getDao(theType).search(params, requestDetails); Integer size = response.size(); - //We set size to null in SearchCoordinatorSvcImpl.executeQuery() if matching results exceeds count - //so don't throw here - if ((response.preferredPageSize() != null && size != null && response.preferredPageSize() < size) || - size == null) { + // We set size to null in SearchCoordinatorSvcImpl.executeQuery() if matching results exceeds count + // so don't throw here + if ((response.preferredPageSize() != null && size != null && response.preferredPageSize() < size) + || size == null) { size = response.preferredPageSize(); } @@ -249,7 +263,8 @@ public class DaoRegistryGraphQLStorageServices implements IGraphQLStorageService @Transactional(propagation = Propagation.REQUIRED) @Override - public ReferenceResolution lookup(Object theAppInfo, IBaseResource theContext, IBaseReference theReference) throws FHIRException { + public ReferenceResolution lookup(Object theAppInfo, IBaseResource theContext, IBaseReference theReference) + throws FHIRException { IBaseResource outcome = lookup(theAppInfo, theReference.getReferenceElement()); if (outcome == null) { return null; @@ -259,9 +274,9 @@ public class DaoRegistryGraphQLStorageServices implements IGraphQLStorageService private Optional getArgument(List params, String name) { return params.stream() - .filter(it -> name.equals(it.getName())) - .map(it -> it.getValues().get(0).getValue()) - .findAny(); + .filter(it -> name.equals(it.getName())) + .map(it -> it.getValues().get(0).getValue()) + .findAny(); } @Transactional(propagation = Propagation.NEVER) @@ -281,16 +296,23 @@ public class DaoRegistryGraphQLStorageServices implements IGraphQLStorageService searchId = searchIdArgument.get(); searchOffset = Integer.parseInt(searchOffsetArgument.get()); - response = Optional.ofNullable(myPagingProvider.retrieveResultList(requestDetails, searchId)).orElseThrow(()->{ - String msg = myContext.getLocalizer().getMessageSanitized(DaoRegistryGraphQLStorageServices.class, "invalidGraphqlCursorArgument", searchId); - return new InvalidRequestException(Msg.code(2076) + msg); - }); + response = Optional.ofNullable(myPagingProvider.retrieveResultList(requestDetails, searchId)) + .orElseThrow(() -> { + String msg = myContext + .getLocalizer() + .getMessageSanitized( + DaoRegistryGraphQLStorageServices.class, + "invalidGraphqlCursorArgument", + searchId); + return new InvalidRequestException(Msg.code(2076) + msg); + }); - pageSize = Optional.ofNullable(response.preferredPageSize()) - .orElseGet(myPagingProvider::getDefaultPageSize); + pageSize = + 
Optional.ofNullable(response.preferredPageSize()).orElseGet(myPagingProvider::getDefaultPageSize); } else { - pageSize = getArgument(theSearchParams, "_count").map(Integer::parseInt) - .orElseGet(myPagingProvider::getDefaultPageSize); + pageSize = getArgument(theSearchParams, "_count") + .map(Integer::parseInt) + .orElseGet(myPagingProvider::getDefaultPageSize); SearchParameterMap params = buildSearchParams(theType, theSearchParams); params.setCount(pageSize); @@ -298,20 +320,27 @@ public class DaoRegistryGraphQLStorageServices implements IGraphQLStorageService CacheControlDirective cacheControlDirective = new CacheControlDirective(); cacheControlDirective.parse(requestDetails.getHeaders(Constants.HEADER_CACHE_CONTROL)); - RequestPartitionId requestPartitionId = myPartitionHelperSvc.determineReadPartitionForRequestForSearchType(requestDetails, theType, params, null); - response = mySearchCoordinatorSvc.registerSearch(getDao(theType), params, theType, cacheControlDirective, requestDetails, requestPartitionId); + RequestPartitionId requestPartitionId = myPartitionHelperSvc.determineReadPartitionForRequestForSearchType( + requestDetails, theType, params, null); + response = mySearchCoordinatorSvc.registerSearch( + getDao(theType), params, theType, cacheControlDirective, requestDetails, requestPartitionId); searchOffset = 0; searchId = myPagingProvider.storeResultList(requestDetails, response); } - // response.size() may return {@literal null}, in that case use pageSize String serverBase = requestDetails.getFhirServerBase(); Optional numTotalResults = Optional.ofNullable(response.size()); - int numToReturn = numTotalResults.map(integer -> Math.min(pageSize, integer - searchOffset)).orElse(pageSize); + int numToReturn = numTotalResults + .map(integer -> Math.min(pageSize, integer - searchOffset)) + .orElse(pageSize); - BundleLinks links = new BundleLinks(requestDetails.getServerBaseForRequest(), null, RestfulServerUtils.prettyPrintResponse(requestDetails.getServer(), requestDetails), BundleTypeEnum.SEARCHSET); + BundleLinks links = new BundleLinks( + requestDetails.getServerBaseForRequest(), + null, + RestfulServerUtils.prettyPrintResponse(requestDetails.getServer(), requestDetails), + BundleTypeEnum.SEARCHSET); // RestfulServerUtils.createLinkSelf not suitable here String linkFormat = "%s/%s?_format=application/json&search-id=%s&search-offset=%d&_count=%d"; @@ -319,15 +348,19 @@ public class DaoRegistryGraphQLStorageServices implements IGraphQLStorageService String linkSelf = String.format(linkFormat, serverBase, theType, searchId, searchOffset, pageSize); links.setSelf(linkSelf); - boolean hasNext = numTotalResults.map(total -> (searchOffset + numToReturn) < total).orElse(true); + boolean hasNext = numTotalResults + .map(total -> (searchOffset + numToReturn) < total) + .orElse(true); if (hasNext) { - String linkNext = String.format(linkFormat, serverBase, theType, searchId, searchOffset+numToReturn, pageSize); + String linkNext = + String.format(linkFormat, serverBase, theType, searchId, searchOffset + numToReturn, pageSize); links.setNext(linkNext); } if (searchOffset > 0) { - String linkPrev = String.format(linkFormat, serverBase, theType, searchId, Math.max(0, searchOffset-pageSize), pageSize); + String linkPrev = String.format( + linkFormat, serverBase, theType, searchId, Math.max(0, searchOffset - pageSize), pageSize); links.setPrev(linkPrev); } @@ -340,5 +373,4 @@ public class DaoRegistryGraphQLStorageServices implements IGraphQLStorageService IBaseResource result = 
bundleFactory.getResourceBundle(); return (IBaseBundle) result; } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/graphql/GraphQLProvider.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/graphql/GraphQLProvider.java index 3e0abc1f75f..4136e40f846 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/graphql/GraphQLProvider.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/graphql/GraphQLProvider.java @@ -19,12 +19,12 @@ */ package ca.uhn.fhir.jpa.graphql; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.context.support.DefaultProfileValidationSupport; import ca.uhn.fhir.context.support.IValidationSupport; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.annotation.Description; import ca.uhn.fhir.rest.annotation.GraphQL; import ca.uhn.fhir.rest.annotation.GraphQLQueryBody; @@ -50,9 +50,9 @@ import org.hl7.fhir.utilities.graphql.Parser; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.function.Supplier; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.function.Supplier; public class GraphQLProvider { private static final Logger ourLog = LoggerFactory.getLogger(GraphQLProvider.class); @@ -76,36 +76,47 @@ public class GraphQLProvider { * @param theValidationSupport The HAPI Validation Support object, or null * @param theStorageServices The storage services (this object will be used to retrieve various resources as required by the GraphQL engine) */ - public GraphQLProvider(@Nonnull FhirContext theFhirContext, @Nullable IValidationSupport theValidationSupport, @Nonnull IGraphQLStorageServices theStorageServices) { + public GraphQLProvider( + @Nonnull FhirContext theFhirContext, + @Nullable IValidationSupport theValidationSupport, + @Nonnull IGraphQLStorageServices theStorageServices) { Validate.notNull(theFhirContext, "theFhirContext must not be null"); Validate.notNull(theStorageServices, "theStorageServices must not be null"); switch (theFhirContext.getVersion().getVersion()) { case DSTU3: { IValidationSupport validationSupport = theValidationSupport; - validationSupport = ObjectUtils.defaultIfNull(validationSupport, new DefaultProfileValidationSupport(theFhirContext)); - org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext workerContext = new org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext(theFhirContext, validationSupport); + validationSupport = ObjectUtils.defaultIfNull( + validationSupport, new DefaultProfileValidationSupport(theFhirContext)); + org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext workerContext = + new org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext(theFhirContext, validationSupport); myEngineFactory = () -> new org.hl7.fhir.dstu3.utils.GraphQLEngine(workerContext); break; } case R4: { IValidationSupport validationSupport = theValidationSupport; - validationSupport = ObjectUtils.defaultIfNull(validationSupport, new DefaultProfileValidationSupport(theFhirContext)); - org.hl7.fhir.r4.hapi.ctx.HapiWorkerContext workerContext = new org.hl7.fhir.r4.hapi.ctx.HapiWorkerContext(theFhirContext, validationSupport); + validationSupport = ObjectUtils.defaultIfNull( + validationSupport, new DefaultProfileValidationSupport(theFhirContext)); + org.hl7.fhir.r4.hapi.ctx.HapiWorkerContext workerContext = + new org.hl7.fhir.r4.hapi.ctx.HapiWorkerContext(theFhirContext, validationSupport); myEngineFactory = () -> new org.hl7.fhir.r4.utils.GraphQLEngine(workerContext); 
break; } case R4B: { IValidationSupport validationSupport = theValidationSupport; - validationSupport = ObjectUtils.defaultIfNull(validationSupport, new DefaultProfileValidationSupport(theFhirContext)); - org.hl7.fhir.r4b.hapi.ctx.HapiWorkerContext workerContext = new org.hl7.fhir.r4b.hapi.ctx.HapiWorkerContext(theFhirContext, validationSupport); + validationSupport = ObjectUtils.defaultIfNull( + validationSupport, new DefaultProfileValidationSupport(theFhirContext)); + org.hl7.fhir.r4b.hapi.ctx.HapiWorkerContext workerContext = + new org.hl7.fhir.r4b.hapi.ctx.HapiWorkerContext(theFhirContext, validationSupport); myEngineFactory = () -> new org.hl7.fhir.r4b.utils.GraphQLEngine(workerContext); break; } case R5: { IValidationSupport validationSupport = theValidationSupport; - validationSupport = ObjectUtils.defaultIfNull(validationSupport, new DefaultProfileValidationSupport(theFhirContext)); - org.hl7.fhir.r5.hapi.ctx.HapiWorkerContext workerContext = new org.hl7.fhir.r5.hapi.ctx.HapiWorkerContext(theFhirContext, validationSupport); + validationSupport = ObjectUtils.defaultIfNull( + validationSupport, new DefaultProfileValidationSupport(theFhirContext)); + org.hl7.fhir.r5.hapi.ctx.HapiWorkerContext workerContext = + new org.hl7.fhir.r5.hapi.ctx.HapiWorkerContext(theFhirContext, validationSupport); myEngineFactory = () -> new org.hl7.fhir.r5.utils.GraphQLEngine(workerContext); break; } @@ -113,25 +124,35 @@ public class GraphQLProvider { case DSTU2_HL7ORG: case DSTU2_1: default: { - throw new UnsupportedOperationException(Msg.code(1143) + "GraphQL not supported for version: " + theFhirContext.getVersion().getVersion()); + throw new UnsupportedOperationException(Msg.code(1143) + "GraphQL not supported for version: " + + theFhirContext.getVersion().getVersion()); } } myStorageServices = theStorageServices; } - @Description(value = "This operation invokes a GraphQL expression for fetching an joining a graph of resources, returning them in a custom format.") + @Description( + value = + "This operation invokes a GraphQL expression for fetching an joining a graph of resources, returning them in a custom format.") @GraphQL(type = RequestTypeEnum.GET) - public String processGraphQlGetRequest(ServletRequestDetails theRequestDetails, @IdParam IIdType theId, @GraphQLQueryUrl String theQueryUrl) { + public String processGraphQlGetRequest( + ServletRequestDetails theRequestDetails, @IdParam IIdType theId, @GraphQLQueryUrl String theQueryUrl) { if (theQueryUrl != null) { return processGraphQLRequest(theRequestDetails, theId, theQueryUrl); } throw new InvalidRequestException(Msg.code(1144) + "Unable to parse empty GraphQL expression"); } - @Description(value = "This operation invokes a GraphQL expression for fetching an joining a graph of resources, returning them in a custom format.") + @Description( + value = + "This operation invokes a GraphQL expression for fetching an joining a graph of resources, returning them in a custom format.") @GraphQL(type = RequestTypeEnum.POST) - public String processGraphQlPostRequest(ServletRequestDetails theServletRequestDetails, RequestDetails theRequestDetails, @IdParam IIdType theId, @GraphQLQueryBody String theQueryBody) { + public String processGraphQlPostRequest( + ServletRequestDetails theServletRequestDetails, + RequestDetails theRequestDetails, + @IdParam IIdType theId, + @GraphQLQueryBody String theQueryBody) { if (theQueryBody != null) { return processGraphQLRequest(theServletRequestDetails, theId, theQueryBody); } @@ -149,7 +170,8 @@ public class 
GraphQLProvider { return processGraphQLRequest(theRequestDetails, theId, parsedGraphQLRequest); } - protected String processGraphQLRequest(ServletRequestDetails theRequestDetails, IIdType theId, Package parsedGraphQLRequest) { + protected String processGraphQLRequest( + ServletRequestDetails theRequestDetails, IIdType theId, Package parsedGraphQLRequest) { IGraphQLEngine engine = myEngineFactory.get(); engine.setAppInfo(theRequestDetails); engine.setServices(myStorageServices); @@ -158,7 +180,8 @@ public class GraphQLProvider { try { if (theId != null) { - IBaseResource focus = myStorageServices.lookup(theRequestDetails, theId.getResourceType(), theId.getIdPart()); + IBaseResource focus = + myStorageServices.lookup(theRequestDetails, theId.getResourceType(), theId.getIdPart()); engine.setFocus(focus); } engine.execute(); @@ -191,10 +214,9 @@ public class GraphQLProvider { public void initialize(RestfulServer theServer) { ourLog.trace("Initializing GraphQL provider"); if (!theServer.getFhirContext().getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3)) { - throw new ConfigurationException(Msg.code(1148) + "Can not use " + getClass().getName() + " provider on server with FHIR " + theServer.getFhirContext().getVersion().getVersion().name() + " context"); + throw new ConfigurationException(Msg.code(1148) + "Can not use " + + getClass().getName() + " provider on server with FHIR " + + theServer.getFhirContext().getVersion().getVersion().name() + " context"); } } - - } - diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptor.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptor.java index 73a4f52cb8f..3cd57cb7c26 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptor.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptor.java @@ -43,10 +43,10 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.r4.model.IdType; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nonnull; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -78,7 +78,10 @@ public class PatientIdPartitionInterceptor { /** * Constructor */ - public PatientIdPartitionInterceptor(FhirContext theFhirContext, ISearchParamExtractor theSearchParamExtractor, PartitionSettings thePartitionSettings) { + public PatientIdPartitionInterceptor( + FhirContext theFhirContext, + ISearchParamExtractor theSearchParamExtractor, + PartitionSettings thePartitionSettings) { this(); myFhirContext = theFhirContext; mySearchParamExtractor = theSearchParamExtractor; @@ -97,34 +100,36 @@ public class PatientIdPartitionInterceptor { if (resourceDef.getName().equals("Patient")) { compartmentIdentity = theResource.getIdElement().getIdPart(); if (isBlank(compartmentIdentity)) { - throw new MethodNotAllowedException(Msg.code(1321) + "Patient resource IDs must be client-assigned in patient compartment mode"); + throw new MethodNotAllowedException( + Msg.code(1321) + "Patient resource IDs must be client-assigned in patient compartment mode"); } } else { - compartmentIdentity = compartmentSps - .stream() - .flatMap(param -> Arrays.stream(BaseSearchParamExtractor.splitPathsR4(param.getPath()))) - 
.filter(StringUtils::isNotBlank) - .map(path -> mySearchParamExtractor.getPathValueExtractor(theResource, path).get()) - .filter(t -> !t.isEmpty()) - .map(t -> t.get(0)) - .filter(t -> t instanceof IBaseReference) - .map(t -> (IBaseReference) t) - .map(t -> t.getReferenceElement().getValue()) - .map(t -> new IdType(t).getIdPart()) - .filter(StringUtils::isNotBlank) - .findFirst() - .orElse(null); + compartmentIdentity = compartmentSps.stream() + .flatMap(param -> Arrays.stream(BaseSearchParamExtractor.splitPathsR4(param.getPath()))) + .filter(StringUtils::isNotBlank) + .map(path -> mySearchParamExtractor + .getPathValueExtractor(theResource, path) + .get()) + .filter(t -> !t.isEmpty()) + .map(t -> t.get(0)) + .filter(t -> t instanceof IBaseReference) + .map(t -> (IBaseReference) t) + .map(t -> t.getReferenceElement().getValue()) + .map(t -> new IdType(t).getIdPart()) + .filter(StringUtils::isNotBlank) + .findFirst() + .orElse(null); if (isBlank(compartmentIdentity)) { return provideNonCompartmentMemberInstanceResponse(theResource); } } - return provideCompartmentMemberInstanceResponse(theRequestDetails, compartmentIdentity); } @Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_READ) - public RequestPartitionId identifyForRead(ReadPartitionIdRequestDetails theReadDetails, RequestDetails theRequestDetails) { + public RequestPartitionId identifyForRead( + ReadPartitionIdRequestDetails theReadDetails, RequestDetails theRequestDetails) { if (isBlank(theReadDetails.getResourceType())) { return provideNonCompartmentMemberTypeResponse(null); } @@ -139,7 +144,9 @@ public class PatientIdPartitionInterceptor { case READ: case VREAD: if ("Patient".equals(theReadDetails.getResourceType())) { - return provideCompartmentMemberInstanceResponse(theRequestDetails, theReadDetails.getReadResourceId().getIdPart()); + return provideCompartmentMemberInstanceResponse( + theRequestDetails, + theReadDetails.getReadResourceId().getIdPart()); } break; case SEARCH_TYPE: @@ -177,15 +184,15 @@ public class PatientIdPartitionInterceptor { @Nonnull private List getCompartmentSearchParams(RuntimeResourceDefinition resourceDef) { - return resourceDef - .getSearchParams() - .stream() - .filter(param -> param.getParamType() == RestSearchParameterTypeEnum.REFERENCE) - .filter(param -> param.getProvidesMembershipInCompartments() != null && param.getProvidesMembershipInCompartments().contains("Patient")) - .collect(Collectors.toList()); + return resourceDef.getSearchParams().stream() + .filter(param -> param.getParamType() == RestSearchParameterTypeEnum.REFERENCE) + .filter(param -> param.getProvidesMembershipInCompartments() != null + && param.getProvidesMembershipInCompartments().contains("Patient")) + .collect(Collectors.toList()); } - private String getSingleResourceIdValueOrNull(SearchParameterMap theParams, String theParamName, String theResourceType) { + private String getSingleResourceIdValueOrNull( + SearchParameterMap theParams, String theParamName, String theResourceType) { String idPart = null; List> idParamAndList = theParams.get(theParamName); if (idParamAndList != null && idParamAndList.size() == 1) { @@ -193,12 +200,14 @@ public class PatientIdPartitionInterceptor { if (idParamOrList.size() == 1) { IQueryParameterType idParam = idParamOrList.get(0); if (isNotBlank(idParam.getQueryParameterQualifier())) { - throw new MethodNotAllowedException(Msg.code(1322) + "The parameter " + theParamName + idParam.getQueryParameterQualifier() + " is not supported in patient compartment mode"); + throw new 
MethodNotAllowedException(Msg.code(1322) + "The parameter " + theParamName + + idParam.getQueryParameterQualifier() + " is not supported in patient compartment mode"); } if (idParam instanceof ReferenceParam) { String chain = ((ReferenceParam) idParam).getChain(); if (chain != null) { - throw new MethodNotAllowedException(Msg.code(1323) + "The parameter " + theParamName + "." + chain + " is not supported in patient compartment mode"); + throw new MethodNotAllowedException(Msg.code(1323) + "The parameter " + theParamName + "." + + chain + " is not supported in patient compartment mode"); } } @@ -207,29 +216,31 @@ public class PatientIdPartitionInterceptor { idPart = id.getIdPart(); } } else if (idParamOrList.size() > 1) { - throw new MethodNotAllowedException(Msg.code(1324) + "Multiple values for parameter " + theParamName + " is not supported in patient compartment mode"); + throw new MethodNotAllowedException(Msg.code(1324) + "Multiple values for parameter " + theParamName + + " is not supported in patient compartment mode"); } } else if (idParamAndList != null && idParamAndList.size() > 1) { - throw new MethodNotAllowedException(Msg.code(1325) + "Multiple values for parameter " + theParamName + " is not supported in patient compartment mode"); + throw new MethodNotAllowedException(Msg.code(1325) + "Multiple values for parameter " + theParamName + + " is not supported in patient compartment mode"); } return idPart; } - /** * Return a partition or throw an error for FHIR operations that can not be used with this interceptor */ - protected RequestPartitionId provideNonPatientSpecificQueryResponse(ReadPartitionIdRequestDetails theRequestDetails) { + protected RequestPartitionId provideNonPatientSpecificQueryResponse( + ReadPartitionIdRequestDetails theRequestDetails) { return RequestPartitionId.allPartitions(); } - /** * Generate the partition for a given patient resource ID. This method may be overridden in subclasses, but it * may be easier to override {@link #providePartitionIdForPatientId(RequestDetails, String)} instead. 
*/ @Nonnull - protected RequestPartitionId provideCompartmentMemberInstanceResponse(RequestDetails theRequestDetails, String theResourceIdPart) { + protected RequestPartitionId provideCompartmentMemberInstanceResponse( + RequestDetails theRequestDetails, String theResourceIdPart) { int partitionId = providePartitionIdForPatientId(theRequestDetails, theResourceIdPart); return RequestPartitionId.fromPartitionIdAndName(partitionId, theResourceIdPart); } @@ -256,7 +267,8 @@ public class PatientIdPartitionInterceptor { */ @Nonnull protected RequestPartitionId provideNonCompartmentMemberInstanceResponse(IBaseResource theResource) { - throw new MethodNotAllowedException(Msg.code(1326) + "Resource of type " + myFhirContext.getResourceType(theResource) + " has no values placing it in the Patient compartment"); + throw new MethodNotAllowedException(Msg.code(1326) + "Resource of type " + + myFhirContext.getResourceType(theResource) + " has no values placing it in the Patient compartment"); } /** @@ -268,6 +280,4 @@ public class PatientIdPartitionInterceptor { protected RequestPartitionId provideNonCompartmentMemberTypeResponse(IBaseResource theResource) { return RequestPartitionId.fromPartitionId(myPartitionSettings.getDefaultPartitionId()); } - - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/UserRequestRetryVersionConflictsInterceptor.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/UserRequestRetryVersionConflictsInterceptor.java index d1f6704b2da..6f69a88e923 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/UserRequestRetryVersionConflictsInterceptor.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/UserRequestRetryVersionConflictsInterceptor.java @@ -64,7 +64,6 @@ public class UserRequestRetryVersionConflictsInterceptor { return retVal; } - /** * Convenience method to add a retry header to a system request */ diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/BaseTypedRule.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/BaseTypedRule.java index 14ad874390c..2cbd1830440 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/BaseTypedRule.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/BaseTypedRule.java @@ -45,5 +45,4 @@ abstract class BaseTypedRule implements IRepositoryValidatingRule { protected FhirContext getFhirContext() { return myFhirContext; } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/IRepositoryValidatingRule.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/IRepositoryValidatingRule.java index a264114a17f..4bbeb1af078 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/IRepositoryValidatingRule.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/IRepositoryValidatingRule.java @@ -44,7 +44,11 @@ public interface IRepositoryValidatingRule { private boolean myPasses; private String myFailureDescription; - private RuleEvaluation(IRepositoryValidatingRule theRule, boolean thePasses, String theFailureDescription, IBaseOperationOutcome theOperationOutcome) { + private RuleEvaluation( + IRepositoryValidatingRule theRule, + boolean thePasses, + String theFailureDescription, + IBaseOperationOutcome theOperationOutcome) { myRule = theRule; myPasses = thePasses; myFailureDescription = theFailureDescription; @@ -83,6 +87,5 @@ public interface 
IRepositoryValidatingRule { public String getFailureDescription() { return myFailureDescription; } - } } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/IRuleRoot.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/IRuleRoot.java index 265486c14c3..543f9c793b5 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/IRuleRoot.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/IRuleRoot.java @@ -34,5 +34,4 @@ interface IRuleRoot { * Create the repository validation rules */ List build(); - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/RepositoryValidatingInterceptor.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/RepositoryValidatingInterceptor.java index 0853db0c7ad..f59ab1bc605 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/RepositoryValidatingInterceptor.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/RepositoryValidatingInterceptor.java @@ -19,8 +19,8 @@ */ package ca.uhn.fhir.jpa.interceptor.validation; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.api.Hook; import ca.uhn.fhir.interceptor.api.Interceptor; import ca.uhn.fhir.interceptor.api.Pointcut; @@ -35,10 +35,10 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; import java.util.Collection; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import static ca.uhn.fhir.util.HapiExtensions.EXT_RESOURCE_PLACEHOLDER; @@ -94,7 +94,6 @@ public class RepositoryValidatingInterceptor { String rulesDescription = "RepositoryValidatingInterceptor has rules:\n" + describeRules(); ourLog.info(rulesDescription); - } /** @@ -104,13 +103,12 @@ public class RepositoryValidatingInterceptor { */ @Nonnull public String describeRules() { - return " * " + myRules - .values() - .stream() - .distinct() - .map(t -> t.toString()) - .sorted() - .collect(Collectors.joining("\n * ")); + return " * " + + myRules.values().stream() + .distinct() + .map(t -> t.toString()) + .sorted() + .collect(Collectors.joining("\n * ")); } /** @@ -130,7 +128,7 @@ public class RepositoryValidatingInterceptor { } private void handle(RequestDetails theRequestDetails, IBaseResource theNewResource) { - + Validate.notNull(myFhirContext, "No FhirContext has been set for this interceptor of type: %s", getClass()); if (!isPlaceholderResource(theNewResource)) { String resourceType = myFhirContext.getResourceType(theNewResource); @@ -141,7 +139,7 @@ public class RepositoryValidatingInterceptor { handleFailure(outcome); } } - } + } } /** @@ -155,10 +153,10 @@ public class RepositoryValidatingInterceptor { protected void handleFailure(IRepositoryValidatingRule.RuleEvaluation theOutcome) { if (theOutcome.getOperationOutcome() != null) { - String firstIssue = OperationOutcomeUtil.getFirstIssueDetails(myFhirContext, theOutcome.getOperationOutcome()); + String firstIssue = + OperationOutcomeUtil.getFirstIssueDetails(myFhirContext, theOutcome.getOperationOutcome()); throw new PreconditionFailedException(Msg.code(574) + firstIssue, theOutcome.getOperationOutcome()); } throw new PreconditionFailedException(Msg.code(575) + theOutcome.getFailureDescription()); } - } diff --git 
a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/RepositoryValidatingRuleBuilder.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/RepositoryValidatingRuleBuilder.java index b83767a13b2..a7b8671e295 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/RepositoryValidatingRuleBuilder.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/RepositoryValidatingRuleBuilder.java @@ -31,11 +31,11 @@ import org.apache.commons.text.WordUtils; import org.hl7.fhir.r5.utils.validation.constants.BestPracticeWarningLevel; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; +import javax.annotation.Nonnull; import static com.google.common.base.Ascii.toLowerCase; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -52,11 +52,15 @@ public final class RepositoryValidatingRuleBuilder implements IRuleRoot { @Autowired private FhirContext myFhirContext; + private final IValidationSupport myValidationSupport; + @Autowired private ValidatorResourceFetcher myValidatorResourceFetcher; + @Autowired private ValidatorPolicyAdvisor myValidationPolicyAdvisor; + @Autowired private IInterceptorBroadcaster myInterceptorBroadcaster; @@ -180,8 +184,13 @@ public final class RepositoryValidatingRuleBuilder implements IRuleRoot { * @see ValidationResultEnrichingInterceptor */ public FinalizedRequireValidationRule requireValidationToDeclaredProfiles() { - RequireValidationRule rule = new RequireValidationRule(myFhirContext, myType, myValidationSupport, - myValidatorResourceFetcher, myValidationPolicyAdvisor, myInterceptorBroadcaster); + RequireValidationRule rule = new RequireValidationRule( + myFhirContext, + myType, + myValidationSupport, + myValidatorResourceFetcher, + myValidationPolicyAdvisor, + myInterceptorBroadcaster); myRules.add(rule); return new FinalizedRequireValidationRule(rule); } @@ -193,7 +202,6 @@ public final class RepositoryValidatingRuleBuilder implements IRuleRoot { return new FinalizedTypedRule(myType); } - public class FinalizedRequireValidationRule extends FinalizedTypedRule { private final RequireValidationRule myRule; @@ -213,7 +221,8 @@ public final class RepositoryValidatingRuleBuilder implements IRuleRoot { public FinalizedRequireValidationRule withBestPracticeWarningLevel(String theBestPracticeWarningLevel) { BestPracticeWarningLevel level = null; if (isNotBlank(theBestPracticeWarningLevel)) { - level = BestPracticeWarningLevel.valueOf(WordUtils.capitalize(theBestPracticeWarningLevel.toLowerCase())); + level = BestPracticeWarningLevel.valueOf( + WordUtils.capitalize(theBestPracticeWarningLevel.toLowerCase())); } return withBestPracticeWarningLevel(level); } @@ -227,7 +236,8 @@ public final class RepositoryValidatingRuleBuilder implements IRuleRoot { * to not include any best practice notifications. 
*/ @Nonnull - public FinalizedRequireValidationRule withBestPracticeWarningLevel(BestPracticeWarningLevel bestPracticeWarningLevel) { + public FinalizedRequireValidationRule withBestPracticeWarningLevel( + BestPracticeWarningLevel bestPracticeWarningLevel) { myRule.setBestPracticeWarningLevel(bestPracticeWarningLevel); return this; } @@ -283,7 +293,8 @@ public final class RepositoryValidatingRuleBuilder implements IRuleRoot { * @return */ @Nonnull - public FinalizedRequireValidationRule tagOnSeverity(@Nonnull String theSeverity, @Nonnull String theTagSystem, @Nonnull String theTagCode) { + public FinalizedRequireValidationRule tagOnSeverity( + @Nonnull String theSeverity, @Nonnull String theTagSystem, @Nonnull String theTagCode) { ResultSeverityEnum severity = ResultSeverityEnum.fromCode(toLowerCase(theSeverity)); return tagOnSeverity(severity, theTagSystem, theTagCode); } @@ -298,7 +309,8 @@ public final class RepositoryValidatingRuleBuilder implements IRuleRoot { * @return */ @Nonnull - public FinalizedRequireValidationRule tagOnSeverity(@Nonnull ResultSeverityEnum theSeverity, @Nonnull String theTagSystem, @Nonnull String theTagCode) { + public FinalizedRequireValidationRule tagOnSeverity( + @Nonnull ResultSeverityEnum theSeverity, @Nonnull String theTagSystem, @Nonnull String theTagCode) { myRule.tagOnSeverity(theSeverity, theTagSystem, theTagCode); return this; } @@ -362,9 +374,6 @@ public final class RepositoryValidatingRuleBuilder implements IRuleRoot { myRule.getValidator().setNoExtensibleWarnings(true); return this; } - } - } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/RequireValidationRule.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/RequireValidationRule.java index 8732729c5f1..eab7f12bbe8 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/RequireValidationRule.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/RequireValidationRule.java @@ -38,10 +38,10 @@ import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.r5.utils.validation.constants.BestPracticeWarningLevel; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import javax.annotation.Nonnull; class RequireValidationRule extends BaseTypedRule { private final FhirInstanceValidator myValidator; @@ -49,12 +49,13 @@ class RequireValidationRule extends BaseTypedRule { private ResultSeverityEnum myRejectOnSeverity = ResultSeverityEnum.ERROR; private List myTagOnSeverity = Collections.emptyList(); - public RequireValidationRule(FhirContext theFhirContext, - String theType, - IValidationSupport theValidationSupport, - ValidatorResourceFetcher theValidatorResourceFetcher, - ValidatorPolicyAdvisor theValidationPolicyAdvisor, - IInterceptorBroadcaster theInterceptorBroadcaster) { + public RequireValidationRule( + FhirContext theFhirContext, + String theType, + IValidationSupport theValidationSupport, + ValidatorResourceFetcher theValidatorResourceFetcher, + ValidatorPolicyAdvisor theValidationPolicyAdvisor, + IInterceptorBroadcaster theInterceptorBroadcaster) { super(theFhirContext, theType); myInterceptorBroadcaster = theInterceptorBroadcaster; @@ -74,13 +75,15 @@ class RequireValidationRule extends BaseTypedRule { public RuleEvaluation evaluate(RequestDetails theRequestDetails, @Nonnull IBaseResource theResource) { FhirValidator 
validator = getFhirContext().newValidator(); - validator.setInterceptorBroadcaster(CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequestDetails)); + validator.setInterceptorBroadcaster( + CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequestDetails)); validator.registerValidatorModule(myValidator); ValidationResult outcome = validator.validateWithResult(theResource); for (SingleValidationMessage next : outcome.getMessages()) { if (next.getSeverity().ordinal() >= ResultSeverityEnum.ERROR.ordinal()) { - if (myRejectOnSeverity != null && myRejectOnSeverity.ordinal() <= next.getSeverity().ordinal()) { + if (myRejectOnSeverity != null + && myRejectOnSeverity.ordinal() <= next.getSeverity().ordinal()) { return RuleEvaluation.forFailure(this, outcome.toOperationOutcome()); } } @@ -88,13 +91,12 @@ class RequireValidationRule extends BaseTypedRule { for (TagOnSeverity nextTagOnSeverity : myTagOnSeverity) { if (next.getSeverity().ordinal() >= nextTagOnSeverity.getSeverity()) { theResource - .getMeta() - .addTag() - .setSystem(nextTagOnSeverity.getTagSystem()) - .setCode(nextTagOnSeverity.getTagCode()); + .getMeta() + .addTag() + .setSystem(nextTagOnSeverity.getTagSystem()) + .setCode(nextTagOnSeverity.getTagCode()); } } - } ValidationResultEnrichingInterceptor.addValidationResultToRequestDetails(theRequestDetails, outcome); @@ -123,10 +125,10 @@ class RequireValidationRule extends BaseTypedRule { @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("resourceType", getResourceType()) - .append("rejectOnSeverity", myRejectOnSeverity) - .append("tagOnSeverity", myTagOnSeverity) - .toString(); + .append("resourceType", getResourceType()) + .append("rejectOnSeverity", myRejectOnSeverity) + .append("tagOnSeverity", myTagOnSeverity) + .toString(); } public FhirInstanceValidator getValidator() { diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/RuleDisallowProfile.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/RuleDisallowProfile.java index a1358261910..5ae5a036563 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/RuleDisallowProfile.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/RuleDisallowProfile.java @@ -28,9 +28,9 @@ import org.apache.commons.lang3.builder.ToStringStyle; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nonnull; import java.util.HashSet; import java.util.Set; +import javax.annotation.Nonnull; class RuleDisallowProfile extends BaseTypedRule { private final Set myProfileUrls; @@ -52,7 +52,9 @@ class RuleDisallowProfile extends BaseTypedRule { String nextUrl = next.getValueAsString(); String nextUrlNormalized = UrlUtil.normalizeCanonicalUrlForComparison(nextUrl); if (myProfileUrls.contains(nextUrlNormalized)) { - String msg = getFhirContext().getLocalizer().getMessage(RuleRequireProfileDeclaration.class, "illegalProfile", getResourceType(), nextUrl); + String msg = getFhirContext() + .getLocalizer() + .getMessage(RuleRequireProfileDeclaration.class, "illegalProfile", getResourceType(), nextUrl); return RuleEvaluation.forFailure(this, msg); } } @@ -63,10 +65,8 @@ class RuleDisallowProfile extends BaseTypedRule { @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("resourceType", 
getResourceType()) - .append("profiles", myProfileUrls) - .toString(); + .append("resourceType", getResourceType()) + .append("profiles", myProfileUrls) + .toString(); } - - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/RuleRequireProfileDeclaration.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/RuleRequireProfileDeclaration.java index 575571928f3..55f51b8ec4d 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/RuleRequireProfileDeclaration.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/validation/RuleRequireProfileDeclaration.java @@ -25,9 +25,9 @@ import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; import org.hl7.fhir.instance.model.api.IBaseResource; -import javax.annotation.Nonnull; import java.util.Collection; import java.util.Optional; +import javax.annotation.Nonnull; class RuleRequireProfileDeclaration extends BaseTypedRule { private final Collection myProfileOptions; @@ -39,29 +39,27 @@ class RuleRequireProfileDeclaration extends BaseTypedRule { @Nonnull @Override - public IRepositoryValidatingRule.RuleEvaluation evaluate(RequestDetails theRequestDetails, @Nonnull IBaseResource theResource) { - Optional matchingProfile = theResource - .getMeta() - .getProfile() - .stream() - .map(t -> t.getValueAsString()) - .filter(t -> myProfileOptions.contains(t)) - .findFirst(); + public IRepositoryValidatingRule.RuleEvaluation evaluate( + RequestDetails theRequestDetails, @Nonnull IBaseResource theResource) { + Optional matchingProfile = theResource.getMeta().getProfile().stream() + .map(t -> t.getValueAsString()) + .filter(t -> myProfileOptions.contains(t)) + .findFirst(); if (matchingProfile.isPresent()) { return IRepositoryValidatingRule.RuleEvaluation.forSuccess(this); } - String msg = getFhirContext().getLocalizer().getMessage(RuleRequireProfileDeclaration.class, "noMatchingProfile", getResourceType(), myProfileOptions); + String msg = getFhirContext() + .getLocalizer() + .getMessage( + RuleRequireProfileDeclaration.class, "noMatchingProfile", getResourceType(), myProfileOptions); return IRepositoryValidatingRule.RuleEvaluation.forFailure(this, msg); } - @Override public String toString() { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) - .append("resourceType", getResourceType()) - .append("profiles", myProfileOptions) - .toString(); + .append("resourceType", getResourceType()) + .append("profiles", myProfileOptions) + .toString(); } - - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/model/search/SearchBuilderLoadIncludesParameters.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/model/search/SearchBuilderLoadIncludesParameters.java index 8d5a06327df..f199bbfe9be 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/model/search/SearchBuilderLoadIncludesParameters.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/model/search/SearchBuilderLoadIncludesParameters.java @@ -25,10 +25,10 @@ import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; import ca.uhn.fhir.rest.param.DateRangeParam; -import javax.persistence.EntityManager; import java.util.ArrayList; import java.util.Collection; import java.util.List; +import javax.persistence.EntityManager; public class SearchBuilderLoadIncludesParameters { diff --git 
a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/partition/BaseRequestPartitionHelperSvc.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/partition/BaseRequestPartitionHelperSvc.java index 850d6551026..b532571ab52 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/partition/BaseRequestPartitionHelperSvc.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/partition/BaseRequestPartitionHelperSvc.java @@ -38,12 +38,12 @@ import org.apache.commons.lang3.StringUtils; import org.hl7.fhir.instance.model.api.IBaseResource; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster.doCallHooks; import static ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster.doCallHooksAndReturnObject; @@ -52,10 +52,13 @@ import static ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster.hasHo public abstract class BaseRequestPartitionHelperSvc implements IRequestPartitionHelperSvc { private final HashSet myNonPartitionableResourceNames; + @Autowired protected FhirContext myFhirContext; + @Autowired private IInterceptorBroadcaster myInterceptorBroadcaster; + @Autowired private PartitionSettings myPartitionSettings; @@ -80,7 +83,6 @@ public abstract class BaseRequestPartitionHelperSvc implements IRequestPartition myNonPartitionableResourceNames.add("ValueSet"); myNonPartitionableResourceNames.add("NamingSystem"); myNonPartitionableResourceNames.add("StructureMap"); - } /** @@ -91,7 +93,8 @@ public abstract class BaseRequestPartitionHelperSvc implements IRequestPartition */ @Nonnull @Override - public RequestPartitionId determineReadPartitionForRequest(@Nullable RequestDetails theRequest, ReadPartitionIdRequestDetails theDetails) { + public RequestPartitionId determineReadPartitionForRequest( + @Nullable RequestDetails theRequest, ReadPartitionIdRequestDetails theDetails) { RequestPartitionId requestPartitionId; String resourceType = theDetails != null ? theDetails.getResourceType() : null; @@ -99,29 +102,35 @@ public abstract class BaseRequestPartitionHelperSvc implements IRequestPartition if (myPartitionSettings.isPartitioningEnabled()) { RequestDetails requestDetails = theRequest; - //TODO GGG eventually, theRequest will not be allowed to be null here, and we will pass through SystemRequestDetails instead. + // TODO GGG eventually, theRequest will not be allowed to be null here, and we will pass through + // SystemRequestDetails instead. 
if (requestDetails == null) { requestDetails = new SystemRequestDetails(); } // Handle system requests - if (requestDetails instanceof SystemRequestDetails && systemRequestHasExplicitPartition((SystemRequestDetails) requestDetails) && !nonPartitionableResource) { - requestPartitionId = getSystemRequestPartitionId((SystemRequestDetails) requestDetails, nonPartitionableResource); + if (requestDetails instanceof SystemRequestDetails + && systemRequestHasExplicitPartition((SystemRequestDetails) requestDetails) + && !nonPartitionableResource) { + requestPartitionId = + getSystemRequestPartitionId((SystemRequestDetails) requestDetails, nonPartitionableResource); } else if ((requestDetails instanceof SystemRequestDetails) && nonPartitionableResource) { return RequestPartitionId.fromPartitionId(myPartitionSettings.getDefaultPartitionId()); } else if (hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_ANY, myInterceptorBroadcaster, requestDetails)) { // Interceptor call: STORAGE_PARTITION_IDENTIFY_ANY HookParams params = new HookParams() - .add(RequestDetails.class, requestDetails) - .addIfMatchesType(ServletRequestDetails.class, requestDetails); - requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject(myInterceptorBroadcaster, requestDetails, Pointcut.STORAGE_PARTITION_IDENTIFY_ANY, params); + .add(RequestDetails.class, requestDetails) + .addIfMatchesType(ServletRequestDetails.class, requestDetails); + requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject( + myInterceptorBroadcaster, requestDetails, Pointcut.STORAGE_PARTITION_IDENTIFY_ANY, params); } else if (hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_READ, myInterceptorBroadcaster, requestDetails)) { // Interceptor call: STORAGE_PARTITION_IDENTIFY_READ HookParams params = new HookParams() - .add(RequestDetails.class, requestDetails) - .addIfMatchesType(ServletRequestDetails.class, requestDetails) - .add(ReadPartitionIdRequestDetails.class, theDetails); - requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject(myInterceptorBroadcaster, requestDetails, Pointcut.STORAGE_PARTITION_IDENTIFY_READ, params); + .add(RequestDetails.class, requestDetails) + .addIfMatchesType(ServletRequestDetails.class, requestDetails) + .add(ReadPartitionIdRequestDetails.class, theDetails); + requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject( + myInterceptorBroadcaster, requestDetails, Pointcut.STORAGE_PARTITION_IDENTIFY_READ, params); } else { requestPartitionId = null; } @@ -149,14 +158,14 @@ public abstract class BaseRequestPartitionHelperSvc implements IRequestPartition if (hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_ANY, myInterceptorBroadcaster, theRequestDetails)) { // Interceptor call: STORAGE_PARTITION_IDENTIFY_ANY HookParams params = new HookParams() - .add(RequestDetails.class, theRequestDetails) - .addIfMatchesType(ServletRequestDetails.class, theRequestDetails); - retVal = (RequestPartitionId) doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PARTITION_IDENTIFY_ANY, params); + .add(RequestDetails.class, theRequestDetails) + .addIfMatchesType(ServletRequestDetails.class, theRequestDetails); + retVal = (RequestPartitionId) doCallHooksAndReturnObject( + myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PARTITION_IDENTIFY_ANY, params); if (retVal != null) { retVal = validateNormalizeAndNotifyHooksForRead(retVal, theRequestDetails, null); } - } } @@ -167,11 +176,13 @@ public abstract class BaseRequestPartitionHelperSvc implements IRequestPartition * For 
system requests, read partition from tenant ID if present, otherwise set to DEFAULT. If the resource they are attempting to partition * is non-partitionable scream in the logs and set the partition to DEFAULT. */ - private RequestPartitionId getSystemRequestPartitionId(SystemRequestDetails theRequest, boolean theNonPartitionableResource) { + private RequestPartitionId getSystemRequestPartitionId( + SystemRequestDetails theRequest, boolean theNonPartitionableResource) { RequestPartitionId requestPartitionId; requestPartitionId = getSystemRequestPartitionId(theRequest); if (theNonPartitionableResource && !requestPartitionId.isDefaultPartition()) { - throw new InternalErrorException(Msg.code(1315) + "System call is attempting to write a non-partitionable resource to a partition! This is a bug!"); + throw new InternalErrorException(Msg.code(1315) + + "System call is attempting to write a non-partitionable resource to a partition! This is a bug!"); } return requestPartitionId; } @@ -204,44 +215,53 @@ public abstract class BaseRequestPartitionHelperSvc implements IRequestPartition */ @Nonnull @Override - public RequestPartitionId determineCreatePartitionForRequest(@Nullable RequestDetails theRequest, @Nonnull IBaseResource theResource, @Nonnull String theResourceType) { + public RequestPartitionId determineCreatePartitionForRequest( + @Nullable RequestDetails theRequest, @Nonnull IBaseResource theResource, @Nonnull String theResourceType) { RequestPartitionId requestPartitionId; if (myPartitionSettings.isPartitioningEnabled()) { boolean nonPartitionableResource = myNonPartitionableResourceNames.contains(theResourceType); - //TODO GGG eventually, theRequest will not be allowed to be null here, and we will pass through SystemRequestDetails instead. + // TODO GGG eventually, theRequest will not be allowed to be null here, and we will pass through + // SystemRequestDetails instead. if ((theRequest == null || theRequest instanceof SystemRequestDetails) && nonPartitionableResource) { return RequestPartitionId.defaultPartition(); } - if (theRequest instanceof SystemRequestDetails && systemRequestHasExplicitPartition((SystemRequestDetails) theRequest)) { - requestPartitionId = getSystemRequestPartitionId((SystemRequestDetails) theRequest, nonPartitionableResource); + if (theRequest instanceof SystemRequestDetails + && systemRequestHasExplicitPartition((SystemRequestDetails) theRequest)) { + requestPartitionId = + getSystemRequestPartitionId((SystemRequestDetails) theRequest, nonPartitionableResource); } else { if (hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_ANY, myInterceptorBroadcaster, theRequest)) { // Interceptor call: STORAGE_PARTITION_IDENTIFY_ANY HookParams params = new HookParams() - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest); - requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PARTITION_IDENTIFY_ANY, params); + .add(RequestDetails.class, theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest); + requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject( + myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PARTITION_IDENTIFY_ANY, params); } else { - //This is an external Request (e.g. ServletRequestDetails) so we want to figure out the partition via interceptor. + // This is an external Request (e.g. ServletRequestDetails) so we want to figure out the partition + // via interceptor. 
// Interceptor call: STORAGE_PARTITION_IDENTIFY_CREATE HookParams params = new HookParams() - .add(IBaseResource.class, theResource) - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest); - requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE, params); + .add(IBaseResource.class, theResource) + .add(RequestDetails.class, theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest); + requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject( + myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE, params); } - //If the interceptors haven't selected a partition, and its a non-partitionable resource anyhow, send to DEFAULT + // If the interceptors haven't selected a partition, and its a non-partitionable resource anyhow, send + // to DEFAULT if (nonPartitionableResource && requestPartitionId == null) { requestPartitionId = RequestPartitionId.defaultPartition(); } } String resourceName = myFhirContext.getResourceType(theResource); - validateSinglePartitionForCreate(requestPartitionId, resourceName, Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE); + validateSinglePartitionForCreate( + requestPartitionId, resourceName, Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE); return validateNormalizeAndNotifyHooksForRead(requestPartitionId, theRequest, theResourceType); } @@ -256,7 +276,9 @@ public abstract class BaseRequestPartitionHelperSvc implements IRequestPartition @Nonnull @Override public Set toReadPartitions(@Nonnull RequestPartitionId theRequestPartitionId) { - return theRequestPartitionId.getPartitionIds().stream().map(t -> t == null ? myPartitionSettings.getDefaultPartitionId() : t).collect(Collectors.toSet()); + return theRequestPartitionId.getPartitionIds().stream() + .map(t -> t == null ? myPartitionSettings.getDefaultPartitionId() : t) + .collect(Collectors.toSet()); } /** @@ -267,7 +289,10 @@ public abstract class BaseRequestPartitionHelperSvc implements IRequestPartition * If the partition has both, they are validated to ensure that they correspond. 
*/ @Nonnull - private RequestPartitionId validateNormalizeAndNotifyHooksForRead(@Nonnull RequestPartitionId theRequestPartitionId, RequestDetails theRequest, @Nullable String theResourceType) { + private RequestPartitionId validateNormalizeAndNotifyHooksForRead( + @Nonnull RequestPartitionId theRequestPartitionId, + RequestDetails theRequest, + @Nullable String theResourceType) { RequestPartitionId retVal = theRequestPartitionId; if (!myPartitionSettings.isUnnamedPartitionMode()) { @@ -285,21 +310,21 @@ public abstract class BaseRequestPartitionHelperSvc implements IRequestPartition } return retVal; - } @Override - public void validateHasPartitionPermissions(RequestDetails theRequest, String theResourceType, RequestPartitionId theRequestPartitionId) { + public void validateHasPartitionPermissions( + RequestDetails theRequest, String theResourceType, RequestPartitionId theRequestPartitionId) { if (myInterceptorBroadcaster.hasHooks(Pointcut.STORAGE_PARTITION_SELECTED)) { RuntimeResourceDefinition runtimeResourceDefinition = null; if (theResourceType != null) { runtimeResourceDefinition = myFhirContext.getResourceDefinition(theResourceType); } HookParams params = new HookParams() - .add(RequestPartitionId.class, theRequestPartitionId) - .add(RequestDetails.class, theRequest) - .addIfMatchesType(ServletRequestDetails.class, theRequest) - .add(RuntimeResourceDefinition.class, runtimeResourceDefinition); + .add(RequestPartitionId.class, theRequestPartitionId) + .add(RequestDetails.class, theRequest) + .addIfMatchesType(ServletRequestDetails.class, theRequest) + .add(RuntimeResourceDefinition.class, runtimeResourceDefinition); doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PARTITION_SELECTED, params); } } @@ -313,7 +338,8 @@ public abstract class BaseRequestPartitionHelperSvc implements IRequestPartition protected abstract RequestPartitionId validateAndNormalizePartitionNames(RequestPartitionId theRequestPartitionId); - private void validateSinglePartitionForCreate(RequestPartitionId theRequestPartitionId, @Nonnull String theResourceName, Pointcut thePointcut) { + private void validateSinglePartitionForCreate( + RequestPartitionId theRequestPartitionId, @Nonnull String theResourceName, Pointcut thePointcut) { validateRequestPartitionNotNull(theRequestPartitionId, thePointcut); if (theRequestPartitionId.hasPartitionIds()) { @@ -322,26 +348,35 @@ public abstract class BaseRequestPartitionHelperSvc implements IRequestPartition validateSinglePartitionIdOrNameForCreate(theRequestPartitionId.getPartitionNames()); // Make sure we're not using one of the conformance resources in a non-default partition - if ((theRequestPartitionId.hasPartitionIds() && !theRequestPartitionId.getPartitionIds().contains(null)) || (theRequestPartitionId.hasPartitionNames() && !theRequestPartitionId.getPartitionNames().contains(JpaConstants.DEFAULT_PARTITION_NAME))) { + if ((theRequestPartitionId.hasPartitionIds() + && !theRequestPartitionId.getPartitionIds().contains(null)) + || (theRequestPartitionId.hasPartitionNames() + && !theRequestPartitionId.getPartitionNames().contains(JpaConstants.DEFAULT_PARTITION_NAME))) { if (!isResourcePartitionable(theResourceName)) { - String msg = myFhirContext.getLocalizer().getMessageSanitized(BaseRequestPartitionHelperSvc.class, "nonDefaultPartitionSelectedForNonPartitionable", theResourceName); + String msg = myFhirContext + .getLocalizer() + .getMessageSanitized( + BaseRequestPartitionHelperSvc.class, + "nonDefaultPartitionSelectedForNonPartitionable", + 
theResourceName); throw new UnprocessableEntityException(Msg.code(1318) + msg); } - } - } private void validateRequestPartitionNotNull(RequestPartitionId theRequestPartitionId, Pointcut theThePointcut) { if (theRequestPartitionId == null) { - throw new InternalErrorException(Msg.code(1319) + "No interceptor provided a value for pointcut: " + theThePointcut); + throw new InternalErrorException( + Msg.code(1319) + "No interceptor provided a value for pointcut: " + theThePointcut); } } private void validateSinglePartitionIdOrNameForCreate(@Nullable List thePartitionIds) { if (thePartitionIds != null && thePartitionIds.size() != 1) { - throw new InternalErrorException(Msg.code(1320) + "RequestPartitionId must contain a single partition for create operations, found: " + thePartitionIds); + throw new InternalErrorException( + Msg.code(1320) + "RequestPartitionId must contain a single partition for create operations, found: " + + thePartitionIds); } } } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/patch/FhirPatch.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/patch/FhirPatch.java index 4b1f882debf..310e856e7c2 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/patch/FhirPatch.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/patch/FhirPatch.java @@ -38,8 +38,6 @@ import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.hl7.fhir.utilities.xhtml.XhtmlNode; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; @@ -47,6 +45,8 @@ import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.Set; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.defaultString; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -141,9 +141,8 @@ public class FhirPatch { int lastDot = path.lastIndexOf("."); String containingPath = path.substring(0, lastDot); String elementName = path.substring(lastDot + 1); - Integer insertIndex = ParametersUtil - .getParameterPartValueAsInteger(myContext, theParameters, PARAMETER_INDEX) - .orElseThrow(() -> new InvalidRequestException("No index supplied for insert operation")); + Integer insertIndex = ParametersUtil.getParameterPartValueAsInteger(myContext, theParameters, PARAMETER_INDEX) + .orElseThrow(() -> new InvalidRequestException("No index supplied for insert operation")); List containingElements = myContext.newFhirPath().evaluate(theResource, containingPath, IBase.class); for (IBase nextElement : containingElements) { @@ -152,9 +151,12 @@ public class FhirPatch { IBase newValue = getNewValue(theParameters, nextElement, childDefinition); - List existingValues = new ArrayList<>(childDefinition.getChildDef().getAccessor().getValues(nextElement)); + List existingValues = + new ArrayList<>(childDefinition.getChildDef().getAccessor().getValues(nextElement)); if (insertIndex == null || insertIndex < 0 || insertIndex > existingValues.size()) { - String msg = myContext.getLocalizer().getMessage(FhirPatch.class, "invalidInsertIndex", insertIndex, path, existingValues.size()); + String msg = myContext + .getLocalizer() + .getMessage(FhirPatch.class, "invalidInsertIndex", insertIndex, path, existingValues.size()); throw new InvalidRequestException(Msg.code(1270) + msg); } existingValues.add(insertIndex, newValue); @@ -177,8 +179,9 @@ public class FhirPatch { if (path.endsWith(")")) 
{ // This is probably a filter, so we're probably dealing with a list int filterArgsIndex = path.lastIndexOf('('); // Let's hope there aren't nested parentheses - int lastDotIndex = path.lastIndexOf('.', filterArgsIndex); // There might be a dot inside the parentheses, so look to the left of that - int secondLastDotIndex = path.lastIndexOf('.', lastDotIndex-1); + int lastDotIndex = path.lastIndexOf( + '.', filterArgsIndex); // There might be a dot inside the parentheses, so look to the left of that + int secondLastDotIndex = path.lastIndexOf('.', lastDotIndex - 1); containingPath = path.substring(0, secondLastDotIndex); elementName = path.substring(secondLastDotIndex + 1, lastDotIndex); } else if (path.endsWith("]")) { @@ -202,11 +205,17 @@ public class FhirPatch { } } - private void deleteFromList(IBaseResource theResource, IBase theContainingElement, String theListElementName, String theElementToDeletePath) { + private void deleteFromList( + IBaseResource theResource, + IBase theContainingElement, + String theListElementName, + String theElementToDeletePath) { ChildDefinition childDefinition = findChildDefinition(theContainingElement, theListElementName); - List existingValues = new ArrayList<>(childDefinition.getChildDef().getAccessor().getValues(theContainingElement)); - List elementsToRemove = myContext.newFhirPath().evaluate(theResource, theElementToDeletePath, IBase.class); + List existingValues = + new ArrayList<>(childDefinition.getChildDef().getAccessor().getValues(theContainingElement)); + List elementsToRemove = + myContext.newFhirPath().evaluate(theResource, theElementToDeletePath, IBase.class); existingValues.removeAll(elementsToRemove); childDefinition.getChildDef().getMutator().setValue(theContainingElement, null); @@ -241,27 +250,37 @@ public class FhirPatch { int lastDot = path.lastIndexOf("."); String containingPath = path.substring(0, lastDot); String elementName = path.substring(lastDot + 1); - Integer insertIndex = ParametersUtil - .getParameterPartValueAsInteger(myContext, theParameters, PARAMETER_DESTINATION) - .orElseThrow(() -> new InvalidRequestException("No index supplied for move operation")); - Integer removeIndex = ParametersUtil - .getParameterPartValueAsInteger(myContext, theParameters, PARAMETER_SOURCE) - .orElseThrow(() -> new InvalidRequestException("No index supplied for move operation")); + Integer insertIndex = ParametersUtil.getParameterPartValueAsInteger( + myContext, theParameters, PARAMETER_DESTINATION) + .orElseThrow(() -> new InvalidRequestException("No index supplied for move operation")); + Integer removeIndex = ParametersUtil.getParameterPartValueAsInteger(myContext, theParameters, PARAMETER_SOURCE) + .orElseThrow(() -> new InvalidRequestException("No index supplied for move operation")); List containingElements = myContext.newFhirPath().evaluate(theResource, containingPath, IBase.class); for (IBase nextElement : containingElements) { ChildDefinition childDefinition = findChildDefinition(nextElement, elementName); - List existingValues = new ArrayList<>(childDefinition.getChildDef().getAccessor().getValues(nextElement)); + List existingValues = + new ArrayList<>(childDefinition.getChildDef().getAccessor().getValues(nextElement)); if (removeIndex == null || removeIndex < 0 || removeIndex >= existingValues.size()) { - String msg = myContext.getLocalizer().getMessage(FhirPatch.class, "invalidMoveSourceIndex", removeIndex, path, existingValues.size()); + String msg = myContext + .getLocalizer() + .getMessage( + FhirPatch.class, 
"invalidMoveSourceIndex", removeIndex, path, existingValues.size()); throw new InvalidRequestException(Msg.code(1268) + msg); } IBase newValue = existingValues.remove(removeIndex.intValue()); if (insertIndex == null || insertIndex < 0 || insertIndex > existingValues.size()) { - String msg = myContext.getLocalizer().getMessage(FhirPatch.class, "invalidMoveDestinationIndex", insertIndex, path, existingValues.size()); + String msg = myContext + .getLocalizer() + .getMessage( + FhirPatch.class, + "invalidMoveDestinationIndex", + insertIndex, + path, + existingValues.size()); throw new InvalidRequestException(Msg.code(1269) + msg); } existingValues.add(insertIndex, newValue); @@ -282,7 +301,8 @@ public class FhirPatch { if (childDef == null) { childName = theElementName + "[x]"; childDef = elementDef.getChildByName(childName); - childElement = childDef.getChildByName(childDef.getValidChildNames().iterator().next()); + childElement = childDef.getChildByName( + childDef.getValidChildNames().iterator().next()); } else { childElement = childDef.getChildByName(childName); } @@ -292,7 +312,8 @@ public class FhirPatch { private IBase getNewValue(IBase theParameters, IBase theElement, ChildDefinition theChildDefinition) { Optional valuePart = ParametersUtil.getParameterPart(myContext, theParameters, PARAMETER_VALUE); - Optional valuePartValue = ParametersUtil.getParameterPartValue(myContext, theParameters, PARAMETER_VALUE); + Optional valuePartValue = + ParametersUtil.getParameterPartValue(myContext, theParameters, PARAMETER_VALUE); IBase newValue; if (valuePartValue.isPresent()) { @@ -304,18 +325,23 @@ public class FhirPatch { IBase theValueElement = valuePart.get(); populateNewValue(theChildDefinition, newValue, theValueElement); } - } - if (IBaseEnumeration.class.isAssignableFrom(theChildDefinition.getChildElement().getImplementingClass()) || XhtmlNode.class.isAssignableFrom(theChildDefinition.getChildElement().getImplementingClass())) { - // If the compositeElementDef is an IBaseEnumeration, we will use the actual compositeElementDef definition to build one, since + if (IBaseEnumeration.class.isAssignableFrom( + theChildDefinition.getChildElement().getImplementingClass()) + || XhtmlNode.class.isAssignableFrom( + theChildDefinition.getChildElement().getImplementingClass())) { + // If the compositeElementDef is an IBaseEnumeration, we will use the actual compositeElementDef definition + // to build one, since // it needs the right factory object passed to its constructor IPrimitiveType newValueInstance; if (theChildDefinition.getChildDef().getInstanceConstructorArguments() != null) { - newValueInstance = (IPrimitiveType) theChildDefinition.getChildElement().newInstance( - theChildDefinition.getChildDef().getInstanceConstructorArguments()); + newValueInstance = (IPrimitiveType) theChildDefinition + .getChildElement() + .newInstance(theChildDefinition.getChildDef().getInstanceConstructorArguments()); } else { - newValueInstance = (IPrimitiveType) theChildDefinition.getChildElement().newInstance(); + newValueInstance = + (IPrimitiveType) theChildDefinition.getChildElement().newInstance(); } newValueInstance.setValueAsString(((IPrimitiveType) newValue).getValueAsString()); theChildDefinition.getChildDef().getMutator().setValue(theElement, newValueInstance); @@ -328,30 +354,37 @@ public class FhirPatch { List valuePartParts = myContext.newTerser().getValues(theValueElement, "part"); for (IBase nextValuePartPart : valuePartParts) { - String name = myContext.newTerser().getSingleValue(nextValuePartPart, 
PARAMETER_NAME, IPrimitiveType.class).map(IPrimitiveType::getValueAsString).orElse(null); + String name = myContext + .newTerser() + .getSingleValue(nextValuePartPart, PARAMETER_NAME, IPrimitiveType.class) + .map(IPrimitiveType::getValueAsString) + .orElse(null); if (isNotBlank(name)) { - Optional value = myContext.newTerser().getSingleValue(nextValuePartPart, "value[x]", IBase.class); + Optional value = + myContext.newTerser().getSingleValue(nextValuePartPart, "value[x]", IBase.class); if (value.isPresent()) { - BaseRuntimeChildDefinition partChildDef = theChildDefinition.getChildElement().getChildByName(name); + BaseRuntimeChildDefinition partChildDef = + theChildDefinition.getChildElement().getChildByName(name); if (partChildDef == null) { name = name + "[x]"; partChildDef = theChildDefinition.getChildElement().getChildByName(name); } partChildDef.getMutator().addValue(theNewValue, value.get()); - } - } - } } private void deleteSingleElement(IBase theElementToDelete) { myContext.newTerser().visit(theElementToDelete, new IModelVisitor2() { @Override - public boolean acceptElement(IBase theElement, List theContainingElementPath, List theChildDefinitionPath, List> theElementDefinitionPath) { + public boolean acceptElement( + IBase theElement, + List theContainingElementPath, + List theChildDefinitionPath, + List> theElementDefinitionPath) { if (theElement instanceof IPrimitiveType) { ((IPrimitiveType) theElement).setValueAsString(null); } @@ -359,7 +392,11 @@ public class FhirPatch { } @Override - public boolean acceptUndeclaredExtension(IBaseExtension theNextExt, List theContainingElementPath, List theChildDefinitionPath, List> theElementDefinitionPath) { + public boolean acceptUndeclaredExtension( + IBaseExtension theNextExt, + List theContainingElementPath, + List theChildDefinitionPath, + List> theElementDefinitionPath) { theNextExt.setUrl(null); theNextExt.setValue(null); return true; @@ -380,11 +417,12 @@ public class FhirPatch { } else { - String oldValueTypeName = myContext.getResourceDefinition(theOldValue).getName(); + String oldValueTypeName = + myContext.getResourceDefinition(theOldValue).getName(); Validate.isTrue(oldValueTypeName.equalsIgnoreCase(newValueTypeName), "Resources must be of same type"); - - BaseRuntimeElementCompositeDefinition def = myContext.getResourceDefinition(theOldValue).getBaseDefinition(); + BaseRuntimeElementCompositeDefinition def = + myContext.getResourceDefinition(theOldValue).getBaseDefinition(); String path = def.getName(); EncodeContextPath contextPath = new EncodeContextPath(); @@ -399,7 +437,14 @@ public class FhirPatch { return retVal; } - private void compare(IBaseParameters theDiff, EncodeContextPath theSourceEncodeContext, BaseRuntimeElementDefinition theDef, String theSourcePath, String theTargetPath, IBase theOldField, IBase theNewField) { + private void compare( + IBaseParameters theDiff, + EncodeContextPath theSourceEncodeContext, + BaseRuntimeElementDefinition theDef, + String theSourcePath, + String theTargetPath, + IBase theOldField, + IBase theNewField) { boolean pathIsIgnored = pathIsIgnored(theSourceEncodeContext); if (pathIsIgnored) { @@ -429,14 +474,26 @@ public class FhirPatch { List children = theDef.getChildren(); for (BaseRuntimeChildDefinition nextChild : children) { - compareField(theDiff, theSourceEncodeContext, theSourcePath, theTargetPath, theOldField, theNewField, nextChild); + compareField( + theDiff, + theSourceEncodeContext, + theSourcePath, + theTargetPath, + theOldField, + theNewField, + nextChild); } - } - } - 
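// --- Illustrative sketch (editorial note, not part of this patch) ---
// Rough usage of the diff generation that compare()/compareField() above implement:
// comparing two versions of a resource and producing a FHIRPath Patch Parameters document.
// Assumes R4 model classes and that FhirPatch exposes a diff(IBaseResource, IBaseResource)
// method, as suggested by the surrounding code; names below are examples only.

import ca.uhn.fhir.context.FhirContext;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.r4.model.Patient;

public class FhirPatchDiffExample {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();

		Patient oldPatient = new Patient();
		oldPatient.addName().setFamily("Smith");

		Patient newPatient = new Patient();
		newPatient.addName().setFamily("Smithe");

		// Produces a Parameters resource containing "operation" entries (insert/delete/etc.)
		// with "path"/"index"/"value" parts like the ones assembled in this class.
		IBaseParameters diff = new FhirPatch(ctx).diff(oldPatient, newPatient);
		System.out.println(ctx.newJsonParser().setPrettyPrint(true).encodeResourceToString(diff));
	}
}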
private void compareField(IBaseParameters theDiff, EncodeContextPath theSourceEncodePath, String theSourcePath, String theTargetPath, IBase theOldField, IBase theNewField, BaseRuntimeChildDefinition theChildDef) { + private void compareField( + IBaseParameters theDiff, + EncodeContextPath theSourceEncodePath, + String theSourcePath, + String theTargetPath, + IBase theOldField, + IBase theNewField, + BaseRuntimeChildDefinition theChildDef) { String elementName = theChildDef.getElementName(); boolean repeatable = theChildDef.getMax() != 1; theSourceEncodePath.pushPath(elementName, false); @@ -477,7 +534,11 @@ public class FhirPatch { while (sourceIndex < sourceValues.size()) { IBase operation = ParametersUtil.addParameterToParameters(myContext, theDiff, PARAMETER_OPERATION); ParametersUtil.addPartCode(myContext, operation, PARAMETER_TYPE, OPERATION_DELETE); - ParametersUtil.addPartString(myContext, operation, PARAMETER_PATH, theTargetPath + "." + elementName + (repeatable ? "[" + targetIndex + "]" : "")); + ParametersUtil.addPartString( + myContext, + operation, + PARAMETER_PATH, + theTargetPath + "." + elementName + (repeatable ? "[" + targetIndex + "]" : "")); sourceIndex++; targetIndex++; @@ -486,7 +547,12 @@ public class FhirPatch { theSourceEncodePath.popPath(); } - private void addInsertItems(IBaseParameters theDiff, List theTargetValues, int theTargetIndex, String thePath, BaseRuntimeChildDefinition theChildDefinition) { + private void addInsertItems( + IBaseParameters theDiff, + List theTargetValues, + int theTargetIndex, + String thePath, + BaseRuntimeChildDefinition theChildDefinition) { IBase operation = ParametersUtil.addParameterToParameters(myContext, theDiff, PARAMETER_OPERATION); ParametersUtil.addPartCode(myContext, operation, PARAMETER_TYPE, OPERATION_INSERT); ParametersUtil.addPartString(myContext, operation, PARAMETER_PATH, thePath); @@ -508,7 +574,8 @@ public class FhirPatch { List childValues = nextChild.getAccessor().getValues(value); for (int index = 0; index < childValues.size(); index++) { boolean childRepeatable = theChildDefinition.getMax() != 1; - String elementName = nextChild.getChildNameByDatatype(childValues.get(index).getClass()); + String elementName = nextChild.getChildNameByDatatype( + childValues.get(index).getClass()); String targetPath = thePath + (childRepeatable ? "[" + index + "]" : "") + "." 
+ elementName; addInsertItems(theDiff, childValues, index, targetPath, nextChild); } @@ -567,7 +634,8 @@ public class FhirPatch { private final BaseRuntimeChildDefinition myChildDef; private final BaseRuntimeElementDefinition myChildElement; - public ChildDefinition(BaseRuntimeChildDefinition theChildDef, BaseRuntimeElementDefinition theChildElement) { + public ChildDefinition( + BaseRuntimeChildDefinition theChildDef, BaseRuntimeElementDefinition theChildElement) { this.myChildDef = theChildDef; this.myChildElement = theChildElement; } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/patch/JsonPatchUtils.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/patch/JsonPatchUtils.java index e259286440a..0ddfef67c1f 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/patch/JsonPatchUtils.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/patch/JsonPatchUtils.java @@ -52,7 +52,8 @@ public class JsonPatchUtils { JsonNode jsonPatchNode = mapper.readTree(parser); patch = JsonPatch.fromJson(jsonPatchNode); - JsonNode originalJsonDocument = mapper.readTree(theCtx.newJsonParser().encodeResourceToString(theResourceToUpdate)); + JsonNode originalJsonDocument = + mapper.readTree(theCtx.newJsonParser().encodeResourceToString(theResourceToUpdate)); JsonNode after = patch.apply(originalJsonDocument); @SuppressWarnings("unchecked") @@ -67,10 +68,15 @@ public class JsonPatchUtils { try { retVal = fhirJsonParser.parseResource(clazz, postPatchedContent); } catch (DataFormatException e) { - String resourceId = theResourceToUpdate.getIdElement().toUnqualifiedVersionless().getValue(); - String resourceType = theCtx.getResourceDefinition(theResourceToUpdate).getName(); + String resourceId = theResourceToUpdate + .getIdElement() + .toUnqualifiedVersionless() + .getValue(); + String resourceType = + theCtx.getResourceDefinition(theResourceToUpdate).getName(); resourceId = defaultString(resourceId, resourceType); - String msg = theCtx.getLocalizer().getMessage(JsonPatchUtils.class, "failedToApplyPatch", resourceId, e.getMessage()); + String msg = theCtx.getLocalizer() + .getMessage(JsonPatchUtils.class, "failedToApplyPatch", resourceId, e.getMessage()); throw new InvalidRequestException(Msg.code(1271) + msg); } return retVal; @@ -78,7 +84,5 @@ public class JsonPatchUtils { } catch (IOException | JsonPatchException theE) { throw new InvalidRequestException(Msg.code(1272) + theE.getMessage()); } - } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/patch/XmlPatchUtils.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/patch/XmlPatchUtils.java index 4dcc84086e7..44b2d4eed64 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/patch/XmlPatchUtils.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/patch/XmlPatchUtils.java @@ -35,24 +35,25 @@ import static ca.uhn.fhir.util.StringUtil.toUtf8String; public class XmlPatchUtils { public static T apply(FhirContext theCtx, T theResourceToUpdate, String thePatchBody) { - + @SuppressWarnings("unchecked") Class clazz = (Class) theResourceToUpdate.getClass(); - + String inputResource = theCtx.newXmlParser().encodeResourceToString(theResourceToUpdate); - + ByteArrayOutputStream result = new ByteArrayOutputStream(); try { - Patcher.patch(new ByteArrayInputStream(inputResource.getBytes(Constants.CHARSET_UTF8)), new ByteArrayInputStream(thePatchBody.getBytes(Constants.CHARSET_UTF8)), result); + Patcher.patch( + new ByteArrayInputStream(inputResource.getBytes(Constants.CHARSET_UTF8)), + new 
ByteArrayInputStream(thePatchBody.getBytes(Constants.CHARSET_UTF8)), + result); } catch (IOException e) { throw new InternalErrorException(Msg.code(1266) + e); } - + String resultString = toUtf8String(result.toByteArray()); T retVal = theCtx.newXmlParser().parseResource(clazz, resultString); - + return retVal; } - - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaProvider.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaProvider.java index 9edb0237a7a..7a017937c4e 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaProvider.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaProvider.java @@ -35,18 +35,20 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.jboss.logging.MDC; import org.springframework.beans.factory.annotation.Autowired; -import javax.servlet.http.HttpServletRequest; import java.util.Date; import java.util.Enumeration; import java.util.Set; import java.util.TreeSet; +import javax.servlet.http.HttpServletRequest; public abstract class BaseJpaProvider { public static final String REMOTE_ADDR = "req.remoteAddr"; public static final String REMOTE_UA = "req.userAgent"; private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseJpaProvider.class); + @Autowired protected JpaStorageSettings myStorageSettings; + @Autowired private FhirContext myContext; @@ -58,7 +60,11 @@ public abstract class BaseJpaProvider { myStorageSettings = theStorageSettings; } - protected ExpungeOptions createExpungeOptions(IPrimitiveType theLimit, IPrimitiveType theExpungeDeletedResources, IPrimitiveType theExpungeOldVersions, IPrimitiveType theExpungeEverything) { + protected ExpungeOptions createExpungeOptions( + IPrimitiveType theLimit, + IPrimitiveType theExpungeDeletedResources, + IPrimitiveType theExpungeOldVersions, + IPrimitiveType theExpungeEverything) { ExpungeOptions options = new ExpungeOptions(); if (theLimit != null && theLimit.getValue() != null) { options.setLimit(theLimit.getValue()); @@ -81,7 +87,8 @@ public abstract class BaseJpaProvider { protected IBaseParameters createExpungeResponse(ExpungeOutcome theOutcome) { IBaseParameters parameters = ParametersUtil.newInstance(getContext()); String value = Integer.toString(theOutcome.getDeletedCount()); - ParametersUtil.addParameterToParameters(getContext(), parameters, JpaConstants.OPERATION_EXPUNGE_OUT_PARAM_EXPUNGE_COUNT, "integer", value); + ParametersUtil.addParameterToParameters( + getContext(), parameters, JpaConstants.OPERATION_EXPUNGE_OUT_PARAM_EXPUNGE_COUNT, "integer", value); return parameters; } @@ -98,7 +105,8 @@ public abstract class BaseJpaProvider { } protected DateRangeParam processSinceOrAt(Date theSince, DateRangeParam theAt) { - boolean haveAt = theAt != null && (theAt.getLowerBoundAsInstant() != null || theAt.getUpperBoundAsInstant() != null); + boolean haveAt = + theAt != null && (theAt.getLowerBoundAsInstant() != null || theAt.getUpperBoundAsInstant() != null); if (haveAt && theSince != null) { String msg = getContext().getLocalizer().getMessage(BaseJpaProvider.class, "cantCombintAtAndSince"); throw new InvalidRequestException(Msg.code(553) + msg); @@ -153,12 +161,9 @@ public abstract class BaseJpaProvider { String userAgent = StringUtils.defaultString(theRequest.getHeader("user-agent")); org.slf4j.MDC.put(REMOTE_UA, userAgent); - } public static void startRequest(ServletRequestDetails theRequest) { startRequest(theRequest.getServletRequest()); } - - } diff --git 
a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProvider.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProvider.java index 8186242c64b..8c2a8b35ff3 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProvider.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/provider/BaseJpaResourceProvider.java @@ -61,14 +61,15 @@ import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.springframework.beans.factory.annotation.Required; -import javax.servlet.http.HttpServletRequest; import java.util.Date; +import javax.servlet.http.HttpServletRequest; import static ca.uhn.fhir.jpa.model.util.JpaConstants.OPERATION_META_ADD; import static ca.uhn.fhir.jpa.model.util.JpaConstants.OPERATION_META_DELETE; import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_META; -public abstract class BaseJpaResourceProvider extends BaseJpaProvider implements IResourceProvider { +public abstract class BaseJpaResourceProvider extends BaseJpaProvider + implements IResourceProvider { private IFhirResourceDao myDao; @@ -81,10 +82,16 @@ public abstract class BaseJpaResourceProvider extends B myDao = theDao; } + protected IBaseParameters doExpunge( + IIdType theIdParam, + IPrimitiveType theLimit, + IPrimitiveType theExpungeDeletedResources, + IPrimitiveType theExpungeOldVersions, + IPrimitiveType theExpungeEverything, + RequestDetails theRequest) { - protected IBaseParameters doExpunge(IIdType theIdParam, IPrimitiveType theLimit, IPrimitiveType theExpungeDeletedResources, IPrimitiveType theExpungeOldVersions, IPrimitiveType theExpungeEverything, RequestDetails theRequest) { - - ExpungeOptions options = createExpungeOptions(theLimit, theExpungeDeletedResources, theExpungeOldVersions, theExpungeEverything); + ExpungeOptions options = + createExpungeOptions(theLimit, theExpungeDeletedResources, theExpungeOldVersions, theExpungeEverything); ExpungeOutcome outcome; if (theIdParam != null) { @@ -96,7 +103,6 @@ public abstract class BaseJpaResourceProvider extends B return createExpungeResponse(outcome); } - public IFhirResourceDao getDao() { return myDao; } @@ -108,17 +114,20 @@ public abstract class BaseJpaResourceProvider extends B @History public IBundleProvider getHistoryForResourceInstance( - HttpServletRequest theRequest, - @Offset Integer theOffset, - @IdParam IIdType theId, - @Since Date theSince, - @At DateRangeParam theAt, - RequestDetails theRequestDetails) { + HttpServletRequest theRequest, + @Offset Integer theOffset, + @IdParam IIdType theId, + @Since Date theSince, + @At DateRangeParam theAt, + RequestDetails theRequestDetails) { startRequest(theRequest); try { DateRangeParam sinceOrAt = processSinceOrAt(theSince, theAt); - return myDao.history(theId, new HistorySearchDateRangeParam(theRequestDetails.getParameters(), sinceOrAt, theOffset), theRequestDetails); + return myDao.history( + theId, + new HistorySearchDateRangeParam(theRequestDetails.getParameters(), sinceOrAt, theOffset), + theRequestDetails); } finally { endRequest(theRequest); } @@ -126,15 +135,19 @@ public abstract class BaseJpaResourceProvider extends B @History public IBundleProvider getHistoryForResourceType( - HttpServletRequest theRequest, - @Offset Integer theOffset, - @Since Date theSince, - @At DateRangeParam theAt, - RequestDetails theRequestDetails) { + HttpServletRequest theRequest, + @Offset Integer theOffset, + @Since Date theSince, + @At DateRangeParam theAt, + 
RequestDetails theRequestDetails) { startRequest(theRequest); try { DateRangeParam sinceOrAt = processSinceOrAt(theSince, theAt); - return myDao.history(sinceOrAt.getLowerBoundAsInstant(), sinceOrAt.getUpperBoundAsInstant(), theOffset, theRequestDetails); + return myDao.history( + sinceOrAt.getLowerBoundAsInstant(), + sinceOrAt.getUpperBoundAsInstant(), + theOffset, + theRequestDetails); } finally { endRequest(theRequest); } @@ -146,7 +159,14 @@ public abstract class BaseJpaResourceProvider extends B } @Patch - public DaoMethodOutcome patch(HttpServletRequest theRequest, @IdParam IIdType theId, @ConditionalUrlParam String theConditionalUrl, RequestDetails theRequestDetails, @ResourceParam String theBody, PatchTypeEnum thePatchType, @ResourceParam IBaseParameters theRequestBody) { + public DaoMethodOutcome patch( + HttpServletRequest theRequest, + @IdParam IIdType theId, + @ConditionalUrlParam String theConditionalUrl, + RequestDetails theRequestDetails, + @ResourceParam String theBody, + PatchTypeEnum thePatchType, + @ResourceParam IBaseParameters theRequestBody) { startRequest(theRequest); try { return myDao.patch(theId, theConditionalUrl, thePatchType, theBody, theRequestBody, theRequestDetails); @@ -166,7 +186,11 @@ public abstract class BaseJpaResourceProvider extends B } @Create - public MethodOutcome create(HttpServletRequest theRequest, @ResourceParam T theResource, @ConditionalUrlParam String theConditional, RequestDetails theRequestDetails) { + public MethodOutcome create( + HttpServletRequest theRequest, + @ResourceParam T theResource, + @ConditionalUrlParam String theConditional, + RequestDetails theRequestDetails) { startRequest(theRequest); try { if (theConditional != null) { @@ -180,7 +204,11 @@ public abstract class BaseJpaResourceProvider extends B } @Delete() - public MethodOutcome delete(HttpServletRequest theRequest, @IdParam IIdType theResource, @ConditionalUrlParam(supportsMultiple = true) String theConditional, RequestDetails theRequestDetails) { + public MethodOutcome delete( + HttpServletRequest theRequest, + @IdParam IIdType theResource, + @ConditionalUrlParam(supportsMultiple = true) String theConditional, + RequestDetails theRequestDetails) { startRequest(theRequest); try { if (theConditional != null) { @@ -193,33 +221,54 @@ public abstract class BaseJpaResourceProvider extends B } } - @Operation(name = ProviderConstants.OPERATION_EXPUNGE, idempotent = false, returnParameters = { - @OperationParam(name = JpaConstants.OPERATION_EXPUNGE_OUT_PARAM_EXPUNGE_COUNT, typeName = "integer") - }) + @Operation( + name = ProviderConstants.OPERATION_EXPUNGE, + idempotent = false, + returnParameters = { + @OperationParam(name = JpaConstants.OPERATION_EXPUNGE_OUT_PARAM_EXPUNGE_COUNT, typeName = "integer") + }) public IBaseParameters expunge( - @IdParam IIdType theIdParam, - @OperationParam(name = ProviderConstants.OPERATION_EXPUNGE_PARAM_LIMIT, typeName = "integer") IPrimitiveType theLimit, - @OperationParam(name = ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_DELETED_RESOURCES, typeName = "boolean") IPrimitiveType theExpungeDeletedResources, - @OperationParam(name = ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_PREVIOUS_VERSIONS, typeName = "boolean") IPrimitiveType theExpungeOldVersions, - RequestDetails theRequest) { + @IdParam IIdType theIdParam, + @OperationParam(name = ProviderConstants.OPERATION_EXPUNGE_PARAM_LIMIT, typeName = "integer") + IPrimitiveType theLimit, + @OperationParam( + name = ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_DELETED_RESOURCES, + 
typeName = "boolean") + IPrimitiveType theExpungeDeletedResources, + @OperationParam( + name = ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_PREVIOUS_VERSIONS, + typeName = "boolean") + IPrimitiveType theExpungeOldVersions, + RequestDetails theRequest) { return doExpunge(theIdParam, theLimit, theExpungeDeletedResources, theExpungeOldVersions, null, theRequest); } - @Operation(name = ProviderConstants.OPERATION_EXPUNGE, idempotent = false, returnParameters = { - @OperationParam(name = JpaConstants.OPERATION_EXPUNGE_OUT_PARAM_EXPUNGE_COUNT, typeName = "integer") - }) + @Operation( + name = ProviderConstants.OPERATION_EXPUNGE, + idempotent = false, + returnParameters = { + @OperationParam(name = JpaConstants.OPERATION_EXPUNGE_OUT_PARAM_EXPUNGE_COUNT, typeName = "integer") + }) public IBaseParameters expunge( - @OperationParam(name = ProviderConstants.OPERATION_EXPUNGE_PARAM_LIMIT, typeName = "integer") IPrimitiveType theLimit, - @OperationParam(name = ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_DELETED_RESOURCES, typeName = "boolean") IPrimitiveType theExpungeDeletedResources, - @OperationParam(name = ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_PREVIOUS_VERSIONS, typeName = "boolean") IPrimitiveType theExpungeOldVersions, - RequestDetails theRequest) { + @OperationParam(name = ProviderConstants.OPERATION_EXPUNGE_PARAM_LIMIT, typeName = "integer") + IPrimitiveType theLimit, + @OperationParam( + name = ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_DELETED_RESOURCES, + typeName = "boolean") + IPrimitiveType theExpungeDeletedResources, + @OperationParam( + name = ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_PREVIOUS_VERSIONS, + typeName = "boolean") + IPrimitiveType theExpungeOldVersions, + RequestDetails theRequest) { return doExpunge(null, theLimit, theExpungeDeletedResources, theExpungeOldVersions, null, theRequest); } @Description("Request a global list of tags, profiles, and security labels") - @Operation(name = OPERATION_META, idempotent = true, returnParameters = { - @OperationParam(name = "return", typeName = "Meta") - }) + @Operation( + name = OPERATION_META, + idempotent = true, + returnParameters = {@OperationParam(name = "return", typeName = "Meta")}) public IBaseParameters meta(RequestDetails theRequestDetails) { Class metaType = getContext().getElementDefinition("Meta").getImplementingClass(); IBaseMetaType metaGetOperation = getDao().metaGetOperation(metaType, theRequestDetails); @@ -229,9 +278,10 @@ public abstract class BaseJpaResourceProvider extends B } @Description("Request a list of tags, profiles, and security labels for a specfic resource instance") - @Operation(name = OPERATION_META, idempotent = true, returnParameters = { - @OperationParam(name = "return", typeName = "Meta") - }) + @Operation( + name = OPERATION_META, + idempotent = true, + returnParameters = {@OperationParam(name = "return", typeName = "Meta")}) public IBaseParameters meta(@IdParam IIdType theId, RequestDetails theRequestDetails) { Class metaType = getContext().getElementDefinition("Meta").getImplementingClass(); IBaseMetaType metaGetOperation = getDao().metaGetOperation(metaType, theId, theRequestDetails); @@ -242,10 +292,14 @@ public abstract class BaseJpaResourceProvider extends B } @Description("Add tags, profiles, and/or security labels to a resource") - @Operation(name = OPERATION_META_ADD, idempotent = false, returnParameters = { - @OperationParam(name = "return", typeName = "Meta") - }) - public IBaseParameters metaAdd(@IdParam IIdType theId, @OperationParam(name = 
"meta", typeName = "Meta") IBaseMetaType theMeta, RequestDetails theRequestDetails) { + @Operation( + name = OPERATION_META_ADD, + idempotent = false, + returnParameters = {@OperationParam(name = "return", typeName = "Meta")}) + public IBaseParameters metaAdd( + @IdParam IIdType theId, + @OperationParam(name = "meta", typeName = "Meta") IBaseMetaType theMeta, + RequestDetails theRequestDetails) { if (theMeta == null) { throw new InvalidRequestException(Msg.code(554) + "Input contains no parameter with name 'meta'"); } @@ -256,10 +310,14 @@ public abstract class BaseJpaResourceProvider extends B } @Description("Delete tags, profiles, and/or security labels from a resource") - @Operation(name = OPERATION_META_DELETE, idempotent = false, returnParameters = { - @OperationParam(name = "return", typeName = "Meta") - }) - public IBaseParameters metaDelete(@IdParam IIdType theId, @OperationParam(name = "meta", typeName = "Meta") IBaseMetaType theMeta, RequestDetails theRequestDetails) { + @Operation( + name = OPERATION_META_DELETE, + idempotent = false, + returnParameters = {@OperationParam(name = "return", typeName = "Meta")}) + public IBaseParameters metaDelete( + @IdParam IIdType theId, + @OperationParam(name = "meta", typeName = "Meta") IBaseMetaType theMeta, + RequestDetails theRequestDetails) { if (theMeta == null) { throw new InvalidRequestException(Msg.code(555) + "Input contains no parameter with name 'meta'"); } @@ -270,7 +328,12 @@ public abstract class BaseJpaResourceProvider extends B } @Update - public MethodOutcome update(HttpServletRequest theRequest, @ResourceParam T theResource, @IdParam IIdType theId, @ConditionalUrlParam String theConditional, RequestDetails theRequestDetails) { + public MethodOutcome update( + HttpServletRequest theRequest, + @ResourceParam T theResource, + @IdParam IIdType theId, + @ConditionalUrlParam String theConditional, + RequestDetails theRequestDetails) { startRequest(theRequest); try { if (theConditional != null) { @@ -284,15 +347,26 @@ public abstract class BaseJpaResourceProvider extends B } @Validate - public MethodOutcome validate(@ResourceParam T theResource, @ResourceParam String theRawResource, @ResourceParam EncodingEnum theEncoding, @Validate.Mode ValidationModeEnum theMode, - @Validate.Profile String theProfile, RequestDetails theRequestDetails) { + public MethodOutcome validate( + @ResourceParam T theResource, + @ResourceParam String theRawResource, + @ResourceParam EncodingEnum theEncoding, + @Validate.Mode ValidationModeEnum theMode, + @Validate.Profile String theProfile, + RequestDetails theRequestDetails) { return validate(theResource, null, theRawResource, theEncoding, theMode, theProfile, theRequestDetails); } @Validate - public MethodOutcome validate(@ResourceParam T theResource, @IdParam IIdType theId, @ResourceParam String theRawResource, @ResourceParam EncodingEnum theEncoding, @Validate.Mode ValidationModeEnum theMode, - @Validate.Profile String theProfile, RequestDetails theRequestDetails) { - return getDao().validate(theResource, theId, theRawResource, theEncoding, theMode, theProfile, theRequestDetails); + public MethodOutcome validate( + @ResourceParam T theResource, + @IdParam IIdType theId, + @ResourceParam String theRawResource, + @ResourceParam EncodingEnum theEncoding, + @Validate.Mode ValidationModeEnum theMode, + @Validate.Profile String theProfile, + RequestDetails theRequestDetails) { + return getDao().validate( + theResource, theId, theRawResource, theEncoding, theMode, theProfile, theRequestDetails); } - } diff 
--git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/provider/BaseStorageSystemProvider.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/provider/BaseStorageSystemProvider.java index a125ba73dbf..3ad030ff555 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/provider/BaseStorageSystemProvider.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/provider/BaseStorageSystemProvider.java @@ -34,21 +34,38 @@ import org.springframework.beans.factory.annotation.Required; public abstract class BaseStorageSystemProvider extends BaseJpaProvider { protected IFhirSystemDao myDao; - @Operation(name = ProviderConstants.OPERATION_EXPUNGE, idempotent = false, returnParameters = { - @OperationParam(name = JpaConstants.OPERATION_EXPUNGE_OUT_PARAM_EXPUNGE_COUNT, typeName = "integer") - }) + @Operation( + name = ProviderConstants.OPERATION_EXPUNGE, + idempotent = false, + returnParameters = { + @OperationParam(name = JpaConstants.OPERATION_EXPUNGE_OUT_PARAM_EXPUNGE_COUNT, typeName = "integer") + }) public IBaseParameters expunge( - @OperationParam(name = ProviderConstants.OPERATION_EXPUNGE_PARAM_LIMIT, typeName = "integer") IPrimitiveType theLimit, - @OperationParam(name = ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_DELETED_RESOURCES, typeName = "boolean") IPrimitiveType theExpungeDeletedResources, - @OperationParam(name = ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_PREVIOUS_VERSIONS, typeName = "boolean") IPrimitiveType theExpungeOldVersions, - @OperationParam(name = ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_EVERYTHING, typeName = "boolean") IPrimitiveType theExpungeEverything, - RequestDetails theRequestDetails - ) { - return doExpunge(theLimit, theExpungeDeletedResources, theExpungeOldVersions, theExpungeEverything, theRequestDetails); + @OperationParam(name = ProviderConstants.OPERATION_EXPUNGE_PARAM_LIMIT, typeName = "integer") + IPrimitiveType theLimit, + @OperationParam( + name = ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_DELETED_RESOURCES, + typeName = "boolean") + IPrimitiveType theExpungeDeletedResources, + @OperationParam( + name = ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_PREVIOUS_VERSIONS, + typeName = "boolean") + IPrimitiveType theExpungeOldVersions, + @OperationParam(name = ProviderConstants.OPERATION_EXPUNGE_PARAM_EXPUNGE_EVERYTHING, typeName = "boolean") + IPrimitiveType theExpungeEverything, + RequestDetails theRequestDetails) { + return doExpunge( + theLimit, theExpungeDeletedResources, theExpungeOldVersions, theExpungeEverything, theRequestDetails); } - protected IBaseParameters doExpunge(IPrimitiveType theLimit, IPrimitiveType theExpungeDeletedResources, IPrimitiveType theExpungeOldVersions, IPrimitiveType theExpungeEverything, RequestDetails theRequestDetails) { - ExpungeOptions options = createExpungeOptions(theLimit, theExpungeDeletedResources, theExpungeOldVersions, theExpungeEverything); + protected IBaseParameters doExpunge( + IPrimitiveType theLimit, + IPrimitiveType theExpungeDeletedResources, + IPrimitiveType theExpungeOldVersions, + IPrimitiveType theExpungeEverything, + RequestDetails theRequestDetails) { + ExpungeOptions options = + createExpungeOptions(theLimit, theExpungeDeletedResources, theExpungeOldVersions, theExpungeEverything); ExpungeOutcome outcome = getDao().expunge(options, theRequestDetails); return createExpungeResponse(outcome); } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/provider/SubscriptionTriggeringProvider.java 
b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/provider/SubscriptionTriggeringProvider.java index 0b660167bf6..fffa1cf74c8 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/provider/SubscriptionTriggeringProvider.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/provider/SubscriptionTriggeringProvider.java @@ -39,31 +39,49 @@ import java.util.List; public class SubscriptionTriggeringProvider implements IResourceProvider { @Autowired private FhirContext myFhirContext; + @Autowired private ISubscriptionTriggeringSvc mySubscriptionTriggeringSvc; - @Operation(name = JpaConstants.OPERATION_TRIGGER_SUBSCRIPTION) public IBaseParameters triggerSubscription( - @OperationParam(name = ProviderConstants.SUBSCRIPTION_TRIGGERING_PARAM_RESOURCE_ID, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "uri") List> theResourceIds, - @OperationParam(name = ProviderConstants.SUBSCRIPTION_TRIGGERING_PARAM_SEARCH_URL, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> theSearchUrls - ) { + @OperationParam( + name = ProviderConstants.SUBSCRIPTION_TRIGGERING_PARAM_RESOURCE_ID, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "uri") + List> theResourceIds, + @OperationParam( + name = ProviderConstants.SUBSCRIPTION_TRIGGERING_PARAM_SEARCH_URL, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "string") + List> theSearchUrls) { return mySubscriptionTriggeringSvc.triggerSubscription(theResourceIds, theSearchUrls, null); } @Operation(name = JpaConstants.OPERATION_TRIGGER_SUBSCRIPTION) public IBaseParameters triggerSubscription( - @IdParam IIdType theSubscriptionId, - @OperationParam(name = ProviderConstants.SUBSCRIPTION_TRIGGERING_PARAM_RESOURCE_ID, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "uri") List> theResourceIds, - @OperationParam(name = ProviderConstants.SUBSCRIPTION_TRIGGERING_PARAM_SEARCH_URL, min = 0, max = OperationParam.MAX_UNLIMITED, typeName = "string") List> theSearchUrls - ) { + @IdParam IIdType theSubscriptionId, + @OperationParam( + name = ProviderConstants.SUBSCRIPTION_TRIGGERING_PARAM_RESOURCE_ID, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "uri") + List> theResourceIds, + @OperationParam( + name = ProviderConstants.SUBSCRIPTION_TRIGGERING_PARAM_SEARCH_URL, + min = 0, + max = OperationParam.MAX_UNLIMITED, + typeName = "string") + List> theSearchUrls) { return mySubscriptionTriggeringSvc.triggerSubscription(theResourceIds, theSearchUrls, theSubscriptionId); } - @Override public Class getResourceType() { - return myFhirContext.getResourceDefinition(ResourceTypeEnum.SUBSCRIPTION.getCode()).getImplementingClass(); + return myFhirContext + .getResourceDefinition(ResourceTypeEnum.SUBSCRIPTION.getCode()) + .getImplementingClass(); } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/search/SearchConstants.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/search/SearchConstants.java index 1075dab219f..50db30cfefd 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/search/SearchConstants.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/search/SearchConstants.java @@ -22,6 +22,5 @@ package ca.uhn.fhir.jpa.search; public final class SearchConstants { public static final int MAX_PAGE_SIZE = 800; - private SearchConstants() { - } + private SearchConstants() {} } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamWithInlineReferencesExtractor.java 
b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamWithInlineReferencesExtractor.java index 74097054ca3..c72601a1368 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamWithInlineReferencesExtractor.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamWithInlineReferencesExtractor.java @@ -48,22 +48,28 @@ import java.util.List; import java.util.Optional; import java.util.Set; -public abstract class BaseSearchParamWithInlineReferencesExtractor implements ISearchParamWithInlineReferencesExtractor { +public abstract class BaseSearchParamWithInlineReferencesExtractor + implements ISearchParamWithInlineReferencesExtractor { private static final Logger ourLog = LoggerFactory.getLogger(BaseSearchParamWithInlineReferencesExtractor.class); protected FhirContext myFhirContext; protected JpaStorageSettings myStorageSettings; + @Autowired private MatchResourceUrlService myMatchResourceUrlService; + @Autowired private DaoResourceLinkResolver myDaoResourceLinkResolver; + @Autowired private MemoryCacheService myMemoryCacheService; + @Autowired private IIdHelperService myIdHelperService; @Override - public void extractInlineReferences(RequestDetails theRequestDetails, IBaseResource theResource, TransactionDetails theTransactionDetails) { + public void extractInlineReferences( + RequestDetails theRequestDetails, IBaseResource theResource, TransactionDetails theTransactionDetails) { FhirTerser terser = myFhirContext.newTerser(); List allRefs = terser.getAllPopulatedChildElementsOfType(theResource, IBaseReference.class); for (IBaseReference nextRef : allRefs) { @@ -75,34 +81,51 @@ public abstract class BaseSearchParamWithInlineReferencesExtractor matchResourceType = matchResourceDef.getImplementingClass(); T resolvedMatch = null; if (theTransactionDetails != null) { - resolvedMatch = (T) theTransactionDetails.getResolvedMatchUrls().get(nextIdText); + resolvedMatch = + (T) theTransactionDetails.getResolvedMatchUrls().get(nextIdText); } Set matches; if (resolvedMatch != null && !IResourcePersistentId.NOT_FOUND.equals(resolvedMatch)) { matches = Set.of(resolvedMatch); } else { - matches = myMatchResourceUrlService.processMatchUrl(nextIdText, matchResourceType, theTransactionDetails, theRequestDetails); + matches = myMatchResourceUrlService.processMatchUrl( + nextIdText, matchResourceType, theTransactionDetails, theRequestDetails); } T match; IIdType newId = null; if (matches.isEmpty()) { - Optional placeholderOpt = myDaoResourceLinkResolver.createPlaceholderTargetIfConfiguredToDoSo(matchResourceType, nextRef, null, theRequestDetails, theTransactionDetails); + Optional placeholderOpt = + myDaoResourceLinkResolver.createPlaceholderTargetIfConfiguredToDoSo( + matchResourceType, nextRef, null, theRequestDetails, theTransactionDetails); if (placeholderOpt.isPresent()) { match = (T) placeholderOpt.get().getPersistentId(); newId = myFhirContext.getVersion().newIdType(); @@ -111,11 +134,15 @@ public abstract class BaseSearchParamWithInlineReferencesExtractor 1) { - String msg = myFhirContext.getLocalizer().getMessage(TransactionDetails.class, "invalidMatchUrlMultipleMatches", nextId.getValue()); + String msg = myFhirContext + .getLocalizer() + .getMessage(TransactionDetails.class, "invalidMatchUrlMultipleMatches", nextId.getValue()); throw new PreconditionFailedException(Msg.code(1092) + msg); } else { match = matches.iterator().next(); diff --git 
a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/ISearchParamWithInlineReferencesExtractor.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/ISearchParamWithInlineReferencesExtractor.java index 3dfd765000e..601cdc8c92c 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/ISearchParamWithInlineReferencesExtractor.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/ISearchParamWithInlineReferencesExtractor.java @@ -30,5 +30,6 @@ public interface ISearchParamWithInlineReferencesExtractor { * These match URLs are resolved and replaced with the ID of the * matching resource. */ - void extractInlineReferences(RequestDetails theRequestDetails, IBaseResource theResource, TransactionDetails theTransactionDetails); + void extractInlineReferences( + RequestDetails theRequestDetails, IBaseResource theResource, TransactionDetails theTransactionDetails); } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/searchparam/submit/config/SearchParamSubmitterConfig.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/searchparam/submit/config/SearchParamSubmitterConfig.java index f00c470e2d5..bfddf087f77 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/searchparam/submit/config/SearchParamSubmitterConfig.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/searchparam/submit/config/SearchParamSubmitterConfig.java @@ -30,18 +30,17 @@ import org.springframework.context.annotation.Configuration; public class SearchParamSubmitterConfig { @Bean - public SearchParameterCanonicalizer searchParameterCanonicalizer(FhirContext theFhirContext){ + public SearchParameterCanonicalizer searchParameterCanonicalizer(FhirContext theFhirContext) { return new SearchParameterCanonicalizer(theFhirContext); } @Bean - public SearchParamValidatingInterceptor searchParamValidatingInterceptor(){ + public SearchParamValidatingInterceptor searchParamValidatingInterceptor() { return new SearchParamValidatingInterceptor(); } @Bean - public SearchParamSubmitInterceptorLoader searchParamSubmitInterceptorLoader(){ + public SearchParamSubmitInterceptorLoader searchParamSubmitInterceptorLoader() { return new SearchParamSubmitInterceptorLoader(); } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/searchparam/submit/interceptor/SearchParamSubmitInterceptorLoader.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/searchparam/submit/interceptor/SearchParamSubmitInterceptorLoader.java index 7768865e02f..0cbf280298c 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/searchparam/submit/interceptor/SearchParamSubmitInterceptorLoader.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/searchparam/submit/interceptor/SearchParamSubmitInterceptorLoader.java @@ -44,7 +44,8 @@ public class SearchParamSubmitInterceptorLoader { } @Autowired - public void setSearchParamValidatingInterceptor(SearchParamValidatingInterceptor theSearchParamValidatingInterceptor) { + public void setSearchParamValidatingInterceptor( + SearchParamValidatingInterceptor theSearchParamValidatingInterceptor) { mySearchParamValidatingInterceptor = theSearchParamValidatingInterceptor; } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/searchparam/submit/interceptor/SearchParamValidatingInterceptor.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/searchparam/submit/interceptor/SearchParamValidatingInterceptor.java index 85346c506bf..98c377fa6bd 100644 --- 
a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/searchparam/submit/interceptor/SearchParamValidatingInterceptor.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/searchparam/submit/interceptor/SearchParamValidatingInterceptor.java @@ -39,14 +39,13 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; import ca.uhn.fhir.util.HapiExtensions; import org.hl7.fhir.instance.model.api.IBaseExtension; import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nullable; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nullable; import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -71,7 +70,8 @@ public class SearchParamValidatingInterceptor { } @Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED) - public void resourcePreUpdate(IBaseResource theOldResource, IBaseResource theNewResource, RequestDetails theRequestDetails) { + public void resourcePreUpdate( + IBaseResource theOldResource, IBaseResource theNewResource, RequestDetails theRequestDetails) { validateSearchParamOnUpdate(theNewResource, theRequestDetails); } @@ -95,19 +95,28 @@ public class SearchParamValidatingInterceptor { private void validateSearchParamOnCreateAndUpdate(RuntimeSearchParam theRuntimeSearchParam) { // Validate uplifted refchains - List> refChainExtensions = theRuntimeSearchParam.getExtensions(HapiExtensions.EXTENSION_SEARCHPARAM_UPLIFT_REFCHAIN); + List> refChainExtensions = + theRuntimeSearchParam.getExtensions(HapiExtensions.EXTENSION_SEARCHPARAM_UPLIFT_REFCHAIN); for (IBaseExtension nextExtension : refChainExtensions) { - List codeExtensions = nextExtension - .getExtension() - .stream() - .map(t->(IBaseExtension)t) - .filter(t -> HapiExtensions.EXTENSION_SEARCHPARAM_UPLIFT_REFCHAIN_PARAM_CODE.equals(t.getUrl())) - .collect(Collectors.toList()); + List codeExtensions = nextExtension.getExtension().stream() + .map(t -> (IBaseExtension) t) + .filter(t -> HapiExtensions.EXTENSION_SEARCHPARAM_UPLIFT_REFCHAIN_PARAM_CODE.equals(t.getUrl())) + .collect(Collectors.toList()); if (codeExtensions.size() != 1) { - throw new UnprocessableEntityException(Msg.code(2283) + "Extension with URL " + HapiExtensions.EXTENSION_SEARCHPARAM_UPLIFT_REFCHAIN + " must have exactly one child extension with URL " + HapiExtensions.EXTENSION_SEARCHPARAM_UPLIFT_REFCHAIN_PARAM_CODE); + throw new UnprocessableEntityException( + Msg.code(2283) + "Extension with URL " + HapiExtensions.EXTENSION_SEARCHPARAM_UPLIFT_REFCHAIN + + " must have exactly one child extension with URL " + + HapiExtensions.EXTENSION_SEARCHPARAM_UPLIFT_REFCHAIN_PARAM_CODE); } - if (codeExtensions.get(0).getValue() == null || !"code".equals(myFhirContext.getElementDefinition(codeExtensions.get(0).getValue().getClass()).getName())) { - throw new UnprocessableEntityException(Msg.code(2284) + "Extension with URL " + HapiExtensions.EXTENSION_SEARCHPARAM_UPLIFT_REFCHAIN_PARAM_CODE + " must have a value of type 'code'"); + if (codeExtensions.get(0).getValue() == null + || !"code" + .equals(myFhirContext + .getElementDefinition( + codeExtensions.get(0).getValue().getClass()) + .getName())) { + throw new UnprocessableEntityException(Msg.code(2284) + "Extension with URL " + + HapiExtensions.EXTENSION_SEARCHPARAM_UPLIFT_REFCHAIN_PARAM_CODE + + " must have a value of 
type 'code'"); } } } @@ -115,7 +124,8 @@ public class SearchParamValidatingInterceptor { private void validateStandardSpOnCreate(RequestDetails theRequestDetails, SearchParameterMap searchParameterMap) { List persistedIdList = getDao().searchForIds(searchParameterMap, theRequestDetails); if (isNotEmpty(persistedIdList)) { - throw new UnprocessableEntityException(Msg.code(2196) + "Can't process submitted SearchParameter as it is overlapping an existing one."); + throw new UnprocessableEntityException( + Msg.code(2196) + "Can't process submitted SearchParameter as it is overlapping an existing one."); } } @@ -137,12 +147,14 @@ public class SearchParamValidatingInterceptor { } private boolean isNewSearchParam(RuntimeSearchParam theSearchParam, Set theExistingIds) { - return theExistingIds - .stream() - .noneMatch(resId -> resId.substring(resId.indexOf("/") + 1).equals(theSearchParam.getId().getIdPart())); + return theExistingIds.stream().noneMatch(resId -> resId.substring(resId.indexOf("/") + 1) + .equals(theSearchParam.getId().getIdPart())); } - private void validateStandardSpOnUpdate(RequestDetails theRequestDetails, RuntimeSearchParam runtimeSearchParam, SearchParameterMap searchParameterMap) { + private void validateStandardSpOnUpdate( + RequestDetails theRequestDetails, + RuntimeSearchParam runtimeSearchParam, + SearchParameterMap searchParameterMap) { List pidList = getDao().searchForIds(searchParameterMap, theRequestDetails); if (isNotEmpty(pidList)) { Set resolvedResourceIds = myIdHelperService.translatePidsToFhirResourceIds(new HashSet<>(pidList)); @@ -153,7 +165,8 @@ public class SearchParamValidatingInterceptor { } private void throwDuplicateError() { - throw new UnprocessableEntityException(Msg.code(2125) + "Can't process submitted SearchParameter as it is overlapping an existing one."); + throw new UnprocessableEntityException( + Msg.code(2125) + "Can't process submitted SearchParameter as it is overlapping an existing one."); } private boolean isNotSearchParameterResource(IBaseResource theResource) { @@ -224,5 +237,4 @@ public class SearchParamValidatingInterceptor { return retVal; } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/api/BaseChannelSettings.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/api/BaseChannelSettings.java index 079490e85a9..acfddf2c8f3 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/api/BaseChannelSettings.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/api/BaseChannelSettings.java @@ -26,7 +26,6 @@ public abstract class BaseChannelSettings implements IChannelSettings { private ChannelRetryConfiguration myRetryConfigurationParameters; - /** * Default true. Used by IChannelNamer to decide how to qualify the channel name. */ diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/api/IChannelFactory.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/api/IChannelFactory.java index 79e7c9eef68..719ad9f454e 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/api/IChannelFactory.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/api/IChannelFactory.java @@ -39,7 +39,8 @@ public interface IChannelFactory { * @param theMessageType The object type that will be placed on this queue. Objects will be Jackson-annotated structures. * @param theChannelSettings Contains the configuration for subscribers. 
*/ - IChannelReceiver getOrCreateReceiver(String theChannelName, Class theMessageType, ChannelConsumerSettings theChannelSettings); + IChannelReceiver getOrCreateReceiver( + String theChannelName, Class theMessageType, ChannelConsumerSettings theChannelSettings); /** * Create a channel that is used to send messages to the queue. @@ -53,7 +54,8 @@ public interface IChannelFactory { * @param theMessageType The object type that will be placed on this queue. Objects will be Jackson-annotated structures. * @param theChannelSettings Contains the configuration for senders. */ - IChannelProducer getOrCreateProducer(String theChannelName, Class theMessageType, ChannelProducerSettings theChannelSettings); + IChannelProducer getOrCreateProducer( + String theChannelName, Class theMessageType, ChannelProducerSettings theChannelSettings); /** * @return the IChannelNamer used by this factory diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/api/IChannelProducer.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/api/IChannelProducer.java index 36ece0efb91..9dfffea08b6 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/api/IChannelProducer.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/api/IChannelProducer.java @@ -22,5 +22,4 @@ package ca.uhn.fhir.jpa.subscription.channel.api; import org.springframework.messaging.MessageChannel; import org.springframework.messaging.support.InterceptableChannel; -public interface IChannelProducer extends MessageChannel, InterceptableChannel { -} +public interface IChannelProducer extends MessageChannel, InterceptableChannel {} diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/impl/LinkedBlockingChannel.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/impl/LinkedBlockingChannel.java index 9ab977f821f..c42fea383c7 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/impl/LinkedBlockingChannel.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/impl/LinkedBlockingChannel.java @@ -24,11 +24,11 @@ import ca.uhn.fhir.jpa.subscription.channel.api.IChannelReceiver; import org.springframework.messaging.MessageHandler; import org.springframework.messaging.support.ExecutorSubscribableChannel; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Optional; import java.util.concurrent.Executor; import java.util.function.Supplier; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; @@ -58,10 +58,9 @@ public class LinkedBlockingChannel extends ExecutorSubscribableChannel implement @Override public boolean hasSubscription(@Nonnull MessageHandler handler) { - return getSubscribers() - .stream() - .map(t -> (RetryingMessageHandlerWrapper) t) - .anyMatch(t -> t.getWrappedHandler() == handler); + return getSubscribers().stream() + .map(t -> (RetryingMessageHandlerWrapper) t) + .anyMatch(t -> t.getWrappedHandler() == handler); } @Override @@ -71,11 +70,10 @@ public class LinkedBlockingChannel extends ExecutorSubscribableChannel implement @Override public boolean unsubscribe(@Nonnull MessageHandler handler) { - Optional match = getSubscribers() - .stream() - .map(t -> (RetryingMessageHandlerWrapper) t) - .filter(t -> t.getWrappedHandler() == handler) - .findFirst(); + Optional match = getSubscribers().stream() + .map(t -> (RetryingMessageHandlerWrapper) t) + .filter(t -> 
t.getWrappedHandler() == handler) + .findFirst(); match.ifPresent(super::unsubscribe); return match.isPresent(); } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/impl/LinkedBlockingChannelFactory.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/impl/LinkedBlockingChannelFactory.java index 9d9bf771c95..13bd3aede1f 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/impl/LinkedBlockingChannelFactory.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/impl/LinkedBlockingChannelFactory.java @@ -30,11 +30,11 @@ import ca.uhn.fhir.subscription.SubscriptionConstants; import ca.uhn.fhir.util.ThreadPoolUtil; import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; -import javax.annotation.Nonnull; -import javax.annotation.PreDestroy; import java.util.Collections; import java.util.HashMap; import java.util.Map; +import javax.annotation.Nonnull; +import javax.annotation.PreDestroy; public class LinkedBlockingChannelFactory implements IChannelFactory { @@ -46,12 +46,14 @@ public class LinkedBlockingChannelFactory implements IChannelFactory { } @Override - public IChannelReceiver getOrCreateReceiver(String theChannelName, Class theMessageType, ChannelConsumerSettings theChannelSettings) { + public IChannelReceiver getOrCreateReceiver( + String theChannelName, Class theMessageType, ChannelConsumerSettings theChannelSettings) { return getOrCreateChannel(theChannelName, theChannelSettings.getConcurrentConsumers(), theChannelSettings); } @Override - public IChannelProducer getOrCreateProducer(String theChannelName, Class theMessageType, ChannelProducerSettings theChannelSettings) { + public IChannelProducer getOrCreateProducer( + String theChannelName, Class theMessageType, ChannelProducerSettings theChannelSettings) { return getOrCreateChannel(theChannelName, theChannelSettings.getConcurrentConsumers(), theChannelSettings); } @@ -60,27 +62,29 @@ public class LinkedBlockingChannelFactory implements IChannelFactory { return myChannelNamer; } - private LinkedBlockingChannel getOrCreateChannel(String theChannelName, - int theConcurrentConsumers, - IChannelSettings theChannelSettings) { + private LinkedBlockingChannel getOrCreateChannel( + String theChannelName, int theConcurrentConsumers, IChannelSettings theChannelSettings) { // TODO - does this need retry settings? 
final String channelName = myChannelNamer.getChannelName(theChannelName, theChannelSettings); - return myChannels.computeIfAbsent(channelName, t -> buildLinkedBlockingChannel(theConcurrentConsumers, channelName)); + return myChannels.computeIfAbsent( + channelName, t -> buildLinkedBlockingChannel(theConcurrentConsumers, channelName)); } @Nonnull private LinkedBlockingChannel buildLinkedBlockingChannel(int theConcurrentConsumers, String theChannelName) { String threadNamePrefix = theChannelName + "-"; - ThreadPoolTaskExecutor threadPoolExecutor = ThreadPoolUtil.newThreadPool(theConcurrentConsumers, theConcurrentConsumers, threadNamePrefix, SubscriptionConstants.DELIVERY_EXECUTOR_QUEUE_SIZE); + ThreadPoolTaskExecutor threadPoolExecutor = ThreadPoolUtil.newThreadPool( + theConcurrentConsumers, + theConcurrentConsumers, + threadNamePrefix, + SubscriptionConstants.DELIVERY_EXECUTOR_QUEUE_SIZE); return new LinkedBlockingChannel(theChannelName, threadPoolExecutor, threadPoolExecutor::getQueueSize); } - @PreDestroy public void stop() { myChannels.clear(); } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/impl/RetryingMessageHandlerWrapper.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/impl/RetryingMessageHandlerWrapper.java index a538369c1ca..6c88ae84f2f 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/impl/RetryingMessageHandlerWrapper.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/impl/RetryingMessageHandlerWrapper.java @@ -62,8 +62,13 @@ class RetryingMessageHandlerWrapper implements MessageHandler { retryTemplate.setThrowLastExceptionOnExhausted(true); RetryListener retryListener = new RetryListenerSupport() { @Override - public void onError(RetryContext theContext, RetryCallback theCallback, Throwable theThrowable) { - ourLog.error("Failure {} processing message in channel[{}]: {}", theContext.getRetryCount(), myChannelName, theThrowable.toString()); + public void onError( + RetryContext theContext, RetryCallback theCallback, Throwable theThrowable) { + ourLog.error( + "Failure {} processing message in channel[{}]: {}", + theContext.getRetryCount(), + myChannelName, + theThrowable.toString()); ourLog.error("Failure", theThrowable); if (theThrowable instanceof BaseUnrecoverableRuntimeException) { theContext.setExhaustedOnly(); @@ -79,7 +84,7 @@ class RetryingMessageHandlerWrapper implements MessageHandler { } } }; - retryTemplate.setListeners(new RetryListener[]{retryListener}); + retryTemplate.setListeners(new RetryListener[] {retryListener}); retryTemplate.execute(context -> { myWrap.handleMessage(theMessage); return null; diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/subscription/SubscriptionChannelFactory.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/subscription/SubscriptionChannelFactory.java index 7dd5260fb25..cfec1f64e96 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/subscription/SubscriptionChannelFactory.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/channel/subscription/SubscriptionChannelFactory.java @@ -40,26 +40,32 @@ public class SubscriptionChannelFactory { myChannelFactory = theChannelFactory; } - public IChannelProducer newDeliverySendingChannel(String theChannelName, ChannelProducerSettings theChannelSettings) { + public IChannelProducer newDeliverySendingChannel( + String theChannelName, ChannelProducerSettings 
theChannelSettings) { ChannelProducerSettings config = newProducerConfigForDeliveryChannel(theChannelSettings); config.setRetryConfiguration(theChannelSettings.getRetryConfigurationParameters()); return myChannelFactory.getOrCreateProducer(theChannelName, ResourceDeliveryJsonMessage.class, config); } - public IChannelReceiver newDeliveryReceivingChannel(String theChannelName, ChannelConsumerSettings theChannelSettings) { + public IChannelReceiver newDeliveryReceivingChannel( + String theChannelName, ChannelConsumerSettings theChannelSettings) { ChannelConsumerSettings config = newConsumerConfigForDeliveryChannel(theChannelSettings); - IChannelReceiver channel = myChannelFactory.getOrCreateReceiver(theChannelName, ResourceDeliveryJsonMessage.class, config); + IChannelReceiver channel = + myChannelFactory.getOrCreateReceiver(theChannelName, ResourceDeliveryJsonMessage.class, config); return new BroadcastingSubscribableChannelWrapper(channel); } - public IChannelProducer newMatchingSendingChannel(String theChannelName, ChannelProducerSettings theChannelSettings) { + public IChannelProducer newMatchingSendingChannel( + String theChannelName, ChannelProducerSettings theChannelSettings) { ChannelProducerSettings config = newProducerConfigForMatchingChannel(theChannelSettings); return myChannelFactory.getOrCreateProducer(theChannelName, ResourceModifiedJsonMessage.class, config); } - public IChannelReceiver newMatchingReceivingChannel(String theChannelName, ChannelConsumerSettings theChannelSettings) { + public IChannelReceiver newMatchingReceivingChannel( + String theChannelName, ChannelConsumerSettings theChannelSettings) { ChannelConsumerSettings config = newConsumerConfigForMatchingChannel(theChannelSettings); - IChannelReceiver channel = myChannelFactory.getOrCreateReceiver(theChannelName, ResourceModifiedJsonMessage.class, config); + IChannelReceiver channel = + myChannelFactory.getOrCreateReceiver(theChannelName, ResourceModifiedJsonMessage.class, config); return new BroadcastingSubscribableChannelWrapper(channel); } @@ -110,5 +116,4 @@ public class SubscriptionChannelFactory { public IChannelFactory getChannelFactory() { return myChannelFactory; } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/IResourceModifiedConsumer.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/IResourceModifiedConsumer.java index d60c2402828..094a5c2110c 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/IResourceModifiedConsumer.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/IResourceModifiedConsumer.java @@ -28,11 +28,13 @@ public interface IResourceModifiedConsumer { /** * This is an internal API - Use with caution! */ - void submitResourceModified(IBaseResource theNewResource, ResourceModifiedMessage.OperationTypeEnum theOperationType, RequestDetails theRequest); + void submitResourceModified( + IBaseResource theNewResource, + ResourceModifiedMessage.OperationTypeEnum theOperationType, + RequestDetails theRequest); /** * This is an internal API - Use with caution! 
*/ void submitResourceModified(ResourceModifiedMessage theMsg); - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/SubscriptionMatchingStrategy.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/SubscriptionMatchingStrategy.java index b28d538825d..e57e8266b05 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/SubscriptionMatchingStrategy.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/SubscriptionMatchingStrategy.java @@ -25,7 +25,7 @@ public enum SubscriptionMatchingStrategy { */ IN_MEMORY, - /** + /** * Resources cannot be matched against this subscription in-memory. We need to make a call to a FHIR Repository to determine a match */ DATABASE, @@ -35,4 +35,3 @@ public enum SubscriptionMatchingStrategy { */ TOPIC } - diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionCanonicalizer.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionCanonicalizer.java index 54decc51405..79470188949 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionCanonicalizer.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/match/registry/SubscriptionCanonicalizer.java @@ -51,13 +51,13 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static ca.uhn.fhir.util.HapiExtensions.EX_SEND_DELETE_MESSAGES; import static java.util.stream.Collectors.mapping; @@ -88,12 +88,14 @@ public class SubscriptionCanonicalizer { case DSTU2_HL7ORG: case DSTU2_1: default: - throw new ConfigurationException(Msg.code(556) + "Subscription not supported for version: " + myFhirContext.getVersion().getVersion()); + throw new ConfigurationException(Msg.code(556) + "Subscription not supported for version: " + + myFhirContext.getVersion().getVersion()); } } private CanonicalSubscription canonicalizeDstu2(IBaseResource theSubscription) { - ca.uhn.fhir.model.dstu2.resource.Subscription subscription = (ca.uhn.fhir.model.dstu2.resource.Subscription) theSubscription; + ca.uhn.fhir.model.dstu2.resource.Subscription subscription = + (ca.uhn.fhir.model.dstu2.resource.Subscription) theSubscription; CanonicalSubscription retVal = new CanonicalSubscription(); try { retVal.setStatus(org.hl7.fhir.r4.model.Subscription.SubscriptionStatus.fromCode(subscription.getStatus())); @@ -115,13 +117,12 @@ public class SubscriptionCanonicalizer { } private boolean extractDeleteExtensionDstu2(ca.uhn.fhir.model.dstu2.resource.Subscription theSubscription) { - return theSubscription.getChannel().getUndeclaredExtensionsByUrl(EX_SEND_DELETE_MESSAGES) - .stream() - .map(ExtensionDt::getValue) - .map(value -> (BooleanDt) value) - .map(BasePrimitive::getValue) - .findFirst() - .orElse(false); + return theSubscription.getChannel().getUndeclaredExtensionsByUrl(EX_SEND_DELETE_MESSAGES).stream() + .map(ExtensionDt::getValue) + .map(value -> (BooleanDt) value) + .map(BasePrimitive::getValue) + .findFirst() + .orElse(false); } /** @@ -132,10 +133,9 @@ public class SubscriptionCanonicalizer { */ 
private Map extractTags(IBaseResource theSubscription) { Map retVal = new HashMap<>(); - theSubscription.getMeta().getTag() - .stream() - .filter(t -> t.getSystem() != null && t.getCode() != null) - .forEach(t -> retVal.put(t.getSystem(), t.getCode())); + theSubscription.getMeta().getTag().stream() + .filter(t -> t.getSystem() != null && t.getCode() != null) + .forEach(t -> retVal.put(t.getSystem(), t.getCode())); return retVal; } @@ -157,7 +157,8 @@ public class SubscriptionCanonicalizer { retVal.setChannelExtensions(extractExtension(subscription)); retVal.setIdElement(subscription.getIdElement()); retVal.setPayloadString(channel.getPayload()); - retVal.setPayloadSearchCriteria(getExtensionString(subscription, HapiExtensions.EXT_SUBSCRIPTION_PAYLOAD_SEARCH_CRITERIA)); + retVal.setPayloadSearchCriteria( + getExtensionString(subscription, HapiExtensions.EXT_SUBSCRIPTION_PAYLOAD_SEARCH_CRITERIA)); retVal.setTags(extractTags(subscription)); retVal.setCrossPartitionEnabled(SubscriptionUtil.isCrossPartition(theSubscription)); @@ -169,7 +170,8 @@ public class SubscriptionCanonicalizer { from = channel.getExtensionString(HapiExtensions.EXT_SUBSCRIPTION_EMAIL_FROM); subjectTemplate = channel.getExtensionString(HapiExtensions.EXT_SUBSCRIPTION_SUBJECT_TEMPLATE); } catch (FHIRException theE) { - throw new ConfigurationException(Msg.code(558) + "Failed to extract subscription extension(s): " + theE.getMessage(), theE); + throw new ConfigurationException( + Msg.code(558) + "Failed to extract subscription extension(s): " + theE.getMessage(), theE); } retVal.getEmailDetails().setFrom(from); retVal.getEmailDetails().setSubjectTemplate(subjectTemplate); @@ -180,10 +182,13 @@ public class SubscriptionCanonicalizer { String stripVersionIds; String deliverLatestVersion; try { - stripVersionIds = channel.getExtensionString(HapiExtensions.EXT_SUBSCRIPTION_RESTHOOK_STRIP_VERSION_IDS); - deliverLatestVersion = channel.getExtensionString(HapiExtensions.EXT_SUBSCRIPTION_RESTHOOK_DELIVER_LATEST_VERSION); + stripVersionIds = + channel.getExtensionString(HapiExtensions.EXT_SUBSCRIPTION_RESTHOOK_STRIP_VERSION_IDS); + deliverLatestVersion = + channel.getExtensionString(HapiExtensions.EXT_SUBSCRIPTION_RESTHOOK_DELIVER_LATEST_VERSION); } catch (FHIRException theE) { - throw new ConfigurationException(Msg.code(559) + "Failed to extract subscription extension(s): " + theE.getMessage(), theE); + throw new ConfigurationException( + Msg.code(559) + "Failed to extract subscription extension(s): " + theE.getMessage(), theE); } retVal.getRestHookDetails().setStripVersionId(Boolean.parseBoolean(stripVersionIds)); retVal.getRestHookDetails().setDeliverLatestVersion(Boolean.parseBoolean(deliverLatestVersion)); @@ -198,70 +203,77 @@ public class SubscriptionCanonicalizer { private Boolean extractSendDeletesDstu3(org.hl7.fhir.dstu3.model.Subscription subscription) { return subscription.getChannel().getExtensionsByUrl(EX_SEND_DELETE_MESSAGES).stream() - .map(org.hl7.fhir.dstu3.model.Extension::getValue) - .filter(val -> val instanceof org.hl7.fhir.dstu3.model.BooleanType) - .map(val -> (org.hl7.fhir.dstu3.model.BooleanType) val) - .map(org.hl7.fhir.dstu3.model.BooleanType::booleanValue) - .findFirst() - .orElse(false); + .map(org.hl7.fhir.dstu3.model.Extension::getValue) + .filter(val -> val instanceof org.hl7.fhir.dstu3.model.BooleanType) + .map(val -> (org.hl7.fhir.dstu3.model.BooleanType) val) + .map(org.hl7.fhir.dstu3.model.BooleanType::booleanValue) + .findFirst() + .orElse(false); } - private @Nonnull - Map> 
extractExtension(IBaseResource theSubscription) { + private @Nonnull Map> extractExtension(IBaseResource theSubscription) { try { switch (theSubscription.getStructureFhirVersionEnum()) { case DSTU2: { - ca.uhn.fhir.model.dstu2.resource.Subscription subscription = (ca.uhn.fhir.model.dstu2.resource.Subscription) theSubscription; - return subscription - .getChannel() - .getUndeclaredExtensions() - .stream() - .collect(Collectors.groupingBy(t -> t.getUrl(), mapping(t -> t.getValueAsPrimitive().getValueAsString(), toList()))); + ca.uhn.fhir.model.dstu2.resource.Subscription subscription = + (ca.uhn.fhir.model.dstu2.resource.Subscription) theSubscription; + return subscription.getChannel().getUndeclaredExtensions().stream() + .collect(Collectors.groupingBy( + t -> t.getUrl(), + mapping(t -> t.getValueAsPrimitive().getValueAsString(), toList()))); } case DSTU3: { - org.hl7.fhir.dstu3.model.Subscription subscription = (org.hl7.fhir.dstu3.model.Subscription) theSubscription; - return subscription - .getChannel() - .getExtension() - .stream() - .collect(Collectors.groupingBy(t -> t.getUrl(), mapping(t -> t.getValueAsPrimitive().getValueAsString(), toList()))); + org.hl7.fhir.dstu3.model.Subscription subscription = + (org.hl7.fhir.dstu3.model.Subscription) theSubscription; + return subscription.getChannel().getExtension().stream() + .collect(Collectors.groupingBy( + t -> t.getUrl(), + mapping(t -> t.getValueAsPrimitive().getValueAsString(), toList()))); } case R4: { - org.hl7.fhir.r4.model.Subscription subscription = (org.hl7.fhir.r4.model.Subscription) theSubscription; - return subscription - .getChannel() - .getExtension() - .stream() - .collect(Collectors.groupingBy(t -> t.getUrl(), - mapping(t -> { - return t.getValueAsPrimitive().getValueAsString(); - }, toList()))); + org.hl7.fhir.r4.model.Subscription subscription = + (org.hl7.fhir.r4.model.Subscription) theSubscription; + return subscription.getChannel().getExtension().stream() + .collect(Collectors.groupingBy( + t -> t.getUrl(), + mapping( + t -> { + return t.getValueAsPrimitive().getValueAsString(); + }, + toList()))); } case R5: { // TODO KHS fix org.hl7.fhir.r4b.model.BaseResource.getStructureFhirVersionEnum() for R4B if (theSubscription instanceof org.hl7.fhir.r4b.model.Subscription) { - org.hl7.fhir.r4b.model.Subscription subscription = (org.hl7.fhir.r4b.model.Subscription) theSubscription; - return subscription - .getExtension() - .stream() - .collect(Collectors.groupingBy(t -> t.getUrl(), mapping(t -> t.getValueAsPrimitive().getValueAsString(), toList()))); + org.hl7.fhir.r4b.model.Subscription subscription = + (org.hl7.fhir.r4b.model.Subscription) theSubscription; + return subscription.getExtension().stream() + .collect(Collectors.groupingBy( + t -> t.getUrl(), + mapping(t -> t.getValueAsPrimitive().getValueAsString(), toList()))); } else if (theSubscription instanceof org.hl7.fhir.r5.model.Subscription) { - org.hl7.fhir.r5.model.Subscription subscription = (org.hl7.fhir.r5.model.Subscription) theSubscription; - return subscription - .getExtension() - .stream() - .collect(Collectors.groupingBy(t -> t.getUrl(), mapping(t -> t.getValueAsPrimitive().getValueAsString(), toList()))); + org.hl7.fhir.r5.model.Subscription subscription = + (org.hl7.fhir.r5.model.Subscription) theSubscription; + return subscription.getExtension().stream() + .collect(Collectors.groupingBy( + t -> t.getUrl(), + mapping(t -> t.getValueAsPrimitive().getValueAsString(), toList()))); } } case DSTU2_HL7ORG: case DSTU2_1: default: { - ourLog.error("Failed to extract 
extension from subscription {}", theSubscription.getIdElement().toUnqualified().getValue()); + ourLog.error( + "Failed to extract extension from subscription {}", + theSubscription.getIdElement().toUnqualified().getValue()); break; } } } catch (FHIRException theE) { - ourLog.error("Failed to extract extension from subscription {}", theSubscription.getIdElement().toUnqualified().getValue(), theE); + ourLog.error( + "Failed to extract extension from subscription {}", + theSubscription.getIdElement().toUnqualified().getValue(), + theE); } return Collections.emptyMap(); } @@ -275,12 +287,14 @@ public class SubscriptionCanonicalizer { retVal.setChannelExtensions(extractExtension(subscription)); retVal.setIdElement(subscription.getIdElement()); retVal.setPayloadString(channel.getPayload()); - retVal.setPayloadSearchCriteria(getExtensionString(subscription, HapiExtensions.EXT_SUBSCRIPTION_PAYLOAD_SEARCH_CRITERIA)); + retVal.setPayloadSearchCriteria( + getExtensionString(subscription, HapiExtensions.EXT_SUBSCRIPTION_PAYLOAD_SEARCH_CRITERIA)); retVal.setTags(extractTags(subscription)); setPartitionIdOnReturnValue(theSubscription, retVal); retVal.setCrossPartitionEnabled(SubscriptionUtil.isCrossPartition(theSubscription)); - List profiles = subscription.getMeta().getProfile(); + List profiles = + subscription.getMeta().getProfile(); for (org.hl7.fhir.r4.model.CanonicalType next : profiles) { if (SubscriptionConstants.SUBSCRIPTION_TOPIC_PROFILE_URL.equals(next.getValueAsString())) { retVal.setTopicSubscription(true); @@ -296,28 +310,39 @@ public class SubscriptionCanonicalizer { retVal.setEndpointUrl(channel.getEndpoint()); retVal.setChannelType(getChannelType(subscription)); - for (org.hl7.fhir.r4.model.Extension next : subscription.getCriteriaElement().getExtension()) { + for (org.hl7.fhir.r4.model.Extension next : + subscription.getCriteriaElement().getExtension()) { if (SubscriptionConstants.SUBSCRIPTION_TOPIC_FILTER_URL.equals(next.getUrl())) { - List filters = CanonicalTopicSubscriptionFilter.fromQueryUrl(next.getValue().primitiveValue()); + List filters = CanonicalTopicSubscriptionFilter.fromQueryUrl( + next.getValue().primitiveValue()); filters.forEach(topicSubscription::addFilter); } } if (channel.hasExtension(SubscriptionConstants.SUBSCRIPTION_TOPIC_CHANNEL_HEARTBEAT_PERIOD_URL)) { - org.hl7.fhir.r4.model.Extension timeoutExtension = channel.getExtensionByUrl(SubscriptionConstants.SUBSCRIPTION_TOPIC_CHANNEL_HEARTBEAT_PERIOD_URL); - topicSubscription.setHeartbeatPeriod(Integer.valueOf(timeoutExtension.getValue().primitiveValue())); + org.hl7.fhir.r4.model.Extension timeoutExtension = channel.getExtensionByUrl( + SubscriptionConstants.SUBSCRIPTION_TOPIC_CHANNEL_HEARTBEAT_PERIOD_URL); + topicSubscription.setHeartbeatPeriod( + Integer.valueOf(timeoutExtension.getValue().primitiveValue())); } if (channel.hasExtension(SubscriptionConstants.SUBSCRIPTION_TOPIC_CHANNEL_TIMEOUT_URL)) { - org.hl7.fhir.r4.model.Extension timeoutExtension = channel.getExtensionByUrl(SubscriptionConstants.SUBSCRIPTION_TOPIC_CHANNEL_TIMEOUT_URL); - topicSubscription.setTimeout(Integer.valueOf(timeoutExtension.getValue().primitiveValue())); + org.hl7.fhir.r4.model.Extension timeoutExtension = + channel.getExtensionByUrl(SubscriptionConstants.SUBSCRIPTION_TOPIC_CHANNEL_TIMEOUT_URL); + topicSubscription.setTimeout( + Integer.valueOf(timeoutExtension.getValue().primitiveValue())); } if (channel.hasExtension(SubscriptionConstants.SUBSCRIPTION_TOPIC_CHANNEL_MAX_COUNT)) { - org.hl7.fhir.r4.model.Extension timeoutExtension = 
channel.getExtensionByUrl(SubscriptionConstants.SUBSCRIPTION_TOPIC_CHANNEL_MAX_COUNT); - topicSubscription.setMaxCount(Integer.valueOf(timeoutExtension.getValue().primitiveValue())); + org.hl7.fhir.r4.model.Extension timeoutExtension = + channel.getExtensionByUrl(SubscriptionConstants.SUBSCRIPTION_TOPIC_CHANNEL_MAX_COUNT); + topicSubscription.setMaxCount( + Integer.valueOf(timeoutExtension.getValue().primitiveValue())); } - if (channel.getPayloadElement().hasExtension(SubscriptionConstants.SUBSCRIPTION_TOPIC_CHANNEL_PAYLOAD_CONTENT)) { - org.hl7.fhir.r4.model.Extension timeoutExtension = channel.getPayloadElement().getExtensionByUrl(SubscriptionConstants.SUBSCRIPTION_TOPIC_CHANNEL_PAYLOAD_CONTENT); - topicSubscription.setContent(org.hl7.fhir.r5.model.Subscription.SubscriptionPayloadContent.fromCode(timeoutExtension.getValue().primitiveValue())); + if (channel.getPayloadElement() + .hasExtension(SubscriptionConstants.SUBSCRIPTION_TOPIC_CHANNEL_PAYLOAD_CONTENT)) { + org.hl7.fhir.r4.model.Extension timeoutExtension = channel.getPayloadElement() + .getExtensionByUrl(SubscriptionConstants.SUBSCRIPTION_TOPIC_CHANNEL_PAYLOAD_CONTENT); + topicSubscription.setContent(org.hl7.fhir.r5.model.Subscription.SubscriptionPayloadContent.fromCode( + timeoutExtension.getValue().primitiveValue())); } } else { @@ -333,7 +358,8 @@ public class SubscriptionCanonicalizer { from = channel.getExtensionString(HapiExtensions.EXT_SUBSCRIPTION_EMAIL_FROM); subjectTemplate = channel.getExtensionString(HapiExtensions.EXT_SUBSCRIPTION_SUBJECT_TEMPLATE); } catch (FHIRException theE) { - throw new ConfigurationException(Msg.code(561) + "Failed to extract subscription extension(s): " + theE.getMessage(), theE); + throw new ConfigurationException( + Msg.code(561) + "Failed to extract subscription extension(s): " + theE.getMessage(), theE); } retVal.getEmailDetails().setFrom(from); retVal.getEmailDetails().setSubjectTemplate(subjectTemplate); @@ -343,10 +369,13 @@ public class SubscriptionCanonicalizer { String stripVersionIds; String deliverLatestVersion; try { - stripVersionIds = channel.getExtensionString(HapiExtensions.EXT_SUBSCRIPTION_RESTHOOK_STRIP_VERSION_IDS); - deliverLatestVersion = channel.getExtensionString(HapiExtensions.EXT_SUBSCRIPTION_RESTHOOK_DELIVER_LATEST_VERSION); + stripVersionIds = + channel.getExtensionString(HapiExtensions.EXT_SUBSCRIPTION_RESTHOOK_STRIP_VERSION_IDS); + deliverLatestVersion = + channel.getExtensionString(HapiExtensions.EXT_SUBSCRIPTION_RESTHOOK_DELIVER_LATEST_VERSION); } catch (FHIRException theE) { - throw new ConfigurationException(Msg.code(562) + "Failed to extract subscription extension(s): " + theE.getMessage(), theE); + throw new ConfigurationException( + Msg.code(562) + "Failed to extract subscription extension(s): " + theE.getMessage(), theE); } retVal.getRestHookDetails().setStripVersionId(Boolean.parseBoolean(stripVersionIds)); retVal.getRestHookDetails().setDeliverLatestVersion(Boolean.parseBoolean(deliverLatestVersion)); @@ -381,10 +410,12 @@ public class SubscriptionCanonicalizer { retVal.setChannelExtensions(extractExtension(subscription)); retVal.setIdElement(subscription.getIdElement()); retVal.setPayloadString(channel.getPayload()); - retVal.setPayloadSearchCriteria(getExtensionString(subscription, HapiExtensions.EXT_SUBSCRIPTION_PAYLOAD_SEARCH_CRITERIA)); + retVal.setPayloadSearchCriteria( + getExtensionString(subscription, HapiExtensions.EXT_SUBSCRIPTION_PAYLOAD_SEARCH_CRITERIA)); retVal.setTags(extractTags(subscription)); - List profiles = 
subscription.getMeta().getProfile(); + List profiles = + subscription.getMeta().getProfile(); for (org.hl7.fhir.r4b.model.CanonicalType next : profiles) { if (SubscriptionConstants.SUBSCRIPTION_TOPIC_PROFILE_URL.equals(next.getValueAsString())) { retVal.setTopicSubscription(true); @@ -395,7 +426,8 @@ public class SubscriptionCanonicalizer { retVal.getTopicSubscription().setTopic(getCriteria(theSubscription)); // WIP STR5 support other content types - retVal.getTopicSubscription().setContent(org.hl7.fhir.r5.model.Subscription.SubscriptionPayloadContent.FULLRESOURCE); + retVal.getTopicSubscription() + .setContent(org.hl7.fhir.r5.model.Subscription.SubscriptionPayloadContent.FULLRESOURCE); retVal.setEndpointUrl(channel.getEndpoint()); retVal.setChannelType(getChannelType(subscription)); } else { @@ -411,7 +443,8 @@ public class SubscriptionCanonicalizer { from = getExtensionString(subscription, HapiExtensions.EXT_SUBSCRIPTION_EMAIL_FROM); subjectTemplate = getExtensionString(subscription, HapiExtensions.EXT_SUBSCRIPTION_SUBJECT_TEMPLATE); } catch (FHIRException theE) { - throw new ConfigurationException(Msg.code(564) + "Failed to extract subscription extension(s): " + theE.getMessage(), theE); + throw new ConfigurationException( + Msg.code(564) + "Failed to extract subscription extension(s): " + theE.getMessage(), theE); } retVal.getEmailDetails().setFrom(from); retVal.getEmailDetails().setSubjectTemplate(subjectTemplate); @@ -421,16 +454,20 @@ public class SubscriptionCanonicalizer { String stripVersionIds; String deliverLatestVersion; try { - stripVersionIds = getExtensionString(channel, HapiExtensions.EXT_SUBSCRIPTION_RESTHOOK_STRIP_VERSION_IDS); - deliverLatestVersion = getExtensionString(channel, HapiExtensions.EXT_SUBSCRIPTION_RESTHOOK_DELIVER_LATEST_VERSION); + stripVersionIds = + getExtensionString(channel, HapiExtensions.EXT_SUBSCRIPTION_RESTHOOK_STRIP_VERSION_IDS); + deliverLatestVersion = + getExtensionString(channel, HapiExtensions.EXT_SUBSCRIPTION_RESTHOOK_DELIVER_LATEST_VERSION); } catch (FHIRException theE) { - throw new ConfigurationException(Msg.code(565) + "Failed to extract subscription extension(s): " + theE.getMessage(), theE); + throw new ConfigurationException( + Msg.code(565) + "Failed to extract subscription extension(s): " + theE.getMessage(), theE); } retVal.getRestHookDetails().setStripVersionId(Boolean.parseBoolean(stripVersionIds)); retVal.getRestHookDetails().setDeliverLatestVersion(Boolean.parseBoolean(deliverLatestVersion)); } - List topicExts = subscription.getExtensionsByUrl("http://hl7.org/fhir/subscription/topics"); + List topicExts = + subscription.getExtensionsByUrl("http://hl7.org/fhir/subscription/topics"); if (topicExts.size() > 0) { IBaseReference ref = (IBaseReference) topicExts.get(0).getValueAsPrimitive(); if (!"EventDefinition".equals(ref.getReferenceElement().getResourceType())) { @@ -455,10 +492,12 @@ public class SubscriptionCanonicalizer { retVal.setChannelExtensions(extractExtension(subscription)); retVal.setIdElement(subscription.getIdElement()); retVal.setPayloadString(subscription.getContentType()); - retVal.setPayloadSearchCriteria(getExtensionString(subscription, HapiExtensions.EXT_SUBSCRIPTION_PAYLOAD_SEARCH_CRITERIA)); + retVal.setPayloadSearchCriteria( + getExtensionString(subscription, HapiExtensions.EXT_SUBSCRIPTION_PAYLOAD_SEARCH_CRITERIA)); retVal.setTags(extractTags(subscription)); - List topicExts = subscription.getExtensionsByUrl("http://hl7.org/fhir/subscription/topics"); + List topicExts = + 
subscription.getExtensionsByUrl("http://hl7.org/fhir/subscription/topics"); if (topicExts.size() > 0) { IBaseReference ref = (IBaseReference) topicExts.get(0).getValueAsPrimitive(); if (!"EventDefinition".equals(ref.getReferenceElement().getResourceType())) { @@ -471,7 +510,7 @@ public class SubscriptionCanonicalizer { Enumerations.SubscriptionStatusCodes status = subscription.getStatus(); if (status != null) { - switch(status) { + switch (status) { case REQUESTED: retVal.setStatus(org.hl7.fhir.r4.model.Subscription.SubscriptionStatus.REQUESTED); break; @@ -508,7 +547,8 @@ public class SubscriptionCanonicalizer { return retVal; } - private void setR5FlagsBasedOnChannelType(org.hl7.fhir.r5.model.Subscription subscription, CanonicalSubscription retVal) { + private void setR5FlagsBasedOnChannelType( + org.hl7.fhir.r5.model.Subscription subscription, CanonicalSubscription retVal) { if (retVal.getChannelType() == CanonicalSubscriptionChannelType.EMAIL) { String from; String subjectTemplate; @@ -516,7 +556,8 @@ public class SubscriptionCanonicalizer { from = getExtensionString(subscription, HapiExtensions.EXT_SUBSCRIPTION_EMAIL_FROM); subjectTemplate = getExtensionString(subscription, HapiExtensions.EXT_SUBSCRIPTION_SUBJECT_TEMPLATE); } catch (FHIRException theE) { - throw new ConfigurationException(Msg.code(2323) + "Failed to extract subscription extension(s): " + theE.getMessage(), theE); + throw new ConfigurationException( + Msg.code(2323) + "Failed to extract subscription extension(s): " + theE.getMessage(), theE); } retVal.getEmailDetails().setFrom(from); retVal.getEmailDetails().setSubjectTemplate(subjectTemplate); @@ -526,17 +567,21 @@ public class SubscriptionCanonicalizer { String stripVersionIds; String deliverLatestVersion; try { - stripVersionIds = getExtensionString(subscription, HapiExtensions.EXT_SUBSCRIPTION_RESTHOOK_STRIP_VERSION_IDS); - deliverLatestVersion = getExtensionString(subscription, HapiExtensions.EXT_SUBSCRIPTION_RESTHOOK_DELIVER_LATEST_VERSION); + stripVersionIds = + getExtensionString(subscription, HapiExtensions.EXT_SUBSCRIPTION_RESTHOOK_STRIP_VERSION_IDS); + deliverLatestVersion = getExtensionString( + subscription, HapiExtensions.EXT_SUBSCRIPTION_RESTHOOK_DELIVER_LATEST_VERSION); } catch (FHIRException theE) { - throw new ConfigurationException(Msg.code(2324) + "Failed to extract subscription extension(s): " + theE.getMessage(), theE); + throw new ConfigurationException( + Msg.code(2324) + "Failed to extract subscription extension(s): " + theE.getMessage(), theE); } retVal.getRestHookDetails().setStripVersionId(Boolean.parseBoolean(stripVersionIds)); retVal.getRestHookDetails().setDeliverLatestVersion(Boolean.parseBoolean(deliverLatestVersion)); } } - private CanonicalTopicSubscriptionFilter convertFilter(org.hl7.fhir.r5.model.Subscription.SubscriptionFilterByComponent theFilter) { + private CanonicalTopicSubscriptionFilter convertFilter( + org.hl7.fhir.r5.model.Subscription.SubscriptionFilterByComponent theFilter) { CanonicalTopicSubscriptionFilter retVal = new CanonicalTopicSubscriptionFilter(); retVal.setResourceType(theFilter.getResourceType()); retVal.setFilterParameter(theFilter.getFilterParameter()); @@ -547,22 +592,21 @@ public class SubscriptionCanonicalizer { } private void setPartitionIdOnReturnValue(IBaseResource theSubscription, CanonicalSubscription retVal) { - RequestPartitionId requestPartitionId = (RequestPartitionId) theSubscription.getUserData(Constants.RESOURCE_PARTITION_ID); + RequestPartitionId requestPartitionId = + (RequestPartitionId) 
theSubscription.getUserData(Constants.RESOURCE_PARTITION_ID); if (requestPartitionId != null) { retVal.setPartitionId(requestPartitionId.getFirstPartitionIdOrNull()); } } private String getExtensionString(IBaseHasExtensions theBase, String theUrl) { - return theBase - .getExtension() - .stream() - .filter(t -> theUrl.equals(t.getUrl())) - .filter(t -> t.getValue() instanceof IPrimitiveType) - .map(t -> (IPrimitiveType) t.getValue()) - .map(t -> t.getValueAsString()) - .findFirst() - .orElse(null); + return theBase.getExtension().stream() + .filter(t -> theUrl.equals(t.getUrl())) + .filter(t -> t.getValue() instanceof IPrimitiveType) + .map(t -> (IPrimitiveType) t.getValue()) + .map(t -> t.getValueAsString()) + .findFirst() + .orElse(null); } @SuppressWarnings("EnumSwitchStatementWhichMissesCases") @@ -571,12 +615,17 @@ public class SubscriptionCanonicalizer { switch (myFhirContext.getVersion().getVersion()) { case DSTU2: { - String channelTypeCode = ((ca.uhn.fhir.model.dstu2.resource.Subscription) theSubscription).getChannel().getType(); + String channelTypeCode = ((ca.uhn.fhir.model.dstu2.resource.Subscription) theSubscription) + .getChannel() + .getType(); retVal = CanonicalSubscriptionChannelType.fromCode(null, channelTypeCode); break; } case DSTU3: { - org.hl7.fhir.dstu3.model.Subscription.SubscriptionChannelType type = ((org.hl7.fhir.dstu3.model.Subscription) theSubscription).getChannel().getType(); + org.hl7.fhir.dstu3.model.Subscription.SubscriptionChannelType type = + ((org.hl7.fhir.dstu3.model.Subscription) theSubscription) + .getChannel() + .getType(); if (type != null) { String channelTypeCode = type.toCode(); retVal = CanonicalSubscriptionChannelType.fromCode(null, channelTypeCode); @@ -584,7 +633,10 @@ public class SubscriptionCanonicalizer { break; } case R4: { - org.hl7.fhir.r4.model.Subscription.SubscriptionChannelType type = ((org.hl7.fhir.r4.model.Subscription) theSubscription).getChannel().getType(); + org.hl7.fhir.r4.model.Subscription.SubscriptionChannelType type = ((org.hl7.fhir.r4.model.Subscription) + theSubscription) + .getChannel() + .getType(); if (type != null) { String channelTypeCode = type.toCode(); retVal = CanonicalSubscriptionChannelType.fromCode(null, channelTypeCode); @@ -592,7 +644,10 @@ public class SubscriptionCanonicalizer { break; } case R4B: { - org.hl7.fhir.r4b.model.Subscription.SubscriptionChannelType type = ((org.hl7.fhir.r4b.model.Subscription) theSubscription).getChannel().getType(); + org.hl7.fhir.r4b.model.Subscription.SubscriptionChannelType type = + ((org.hl7.fhir.r4b.model.Subscription) theSubscription) + .getChannel() + .getType(); if (type != null) { String channelTypeCode = type.toCode(); retVal = CanonicalSubscriptionChannelType.fromCode(null, channelTypeCode); @@ -600,15 +655,18 @@ public class SubscriptionCanonicalizer { break; } case R5: { - org.hl7.fhir.r5.model.Coding nextTypeCode = ((org.hl7.fhir.r5.model.Subscription) theSubscription).getChannelType(); - CanonicalSubscriptionChannelType code = CanonicalSubscriptionChannelType.fromCode(nextTypeCode.getSystem(), nextTypeCode.getCode()); + org.hl7.fhir.r5.model.Coding nextTypeCode = + ((org.hl7.fhir.r5.model.Subscription) theSubscription).getChannelType(); + CanonicalSubscriptionChannelType code = + CanonicalSubscriptionChannelType.fromCode(nextTypeCode.getSystem(), nextTypeCode.getCode()); if (code != null) { retVal = code; } break; } default: - throw new IllegalStateException(Msg.code(2326) + "Unsupported Subscription FHIR version: " + myFhirContext.getVersion().getVersion()); 
+ throw new IllegalStateException(Msg.code(2326) + "Unsupported Subscription FHIR version: " + + myFhirContext.getVersion().getVersion()); } return retVal; @@ -634,27 +692,26 @@ public class SubscriptionCanonicalizer { break; case R5: default: - throw new IllegalStateException(Msg.code(2327) + "Subscription criteria is not supported for FHIR version: " + myFhirContext.getVersion().getVersion()); + throw new IllegalStateException( + Msg.code(2327) + "Subscription criteria is not supported for FHIR version: " + + myFhirContext.getVersion().getVersion()); } return retVal; } - - public void setMatchingStrategyTag(@Nonnull IBaseResource theSubscription, @Nullable SubscriptionMatchingStrategy - theStrategy) { + public void setMatchingStrategyTag( + @Nonnull IBaseResource theSubscription, @Nullable SubscriptionMatchingStrategy theStrategy) { IBaseMetaType meta = theSubscription.getMeta(); // Remove any existing strategy tag - meta - .getTag() - .stream() - .filter(t -> HapiExtensions.EXT_SUBSCRIPTION_MATCHING_STRATEGY.equals(t.getSystem())) - .forEach(t -> { - t.setCode(null); - t.setSystem(null); - t.setDisplay(null); - }); + meta.getTag().stream() + .filter(t -> HapiExtensions.EXT_SUBSCRIPTION_MATCHING_STRATEGY.equals(t.getSystem())) + .forEach(t -> { + t.setCode(null); + t.setSystem(null); + t.setDisplay(null); + }); if (theStrategy == null) { return; @@ -670,17 +727,22 @@ public class SubscriptionCanonicalizer { } else if (theStrategy == SubscriptionMatchingStrategy.TOPIC) { display = "SubscriptionTopic"; } else { - throw new IllegalStateException(Msg.code(567) + "Unknown " + SubscriptionMatchingStrategy.class.getSimpleName() + ": " + theStrategy); + throw new IllegalStateException(Msg.code(567) + "Unknown " + + SubscriptionMatchingStrategy.class.getSimpleName() + ": " + theStrategy); } - meta.addTag().setSystem(HapiExtensions.EXT_SUBSCRIPTION_MATCHING_STRATEGY).setCode(value).setDisplay(display); + meta.addTag() + .setSystem(HapiExtensions.EXT_SUBSCRIPTION_MATCHING_STRATEGY) + .setCode(value) + .setDisplay(display); } public String getSubscriptionStatus(IBaseResource theSubscription) { - final IPrimitiveType status = myFhirContext.newTerser().getSingleValueOrNull(theSubscription, SubscriptionConstants.SUBSCRIPTION_STATUS, IPrimitiveType.class); + final IPrimitiveType status = myFhirContext + .newTerser() + .getSingleValueOrNull(theSubscription, SubscriptionConstants.SUBSCRIPTION_STATUS, IPrimitiveType.class); if (status == null) { return null; } return status.getValueAsString(); } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/model/CanonicalSubscription.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/model/CanonicalSubscription.java index 41b9a0dca70..5dce9dae0ef 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/model/CanonicalSubscription.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/model/CanonicalSubscription.java @@ -30,14 +30,14 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.hl7.fhir.r4.model.IdType; import org.hl7.fhir.r4.model.Subscription; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.io.Serializable; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -47,37 +47,53 @@ public class CanonicalSubscription implements 
Serializable, Cloneable, IModelJso @JsonProperty("id") private String myIdElement; + @JsonProperty("criteria") private String myCriteriaString; + @JsonProperty("endpointUrl") private String myEndpointUrl; + @JsonProperty("payload") private String myPayloadString; + @JsonProperty("headers") private List myHeaders; + @JsonProperty("channelType") private CanonicalSubscriptionChannelType myChannelType; + @JsonProperty("status") private Subscription.SubscriptionStatus myStatus; + @JsonProperty("triggerDefinition") @Deprecated private CanonicalEventDefinition myTrigger; + @JsonProperty("emailDetails") private EmailDetails myEmailDetails; + @JsonProperty("restHookDetails") private RestHookDetails myRestHookDetails; + @JsonProperty("extensions") private Map> myChannelExtensions; + @JsonProperty("tags") private Map myTags; + @JsonProperty("payloadSearchCriteria") private String myPayloadSearchCriteria; + @JsonProperty("partitionId") private Integer myPartitionId; + @JsonProperty("crossPartitionEnabled") private boolean myCrossPartitionEnabled; + @JsonProperty("sendDeleteMessages") private boolean mySendDeleteMessages; + @JsonProperty("isTopicSubscription") private boolean myIsTopicSubscription; @@ -297,18 +313,18 @@ public class CanonicalSubscription implements Serializable, Cloneable, IModelJso @Override public int hashCode() { return new HashCodeBuilder(17, 37) - .append(myIdElement) - .append(myCriteriaString) - .append(myEndpointUrl) - .append(myPayloadString) - .append(myHeaders) - .append(myChannelType) - .append(myStatus) - .append(myTrigger) - .append(myEmailDetails) - .append(myRestHookDetails) - .append(myChannelExtensions) - .toHashCode(); + .append(myIdElement) + .append(myCriteriaString) + .append(myEndpointUrl) + .append(myPayloadString) + .append(myHeaders) + .append(myChannelType) + .append(myStatus) + .append(myTrigger) + .append(myEmailDetails) + .append(myRestHookDetails) + .append(myChannelExtensions) + .toHashCode(); } public void setIdElement(IIdType theIdElement) { @@ -339,17 +355,17 @@ public class CanonicalSubscription implements Serializable, Cloneable, IModelJso @Override public String toString() { ToStringBuilder stringBuilder = new ToStringBuilder(this) - .append("myIdElement", myIdElement) - .append("myStatus", myStatus) - .append("myCriteriaString", myCriteriaString); -// .append("myEndpointUrl", myEndpointUrl) -// .append("myPayloadString", myPayloadString) -// .append("myHeaders", myHeaders) -// .append("myChannelType", myChannelType) -// .append("myTrigger", myTrigger) -// .append("myEmailDetails", myEmailDetails) -// .append("myRestHookDetails", myRestHookDetails) -// .append("myChannelExtensions", myChannelExtensions) + .append("myIdElement", myIdElement) + .append("myStatus", myStatus) + .append("myCriteriaString", myCriteriaString); + // .append("myEndpointUrl", myEndpointUrl) + // .append("myPayloadString", myPayloadString) + // .append("myHeaders", myHeaders) + // .append("myChannelType", myChannelType) + // .append("myTrigger", myTrigger) + // .append("myEmailDetails", myEmailDetails) + // .append("myRestHookDetails", myRestHookDetails) + // .append("myChannelExtensions", myChannelExtensions) if (isTopicSubscription()) { stringBuilder.append("topic", myTopicSubscription.getTopic()); } else { @@ -419,6 +435,7 @@ public class CanonicalSubscription implements Serializable, Cloneable, IModelJso @JsonProperty("from") private String myFrom; + @JsonProperty("subjectTemplate") private String mySubjectTemplate; @@ -454,17 +471,17 @@ public class CanonicalSubscription 
implements Serializable, Cloneable, IModelJso EmailDetails that = (EmailDetails) theO; return new EqualsBuilder() - .append(myFrom, that.myFrom) - .append(mySubjectTemplate, that.mySubjectTemplate) - .isEquals(); + .append(myFrom, that.myFrom) + .append(mySubjectTemplate, that.mySubjectTemplate) + .isEquals(); } @Override public int hashCode() { return new HashCodeBuilder(17, 37) - .append(myFrom) - .append(mySubjectTemplate) - .toHashCode(); + .append(myFrom) + .append(mySubjectTemplate) + .toHashCode(); } } @@ -472,6 +489,7 @@ public class CanonicalSubscription implements Serializable, Cloneable, IModelJso @JsonProperty("stripVersionId") private boolean myStripVersionId; + @JsonProperty("deliverLatestVersion") private boolean myDeliverLatestVersion; @@ -490,7 +508,6 @@ public class CanonicalSubscription implements Serializable, Cloneable, IModelJso myDeliverLatestVersion = theDeliverLatestVersion; } - public boolean isStripVersionId() { return myStripVersionId; } @@ -508,19 +525,18 @@ public class CanonicalSubscription implements Serializable, Cloneable, IModelJso RestHookDetails that = (RestHookDetails) theO; return new EqualsBuilder() - .append(myStripVersionId, that.myStripVersionId) - .append(myDeliverLatestVersion, that.myDeliverLatestVersion) - .isEquals(); + .append(myStripVersionId, that.myStripVersionId) + .append(myDeliverLatestVersion, that.myDeliverLatestVersion) + .isEquals(); } @Override public int hashCode() { return new HashCodeBuilder(17, 37) - .append(myStripVersionId) - .append(myDeliverLatestVersion) - .toHashCode(); + .append(myStripVersionId) + .append(myDeliverLatestVersion) + .toHashCode(); } - } @Deprecated @@ -533,6 +549,5 @@ public class CanonicalSubscription implements Serializable, Cloneable, IModelJso public CanonicalEventDefinition() { // nothing yet } - } } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/model/CanonicalSubscriptionChannelType.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/model/CanonicalSubscriptionChannelType.java index 6327d9d238b..58b0f725a2d 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/model/CanonicalSubscriptionChannelType.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/model/CanonicalSubscriptionChannelType.java @@ -54,27 +54,33 @@ public enum CanonicalSubscriptionChannelType { */ NULL; - public static CanonicalSubscriptionChannelType fromCode(@Nullable String theSystem, @Nonnull String codeString) throws FHIRException { + public static CanonicalSubscriptionChannelType fromCode(@Nullable String theSystem, @Nonnull String codeString) + throws FHIRException { if (isBlank(codeString)) { return null; } else if ("rest-hook".equals(codeString)) { - if (theSystem == null || theSystem.equals("http://terminology.hl7.org/CodeSystem/subscription-channel-type")) { + if (theSystem == null + || theSystem.equals("http://terminology.hl7.org/CodeSystem/subscription-channel-type")) { return RESTHOOK; } } else if ("websocket".equals(codeString)) { - if (theSystem == null || theSystem.equals("http://terminology.hl7.org/CodeSystem/subscription-channel-type")) { + if (theSystem == null + || theSystem.equals("http://terminology.hl7.org/CodeSystem/subscription-channel-type")) { return WEBSOCKET; } } else if ("email".equals(codeString)) { - if (theSystem == null || theSystem.equals("http://terminology.hl7.org/CodeSystem/subscription-channel-type")) { + if (theSystem == null + || 
theSystem.equals("http://terminology.hl7.org/CodeSystem/subscription-channel-type")) { return EMAIL; } } else if ("sms".equals(codeString)) { - if (theSystem == null || theSystem.equals("http://terminology.hl7.org/CodeSystem/subscription-channel-type")) { + if (theSystem == null + || theSystem.equals("http://terminology.hl7.org/CodeSystem/subscription-channel-type")) { return SMS; } } else if ("message".equals(codeString)) { - if (theSystem == null || theSystem.equals("http://terminology.hl7.org/CodeSystem/subscription-channel-type")) { + if (theSystem == null + || theSystem.equals("http://terminology.hl7.org/CodeSystem/subscription-channel-type")) { return MESSAGE; } } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/model/CanonicalTopicSubscription.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/model/CanonicalTopicSubscription.java index 2e9e1344651..111c085fb73 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/model/CanonicalTopicSubscription.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/model/CanonicalTopicSubscription.java @@ -133,7 +133,7 @@ public class CanonicalTopicSubscription { return new HashCodeBuilder(17, 37).append(myTopic).toHashCode(); } - public boolean hasFilters() { + public boolean hasFilters() { return myFilters != null && !myFilters.isEmpty(); - } + } } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/model/CanonicalTopicSubscriptionFilter.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/model/CanonicalTopicSubscriptionFilter.java index 41f7049dbf3..d156a37563b 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/model/CanonicalTopicSubscriptionFilter.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/model/CanonicalTopicSubscriptionFilter.java @@ -99,7 +99,8 @@ public class CanonicalTopicSubscriptionFilter { CanonicalTopicSubscriptionFilter filter = new CanonicalTopicSubscriptionFilter(); filter.setResourceType(resourceName); filter.setFilterParameter(key); - // WIP STR5 set modifier and comparator properly. This may be tricky without access to searchparameters, + // WIP STR5 set modifier and comparator properly. This may be tricky without access to + // searchparameters, // But this method cannot assume searchparameters exist on the server. 
filter.setComparator(Enumerations.SearchComparator.EQ); filter.setValue(value); diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/model/ResourceDeliveryJsonMessage.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/model/ResourceDeliveryJsonMessage.java index 90db87342db..4f1482fafd7 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/model/ResourceDeliveryJsonMessage.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/subscription/model/ResourceDeliveryJsonMessage.java @@ -28,7 +28,8 @@ import org.apache.commons.lang3.builder.ToStringBuilder; import javax.annotation.Nullable; public class ResourceDeliveryJsonMessage extends BaseJsonMessage { - private static final ObjectMapper ourObjectMapper = new ObjectMapper().registerModule(new com.fasterxml.jackson.datatype.jsr310.JavaTimeModule()); + private static final ObjectMapper ourObjectMapper = + new ObjectMapper().registerModule(new com.fasterxml.jackson.datatype.jsr310.JavaTimeModule()); @JsonProperty("payload") private ResourceDeliveryMessage myPayload; @@ -76,9 +77,7 @@ public class ResourceDeliveryJsonMessage extends BaseJsonMessage> theResourceIds, @Nullable List> theSearchUrls, @Nullable IIdType theSubscriptionId); + IBaseParameters triggerSubscription( + @Nullable List> theResourceIds, + @Nullable List> theSearchUrls, + @Nullable IIdType theSubscriptionId); void runDeliveryPass(); } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/term/UploadStatistics.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/term/UploadStatistics.java index 1a84f657d0b..3877312ad7f 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/term/UploadStatistics.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/term/UploadStatistics.java @@ -45,5 +45,4 @@ public class UploadStatistics { public IIdType getTarget() { return myTarget; } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/term/api/ITermLoaderSvc.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/term/api/ITermLoaderSvc.java index 534bb5fdeb1..2aa3cdf90c6 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/term/api/ITermLoaderSvc.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/term/api/ITermLoaderSvc.java @@ -62,7 +62,6 @@ public interface ITermLoaderSvc { String getFilename(); InputStream getInputStream(); - } class ByteArrayFileDescriptor implements FileDescriptor { @@ -85,5 +84,4 @@ public interface ITermLoaderSvc { return new ByteArrayInputStream(myNextData); } } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/term/models/DeleteCodeSystemBaseParameters.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/term/models/DeleteCodeSystemBaseParameters.java index 69f21f9f02e..3f86bb881f7 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/term/models/DeleteCodeSystemBaseParameters.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/term/models/DeleteCodeSystemBaseParameters.java @@ -21,5 +21,4 @@ package ca.uhn.fhir.jpa.term.models; import ca.uhn.fhir.model.api.IModelJson; -public class DeleteCodeSystemBaseParameters implements IModelJson { -} +public class DeleteCodeSystemBaseParameters implements IModelJson {} diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/BaseCaptureQueriesListener.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/BaseCaptureQueriesListener.java index 9e647974b6d..6ec1911b70c 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/BaseCaptureQueriesListener.java +++ 
b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/BaseCaptureQueriesListener.java @@ -27,16 +27,17 @@ import net.ttddyy.dsproxy.listener.MethodExecutionContext; import net.ttddyy.dsproxy.proxy.ParameterSetOperation; import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder; -import javax.annotation.Nullable; import java.util.Collections; import java.util.List; import java.util.Queue; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.trim; -public abstract class BaseCaptureQueriesListener implements ProxyDataSourceBuilder.SingleQueryExecution, ProxyDataSourceBuilder.SingleMethodExecution { +public abstract class BaseCaptureQueriesListener + implements ProxyDataSourceBuilder.SingleQueryExecution, ProxyDataSourceBuilder.SingleMethodExecution { private boolean myCaptureQueryStackTrace = false; @@ -80,15 +81,14 @@ public abstract class BaseCaptureQueriesListener implements ProxyDataSourceBuild String sql = trim(next.getQuery()); List params; int size; - if (next.getParametersList().size() > 0 && next.getParametersList().get(0).size() > 0) { + if (next.getParametersList().size() > 0 + && next.getParametersList().get(0).size() > 0) { size = next.getParametersList().size(); - List values = next - .getParametersList() - .get(0); + List values = next.getParametersList().get(0); params = values.stream() - .map(t -> t.getArgs()[1]) - .map(t -> t != null ? t.toString() : "NULL") - .collect(Collectors.toList()); + .map(t -> t.getArgs()[1]) + .map(t -> t != null ? t.toString() : "NULL") + .collect(Collectors.toList()); } else { params = Collections.emptyList(); size = next.getParametersList().size(); @@ -101,7 +101,8 @@ public abstract class BaseCaptureQueriesListener implements ProxyDataSourceBuild long elapsedTime = theExecutionInfo.getElapsedTime(); long startTime = System.currentTimeMillis() - elapsedTime; - SqlQuery sqlQuery = new SqlQuery(sql, params, startTime, elapsedTime, stackTraceElements, size, requestPartitionId); + SqlQuery sqlQuery = + new SqlQuery(sql, params, startTime, elapsedTime, stackTraceElements, size, requestPartitionId); queryList.add(sqlQuery); } } @@ -116,7 +117,7 @@ public abstract class BaseCaptureQueriesListener implements ProxyDataSourceBuild @Override public void execute(MethodExecutionContext executionContext) { - AtomicInteger counter = null; + AtomicInteger counter = null; switch (executionContext.getMethod().getName()) { case "commit": counter = provideCommitCounter(); diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/CircularQueueCaptureQueriesListener.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/CircularQueueCaptureQueriesListener.java index adcac87a9f4..3251fc242f0 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/CircularQueueCaptureQueriesListener.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/CircularQueueCaptureQueriesListener.java @@ -19,7 +19,6 @@ */ package ca.uhn.fhir.jpa.util; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.util.StopWatch; import com.google.common.collect.Queues; import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder; @@ -28,7 +27,6 @@ import org.hl7.fhir.r4.model.InstantType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -39,6 +37,7 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Predicate; 
import java.util.stream.Collectors; import java.util.stream.Stream; +import javax.annotation.Nonnull; /** * This is a query listener designed to be plugged into a {@link ProxyDataSourceBuilder proxy DataSource}. @@ -124,11 +123,10 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe } private List getQueriesStartingWith(String theStart, String theThreadName) { - return getCapturedQueries() - .stream() - .filter(t -> theThreadName == null || t.getThreadName().equals(theThreadName)) - .filter(t -> t.getSql(false, false).toLowerCase().startsWith(theStart)) - .collect(Collectors.toList()); + return getCapturedQueries().stream() + .filter(t -> theThreadName == null || t.getThreadName().equals(theThreadName)) + .filter(t -> t.getSql(false, false).toLowerCase().startsWith(theStart)) + .collect(Collectors.toList()); } private List getQueriesStartingWith(String theStart) { @@ -210,10 +208,9 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe * Log all captured UPDATE queries */ public String logUpdateQueriesForCurrentThread() { - List queries = getUpdateQueriesForCurrentThread() - .stream() - .map(CircularQueueCaptureQueriesListener::formatQueryAsSql) - .collect(Collectors.toList()); + List queries = getUpdateQueriesForCurrentThread().stream() + .map(CircularQueueCaptureQueriesListener::formatQueryAsSql) + .collect(Collectors.toList()); String joined = String.join("\n", queries); ourLog.info("Update Queries:\n{}", joined); return joined; @@ -225,10 +222,9 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe * @return */ public String logSelectQueriesForCurrentThread(int... theIndexes) { - List queries = getSelectQueriesForCurrentThread() - .stream() - .map(CircularQueueCaptureQueriesListener::formatQueryAsSql) - .collect(Collectors.toList()); + List queries = getSelectQueriesForCurrentThread().stream() + .map(CircularQueueCaptureQueriesListener::formatQueryAsSql) + .collect(Collectors.toList()); List newList = new ArrayList<>(); if (theIndexes != null && theIndexes.length > 0) { @@ -260,24 +256,21 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe */ public List logSelectQueries(boolean theInlineParams, boolean theFormatSql) { List queries = getSelectQueries(); - List queriesStrings = queries - .stream() - .map(t -> CircularQueueCaptureQueriesListener.formatQueryAsSql(t, theInlineParams, theFormatSql)) - .collect(Collectors.toList()); + List queriesStrings = queries.stream() + .map(t -> CircularQueueCaptureQueriesListener.formatQueryAsSql(t, theInlineParams, theFormatSql)) + .collect(Collectors.toList()); ourLog.info("Select Queries:\n{}", String.join("\n", queriesStrings)); return queries; } - /** * Log first captured SELECT query */ public void logFirstSelectQueryForCurrentThread() { - String firstSelectQuery = getSelectQueriesForCurrentThread() - .stream() - .findFirst() - .map(CircularQueueCaptureQueriesListener::formatQueryAsSql) - .orElse("NONE FOUND"); + String firstSelectQuery = getSelectQueriesForCurrentThread().stream() + .findFirst() + .map(CircularQueueCaptureQueriesListener::formatQueryAsSql) + .orElse("NONE FOUND"); ourLog.info("First select SqlQuery:\n{}", firstSelectQuery); } @@ -285,10 +278,9 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe * Log all captured INSERT queries */ public String logInsertQueriesForCurrentThread() { - List queries = getInsertQueriesForCurrentThread() - .stream() - 
.map(CircularQueueCaptureQueriesListener::formatQueryAsSql) - .collect(Collectors.toList()); + List queries = getInsertQueriesForCurrentThread().stream() + .map(CircularQueueCaptureQueriesListener::formatQueryAsSql) + .collect(Collectors.toList()); String queriesAsString = String.join("\n", queries); ourLog.info("Insert Queries:\n{}", queriesAsString); return queriesAsString; @@ -298,10 +290,9 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe * Log all captured queries */ public void logAllQueriesForCurrentThread() { - List queries = getAllQueriesForCurrentThread() - .stream() - .map(CircularQueueCaptureQueriesListener::formatQueryAsSql) - .collect(Collectors.toList()); + List queries = getAllQueriesForCurrentThread().stream() + .map(CircularQueueCaptureQueriesListener::formatQueryAsSql) + .collect(Collectors.toList()); ourLog.info("Queries:\n{}", String.join("\n", queries)); } @@ -309,10 +300,9 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe * Log all captured queries */ public void logAllQueries() { - List queries = getCapturedQueries() - .stream() - .map(CircularQueueCaptureQueriesListener::formatQueryAsSql) - .collect(Collectors.toList()); + List queries = getCapturedQueries().stream() + .map(CircularQueueCaptureQueriesListener::formatQueryAsSql) + .collect(Collectors.toList()); ourLog.info("Queries:\n{}", String.join("\n", queries)); } @@ -328,11 +318,10 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe */ public int logInsertQueries(Predicate theInclusionPredicate) { List insertQueries = getInsertQueries(); - List queries = insertQueries - .stream() - .filter(t -> theInclusionPredicate == null || theInclusionPredicate.test(t)) - .map(CircularQueueCaptureQueriesListener::formatQueryAsSql) - .collect(Collectors.toList()); + List queries = insertQueries.stream() + .filter(t -> theInclusionPredicate == null || theInclusionPredicate.test(t)) + .map(CircularQueueCaptureQueriesListener::formatQueryAsSql) + .collect(Collectors.toList()); ourLog.info("Insert Queries:\n{}", String.join("\n", queries)); return countQueries(insertQueries); @@ -343,10 +332,9 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe */ public int logUpdateQueries() { List updateQueries = getUpdateQueries(); - List queries = updateQueries - .stream() - .map(CircularQueueCaptureQueriesListener::formatQueryAsSql) - .collect(Collectors.toList()); + List queries = updateQueries.stream() + .map(CircularQueueCaptureQueriesListener::formatQueryAsSql) + .collect(Collectors.toList()); ourLog.info("Update Queries:\n{}", String.join("\n", queries)); return countQueries(updateQueries); @@ -356,25 +344,22 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe * Log all captured DELETE queries */ public String logDeleteQueriesForCurrentThread() { - List queries = getDeleteQueriesForCurrentThread() - .stream() - .map(CircularQueueCaptureQueriesListener::formatQueryAsSql) - .collect(Collectors.toList()); + List queries = getDeleteQueriesForCurrentThread().stream() + .map(CircularQueueCaptureQueriesListener::formatQueryAsSql) + .collect(Collectors.toList()); String joined = String.join("\n", queries); ourLog.info("Delete Queries:\n{}", joined); return joined; } - /** * Log all captured DELETE queries */ public int logDeleteQueries() { List deleteQueries = getDeleteQueries(); - List queries = deleteQueries - .stream() - 
.map(CircularQueueCaptureQueriesListener::formatQueryAsSql) - .collect(Collectors.toList()); + List queries = deleteQueries.stream() + .map(CircularQueueCaptureQueriesListener::formatQueryAsSql) + .collect(Collectors.toList()); ourLog.info("Delete Queries:\n{}", String.join("\n", queries)); return countQueries(deleteQueries); @@ -414,13 +399,9 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe @Nonnull private static Integer countQueries(List theQueries) { - return theQueries - .stream() - .map(t -> t.getSize()) - .reduce(0, Integer::sum); + return theQueries.stream().map(t -> t.getSize()).reduce(0, Integer::sum); } - @Nonnull static String formatQueryAsSql(SqlQuery theQuery) { boolean inlineParams = true; @@ -434,25 +415,27 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe StringBuilder b = new StringBuilder(); b.append("SqlQuery at "); b.append(new InstantType(new Date(theQuery.getQueryTimestamp())).getValueAsString()); - if (theQuery.getRequestPartitionId() != null && theQuery.getRequestPartitionId().hasPartitionIds()) { + if (theQuery.getRequestPartitionId() != null + && theQuery.getRequestPartitionId().hasPartitionIds()) { b.append(" on partition "); b.append(theQuery.getRequestPartitionId().getPartitionIds()); } b.append(" took ").append(StopWatch.formatMillis(theQuery.getElapsedTime())); b.append(" on Thread: ").append(theQuery.getThreadName()); if (theQuery.getSize() > 1) { - b.append("\nExecution Count: ").append(theQuery.getSize()).append(" (parameters shown are for first execution)"); + b.append("\nExecution Count: ") + .append(theQuery.getSize()) + .append(" (parameters shown are for first execution)"); } b.append("\nSQL:\n").append(formattedSql); if (theQuery.getStackTrace() != null) { b.append("\nStack:\n "); Stream stackTraceStream = Arrays.stream(theQuery.getStackTrace()) - .map(StackTraceElement::toString) - .filter(t -> t.startsWith("ca.")); + .map(StackTraceElement::toString) + .filter(t -> t.startsWith("ca.")); b.append(stackTraceStream.collect(Collectors.joining("\n "))); } b.append("\n"); return b.toString(); } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/CurrentThreadCaptureQueriesListener.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/CurrentThreadCaptureQueriesListener.java index 1020fddb4f8..5921610315e 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/CurrentThreadCaptureQueriesListener.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/CurrentThreadCaptureQueriesListener.java @@ -65,7 +65,6 @@ public class CurrentThreadCaptureQueriesListener extends BaseCaptureQueriesListe return new SqlQueryList(retVal); } - /** * Starts capturing queries for the current thread. *

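For readers of this patch, a minimal usage sketch of the per-thread query-capture API touched in the surrounding hunks (illustrative only, not part of the patch). The static startCapturing() entry point and the placeholder storage call are assumptions made for the example; logQueriesForCurrentThreadAndStopCapturing() is the method reformatted in the hunk below.

    import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener;

    public class QueryCaptureExample {

        void runAndLogQueries() {
            // Assumed entry point: begin capturing SQL issued by the current thread
            CurrentThreadCaptureQueriesListener.startCapturing();

            // ... perform some storage operation here (placeholder) ...

            // Formats and logs everything captured on this thread, then stops capturing;
            // returns the number of queries captured (see the hunk that follows)
            int captured = CurrentThreadCaptureQueriesListener.logQueriesForCurrentThreadAndStopCapturing();
        }
    }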
    @@ -87,10 +86,9 @@ public class CurrentThreadCaptureQueriesListener extends BaseCaptureQueriesListe * @return Returns the number of queries captured */ public static int logQueriesForCurrentThreadAndStopCapturing(int... theIndexes) { - List queries = getCurrentQueueAndStopCapturing() - .stream() - .map(CircularQueueCaptureQueriesListener::formatQueryAsSql) - .collect(Collectors.toList()); + List queries = getCurrentQueueAndStopCapturing().stream() + .map(CircularQueueCaptureQueriesListener::formatQueryAsSql) + .collect(Collectors.toList()); if (theIndexes != null && theIndexes.length > 0) { List newList = new ArrayList<>(); diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/MemoryCacheService.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/MemoryCacheService.java index 3b64669adf2..c0d0c755040 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/MemoryCacheService.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/MemoryCacheService.java @@ -29,11 +29,11 @@ import org.apache.commons.lang3.builder.HashCodeBuilder; import org.springframework.transaction.support.TransactionSynchronization; import org.springframework.transaction.support.TransactionSynchronizationManager; -import javax.annotation.Nonnull; import java.util.Collection; import java.util.EnumMap; import java.util.Map; import java.util.function.Function; +import javax.annotation.Nonnull; import static java.util.concurrent.TimeUnit.MINUTES; import static java.util.concurrent.TimeUnit.SECONDS; @@ -66,7 +66,8 @@ public class MemoryCacheService { switch (next) { case CONCEPT_TRANSLATION: case CONCEPT_TRANSLATION_REVERSE: - timeoutSeconds = SECONDS.convert(myStorageSettings.getTranslationCachesExpireAfterWriteInMinutes(), MINUTES); + timeoutSeconds = + SECONDS.convert(myStorageSettings.getTranslationCachesExpireAfterWriteInMinutes(), MINUTES); maximumSize = 10000; break; case PID_TO_FORCED_ID: @@ -92,7 +93,6 @@ public class MemoryCacheService { } } - public T get(CacheEnum theCache, K theKey, Function theSupplier) { assert theCache.getKeyType().isAssignableFrom(theKey.getClass()); return doGet(theCache, theKey, theSupplier); @@ -184,7 +184,6 @@ public class MemoryCacheService { } public enum CacheEnum { - TAG_DEFINITION(TagDefinitionCacheKey.class), RESOURCE_LOOKUP(String.class), FORCED_ID_TO_PID(String.class), @@ -210,29 +209,29 @@ public class MemoryCacheService { } } - public static class TagDefinitionCacheKey { private final TagTypeEnum myType; private final String mySystem; private final String myCode; private final String myVersion; - private Boolean myUserSelected; + private Boolean myUserSelected; private final int myHashCode; - public TagDefinitionCacheKey(TagTypeEnum theType, String theSystem, String theCode, String theVersion, Boolean theUserSelected) { + public TagDefinitionCacheKey( + TagTypeEnum theType, String theSystem, String theCode, String theVersion, Boolean theUserSelected) { myType = theType; mySystem = theSystem; myCode = theCode; myVersion = theVersion; myUserSelected = theUserSelected; myHashCode = new HashCodeBuilder(17, 37) - .append(myType) - .append(mySystem) - .append(myCode) - .append(myVersion) - .append(myUserSelected) - .toHashCode(); + .append(myType) + .append(mySystem) + .append(myCode) + .append(myVersion) + .append(myUserSelected) + .toHashCode(); } @Override @@ -242,10 +241,10 @@ public class MemoryCacheService { TagDefinitionCacheKey that = (TagDefinitionCacheKey) theO; retVal = new EqualsBuilder() - .append(myType, that.myType) - 
.append(mySystem, that.mySystem) - .append(myCode, that.myCode) - .isEquals(); + .append(myType, that.myType) + .append(mySystem, that.mySystem) + .append(myCode, that.myCode) + .isEquals(); } return retVal; } @@ -256,7 +255,6 @@ public class MemoryCacheService { } } - public static class HistoryCountKey { private final String myTypeName; private final Long myInstanceId; @@ -265,7 +263,10 @@ public class MemoryCacheService { private HistoryCountKey(String theTypeName, Long theInstanceId) { myTypeName = theTypeName; myInstanceId = theInstanceId; - myHashCode = new HashCodeBuilder().append(myTypeName).append(myInstanceId).toHashCode(); + myHashCode = new HashCodeBuilder() + .append(myTypeName) + .append(myInstanceId) + .toHashCode(); } public static HistoryCountKey forSystem() { @@ -287,7 +288,10 @@ public class MemoryCacheService { boolean retVal = false; if (theO instanceof HistoryCountKey) { HistoryCountKey that = (HistoryCountKey) theO; - retVal = new EqualsBuilder().append(myTypeName, that.myTypeName).append(myInstanceId, that.myInstanceId).isEquals(); + retVal = new EqualsBuilder() + .append(myTypeName, that.myTypeName) + .append(myInstanceId, that.myInstanceId) + .isEquals(); } return retVal; } @@ -296,7 +300,5 @@ public class MemoryCacheService { public int hashCode() { return myHashCode; } - } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/SqlQuery.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/SqlQuery.java index ff20c087f6d..1717acdcbf7 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/SqlQuery.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/SqlQuery.java @@ -42,11 +42,36 @@ public class SqlQuery { private final String myNamespace; private final RequestPartitionId myRequestPartitionId; - public SqlQuery(String theSql, List theParams, long theQueryTimestamp, long theElapsedTime, StackTraceElement[] theStackTraceElements, int theSize, RequestPartitionId theRequestPartitionId) { - this(null, theSql, theParams, theQueryTimestamp, theElapsedTime, theStackTraceElements, theSize, LanguageEnum.SQL, theRequestPartitionId); + public SqlQuery( + String theSql, + List theParams, + long theQueryTimestamp, + long theElapsedTime, + StackTraceElement[] theStackTraceElements, + int theSize, + RequestPartitionId theRequestPartitionId) { + this( + null, + theSql, + theParams, + theQueryTimestamp, + theElapsedTime, + theStackTraceElements, + theSize, + LanguageEnum.SQL, + theRequestPartitionId); } - public SqlQuery(String theNamespace, String theSql, List theParams, long theQueryTimestamp, long theElapsedTime, StackTraceElement[] theStackTraceElements, int theSize, LanguageEnum theLanguage, RequestPartitionId theRequestPartitionId) { + public SqlQuery( + String theNamespace, + String theSql, + List theParams, + long theQueryTimestamp, + long theElapsedTime, + StackTraceElement[] theStackTraceElements, + int theSize, + LanguageEnum theLanguage, + RequestPartitionId theRequestPartitionId) { Validate.notNull(theLanguage, "theLanguage must not be null"); myNamespace = theNamespace; @@ -136,9 +161,7 @@ public class SqlQuery { } public enum LanguageEnum { - SQL, JSON - } } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/validation/ValidationSettings.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/validation/ValidationSettings.java index c2dd2c28c53..9f0fef49289 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/validation/ValidationSettings.java +++ 
b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/validation/ValidationSettings.java @@ -53,8 +53,10 @@ public class ValidationSettings { * * @since 5.1.0 */ - public void setLocalReferenceValidationDefaultPolicy(@Nonnull ReferenceValidationPolicy theLocalReferenceValidationDefaultPolicy) { - Validate.notNull(theLocalReferenceValidationDefaultPolicy, "theLocalReferenceValidationDefaultPolicy must not be null"); + public void setLocalReferenceValidationDefaultPolicy( + @Nonnull ReferenceValidationPolicy theLocalReferenceValidationDefaultPolicy) { + Validate.notNull( + theLocalReferenceValidationDefaultPolicy, "theLocalReferenceValidationDefaultPolicy must not be null"); myLocalReferenceValidationDefaultPolicy = theLocalReferenceValidationDefaultPolicy; } } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/validation/ValidatorPolicyAdvisor.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/validation/ValidatorPolicyAdvisor.java index 1cb484c11d9..12234b28d56 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/validation/ValidatorPolicyAdvisor.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/validation/ValidatorPolicyAdvisor.java @@ -42,11 +42,13 @@ public class ValidatorPolicyAdvisor implements IValidationPolicyAdvisor { @Autowired private ValidationSettings myValidationSettings; + @Autowired private FhirContext myFhirContext; @Override - public ReferenceValidationPolicy policyForReference(IResourceValidator validator, Object appContext, String path, String url) { + public ReferenceValidationPolicy policyForReference( + IResourceValidator validator, Object appContext, String path, String url) { int slashIdx = url.indexOf("/"); if (slashIdx > 0 && myFhirContext.getResourceTypes().contains(url.substring(0, slashIdx))) { return myValidationSettings.getLocalReferenceValidationDefaultPolicy(); @@ -56,12 +58,27 @@ public class ValidatorPolicyAdvisor implements IValidationPolicyAdvisor { } @Override - public CodedContentValidationPolicy policyForCodedContent(IResourceValidator iResourceValidator, Object o, String s, ElementDefinition elementDefinition, StructureDefinition structureDefinition, BindingKind bindingKind, ValueSet valueSet, List list) { + public CodedContentValidationPolicy policyForCodedContent( + IResourceValidator iResourceValidator, + Object o, + String s, + ElementDefinition elementDefinition, + StructureDefinition structureDefinition, + BindingKind bindingKind, + ValueSet valueSet, + List list) { return CodedContentValidationPolicy.CODE; } @Override - public ContainedReferenceValidationPolicy policyForContained(IResourceValidator validator, Object appContext, String containerType, String containerId, Element.SpecialElement containingResourceType, String path, String url) { + public ContainedReferenceValidationPolicy policyForContained( + IResourceValidator validator, + Object appContext, + String containerType, + String containerId, + Element.SpecialElement containingResourceType, + String path, + String url) { return ContainedReferenceValidationPolicy.CHECK_VALID; } } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/validation/ValidatorResourceFetcher.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/validation/ValidatorResourceFetcher.java index 799e3fd8c60..037f43698bd 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/validation/ValidatorResourceFetcher.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/validation/ValidatorResourceFetcher.java @@ -54,15 +54,18 @@ public class ValidatorResourceFetcher 
implements IValidatorResourceFetcher { private final DaoRegistry myDaoRegistry; private final VersionSpecificWorkerContextWrapper myVersionSpecificContextWrapper; - public ValidatorResourceFetcher(FhirContext theFhirContext, IValidationSupport theValidationSupport, DaoRegistry theDaoRegistry) { + public ValidatorResourceFetcher( + FhirContext theFhirContext, IValidationSupport theValidationSupport, DaoRegistry theDaoRegistry) { myFhirContext = theFhirContext; myValidationSupport = theValidationSupport; myDaoRegistry = theDaoRegistry; - myVersionSpecificContextWrapper = VersionSpecificWorkerContextWrapper.newVersionSpecificWorkerContextWrapper(myValidationSupport); + myVersionSpecificContextWrapper = + VersionSpecificWorkerContextWrapper.newVersionSpecificWorkerContextWrapper(myValidationSupport); } @Override - public Element fetch(IResourceValidator iResourceValidator, Object appContext, String theUrl) throws FHIRFormatError, DefinitionException, FHIRException, IOException { + public Element fetch(IResourceValidator iResourceValidator, Object appContext, String theUrl) + throws FHIRFormatError, DefinitionException, FHIRException, IOException { IdType id = new IdType(theUrl); String resourceType = id.getResourceType(); IFhirResourceDao dao = myDaoRegistry.getResourceDao(resourceType); @@ -75,14 +78,16 @@ public class ValidatorResourceFetcher implements IValidatorResourceFetcher { } try { - return new JsonParser(myVersionSpecificContextWrapper).parse(myFhirContext.newJsonParser().encodeResourceToString(target), resourceType); + return new JsonParser(myVersionSpecificContextWrapper) + .parse(myFhirContext.newJsonParser().encodeResourceToString(target), resourceType); } catch (Exception e) { throw new FHIRException(Msg.code(576) + e); } } @Override - public boolean resolveURL(IResourceValidator iResourceValidator, Object o, String s, String s1, String s2) throws IOException, FHIRException { + public boolean resolveURL(IResourceValidator iResourceValidator, Object o, String s, String s1, String s2) + throws IOException, FHIRException { return true; } @@ -98,7 +103,8 @@ public class ValidatorResourceFetcher implements IValidatorResourceFetcher { } @Override - public CanonicalResource fetchCanonicalResource(IResourceValidator iResourceValidator, String s) throws URISyntaxException { + public CanonicalResource fetchCanonicalResource(IResourceValidator iResourceValidator, String s) + throws URISyntaxException { return null; } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/mdm/log/Logs.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/mdm/log/Logs.java index dc7697e4073..c9543e4dd29 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/mdm/log/Logs.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/mdm/log/Logs.java @@ -23,7 +23,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class Logs { - private static final Logger ourMdmTroubleshootingLog = LoggerFactory.getLogger("ca.uhn.fhir.log.mdm_troubleshooting"); + private static final Logger ourMdmTroubleshootingLog = + LoggerFactory.getLogger("ca.uhn.fhir.log.mdm_troubleshooting"); public static Logger getMdmTroubleshootingLog() { return ourMdmTroubleshootingLog; diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/AsyncMemoryQueueBackedFhirClientBalpSink.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/AsyncMemoryQueueBackedFhirClientBalpSink.java index 847687098ce..4a39746796c 100644 --- 
a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/AsyncMemoryQueueBackedFhirClientBalpSink.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/AsyncMemoryQueueBackedFhirClientBalpSink.java @@ -30,13 +30,12 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.atomic.AtomicLong; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.annotation.PreDestroy; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.concurrent.atomic.AtomicLong; /** * This implementation of the {@link IBalpAuditEventSink} transmits audit events to @@ -67,11 +66,11 @@ public class AsyncMemoryQueueBackedFhirClientBalpSink extends FhirClientBalpSink * @param theTargetBaseUrl The FHIR server base URL for the target/sink server to * receive audit events. */ - public AsyncMemoryQueueBackedFhirClientBalpSink(@Nonnull FhirContext theFhirContext, @Nonnull String theTargetBaseUrl) { + public AsyncMemoryQueueBackedFhirClientBalpSink( + @Nonnull FhirContext theFhirContext, @Nonnull String theTargetBaseUrl) { this(theFhirContext, theTargetBaseUrl, null); } - /** * Sets the FhirContext to use when initiating outgoing connections * @@ -84,7 +83,10 @@ public class AsyncMemoryQueueBackedFhirClientBalpSink extends FhirClientBalpSink * @param theClientInterceptors An optional list of interceptors to register against * the client. May be {@literal null}. */ - public AsyncMemoryQueueBackedFhirClientBalpSink(@Nonnull FhirContext theFhirContext, @Nonnull String theTargetBaseUrl, @Nullable List theClientInterceptors) { + public AsyncMemoryQueueBackedFhirClientBalpSink( + @Nonnull FhirContext theFhirContext, + @Nonnull String theTargetBaseUrl, + @Nullable List theClientInterceptors) { this(createClient(theFhirContext, theTargetBaseUrl, theClientInterceptors)); } @@ -95,7 +97,8 @@ public class AsyncMemoryQueueBackedFhirClientBalpSink extends FhirClientBalpSink */ public AsyncMemoryQueueBackedFhirClientBalpSink(IGenericClient theClient) { super(theClient); - myThreadPool = ThreadPoolUtil.newThreadPool(1, 1, "BalpClientSink-" + ourNextThreadId.getAndIncrement() + "-", Integer.MAX_VALUE); + myThreadPool = ThreadPoolUtil.newThreadPool( + 1, 1, "BalpClientSink-" + ourNextThreadId.getAndIncrement() + "-", Integer.MAX_VALUE); } @Override @@ -139,7 +142,10 @@ public class AsyncMemoryQueueBackedFhirClientBalpSink extends FhirClientBalpSink myClient.transaction().withBundle(transactionBundle).execute(); return; } catch (BaseServerResponseException e) { - ourLog.error("Failed to transmit AuditEvent items to target. Will re-attempt {} failed events once. Error: {}", queue.length, e.toString()); + ourLog.error( + "Failed to transmit AuditEvent items to target. Will re-attempt {} failed events once. 
Error: {}", + queue.length, + e.toString()); } // Retry once then give up @@ -152,5 +158,4 @@ public class AsyncMemoryQueueBackedFhirClientBalpSink extends FhirClientBalpSink } } } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/BalpAuditCaptureInterceptor.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/BalpAuditCaptureInterceptor.java index 13639423b42..d7ae427a476 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/BalpAuditCaptureInterceptor.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/BalpAuditCaptureInterceptor.java @@ -33,10 +33,10 @@ import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.r4.model.AuditEvent; -import javax.annotation.Nonnull; -import javax.servlet.http.HttpServletRequest; import java.nio.charset.StandardCharsets; import java.util.*; +import javax.annotation.Nonnull; +import javax.servlet.http.HttpServletRequest; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -66,7 +66,8 @@ public class BalpAuditCaptureInterceptor { * identity of the user and the client from the {@link ca.uhn.fhir.rest.api.server.RequestDetails} * object. */ - public BalpAuditCaptureInterceptor(@Nonnull IBalpAuditEventSink theAuditEventSink, @Nonnull IBalpAuditContextServices theContextServices) { + public BalpAuditCaptureInterceptor( + @Nonnull IBalpAuditEventSink theAuditEventSink, @Nonnull IBalpAuditContextServices theContextServices) { Validate.notNull(theAuditEventSink); Validate.notNull(theContextServices); myAuditEventSink = theAuditEventSink; @@ -76,35 +77,31 @@ public class BalpAuditCaptureInterceptor { private static void addEntityPatient(AuditEvent theAuditEvent, String thePatientId) { AuditEvent.AuditEventEntityComponent entityPatient = theAuditEvent.addEntity(); entityPatient - .getType() - .setSystem(BalpConstants.CS_AUDIT_ENTITY_TYPE) - .setCode(BalpConstants.CS_AUDIT_ENTITY_TYPE_1_PERSON) - .setDisplay(BalpConstants.CS_AUDIT_ENTITY_TYPE_1_PERSON_DISPLAY); + .getType() + .setSystem(BalpConstants.CS_AUDIT_ENTITY_TYPE) + .setCode(BalpConstants.CS_AUDIT_ENTITY_TYPE_1_PERSON) + .setDisplay(BalpConstants.CS_AUDIT_ENTITY_TYPE_1_PERSON_DISPLAY); entityPatient - .getRole() - .setSystem(BalpConstants.CS_OBJECT_ROLE) - .setCode(BalpConstants.CS_OBJECT_ROLE_1_PATIENT) - .setDisplay(BalpConstants.CS_OBJECT_ROLE_1_PATIENT_DISPLAY); - entityPatient - .getWhat() - .setReference(thePatientId); + .getRole() + .setSystem(BalpConstants.CS_OBJECT_ROLE) + .setCode(BalpConstants.CS_OBJECT_ROLE_1_PATIENT) + .setDisplay(BalpConstants.CS_OBJECT_ROLE_1_PATIENT_DISPLAY); + entityPatient.getWhat().setReference(thePatientId); } private static void addEntityData(AuditEvent theAuditEvent, String theDataResourceId) { AuditEvent.AuditEventEntityComponent entityData = theAuditEvent.addEntity(); entityData - .getType() - .setSystem(BalpConstants.CS_AUDIT_ENTITY_TYPE) - .setCode(BalpConstants.CS_AUDIT_ENTITY_TYPE_2_SYSTEM_OBJECT) - .setDisplay(BalpConstants.CS_AUDIT_ENTITY_TYPE_2_SYSTEM_OBJECT_DISPLAY); + .getType() + .setSystem(BalpConstants.CS_AUDIT_ENTITY_TYPE) + .setCode(BalpConstants.CS_AUDIT_ENTITY_TYPE_2_SYSTEM_OBJECT) + .setDisplay(BalpConstants.CS_AUDIT_ENTITY_TYPE_2_SYSTEM_OBJECT_DISPLAY); entityData - .getRole() - .setSystem(BalpConstants.CS_OBJECT_ROLE) - .setCode(BalpConstants.CS_OBJECT_ROLE_4_DOMAIN_RESOURCE) - .setDisplay(BalpConstants.CS_OBJECT_ROLE_4_DOMAIN_RESOURCE_DISPLAY); - entityData - 
.getWhat() - .setReference(theDataResourceId); + .getRole() + .setSystem(BalpConstants.CS_OBJECT_ROLE) + .setCode(BalpConstants.CS_OBJECT_ROLE_4_DOMAIN_RESOURCE) + .setDisplay(BalpConstants.CS_OBJECT_ROLE_4_DOMAIN_RESOURCE_DISPLAY); + entityData.getWhat().setReference(theDataResourceId); } public void setAdditionalPatientCompartmentParamNames(Set theAdditionalPatientCompartmentParamNames) { @@ -133,27 +130,40 @@ public class BalpAuditCaptureInterceptor { } @Hook(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED) - public void hookStoragePrecommitResourceCreated(IBaseResource theResource, ServletRequestDetails theRequestDetails) { - handleCreateUpdateDelete(theResource, theRequestDetails, BalpProfileEnum.BASIC_CREATE, BalpProfileEnum.PATIENT_CREATE); + public void hookStoragePrecommitResourceCreated( + IBaseResource theResource, ServletRequestDetails theRequestDetails) { + handleCreateUpdateDelete( + theResource, theRequestDetails, BalpProfileEnum.BASIC_CREATE, BalpProfileEnum.PATIENT_CREATE); } @Hook(Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED) - public void hookStoragePrecommitResourceDeleted(IBaseResource theResource, ServletRequestDetails theRequestDetails) { - handleCreateUpdateDelete(theResource, theRequestDetails, BalpProfileEnum.BASIC_DELETE, BalpProfileEnum.PATIENT_DELETE); + public void hookStoragePrecommitResourceDeleted( + IBaseResource theResource, ServletRequestDetails theRequestDetails) { + handleCreateUpdateDelete( + theResource, theRequestDetails, BalpProfileEnum.BASIC_DELETE, BalpProfileEnum.PATIENT_DELETE); } @Hook(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED) - public void hookStoragePrecommitResourceUpdated(IBaseResource theOldResource, IBaseResource theResource, ServletRequestDetails theRequestDetails) { - handleCreateUpdateDelete(theResource, theRequestDetails, BalpProfileEnum.BASIC_UPDATE, BalpProfileEnum.PATIENT_UPDATE); + public void hookStoragePrecommitResourceUpdated( + IBaseResource theOldResource, IBaseResource theResource, ServletRequestDetails theRequestDetails) { + handleCreateUpdateDelete( + theResource, theRequestDetails, BalpProfileEnum.BASIC_UPDATE, BalpProfileEnum.PATIENT_UPDATE); } - private void handleCreateUpdateDelete(IBaseResource theResource, ServletRequestDetails theRequestDetails, BalpProfileEnum theBasicProfile, BalpProfileEnum thePatientProfile) { - Set patientCompartmentOwners = determinePatientCompartmentOwnersForResources(List.of(theResource), theRequestDetails); + private void handleCreateUpdateDelete( + IBaseResource theResource, + ServletRequestDetails theRequestDetails, + BalpProfileEnum theBasicProfile, + BalpProfileEnum thePatientProfile) { + Set patientCompartmentOwners = + determinePatientCompartmentOwnersForResources(List.of(theResource), theRequestDetails); if (patientCompartmentOwners.isEmpty()) { - AuditEvent auditEvent = createAuditEventBasicCreateUpdateDelete(theRequestDetails, theResource, theBasicProfile); + AuditEvent auditEvent = + createAuditEventBasicCreateUpdateDelete(theRequestDetails, theResource, theBasicProfile); myAuditEventSink.recordAuditEvent(auditEvent); } else { - AuditEvent auditEvent = createAuditEventPatientCreateUpdateDelete(theRequestDetails, theResource, patientCompartmentOwners, thePatientProfile); + AuditEvent auditEvent = createAuditEventPatientCreateUpdateDelete( + theRequestDetails, theResource, patientCompartmentOwners, thePatientProfile); myAuditEventSink.recordAuditEvent(auditEvent); } } @@ -162,8 +172,10 @@ public class BalpAuditCaptureInterceptor { Validate.isTrue(theDetails.size() == 1, "Unexpected 
number of results for read: %d", theDetails.size()); IBaseResource resource = theDetails.getResource(0); if (resource != null) { - String dataResourceId = myContextServices.massageResourceIdForStorage(theRequestDetails, resource, resource.getIdElement()); - Set patientIds = determinePatientCompartmentOwnersForResources(List.of(resource), theRequestDetails); + String dataResourceId = + myContextServices.massageResourceIdForStorage(theRequestDetails, resource, resource.getIdElement()); + Set patientIds = + determinePatientCompartmentOwnersForResources(List.of(resource), theRequestDetails); // If the resource is in the Patient compartment, create one audit // event for each compartment owner @@ -192,28 +204,30 @@ public class BalpAuditCaptureInterceptor { AuditEvent auditEvent = createAuditEventBasicQuery(theRequestDetails); myAuditEventSink.recordAuditEvent(auditEvent); } - - } @Nonnull - private Set determinePatientCompartmentOwnersForResources(List theResources, ServletRequestDetails theRequestDetails) { + private Set determinePatientCompartmentOwnersForResources( + List theResources, ServletRequestDetails theRequestDetails) { Set patientIds = new TreeSet<>(); FhirContext fhirContext = theRequestDetails.getFhirContext(); for (IBaseResource resource : theResources) { RuntimeResourceDefinition resourceDef = fhirContext.getResourceDefinition(resource); if (resourceDef.getName().equals("Patient")) { - patientIds.add(myContextServices.massageResourceIdForStorage(theRequestDetails, resource, resource.getIdElement())); + patientIds.add(myContextServices.massageResourceIdForStorage( + theRequestDetails, resource, resource.getIdElement())); } else { - List compartmentSearchParameters = resourceDef.getSearchParamsForCompartmentName("Patient"); + List compartmentSearchParameters = + resourceDef.getSearchParamsForCompartmentName("Patient"); if (!compartmentSearchParameters.isEmpty()) { FhirTerser terser = fhirContext.newTerser(); terser - .getCompartmentOwnersForResource("Patient", resource, myAdditionalPatientCompartmentParamNames) - .stream() - .map(t -> myContextServices.massageResourceIdForStorage(theRequestDetails, resource, t)) - .forEach(patientIds::add); + .getCompartmentOwnersForResource( + "Patient", resource, myAdditionalPatientCompartmentParamNames) + .stream() + .map(t -> myContextServices.massageResourceIdForStorage(theRequestDetails, resource, t)) + .forEach(patientIds::add); } } } @@ -221,16 +235,19 @@ public class BalpAuditCaptureInterceptor { } @Nonnull - private AuditEvent createAuditEventCommonCreate(ServletRequestDetails theRequestDetails, IBaseResource theResource, BalpProfileEnum profile) { + private AuditEvent createAuditEventCommonCreate( + ServletRequestDetails theRequestDetails, IBaseResource theResource, BalpProfileEnum profile) { AuditEvent auditEvent = createAuditEventCommon(theRequestDetails, profile); - String resourceId = myContextServices.massageResourceIdForStorage(theRequestDetails, theResource, theResource.getIdElement()); + String resourceId = myContextServices.massageResourceIdForStorage( + theRequestDetails, theResource, theResource.getIdElement()); addEntityData(auditEvent, resourceId); return auditEvent; } @Nonnull - private AuditEvent createAuditEventBasicCreateUpdateDelete(ServletRequestDetails theRequestDetails, IBaseResource theResource, BalpProfileEnum theProfile) { + private AuditEvent createAuditEventBasicCreateUpdateDelete( + ServletRequestDetails theRequestDetails, IBaseResource theResource, BalpProfileEnum theProfile) { return 
createAuditEventCommonCreate(theRequestDetails, theResource, theProfile); } @@ -247,7 +264,11 @@ public class BalpAuditCaptureInterceptor { } @Nonnull - private AuditEvent createAuditEventPatientCreateUpdateDelete(ServletRequestDetails theRequestDetails, IBaseResource theResource, Set thePatientCompartmentOwners, BalpProfileEnum theProfile) { + private AuditEvent createAuditEventPatientCreateUpdateDelete( + ServletRequestDetails theRequestDetails, + IBaseResource theResource, + Set thePatientCompartmentOwners, + BalpProfileEnum theProfile) { AuditEvent retVal = createAuditEventCommonCreate(theRequestDetails, theResource, theProfile); for (String next : thePatientCompartmentOwners) { addEntityPatient(retVal, next); @@ -256,7 +277,8 @@ public class BalpAuditCaptureInterceptor { } @Nonnull - private AuditEvent createAuditEventPatientQuery(ServletRequestDetails theRequestDetails, Set compartmentOwners) { + private AuditEvent createAuditEventPatientQuery( + ServletRequestDetails theRequestDetails, Set compartmentOwners) { BalpProfileEnum profile = BalpProfileEnum.PATIENT_QUERY; AuditEvent auditEvent = createAuditEventCommonQuery(theRequestDetails, profile); for (String next : compartmentOwners) { @@ -266,7 +288,8 @@ public class BalpAuditCaptureInterceptor { } @Nonnull - private AuditEvent createAuditEventPatientRead(ServletRequestDetails theRequestDetails, String dataResourceId, String patientId) { + private AuditEvent createAuditEventPatientRead( + ServletRequestDetails theRequestDetails, String dataResourceId, String patientId) { BalpProfileEnum profile = BalpProfileEnum.PATIENT_READ; AuditEvent auditEvent = createAuditEventCommonRead(theRequestDetails, dataResourceId, profile); addEntityPatient(auditEvent, patientId); @@ -282,69 +305,54 @@ public class BalpAuditCaptureInterceptor { AuditEvent auditEvent = new AuditEvent(); auditEvent.getMeta().addProfile(theProfile.getProfileUrl()); - auditEvent.getType() - .setSystem(BalpConstants.CS_AUDIT_EVENT_TYPE) - .setCode("rest") - .setDisplay("Restful Operation"); - auditEvent.addSubtype() - .setSystem(BalpConstants.CS_RESTFUL_INTERACTION) - .setCode(restOperationType.getCode()) - .setDisplay(restOperationType.getCode()); + auditEvent + .getType() + .setSystem(BalpConstants.CS_AUDIT_EVENT_TYPE) + .setCode("rest") + .setDisplay("Restful Operation"); + auditEvent + .addSubtype() + .setSystem(BalpConstants.CS_RESTFUL_INTERACTION) + .setCode(restOperationType.getCode()) + .setDisplay(restOperationType.getCode()); auditEvent.setAction(theProfile.getAction()); auditEvent.setOutcome(AuditEvent.AuditEventOutcome._0); auditEvent.setRecorded(new Date()); - auditEvent - .getSource() - .getObserver() - .setDisplay(theRequestDetails.getServerBaseForRequest()); + auditEvent.getSource().getObserver().setDisplay(theRequestDetails.getServerBaseForRequest()); AuditEvent.AuditEventAgentComponent clientAgent = auditEvent.addAgent(); clientAgent.setWho(myContextServices.getAgentClientWho(theRequestDetails)); + clientAgent.getType().addCoding(theProfile.getAgentClientTypeCoding()); + clientAgent.getWho().setDisplay(myContextServices.getNetworkAddress(theRequestDetails)); clientAgent - .getType() - .addCoding(theProfile.getAgentClientTypeCoding()); - clientAgent - .getWho() - .setDisplay(myContextServices.getNetworkAddress(theRequestDetails)); - clientAgent - .getNetwork() - .setAddress(myContextServices.getNetworkAddress(theRequestDetails)) - .setType(myContextServices.getNetworkAddressType(theRequestDetails)); + .getNetwork() + 
.setAddress(myContextServices.getNetworkAddress(theRequestDetails)) + .setType(myContextServices.getNetworkAddressType(theRequestDetails)); clientAgent.setRequestor(false); AuditEvent.AuditEventAgentComponent serverAgent = auditEvent.addAgent(); - serverAgent - .getType() - .addCoding(theProfile.getAgentServerTypeCoding()); - serverAgent - .getWho() - .setDisplay(theRequestDetails.getServerBaseForRequest()); - serverAgent - .getNetwork() - .setAddress(theRequestDetails.getServerBaseForRequest()); + serverAgent.getType().addCoding(theProfile.getAgentServerTypeCoding()); + serverAgent.getWho().setDisplay(theRequestDetails.getServerBaseForRequest()); + serverAgent.getNetwork().setAddress(theRequestDetails.getServerBaseForRequest()); serverAgent.setRequestor(false); AuditEvent.AuditEventAgentComponent userAgent = auditEvent.addAgent(); userAgent - .getType() - .addCoding() - .setSystem("http://terminology.hl7.org/CodeSystem/v3-ParticipationType") - .setCode("IRCP") - .setDisplay("information recipient"); + .getType() + .addCoding() + .setSystem("http://terminology.hl7.org/CodeSystem/v3-ParticipationType") + .setCode("IRCP") + .setDisplay("information recipient"); userAgent.setWho(myContextServices.getAgentUserWho(theRequestDetails)); - userAgent - .setRequestor(true); + userAgent.setRequestor(true); AuditEvent.AuditEventEntityComponent entityTransaction = auditEvent.addEntity(); entityTransaction - .getType() - .setSystem("https://profiles.ihe.net/ITI/BALP/CodeSystem/BasicAuditEntityType") - .setCode("XrequestId"); - entityTransaction - .getWhat() - .getIdentifier() - .setValue(theRequestDetails.getRequestId()); + .getType() + .setSystem("https://profiles.ihe.net/ITI/BALP/CodeSystem/BasicAuditEntityType") + .setCode("XrequestId"); + entityTransaction.getWhat().getIdentifier().setValue(theRequestDetails.getRequestId()); return auditEvent; } @@ -354,15 +362,15 @@ public class BalpAuditCaptureInterceptor { AuditEvent.AuditEventEntityComponent queryEntity = auditEvent.addEntity(); queryEntity - .getType() - .setSystem(BalpConstants.CS_AUDIT_ENTITY_TYPE) - .setCode(BalpConstants.CS_AUDIT_ENTITY_TYPE_2_SYSTEM_OBJECT) - .setDisplay(BalpConstants.CS_AUDIT_ENTITY_TYPE_2_SYSTEM_OBJECT_DISPLAY); + .getType() + .setSystem(BalpConstants.CS_AUDIT_ENTITY_TYPE) + .setCode(BalpConstants.CS_AUDIT_ENTITY_TYPE_2_SYSTEM_OBJECT) + .setDisplay(BalpConstants.CS_AUDIT_ENTITY_TYPE_2_SYSTEM_OBJECT_DISPLAY); queryEntity - .getRole() - .setSystem(BalpConstants.CS_OBJECT_ROLE) - .setCode(BalpConstants.CS_OBJECT_ROLE_24_QUERY) - .setDisplay(BalpConstants.CS_OBJECT_ROLE_24_QUERY_DISPLAY); + .getRole() + .setSystem(BalpConstants.CS_OBJECT_ROLE) + .setCode(BalpConstants.CS_OBJECT_ROLE_24_QUERY) + .setDisplay(BalpConstants.CS_OBJECT_ROLE_24_QUERY_DISPLAY); // Description StringBuilder description = new StringBuilder(); @@ -382,7 +390,8 @@ public class BalpAuditCaptureInterceptor { queryString.append("/"); queryString.append(theRequestDetails.getRequestPath()); boolean first = true; - for (Map.Entry nextEntrySet : theRequestDetails.getParameters().entrySet()) { + for (Map.Entry nextEntrySet : + theRequestDetails.getParameters().entrySet()) { for (String nextValue : nextEntrySet.getValue()) { if (first) { queryString.append("?"); @@ -396,17 +405,15 @@ public class BalpAuditCaptureInterceptor { } } - queryEntity - .getQueryElement() - .setValue(queryString.toString().getBytes(StandardCharsets.UTF_8)); + queryEntity.getQueryElement().setValue(queryString.toString().getBytes(StandardCharsets.UTF_8)); return auditEvent; } @Nonnull - 
private AuditEvent createAuditEventCommonRead(ServletRequestDetails theRequestDetails, String theDataResourceId, BalpProfileEnum theProfile) { + private AuditEvent createAuditEventCommonRead( + ServletRequestDetails theRequestDetails, String theDataResourceId, BalpProfileEnum theProfile) { AuditEvent auditEvent = createAuditEventCommon(theRequestDetails, theProfile); addEntityData(auditEvent, theDataResourceId); return auditEvent; } - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/BalpConstants.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/BalpConstants.java index 2a6e4d15a1d..2478e292f81 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/BalpConstants.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/BalpConstants.java @@ -27,19 +27,22 @@ public class BalpConstants { * 1 - Machine name. This constant is used only for convenience since the * existing Enum uses numerical codes that are not great for readability. */ - public static final AuditEvent.AuditEventAgentNetworkType AUDIT_EVENT_AGENT_NETWORK_TYPE_MACHINE_NAME = AuditEvent.AuditEventAgentNetworkType._1; + public static final AuditEvent.AuditEventAgentNetworkType AUDIT_EVENT_AGENT_NETWORK_TYPE_MACHINE_NAME = + AuditEvent.AuditEventAgentNetworkType._1; /** * Constant for {@link AuditEvent.AuditEventAgentNetworkType} representing the code * 2 - IP Address. This constant is used only for convenience since the * existing Enum uses numerical codes that are not great for readability. */ - public static final AuditEvent.AuditEventAgentNetworkType AUDIT_EVENT_AGENT_NETWORK_TYPE_IP_ADDRESS = AuditEvent.AuditEventAgentNetworkType._2; + public static final AuditEvent.AuditEventAgentNetworkType AUDIT_EVENT_AGENT_NETWORK_TYPE_IP_ADDRESS = + AuditEvent.AuditEventAgentNetworkType._2; /** * Constant for {@link AuditEvent.AuditEventAgentNetworkType} representing the code * 3 - URI. This constant is used only for convenience since the * existing Enum uses numerical codes that are not great for readability. 
*/ - public static final AuditEvent.AuditEventAgentNetworkType AUDIT_EVENT_AGENT_NETWORK_TYPE_URI = AuditEvent.AuditEventAgentNetworkType._5; + public static final AuditEvent.AuditEventAgentNetworkType AUDIT_EVENT_AGENT_NETWORK_TYPE_URI = + AuditEvent.AuditEventAgentNetworkType._5; public static final String CS_AUDIT_EVENT_TYPE = "http://terminology.hl7.org/CodeSystem/audit-event-type"; public static final String CS_AUDIT_ENTITY_TYPE = "http://terminology.hl7.org/CodeSystem/audit-entity-type"; diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/BalpProfileEnum.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/BalpProfileEnum.java index fb22cc07233..c06a04cdc95 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/BalpProfileEnum.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/BalpProfileEnum.java @@ -26,77 +26,72 @@ import java.util.function.Supplier; public enum BalpProfileEnum { BASIC_CREATE( - "https://profiles.ihe.net/ITI/BALP/StructureDefinition/IHE.BasicAudit.Create", - AuditEvent.AuditEventAction.C, - () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110153", "Source Role ID"), - () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110152", "Destination Role ID") - ), + "https://profiles.ihe.net/ITI/BALP/StructureDefinition/IHE.BasicAudit.Create", + AuditEvent.AuditEventAction.C, + () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110153", "Source Role ID"), + () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110152", "Destination Role ID")), PATIENT_CREATE( - "https://profiles.ihe.net/ITI/BALP/StructureDefinition/IHE.BasicAudit.PatientCreate", - AuditEvent.AuditEventAction.C, - () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110153", "Source Role ID"), - () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110152", "Destination Role ID") - ), + "https://profiles.ihe.net/ITI/BALP/StructureDefinition/IHE.BasicAudit.PatientCreate", + AuditEvent.AuditEventAction.C, + () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110153", "Source Role ID"), + () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110152", "Destination Role ID")), BASIC_UPDATE( - "https://profiles.ihe.net/ITI/BALP/StructureDefinition/IHE.BasicAudit.Update", - AuditEvent.AuditEventAction.U, - () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110153", "Source Role ID"), - () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110152", "Destination Role ID") - ), + "https://profiles.ihe.net/ITI/BALP/StructureDefinition/IHE.BasicAudit.Update", + AuditEvent.AuditEventAction.U, + () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110153", "Source Role ID"), + () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110152", "Destination Role ID")), PATIENT_UPDATE( - "https://profiles.ihe.net/ITI/BALP/StructureDefinition/IHE.BasicAudit.PatientUpdate", - AuditEvent.AuditEventAction.U, - () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110153", "Source Role ID"), - () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110152", "Destination Role ID") - ), + "https://profiles.ihe.net/ITI/BALP/StructureDefinition/IHE.BasicAudit.PatientUpdate", + AuditEvent.AuditEventAction.U, + () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110153", "Source Role ID"), + () -> new 
Coding("http://dicom.nema.org/resources/ontology/DCM", "110152", "Destination Role ID")), BASIC_DELETE( - "https://profiles.ihe.net/ITI/BALP/StructureDefinition/IHE.BasicAudit.Delete", - AuditEvent.AuditEventAction.D, - () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110150", "Application"), - () -> new Coding("http://terminology.hl7.org/CodeSystem/provenance-participant-type", "custodian", "Custodian") - ), + "https://profiles.ihe.net/ITI/BALP/StructureDefinition/IHE.BasicAudit.Delete", + AuditEvent.AuditEventAction.D, + () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110150", "Application"), + () -> new Coding( + "http://terminology.hl7.org/CodeSystem/provenance-participant-type", "custodian", "Custodian")), PATIENT_DELETE( - "https://profiles.ihe.net/ITI/BALP/StructureDefinition/IHE.BasicAudit.PatientDelete", - AuditEvent.AuditEventAction.D, - () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110150", "Application"), - () -> new Coding("http://terminology.hl7.org/CodeSystem/provenance-participant-type", "custodian", "Custodian") - ), + "https://profiles.ihe.net/ITI/BALP/StructureDefinition/IHE.BasicAudit.PatientDelete", + AuditEvent.AuditEventAction.D, + () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110150", "Application"), + () -> new Coding( + "http://terminology.hl7.org/CodeSystem/provenance-participant-type", "custodian", "Custodian")), BASIC_READ( - "https://profiles.ihe.net/ITI/BALP/StructureDefinition/IHE.BasicAudit.Read", - AuditEvent.AuditEventAction.R, - () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110153", "Source Role ID"), - () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110152", "Destination Role ID") - ), + "https://profiles.ihe.net/ITI/BALP/StructureDefinition/IHE.BasicAudit.Read", + AuditEvent.AuditEventAction.R, + () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110153", "Source Role ID"), + () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110152", "Destination Role ID")), PATIENT_READ( - "https://profiles.ihe.net/ITI/BALP/StructureDefinition/IHE.BasicAudit.PatientRead", - AuditEvent.AuditEventAction.R, - () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110153", "Source Role ID"), - () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110152", "Destination Role ID") - ), + "https://profiles.ihe.net/ITI/BALP/StructureDefinition/IHE.BasicAudit.PatientRead", + AuditEvent.AuditEventAction.R, + () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110153", "Source Role ID"), + () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110152", "Destination Role ID")), BASIC_QUERY( - "https://profiles.ihe.net/ITI/BALP/StructureDefinition/IHE.BasicAudit.Query", - AuditEvent.AuditEventAction.E, - () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110153", "Source Role ID"), - () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110152", "Destination Role ID") - ), + "https://profiles.ihe.net/ITI/BALP/StructureDefinition/IHE.BasicAudit.Query", + AuditEvent.AuditEventAction.E, + () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110153", "Source Role ID"), + () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110152", "Destination Role ID")), PATIENT_QUERY( - "https://profiles.ihe.net/ITI/BALP/StructureDefinition/IHE.BasicAudit.PatientQuery", - AuditEvent.AuditEventAction.E, - () -> new 
Coding("http://dicom.nema.org/resources/ontology/DCM", "110153", "Source Role ID"), - () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110152", "Destination Role ID") - ), - + "https://profiles.ihe.net/ITI/BALP/StructureDefinition/IHE.BasicAudit.PatientQuery", + AuditEvent.AuditEventAction.E, + () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110153", "Source Role ID"), + () -> new Coding("http://dicom.nema.org/resources/ontology/DCM", "110152", "Destination Role ID")), ; private final String myProfileUrl; private final AuditEvent.AuditEventAction myAction; private final Supplier myAgentClientTypeCoding; private final Supplier myAgentServerTypeCoding; - BalpProfileEnum(String theProfileUrl, AuditEvent.AuditEventAction theAction, Supplier theAgentClientTypeCoding, Supplier theAgentServerTypeCoding) { + BalpProfileEnum( + String theProfileUrl, + AuditEvent.AuditEventAction theAction, + Supplier theAgentClientTypeCoding, + Supplier theAgentServerTypeCoding) { myProfileUrl = theProfileUrl; myAction = theAction; myAgentClientTypeCoding = theAgentClientTypeCoding; diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/FhirClientBalpSink.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/FhirClientBalpSink.java index e7df5fdbde3..23911345825 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/FhirClientBalpSink.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/FhirClientBalpSink.java @@ -26,9 +26,9 @@ import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.r4.model.AuditEvent; +import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.List; public class FhirClientBalpSink implements IBalpAuditEventSink { @@ -61,7 +61,10 @@ public class FhirClientBalpSink implements IBalpAuditEventSink { * @param theClientInterceptors An optional list of interceptors to register against * the client. May be {@literal null}. 
*/ - public FhirClientBalpSink(@Nonnull FhirContext theFhirContext, @Nonnull String theTargetBaseUrl, @Nullable List theClientInterceptors) { + public FhirClientBalpSink( + @Nonnull FhirContext theFhirContext, + @Nonnull String theTargetBaseUrl, + @Nullable List theClientInterceptors) { this(createClient(theFhirContext, theTargetBaseUrl, theClientInterceptors)); } @@ -86,13 +89,13 @@ public class FhirClientBalpSink implements IBalpAuditEventSink { } protected void transmitEventToClient(IBaseResource auditEvent) { - myClient - .create() - .resource(auditEvent) - .execute(); + myClient.create().resource(auditEvent).execute(); } - static IGenericClient createClient(@Nonnull FhirContext theFhirContext, @Nonnull String theTargetBaseUrl, @Nullable List theClientInterceptors) { + static IGenericClient createClient( + @Nonnull FhirContext theFhirContext, + @Nonnull String theTargetBaseUrl, + @Nullable List theClientInterceptors) { Validate.notNull(theFhirContext, "theFhirContext must not be null"); Validate.notBlank(theTargetBaseUrl, "theTargetBaseUrl must not be null or blank"); IGenericClient client = theFhirContext.newRestfulGenericClient(theTargetBaseUrl); diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/IBalpAuditContextServices.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/IBalpAuditContextServices.java index d95ae94176d..412991d6d08 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/IBalpAuditContextServices.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/IBalpAuditContextServices.java @@ -63,7 +63,9 @@ public interface IBalpAuditContextServices { default String getNetworkAddress(RequestDetails theRequestDetails) { String remoteAddr = null; if (theRequestDetails instanceof ServletRequestDetails) { - remoteAddr = ((ServletRequestDetails) theRequestDetails).getServletRequest().getRemoteAddr(); + remoteAddr = ((ServletRequestDetails) theRequestDetails) + .getServletRequest() + .getRemoteAddr(); } return remoteAddr; } @@ -89,7 +91,10 @@ public interface IBalpAuditContextServices { * references within BALP events. 
*/ @Nonnull - default String massageResourceIdForStorage(@Nonnull RequestDetails theRequestDetails, @Nonnull IBaseResource theResource, @Nonnull IIdType theResourceId) { + default String massageResourceIdForStorage( + @Nonnull RequestDetails theRequestDetails, + @Nonnull IBaseResource theResource, + @Nonnull IIdType theResourceId) { String serverBaseUrl = theRequestDetails.getServerBaseForRequest(); String resourceName = theResourceId.getResourceType(); return theResourceId.withServerBase(serverBaseUrl, resourceName).getValue(); diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/IBalpAuditEventSink.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/IBalpAuditEventSink.java index 7ea24b1d47e..f32e2fa56ee 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/IBalpAuditEventSink.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/storage/interceptor/balp/IBalpAuditEventSink.java @@ -24,5 +24,4 @@ import org.hl7.fhir.r4.model.AuditEvent; public interface IBalpAuditEventSink { void recordAuditEvent(AuditEvent theAuditEvent); - } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/util/CanonicalIdentifier.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/util/CanonicalIdentifier.java index 6f678768ac7..5018a49dfff 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/util/CanonicalIdentifier.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/util/CanonicalIdentifier.java @@ -83,16 +83,13 @@ public class CanonicalIdentifier extends BaseIdentifierDt { CanonicalIdentifier that = (CanonicalIdentifier) theO; return new EqualsBuilder() - .append(mySystem, that.mySystem) - .append(myValue, that.myValue) - .isEquals(); + .append(mySystem, that.mySystem) + .append(myValue, that.myValue) + .isEquals(); } @Override public int hashCode() { - return new HashCodeBuilder(17, 37) - .append(mySystem) - .append(myValue) - .toHashCode(); + return new HashCodeBuilder(17, 37).append(mySystem).append(myValue).toHashCode(); } } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/util/ThreadPoolUtil.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/util/ThreadPoolUtil.java index acb06b8fa0d..f049eac59a0 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/util/ThreadPoolUtil.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/util/ThreadPoolUtil.java @@ -27,23 +27,30 @@ import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; import javax.annotation.Nonnull; public final class ThreadPoolUtil { - private ThreadPoolUtil() { - } - + private ThreadPoolUtil() {} @Nonnull - public static ThreadPoolTaskExecutor newThreadPool(int theCorePoolSize, int theMaxPoolSize, String theThreadNamePrefix) { + public static ThreadPoolTaskExecutor newThreadPool( + int theCorePoolSize, int theMaxPoolSize, String theThreadNamePrefix) { return newThreadPool(theCorePoolSize, theMaxPoolSize, theThreadNamePrefix, 0); } @Nonnull - public static ThreadPoolTaskExecutor newThreadPool(int theCorePoolSize, int theMaxPoolSize, String theThreadNamePrefix, int theQueueCapacity) { + public static ThreadPoolTaskExecutor newThreadPool( + int theCorePoolSize, int theMaxPoolSize, String theThreadNamePrefix, int theQueueCapacity) { return newThreadPool(theCorePoolSize, theMaxPoolSize, theThreadNamePrefix, theQueueCapacity, null); } @Nonnull - public static ThreadPoolTaskExecutor newThreadPool(int theCorePoolSize, int theMaxPoolSize, String theThreadNamePrefix, int theQueueCapacity, TaskDecorator taskDecorator) { - 
Validate.isTrue(theCorePoolSize == theMaxPoolSize || theQueueCapacity == 0, "If the queue capacity is greater than 0, core pool size needs to match max pool size or the system won't grow the queue"); + public static ThreadPoolTaskExecutor newThreadPool( + int theCorePoolSize, + int theMaxPoolSize, + String theThreadNamePrefix, + int theQueueCapacity, + TaskDecorator taskDecorator) { + Validate.isTrue( + theCorePoolSize == theMaxPoolSize || theQueueCapacity == 0, + "If the queue capacity is greater than 0, core pool size needs to match max pool size or the system won't grow the queue"); Validate.isTrue(theThreadNamePrefix.endsWith("-"), "Thread pool prefix name must end with a hyphen"); ThreadPoolTaskExecutor asyncTaskExecutor = new ThreadPoolTaskExecutor(); asyncTaskExecutor.setCorePoolSize(theCorePoolSize); diff --git a/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/ctx/FhirDstu2_1.java b/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/ctx/FhirDstu2_1.java index 6f3d89da9af..cf90e7a2201 100644 --- a/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/ctx/FhirDstu2_1.java +++ b/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/ctx/FhirDstu2_1.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -19,22 +19,21 @@ */ package org.hl7.fhir.dstu2016may.hapi.ctx; +import ca.uhn.fhir.context.*; +import ca.uhn.fhir.fhirpath.IFhirPath; import ca.uhn.fhir.i18n.Msg; -import java.io.InputStream; -import java.util.Date; -import java.util.List; - +import ca.uhn.fhir.model.api.IFhirVersion; +import ca.uhn.fhir.model.primitive.IdDt; +import ca.uhn.fhir.rest.api.IVersionSpecificBundleFactory; +import ca.uhn.fhir.util.ReflectionUtil; import org.apache.commons.lang3.StringUtils; import org.hl7.fhir.dstu2016may.hapi.rest.server.Dstu2_1BundleFactory; import org.hl7.fhir.dstu2016may.model.*; import org.hl7.fhir.instance.model.api.*; -import ca.uhn.fhir.context.*; -import ca.uhn.fhir.fhirpath.IFhirPath; -import ca.uhn.fhir.model.api.IFhirVersion; -import ca.uhn.fhir.model.primitive.IdDt; -import ca.uhn.fhir.rest.api.IVersionSpecificBundleFactory; -import ca.uhn.fhir.util.ReflectionUtil; +import java.io.InputStream; +import java.util.Date; +import java.util.List; public class FhirDstu2_1 implements IFhirVersion { @@ -68,12 +67,14 @@ public class FhirDstu2_1 implements IFhirVersion { @Override public InputStream getFhirVersionPropertiesFile() { - InputStream str = FhirDstu2_1.class.getResourceAsStream("/org/hl7/fhir/dstu2016may/model/fhirversion.properties"); + InputStream str = + FhirDstu2_1.class.getResourceAsStream("/org/hl7/fhir/dstu2016may/model/fhirversion.properties"); if (str == null) { str = FhirDstu2_1.class.getResourceAsStream("/org/hl7/fhir/dstu2016may/model/fhirversion.properties"); } if (str == null) { - throw new ConfigurationException(Msg.code(467) + "Can not find model property file on classpath: " + "/ca/uhn/fhir/model/dstu2016may/fhirversion.properties"); + throw new ConfigurationException(Msg.code(467) + "Can not find model property file on classpath: " + + 
"/ca/uhn/fhir/model/dstu2016may/fhirversion.properties"); } return str; } @@ -117,5 +118,4 @@ public class FhirDstu2_1 implements IFhirVersion { public Object getServerVersion() { return ReflectionUtil.newInstanceOfFhirServerType("org.hl7.fhir.dstu2016may.hapi.ctx.FhirServerDstu2_1"); } - } diff --git a/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/ctx/FhirServerDstu2_1.java b/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/ctx/FhirServerDstu2_1.java index 2535c1d2853..39b7b6d2b37 100644 --- a/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/ctx/FhirServerDstu2_1.java +++ b/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/ctx/FhirServerDstu2_1.java @@ -10,5 +10,4 @@ public class FhirServerDstu2_1 implements IFhirVersionServer { public ServerConformanceProvider createServerConformanceProvider(RestfulServer theServer) { return new ServerConformanceProvider(theServer); } - } diff --git a/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/rest/server/Dstu2_1BundleFactory.java b/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/rest/server/Dstu2_1BundleFactory.java index 1afe308989e..be6e75ff6b7 100644 --- a/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/rest/server/Dstu2_1BundleFactory.java +++ b/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/rest/server/Dstu2_1BundleFactory.java @@ -42,13 +42,13 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.UUID; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -63,7 +63,12 @@ public class Dstu2_1BundleFactory implements IVersionSpecificBundleFactory { } @Override - public void addResourcesToBundle(List theResult, BundleTypeEnum theBundleType, String theServerBase, BundleInclusionRule theBundleInclusionRule, Set theIncludes) { + public void addResourcesToBundle( + List theResult, + BundleTypeEnum theBundleType, + String theServerBase, + BundleInclusionRule theBundleInclusionRule, + Set theIncludes) { ensureBundle(); List includedResources = new ArrayList(); @@ -92,11 +97,13 @@ public class Dstu2_1BundleFactory implements IVersionSpecificBundleFactory { List addedResourcesThisPass = new ArrayList(); for (ResourceReferenceInfo nextRefInfo : references) { - if (theBundleInclusionRule != null && !theBundleInclusionRule.shouldIncludeReferencedResource(nextRefInfo, theIncludes)) { + if (theBundleInclusionRule != null + && !theBundleInclusionRule.shouldIncludeReferencedResource(nextRefInfo, theIncludes)) { continue; } - IAnyResource nextRes = (IAnyResource) nextRefInfo.getResourceReference().getResource(); + IAnyResource nextRes = + (IAnyResource) nextRefInfo.getResourceReference().getResource(); if (nextRes != null) { if (nextRes.getIdElement().hasIdPart()) { if (containedIds.contains(nextRes.getIdElement().getValue())) { @@ -114,7 +121,6 @@ public class Dstu2_1BundleFactory implements IVersionSpecificBundleFactory { addedResourceIds.add(id); addedResourcesThisPass.add(nextRes); } - } } } @@ -124,7 +130,8 @@ public class Dstu2_1BundleFactory implements IVersionSpecificBundleFactory { // Linked resources may themselves have linked resources 
references = new ArrayList(); for (IAnyResource iResource : addedResourcesThisPass) { - List newReferences = myContext.newTerser().getAllResourceReferences(iResource); + List newReferences = + myContext.newTerser().getAllResourceReferences(iResource); references.addAll(newReferences); } } while (references.isEmpty() == false); @@ -132,7 +139,8 @@ public class Dstu2_1BundleFactory implements IVersionSpecificBundleFactory { BundleEntryComponent entry = myBundle.addEntry().setResource((Resource) next); Resource nextAsResource = (Resource) next; IIdType id = populateBundleEntryFullUrl(next, entry); - BundleEntryTransactionMethodEnum httpVerb = ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.get(nextAsResource); + BundleEntryTransactionMethodEnum httpVerb = + ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.get(nextAsResource); if (httpVerb != null) { entry.getRequest().getMethodElement().setValueAsString(httpVerb.name()); if (id != null) { @@ -154,12 +162,14 @@ public class Dstu2_1BundleFactory implements IVersionSpecificBundleFactory { entry.setResource((Resource) next).getSearch().setMode(SearchEntryMode.INCLUDE); populateBundleEntryFullUrl(next, entry); } - } @Override - public void addRootPropertiesToBundle(String theId, @Nonnull BundleLinks theBundleLinks, Integer theTotalResults, - IPrimitiveType theLastUpdated) { + public void addRootPropertiesToBundle( + String theId, + @Nonnull BundleLinks theBundleLinks, + Integer theTotalResults, + IPrimitiveType theLastUpdated) { ensureBundle(); myBase = theBundleLinks.serverBase; @@ -252,7 +262,8 @@ public class Dstu2_1BundleFactory implements IVersionSpecificBundleFactory { IdType id = new IdType(next.getResponse().getLocation()); String resourceType = id.getResourceType(); if (isNotBlank(resourceType)) { - IAnyResource res = (IAnyResource) myContext.getResourceDefinition(resourceType).newInstance(); + IAnyResource res = (IAnyResource) + myContext.getResourceDefinition(resourceType).newInstance(); res.setId(id); retVal.add(res); } @@ -260,5 +271,4 @@ public class Dstu2_1BundleFactory implements IVersionSpecificBundleFactory { } return retVal; } - } diff --git a/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/rest/server/ServerConformanceProvider.java b/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/rest/server/ServerConformanceProvider.java index e69f1974909..59f3614d32f 100644 --- a/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/rest/server/ServerConformanceProvider.java +++ b/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/rest/server/ServerConformanceProvider.java @@ -19,10 +19,10 @@ */ package org.hl7.fhir.dstu2016may.hapi.rest.server; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.context.RuntimeSearchParam; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.Metadata; @@ -30,28 +30,28 @@ import ca.uhn.fhir.rest.annotation.Read; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; -import ca.uhn.fhir.rest.server.*; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; -import ca.uhn.fhir.rest.server.method.*; -import ca.uhn.fhir.rest.server.method.SearchParameter; import 
ca.uhn.fhir.rest.server.method.OperationMethodBinding.ReturnType; +import ca.uhn.fhir.rest.server.method.SearchParameter; import ca.uhn.fhir.rest.server.util.BaseServerCapabilityStatementProvider; +import ca.uhn.fhir.rest.server.*; +import ca.uhn.fhir.rest.server.method.*; import org.apache.commons.lang3.StringUtils; -import org.hl7.fhir.dstu2016may.model.*; -import org.hl7.fhir.dstu2016may.model.Conformance.*; import org.hl7.fhir.dstu2016may.model.Enumerations.ConformanceResourceStatus; import org.hl7.fhir.dstu2016may.model.Enumerations.ResourceType; import org.hl7.fhir.dstu2016may.model.OperationDefinition.OperationDefinitionParameterComponent; -import org.hl7.fhir.dstu2016may.model.OperationDefinition.OperationKind; import org.hl7.fhir.dstu2016may.model.OperationDefinition.OperationParameterUse; +import org.hl7.fhir.dstu2016may.model.*; +import org.hl7.fhir.dstu2016may.model.Conformance.*; +import org.hl7.fhir.dstu2016may.model.OperationDefinition.OperationKind; import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; +import java.util.Map.Entry; +import java.util.*; import javax.servlet.ServletContext; import javax.servlet.http.HttpServletRequest; -import java.util.*; -import java.util.Map.Entry; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -59,7 +59,8 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; /** * Server FHIR Provider which serves the conformance statement for a RESTful server implementation */ -public class ServerConformanceProvider extends BaseServerCapabilityStatementProvider implements IServerConformanceProvider { +public class ServerConformanceProvider extends BaseServerCapabilityStatementProvider + implements IServerConformanceProvider { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ServerConformanceProvider.class); private String myPublisher = "Not provided"; @@ -88,7 +89,10 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv super(theServerConfiguration); } - private void checkBindingForSystemOps(ConformanceRestComponent rest, Set systemOps, BaseMethodBinding nextMethodBinding) { + private void checkBindingForSystemOps( + ConformanceRestComponent rest, + Set systemOps, + BaseMethodBinding nextMethodBinding) { if (nextMethodBinding.getRestOperationType() != null) { String sysOpCode = nextMethodBinding.getRestOperationType().getCode(); if (sysOpCode != null) { @@ -120,7 +124,8 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv resourceToMethods.get(resourceName).add(nextMethodBinding); } } - for (BaseMethodBinding nextMethodBinding : getServerConfiguration(theRequestDetails).getServerBindings()) { + for (BaseMethodBinding nextMethodBinding : + getServerConfiguration(theRequestDetails).getServerBindings()) { String resourceName = ""; if (resourceToMethods.containsKey(resourceName) == false) { resourceToMethods.put(resourceName, new ArrayList<>()); @@ -131,7 +136,8 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv } private DateTimeType conformanceDate(RequestDetails theRequestDetails) { - IPrimitiveType buildDate = getServerConfiguration(theRequestDetails).getConformanceDate(); + IPrimitiveType buildDate = + getServerConfiguration(theRequestDetails).getConformanceDate(); if (buildDate != null && buildDate.getValue() != null) { try { return new 
DateTimeType(buildDate.getValueAsString()); @@ -156,10 +162,10 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv retVal.append('s'); } retVal.append('-'); - + // Exclude the leading $ retVal.append(theMethodBinding.getName(), 1, theMethodBinding.getName().length()); - + return retVal.toString(); } @@ -187,12 +193,13 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv // effort since the parser // needs to be modified to actually allow it - ServletContext servletContext = (ServletContext) (theRequest == null ? null : theRequest.getAttribute(RestfulServer.SERVLET_CONTEXT_ATTRIBUTE)); - String serverBase = serverConfiguration.getServerAddressStrategy().determineServerBase(servletContext, theRequest); - retVal - .getImplementation() - .setUrl(serverBase) - .setDescription(serverConfiguration.getImplementationDescription()); + ServletContext servletContext = (ServletContext) + (theRequest == null ? null : theRequest.getAttribute(RestfulServer.SERVLET_CONTEXT_ATTRIBUTE)); + String serverBase = + serverConfiguration.getServerAddressStrategy().determineServerBase(servletContext, theRequest); + retVal.getImplementation() + .setUrl(serverBase) + .setDescription(serverConfiguration.getImplementationDescription()); retVal.setKind(ConformanceStatementKind.INSTANCE); retVal.getSoftware().setName(serverConfiguration.getServerName()); @@ -214,7 +221,8 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv Set resourceOps = new HashSet<>(); ConformanceRestResourceComponent resource = rest.addResource(); String resourceName = nextEntry.getKey(); - RuntimeResourceDefinition def = serverConfiguration.getFhirContext().getResourceDefinition(resourceName); + RuntimeResourceDefinition def = + serverConfiguration.getFhirContext().getResourceDefinition(resourceName); resource.getTypeElement().setValue(def.getName()); resource.getProfile().setReference((def.getResourceProfile(serverBase))); @@ -224,7 +232,8 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv // Conformance.RestResourceSearchParam>(); for (BaseMethodBinding nextMethodBinding : nextEntry.getValue()) { if (nextMethodBinding.getRestOperationType() != null) { - String resOpCode = nextMethodBinding.getRestOperationType().getCode(); + String resOpCode = + nextMethodBinding.getRestOperationType().getCode(); if (resOpCode != null) { TypeRestfulInteraction resOp; try { @@ -248,21 +257,21 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv if (nextMethodBinding.isSupportsConditional()) { switch (resOp) { - case CREATE: - resource.setConditionalCreate(true); - break; - case DELETE: - if (nextMethodBinding.isSupportsConditionalMultiple()) { - resource.setConditionalDelete(ConditionalDeleteStatus.MULTIPLE); - } else { - resource.setConditionalDelete(ConditionalDeleteStatus.SINGLE); - } - break; - case UPDATE: - resource.setConditionalUpdate(true); - break; - default: - break; + case CREATE: + resource.setConditionalCreate(true); + break; + case DELETE: + if (nextMethodBinding.isSupportsConditionalMultiple()) { + resource.setConditionalDelete(ConditionalDeleteStatus.MULTIPLE); + } else { + resource.setConditionalDelete(ConditionalDeleteStatus.SINGLE); + } + break; + case UPDATE: + resource.setConditionalUpdate(true); + break; + default: + break; } } } @@ -272,13 +281,22 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv checkBindingForSystemOps(rest, systemOps, 
nextMethodBinding); if (nextMethodBinding instanceof SearchMethodBinding) { - handleSearchMethodBinding(rest, resource, resourceName, def, includes, (SearchMethodBinding) nextMethodBinding, theRequestDetails); + handleSearchMethodBinding( + rest, + resource, + resourceName, + def, + includes, + (SearchMethodBinding) nextMethodBinding, + theRequestDetails); } else if (nextMethodBinding instanceof OperationMethodBinding) { OperationMethodBinding methodBinding = (OperationMethodBinding) nextMethodBinding; String opName = bindings.getOperationBindingToId().get(methodBinding); if (operationNames.add(opName)) { // Only add each operation (by name) once - rest.addOperation().setName(methodBinding.getName().substring(1)).setDefinition(new Reference("OperationDefinition/" + opName)); + rest.addOperation() + .setName(methodBinding.getName().substring(1)) + .setDefinition(new Reference("OperationDefinition/" + opName)); } } @@ -299,7 +317,6 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv return o1.ordinal() - o2.ordinal(); } }); - } for (String nextInclude : includes) { @@ -313,7 +330,9 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv String opName = bindings.getOperationBindingToId().get(methodBinding); if (operationNames.add(opName)) { ourLog.debug("Found bound operation: {}", opName); - rest.addOperation().setName(methodBinding.getName().substring(1)).setDefinition(new Reference("OperationDefinition/" + opName)); + rest.addOperation() + .setName(methodBinding.getName().substring(1)) + .setDefinition(new Reference("OperationDefinition/" + opName)); } } } @@ -323,8 +342,14 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv return retVal; } - private void handleSearchMethodBinding(ConformanceRestComponent rest, ConformanceRestResourceComponent resource, String resourceName, RuntimeResourceDefinition def, TreeSet includes, - SearchMethodBinding searchMethodBinding, RequestDetails theRequestDetails) { + private void handleSearchMethodBinding( + ConformanceRestComponent rest, + ConformanceRestResourceComponent resource, + String resourceName, + RuntimeResourceDefinition def, + TreeSet includes, + SearchMethodBinding searchMethodBinding, + RequestDetails theRequestDetails) { includes.addAll(searchMethodBinding.getIncludes()); List params = searchMethodBinding.getParameters(); @@ -389,10 +414,13 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv param.setDocumentation(nextParamDescription); if (nextParameter.getParamType() != null) { - param.getTypeElement().setValueAsString(nextParameter.getParamType().getCode()); + param.getTypeElement() + .setValueAsString(nextParameter.getParamType().getCode()); } for (Class nextTarget : nextParameter.getDeclaredTypes()) { - RuntimeResourceDefinition targetDef = getServerConfiguration(theRequestDetails).getFhirContext().getResourceDefinition(nextTarget); + RuntimeResourceDefinition targetDef = getServerConfiguration(theRequestDetails) + .getFhirContext() + .getResourceDefinition(nextTarget); if (targetDef != null) { ResourceType code; try { @@ -409,8 +437,6 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv } } - - @Read(type = OperationDefinition.class) public OperationDefinition readOperationDefinition(@IdParam IdType theId, RequestDetails theRequestDetails) { if (theId == null || theId.hasIdPart() == false) { @@ -419,7 +445,8 @@ public class ServerConformanceProvider extends 
BaseServerCapabilityStatementProv RestfulServerConfiguration serverConfiguration = getServerConfiguration(theRequestDetails); Bindings bindings = serverConfiguration.provideBindings(); - List sharedDescriptions = bindings.getOperationIdToBindings().get(theId.getIdPart()); + List sharedDescriptions = + bindings.getOperationIdToBindings().get(theId.getIdPart()); if (sharedDescriptions == null || sharedDescriptions.isEmpty()) { throw new ResourceNotFoundException(Msg.code(1991) + theId); } @@ -500,14 +527,14 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv op.setName(op.getCode()); } } - + if (op.hasSystem() == false) { op.setSystem(false); } if (op.hasInstance() == false) { op.setInstance(false); } - + return op; } diff --git a/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/validation/BaseValidatorBridge.java b/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/validation/BaseValidatorBridge.java index 2bcf8cbfd61..a32a6d6e272 100644 --- a/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/validation/BaseValidatorBridge.java +++ b/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/validation/BaseValidatorBridge.java @@ -1,9 +1,8 @@ package org.hl7.fhir.dstu2016may.hapi.validation; -import org.hl7.fhir.utilities.validation.ValidationMessage; -import org.hl7.fhir.instance.model.api.IBaseResource; - import ca.uhn.fhir.validation.*; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.utilities.validation.ValidationMessage; import java.util.List; @@ -30,7 +29,8 @@ abstract class BaseValidatorBridge implements IValidatorModule { hapiMessage.setLocationString(riMessage.getLocation()); hapiMessage.setMessage(riMessage.getMessage()); if (riMessage.getLevel() != null) { - hapiMessage.setSeverity(ResultSeverityEnum.fromCode(riMessage.getLevel().toCode())); + hapiMessage.setSeverity( + ResultSeverityEnum.fromCode(riMessage.getLevel().toCode())); } theCtx.addValidationMessage(hapiMessage); } @@ -42,5 +42,4 @@ abstract class BaseValidatorBridge implements IValidatorModule { public void validateResource(IValidationContext theCtx) { doValidate(theCtx); } - } diff --git a/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/validation/HapiWorkerContext.java b/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/validation/HapiWorkerContext.java index dc174c2e22b..0c3dd401c3f 100644 --- a/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/validation/HapiWorkerContext.java +++ b/hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/validation/HapiWorkerContext.java @@ -1,10 +1,10 @@ package org.hl7.fhir.dstu2016may.hapi.validation; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.support.ConceptValidationOptions; import ca.uhn.fhir.context.support.IValidationSupport; import ca.uhn.fhir.context.support.ValidationSupportContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.rest.api.Constants; import org.apache.commons.lang3.Validate; import org.hl7.fhir.dstu2016may.formats.IParser; @@ -96,7 +96,6 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext throw new UnsupportedOperationException(Msg.code(471)); } - @Override public IParser getParser(ParserType theType) { throw new UnsupportedOperationException(Msg.code(472)); @@ -144,7 +143,9 @@ public final class HapiWorkerContext extends I18nBase implements 
IWorkerContext @Override public StructureDefinition fetchTypeDefinition(String typeName) { - return fetchResource(org.hl7.fhir.dstu2016may.model.StructureDefinition.class, "http://hl7.org/fhir/StructureDefinition/" + typeName); + return fetchResource( + org.hl7.fhir.dstu2016may.model.StructureDefinition.class, + "http://hl7.org/fhir/StructureDefinition/" + typeName); } @Override @@ -152,15 +153,48 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext if (myValidationSupport == null) { return false; } else { - return myValidationSupport.isCodeSystemSupported(new ValidationSupportContext(myValidationSupport), theSystem); + return myValidationSupport.isCodeSystemSupported( + new ValidationSupportContext(myValidationSupport), theSystem); } } @Override public Set typeTails() { - return new HashSet<>(Arrays.asList("Integer", "UnsignedInt", "PositiveInt", "Decimal", "DateTime", "Date", "Time", "Instant", "String", "Uri", "Oid", "Uuid", "Id", "Boolean", "Code", - "Markdown", "Base64Binary", "Coding", "CodeableConcept", "Attachment", "Identifier", "Quantity", "SampledData", "Range", "Period", "Ratio", "HumanName", "Address", "ContactPoint", - "Timing", "Reference", "Annotation", "Signature", "Meta")); + return new HashSet<>(Arrays.asList( + "Integer", + "UnsignedInt", + "PositiveInt", + "Decimal", + "DateTime", + "Date", + "Time", + "Instant", + "String", + "Uri", + "Oid", + "Uuid", + "Id", + "Boolean", + "Code", + "Markdown", + "Base64Binary", + "Coding", + "CodeableConcept", + "Attachment", + "Identifier", + "Quantity", + "SampledData", + "Range", + "Period", + "Ratio", + "HumanName", + "Address", + "ContactPoint", + "Timing", + "Reference", + "Annotation", + "Signature", + "Meta")); } @Override @@ -185,7 +219,13 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext @Override public ValidationResult validateCode(String theSystem, String theCode, String theDisplay) { - IValidationSupport.CodeValidationResult result = myValidationSupport.validateCode(new ValidationSupportContext(myValidationSupport), new ConceptValidationOptions(), theSystem, theCode, theDisplay, null); + IValidationSupport.CodeValidationResult result = myValidationSupport.validateCode( + new ValidationSupportContext(myValidationSupport), + new ConceptValidationOptions(), + theSystem, + theCode, + theDisplay, + null); if (result == null) { return null; } @@ -193,12 +233,14 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext if (result.getSeverity() != null) { severity = OperationOutcome.IssueSeverity.fromCode(result.getSeverityCode()); } - ConceptDefinitionComponent definition = result.getCode() != null ? new ConceptDefinitionComponent().setCode(result.getCode()) : null; + ConceptDefinitionComponent definition = + result.getCode() != null ? 
new ConceptDefinitionComponent().setCode(result.getCode()) : null; return new ValidationResult(severity, result.getMessage(), definition); } @Override - public ValidationResult validateCode(String theSystem, String theCode, String theDisplay, ConceptSetComponent theVsi) { + public ValidationResult validateCode( + String theSystem, String theCode, String theDisplay, ConceptSetComponent theVsi) { throw new UnsupportedOperationException(Msg.code(479)); } @@ -218,12 +260,14 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext } } - boolean caseSensitive = true; if (isNotBlank(theSystem)) { CodeSystem system = fetchCodeSystem(theSystem); if (system == null) { - return new ValidationResult(OperationOutcome.IssueSeverity.INFORMATION, "Code " + Constants.codeSystemWithDefaultDescription(theSystem) + "/" + theCode + " was not validated because the code system is not present"); + return new ValidationResult( + OperationOutcome.IssueSeverity.INFORMATION, + "Code " + Constants.codeSystemWithDefaultDescription(theSystem) + "/" + theCode + + " was not validated because the code system is not present"); } if (system.hasCaseSensitive()) { @@ -245,7 +289,11 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext ValueSet expansion = new ValueSet(); for (ConceptSetComponent nextInclude : theVs.getCompose().getInclude()) { for (ConceptReferenceComponent nextConcept : nextInclude.getConcept()) { - expansion.getExpansion().addContains().setCode(nextConcept.getCode()).setDisplay(nextConcept.getDisplay()); + expansion + .getExpansion() + .addContains() + .setCode(nextConcept.getCode()) + .setDisplay(nextConcept.getDisplay()); } } expandedValueSet = new ValueSetExpansionOutcome(expansion); @@ -255,7 +303,8 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext expandedValueSet = expandVS(theVs, true); } - for (ValueSetExpansionContainsComponent next : expandedValueSet.getValueset().getExpansion().getContains()) { + for (ValueSetExpansionContainsComponent next : + expandedValueSet.getValueset().getExpansion().getContains()) { String nextCode = next.getCode(); if (!caseSensitive) { nextCode = nextCode.toUpperCase(); @@ -271,7 +320,10 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext } } - return new ValidationResult(OperationOutcome.IssueSeverity.ERROR, "Unknown code[" + theCode + "] in system[" + Constants.codeSystemWithDefaultDescription(theSystem) + "]"); + return new ValidationResult( + OperationOutcome.IssueSeverity.ERROR, + "Unknown code[" + theCode + "] in system[" + Constants.codeSystemWithDefaultDescription(theSystem) + + "]"); } @Override @@ -283,6 +335,4 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext public ValueSetExpansionComponent expandVS(ConceptSetComponent theInc) { throw new UnsupportedOperationException(Msg.code(481)); } - - } diff --git a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/FhirDstu2.java b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/FhirDstu2.java index e58e425902b..b78b6a7f52d 100644 --- a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/FhirDstu2.java +++ b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/FhirDstu2.java @@ -19,15 +19,9 @@ */ package ca.uhn.fhir.model.dstu2; -import ca.uhn.fhir.i18n.Msg; -import java.io.InputStream; -import java.util.Date; - -import ca.uhn.fhir.fhirpath.IFhirPath; -import org.apache.commons.lang3.StringUtils; -import 
org.hl7.fhir.instance.model.api.*; - import ca.uhn.fhir.context.*; +import ca.uhn.fhir.fhirpath.IFhirPath; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.*; import ca.uhn.fhir.model.base.composite.*; import ca.uhn.fhir.model.dstu2.composite.*; @@ -36,6 +30,11 @@ import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.api.IVersionSpecificBundleFactory; import ca.uhn.fhir.rest.server.provider.dstu2.Dstu2BundleFactory; import ca.uhn.fhir.util.ReflectionUtil; +import org.apache.commons.lang3.StringUtils; +import org.hl7.fhir.instance.model.api.*; + +import java.io.InputStream; +import java.util.Date; public class FhirDstu2 implements IFhirVersion { @@ -46,7 +45,6 @@ public class FhirDstu2 implements IFhirVersion { throw new UnsupportedOperationException(Msg.code(578) + "FluentPath is not supported in DSTU2 contexts"); } - @Override public IResource generateProfile(RuntimeResourceDefinition theRuntimeResourceDefinition, String theServerBase) { StructureDefinition retVal = new StructureDefinition(); @@ -74,7 +72,8 @@ public class FhirDstu2 implements IFhirVersion { str = FhirDstu2.class.getResourceAsStream("ca/uhn/fhir/model/dstu2/fhirversion.properties"); } if (str == null) { - throw new ConfigurationException(Msg.code(579) + "Can not find model property file on classpath: " + "/ca/uhn/fhir/model/dstu2/fhirversion.properties"); + throw new ConfigurationException(Msg.code(579) + "Can not find model property file on classpath: " + + "/ca/uhn/fhir/model/dstu2/fhirversion.properties"); } return str; } @@ -114,13 +113,8 @@ public class FhirDstu2 implements IFhirVersion { return new IdDt(); } - - - @Override public Object getServerVersion() { return ReflectionUtil.newInstanceOfFhirServerType("ca.uhn.fhir.model.dstu2.FhirServerDstu2"); } - - } diff --git a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/FhirServerDstu2.java b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/FhirServerDstu2.java index 8062ad09970..4a45b86ec68 100644 --- a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/FhirServerDstu2.java +++ b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/FhirServerDstu2.java @@ -29,5 +29,4 @@ public class FhirServerDstu2 implements IFhirVersionServer { public ServerConformanceProvider createServerConformanceProvider(RestfulServer theServer) { return new ServerConformanceProvider(theServer); } - } diff --git a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/AgeDt.java b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/AgeDt.java index f2ea9ccc2c8..a42b3aa5c17 100644 --- a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/AgeDt.java +++ b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/AgeDt.java @@ -20,10 +20,6 @@ package ca.uhn.fhir.model.dstu2.composite; import ca.uhn.fhir.model.api.annotation.DatatypeDef; -import ca.uhn.fhir.model.dstu2.composite.QuantityDt; -import ca.uhn.fhir.model.primitive.IntegerDt; -@DatatypeDef(name="AgeDt", profileOf=QuantityDt.class) -public class AgeDt extends QuantityDt { - -} +@DatatypeDef(name = "AgeDt", profileOf = QuantityDt.class) +public class AgeDt extends QuantityDt {} diff --git a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/BoundCodeableConceptDt.java b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/BoundCodeableConceptDt.java index 546c8eec445..a1cc716e1cd 100644 --- 
a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/BoundCodeableConceptDt.java +++ b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/BoundCodeableConceptDt.java @@ -19,17 +19,16 @@ */ package ca.uhn.fhir.model.dstu2.composite; -import static org.apache.commons.lang3.StringUtils.defaultString; +import ca.uhn.fhir.model.api.IBoundCodeableConcept; +import ca.uhn.fhir.model.api.IValueSetEnumBinder; +import ca.uhn.fhir.model.api.annotation.DatatypeDef; +import org.apache.commons.lang3.Validate; import java.util.Collection; import java.util.HashSet; import java.util.Set; -import org.apache.commons.lang3.Validate; - -import ca.uhn.fhir.model.api.IBoundCodeableConcept; -import ca.uhn.fhir.model.api.IValueSetEnumBinder; -import ca.uhn.fhir.model.api.annotation.DatatypeDef; +import static org.apache.commons.lang3.StringUtils.defaultString; @DatatypeDef(name = "CodeableConcept", isSpecialization = true) public class BoundCodeableConceptDt> extends CodeableConceptDt implements IBoundCodeableConcept { @@ -75,12 +74,14 @@ public class BoundCodeableConceptDt> extends CodeableConceptDt * system defined by the given enumerated types, AND clearing any existing * codings first. If theValue is null, existing codings are cleared and no * codings are added. - * + * * @param theValues * The value to add, or null */ public void setValueAsEnum(Collection theValues) { - Validate.notNull(myBinder, "This object does not have a binder. Constructor BoundCodeableConceptDt() should not be called!"); + Validate.notNull( + myBinder, + "This object does not have a binder. Constructor BoundCodeableConceptDt() should not be called!"); getCoding().clear(); if (theValues != null) { for (T next : theValues) { @@ -94,12 +95,14 @@ public class BoundCodeableConceptDt> extends CodeableConceptDt * system defined by the given enumerated type, AND clearing any existing * codings first. If theValue is null, existing codings are cleared and no * codings are added. - * + * * @param theValue * The value to add, or null */ public void setValueAsEnum(T theValue) { - Validate.notNull(myBinder, "This object does not have a binder. Constructor BoundCodeableConceptDt() should not be called!"); + Validate.notNull( + myBinder, + "This object does not have a binder. Constructor BoundCodeableConceptDt() should not be called!"); getCoding().clear(); if (theValue == null) { return; @@ -112,20 +115,24 @@ public class BoundCodeableConceptDt> extends CodeableConceptDt * and returns the first bound enumerated type that matches. Use * caution using this method, see the return description for more * information. - * + * * @return Returns the bound enumerated type, or null if none * are found. Note that a null return value doesn't neccesarily * imply that this Codeable Concept has no codes, only that it has * no codes that match the enum. */ public Set getValueAsEnum() { - Validate.notNull(myBinder, "This object does not have a binder. Constructor BoundCodeableConceptDt() should not be called!"); + Validate.notNull( + myBinder, + "This object does not have a binder. 
Constructor BoundCodeableConceptDt() should not be called!"); Set retVal = new HashSet(); for (CodingDt next : getCoding()) { if (next == null) { continue; } - T nextT = myBinder.fromCodeString(defaultString(next.getCodeElement().getValue()), defaultString(next.getSystemElement().getValueAsString())); + T nextT = myBinder.fromCodeString( + defaultString(next.getCodeElement().getValue()), + defaultString(next.getSystemElement().getValueAsString())); if (nextT != null) { retVal.add(nextT); } else { @@ -134,5 +141,4 @@ public class BoundCodeableConceptDt> extends CodeableConceptDt } return retVal; } - } diff --git a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/ContainedDt.java b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/ContainedDt.java index 55fdf7c9054..f6432768d27 100644 --- a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/ContainedDt.java +++ b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/ContainedDt.java @@ -20,15 +20,14 @@ package ca.uhn.fhir.model.dstu2.composite; import ca.uhn.fhir.i18n.Msg; -import java.util.ArrayList; -import java.util.List; - -import ca.uhn.fhir.model.api.IDatatype; import ca.uhn.fhir.model.api.IResource; import ca.uhn.fhir.model.api.annotation.Child; import ca.uhn.fhir.model.api.annotation.DatatypeDef; import ca.uhn.fhir.model.base.composite.BaseContainedDt; +import java.util.ArrayList; +import java.util.List; + @DatatypeDef(name = "contained") public class ContainedDt extends BaseContainedDt { @@ -61,5 +60,4 @@ public class ContainedDt extends BaseContainedDt { public void setUserData(String theName, Object theValue) { throw new UnsupportedOperationException(Msg.code(581)); } - } diff --git a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/CountDt.java b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/CountDt.java index 7b5ff9fade3..10c05eab41c 100644 --- a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/CountDt.java +++ b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/CountDt.java @@ -20,9 +20,6 @@ package ca.uhn.fhir.model.dstu2.composite; import ca.uhn.fhir.model.api.annotation.DatatypeDef; -import ca.uhn.fhir.model.dstu2.composite.QuantityDt; -@DatatypeDef(name="CountDt", profileOf=QuantityDt.class) -public class CountDt extends QuantityDt { - -} +@DatatypeDef(name = "CountDt", profileOf = QuantityDt.class) +public class CountDt extends QuantityDt {} diff --git a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/DistanceDt.java b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/DistanceDt.java index 0faf303ede8..b5a6a94bf19 100644 --- a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/DistanceDt.java +++ b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/DistanceDt.java @@ -20,10 +20,6 @@ package ca.uhn.fhir.model.dstu2.composite; import ca.uhn.fhir.model.api.annotation.DatatypeDef; -import ca.uhn.fhir.model.dstu2.composite.QuantityDt; -import ca.uhn.fhir.model.primitive.IntegerDt; -@DatatypeDef(name="DistanceDt", profileOf=QuantityDt.class) -public class DistanceDt extends QuantityDt { - -} +@DatatypeDef(name = "DistanceDt", profileOf = QuantityDt.class) +public class DistanceDt extends QuantityDt {} diff --git a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/DurationDt.java 
b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/DurationDt.java index 84aa3255894..4520ca29009 100644 --- a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/DurationDt.java +++ b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/DurationDt.java @@ -20,9 +20,6 @@ package ca.uhn.fhir.model.dstu2.composite; import ca.uhn.fhir.model.api.annotation.DatatypeDef; -import ca.uhn.fhir.model.dstu2.composite.QuantityDt; -@DatatypeDef(name="DurationDt", profileOf=QuantityDt.class) -public class DurationDt extends QuantityDt { - -} +@DatatypeDef(name = "DurationDt", profileOf = QuantityDt.class) +public class DurationDt extends QuantityDt {} diff --git a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/MoneyDt.java b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/MoneyDt.java index 7da7f80066f..99c58ae9750 100644 --- a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/MoneyDt.java +++ b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/MoneyDt.java @@ -22,32 +22,31 @@ package ca.uhn.fhir.model.dstu2.composite; import ca.uhn.fhir.model.api.annotation.DatatypeDef; import ca.uhn.fhir.model.api.annotation.SimpleSetter; -@DatatypeDef(name="Money", profileOf=QuantityDt.class) +@DatatypeDef(name = "Money", profileOf = QuantityDt.class) public class MoneyDt extends QuantityDt { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 1L; - /** - * Constructor - */ - public MoneyDt() { - // nothing - } + /** + * Constructor + */ + public MoneyDt() { + // nothing + } - /** - * Constructor - */ - @SimpleSetter - public MoneyDt(@SimpleSetter.Parameter(name = "theValue") double theValue) { - setValue(theValue); - } - - /** - * Constructor - */ - @SimpleSetter - public MoneyDt(@SimpleSetter.Parameter(name = "theValue") long theValue) { - setValue(theValue); - } + /** + * Constructor + */ + @SimpleSetter + public MoneyDt(@SimpleSetter.Parameter(name = "theValue") double theValue) { + setValue(theValue); + } + /** + * Constructor + */ + @SimpleSetter + public MoneyDt(@SimpleSetter.Parameter(name = "theValue") long theValue) { + setValue(theValue); + } } diff --git a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/NarrativeDt.java b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/NarrativeDt.java index d6378c118e6..74413cea569 100644 --- a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/NarrativeDt.java +++ b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/NarrativeDt.java @@ -19,8 +19,6 @@ */ package ca.uhn.fhir.model.dstu2.composite; -import java.util.List; - import ca.uhn.fhir.model.api.IElement; import ca.uhn.fhir.model.api.annotation.Child; import ca.uhn.fhir.model.api.annotation.DatatypeDef; @@ -30,6 +28,8 @@ import ca.uhn.fhir.model.primitive.BoundCodeDt; import ca.uhn.fhir.model.primitive.CodeDt; import ca.uhn.fhir.model.primitive.XhtmlDt; +import java.util.List; + /** * HAPI/FHIR Narrative Datatype * (A human-readable formatted text, including images) @@ -37,26 +37,26 @@ import ca.uhn.fhir.model.primitive.XhtmlDt; *

* Definition: * A human-readable formatted text, including images - * + * * * * Requirements: - * - * + * + *

    */ -@DatatypeDef(name="Narrative") +@DatatypeDef(name = "Narrative") public class NarrativeDt extends BaseNarrativeDt { - @Child(name="status", type=CodeDt.class, order=0, min=1, max=1) + @Child(name = "status", type = CodeDt.class, order = 0, min = 1, max = 1) private BoundCodeDt myStatus; - - @Child(name="div", type=XhtmlDt.class, order=1, min=1, max=1) + + @Child(name = "div", type = XhtmlDt.class, order = 1, min = 1, max = 1) private XhtmlDt myDiv; - + public NarrativeDt() { // nothing } - + public NarrativeDt(XhtmlDt theDiv, NarrativeStatusEnum theStatus) { setDiv(theDiv); setStatus(theStatus); @@ -64,13 +64,12 @@ public class NarrativeDt extends BaseNarrativeDt { @Override public boolean isEmpty() { - return ca.uhn.fhir.util.ElementUtil.isEmpty( myStatus, myDiv ); + return ca.uhn.fhir.util.ElementUtil.isEmpty(myStatus, myDiv); } - @Override public List getAllPopulatedChildElementsOfType(Class theType) { - return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements( theType, myStatus, myDiv ); + return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements(theType, myStatus, myDiv); } /** @@ -78,24 +77,24 @@ public class NarrativeDt extends BaseNarrativeDt { * creating it if it does * not exist. Will not return null. * - *

- * <b>Definition:</b>
- * The status of the narrative - whether it's entirely generated (from just the defined data or the extensions too), or whether a human authored it and it may contain additional data
- * </p> 
+ * <p>
+ * <b>Definition:</b>
+ * The status of the narrative - whether it's entirely generated (from just the defined data or the extensions too), or whether a human authored it and it may contain additional data
+ * </p>
    */ public BoundCodeDt getStatusElement() { return getStatus(); } - + /** * Gets the value(s) for status (generated | extensions | additional). * creating it if it does * not exist. Will not return null. * - *

- * <b>Definition:</b>
- * The status of the narrative - whether it's entirely generated (from just the defined data or the extensions too), or whether a human authored it and it may contain additional data
- * </p> 
+ * <p>
+ * <b>Definition:</b>
+ * The status of the narrative - whether it's entirely generated (from just the defined data or the extensions too), or whether a human authored it and it may contain additional data
+ * </p>
    */ @Override public BoundCodeDt getStatus() { @@ -108,10 +107,10 @@ public class NarrativeDt extends BaseNarrativeDt { /** * Sets the value(s) for status (generated | extensions | additional) * - *

- * <b>Definition:</b>
- * The status of the narrative - whether it's entirely generated (from just the defined data or the extensions too), or whether a human authored it and it may contain additional data
- * </p> 
+ * <p>
+ * <b>Definition:</b>
+ * The status of the narrative - whether it's entirely generated (from just the defined data or the extensions too), or whether a human authored it and it may contain additional data
+ * </p>
    */ public void setStatus(BoundCodeDt theValue) { myStatus = theValue; @@ -120,39 +119,38 @@ public class NarrativeDt extends BaseNarrativeDt { /** * Sets the value(s) for status (generated | extensions | additional) * - *

- * <b>Definition:</b>
- * The status of the narrative - whether it's entirely generated (from just the defined data or the extensions too), or whether a human authored it and it may contain additional data
- * </p> 
+ * <p>
+ * <b>Definition:</b>
+ * The status of the narrative - whether it's entirely generated (from just the defined data or the extensions too), or whether a human authored it and it may contain additional data
+ * </p>
    */ public void setStatus(NarrativeStatusEnum theValue) { getStatus().setValueAsEnum(theValue); } - /** * Gets the value(s) for div (Limited xhtml content). * creating it if it does * not exist. Will not return null. * - *

- * <b>Definition:</b>
- * The actual narrative content, a stripped down version of XHTML
- * </p> 
+ * <p>
+ * <b>Definition:</b>
+ * The actual narrative content, a stripped down version of XHTML
+ * </p>
    */ public XhtmlDt getDivElement() { return getDiv(); } - + /** * Gets the value(s) for div (Limited xhtml content). * creating it if it does * not exist. Will not return null. * - *

- * <b>Definition:</b>
- * The actual narrative content, a stripped down version of XHTML
- * </p> 
+ * <p>
+ * <b>Definition:</b>
+ * The actual narrative content, a stripped down version of XHTML
+ * </p>
    */ @Override public XhtmlDt getDiv() { @@ -165,10 +163,10 @@ public class NarrativeDt extends BaseNarrativeDt { /** * Sets the value(s) for div (Limited xhtml content) * - *

- * <b>Definition:</b>
- * The actual narrative content, a stripped down version of XHTML
- * </p> 
+ * <p>
+ * <b>Definition:</b>
+ * The actual narrative content, a stripped down version of XHTML
+ * </p>
    */ public void setDiv(XhtmlDt theValue) { myDiv = theValue; @@ -181,8 +179,4 @@ public class NarrativeDt extends BaseNarrativeDt { public void setDiv(String theTextDiv) { myDiv = new XhtmlDt(theTextDiv); } - - - - } diff --git a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/ResourceReferenceDt.java b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/ResourceReferenceDt.java index cfca3417da4..318dd96aabc 100644 --- a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/ResourceReferenceDt.java +++ b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/ResourceReferenceDt.java @@ -18,27 +18,8 @@ * #L% */ - - - - - - - - - - - - - - - package ca.uhn.fhir.model.dstu2.composite; -import java.util.List; - -import org.hl7.fhir.instance.model.api.IIdType; - import ca.uhn.fhir.model.api.ICompositeDatatype; import ca.uhn.fhir.model.api.IElement; import ca.uhn.fhir.model.api.IResource; @@ -49,6 +30,9 @@ import ca.uhn.fhir.model.api.annotation.SimpleSetter; import ca.uhn.fhir.model.base.composite.BaseResourceReferenceDt; import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.model.primitive.StringDt; +import org.hl7.fhir.instance.model.api.IIdType; + +import java.util.List; /** * HAPI/FHIR ResourceReferenceDt Datatype @@ -57,17 +41,15 @@ import ca.uhn.fhir.model.primitive.StringDt; *

  * <p>
  * <b>Definition:</b>
  * A reference from one resource to another
- * </p> 
+ * </p>
  *
  * <p>
  * <b>Requirements:</b>
- * 
- * </p> 
+ *
+ * </p>
    */ -@DatatypeDef(name="reference") -public class ResourceReferenceDt - extends BaseResourceReferenceDt implements ICompositeDatatype -{ +@DatatypeDef(name = "reference") +public class ResourceReferenceDt extends BaseResourceReferenceDt implements ICompositeDatatype { /** * Constructor @@ -84,7 +66,7 @@ public class ResourceReferenceDt * a hard-and-fast rule however, as the server can be configured to not serialized this resource, or to load an ID * and contain even if this constructor is not used. *

    - * + * * @param theResource * The resource instance */ @@ -96,7 +78,7 @@ public class ResourceReferenceDt /** * Constructor which accepts a reference directly (this can be an ID, a partial/relative URL or a complete/absolute * URL) - * + * * @param theId * The reference itself */ @@ -107,7 +89,7 @@ public class ResourceReferenceDt /** * Constructor which accepts a reference directly (this can be an ID, a partial/relative URL or a complete/absolute * URL) - * + * * @param theResourceId * The reference itself */ @@ -118,7 +100,7 @@ public class ResourceReferenceDt /** * Constructor which accepts a reference directly (this can be an ID, a partial/relative URL or a complete/absolute * URL) - * + * * @param theResourceId * The reference itself */ @@ -126,26 +108,25 @@ public class ResourceReferenceDt setReference(theResourceId); } - @Child(name="reference", type=IdDt.class, order=0, min=0, max=1) + @Child(name = "reference", type = IdDt.class, order = 0, min = 0, max = 1) @Description( - shortDefinition="Relative, internal or absolute URL reference", - formalDefinition="A reference to a location at which the other resource is found. The reference may a relative reference, in which case it is relative to the service base URL, or an absolute URL that resolves to the location where the resource is found. The reference may be version specific or not. If the reference is not to a FHIR RESTful server, then it should be assumed to be version specific. Internal fragment references (start with '#') refer to contained resources" - ) + shortDefinition = "Relative, internal or absolute URL reference", + formalDefinition = + "A reference to a location at which the other resource is found. The reference may a relative reference, in which case it is relative to the service base URL, or an absolute URL that resolves to the location where the resource is found. The reference may be version specific or not. If the reference is not to a FHIR RESTful server, then it should be assumed to be version specific. Internal fragment references (start with '#') refer to contained resources") private IdDt myReference; - - @Child(name="display", type=StringDt.class, order=1, min=0, max=1) + + @Child(name = "display", type = StringDt.class, order = 1, min = 0, max = 1) @Description( - shortDefinition="Text alternative for the resource", - formalDefinition="Plain text narrative that identifies the resource in addition to the resource reference" - ) + shortDefinition = "Text alternative for the resource", + formalDefinition = + "Plain text narrative that identifies the resource in addition to the resource reference") private StringDt myDisplay; - @Override public boolean isEmpty() { - return super.isBaseEmpty() && ca.uhn.fhir.util.ElementUtil.isEmpty( myReference, myDisplay); + return super.isBaseEmpty() && ca.uhn.fhir.util.ElementUtil.isEmpty(myReference, myDisplay); } - + @Override public List getAllPopulatedChildElementsOfType(Class theType) { return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements(theType, myReference, myDisplay); @@ -156,10 +137,10 @@ public class ResourceReferenceDt * creating it if it does * not exist. Will not return null. * - *

- * <b>Definition:</b>
- * A reference to a location at which the other resource is found. The reference may a relative reference, in which case it is relative to the service base URL, or an absolute URL that resolves to the location where the resource is found. The reference may be version specific or not. If the reference is not to a FHIR RESTful server, then it should be assumed to be version specific. Internal fragment references (start with '#') refer to contained resources
- * </p> 
+ * <p>
+ * <b>Definition:</b>
+ * A reference to a location at which the other resource is found. The reference may a relative reference, in which case it is relative to the service base URL, or an absolute URL that resolves to the location where the resource is found. The reference may be version specific or not. If the reference is not to a FHIR RESTful server, then it should be assumed to be version specific. Internal fragment references (start with '#') refer to contained resources
+ * </p>
    */ @Override public IdDt getReference() { @@ -174,14 +155,13 @@ public class ResourceReferenceDt return getReference(); } - /** * Sets the value(s) for reference (Relative, internal or absolute URL reference) * - *

- * <b>Definition:</b>
- * A reference to a location at which the other resource is found. The reference may a relative reference, in which case it is relative to the service base URL, or an absolute URL that resolves to the location where the resource is found. The reference may be version specific or not. If the reference is not to a FHIR RESTful server, then it should be assumed to be version specific. Internal fragment references (start with '#') refer to contained resources
- * </p> 
+ * <p>
+ * <b>Definition:</b>
+ * A reference to a location at which the other resource is found. The reference may a relative reference, in which case it is relative to the service base URL, or an absolute URL that resolves to the location where the resource is found. The reference may be version specific or not. If the reference is not to a FHIR RESTful server, then it should be assumed to be version specific. Internal fragment references (start with '#') refer to contained resources
+ * </p>
    */ @Override public ResourceReferenceDt setReference(IdDt theValue) { @@ -189,32 +169,31 @@ public class ResourceReferenceDt return this; } - /** + /** * Sets the value for reference (Relative, internal or absolute URL reference) * - *

- * <b>Definition:</b>
- * A reference to a location at which the other resource is found. The reference may a relative reference, in which case it is relative to the service base URL, or an absolute URL that resolves to the location where the resource is found. The reference may be version specific or not. If the reference is not to a FHIR RESTful server, then it should be assumed to be version specific. Internal fragment references (start with '#') refer to contained resources
- * </p> 
+ * <p>
+ * <b>Definition:</b>
+ * A reference to a location at which the other resource is found. The reference may a relative reference, in which case it is relative to the service base URL, or an absolute URL that resolves to the location where the resource is found. The reference may be version specific or not. If the reference is not to a FHIR RESTful server, then it should be assumed to be version specific. Internal fragment references (start with '#') refer to contained resources
+ * </p>
    */ @Override public ResourceReferenceDt setReference(String theId) { - myReference = new IdDt(theId); - return this; + myReference = new IdDt(theId); + return this; } - /** * Gets the value(s) for display (Text alternative for the resource). * creating it if it does * not exist. Will not return null. * - *

- * <b>Definition:</b>
- * Plain text narrative that identifies the resource in addition to the resource reference
- * </p> 
+ * <p>
+ * <b>Definition:</b>
+ * Plain text narrative that identifies the resource in addition to the resource reference
+ * </p>
    */ - public StringDt getDisplay() { + public StringDt getDisplay() { if (myDisplay == null) { myDisplay = new StringDt(); } @@ -224,36 +203,32 @@ public class ResourceReferenceDt /** * Sets the value(s) for display (Text alternative for the resource) * - *

- * <b>Definition:</b>
- * Plain text narrative that identifies the resource in addition to the resource reference
- * </p> 
+ * <p>
+ * <b>Definition:</b>
+ * Plain text narrative that identifies the resource in addition to the resource reference
+ * </p>
    */ public ResourceReferenceDt setDisplay(StringDt theValue) { myDisplay = theValue; return this; } - /** + /** * Sets the value for display (Text alternative for the resource) * - *

- * <b>Definition:</b>
- * Plain text narrative that identifies the resource in addition to the resource reference
- * </p> 
+ * <p>
+ * <b>Definition:</b>
+ * Plain text narrative that identifies the resource in addition to the resource reference
+ * </p>
    */ @Override public ResourceReferenceDt setDisplay(String theString) { - myDisplay = new StringDt(theString); - return this; + myDisplay = new StringDt(theString); + return this; } @Override public StringDt getDisplayElement() { return getDisplay(); } - - - - } diff --git a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/SimpleQuantityDt.java b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/SimpleQuantityDt.java index 50dc4d666a0..1458a8df90b 100644 --- a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/SimpleQuantityDt.java +++ b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/composite/SimpleQuantityDt.java @@ -21,10 +21,9 @@ package ca.uhn.fhir.model.dstu2.composite; import ca.uhn.fhir.model.api.annotation.DatatypeDef; import ca.uhn.fhir.model.api.annotation.SimpleSetter; -import ca.uhn.fhir.model.dstu2.composite.QuantityDt; import ca.uhn.fhir.model.dstu2.valueset.QuantityComparatorEnum; -@DatatypeDef(name="SimpleQuantity", profileOf=QuantityDt.class) +@DatatypeDef(name = "SimpleQuantity", profileOf = QuantityDt.class) public class SimpleQuantityDt extends QuantityDt { private static final long serialVersionUID = 1L; @@ -36,12 +35,11 @@ public class SimpleQuantityDt extends QuantityDt { // nothing } - /** * Constructor */ @SimpleSetter - public SimpleQuantityDt(@SimpleSetter.Parameter(name="theValue") double theValue) { + public SimpleQuantityDt(@SimpleSetter.Parameter(name = "theValue") double theValue) { setValue(theValue); } @@ -49,15 +47,17 @@ public class SimpleQuantityDt extends QuantityDt { * Constructor */ @SimpleSetter - public SimpleQuantityDt(@SimpleSetter.Parameter(name="theValue") long theValue) { + public SimpleQuantityDt(@SimpleSetter.Parameter(name = "theValue") long theValue) { setValue(theValue); } - + /** * Constructor */ @SimpleSetter - public SimpleQuantityDt(@SimpleSetter.Parameter(name = "theComparator") QuantityComparatorEnum theComparator, @SimpleSetter.Parameter(name = "theValue") double theValue, + public SimpleQuantityDt( + @SimpleSetter.Parameter(name = "theComparator") QuantityComparatorEnum theComparator, + @SimpleSetter.Parameter(name = "theValue") double theValue, @SimpleSetter.Parameter(name = "theUnits") String theUnits) { setValue(theValue); setComparator(theComparator); @@ -68,7 +68,9 @@ public class SimpleQuantityDt extends QuantityDt { * Constructor */ @SimpleSetter - public SimpleQuantityDt(@SimpleSetter.Parameter(name = "theComparator") QuantityComparatorEnum theComparator, @SimpleSetter.Parameter(name = "theValue") long theValue, + public SimpleQuantityDt( + @SimpleSetter.Parameter(name = "theComparator") QuantityComparatorEnum theComparator, + @SimpleSetter.Parameter(name = "theValue") long theValue, @SimpleSetter.Parameter(name = "theUnits") String theUnits) { setValue(theValue); setComparator(theComparator); @@ -79,7 +81,10 @@ public class SimpleQuantityDt extends QuantityDt { * Constructor */ @SimpleSetter - public SimpleQuantityDt(@SimpleSetter.Parameter(name="theValue") double theValue, @SimpleSetter.Parameter(name="theSystem") String theSystem, @SimpleSetter.Parameter(name="theUnits") String theUnits) { + public SimpleQuantityDt( + @SimpleSetter.Parameter(name = "theValue") double theValue, + @SimpleSetter.Parameter(name = "theSystem") String theSystem, + @SimpleSetter.Parameter(name = "theUnits") String theUnits) { setValue(theValue); setSystem(theSystem); setUnit(theUnits); @@ -89,10 +94,12 @@ public class SimpleQuantityDt extends 
QuantityDt { * Constructor */ @SimpleSetter - public SimpleQuantityDt(@SimpleSetter.Parameter(name="theValue") long theValue, @SimpleSetter.Parameter(name="theSystem") String theSystem, @SimpleSetter.Parameter(name="theUnits") String theUnits) { + public SimpleQuantityDt( + @SimpleSetter.Parameter(name = "theValue") long theValue, + @SimpleSetter.Parameter(name = "theSystem") String theSystem, + @SimpleSetter.Parameter(name = "theUnits") String theUnits) { setValue(theValue); setSystem(theSystem); setUnit(theUnits); } - } diff --git a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/resource/BaseResource.java b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/resource/BaseResource.java index dfdb3afb668..831c3ad1dea 100644 --- a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/resource/BaseResource.java +++ b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/model/dstu2/resource/BaseResource.java @@ -20,19 +20,6 @@ package ca.uhn.fhir.model.dstu2.resource; import ca.uhn.fhir.i18n.Msg; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.List; - -import org.apache.commons.lang3.Validate; -import org.apache.commons.lang3.builder.ToStringBuilder; -import org.apache.commons.lang3.builder.ToStringStyle; -import org.hl7.fhir.instance.model.api.IBaseCoding; -import org.hl7.fhir.instance.model.api.IBaseMetaType; -import org.hl7.fhir.instance.model.api.IIdType; -import org.hl7.fhir.instance.model.api.IPrimitiveType; - import ca.uhn.fhir.model.api.BaseElement; import ca.uhn.fhir.model.api.IResource; import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum; @@ -50,6 +37,18 @@ import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.model.primitive.InstantDt; import ca.uhn.fhir.rest.gclient.StringClientParam; import ca.uhn.fhir.util.ElementUtil; +import org.apache.commons.lang3.Validate; +import org.apache.commons.lang3.builder.ToStringBuilder; +import org.apache.commons.lang3.builder.ToStringStyle; +import org.hl7.fhir.instance.model.api.IBaseCoding; +import org.hl7.fhir.instance.model.api.IBaseMetaType; +import org.hl7.fhir.instance.model.api.IIdType; +import org.hl7.fhir.instance.model.api.IPrimitiveType; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Date; +import java.util.List; public abstract class BaseResource extends BaseElement implements IResource { @@ -62,17 +61,16 @@ public abstract class BaseResource extends BaseElement implements IResource { *

    */ public static final StringClientParam RES_ID = new StringClientParam(BaseResource.SP_RES_ID); - + /** * Search parameter constant for _id */ - @SearchParamDefinition(name="_id", path="", description="The ID of the resource", type="string" ) + @SearchParamDefinition(name = "_id", path = "", description = "The ID of the resource", type = "string") public static final String SP_RES_ID = "_id"; @Child(name = "contained", order = 2, min = 0, max = 1) private ContainedDt myContained; - private IdDt myId; @Child(name = "language", order = 0, min = 0, max = 1) @@ -206,7 +204,10 @@ public abstract class BaseResource extends BaseElement implements IResource { return Collections.emptyList(); } for (BaseCodingDt next : labelsList) { - retVal.add(new CodingDt(next.getSystemElement().getValue(), next.getCodeElement().getValue()).setDisplay(next.getDisplayElement().getValue())); + retVal.add(new CodingDt( + next.getSystemElement().getValue(), + next.getCodeElement().getValue()) + .setDisplay(next.getDisplayElement().getValue())); } return Collections.unmodifiableList(retVal); } @@ -305,7 +306,7 @@ public abstract class BaseResource extends BaseElement implements IResource { public void setContained(ContainedDt theContained) { myContained = theContained; } - + @Override public void setId(IdDt theId) { myId = theId; @@ -354,5 +355,4 @@ public abstract class BaseResource extends BaseElement implements IResource { b.append("id", getId().toUnqualified()); return b.toString(); } - } diff --git a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/rest/server/provider/dstu2/Dstu2BundleFactory.java b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/rest/server/provider/dstu2/Dstu2BundleFactory.java index 973678846ad..a64ae6236fa 100644 --- a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/rest/server/provider/dstu2/Dstu2BundleFactory.java +++ b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/rest/server/provider/dstu2/Dstu2BundleFactory.java @@ -40,13 +40,13 @@ import ca.uhn.fhir.util.ResourceReferenceInfo; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.UUID; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -60,7 +60,12 @@ public class Dstu2BundleFactory implements IVersionSpecificBundleFactory { } @Override - public void addResourcesToBundle(List theResult, BundleTypeEnum theBundleType, String theServerBase, BundleInclusionRule theBundleInclusionRule, Set theIncludes) { + public void addResourcesToBundle( + List theResult, + BundleTypeEnum theBundleType, + String theServerBase, + BundleInclusionRule theBundleInclusionRule, + Set theIncludes) { ensureBundle(); List includedResources = new ArrayList(); @@ -87,11 +92,13 @@ public class Dstu2BundleFactory implements IVersionSpecificBundleFactory { List addedResourcesThisPass = new ArrayList(); for (ResourceReferenceInfo nextRefInfo : references) { - if (theBundleInclusionRule != null && !theBundleInclusionRule.shouldIncludeReferencedResource(nextRefInfo, theIncludes)) { + if (theBundleInclusionRule != null + && !theBundleInclusionRule.shouldIncludeReferencedResource(nextRefInfo, theIncludes)) { continue; } - IResource nextRes = (IResource) nextRefInfo.getResourceReference().getResource(); + IResource nextRes = + (IResource) 
nextRefInfo.getResourceReference().getResource(); if (nextRes != null) { if (nextRes.getId().hasIdPart()) { if (containedIds.contains(nextRes.getId().getValue())) { @@ -109,7 +116,6 @@ public class Dstu2BundleFactory implements IVersionSpecificBundleFactory { addedResourceIds.add(id); addedResourcesThisPass.add(nextRes); } - } } } @@ -119,7 +125,8 @@ public class Dstu2BundleFactory implements IVersionSpecificBundleFactory { // Linked resources may themselves have linked resources references = new ArrayList(); for (IResource iResource : addedResourcesThisPass) { - List newReferences = myContext.newTerser().getAllResourceReferences(iResource); + List newReferences = + myContext.newTerser().getAllResourceReferences(iResource); references.addAll(newReferences); } } while (references.isEmpty() == false); @@ -145,12 +152,14 @@ public class Dstu2BundleFactory implements IVersionSpecificBundleFactory { entry.setResource(next).getSearch().setMode(SearchEntryModeEnum.INCLUDE); populateBundleEntryFullUrl(next, entry); } - } @Override - public void addRootPropertiesToBundle(String theId, @Nonnull BundleLinks theBundleLinks, Integer theTotalResults, - IPrimitiveType theLastUpdated) { + public void addRootPropertiesToBundle( + String theId, + @Nonnull BundleLinks theBundleLinks, + Integer theTotalResults, + IPrimitiveType theLastUpdated) { ensureBundle(); myBase = theBundleLinks.serverBase; @@ -240,7 +249,8 @@ public class Dstu2BundleFactory implements IVersionSpecificBundleFactory { IdDt id = new IdDt(next.getResponse().getLocation()); String resourceType = id.getResourceType(); if (isNotBlank(resourceType)) { - IResource res = (IResource) myContext.getResourceDefinition(resourceType).newInstance(); + IResource res = (IResource) + myContext.getResourceDefinition(resourceType).newInstance(); res.setId(id); retVal.add(res); } @@ -248,5 +258,4 @@ public class Dstu2BundleFactory implements IVersionSpecificBundleFactory { } return retVal; } - } diff --git a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/rest/server/provider/dstu2/ServerConformanceProvider.java b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/rest/server/provider/dstu2/ServerConformanceProvider.java index 61d97987c27..bedc07b6338 100644 --- a/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/rest/server/provider/dstu2/ServerConformanceProvider.java +++ b/hapi-fhir-structures-dstu2/src/main/java/ca/uhn/fhir/rest/server/provider/dstu2/ServerConformanceProvider.java @@ -19,10 +19,10 @@ */ package ca.uhn.fhir.rest.server.provider.dstu2; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.context.RuntimeSearchParam; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.dstu2.resource.Conformance; import ca.uhn.fhir.model.dstu2.resource.Conformance.Rest; import ca.uhn.fhir.model.dstu2.resource.Conformance.RestResource; @@ -40,19 +40,19 @@ import ca.uhn.fhir.rest.annotation.Read; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; import ca.uhn.fhir.rest.api.server.RequestDetails; -import ca.uhn.fhir.rest.server.*; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; -import ca.uhn.fhir.rest.server.method.*; import ca.uhn.fhir.rest.server.method.OperationMethodBinding.ReturnType; +import ca.uhn.fhir.rest.server.*; +import ca.uhn.fhir.rest.server.method.*; import ca.uhn.fhir.rest.server.util.BaseServerCapabilityStatementProvider; import org.apache.commons.lang3.StringUtils; import 
org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; +import java.util.Map.Entry; +import java.util.*; import javax.servlet.ServletContext; import javax.servlet.http.HttpServletRequest; -import java.util.*; -import java.util.Map.Entry; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -60,7 +60,8 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; /** * Server FHIR Provider which serves the conformance statement for a RESTful server implementation */ -public class ServerConformanceProvider extends BaseServerCapabilityStatementProvider implements IServerConformanceProvider { +public class ServerConformanceProvider extends BaseServerCapabilityStatementProvider + implements IServerConformanceProvider { private String myPublisher = "Not provided"; @@ -88,11 +89,13 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv super(theServerConfiguration); } - private void checkBindingForSystemOps(Rest rest, Set systemOps, BaseMethodBinding nextMethodBinding) { + private void checkBindingForSystemOps( + Rest rest, Set systemOps, BaseMethodBinding nextMethodBinding) { if (nextMethodBinding.getRestOperationType() != null) { String sysOpCode = nextMethodBinding.getRestOperationType().getCode(); if (sysOpCode != null) { - SystemRestfulInteractionEnum sysOp = SystemRestfulInteractionEnum.VALUESET_BINDER.fromCodeString(sysOpCode); + SystemRestfulInteractionEnum sysOp = + SystemRestfulInteractionEnum.VALUESET_BINDER.fromCodeString(sysOpCode); if (sysOp == null) { return; } @@ -115,7 +118,8 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv resourceToMethods.get(resourceName).add(nextMethodBinding); } } - for (BaseMethodBinding nextMethodBinding : getServerConfiguration(theRequestDetails).getServerBindings()) { + for (BaseMethodBinding nextMethodBinding : + getServerConfiguration(theRequestDetails).getServerBindings()) { String resourceName = ""; if (resourceToMethods.containsKey(resourceName) == false) { resourceToMethods.put(resourceName, new ArrayList()); @@ -126,7 +130,8 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv } private DateTimeDt conformanceDate(RequestDetails theRequestDetails) { - IPrimitiveType buildDate = getServerConfiguration(theRequestDetails).getConformanceDate(); + IPrimitiveType buildDate = + getServerConfiguration(theRequestDetails).getConformanceDate(); if (buildDate != null && buildDate.getValue() != null) { try { return new DateTimeDt(buildDate.getValueAsString()); @@ -186,15 +191,19 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv retVal.setPublisher(myPublisher); retVal.setDate(conformanceDate(theRequestDetails)); retVal.setFhirVersion(FhirVersionEnum.DSTU2.getFhirVersionString()); - retVal.setAcceptUnknown(UnknownContentCodeEnum.UNKNOWN_EXTENSIONS); // TODO: make this configurable - this is a fairly big effort since the parser + retVal.setAcceptUnknown( + UnknownContentCodeEnum + .UNKNOWN_EXTENSIONS); // TODO: make this configurable - this is a fairly big effort since the + // parser // needs to be modified to actually allow it - ServletContext servletContext = (ServletContext) (theRequest == null ? 
null : theRequest.getAttribute(RestfulServer.SERVLET_CONTEXT_ATTRIBUTE)); - String serverBase = serverConfiguration.getServerAddressStrategy().determineServerBase(servletContext, theRequest); - retVal - .getImplementation() - .setUrl(serverBase) - .setDescription(serverConfiguration.getImplementationDescription()); + ServletContext servletContext = (ServletContext) + (theRequest == null ? null : theRequest.getAttribute(RestfulServer.SERVLET_CONTEXT_ATTRIBUTE)); + String serverBase = + serverConfiguration.getServerAddressStrategy().determineServerBase(servletContext, theRequest); + retVal.getImplementation() + .setUrl(serverBase) + .setDescription(serverConfiguration.getImplementationDescription()); retVal.setKind(ConformanceStatementKindEnum.INSTANCE); retVal.getSoftware().setName(serverConfiguration.getServerName()); @@ -215,7 +224,8 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv Set resourceOps = new HashSet<>(); RestResource resource = rest.addResource(); String resourceName = nextEntry.getKey(); - RuntimeResourceDefinition def = serverConfiguration.getFhirContext().getResourceDefinition(resourceName); + RuntimeResourceDefinition def = + serverConfiguration.getFhirContext().getResourceDefinition(resourceName); resource.getTypeElement().setValue(def.getName()); resource.getProfile().setReference(new IdDt(def.getResourceProfile(serverBase))); @@ -225,9 +235,11 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv // Conformance.RestResourceSearchParam>(); for (BaseMethodBinding nextMethodBinding : nextEntry.getValue()) { if (nextMethodBinding.getRestOperationType() != null) { - String resOpCode = nextMethodBinding.getRestOperationType().getCode(); + String resOpCode = + nextMethodBinding.getRestOperationType().getCode(); if (resOpCode != null) { - TypeRestfulInteractionEnum resOp = TypeRestfulInteractionEnum.VALUESET_BINDER.fromCodeString(resOpCode); + TypeRestfulInteractionEnum resOp = + TypeRestfulInteractionEnum.VALUESET_BINDER.fromCodeString(resOpCode); if (resOp != null) { if (resourceOps.contains(resOp) == false) { resourceOps.add(resOp); @@ -249,9 +261,11 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv break; case DELETE: if (nextMethodBinding.isSupportsConditionalMultiple()) { - resource.setConditionalDelete(ConditionalDeleteStatusEnum.MULTIPLE_DELETES_SUPPORTED); + resource.setConditionalDelete( + ConditionalDeleteStatusEnum.MULTIPLE_DELETES_SUPPORTED); } else { - resource.setConditionalDelete(ConditionalDeleteStatusEnum.SINGLE_DELETES_SUPPORTED); + resource.setConditionalDelete( + ConditionalDeleteStatusEnum.SINGLE_DELETES_SUPPORTED); } break; case UPDATE: @@ -268,21 +282,27 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv checkBindingForSystemOps(rest, systemOps, nextMethodBinding); if (nextMethodBinding instanceof SearchMethodBinding) { - handleSearchMethodBinding(resource, def, includes, (SearchMethodBinding) nextMethodBinding, theRequestDetails); + handleSearchMethodBinding( + resource, def, includes, (SearchMethodBinding) nextMethodBinding, theRequestDetails); } else if (nextMethodBinding instanceof OperationMethodBinding) { OperationMethodBinding methodBinding = (OperationMethodBinding) nextMethodBinding; String opName = bindings.getOperationBindingToId().get(methodBinding); if (operationNames.add(opName)) { // Only add each operation (by name) once - 
rest.addOperation().setName(methodBinding.getName().substring(1)).getDefinition().setReference("OperationDefinition/" + opName); + rest.addOperation() + .setName(methodBinding.getName().substring(1)) + .getDefinition() + .setReference("OperationDefinition/" + opName); } } Collections.sort(resource.getInteraction(), new Comparator() { @Override public int compare(RestResourceInteraction theO1, RestResourceInteraction theO2) { - TypeRestfulInteractionEnum o1 = theO1.getCodeElement().getValueAsEnum(); - TypeRestfulInteractionEnum o2 = theO2.getCodeElement().getValueAsEnum(); + TypeRestfulInteractionEnum o1 = + theO1.getCodeElement().getValueAsEnum(); + TypeRestfulInteractionEnum o2 = + theO2.getCodeElement().getValueAsEnum(); if (o1 == null && o2 == null) { return 0; } @@ -295,7 +315,6 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv return o1.ordinal() - o2.ordinal(); } }); - } for (String nextInclude : includes) { @@ -308,7 +327,10 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv OperationMethodBinding methodBinding = (OperationMethodBinding) nextMethodBinding; String opName = bindings.getOperationBindingToId().get(methodBinding); if (operationNames.add(opName)) { - rest.addOperation().setName(methodBinding.getName().substring(1)).getDefinition().setReference("OperationDefinition/" + opName); + rest.addOperation() + .setName(methodBinding.getName().substring(1)) + .getDefinition() + .setReference("OperationDefinition/" + opName); } } } @@ -318,7 +340,12 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv return retVal; } - private void handleSearchMethodBinding(RestResource resource, RuntimeResourceDefinition def, TreeSet includes, SearchMethodBinding searchMethodBinding, RequestDetails theRequestDetails) { + private void handleSearchMethodBinding( + RestResource resource, + RuntimeResourceDefinition def, + TreeSet includes, + SearchMethodBinding searchMethodBinding, + RequestDetails theRequestDetails) { includes.addAll(searchMethodBinding.getIncludes()); List params = searchMethodBinding.getParameters(); @@ -368,10 +395,7 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv } String finalNextParamUnchainedName = nextParamUnchainedName; - RestResourceSearchParam param = - resource - .getSearchParam() - .stream() + RestResourceSearchParam param = resource.getSearchParam().stream() .filter(t -> t.getName().equals(finalNextParamUnchainedName)) .findFirst() .orElseGet(() -> resource.addSearchParam()); @@ -391,10 +415,13 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv param.setDocumentation(nextParamDescription); if (nextParameter.getParamType() != null) { - param.getTypeElement().setValueAsString(nextParameter.getParamType().getCode()); + param.getTypeElement() + .setValueAsString(nextParameter.getParamType().getCode()); } for (Class nextTarget : nextParameter.getDeclaredTypes()) { - RuntimeResourceDefinition targetDef = getServerConfiguration(theRequestDetails).getFhirContext().getResourceDefinition(nextTarget); + RuntimeResourceDefinition targetDef = getServerConfiguration(theRequestDetails) + .getFhirContext() + .getResourceDefinition(nextTarget); if (targetDef != null) { ResourceTypeEnum code = ResourceTypeEnum.VALUESET_BINDER.fromCodeString(targetDef.getName()); if (code != null) { @@ -406,7 +433,6 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv } } - @Read(type = 
OperationDefinition.class) public OperationDefinition readOperationDefinition(@IdParam IdDt theId, RequestDetails theRequestDetails) { if (theId == null || theId.hasIdPart() == false) { @@ -415,7 +441,8 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv RestfulServerConfiguration serverConfiguration = getServerConfiguration(theRequestDetails); Bindings bindings = serverConfiguration.provideBindings(); - List sharedDescriptions = bindings.getOperationIdToBindings().get(theId.getIdPart()); + List sharedDescriptions = + bindings.getOperationIdToBindings().get(theId.getIdPart()); if (sharedDescriptions == null || sharedDescriptions.isEmpty()) { throw new ResourceNotFoundException(Msg.code(1989) + theId); } @@ -527,5 +554,4 @@ public class ServerConformanceProvider extends BaseServerCapabilityStatementProv } }); } - } diff --git a/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/ctx/FhirDstu3.java b/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/ctx/FhirDstu3.java index b8baa3241b9..54340f6b8b7 100644 --- a/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/ctx/FhirDstu3.java +++ b/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/ctx/FhirDstu3.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -19,12 +19,12 @@ */ package org.hl7.fhir.dstu3.hapi.ctx; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.fhirpath.IFhirPath; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.IFhirVersion; import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.api.IVersionSpecificBundleFactory; @@ -41,84 +41,84 @@ import java.util.List; public class FhirDstu3 implements IFhirVersion { - private String myId; + private String myId; - @Override - public IFhirPath createFhirPathExecutor(FhirContext theFhirContext) { - return new FhirPathDstu3(theFhirContext); - } + @Override + public IFhirPath createFhirPathExecutor(FhirContext theFhirContext) { + return new FhirPathDstu3(theFhirContext); + } - @Override - public IBaseResource generateProfile(RuntimeResourceDefinition theRuntimeResourceDefinition, String theServerBase) { - StructureDefinition retVal = new StructureDefinition(); + @Override + public IBaseResource generateProfile(RuntimeResourceDefinition theRuntimeResourceDefinition, String theServerBase) { + StructureDefinition retVal = new StructureDefinition(); - RuntimeResourceDefinition def = theRuntimeResourceDefinition; + RuntimeResourceDefinition def = theRuntimeResourceDefinition; - myId = def.getId(); - if (StringUtils.isBlank(myId)) { - myId = theRuntimeResourceDefinition.getName().toLowerCase(); - } + myId = def.getId(); + if (StringUtils.isBlank(myId)) { + myId = theRuntimeResourceDefinition.getName().toLowerCase(); + } - retVal.setId(new IdDt(myId)); - return retVal; - } + retVal.setId(new IdDt(myId)); + return retVal; + } - @SuppressWarnings("rawtypes") - @Override - public Class getContainedType() 
{ - return List.class; - } + @SuppressWarnings("rawtypes") + @Override + public Class getContainedType() { + return List.class; + } - @Override - public InputStream getFhirVersionPropertiesFile() { - InputStream str = FhirDstu3.class.getResourceAsStream("/org/hl7/fhir/dstu3/model/fhirversion.properties"); - if (str == null) { - str = FhirDstu3.class.getResourceAsStream("/org/hl7/fhir/dstu3/model/fhirversion.properties"); - } - if (str == null) { - throw new ConfigurationException(Msg.code(609) + "Can not find model property file on classpath: " + "/ca/uhn/fhir/model/dstu3/fhirversion.properties"); - } - return str; - } + @Override + public InputStream getFhirVersionPropertiesFile() { + InputStream str = FhirDstu3.class.getResourceAsStream("/org/hl7/fhir/dstu3/model/fhirversion.properties"); + if (str == null) { + str = FhirDstu3.class.getResourceAsStream("/org/hl7/fhir/dstu3/model/fhirversion.properties"); + } + if (str == null) { + throw new ConfigurationException(Msg.code(609) + "Can not find model property file on classpath: " + + "/ca/uhn/fhir/model/dstu3/fhirversion.properties"); + } + return str; + } - @Override - public IPrimitiveType getLastUpdated(IBaseResource theResource) { - return ((Resource) theResource).getMeta().getLastUpdatedElement(); - } + @Override + public IPrimitiveType getLastUpdated(IBaseResource theResource) { + return ((Resource) theResource).getMeta().getLastUpdatedElement(); + } - @Override - public String getPathToSchemaDefinitions() { - return "/org/hl7/fhir/dstu3/model/schema"; - } + @Override + public String getPathToSchemaDefinitions() { + return "/org/hl7/fhir/dstu3/model/schema"; + } - @Override - public Class getResourceReferenceType() { - return Reference.class; - } + @Override + public Class getResourceReferenceType() { + return Reference.class; + } - @Override - public Object getServerVersion() { - return ReflectionUtil.newInstanceOfFhirServerType("org.hl7.fhir.dstu3.hapi.ctx.FhirServerDstu3"); - } + @Override + public Object getServerVersion() { + return ReflectionUtil.newInstanceOfFhirServerType("org.hl7.fhir.dstu3.hapi.ctx.FhirServerDstu3"); + } - @Override - public FhirVersionEnum getVersion() { - return FhirVersionEnum.DSTU3; - } + @Override + public FhirVersionEnum getVersion() { + return FhirVersionEnum.DSTU3; + } - @Override - public IVersionSpecificBundleFactory newBundleFactory(FhirContext theContext) { - return new Dstu3BundleFactory(theContext); - } + @Override + public IVersionSpecificBundleFactory newBundleFactory(FhirContext theContext) { + return new Dstu3BundleFactory(theContext); + } - @Override - public IBaseCoding newCodingDt() { - return new Coding(); - } - - @Override - public IIdType newIdType() { - return new IdType(); - } + @Override + public IBaseCoding newCodingDt() { + return new Coding(); + } + @Override + public IIdType newIdType() { + return new IdType(); + } } diff --git a/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/ctx/FhirServerDstu3.java b/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/ctx/FhirServerDstu3.java index e05f36bb820..aaee681728e 100644 --- a/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/ctx/FhirServerDstu3.java +++ b/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/ctx/FhirServerDstu3.java @@ -5,9 +5,8 @@ import ca.uhn.fhir.rest.server.RestfulServer; import org.hl7.fhir.dstu3.hapi.rest.server.ServerCapabilityStatementProvider; public class FhirServerDstu3 implements IFhirVersionServer { - @Override - public 
ServerCapabilityStatementProvider createServerConformanceProvider(RestfulServer theServer) { - return new ServerCapabilityStatementProvider(theServer); - } - + @Override + public ServerCapabilityStatementProvider createServerConformanceProvider(RestfulServer theServer) { + return new ServerCapabilityStatementProvider(theServer); + } } diff --git a/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/ctx/HapiWorkerContext.java b/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/ctx/HapiWorkerContext.java index bfcbfaedbaf..9b2f1096929 100644 --- a/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/ctx/HapiWorkerContext.java +++ b/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/ctx/HapiWorkerContext.java @@ -47,282 +47,339 @@ import java.util.Set; import static org.apache.commons.lang3.StringUtils.isNotBlank; public final class HapiWorkerContext extends I18nBase implements IWorkerContext { - private static final Logger ourLog = LoggerFactory.getLogger(HapiWorkerContext.class); - private final FhirContext myCtx; - private final Cache myFetchedResourceCache; - private IValidationSupport myValidationSupport; - private ExpansionProfile myExpansionProfile; + private static final Logger ourLog = LoggerFactory.getLogger(HapiWorkerContext.class); + private final FhirContext myCtx; + private final Cache myFetchedResourceCache; + private IValidationSupport myValidationSupport; + private ExpansionProfile myExpansionProfile; - public HapiWorkerContext(FhirContext theCtx, IValidationSupport theValidationSupport) { - Validate.notNull(theCtx, "theCtx must not be null"); - Validate.notNull(theValidationSupport, "theValidationSupport must not be null"); - myCtx = theCtx; - myValidationSupport = theValidationSupport; + public HapiWorkerContext(FhirContext theCtx, IValidationSupport theValidationSupport) { + Validate.notNull(theCtx, "theCtx must not be null"); + Validate.notNull(theValidationSupport, "theValidationSupport must not be null"); + myCtx = theCtx; + myValidationSupport = theValidationSupport; - long timeoutMillis = HapiSystemProperties.getTestValidationResourceCachesMs(); - myFetchedResourceCache = CacheFactory.build(timeoutMillis); - // Set a default locale - setValidationMessageLanguage(getLocale()); - } + long timeoutMillis = HapiSystemProperties.getTestValidationResourceCachesMs(); + myFetchedResourceCache = CacheFactory.build(timeoutMillis); + // Set a default locale + setValidationMessageLanguage(getLocale()); + } - @Override - @CoverageIgnore - public List allConformanceResources() { - throw new UnsupportedOperationException(Msg.code(610)); - } + @Override + @CoverageIgnore + public List allConformanceResources() { + throw new UnsupportedOperationException(Msg.code(610)); + } - @Override - public List allStructures() { - return myValidationSupport.fetchAllStructureDefinitions(); - } + @Override + public List allStructures() { + return myValidationSupport.fetchAllStructureDefinitions(); + } - @Override - public ValueSetExpansionComponent expandVS(ConceptSetComponent theInc, boolean theHierarchical) { - ValueSet input = new ValueSet(); - input.getCompose().addInclude(theInc); - IValidationSupport.ValueSetExpansionOutcome output = myValidationSupport.expandValueSet(new ValidationSupportContext(myValidationSupport), null, input); - ValueSet outputValueSet = (ValueSet) output.getValueSet(); - if (outputValueSet != null) { - return outputValueSet.getExpansion(); - } else { - return null; - } - } + @Override + public 
ValueSetExpansionComponent expandVS(ConceptSetComponent theInc, boolean theHierarchical) { + ValueSet input = new ValueSet(); + input.getCompose().addInclude(theInc); + IValidationSupport.ValueSetExpansionOutcome output = + myValidationSupport.expandValueSet(new ValidationSupportContext(myValidationSupport), null, input); + ValueSet outputValueSet = (ValueSet) output.getValueSet(); + if (outputValueSet != null) { + return outputValueSet.getExpansion(); + } else { + return null; + } + } - @Override - public StructureDefinition fetchTypeDefinition(String theCode) { - return fetchResource(org.hl7.fhir.dstu3.model.StructureDefinition.class, "http://hl7.org/fhir/StructureDefinition/" + theCode); - } + @Override + public StructureDefinition fetchTypeDefinition(String theCode) { + return fetchResource( + org.hl7.fhir.dstu3.model.StructureDefinition.class, + "http://hl7.org/fhir/StructureDefinition/" + theCode); + } - @Override - public CodeSystem fetchCodeSystem(String theSystem) { - if (myValidationSupport == null) { - return null; - } else { - return (CodeSystem) myValidationSupport.fetchCodeSystem(theSystem); - } - } + @Override + public CodeSystem fetchCodeSystem(String theSystem) { + if (myValidationSupport == null) { + return null; + } else { + return (CodeSystem) myValidationSupport.fetchCodeSystem(theSystem); + } + } - @Override - public T fetchResource(Class theClass, String theUri) { - Validate.notBlank(theUri, "theUri must not be null or blank"); - if (myValidationSupport == null) { - return null; - } else { - try { - //noinspection unchecked - return (T) myFetchedResourceCache.get(theUri, t -> { - T resource = myValidationSupport.fetchResource(theClass, theUri); - if (resource == null) { - throw new IllegalArgumentException(Msg.code(611)); - } - return resource; - }); - } catch (IllegalArgumentException e) { - return null; - } - } - } + @Override + public T fetchResource(Class theClass, String theUri) { + Validate.notBlank(theUri, "theUri must not be null or blank"); + if (myValidationSupport == null) { + return null; + } else { + try { + //noinspection unchecked + return (T) myFetchedResourceCache.get(theUri, t -> { + T resource = myValidationSupport.fetchResource(theClass, theUri); + if (resource == null) { + throw new IllegalArgumentException(Msg.code(611)); + } + return resource; + }); + } catch (IllegalArgumentException e) { + return null; + } + } + } - @Override - public T fetchResourceWithException(Class theClass_, String theUri) throws FHIRException { - T retVal = fetchResource(theClass_, theUri); - if (retVal == null) { - throw new FHIRException(Msg.code(612) + "Unable to fetch " + theUri); - } - return retVal; - } + @Override + public T fetchResourceWithException(Class theClass_, String theUri) throws FHIRException { + T retVal = fetchResource(theClass_, theUri); + if (retVal == null) { + throw new FHIRException(Msg.code(612) + "Unable to fetch " + theUri); + } + return retVal; + } - @Override - public List findMapsForSource(String theUrl) { - throw new UnsupportedOperationException(Msg.code(613)); - } + @Override + public List findMapsForSource(String theUrl) { + throw new UnsupportedOperationException(Msg.code(613)); + } - @Override - public ValueSetExpander.ValueSetExpansionOutcome expandVS(ValueSet source, boolean cacheOk, boolean heiarchical) { - throw new UnsupportedOperationException(Msg.code(614)); - } + @Override + public ValueSetExpander.ValueSetExpansionOutcome expandVS(ValueSet source, boolean cacheOk, boolean heiarchical) { + throw new 
UnsupportedOperationException(Msg.code(614)); + } - @Override - public String getAbbreviation(String theName) { - throw new UnsupportedOperationException(Msg.code(615)); - } + @Override + public String getAbbreviation(String theName) { + throw new UnsupportedOperationException(Msg.code(615)); + } - @Override - public ExpansionProfile getExpansionProfile() { - return myExpansionProfile; - } + @Override + public ExpansionProfile getExpansionProfile() { + return myExpansionProfile; + } - @Override - public void setExpansionProfile(ExpansionProfile theExpProfile) { - myExpansionProfile = theExpProfile; - } + @Override + public void setExpansionProfile(ExpansionProfile theExpProfile) { + myExpansionProfile = theExpProfile; + } - @Override - public INarrativeGenerator getNarrativeGenerator(String thePrefix, String theBasePath) { - throw new UnsupportedOperationException(Msg.code(616)); - } + @Override + public INarrativeGenerator getNarrativeGenerator(String thePrefix, String theBasePath) { + throw new UnsupportedOperationException(Msg.code(616)); + } - @Override - public IResourceValidator newValidator() throws FHIRException { - throw new UnsupportedOperationException(Msg.code(617)); - } + @Override + public IResourceValidator newValidator() throws FHIRException { + throw new UnsupportedOperationException(Msg.code(617)); + } - @Override - public IParser getParser(ParserType theType) { - throw new UnsupportedOperationException(Msg.code(618)); - } + @Override + public IParser getParser(ParserType theType) { + throw new UnsupportedOperationException(Msg.code(618)); + } - @Override - public IParser getParser(String theType) { - throw new UnsupportedOperationException(Msg.code(619)); - } + @Override + public IParser getParser(String theType) { + throw new UnsupportedOperationException(Msg.code(619)); + } - @Override - public List getResourceNames() { - List result = new ArrayList<>(); - for (ResourceType next : ResourceType.values()) { - result.add(next.name()); - } - Collections.sort(result); - return result; - } + @Override + public List getResourceNames() { + List result = new ArrayList<>(); + for (ResourceType next : ResourceType.values()) { + result.add(next.name()); + } + Collections.sort(result); + return result; + } - @Override - public Set getResourceNamesAsSet() { - return new HashSet<>(getResourceNames()); - } + @Override + public Set getResourceNamesAsSet() { + return new HashSet<>(getResourceNames()); + } - @Override - public List getTypeNames() { - throw new UnsupportedOperationException(Msg.code(620)); - } + @Override + public List getTypeNames() { + throw new UnsupportedOperationException(Msg.code(620)); + } - @Override - public String getVersion() { - return myCtx.getVersion().getVersion().getFhirVersionString(); - } + @Override + public String getVersion() { + return myCtx.getVersion().getVersion().getFhirVersionString(); + } - @Override - @CoverageIgnore - public boolean hasCache() { - throw new UnsupportedOperationException(Msg.code(621)); - } + @Override + @CoverageIgnore + public boolean hasCache() { + throw new UnsupportedOperationException(Msg.code(621)); + } - @Override - public boolean hasResource(Class theClass_, String theUri) { - throw new UnsupportedOperationException(Msg.code(622)); - } + @Override + public boolean hasResource(Class theClass_, String theUri) { + throw new UnsupportedOperationException(Msg.code(622)); + } - @Override - public boolean isNoTerminologyServer() { - return false; - } + @Override + public boolean isNoTerminologyServer() { + return false; + } 
- @Override - public IParser newJsonParser() { - throw new UnsupportedOperationException(Msg.code(623)); - } + @Override + public IParser newJsonParser() { + throw new UnsupportedOperationException(Msg.code(623)); + } - @Override - public IParser newXmlParser() { - throw new UnsupportedOperationException(Msg.code(624)); - } + @Override + public IParser newXmlParser() { + throw new UnsupportedOperationException(Msg.code(624)); + } - @Override - public String oid2Uri(String theCode) { - throw new UnsupportedOperationException(Msg.code(625)); - } + @Override + public String oid2Uri(String theCode) { + throw new UnsupportedOperationException(Msg.code(625)); + } - @Override - public void setLogger(ILoggingService theLogger) { - throw new UnsupportedOperationException(Msg.code(626)); - } + @Override + public void setLogger(ILoggingService theLogger) { + throw new UnsupportedOperationException(Msg.code(626)); + } - @Override - public boolean supportsSystem(String theSystem) { - if (myValidationSupport == null) { - return false; - } else { - return myValidationSupport.isCodeSystemSupported(new ValidationSupportContext(myValidationSupport), theSystem); - } - } + @Override + public boolean supportsSystem(String theSystem) { + if (myValidationSupport == null) { + return false; + } else { + return myValidationSupport.isCodeSystemSupported( + new ValidationSupportContext(myValidationSupport), theSystem); + } + } - @Override - public Set typeTails() { - return new HashSet<>(Arrays.asList("Integer", "UnsignedInt", "PositiveInt", "Decimal", "DateTime", "Date", "Time", "Instant", "String", "Uri", "Oid", "Uuid", "Id", "Boolean", "Code", - "Markdown", "Base64Binary", "Coding", "CodeableConcept", "Attachment", "Identifier", "Quantity", "SampledData", "Range", "Period", "Ratio", "HumanName", "Address", "ContactPoint", - "Timing", "Reference", "Annotation", "Signature", "Meta")); - } + @Override + public Set typeTails() { + return new HashSet<>(Arrays.asList( + "Integer", + "UnsignedInt", + "PositiveInt", + "Decimal", + "DateTime", + "Date", + "Time", + "Instant", + "String", + "Uri", + "Oid", + "Uuid", + "Id", + "Boolean", + "Code", + "Markdown", + "Base64Binary", + "Coding", + "CodeableConcept", + "Attachment", + "Identifier", + "Quantity", + "SampledData", + "Range", + "Period", + "Ratio", + "HumanName", + "Address", + "ContactPoint", + "Timing", + "Reference", + "Annotation", + "Signature", + "Meta")); + } - @Override - public ValidationResult validateCode(CodeableConcept theCode, ValueSet theVs) { - for (Coding next : theCode.getCoding()) { - ValidationResult retVal = validateCode(next, theVs); - if (retVal.isOk()) { - return retVal; - } - } + @Override + public ValidationResult validateCode(CodeableConcept theCode, ValueSet theVs) { + for (Coding next : theCode.getCoding()) { + ValidationResult retVal = validateCode(next, theVs); + if (retVal.isOk()) { + return retVal; + } + } - return new ValidationResult(IssueSeverity.ERROR, null); - } + return new ValidationResult(IssueSeverity.ERROR, null); + } - @Override - public ValidationResult validateCode(Coding theCode, ValueSet theVs) { - String system = theCode.getSystem(); - String code = theCode.getCode(); - String display = theCode.getDisplay(); - return validateCode(system, code, display, theVs); - } + @Override + public ValidationResult validateCode(Coding theCode, ValueSet theVs) { + String system = theCode.getSystem(); + String code = theCode.getCode(); + String display = theCode.getDisplay(); + return validateCode(system, code, display, theVs); + } - 
@Override - public ValidationResult validateCode(String theSystem, String theCode, String theDisplay) { - ValidationOptions options = new ValidationOptions(); - IValidationSupport.CodeValidationResult result = myValidationSupport.validateCode(new ValidationSupportContext(myValidationSupport), convertConceptValidationOptions(options), theSystem, theCode, theDisplay, null); - if (result == null) { - return null; - } + @Override + public ValidationResult validateCode(String theSystem, String theCode, String theDisplay) { + ValidationOptions options = new ValidationOptions(); + IValidationSupport.CodeValidationResult result = myValidationSupport.validateCode( + new ValidationSupportContext(myValidationSupport), + convertConceptValidationOptions(options), + theSystem, + theCode, + theDisplay, + null); + if (result == null) { + return null; + } - IssueSeverity severity = null; - if (result.getSeverity() != null) { - severity = IssueSeverity.fromCode(result.getSeverityCode()); - } - ConceptDefinitionComponent definition = new ConceptDefinitionComponent().setCode(result.getCode()); - return new ValidationResult(severity, result.getMessage(), definition); - } + IssueSeverity severity = null; + if (result.getSeverity() != null) { + severity = IssueSeverity.fromCode(result.getSeverityCode()); + } + ConceptDefinitionComponent definition = new ConceptDefinitionComponent().setCode(result.getCode()); + return new ValidationResult(severity, result.getMessage(), definition); + } - public static ConceptValidationOptions convertConceptValidationOptions(ValidationOptions theOptions) { - ConceptValidationOptions retVal = new ConceptValidationOptions(); - if (theOptions.isGuessSystem()) { - retVal = retVal.setInferSystem(true); - } - return retVal; - } + public static ConceptValidationOptions convertConceptValidationOptions(ValidationOptions theOptions) { + ConceptValidationOptions retVal = new ConceptValidationOptions(); + if (theOptions.isGuessSystem()) { + retVal = retVal.setInferSystem(true); + } + return retVal; + } - @Override - public ValidationResult validateCode(String theSystem, String theCode, String theDisplay, ConceptSetComponent theVsi) { - throw new UnsupportedOperationException(Msg.code(627)); - } + @Override + public ValidationResult validateCode( + String theSystem, String theCode, String theDisplay, ConceptSetComponent theVsi) { + throw new UnsupportedOperationException(Msg.code(627)); + } - @Override - public ValidationResult validateCode(String theSystem, String theCode, String theDisplay, ValueSet theVs) { + @Override + public ValidationResult validateCode(String theSystem, String theCode, String theDisplay, ValueSet theVs) { - IValidationSupport.CodeValidationResult outcome; - ValidationOptions options = new ValidationOptions(); - if (isNotBlank(theVs.getUrl())) { - outcome = myValidationSupport.validateCode(new ValidationSupportContext(myValidationSupport), convertConceptValidationOptions(options), theSystem, theCode, theDisplay, theVs.getUrl()); - } else { - outcome = myValidationSupport.validateCodeInValueSet(new ValidationSupportContext(myValidationSupport), convertConceptValidationOptions(options), theSystem, theCode, theDisplay, theVs); - } + IValidationSupport.CodeValidationResult outcome; + ValidationOptions options = new ValidationOptions(); + if (isNotBlank(theVs.getUrl())) { + outcome = myValidationSupport.validateCode( + new ValidationSupportContext(myValidationSupport), + convertConceptValidationOptions(options), + theSystem, + theCode, + theDisplay, + theVs.getUrl()); + } 
else { + outcome = myValidationSupport.validateCodeInValueSet( + new ValidationSupportContext(myValidationSupport), + convertConceptValidationOptions(options), + theSystem, + theCode, + theDisplay, + theVs); + } - if (outcome != null && outcome.isOk()) { - ConceptDefinitionComponent definition = new ConceptDefinitionComponent(); - definition.setCode(theCode); - definition.setDisplay(outcome.getDisplay()); - return new ValidationResult(definition); - } - - return new ValidationResult(IssueSeverity.ERROR, "Unknown code[" + theCode + "] in system[" + Constants.codeSystemWithDefaultDescription(theSystem) + "]"); - } + if (outcome != null && outcome.isOk()) { + ConceptDefinitionComponent definition = new ConceptDefinitionComponent(); + definition.setCode(theCode); + definition.setDisplay(outcome.getDisplay()); + return new ValidationResult(definition); + } + return new ValidationResult( + IssueSeverity.ERROR, + "Unknown code[" + theCode + "] in system[" + Constants.codeSystemWithDefaultDescription(theSystem) + + "]"); + } } diff --git a/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/fluentpath/FhirPathDstu3.java b/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/fluentpath/FhirPathDstu3.java index 78359fbf482..378da6bbf91 100644 --- a/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/fluentpath/FhirPathDstu3.java +++ b/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/fluentpath/FhirPathDstu3.java @@ -1,11 +1,11 @@ package org.hl7.fhir.dstu3.hapi.fluentpath; -import ca.uhn.fhir.fhirpath.IFhirPathEvaluationContext; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.support.IValidationSupport; import ca.uhn.fhir.fhirpath.FhirPathExecutionException; import ca.uhn.fhir.fhirpath.IFhirPath; +import ca.uhn.fhir.fhirpath.IFhirPathEvaluationContext; +import ca.uhn.fhir.i18n.Msg; import org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext; import org.hl7.fhir.dstu3.model.Base; import org.hl7.fhir.dstu3.model.IdType; @@ -15,16 +15,16 @@ import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.exceptions.PathEngineException; import org.hl7.fhir.instance.model.api.IBase; -import javax.annotation.Nonnull; import java.util.List; import java.util.Optional; +import javax.annotation.Nonnull; public class FhirPathDstu3 implements IFhirPath { private FHIRPathEngine myEngine; public FhirPathDstu3(FhirContext theCtx) { - IValidationSupport validationSupport = theCtx.getValidationSupport(); + IValidationSupport validationSupport = theCtx.getValidationSupport(); myEngine = new FHIRPathEngine(new HapiWorkerContext(theCtx, validationSupport)); } @@ -33,70 +33,71 @@ public class FhirPathDstu3 implements IFhirPath { public List evaluate(IBase theInput, String thePath, Class theReturnType) { List result; try { - result = myEngine.evaluate((Base)theInput, thePath); + result = myEngine.evaluate((Base) theInput, thePath); } catch (FHIRException e) { throw new FhirPathExecutionException(Msg.code(607) + e); } for (Base next : result) { if (!theReturnType.isAssignableFrom(next.getClass())) { - throw new FhirPathExecutionException(Msg.code(608) + "FluentPath expression \"" + thePath + "\" returned unexpected type " + next.getClass().getSimpleName() + " - Expected " + theReturnType.getName()); + throw new FhirPathExecutionException( + Msg.code(608) + "FluentPath expression \"" + thePath + "\" returned unexpected type " + + next.getClass().getSimpleName() + " - Expected " + theReturnType.getName()); } } - + return (List) 
result; } - @Override - public Optional evaluateFirst(IBase theInput, String thePath, Class theReturnType) { - return evaluate(theInput, thePath, theReturnType).stream().findFirst(); - } + @Override + public Optional evaluateFirst(IBase theInput, String thePath, Class theReturnType) { + return evaluate(theInput, thePath, theReturnType).stream().findFirst(); + } - @Override - public void parse(String theExpression) { - myEngine.parse(theExpression); - } + @Override + public void parse(String theExpression) { + myEngine.parse(theExpression); + } - @Override - public void setEvaluationContext(@Nonnull IFhirPathEvaluationContext theEvaluationContext) { - myEngine.setHostServices(new FHIRPathEngine.IEvaluationContext(){ + @Override + public void setEvaluationContext(@Nonnull IFhirPathEvaluationContext theEvaluationContext) { + myEngine.setHostServices(new FHIRPathEngine.IEvaluationContext() { - @Override - public Base resolveConstant(Object appContext, String name) throws PathEngineException { - return null; - } + @Override + public Base resolveConstant(Object appContext, String name) throws PathEngineException { + return null; + } - @Override - public TypeDetails resolveConstantType(Object appContext, String name) throws PathEngineException { - return null; - } + @Override + public TypeDetails resolveConstantType(Object appContext, String name) throws PathEngineException { + return null; + } - @Override - public boolean log(String argument, List focus) { - return false; - } + @Override + public boolean log(String argument, List focus) { + return false; + } - @Override - public FunctionDetails resolveFunction(String functionName) { - return null; - } + @Override + public FunctionDetails resolveFunction(String functionName) { + return null; + } - @Override - public TypeDetails checkFunction(Object appContext, String functionName, List parameters) throws PathEngineException { - return null; - } + @Override + public TypeDetails checkFunction(Object appContext, String functionName, List parameters) + throws PathEngineException { + return null; + } - @Override - public List executeFunction(Object appContext, String functionName, List> parameters) { - return null; - } - - @Override - public Base resolveReference(Object appContext, String theUrl) throws FHIRException { - return (Base)theEvaluationContext.resolveReference(new IdType(theUrl), null); - } - - }); - } + @Override + public List executeFunction(Object appContext, String functionName, List> parameters) { + return null; + } + @Override + public Base resolveReference(Object appContext, String theUrl) throws FHIRException { + return (Base) theEvaluationContext.resolveReference(new IdType(theUrl), null); + } + }); + } } diff --git a/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/rest/server/Dstu3BundleFactory.java b/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/rest/server/Dstu3BundleFactory.java index bc8ac06e41b..6608644db94 100644 --- a/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/rest/server/Dstu3BundleFactory.java +++ b/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/rest/server/Dstu3BundleFactory.java @@ -43,245 +43,253 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.UUID; +import 
javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isNotBlank; public class Dstu3BundleFactory implements IVersionSpecificBundleFactory { - private String myBase; - private Bundle myBundle; - private FhirContext myContext; + private String myBase; + private Bundle myBundle; + private FhirContext myContext; - public Dstu3BundleFactory(FhirContext theContext) { - myContext = theContext; - } + public Dstu3BundleFactory(FhirContext theContext) { + myContext = theContext; + } + @Override + public void addResourcesToBundle( + List theResult, + BundleTypeEnum theBundleType, + String theServerBase, + BundleInclusionRule theBundleInclusionRule, + Set theIncludes) { + ensureBundle(); - @Override - public void addResourcesToBundle(List theResult, BundleTypeEnum theBundleType, String theServerBase, BundleInclusionRule theBundleInclusionRule, Set theIncludes) { - ensureBundle(); + List includedResources = new ArrayList(); + Set addedResourceIds = new HashSet(); - List includedResources = new ArrayList(); - Set addedResourceIds = new HashSet(); + for (IBaseResource next : theResult) { + if (next.getIdElement().isEmpty() == false) { + addedResourceIds.add(next.getIdElement()); + } + } - for (IBaseResource next : theResult) { - if (next.getIdElement().isEmpty() == false) { - addedResourceIds.add(next.getIdElement()); - } - } + for (IBaseResource next : theResult) { - for (IBaseResource next : theResult) { + Set containedIds = new HashSet(); - Set containedIds = new HashSet(); + if (next instanceof DomainResource) { + for (Resource nextContained : ((DomainResource) next).getContained()) { + if (isNotBlank(nextContained.getId())) { + containedIds.add(nextContained.getId()); + } + } + } - if (next instanceof DomainResource) { - for (Resource nextContained : ((DomainResource) next).getContained()) { - if (isNotBlank(nextContained.getId())) { - containedIds.add(nextContained.getId()); - } - } - } + List references = myContext.newTerser().getAllResourceReferences(next); + do { + List addedResourcesThisPass = new ArrayList(); - List references = myContext.newTerser().getAllResourceReferences(next); - do { - List addedResourcesThisPass = new ArrayList(); + for (ResourceReferenceInfo nextRefInfo : references) { + if (theBundleInclusionRule != null + && !theBundleInclusionRule.shouldIncludeReferencedResource(nextRefInfo, theIncludes)) { + continue; + } - for (ResourceReferenceInfo nextRefInfo : references) { - if (theBundleInclusionRule != null && !theBundleInclusionRule.shouldIncludeReferencedResource(nextRefInfo, theIncludes)) { - continue; - } + IAnyResource nextRes = + (IAnyResource) nextRefInfo.getResourceReference().getResource(); + if (nextRes != null) { + if (nextRes.getIdElement().hasIdPart()) { + if (containedIds.contains(nextRes.getIdElement().getValue())) { + // Don't add contained IDs as top level resources + continue; + } - IAnyResource nextRes = (IAnyResource) nextRefInfo.getResourceReference().getResource(); - if (nextRes != null) { - if (nextRes.getIdElement().hasIdPart()) { - if (containedIds.contains(nextRes.getIdElement().getValue())) { - // Don't add contained IDs as top level resources - continue; - } + IIdType id = nextRes.getIdElement(); + if (id.hasResourceType() == false) { + String resName = myContext.getResourceType(nextRes); + id = id.withResourceType(resName); + } - IIdType id = nextRes.getIdElement(); - if (id.hasResourceType() == false) { - String resName = myContext.getResourceType(nextRes); - id = id.withResourceType(resName); - } + if 
(!addedResourceIds.contains(id)) { + addedResourceIds.add(id); + addedResourcesThisPass.add(nextRes); + } + } + } + } - if (!addedResourceIds.contains(id)) { - addedResourceIds.add(id); - addedResourcesThisPass.add(nextRes); - } + includedResources.addAll(addedResourcesThisPass); - } - } - } + // Linked resources may themselves have linked resources + references = new ArrayList<>(); + for (IAnyResource iResource : addedResourcesThisPass) { + List newReferences = + myContext.newTerser().getAllResourceReferences(iResource); + references.addAll(newReferences); + } + } while (references.isEmpty() == false); - includedResources.addAll(addedResourcesThisPass); + BundleEntryComponent entry = myBundle.addEntry().setResource((Resource) next); + Resource nextAsResource = (Resource) next; + IIdType id = populateBundleEntryFullUrl(next, entry); + BundleEntryTransactionMethodEnum httpVerb = + ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.get(nextAsResource); + if (httpVerb != null) { + entry.getRequest().getMethodElement().setValueAsString(httpVerb.name()); + if (id != null) { + entry.getRequest().setUrl(id.toUnqualified().getValue()); + } + } + if (BundleEntryTransactionMethodEnum.DELETE.equals(httpVerb)) { + entry.setResource(null); + } - // Linked resources may themselves have linked resources - references = new ArrayList<>(); - for (IAnyResource iResource : addedResourcesThisPass) { - List newReferences = myContext.newTerser().getAllResourceReferences(iResource); - references.addAll(newReferences); - } - } while (references.isEmpty() == false); + // Populate Bundle.entry.response + if (theBundleType != null) { + switch (theBundleType) { + case BATCH_RESPONSE: + case TRANSACTION_RESPONSE: + if ("1".equals(id.getVersionIdPart())) { + entry.getResponse().setStatus("201 Created"); + } else if (isNotBlank(id.getVersionIdPart())) { + entry.getResponse().setStatus("200 OK"); + } + if (isNotBlank(id.getVersionIdPart())) { + entry.getResponse().setEtag(RestfulServerUtils.createEtag(id.getVersionIdPart())); + } + break; + } + } - BundleEntryComponent entry = myBundle.addEntry().setResource((Resource) next); - Resource nextAsResource = (Resource) next; - IIdType id = populateBundleEntryFullUrl(next, entry); - BundleEntryTransactionMethodEnum httpVerb = ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.get(nextAsResource); - if (httpVerb != null) { - entry.getRequest().getMethodElement().setValueAsString(httpVerb.name()); - if (id != null) { - entry.getRequest().setUrl(id.toUnqualified().getValue()); - } - } - if (BundleEntryTransactionMethodEnum.DELETE.equals(httpVerb)) { - entry.setResource(null); - } + // Populate Bundle.entry.search + BundleEntrySearchModeEnum searchMode = ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.get(nextAsResource); + if (searchMode != null) { + entry.getSearch().getModeElement().setValueAsString(searchMode.getCode()); + } + } - // Populate Bundle.entry.response - if (theBundleType != null) { - switch (theBundleType) { - case BATCH_RESPONSE: - case TRANSACTION_RESPONSE: - if ("1".equals(id.getVersionIdPart())) { - entry.getResponse().setStatus("201 Created"); - } else if (isNotBlank(id.getVersionIdPart())) { - entry.getResponse().setStatus("200 OK"); - } - if (isNotBlank(id.getVersionIdPart())) { - entry.getResponse().setEtag(RestfulServerUtils.createEtag(id.getVersionIdPart())); - } - break; - } - } + /* + * Actually add the resources to the bundle + */ + for (IAnyResource next : includedResources) { + BundleEntryComponent entry = myBundle.addEntry(); + entry.setResource((Resource) 
next).getSearch().setMode(SearchEntryMode.INCLUDE); + populateBundleEntryFullUrl(next, entry); + } + } - // Populate Bundle.entry.search - BundleEntrySearchModeEnum searchMode = ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.get(nextAsResource); - if (searchMode != null) { - entry.getSearch().getModeElement().setValueAsString(searchMode.getCode()); - } + @Override + public void addRootPropertiesToBundle( + String theId, + @Nonnull BundleLinks theBundleLinks, + Integer theTotalResults, + IPrimitiveType theLastUpdated) { + ensureBundle(); - } + myBase = theBundleLinks.serverBase; - /* - * Actually add the resources to the bundle - */ - for (IAnyResource next : includedResources) { - BundleEntryComponent entry = myBundle.addEntry(); - entry.setResource((Resource) next).getSearch().setMode(SearchEntryMode.INCLUDE); - populateBundleEntryFullUrl(next, entry); - } + if (myBundle.getIdElement().isEmpty()) { + myBundle.setId(theId); + } - } + if (myBundle.getMeta().getLastUpdated() == null && theLastUpdated != null) { + myBundle.getMeta().getLastUpdatedElement().setValueAsString(theLastUpdated.getValueAsString()); + } - @Override - public void addRootPropertiesToBundle(String theId, @Nonnull BundleLinks theBundleLinks, Integer theTotalResults, - IPrimitiveType theLastUpdated) { - ensureBundle(); + if (!hasLink(Constants.LINK_SELF, myBundle) && isNotBlank(theBundleLinks.getSelf())) { + myBundle.addLink().setRelation(Constants.LINK_SELF).setUrl(theBundleLinks.getSelf()); + } + if (!hasLink(Constants.LINK_NEXT, myBundle) && isNotBlank(theBundleLinks.getNext())) { + myBundle.addLink().setRelation(Constants.LINK_NEXT).setUrl(theBundleLinks.getNext()); + } + if (!hasLink(Constants.LINK_PREVIOUS, myBundle) && isNotBlank(theBundleLinks.getPrev())) { + myBundle.addLink().setRelation(Constants.LINK_PREVIOUS).setUrl(theBundleLinks.getPrev()); + } - myBase = theBundleLinks.serverBase; + addTotalResultsToBundle(theTotalResults, theBundleLinks.bundleType); + } - if (myBundle.getIdElement().isEmpty()) { - myBundle.setId(theId); - } + @Override + public void addTotalResultsToBundle(Integer theTotalResults, BundleTypeEnum theBundleType) { + ensureBundle(); - if (myBundle.getMeta().getLastUpdated() == null && theLastUpdated != null) { - myBundle.getMeta().getLastUpdatedElement().setValueAsString(theLastUpdated.getValueAsString()); - } + if (myBundle.getIdElement().isEmpty()) { + myBundle.setId(UUID.randomUUID().toString()); + } - if (!hasLink(Constants.LINK_SELF, myBundle) && isNotBlank(theBundleLinks.getSelf())) { - myBundle.addLink().setRelation(Constants.LINK_SELF).setUrl(theBundleLinks.getSelf()); - } - if (!hasLink(Constants.LINK_NEXT, myBundle) && isNotBlank(theBundleLinks.getNext())) { - myBundle.addLink().setRelation(Constants.LINK_NEXT).setUrl(theBundleLinks.getNext()); - } - if (!hasLink(Constants.LINK_PREVIOUS, myBundle) && isNotBlank(theBundleLinks.getPrev())) { - myBundle.addLink().setRelation(Constants.LINK_PREVIOUS).setUrl(theBundleLinks.getPrev()); - } + if (myBundle.getTypeElement().isEmpty() && theBundleType != null) { + myBundle.getTypeElement().setValueAsString(theBundleType.getCode()); + } - addTotalResultsToBundle(theTotalResults, theBundleLinks.bundleType); - } + if (myBundle.getTotalElement().isEmpty() && theTotalResults != null) { + myBundle.getTotalElement().setValue(theTotalResults); + } + } - @Override - public void addTotalResultsToBundle(Integer theTotalResults, BundleTypeEnum theBundleType) { - ensureBundle(); + private void ensureBundle() { + if (myBundle == null) { + myBundle = new 
Bundle(); + } + } - if (myBundle.getIdElement().isEmpty()) { - myBundle.setId(UUID.randomUUID().toString()); - } + @Override + public IBaseResource getResourceBundle() { + return myBundle; + } - if (myBundle.getTypeElement().isEmpty() && theBundleType != null) { - myBundle.getTypeElement().setValueAsString(theBundleType.getCode()); - } + private boolean hasLink(String theLinkType, Bundle theBundle) { + for (BundleLinkComponent next : theBundle.getLink()) { + if (theLinkType.equals(next.getRelation())) { + return true; + } + } + return false; + } - if (myBundle.getTotalElement().isEmpty() && theTotalResults != null) { - myBundle.getTotalElement().setValue(theTotalResults); - } - } + @Override + public void initializeWithBundleResource(IBaseResource theBundle) { + myBundle = (Bundle) theBundle; + } - private void ensureBundle() { - if (myBundle == null) { - myBundle = new Bundle(); - } - } - - @Override - public IBaseResource getResourceBundle() { - return myBundle; - } - - private boolean hasLink(String theLinkType, Bundle theBundle) { - for (BundleLinkComponent next : theBundle.getLink()) { - if (theLinkType.equals(next.getRelation())) { - return true; - } - } - return false; - } - - @Override - public void initializeWithBundleResource(IBaseResource theBundle) { - myBundle = (Bundle) theBundle; - } - - private IIdType populateBundleEntryFullUrl(IBaseResource next, BundleEntryComponent entry) { - IIdType idElement = null; - if (next.getIdElement().hasBaseUrl()) { - idElement = next.getIdElement(); - entry.setFullUrl(idElement.toVersionless().getValue()); - } else { - if (isNotBlank(myBase) && next.getIdElement().hasIdPart()) { - idElement = next.getIdElement(); - idElement = idElement.withServerBase(myBase, myContext.getResourceType(next)); - entry.setFullUrl(idElement.toVersionless().getValue()); - } - } - return idElement; - } - - @Override - public List toListOfResources() { - ArrayList retVal = new ArrayList(); - for (BundleEntryComponent next : myBundle.getEntry()) { - if (next.getResource() != null) { - retVal.add(next.getResource()); - } else if (next.getResponse().getLocationElement().isEmpty() == false) { - IdType id = new IdType(next.getResponse().getLocation()); - String resourceType = id.getResourceType(); - if (isNotBlank(resourceType)) { - IAnyResource res = (IAnyResource) myContext.getResourceDefinition(resourceType).newInstance(); - res.setId(id); - retVal.add(res); - } - } - } - return retVal; - } + private IIdType populateBundleEntryFullUrl(IBaseResource next, BundleEntryComponent entry) { + IIdType idElement = null; + if (next.getIdElement().hasBaseUrl()) { + idElement = next.getIdElement(); + entry.setFullUrl(idElement.toVersionless().getValue()); + } else { + if (isNotBlank(myBase) && next.getIdElement().hasIdPart()) { + idElement = next.getIdElement(); + idElement = idElement.withServerBase(myBase, myContext.getResourceType(next)); + entry.setFullUrl(idElement.toVersionless().getValue()); + } + } + return idElement; + } + @Override + public List toListOfResources() { + ArrayList retVal = new ArrayList(); + for (BundleEntryComponent next : myBundle.getEntry()) { + if (next.getResource() != null) { + retVal.add(next.getResource()); + } else if (next.getResponse().getLocationElement().isEmpty() == false) { + IdType id = new IdType(next.getResponse().getLocation()); + String resourceType = id.getResourceType(); + if (isNotBlank(resourceType)) { + IAnyResource res = (IAnyResource) + myContext.getResourceDefinition(resourceType).newInstance(); + res.setId(id); + 
retVal.add(res); + } + } + } + return retVal; + } } diff --git a/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/rest/server/ServerCapabilityStatementProvider.java b/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/rest/server/ServerCapabilityStatementProvider.java index 6fae8f93a95..eaf2a98c582 100644 --- a/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/rest/server/ServerCapabilityStatementProvider.java +++ b/hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/hapi/rest/server/ServerCapabilityStatementProvider.java @@ -19,43 +19,42 @@ */ package org.hl7.fhir.dstu3.hapi.rest.server; -import ca.uhn.fhir.i18n.Msg; +import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.context.RuntimeSearchParam; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.Metadata; import ca.uhn.fhir.rest.annotation.Read; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.server.RequestDetails; -import ca.uhn.fhir.rest.server.*; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; -import ca.uhn.fhir.rest.server.method.*; import ca.uhn.fhir.rest.server.method.OperationMethodBinding.ReturnType; import ca.uhn.fhir.rest.server.method.SearchParameter; import ca.uhn.fhir.rest.server.util.BaseServerCapabilityStatementProvider; +import ca.uhn.fhir.rest.server.*; +import ca.uhn.fhir.rest.server.method.*; import org.apache.commons.lang3.StringUtils; -import org.hl7.fhir.dstu3.model.*; -import org.hl7.fhir.dstu3.model.CapabilityStatement.*; import org.hl7.fhir.dstu3.model.Enumerations.PublicationStatus; import org.hl7.fhir.dstu3.model.OperationDefinition.OperationDefinitionParameterComponent; import org.hl7.fhir.dstu3.model.OperationDefinition.OperationKind; +import org.hl7.fhir.dstu3.model.*; +import org.hl7.fhir.dstu3.model.CapabilityStatement.*; import org.hl7.fhir.dstu3.model.OperationDefinition.OperationParameterUse; import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; +import java.util.Map.Entry; +import java.util.*; import javax.servlet.ServletContext; import javax.servlet.http.HttpServletRequest; -import java.util.*; -import java.util.Map.Entry; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; -import ca.uhn.fhir.context.FhirContext; - /** * Server FHIR Provider which serves the conformance statement for a RESTful server implementation * @@ -64,593 +63,623 @@ import ca.uhn.fhir.context.FhirContext; * false. This means that if you are adding anything to the returned conformance instance on each call you should call setCache(false) in your provider constructor. *

    */ -public class ServerCapabilityStatementProvider extends BaseServerCapabilityStatementProvider implements IServerConformanceProvider { +public class ServerCapabilityStatementProvider extends BaseServerCapabilityStatementProvider + implements IServerConformanceProvider { - private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ServerCapabilityStatementProvider.class); - private String myPublisher = "Not provided"; + private static final org.slf4j.Logger ourLog = + org.slf4j.LoggerFactory.getLogger(ServerCapabilityStatementProvider.class); + private String myPublisher = "Not provided"; - /** - * No-arg constructor and setter so that the ServerConformanceProvider can be Spring-wired with the RestfulService avoiding the potential reference cycle that would happen. - */ - public ServerCapabilityStatementProvider() { - super(); - } + /** + * No-arg constructor and setter so that the ServerConformanceProvider can be Spring-wired with the RestfulService avoiding the potential reference cycle that would happen. + */ + public ServerCapabilityStatementProvider() { + super(); + } - /** - * Constructor - * - * @deprecated Use no-args constructor instead. Deprecated in 4.0.0 - */ - @Deprecated - public ServerCapabilityStatementProvider(RestfulServer theRestfulServer) { - this(); - } + /** + * Constructor + * + * @deprecated Use no-args constructor instead. Deprecated in 4.0.0 + */ + @Deprecated + public ServerCapabilityStatementProvider(RestfulServer theRestfulServer) { + this(); + } - /** - * Constructor - This is intended only for JAX-RS server - */ - public ServerCapabilityStatementProvider(RestfulServerConfiguration theServerConfiguration) { - super(theServerConfiguration); - } + /** + * Constructor - This is intended only for JAX-RS server + */ + public ServerCapabilityStatementProvider(RestfulServerConfiguration theServerConfiguration) { + super(theServerConfiguration); + } - private void checkBindingForSystemOps(CapabilityStatementRestComponent rest, Set systemOps, BaseMethodBinding nextMethodBinding) { - if (nextMethodBinding.getRestOperationType() != null) { - String sysOpCode = nextMethodBinding.getRestOperationType().getCode(); - if (sysOpCode != null) { - SystemRestfulInteraction sysOp; - try { - sysOp = SystemRestfulInteraction.fromCode(sysOpCode); - } catch (FHIRException e) { - return; - } - if (sysOp == null) { - return; - } - if (systemOps.contains(sysOp) == false) { - systemOps.add(sysOp); - rest.addInteraction().setCode(sysOp); - } - } - } - } + private void checkBindingForSystemOps( + CapabilityStatementRestComponent rest, + Set systemOps, + BaseMethodBinding nextMethodBinding) { + if (nextMethodBinding.getRestOperationType() != null) { + String sysOpCode = nextMethodBinding.getRestOperationType().getCode(); + if (sysOpCode != null) { + SystemRestfulInteraction sysOp; + try { + sysOp = SystemRestfulInteraction.fromCode(sysOpCode); + } catch (FHIRException e) { + return; + } + if (sysOp == null) { + return; + } + if (systemOps.contains(sysOp) == false) { + systemOps.add(sysOp); + rest.addInteraction().setCode(sysOp); + } + } + } + } - private Map> collectMethodBindings(RequestDetails theRequestDetails) { - Map> resourceToMethods = new TreeMap<>(); - for (ResourceBinding next : getServerConfiguration(theRequestDetails).getResourceBindings()) { - String resourceName = next.getResourceName(); - for (BaseMethodBinding nextMethodBinding : next.getMethodBindings()) { - if (resourceToMethods.containsKey(resourceName) == false) { - 
resourceToMethods.put(resourceName, new ArrayList<>()); - } - resourceToMethods.get(resourceName).add(nextMethodBinding); - } - } - for (BaseMethodBinding nextMethodBinding : getServerConfiguration(theRequestDetails).getServerBindings()) { - String resourceName = ""; - if (resourceToMethods.containsKey(resourceName) == false) { - resourceToMethods.put(resourceName, new ArrayList<>()); - } - resourceToMethods.get(resourceName).add(nextMethodBinding); - } - return resourceToMethods; - } + private Map> collectMethodBindings(RequestDetails theRequestDetails) { + Map> resourceToMethods = new TreeMap<>(); + for (ResourceBinding next : getServerConfiguration(theRequestDetails).getResourceBindings()) { + String resourceName = next.getResourceName(); + for (BaseMethodBinding nextMethodBinding : next.getMethodBindings()) { + if (resourceToMethods.containsKey(resourceName) == false) { + resourceToMethods.put(resourceName, new ArrayList<>()); + } + resourceToMethods.get(resourceName).add(nextMethodBinding); + } + } + for (BaseMethodBinding nextMethodBinding : + getServerConfiguration(theRequestDetails).getServerBindings()) { + String resourceName = ""; + if (resourceToMethods.containsKey(resourceName) == false) { + resourceToMethods.put(resourceName, new ArrayList<>()); + } + resourceToMethods.get(resourceName).add(nextMethodBinding); + } + return resourceToMethods; + } - private DateTimeType conformanceDate(RequestDetails theRequestDetails) { - IPrimitiveType buildDate = getServerConfiguration(theRequestDetails).getConformanceDate(); - if (buildDate != null && buildDate.getValue() != null) { - try { - return new DateTimeType(buildDate.getValueAsString()); - } catch (DataFormatException e) { - // fall through - } - } - return DateTimeType.now(); - } + private DateTimeType conformanceDate(RequestDetails theRequestDetails) { + IPrimitiveType buildDate = + getServerConfiguration(theRequestDetails).getConformanceDate(); + if (buildDate != null && buildDate.getValue() != null) { + try { + return new DateTimeType(buildDate.getValueAsString()); + } catch (DataFormatException e) { + // fall through + } + } + return DateTimeType.now(); + } - private String createNamedQueryName(SearchMethodBinding searchMethodBinding) { - StringBuilder retVal = new StringBuilder(); - if (searchMethodBinding.getResourceName() != null) { - retVal.append(searchMethodBinding.getResourceName()); - } - retVal.append("-query-"); - retVal.append(searchMethodBinding.getQueryName()); - - return retVal.toString(); - } - - private String createOperationName(OperationMethodBinding theMethodBinding) { - StringBuilder retVal = new StringBuilder(); - if (theMethodBinding.getResourceName() != null) { - retVal.append(theMethodBinding.getResourceName()); - } + private String createNamedQueryName(SearchMethodBinding searchMethodBinding) { + StringBuilder retVal = new StringBuilder(); + if (searchMethodBinding.getResourceName() != null) { + retVal.append(searchMethodBinding.getResourceName()); + } + retVal.append("-query-"); + retVal.append(searchMethodBinding.getQueryName()); - retVal.append('-'); - if (theMethodBinding.isCanOperateAtInstanceLevel()) { - retVal.append('i'); - } - if (theMethodBinding.isCanOperateAtServerLevel()) { - retVal.append('s'); - } - retVal.append('-'); + return retVal.toString(); + } - // Exclude the leading $ - retVal.append(theMethodBinding.getName(), 1, theMethodBinding.getName().length()); + private String createOperationName(OperationMethodBinding theMethodBinding) { + StringBuilder retVal = new StringBuilder(); + if 
(theMethodBinding.getResourceName() != null) { + retVal.append(theMethodBinding.getResourceName()); + } - return retVal.toString(); - } + retVal.append('-'); + if (theMethodBinding.isCanOperateAtInstanceLevel()) { + retVal.append('i'); + } + if (theMethodBinding.isCanOperateAtServerLevel()) { + retVal.append('s'); + } + retVal.append('-'); - /** - * Gets the value of the "publisher" that will be placed in the generated conformance statement. As this is a mandatory element, the value should not be null (although this is not enforced). The - * value defaults to "Not provided" but may be set to null, which will cause this element to be omitted. - */ - public String getPublisher() { - return myPublisher; - } + // Exclude the leading $ + retVal.append(theMethodBinding.getName(), 1, theMethodBinding.getName().length()); - /** - * Sets the value of the "publisher" that will be placed in the generated conformance statement. As this is a mandatory element, the value should not be null (although this is not enforced). The - * value defaults to "Not provided" but may be set to null, which will cause this element to be omitted. - */ - public void setPublisher(String thePublisher) { - myPublisher = thePublisher; - } + return retVal.toString(); + } + /** + * Gets the value of the "publisher" that will be placed in the generated conformance statement. As this is a mandatory element, the value should not be null (although this is not enforced). The + * value defaults to "Not provided" but may be set to null, which will cause this element to be omitted. + */ + public String getPublisher() { + return myPublisher; + } - @SuppressWarnings("EnumSwitchStatementWhichMissesCases") - @Override - @Metadata - public CapabilityStatement getServerConformance(HttpServletRequest theRequest, RequestDetails theRequestDetails) { - RestfulServerConfiguration serverConfiguration = getServerConfiguration(theRequestDetails); - Bindings bindings = serverConfiguration.provideBindings(); + /** + * Sets the value of the "publisher" that will be placed in the generated conformance statement. As this is a mandatory element, the value should not be null (although this is not enforced). The + * value defaults to "Not provided" but may be set to null, which will cause this element to be omitted. + */ + public void setPublisher(String thePublisher) { + myPublisher = thePublisher; + } - CapabilityStatement retVal = new CapabilityStatement(); + @SuppressWarnings("EnumSwitchStatementWhichMissesCases") + @Override + @Metadata + public CapabilityStatement getServerConformance(HttpServletRequest theRequest, RequestDetails theRequestDetails) { + RestfulServerConfiguration serverConfiguration = getServerConfiguration(theRequestDetails); + Bindings bindings = serverConfiguration.provideBindings(); - retVal.setPublisher(myPublisher); - retVal.setDateElement(conformanceDate(theRequestDetails)); - retVal.setFhirVersion(FhirVersionEnum.DSTU3.getFhirVersionString()); - retVal.setAcceptUnknown(UnknownContentCode.EXTENSIONS); // TODO: make this configurable - this is a fairly big - // effort since the parser - // needs to be modified to actually allow it + CapabilityStatement retVal = new CapabilityStatement(); - ServletContext servletContext = (ServletContext) (theRequest == null ? 
null : theRequest.getAttribute(RestfulServer.SERVLET_CONTEXT_ATTRIBUTE)); - String serverBase = serverConfiguration.getServerAddressStrategy().determineServerBase(servletContext, theRequest); - retVal - .getImplementation() - .setUrl(serverBase) - .setDescription(serverConfiguration.getImplementationDescription()); + retVal.setPublisher(myPublisher); + retVal.setDateElement(conformanceDate(theRequestDetails)); + retVal.setFhirVersion(FhirVersionEnum.DSTU3.getFhirVersionString()); + retVal.setAcceptUnknown(UnknownContentCode.EXTENSIONS); // TODO: make this configurable - this is a fairly big + // effort since the parser + // needs to be modified to actually allow it - retVal.setKind(CapabilityStatementKind.INSTANCE); - retVal.getSoftware().setName(serverConfiguration.getServerName()); - retVal.getSoftware().setVersion(serverConfiguration.getServerVersion()); - retVal.addFormat(Constants.CT_FHIR_XML_NEW); - retVal.addFormat(Constants.CT_FHIR_JSON_NEW); - retVal.addFormat(Constants.FORMAT_JSON); - retVal.addFormat(Constants.FORMAT_XML); - retVal.setStatus(PublicationStatus.ACTIVE); + ServletContext servletContext = (ServletContext) + (theRequest == null ? null : theRequest.getAttribute(RestfulServer.SERVLET_CONTEXT_ATTRIBUTE)); + String serverBase = + serverConfiguration.getServerAddressStrategy().determineServerBase(servletContext, theRequest); + retVal.getImplementation() + .setUrl(serverBase) + .setDescription(serverConfiguration.getImplementationDescription()); - CapabilityStatementRestComponent rest = retVal.addRest(); - rest.setMode(RestfulCapabilityMode.SERVER); + retVal.setKind(CapabilityStatementKind.INSTANCE); + retVal.getSoftware().setName(serverConfiguration.getServerName()); + retVal.getSoftware().setVersion(serverConfiguration.getServerVersion()); + retVal.addFormat(Constants.CT_FHIR_XML_NEW); + retVal.addFormat(Constants.CT_FHIR_JSON_NEW); + retVal.addFormat(Constants.FORMAT_JSON); + retVal.addFormat(Constants.FORMAT_XML); + retVal.setStatus(PublicationStatus.ACTIVE); - Set systemOps = new HashSet<>(); - Set operationNames = new HashSet<>(); + CapabilityStatementRestComponent rest = retVal.addRest(); + rest.setMode(RestfulCapabilityMode.SERVER); - Map> resourceToMethods = collectMethodBindings(theRequestDetails); - Map> resourceNameToSharedSupertype = serverConfiguration.getNameToSharedSupertype(); - for (Entry> nextEntry : resourceToMethods.entrySet()) { + Set systemOps = new HashSet<>(); + Set operationNames = new HashSet<>(); - if (nextEntry.getKey().isEmpty() == false) { - Set resourceOps = new HashSet<>(); - CapabilityStatementRestResourceComponent resource = rest.addResource(); - String resourceName = nextEntry.getKey(); - - RuntimeResourceDefinition def; - FhirContext context = serverConfiguration.getFhirContext(); - if (resourceNameToSharedSupertype.containsKey(resourceName)) { - def = context.getResourceDefinition(resourceNameToSharedSupertype.get(resourceName)); - } else { - def = context.getResourceDefinition(resourceName); - } - resource.getTypeElement().setValue(def.getName()); - resource.getProfile().setReference((def.getResourceProfile(serverBase))); + Map> resourceToMethods = collectMethodBindings(theRequestDetails); + Map> resourceNameToSharedSupertype = + serverConfiguration.getNameToSharedSupertype(); + for (Entry> nextEntry : resourceToMethods.entrySet()) { - TreeSet includes = new TreeSet<>(); + if (nextEntry.getKey().isEmpty() == false) { + Set resourceOps = new HashSet<>(); + CapabilityStatementRestResourceComponent resource = rest.addResource(); + String 
resourceName = nextEntry.getKey(); - // Map nameToSearchParam = new HashMap(); - for (BaseMethodBinding nextMethodBinding : nextEntry.getValue()) { - if (nextMethodBinding.getRestOperationType() != null) { - String resOpCode = nextMethodBinding.getRestOperationType().getCode(); - if (resOpCode != null) { - TypeRestfulInteraction resOp; - try { - resOp = TypeRestfulInteraction.fromCode(resOpCode); - } catch (Exception e) { - resOp = null; - } - if (resOp != null) { - if (resourceOps.contains(resOp) == false) { - resourceOps.add(resOp); - resource.addInteraction().setCode(resOp); - } - if ("vread".equals(resOpCode)) { - // vread implies read - resOp = TypeRestfulInteraction.READ; - if (resourceOps.contains(resOp) == false) { - resourceOps.add(resOp); - resource.addInteraction().setCode(resOp); - } - } + RuntimeResourceDefinition def; + FhirContext context = serverConfiguration.getFhirContext(); + if (resourceNameToSharedSupertype.containsKey(resourceName)) { + def = context.getResourceDefinition(resourceNameToSharedSupertype.get(resourceName)); + } else { + def = context.getResourceDefinition(resourceName); + } + resource.getTypeElement().setValue(def.getName()); + resource.getProfile().setReference((def.getResourceProfile(serverBase))); - if (nextMethodBinding.isSupportsConditional()) { - switch (resOp) { - case CREATE: - resource.setConditionalCreate(true); - break; - case DELETE: - if (nextMethodBinding.isSupportsConditionalMultiple()) { - resource.setConditionalDelete(ConditionalDeleteStatus.MULTIPLE); - } else { - resource.setConditionalDelete(ConditionalDeleteStatus.SINGLE); - } - break; - case UPDATE: - resource.setConditionalUpdate(true); - break; - default: - break; - } - } - } - } - } + TreeSet includes = new TreeSet<>(); - checkBindingForSystemOps(rest, systemOps, nextMethodBinding); + // Map nameToSearchParam = new HashMap(); + for (BaseMethodBinding nextMethodBinding : nextEntry.getValue()) { + if (nextMethodBinding.getRestOperationType() != null) { + String resOpCode = + nextMethodBinding.getRestOperationType().getCode(); + if (resOpCode != null) { + TypeRestfulInteraction resOp; + try { + resOp = TypeRestfulInteraction.fromCode(resOpCode); + } catch (Exception e) { + resOp = null; + } + if (resOp != null) { + if (resourceOps.contains(resOp) == false) { + resourceOps.add(resOp); + resource.addInteraction().setCode(resOp); + } + if ("vread".equals(resOpCode)) { + // vread implies read + resOp = TypeRestfulInteraction.READ; + if (resourceOps.contains(resOp) == false) { + resourceOps.add(resOp); + resource.addInteraction().setCode(resOp); + } + } - if (nextMethodBinding instanceof SearchMethodBinding) { - SearchMethodBinding methodBinding = (SearchMethodBinding) nextMethodBinding; - if (methodBinding.getQueryName() != null) { - String queryName = bindings.getNamedSearchMethodBindingToName().get(methodBinding); - if (operationNames.add(queryName)) { - rest.addOperation().setName(methodBinding.getQueryName()).setDefinition(new Reference("OperationDefinition/" + queryName)); - } - } else { - handleNamelessSearchMethodBinding(rest, resource, resourceName, def, includes, (SearchMethodBinding) nextMethodBinding, theRequestDetails); - } - } else if (nextMethodBinding instanceof OperationMethodBinding) { - OperationMethodBinding methodBinding = (OperationMethodBinding) nextMethodBinding; - String opName = bindings.getOperationBindingToId().get(methodBinding); - if (operationNames.add(opName)) { - // Only add each operation (by name) once - 
rest.addOperation().setName(methodBinding.getName().substring(1)).setDefinition(new Reference("OperationDefinition/" + opName)); - } - } + if (nextMethodBinding.isSupportsConditional()) { + switch (resOp) { + case CREATE: + resource.setConditionalCreate(true); + break; + case DELETE: + if (nextMethodBinding.isSupportsConditionalMultiple()) { + resource.setConditionalDelete(ConditionalDeleteStatus.MULTIPLE); + } else { + resource.setConditionalDelete(ConditionalDeleteStatus.SINGLE); + } + break; + case UPDATE: + resource.setConditionalUpdate(true); + break; + default: + break; + } + } + } + } + } - resource.getInteraction().sort(new Comparator() { - @Override - public int compare(ResourceInteractionComponent theO1, ResourceInteractionComponent theO2) { - TypeRestfulInteraction o1 = theO1.getCode(); - TypeRestfulInteraction o2 = theO2.getCode(); - if (o1 == null && o2 == null) { - return 0; - } - if (o1 == null) { - return 1; - } - if (o2 == null) { - return -1; - } - return o1.ordinal() - o2.ordinal(); - } - }); + checkBindingForSystemOps(rest, systemOps, nextMethodBinding); - } + if (nextMethodBinding instanceof SearchMethodBinding) { + SearchMethodBinding methodBinding = (SearchMethodBinding) nextMethodBinding; + if (methodBinding.getQueryName() != null) { + String queryName = + bindings.getNamedSearchMethodBindingToName().get(methodBinding); + if (operationNames.add(queryName)) { + rest.addOperation() + .setName(methodBinding.getQueryName()) + .setDefinition(new Reference("OperationDefinition/" + queryName)); + } + } else { + handleNamelessSearchMethodBinding( + rest, + resource, + resourceName, + def, + includes, + (SearchMethodBinding) nextMethodBinding, + theRequestDetails); + } + } else if (nextMethodBinding instanceof OperationMethodBinding) { + OperationMethodBinding methodBinding = (OperationMethodBinding) nextMethodBinding; + String opName = bindings.getOperationBindingToId().get(methodBinding); + if (operationNames.add(opName)) { + // Only add each operation (by name) once + rest.addOperation() + .setName(methodBinding.getName().substring(1)) + .setDefinition(new Reference("OperationDefinition/" + opName)); + } + } - for (String nextInclude : includes) { - resource.addSearchInclude(nextInclude); - } - } else { - for (BaseMethodBinding nextMethodBinding : nextEntry.getValue()) { - checkBindingForSystemOps(rest, systemOps, nextMethodBinding); - if (nextMethodBinding instanceof OperationMethodBinding) { - OperationMethodBinding methodBinding = (OperationMethodBinding) nextMethodBinding; - String opName = bindings.getOperationBindingToId().get(methodBinding); - if (operationNames.add(opName)) { - ourLog.debug("Found bound operation: {}", opName); - rest.addOperation().setName(methodBinding.getName().substring(1)).setDefinition(new Reference("OperationDefinition/" + opName)); - } - } - } - } - } + resource.getInteraction().sort(new Comparator() { + @Override + public int compare(ResourceInteractionComponent theO1, ResourceInteractionComponent theO2) { + TypeRestfulInteraction o1 = theO1.getCode(); + TypeRestfulInteraction o2 = theO2.getCode(); + if (o1 == null && o2 == null) { + return 0; + } + if (o1 == null) { + return 1; + } + if (o2 == null) { + return -1; + } + return o1.ordinal() - o2.ordinal(); + } + }); + } - return retVal; - } + for (String nextInclude : includes) { + resource.addSearchInclude(nextInclude); + } + } else { + for (BaseMethodBinding nextMethodBinding : nextEntry.getValue()) { + checkBindingForSystemOps(rest, systemOps, nextMethodBinding); + if 
(nextMethodBinding instanceof OperationMethodBinding) { + OperationMethodBinding methodBinding = (OperationMethodBinding) nextMethodBinding; + String opName = bindings.getOperationBindingToId().get(methodBinding); + if (operationNames.add(opName)) { + ourLog.debug("Found bound operation: {}", opName); + rest.addOperation() + .setName(methodBinding.getName().substring(1)) + .setDefinition(new Reference("OperationDefinition/" + opName)); + } + } + } + } + } + return retVal; + } + private void handleNamelessSearchMethodBinding( + CapabilityStatementRestComponent rest, + CapabilityStatementRestResourceComponent resource, + String resourceName, + RuntimeResourceDefinition def, + TreeSet includes, + SearchMethodBinding searchMethodBinding, + RequestDetails theRequestDetails) { + includes.addAll(searchMethodBinding.getIncludes()); - private void handleNamelessSearchMethodBinding(CapabilityStatementRestComponent rest, CapabilityStatementRestResourceComponent resource, String resourceName, RuntimeResourceDefinition def, TreeSet includes, - SearchMethodBinding searchMethodBinding, RequestDetails theRequestDetails) { - includes.addAll(searchMethodBinding.getIncludes()); + List params = searchMethodBinding.getParameters(); + List searchParameters = new ArrayList<>(); + for (IParameter nextParameter : params) { + if ((nextParameter instanceof SearchParameter)) { + searchParameters.add((SearchParameter) nextParameter); + } + } + sortSearchParameters(searchParameters); + if (!searchParameters.isEmpty()) { + // boolean allOptional = searchParameters.get(0).isRequired() == false; + // + // OperationDefinition query = null; + // if (!allOptional) { + // RestOperation operation = rest.addOperation(); + // query = new OperationDefinition(); + // operation.setDefinition(new ResourceReferenceDt(query)); + // query.getDescriptionElement().setValue(searchMethodBinding.getDescription()); + // query.addUndeclaredExtension(false, ExtensionConstants.QUERY_RETURN_TYPE, new CodeDt(resourceName)); + // for (String nextInclude : searchMethodBinding.getIncludes()) { + // query.addUndeclaredExtension(false, ExtensionConstants.QUERY_ALLOWED_INCLUDE, new StringDt(nextInclude)); + // } + // } - List params = searchMethodBinding.getParameters(); - List searchParameters = new ArrayList<>(); - for (IParameter nextParameter : params) { - if ((nextParameter instanceof SearchParameter)) { - searchParameters.add((SearchParameter) nextParameter); - } - } - sortSearchParameters(searchParameters); - if (!searchParameters.isEmpty()) { - // boolean allOptional = searchParameters.get(0).isRequired() == false; - // - // OperationDefinition query = null; - // if (!allOptional) { - // RestOperation operation = rest.addOperation(); - // query = new OperationDefinition(); - // operation.setDefinition(new ResourceReferenceDt(query)); - // query.getDescriptionElement().setValue(searchMethodBinding.getDescription()); - // query.addUndeclaredExtension(false, ExtensionConstants.QUERY_RETURN_TYPE, new CodeDt(resourceName)); - // for (String nextInclude : searchMethodBinding.getIncludes()) { - // query.addUndeclaredExtension(false, ExtensionConstants.QUERY_ALLOWED_INCLUDE, new StringDt(nextInclude)); - // } - // } + for (SearchParameter nextParameter : searchParameters) { - for (SearchParameter nextParameter : searchParameters) { + String nextParamName = nextParameter.getName(); - String nextParamName = nextParameter.getName(); + String chain = null; + String nextParamUnchainedName = nextParamName; + if (nextParamName.contains(".")) { + chain = 
nextParamName.substring(nextParamName.indexOf('.') + 1); + nextParamUnchainedName = nextParamName.substring(0, nextParamName.indexOf('.')); + } - String chain = null; - String nextParamUnchainedName = nextParamName; - if (nextParamName.contains(".")) { - chain = nextParamName.substring(nextParamName.indexOf('.') + 1); - nextParamUnchainedName = nextParamName.substring(0, nextParamName.indexOf('.')); - } + String nextParamDescription = nextParameter.getDescription(); - String nextParamDescription = nextParameter.getDescription(); + /* + * If the parameter has no description, default to the one from the resource + */ + if (StringUtils.isBlank(nextParamDescription)) { + RuntimeSearchParam paramDef = def.getSearchParam(nextParamUnchainedName); + if (paramDef != null) { + nextParamDescription = paramDef.getDescription(); + } + } - /* - * If the parameter has no description, default to the one from the resource - */ - if (StringUtils.isBlank(nextParamDescription)) { - RuntimeSearchParam paramDef = def.getSearchParam(nextParamUnchainedName); - if (paramDef != null) { - nextParamDescription = paramDef.getDescription(); - } - } + CapabilityStatementRestResourceSearchParamComponent param = resource.addSearchParam(); + param.setName(nextParamUnchainedName); - CapabilityStatementRestResourceSearchParamComponent param = resource.addSearchParam(); - param.setName(nextParamUnchainedName); + // if (StringUtils.isNotBlank(chain)) { + // param.addChain(chain); + // } + // + // if (nextParameter.getParamType() == RestSearchParameterTypeEnum.REFERENCE) { + // for (String nextWhitelist : new TreeSet(nextParameter.getQualifierWhitelist())) { + // if (nextWhitelist.startsWith(".")) { + // param.addChain(nextWhitelist.substring(1)); + // } + // } + // } -// if (StringUtils.isNotBlank(chain)) { -// param.addChain(chain); -// } -// -// if (nextParameter.getParamType() == RestSearchParameterTypeEnum.REFERENCE) { -// for (String nextWhitelist : new TreeSet(nextParameter.getQualifierWhitelist())) { -// if (nextWhitelist.startsWith(".")) { -// param.addChain(nextWhitelist.substring(1)); -// } -// } -// } + param.setDocumentation(nextParamDescription); + if (nextParameter.getParamType() != null) { + param.getTypeElement() + .setValueAsString(nextParameter.getParamType().getCode()); + } + for (Class nextTarget : nextParameter.getDeclaredTypes()) { + RuntimeResourceDefinition targetDef = getServerConfiguration(theRequestDetails) + .getFhirContext() + .getResourceDefinition(nextTarget); + if (targetDef != null) { + ResourceType code; + try { + code = ResourceType.fromCode(targetDef.getName()); + } catch (FHIRException e) { + code = null; + } + // if (code != null) { + // param.addTarget(targetDef.getName()); + // } + } + } + } + } + } - param.setDocumentation(nextParamDescription); - if (nextParameter.getParamType() != null) { - param.getTypeElement().setValueAsString(nextParameter.getParamType().getCode()); - } - for (Class nextTarget : nextParameter.getDeclaredTypes()) { - RuntimeResourceDefinition targetDef = getServerConfiguration(theRequestDetails).getFhirContext().getResourceDefinition(nextTarget); - if (targetDef != null) { - ResourceType code; - try { - code = ResourceType.fromCode(targetDef.getName()); - } catch (FHIRException e) { - code = null; - } -// if (code != null) { -// param.addTarget(targetDef.getName()); -// } - } - } - } - } - } + @Read(type = OperationDefinition.class) + public OperationDefinition readOperationDefinition(@IdParam IdType theId, RequestDetails theRequestDetails) { + if (theId == null || 
theId.hasIdPart() == false) { + throw new ResourceNotFoundException(Msg.code(628) + theId); + } + RestfulServerConfiguration serverConfiguration = getServerConfiguration(theRequestDetails); + Bindings bindings = serverConfiguration.provideBindings(); - - @Read(type = OperationDefinition.class) - public OperationDefinition readOperationDefinition(@IdParam IdType theId, RequestDetails theRequestDetails) { - if (theId == null || theId.hasIdPart() == false) { - throw new ResourceNotFoundException(Msg.code(628) + theId); - } + List operationBindings = + bindings.getOperationIdToBindings().get(theId.getIdPart()); + if (operationBindings != null && !operationBindings.isEmpty()) { + return readOperationDefinitionForOperation(operationBindings); + } + List searchBindings = + bindings.getSearchNameToBindings().get(theId.getIdPart()); + if (searchBindings != null && !searchBindings.isEmpty()) { + return readOperationDefinitionForNamedSearch(searchBindings); + } + throw new ResourceNotFoundException(Msg.code(2257) + theId); + } - RestfulServerConfiguration serverConfiguration = getServerConfiguration(theRequestDetails); - Bindings bindings = serverConfiguration.provideBindings(); + private OperationDefinition readOperationDefinitionForNamedSearch(List bindings) { + OperationDefinition op = new OperationDefinition(); + op.setStatus(PublicationStatus.ACTIVE); + op.setKind(OperationKind.QUERY); + op.setIdempotent(true); - List operationBindings = bindings.getOperationIdToBindings().get(theId.getIdPart()); - if (operationBindings != null && !operationBindings.isEmpty()) { - return readOperationDefinitionForOperation(operationBindings); - } - List searchBindings = bindings.getSearchNameToBindings().get(theId.getIdPart()); - if (searchBindings != null && !searchBindings.isEmpty()) { - return readOperationDefinitionForNamedSearch(searchBindings); - } - throw new ResourceNotFoundException(Msg.code(2257) + theId); - } - - private OperationDefinition readOperationDefinitionForNamedSearch(List bindings) { - OperationDefinition op = new OperationDefinition(); - op.setStatus(PublicationStatus.ACTIVE); - op.setKind(OperationKind.QUERY); - op.setIdempotent(true); + op.setSystem(false); + op.setType(false); + op.setInstance(false); - op.setSystem(false); - op.setType(false); - op.setInstance(false); + Set inParams = new HashSet<>(); - Set inParams = new HashSet<>(); + for (SearchMethodBinding binding : bindings) { + if (isNotBlank(binding.getDescription())) { + op.setDescription(binding.getDescription()); + } + if (isBlank(binding.getResourceProviderResourceName())) { + op.setSystem(true); + } else { + op.setType(true); + op.addResourceElement().setValue(binding.getResourceProviderResourceName()); + } + op.setCode(binding.getQueryName()); + for (IParameter nextParamUntyped : binding.getParameters()) { + if (nextParamUntyped instanceof SearchParameter) { + SearchParameter nextParam = (SearchParameter) nextParamUntyped; + if (!inParams.add(nextParam.getName())) { + continue; + } + OperationDefinitionParameterComponent param = op.addParameter(); + param.setUse(OperationParameterUse.IN); + param.setType("string"); + param.getSearchTypeElement() + .setValueAsString(nextParam.getParamType().getCode()); + param.setMin(nextParam.isRequired() ? 
1 : 0); + param.setMax("1"); + param.setName(nextParam.getName()); + } + } - for (SearchMethodBinding binding : bindings) { - if (isNotBlank(binding.getDescription())) { - op.setDescription(binding.getDescription()); - } - if (isBlank(binding.getResourceProviderResourceName())) { - op.setSystem(true); - } else { - op.setType(true); - op.addResourceElement().setValue(binding.getResourceProviderResourceName()); - } - op.setCode(binding.getQueryName()); - for (IParameter nextParamUntyped : binding.getParameters()) { - if (nextParamUntyped instanceof SearchParameter) { - SearchParameter nextParam = (SearchParameter) nextParamUntyped; - if (!inParams.add(nextParam.getName())) { - continue; - } - OperationDefinitionParameterComponent param = op.addParameter(); - param.setUse(OperationParameterUse.IN); - param.setType("string"); - param.getSearchTypeElement().setValueAsString(nextParam.getParamType().getCode()); - param.setMin(nextParam.isRequired() ? 1 : 0); - param.setMax("1"); - param.setName(nextParam.getName()); - } - } + if (isBlank(op.getName())) { + if (isNotBlank(op.getDescription())) { + op.setName(op.getDescription()); + } else { + op.setName(op.getCode()); + } + } + } - if (isBlank(op.getName())) { - if (isNotBlank(op.getDescription())) { - op.setName(op.getDescription()); - } else { - op.setName(op.getCode()); - } - } - } + return op; + } - return op; - } - - private OperationDefinition readOperationDefinitionForOperation(List bindings) { - OperationDefinition op = new OperationDefinition(); - op.setStatus(PublicationStatus.ACTIVE); - op.setKind(OperationKind.OPERATION); - op.setIdempotent(true); + private OperationDefinition readOperationDefinitionForOperation(List bindings) { + OperationDefinition op = new OperationDefinition(); + op.setStatus(PublicationStatus.ACTIVE); + op.setKind(OperationKind.OPERATION); + op.setIdempotent(true); - // We reset these to true below if we find a binding that can handle the level - op.setSystem(false); - op.setType(false); - op.setInstance(false); + // We reset these to true below if we find a binding that can handle the level + op.setSystem(false); + op.setType(false); + op.setInstance(false); - Set inParams = new HashSet<>(); - Set outParams = new HashSet<>(); + Set inParams = new HashSet<>(); + Set outParams = new HashSet<>(); - for (OperationMethodBinding sharedDescription : bindings) { - if (isNotBlank(sharedDescription.getDescription())) { - op.setDescription(sharedDescription.getDescription()); - } - if (sharedDescription.isCanOperateAtInstanceLevel()) { - op.setInstance(true); - } - if (sharedDescription.isCanOperateAtServerLevel()) { - op.setSystem(true); - } - if (sharedDescription.isCanOperateAtTypeLevel()) { - op.setType(true); - } - if (!sharedDescription.isIdempotent()) { - op.setIdempotent(sharedDescription.isIdempotent()); - } - op.setCode(sharedDescription.getName().substring(1)); - if (sharedDescription.isCanOperateAtInstanceLevel()) { - op.setInstance(sharedDescription.isCanOperateAtInstanceLevel()); - } - if (sharedDescription.isCanOperateAtServerLevel()) { - op.setSystem(sharedDescription.isCanOperateAtServerLevel()); - } - if (isNotBlank(sharedDescription.getResourceName())) { - op.addResourceElement().setValue(sharedDescription.getResourceName()); - } + for (OperationMethodBinding sharedDescription : bindings) { + if (isNotBlank(sharedDescription.getDescription())) { + op.setDescription(sharedDescription.getDescription()); + } + if (sharedDescription.isCanOperateAtInstanceLevel()) { + op.setInstance(true); + } + if 
(sharedDescription.isCanOperateAtServerLevel()) { + op.setSystem(true); + } + if (sharedDescription.isCanOperateAtTypeLevel()) { + op.setType(true); + } + if (!sharedDescription.isIdempotent()) { + op.setIdempotent(sharedDescription.isIdempotent()); + } + op.setCode(sharedDescription.getName().substring(1)); + if (sharedDescription.isCanOperateAtInstanceLevel()) { + op.setInstance(sharedDescription.isCanOperateAtInstanceLevel()); + } + if (sharedDescription.isCanOperateAtServerLevel()) { + op.setSystem(sharedDescription.isCanOperateAtServerLevel()); + } + if (isNotBlank(sharedDescription.getResourceName())) { + op.addResourceElement().setValue(sharedDescription.getResourceName()); + } - for (IParameter nextParamUntyped : sharedDescription.getParameters()) { - if (nextParamUntyped instanceof OperationParameter) { - OperationParameter nextParam = (OperationParameter) nextParamUntyped; - if (!inParams.add(nextParam.getName())) { - continue; - } - OperationDefinitionParameterComponent param = op.addParameter(); - param.setUse(OperationParameterUse.IN); - if (nextParam.getParamType() != null) { - param.setType(nextParam.getParamType()); - } - if (nextParam.getSearchParamType() != null) { - param.getSearchTypeElement().setValueAsString(nextParam.getSearchParamType()); - } - param.setMin(nextParam.getMin()); - param.setMax(nextParam.getMax() == -1 ? "*" : Integer.toString(nextParam.getMax())); - param.setName(nextParam.getName()); - } - } + for (IParameter nextParamUntyped : sharedDescription.getParameters()) { + if (nextParamUntyped instanceof OperationParameter) { + OperationParameter nextParam = (OperationParameter) nextParamUntyped; + if (!inParams.add(nextParam.getName())) { + continue; + } + OperationDefinitionParameterComponent param = op.addParameter(); + param.setUse(OperationParameterUse.IN); + if (nextParam.getParamType() != null) { + param.setType(nextParam.getParamType()); + } + if (nextParam.getSearchParamType() != null) { + param.getSearchTypeElement().setValueAsString(nextParam.getSearchParamType()); + } + param.setMin(nextParam.getMin()); + param.setMax(nextParam.getMax() == -1 ? "*" : Integer.toString(nextParam.getMax())); + param.setName(nextParam.getName()); + } + } - for (ReturnType nextParam : sharedDescription.getReturnParams()) { - if (!outParams.add(nextParam.getName())) { - continue; - } - OperationDefinitionParameterComponent param = op.addParameter(); - param.setUse(OperationParameterUse.OUT); - if (nextParam.getType() != null) { - param.setType(nextParam.getType()); - } - param.setMin(nextParam.getMin()); - param.setMax(nextParam.getMax() == -1 ? "*" : Integer.toString(nextParam.getMax())); - param.setName(nextParam.getName()); - } - } + for (ReturnType nextParam : sharedDescription.getReturnParams()) { + if (!outParams.add(nextParam.getName())) { + continue; + } + OperationDefinitionParameterComponent param = op.addParameter(); + param.setUse(OperationParameterUse.OUT); + if (nextParam.getType() != null) { + param.setType(nextParam.getType()); + } + param.setMin(nextParam.getMin()); + param.setMax(nextParam.getMax() == -1 ? 
"*" : Integer.toString(nextParam.getMax())); + param.setName(nextParam.getName()); + } + } - if (isBlank(op.getName())) { - if (isNotBlank(op.getDescription())) { - op.setName(op.getDescription()); - } else { - op.setName(op.getCode()); - } - } + if (isBlank(op.getName())) { + if (isNotBlank(op.getDescription())) { + op.setName(op.getDescription()); + } else { + op.setName(op.getCode()); + } + } - if (op.hasSystem() == false) { - op.setSystem(false); - } - if (op.hasInstance() == false) { - op.setInstance(false); - } + if (op.hasSystem() == false) { + op.setSystem(false); + } + if (op.hasInstance() == false) { + op.setInstance(false); + } - return op; - } + return op; + } - /** - * Sets the cache property (default is true). If set to true, the same response will be returned for each invocation. - *
- * See the class documentation for an important note if you are extending this class - *
    - * - * @deprecated Since 4.0.0 this doesn't do anything - */ - public ServerCapabilityStatementProvider setCache(boolean theCache) { - return this; - } + /** + * Sets the cache property (default is true). If set to true, the same response will be returned for each invocation. + *
+ * See the class documentation for an important note if you are extending this class + *
    + * + * @deprecated Since 4.0.0 this doesn't do anything + */ + public ServerCapabilityStatementProvider setCache(boolean theCache) { + return this; + } - @Override - public void setRestfulServer(RestfulServer theRestfulServer) { - // ignore - } + @Override + public void setRestfulServer(RestfulServer theRestfulServer) { + // ignore + } - private void sortSearchParameters(List searchParameters) { - Collections.sort(searchParameters, new Comparator() { - @Override - public int compare(SearchParameter theO1, SearchParameter theO2) { - if (theO1.isRequired() == theO2.isRequired()) { - return theO1.getName().compareTo(theO2.getName()); - } - if (theO1.isRequired()) { - return -1; - } - return 1; - } - }); - } + private void sortSearchParameters(List searchParameters) { + Collections.sort(searchParameters, new Comparator() { + @Override + public int compare(SearchParameter theO1, SearchParameter theO2) { + if (theO1.isRequired() == theO2.isRequired()) { + return theO1.getName().compareTo(theO2.getName()); + } + if (theO1.isRequired()) { + return -1; + } + return 1; + } + }); + } } diff --git a/hapi-fhir-structures-hl7org-dstu2/src/main/java/ca/uhn/fhir/rest/server/provider/dstu2hl7org/Dstu2Hl7OrgBundleFactory.java b/hapi-fhir-structures-hl7org-dstu2/src/main/java/ca/uhn/fhir/rest/server/provider/dstu2hl7org/Dstu2Hl7OrgBundleFactory.java index 04d2846090c..172f3bc3162 100644 --- a/hapi-fhir-structures-hl7org-dstu2/src/main/java/ca/uhn/fhir/rest/server/provider/dstu2hl7org/Dstu2Hl7OrgBundleFactory.java +++ b/hapi-fhir-structures-hl7org-dstu2/src/main/java/ca/uhn/fhir/rest/server/provider/dstu2hl7org/Dstu2Hl7OrgBundleFactory.java @@ -44,251 +44,259 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.UUID; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; public class Dstu2Hl7OrgBundleFactory implements IVersionSpecificBundleFactory { - private String myBase; - private Bundle myBundle; - private FhirContext myContext; + private String myBase; + private Bundle myBundle; + private FhirContext myContext; - public Dstu2Hl7OrgBundleFactory(FhirContext theContext) { - myContext = theContext; - } + public Dstu2Hl7OrgBundleFactory(FhirContext theContext) { + myContext = theContext; + } - @Override - public void addResourcesToBundle(List theResult, BundleTypeEnum theBundleType, String theServerBase, - BundleInclusionRule theBundleInclusionRule, Set theIncludes) { - ensureBundle(); + @Override + public void addResourcesToBundle( + List theResult, + BundleTypeEnum theBundleType, + String theServerBase, + BundleInclusionRule theBundleInclusionRule, + Set theIncludes) { + ensureBundle(); - List includedResources = new ArrayList(); - Set addedResourceIds = new HashSet(); + List includedResources = new ArrayList(); + Set addedResourceIds = new HashSet(); - for (IBaseResource next : theResult) { - if (next.getIdElement().isEmpty() == false) { - addedResourceIds.add(next.getIdElement()); - } - } + for (IBaseResource next : theResult) { + if (next.getIdElement().isEmpty() == false) { + addedResourceIds.add(next.getIdElement()); + } + } - for (IBaseResource next : theResult) { + for (IBaseResource next : theResult) { - Set 
containedIds = new HashSet(); + Set containedIds = new HashSet(); - if (next instanceof DomainResource) { - for (Resource nextContained : ((DomainResource) next).getContained()) { - if (isNotBlank(nextContained.getId())) { - containedIds.add(nextContained.getId()); - } - } - } + if (next instanceof DomainResource) { + for (Resource nextContained : ((DomainResource) next).getContained()) { + if (isNotBlank(nextContained.getId())) { + containedIds.add(nextContained.getId()); + } + } + } - List references = myContext.newTerser().getAllResourceReferences(next); - do { - List addedResourcesThisPass = new ArrayList(); + List references = myContext.newTerser().getAllResourceReferences(next); + do { + List addedResourcesThisPass = new ArrayList(); - for (ResourceReferenceInfo nextRefInfo : references) { - if (theBundleInclusionRule != null && !theBundleInclusionRule.shouldIncludeReferencedResource(nextRefInfo, theIncludes)) { - continue; - } + for (ResourceReferenceInfo nextRefInfo : references) { + if (theBundleInclusionRule != null + && !theBundleInclusionRule.shouldIncludeReferencedResource(nextRefInfo, theIncludes)) { + continue; + } - IAnyResource nextRes = (IAnyResource) nextRefInfo.getResourceReference().getResource(); - if (nextRes != null) { - if (nextRes.getIdElement().hasIdPart()) { - if (containedIds.contains(nextRes.getIdElement().getValue())) { - // Don't add contained IDs as top level resources - continue; - } + IAnyResource nextRes = + (IAnyResource) nextRefInfo.getResourceReference().getResource(); + if (nextRes != null) { + if (nextRes.getIdElement().hasIdPart()) { + if (containedIds.contains(nextRes.getIdElement().getValue())) { + // Don't add contained IDs as top level resources + continue; + } - IIdType id = nextRes.getIdElement(); - if (id.hasResourceType() == false) { - String resName = myContext.getResourceType(nextRes); - id = id.withResourceType(resName); - } + IIdType id = nextRes.getIdElement(); + if (id.hasResourceType() == false) { + String resName = myContext.getResourceType(nextRes); + id = id.withResourceType(resName); + } - if (!addedResourceIds.contains(id)) { - addedResourceIds.add(id); - addedResourcesThisPass.add(nextRes); - } + if (!addedResourceIds.contains(id)) { + addedResourceIds.add(id); + addedResourcesThisPass.add(nextRes); + } + } + } + } - } - } - } + includedResources.addAll(addedResourcesThisPass); - includedResources.addAll(addedResourcesThisPass); + // Linked resources may themselves have linked resources + references = new ArrayList<>(); + for (IAnyResource iResource : addedResourcesThisPass) { + List newReferences = + myContext.newTerser().getAllResourceReferences(iResource); + references.addAll(newReferences); + } + } while (references.isEmpty() == false); - // Linked resources may themselves have linked resources - references = new ArrayList<>(); - for (IAnyResource iResource : addedResourcesThisPass) { - List newReferences = myContext.newTerser().getAllResourceReferences(iResource); - references.addAll(newReferences); - } - } while (references.isEmpty() == false); + BundleEntryComponent entry = myBundle.addEntry().setResource((Resource) next); + Resource nextAsResource = (Resource) next; + IIdType id = populateBundleEntryFullUrl(next, entry); + BundleEntryTransactionMethodEnum httpVerb = + ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.get(nextAsResource); + if (httpVerb != null) { + entry.getRequest().getMethodElement().setValueAsString(httpVerb.name()); + if (id != null) { + entry.getRequest().setUrl(id.getValue()); + } + } + if 
(BundleEntryTransactionMethodEnum.DELETE.equals(httpVerb)) { + entry.setResource(null); + } - BundleEntryComponent entry = myBundle.addEntry().setResource((Resource) next); - Resource nextAsResource = (Resource) next; - IIdType id = populateBundleEntryFullUrl(next, entry); - BundleEntryTransactionMethodEnum httpVerb = ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.get(nextAsResource); - if (httpVerb != null) { - entry.getRequest().getMethodElement().setValueAsString(httpVerb.name()); - if (id != null) { - entry.getRequest().setUrl(id.getValue()); - } - } - if (BundleEntryTransactionMethodEnum.DELETE.equals(httpVerb)) { - entry.setResource(null); - } + // Populate Bundle.entry.response + if (theBundleType != null) { + switch (theBundleType) { + case BATCH_RESPONSE: + case TRANSACTION_RESPONSE: + if (id != null) { + if ("1".equals(id.getVersionIdPart())) { + entry.getResponse().setStatus("201 Created"); + } else if (isNotBlank(id.getVersionIdPart())) { + entry.getResponse().setStatus("200 OK"); + } + if (isNotBlank(id.getVersionIdPart())) { + entry.getResponse().setEtag(RestfulServerUtils.createEtag(id.getVersionIdPart())); + } + } + break; + } + } - // Populate Bundle.entry.response - if (theBundleType != null) { - switch (theBundleType) { - case BATCH_RESPONSE: - case TRANSACTION_RESPONSE: - if (id != null) { - if ("1".equals(id.getVersionIdPart())) { - entry.getResponse().setStatus("201 Created"); - } else if (isNotBlank(id.getVersionIdPart())) { - entry.getResponse().setStatus("200 OK"); - } - if (isNotBlank(id.getVersionIdPart())) { - entry.getResponse().setEtag(RestfulServerUtils.createEtag(id.getVersionIdPart())); - } - } - break; - } - } + // Populate Bundle.entry.search + BundleEntrySearchModeEnum searchMode = ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.get(nextAsResource); + if (searchMode != null) { + entry.getSearch().getModeElement().setValueAsString(searchMode.getCode()); + } + } - // Populate Bundle.entry.search - BundleEntrySearchModeEnum searchMode = ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.get(nextAsResource); - if (searchMode != null) { - entry.getSearch().getModeElement().setValueAsString(searchMode.getCode()); - } + /* + * Actually add the resources to the bundle + */ + for (IAnyResource next : includedResources) { + BundleEntryComponent entry = myBundle.addEntry(); + entry.setResource((Resource) next).getSearch().setMode(SearchEntryMode.INCLUDE); + populateBundleEntryFullUrl(next, entry); + } + } - } + @Override + public void addRootPropertiesToBundle( + String theId, + @Nonnull BundleLinks theBundleLinks, + Integer theTotalResults, + IPrimitiveType theLastUpdated) { + ensureBundle(); - /* - * Actually add the resources to the bundle - */ - for (IAnyResource next : includedResources) { - BundleEntryComponent entry = myBundle.addEntry(); - entry.setResource((Resource) next).getSearch().setMode(SearchEntryMode.INCLUDE); - populateBundleEntryFullUrl(next, entry); - } + if (myBundle.getIdElement().isEmpty()) { + myBundle.setId(theId); + } - } + if (myBundle.getMeta().getLastUpdated() == null && theLastUpdated != null) { + InstantType instantType = new InstantType(); + instantType.setValueAsString(theLastUpdated.getValueAsString()); + myBundle.getMeta().setLastUpdatedElement(instantType); + } - @Override - public void addRootPropertiesToBundle(String theId, @Nonnull BundleLinks theBundleLinks, - Integer theTotalResults, IPrimitiveType theLastUpdated) { - ensureBundle(); + if (!hasLink(Constants.LINK_SELF, myBundle) && isNotBlank(theBundleLinks.getSelf())) { + 
myBundle.addLink().setRelation(Constants.LINK_SELF).setUrl(theBundleLinks.getSelf()); + } + if (!hasLink(Constants.LINK_NEXT, myBundle) && isNotBlank(theBundleLinks.getNext())) { + myBundle.addLink().setRelation(Constants.LINK_NEXT).setUrl(theBundleLinks.getNext()); + } + if (!hasLink(Constants.LINK_SELF, myBundle) && isNotBlank(theBundleLinks.getPrev())) { + myBundle.addLink().setRelation(Constants.LINK_PREVIOUS).setUrl(theBundleLinks.getPrev()); + } - if (myBundle.getIdElement().isEmpty()) { - myBundle.setId(theId); - } + myBase = theBundleLinks.serverBase; - if (myBundle.getMeta().getLastUpdated() == null && theLastUpdated != null) { - InstantType instantType = new InstantType(); - instantType.setValueAsString(theLastUpdated.getValueAsString()); - myBundle.getMeta().setLastUpdatedElement(instantType); - } + addTotalResultsToBundle(theTotalResults, theBundleLinks.bundleType); + } - if (!hasLink(Constants.LINK_SELF, myBundle) && isNotBlank(theBundleLinks.getSelf())) { - myBundle.addLink().setRelation(Constants.LINK_SELF).setUrl(theBundleLinks.getSelf()); - } - if (!hasLink(Constants.LINK_NEXT, myBundle) && isNotBlank(theBundleLinks.getNext())) { - myBundle.addLink().setRelation(Constants.LINK_NEXT).setUrl(theBundleLinks.getNext()); - } - if (!hasLink(Constants.LINK_SELF, myBundle) && isNotBlank(theBundleLinks.getPrev())) { - myBundle.addLink().setRelation(Constants.LINK_PREVIOUS).setUrl(theBundleLinks.getPrev()); - } + @Override + public void addTotalResultsToBundle(Integer theTotalResults, BundleTypeEnum theBundleType) { + ensureBundle(); - myBase = theBundleLinks.serverBase; + if (isBlank(myBundle.getId())) { + myBundle.setId(UUID.randomUUID().toString()); + } - addTotalResultsToBundle(theTotalResults, theBundleLinks.bundleType); - } + if (myBundle.getTypeElement().isEmpty() && theBundleType != null) { + myBundle.getTypeElement().setValueAsString(theBundleType.getCode()); + } - @Override - public void addTotalResultsToBundle(Integer theTotalResults, BundleTypeEnum theBundleType) { - ensureBundle(); + if (myBundle.getTotalElement().isEmpty() && theTotalResults != null) { + myBundle.getTotalElement().setValue(theTotalResults); + } + } - if (isBlank(myBundle.getId())) { - myBundle.setId(UUID.randomUUID().toString()); - } + private void ensureBundle() { + if (myBundle == null) { + myBundle = new Bundle(); + } + } - if (myBundle.getTypeElement().isEmpty() && theBundleType != null) { - myBundle.getTypeElement().setValueAsString(theBundleType.getCode()); - } + @Override + public IBaseResource getResourceBundle() { + return myBundle; + } - if (myBundle.getTotalElement().isEmpty() && theTotalResults != null) { - myBundle.getTotalElement().setValue(theTotalResults); - } - } + private boolean hasLink(String theLinkType, Bundle theBundle) { + for (BundleLinkComponent next : theBundle.getLink()) { + if (theLinkType.equals(next.getRelation())) { + return true; + } + } + return false; + } - private void ensureBundle() { - if (myBundle == null) { - myBundle = new Bundle(); - } - } + @Override + public void initializeWithBundleResource(IBaseResource theBundle) { + myBundle = (Bundle) theBundle; + } - @Override - public IBaseResource getResourceBundle() { - return myBundle; - } - - private boolean hasLink(String theLinkType, Bundle theBundle) { - for (BundleLinkComponent next : theBundle.getLink()) { - if (theLinkType.equals(next.getRelation())) { - return true; - } - } - return false; - } - - @Override - public void initializeWithBundleResource(IBaseResource theBundle) { - myBundle = (Bundle) theBundle; 
- } - - private IIdType populateBundleEntryFullUrl(IBaseResource next, BundleEntryComponent entry) { - IIdType idElement = null; - if (next.getIdElement().hasBaseUrl()) { - idElement = next.getIdElement(); - entry.setFullUrl(idElement.toVersionless().getValue()); - } else { - if (isNotBlank(myBase) && next.getIdElement().hasIdPart()) { - idElement = next.getIdElement(); - idElement = idElement.withServerBase(myBase, myContext.getResourceType(next)); - entry.setFullUrl(idElement.toVersionless().getValue()); - } - } - return idElement; - } - - @Override - public List toListOfResources() { - ArrayList retVal = new ArrayList(); - for (BundleEntryComponent next : myBundle.getEntry()) { - if (next.getResource() != null) { - retVal.add(next.getResource()); - } else if (next.getResponse().getLocationElement().isEmpty() == false) { - IdType id = new IdType(next.getResponse().getLocation()); - String resourceType = id.getResourceType(); - if (isNotBlank(resourceType)) { - IBaseResource res = (IBaseResource) myContext.getResourceDefinition(resourceType).newInstance(); - res.setId(id); - retVal.add(res); - } - } - } - return retVal; - } + private IIdType populateBundleEntryFullUrl(IBaseResource next, BundleEntryComponent entry) { + IIdType idElement = null; + if (next.getIdElement().hasBaseUrl()) { + idElement = next.getIdElement(); + entry.setFullUrl(idElement.toVersionless().getValue()); + } else { + if (isNotBlank(myBase) && next.getIdElement().hasIdPart()) { + idElement = next.getIdElement(); + idElement = idElement.withServerBase(myBase, myContext.getResourceType(next)); + entry.setFullUrl(idElement.toVersionless().getValue()); + } + } + return idElement; + } + @Override + public List toListOfResources() { + ArrayList retVal = new ArrayList(); + for (BundleEntryComponent next : myBundle.getEntry()) { + if (next.getResource() != null) { + retVal.add(next.getResource()); + } else if (next.getResponse().getLocationElement().isEmpty() == false) { + IdType id = new IdType(next.getResponse().getLocation()); + String resourceType = id.getResourceType(); + if (isNotBlank(resourceType)) { + IBaseResource res = (IBaseResource) + myContext.getResourceDefinition(resourceType).newInstance(); + res.setId(id); + retVal.add(res); + } + } + } + return retVal; + } } diff --git a/hapi-fhir-structures-hl7org-dstu2/src/main/java/org/hl7/fhir/dstu2/hapi/ctx/FhirDstu2Hl7Org.java b/hapi-fhir-structures-hl7org-dstu2/src/main/java/org/hl7/fhir/dstu2/hapi/ctx/FhirDstu2Hl7Org.java index 56f3b3e9320..47a11be000f 100644 --- a/hapi-fhir-structures-hl7org-dstu2/src/main/java/org/hl7/fhir/dstu2/hapi/ctx/FhirDstu2Hl7Org.java +++ b/hapi-fhir-structures-hl7org-dstu2/src/main/java/org/hl7/fhir/dstu2/hapi/ctx/FhirDstu2Hl7Org.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -19,12 +19,12 @@ */ package org.hl7.fhir.dstu2.hapi.ctx; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.fhirpath.IFhirPath; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.IFhirVersion; import ca.uhn.fhir.model.base.composite.BaseCodingDt; import ca.uhn.fhir.rest.api.IVersionSpecificBundleFactory; @@ -45,85 +45,85 @@ import java.util.Date; public class FhirDstu2Hl7Org implements IFhirVersion { - private String myId; + private String myId; - @Override - public IFhirPath createFhirPathExecutor(FhirContext theFhirContext) { - throw new UnsupportedOperationException(Msg.code(586) + "FluentPath is not supported in DSTU2 contexts"); - } + @Override + public IFhirPath createFhirPathExecutor(FhirContext theFhirContext) { + throw new UnsupportedOperationException(Msg.code(586) + "FluentPath is not supported in DSTU2 contexts"); + } - @Override - public StructureDefinition generateProfile(RuntimeResourceDefinition theRuntimeResourceDefinition, String theServerBase) { - StructureDefinition retVal = new StructureDefinition(); + @Override + public StructureDefinition generateProfile( + RuntimeResourceDefinition theRuntimeResourceDefinition, String theServerBase) { + StructureDefinition retVal = new StructureDefinition(); - RuntimeResourceDefinition def = theRuntimeResourceDefinition; + RuntimeResourceDefinition def = theRuntimeResourceDefinition; - myId = def.getId(); - if (StringUtils.isBlank(myId)) { - myId = theRuntimeResourceDefinition.getName().toLowerCase(); - } + myId = def.getId(); + if (StringUtils.isBlank(myId)) { + myId = theRuntimeResourceDefinition.getName().toLowerCase(); + } - retVal.setId(myId); - return retVal; - } + retVal.setId(myId); + return retVal; + } - @SuppressWarnings("rawtypes") - @Override - public Class getContainedType() { - return ArrayList.class; - } + @SuppressWarnings("rawtypes") + @Override + public Class getContainedType() { + return ArrayList.class; + } - @Override - public InputStream getFhirVersionPropertiesFile() { - String path = "/org/hl7/fhir/instance/model/fhirversion.properties"; - InputStream str = FhirDstu2Hl7Org.class.getResourceAsStream(path); - if (str == null) { - str = FhirDstu2Hl7Org.class.getResourceAsStream(path.substring(1)); - } - if (str == null) { - throw new ConfigurationException(Msg.code(587) + "Can not find model property file on classpath: " + path); - } - return str; - } + @Override + public InputStream getFhirVersionPropertiesFile() { + String path = "/org/hl7/fhir/instance/model/fhirversion.properties"; + InputStream str = FhirDstu2Hl7Org.class.getResourceAsStream(path); + if (str == null) { + str = FhirDstu2Hl7Org.class.getResourceAsStream(path.substring(1)); + } + if (str == null) { + throw new ConfigurationException(Msg.code(587) + "Can not find model property file on classpath: " + path); + } + return str; + } - @Override - public IPrimitiveType getLastUpdated(IBaseResource theResource) { - return ((Resource) theResource).getMeta().getLastUpdatedElement(); - } + @Override + public IPrimitiveType getLastUpdated(IBaseResource theResource) { + return ((Resource) theResource).getMeta().getLastUpdatedElement(); + } - @Override - public String getPathToSchemaDefinitions() { - return "/org/hl7/fhir/instance/model/schema"; - } + @Override + public String getPathToSchemaDefinitions() { + return "/org/hl7/fhir/instance/model/schema"; + } - 
@Override - public Class getResourceReferenceType() { - return Reference.class; - } + @Override + public Class getResourceReferenceType() { + return Reference.class; + } - @Override - public Object getServerVersion() { - return ReflectionUtil.newInstanceOfFhirServerType("org.hl7.fhir.dstu2.hapi.ctx.FhirServerDstu2Hl7Org2"); - } + @Override + public Object getServerVersion() { + return ReflectionUtil.newInstanceOfFhirServerType("org.hl7.fhir.dstu2.hapi.ctx.FhirServerDstu2Hl7Org2"); + } - @Override - public FhirVersionEnum getVersion() { - return FhirVersionEnum.DSTU2_HL7ORG; - } + @Override + public FhirVersionEnum getVersion() { + return FhirVersionEnum.DSTU2_HL7ORG; + } - @Override - public IVersionSpecificBundleFactory newBundleFactory(FhirContext theContext) { - return new Dstu2Hl7OrgBundleFactory(theContext); - } + @Override + public IVersionSpecificBundleFactory newBundleFactory(FhirContext theContext) { + return new Dstu2Hl7OrgBundleFactory(theContext); + } - @Override - public BaseCodingDt newCodingDt() { - throw new UnsupportedOperationException(Msg.code(588)); - } - - @Override - public IIdType newIdType() { - return new IdType(); - } + @Override + public BaseCodingDt newCodingDt() { + throw new UnsupportedOperationException(Msg.code(588)); + } + @Override + public IIdType newIdType() { + return new IdType(); + } } diff --git a/hapi-fhir-structures-hl7org-dstu2/src/main/java/org/hl7/fhir/dstu2/hapi/ctx/FhirServerDstu2Hl7Org2.java b/hapi-fhir-structures-hl7org-dstu2/src/main/java/org/hl7/fhir/dstu2/hapi/ctx/FhirServerDstu2Hl7Org2.java index bc83877b948..9190158f287 100644 --- a/hapi-fhir-structures-hl7org-dstu2/src/main/java/org/hl7/fhir/dstu2/hapi/ctx/FhirServerDstu2Hl7Org2.java +++ b/hapi-fhir-structures-hl7org-dstu2/src/main/java/org/hl7/fhir/dstu2/hapi/ctx/FhirServerDstu2Hl7Org2.java @@ -6,9 +6,8 @@ import org.hl7.fhir.dstu2.hapi.rest.server.ServerConformanceProvider; public class FhirServerDstu2Hl7Org2 implements IFhirVersionServer { - @Override - public ServerConformanceProvider createServerConformanceProvider(RestfulServer theServer) { - return new ServerConformanceProvider(theServer); - } - + @Override + public ServerConformanceProvider createServerConformanceProvider(RestfulServer theServer) { + return new ServerConformanceProvider(theServer); + } } diff --git a/hapi-fhir-structures-hl7org-dstu2/src/main/java/org/hl7/fhir/dstu2/hapi/rest/server/ServerConformanceProvider.java b/hapi-fhir-structures-hl7org-dstu2/src/main/java/org/hl7/fhir/dstu2/hapi/rest/server/ServerConformanceProvider.java index 676f1fdbceb..4a6aaae718a 100644 --- a/hapi-fhir-structures-hl7org-dstu2/src/main/java/org/hl7/fhir/dstu2/hapi/rest/server/ServerConformanceProvider.java +++ b/hapi-fhir-structures-hl7org-dstu2/src/main/java/org/hl7/fhir/dstu2/hapi/rest/server/ServerConformanceProvider.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -19,10 +19,10 @@ */ package org.hl7.fhir.dstu2.hapi.rest.server; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.context.RuntimeSearchParam; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.Metadata; @@ -56,17 +56,17 @@ import org.hl7.fhir.dstu2.model.OperationDefinition.OperationParameterUse; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; +import java.util.Map.Entry; +import java.util.*; import javax.servlet.ServletContext; import javax.servlet.http.HttpServletRequest; -import java.util.*; -import java.util.Map.Entry; import static org.apache.commons.lang3.StringUtils.isNotBlank; /** * Server FHIR Provider which serves the conformance statement for a RESTful * server implementation - * + * *
* Note: This class is safe to extend, but it is important to note that the same * instance of {@link Conformance} is always returned unless @@ -76,444 +76,465 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; * constructor. *
    */ -public class ServerConformanceProvider extends BaseServerCapabilityStatementProvider implements IServerConformanceProvider { +public class ServerConformanceProvider extends BaseServerCapabilityStatementProvider + implements IServerConformanceProvider { private String myPublisher = "Not provided"; - /** - * No-arg constructor and seetter so that the ServerConfirmanceProvider can be Spring-wired with the RestfulService avoiding the potential reference cycle that would happen. - */ - public ServerConformanceProvider() { - super(); - } + /** + * No-arg constructor and seetter so that the ServerConfirmanceProvider can be Spring-wired with the RestfulService avoiding the potential reference cycle that would happen. + */ + public ServerConformanceProvider() { + super(); + } - /** - * Constructor - * - * @deprecated Use no-args constructor instead. Deprecated in 4.0.0 - */ - @Deprecated - public ServerConformanceProvider(RestfulServer theRestfulServer) { - this(); - } + /** + * Constructor + * + * @deprecated Use no-args constructor instead. Deprecated in 4.0.0 + */ + @Deprecated + public ServerConformanceProvider(RestfulServer theRestfulServer) { + this(); + } - /** - * Constructor - */ - public ServerConformanceProvider(RestfulServerConfiguration theServerConfiguration) { - super(theServerConfiguration); - } + /** + * Constructor + */ + public ServerConformanceProvider(RestfulServerConfiguration theServerConfiguration) { + super(theServerConfiguration); + } - @Override - public void setRestfulServer (RestfulServer theRestfulServer) { - // ignore - } + @Override + public void setRestfulServer(RestfulServer theRestfulServer) { + // ignore + } - private void checkBindingForSystemOps(ConformanceRestComponent rest, Set systemOps, - BaseMethodBinding nextMethodBinding) { - if (nextMethodBinding.getRestOperationType() != null) { - String sysOpCode = nextMethodBinding.getRestOperationType().getCode(); - if (sysOpCode != null) { - SystemRestfulInteraction sysOp; - try { - sysOp = SystemRestfulInteraction.fromCode(sysOpCode); - } catch (Exception e) { - sysOp = null; - } - if (sysOp == null) { - return; - } - if (systemOps.contains(sysOp) == false) { - systemOps.add(sysOp); - rest.addInteraction().setCode(sysOp); - } - } - } - } + private void checkBindingForSystemOps( + ConformanceRestComponent rest, + Set systemOps, + BaseMethodBinding nextMethodBinding) { + if (nextMethodBinding.getRestOperationType() != null) { + String sysOpCode = nextMethodBinding.getRestOperationType().getCode(); + if (sysOpCode != null) { + SystemRestfulInteraction sysOp; + try { + sysOp = SystemRestfulInteraction.fromCode(sysOpCode); + } catch (Exception e) { + sysOp = null; + } + if (sysOp == null) { + return; + } + if (systemOps.contains(sysOp) == false) { + systemOps.add(sysOp); + rest.addInteraction().setCode(sysOp); + } + } + } + } - private Map> collectMethodBindings(RequestDetails theRequestDetails) { - Map> resourceToMethods = new TreeMap>(); - for (ResourceBinding next : getServerConfiguration(theRequestDetails).getResourceBindings()) { - String resourceName = next.getResourceName(); - for (BaseMethodBinding nextMethodBinding : next.getMethodBindings()) { - if (resourceToMethods.containsKey(resourceName) == false) { - resourceToMethods.put(resourceName, new ArrayList()); - } - resourceToMethods.get(resourceName).add(nextMethodBinding); - } - } - for (BaseMethodBinding nextMethodBinding : getServerConfiguration(theRequestDetails).getServerBindings()) { - String resourceName = ""; - if 
(resourceToMethods.containsKey(resourceName) == false) { - resourceToMethods.put(resourceName, new ArrayList<>()); - } - resourceToMethods.get(resourceName).add(nextMethodBinding); - } - return resourceToMethods; - } + private Map> collectMethodBindings(RequestDetails theRequestDetails) { + Map> resourceToMethods = new TreeMap>(); + for (ResourceBinding next : getServerConfiguration(theRequestDetails).getResourceBindings()) { + String resourceName = next.getResourceName(); + for (BaseMethodBinding nextMethodBinding : next.getMethodBindings()) { + if (resourceToMethods.containsKey(resourceName) == false) { + resourceToMethods.put(resourceName, new ArrayList()); + } + resourceToMethods.get(resourceName).add(nextMethodBinding); + } + } + for (BaseMethodBinding nextMethodBinding : + getServerConfiguration(theRequestDetails).getServerBindings()) { + String resourceName = ""; + if (resourceToMethods.containsKey(resourceName) == false) { + resourceToMethods.put(resourceName, new ArrayList<>()); + } + resourceToMethods.get(resourceName).add(nextMethodBinding); + } + return resourceToMethods; + } - private String createOperationName(OperationMethodBinding theMethodBinding) { - return theMethodBinding.getName().substring(1); - } + private String createOperationName(OperationMethodBinding theMethodBinding) { + return theMethodBinding.getName().substring(1); + } - /** - * Gets the value of the "publisher" that will be placed in the generated - * conformance statement. As this is a mandatory element, the value should not - * be null (although this is not enforced). The value defaults to - * "Not provided" but may be set to null, which will cause this element to be - * omitted. - */ - public String getPublisher() { - return myPublisher; - } + /** + * Gets the value of the "publisher" that will be placed in the generated + * conformance statement. As this is a mandatory element, the value should not + * be null (although this is not enforced). The value defaults to + * "Not provided" but may be set to null, which will cause this element to be + * omitted. 
+ */ + public String getPublisher() { + return myPublisher; + } - @SuppressWarnings("EnumSwitchStatementWhichMissesCases") - @Override - @Metadata - public Conformance getServerConformance(HttpServletRequest theRequest, RequestDetails theRequestDetails) { - RestfulServerConfiguration serverConfiguration = getServerConfiguration(theRequestDetails); - Bindings bindings = serverConfiguration.provideBindings(); + @SuppressWarnings("EnumSwitchStatementWhichMissesCases") + @Override + @Metadata + public Conformance getServerConformance(HttpServletRequest theRequest, RequestDetails theRequestDetails) { + RestfulServerConfiguration serverConfiguration = getServerConfiguration(theRequestDetails); + Bindings bindings = serverConfiguration.provideBindings(); - Conformance retVal = new Conformance(); + Conformance retVal = new Conformance(); - retVal.setPublisher(myPublisher); - retVal.setDateElement(conformanceDate(theRequestDetails)); - retVal.setFhirVersion(FhirVersionEnum.DSTU2_HL7ORG.getFhirVersionString()); - retVal.setAcceptUnknown(UnknownContentCode.EXTENSIONS); // TODO: make this configurable - this is a fairly big effort since the parser - // needs to be modified to actually allow it + retVal.setPublisher(myPublisher); + retVal.setDateElement(conformanceDate(theRequestDetails)); + retVal.setFhirVersion(FhirVersionEnum.DSTU2_HL7ORG.getFhirVersionString()); + retVal.setAcceptUnknown( + UnknownContentCode + .EXTENSIONS); // TODO: make this configurable - this is a fairly big effort since the parser + // needs to be modified to actually allow it - retVal.getImplementation().setDescription(serverConfiguration.getImplementationDescription()); - retVal.setKind(ConformanceStatementKind.INSTANCE); - retVal.getSoftware().setName(serverConfiguration.getServerName()); - retVal.getSoftware().setVersion(serverConfiguration.getServerVersion()); - retVal.addFormat(Constants.CT_FHIR_XML); - retVal.addFormat(Constants.CT_FHIR_JSON); + retVal.getImplementation().setDescription(serverConfiguration.getImplementationDescription()); + retVal.setKind(ConformanceStatementKind.INSTANCE); + retVal.getSoftware().setName(serverConfiguration.getServerName()); + retVal.getSoftware().setVersion(serverConfiguration.getServerVersion()); + retVal.addFormat(Constants.CT_FHIR_XML); + retVal.addFormat(Constants.CT_FHIR_JSON); - ConformanceRestComponent rest = retVal.addRest(); - rest.setMode(RestfulConformanceMode.SERVER); + ConformanceRestComponent rest = retVal.addRest(); + rest.setMode(RestfulConformanceMode.SERVER); - Set systemOps = new HashSet<>(); - Set operationNames = new HashSet<>(); + Set systemOps = new HashSet<>(); + Set operationNames = new HashSet<>(); - Map> resourceToMethods = collectMethodBindings(theRequestDetails); - for (Entry> nextEntry : resourceToMethods.entrySet()) { + Map> resourceToMethods = collectMethodBindings(theRequestDetails); + for (Entry> nextEntry : resourceToMethods.entrySet()) { - if (nextEntry.getKey().isEmpty() == false) { - Set resourceOps = new HashSet<>(); - ConformanceRestResourceComponent resource = rest.addResource(); - String resourceName = nextEntry.getKey(); - RuntimeResourceDefinition def = serverConfiguration.getFhirContext().getResourceDefinition(resourceName); - resource.getTypeElement().setValue(def.getName()); - ServletContext servletContext = (ServletContext) (theRequest == null ? 
null : theRequest.getAttribute(RestfulServer.SERVLET_CONTEXT_ATTRIBUTE)); - String serverBase = serverConfiguration.getServerAddressStrategy().determineServerBase(servletContext, theRequest); - resource.getProfile().setReference((def.getResourceProfile(serverBase))); + if (nextEntry.getKey().isEmpty() == false) { + Set resourceOps = new HashSet<>(); + ConformanceRestResourceComponent resource = rest.addResource(); + String resourceName = nextEntry.getKey(); + RuntimeResourceDefinition def = + serverConfiguration.getFhirContext().getResourceDefinition(resourceName); + resource.getTypeElement().setValue(def.getName()); + ServletContext servletContext = (ServletContext) + (theRequest == null ? null : theRequest.getAttribute(RestfulServer.SERVLET_CONTEXT_ATTRIBUTE)); + String serverBase = + serverConfiguration.getServerAddressStrategy().determineServerBase(servletContext, theRequest); + resource.getProfile().setReference((def.getResourceProfile(serverBase))); - TreeSet includes = new TreeSet<>(); + TreeSet includes = new TreeSet<>(); - // Map nameToSearchParam = - // new HashMap(); - for (BaseMethodBinding nextMethodBinding : nextEntry.getValue()) { - if (nextMethodBinding.getRestOperationType() != null) { - String resOpCode = nextMethodBinding.getRestOperationType().getCode(); - if (resOpCode != null) { - TypeRestfulInteraction resOp; - try { - resOp = TypeRestfulInteraction.fromCode(resOpCode); - } catch (Exception e) { - resOp = null; - } - if (resOp != null) { - if (resourceOps.contains(resOp) == false) { - resourceOps.add(resOp); - resource.addInteraction().setCode(resOp); - } - if ("vread".equals(resOpCode)) { - // vread implies read - resOp = TypeRestfulInteraction.READ; - if (resourceOps.contains(resOp) == false) { - resourceOps.add(resOp); - resource.addInteraction().setCode(resOp); - } - } + // Map nameToSearchParam = + // new HashMap(); + for (BaseMethodBinding nextMethodBinding : nextEntry.getValue()) { + if (nextMethodBinding.getRestOperationType() != null) { + String resOpCode = + nextMethodBinding.getRestOperationType().getCode(); + if (resOpCode != null) { + TypeRestfulInteraction resOp; + try { + resOp = TypeRestfulInteraction.fromCode(resOpCode); + } catch (Exception e) { + resOp = null; + } + if (resOp != null) { + if (resourceOps.contains(resOp) == false) { + resourceOps.add(resOp); + resource.addInteraction().setCode(resOp); + } + if ("vread".equals(resOpCode)) { + // vread implies read + resOp = TypeRestfulInteraction.READ; + if (resourceOps.contains(resOp) == false) { + resourceOps.add(resOp); + resource.addInteraction().setCode(resOp); + } + } - if (nextMethodBinding.isSupportsConditional()) { - switch (resOp) { - case CREATE: - resource.setConditionalCreate(true); - break; - case DELETE: - resource.setConditionalDelete(ConditionalDeleteStatus.SINGLE); - break; - case UPDATE: - resource.setConditionalUpdate(true); - break; - default: - break; - } - } - } - } - } + if (nextMethodBinding.isSupportsConditional()) { + switch (resOp) { + case CREATE: + resource.setConditionalCreate(true); + break; + case DELETE: + resource.setConditionalDelete(ConditionalDeleteStatus.SINGLE); + break; + case UPDATE: + resource.setConditionalUpdate(true); + break; + default: + break; + } + } + } + } + } - checkBindingForSystemOps(rest, systemOps, nextMethodBinding); + checkBindingForSystemOps(rest, systemOps, nextMethodBinding); - if (nextMethodBinding instanceof SearchMethodBinding) { - handleSearchMethodBinding(resource, def, includes, - (SearchMethodBinding) nextMethodBinding, 
theRequestDetails); - } else if (nextMethodBinding instanceof OperationMethodBinding) { - OperationMethodBinding methodBinding = (OperationMethodBinding) nextMethodBinding; - String opName = bindings.getOperationBindingToId().get(methodBinding); - if (operationNames.add(opName)) { - // Only add each operation (by name) once - rest.addOperation().setName(methodBinding.getName()).getDefinition() - .setReference("OperationDefinition/" + opName); - } - } + if (nextMethodBinding instanceof SearchMethodBinding) { + handleSearchMethodBinding( + resource, def, includes, (SearchMethodBinding) nextMethodBinding, theRequestDetails); + } else if (nextMethodBinding instanceof OperationMethodBinding) { + OperationMethodBinding methodBinding = (OperationMethodBinding) nextMethodBinding; + String opName = bindings.getOperationBindingToId().get(methodBinding); + if (operationNames.add(opName)) { + // Only add each operation (by name) once + rest.addOperation() + .setName(methodBinding.getName()) + .getDefinition() + .setReference("OperationDefinition/" + opName); + } + } - Collections.sort(resource.getInteraction(), new Comparator() { - @Override - public int compare(ResourceInteractionComponent theO1, ResourceInteractionComponent theO2) { - TypeRestfulInteraction o1 = theO1.getCode(); - TypeRestfulInteraction o2 = theO2.getCode(); - if (o1 == null && o2 == null) { - return 0; - } - if (o1 == null) { - return 1; - } - if (o2 == null) { - return -1; - } - return o1.ordinal() - o2.ordinal(); - } - }); + Collections.sort(resource.getInteraction(), new Comparator() { + @Override + public int compare(ResourceInteractionComponent theO1, ResourceInteractionComponent theO2) { + TypeRestfulInteraction o1 = theO1.getCode(); + TypeRestfulInteraction o2 = theO2.getCode(); + if (o1 == null && o2 == null) { + return 0; + } + if (o1 == null) { + return 1; + } + if (o2 == null) { + return -1; + } + return o1.ordinal() - o2.ordinal(); + } + }); + } - } + for (String nextInclude : includes) { + resource.addSearchInclude(nextInclude); + } + } else { + for (BaseMethodBinding nextMethodBinding : nextEntry.getValue()) { + checkBindingForSystemOps(rest, systemOps, nextMethodBinding); + if (nextMethodBinding instanceof OperationMethodBinding) { + OperationMethodBinding methodBinding = (OperationMethodBinding) nextMethodBinding; + String opName = bindings.getOperationBindingToId().get(methodBinding); + if (operationNames.add(opName)) { + rest.addOperation() + .setName(methodBinding.getName()) + .getDefinition() + .setReference("OperationDefinition/" + opName); + } + } + } + } + } - for (String nextInclude : includes) { - resource.addSearchInclude(nextInclude); - } - } else { - for (BaseMethodBinding nextMethodBinding : nextEntry.getValue()) { - checkBindingForSystemOps(rest, systemOps, nextMethodBinding); - if (nextMethodBinding instanceof OperationMethodBinding) { - OperationMethodBinding methodBinding = (OperationMethodBinding) nextMethodBinding; - String opName = bindings.getOperationBindingToId().get(methodBinding); - if (operationNames.add(opName)) { - rest.addOperation().setName(methodBinding.getName()).getDefinition() - .setReference("OperationDefinition/" + opName); - } - } - } - } - } + return retVal; + } - return retVal; - } + private DateTimeType conformanceDate(RequestDetails theRequestDetails) { + IPrimitiveType buildDate = + getServerConfiguration(theRequestDetails).getConformanceDate(); + if (buildDate != null && buildDate.getValue() != null) { + try { + return new DateTimeType(buildDate.getValueAsString()); + } 
catch (DataFormatException e) { + // fall through + } + } + return DateTimeType.now(); + } - private DateTimeType conformanceDate(RequestDetails theRequestDetails) { - IPrimitiveType buildDate = getServerConfiguration(theRequestDetails).getConformanceDate(); - if (buildDate != null && buildDate.getValue() != null) { - try { - return new DateTimeType(buildDate.getValueAsString()); - } catch (DataFormatException e) { - // fall through - } - } - return DateTimeType.now(); - } + private void handleSearchMethodBinding( + ConformanceRestResourceComponent resource, + RuntimeResourceDefinition def, + TreeSet includes, + SearchMethodBinding searchMethodBinding, + RequestDetails theRequestDetails) { + includes.addAll(searchMethodBinding.getIncludes()); - private void handleSearchMethodBinding(ConformanceRestResourceComponent resource, - RuntimeResourceDefinition def, TreeSet includes, - SearchMethodBinding searchMethodBinding, RequestDetails theRequestDetails) { - includes.addAll(searchMethodBinding.getIncludes()); + List params = searchMethodBinding.getParameters(); + List searchParameters = new ArrayList<>(); + for (IParameter nextParameter : params) { + if ((nextParameter instanceof SearchParameter)) { + searchParameters.add((SearchParameter) nextParameter); + } + } + sortSearchParameters(searchParameters); + if (!searchParameters.isEmpty()) { + // boolean allOptional = searchParameters.get(0).isRequired() == false; + // + // OperationDefinition query = null; + // if (!allOptional) { + // RestOperation operation = rest.addOperation(); + // query = new OperationDefinition(); + // operation.setDefinition(new ResourceReferenceDt(query)); + // query.getDescriptionElement().setValue(searchMethodBinding.getDescription()); + // query.addUndeclaredExtension(false, + // ExtensionConstants.QUERY_RETURN_TYPE, new CodeDt(resourceName)); + // for (String nextInclude : searchMethodBinding.getIncludes()) { + // query.addUndeclaredExtension(false, + // ExtensionConstants.QUERY_ALLOWED_INCLUDE, new StringDt(nextInclude)); + // } + // } - List params = searchMethodBinding.getParameters(); - List searchParameters = new ArrayList<>(); - for (IParameter nextParameter : params) { - if ((nextParameter instanceof SearchParameter)) { - searchParameters.add((SearchParameter) nextParameter); - } - } - sortSearchParameters(searchParameters); - if (!searchParameters.isEmpty()) { - // boolean allOptional = searchParameters.get(0).isRequired() == false; - // - // OperationDefinition query = null; - // if (!allOptional) { - // RestOperation operation = rest.addOperation(); - // query = new OperationDefinition(); - // operation.setDefinition(new ResourceReferenceDt(query)); - // query.getDescriptionElement().setValue(searchMethodBinding.getDescription()); - // query.addUndeclaredExtension(false, - // ExtensionConstants.QUERY_RETURN_TYPE, new CodeDt(resourceName)); - // for (String nextInclude : searchMethodBinding.getIncludes()) { - // query.addUndeclaredExtension(false, - // ExtensionConstants.QUERY_ALLOWED_INCLUDE, new StringDt(nextInclude)); - // } - // } + for (SearchParameter nextParameter : searchParameters) { - for (SearchParameter nextParameter : searchParameters) { + String nextParamName = nextParameter.getName(); - String nextParamName = nextParameter.getName(); + String chain = null; + String nextParamUnchainedName = nextParamName; + if (nextParamName.contains(".")) { + chain = nextParamName.substring(nextParamName.indexOf('.') + 1); + nextParamUnchainedName = nextParamName.substring(0, nextParamName.indexOf('.')); + 
} - String chain = null; - String nextParamUnchainedName = nextParamName; - if (nextParamName.contains(".")) { - chain = nextParamName.substring(nextParamName.indexOf('.') + 1); - nextParamUnchainedName = nextParamName.substring(0, nextParamName.indexOf('.')); - } + String nextParamDescription = nextParameter.getDescription(); - String nextParamDescription = nextParameter.getDescription(); + /* + * If the parameter has no description, default to the one from the + * resource + */ + if (StringUtils.isBlank(nextParamDescription)) { + RuntimeSearchParam paramDef = def.getSearchParam(nextParamUnchainedName); + if (paramDef != null) { + nextParamDescription = paramDef.getDescription(); + } + } - /* - * If the parameter has no description, default to the one from the - * resource - */ - if (StringUtils.isBlank(nextParamDescription)) { - RuntimeSearchParam paramDef = def.getSearchParam(nextParamUnchainedName); - if (paramDef != null) { - nextParamDescription = paramDef.getDescription(); - } - } + ConformanceRestResourceSearchParamComponent param = resource.addSearchParam(); + param.setName(nextParamUnchainedName); + if (StringUtils.isNotBlank(chain)) { + param.addChain(chain); + } + param.setDocumentation(nextParamDescription); + if (nextParameter.getParamType() != null) { + param.getTypeElement() + .setValueAsString(nextParameter.getParamType().getCode()); + } + for (Class nextTarget : nextParameter.getDeclaredTypes()) { + RuntimeResourceDefinition targetDef = getServerConfiguration(theRequestDetails) + .getFhirContext() + .getResourceDefinition(nextTarget); + if (targetDef != null) { + ResourceType code; + try { + code = ResourceType.fromCode(targetDef.getName()); + } catch (Exception e) { + code = null; + } + if (code != null) { + param.addTarget(code.toCode()); + } + } + } + } + } + } - ConformanceRestResourceSearchParamComponent param = resource.addSearchParam(); - param.setName(nextParamUnchainedName); - if (StringUtils.isNotBlank(chain)) { - param.addChain(chain); - } - param.setDocumentation(nextParamDescription); - if (nextParameter.getParamType() != null) { - param.getTypeElement().setValueAsString(nextParameter.getParamType().getCode()); - } - for (Class nextTarget : nextParameter.getDeclaredTypes()) { - RuntimeResourceDefinition targetDef = getServerConfiguration(theRequestDetails).getFhirContext().getResourceDefinition(nextTarget); - if (targetDef != null) { - ResourceType code; - try { - code = ResourceType.fromCode(targetDef.getName()); - } catch (Exception e) { - code = null; - } - if (code != null) { - param.addTarget(code.toCode()); - } - } - } - } - } - } + @Read(type = OperationDefinition.class) + public OperationDefinition readOperationDefinition(@IdParam IdType theId, RequestDetails theRequestDetails) { + if (theId == null || theId.hasIdPart() == false) { + throw new ResourceNotFoundException(Msg.code(1986) + theId); + } + List sharedDescriptions = getServerConfiguration(theRequestDetails) + .provideBindings() + .getOperationIdToBindings() + .get(theId.getIdPart()); + if (sharedDescriptions == null || sharedDescriptions.isEmpty()) { + throw new ResourceNotFoundException(Msg.code(1987) + theId); + } + OperationDefinition op = new OperationDefinition(); + op.setStatus(ConformanceResourceStatus.ACTIVE); + op.setIdempotent(true); + Set inParams = new HashSet<>(); + Set outParams = new HashSet<>(); - @Read(type = OperationDefinition.class) - public OperationDefinition readOperationDefinition(@IdParam IdType theId, RequestDetails theRequestDetails) { - if (theId == null || 
theId.hasIdPart() == false) { - throw new ResourceNotFoundException(Msg.code(1986) + theId); - } - List sharedDescriptions = getServerConfiguration(theRequestDetails).provideBindings().getOperationIdToBindings().get(theId.getIdPart()); - if (sharedDescriptions == null || sharedDescriptions.isEmpty()) { - throw new ResourceNotFoundException(Msg.code(1987) + theId); - } + for (OperationMethodBinding sharedDescription : sharedDescriptions) { + if (isNotBlank(sharedDescription.getDescription())) { + op.setDescription(sharedDescription.getDescription()); + } + if (!sharedDescription.isIdempotent()) { + op.setIdempotent(sharedDescription.isIdempotent()); + } + op.setCode(sharedDescription.getName()); + if (sharedDescription.isCanOperateAtInstanceLevel()) { + op.setInstance(sharedDescription.isCanOperateAtInstanceLevel()); + } + if (sharedDescription.isCanOperateAtServerLevel()) { + op.setSystem(sharedDescription.isCanOperateAtServerLevel()); + } + if (isNotBlank(sharedDescription.getResourceName())) { + op.addTypeElement().setValue(sharedDescription.getResourceName()); + } - OperationDefinition op = new OperationDefinition(); - op.setStatus(ConformanceResourceStatus.ACTIVE); - op.setIdempotent(true); + for (IParameter nextParamUntyped : sharedDescription.getParameters()) { + if (nextParamUntyped instanceof OperationParameter) { + OperationParameter nextParam = (OperationParameter) nextParamUntyped; + OperationDefinitionParameterComponent param = op.addParameter(); + if (!inParams.add(nextParam.getName())) { + continue; + } + param.setUse(OperationParameterUse.IN); + if (nextParam.getParamType() != null) { + param.setType(nextParam.getParamType()); + } + param.setMin(nextParam.getMin()); + param.setMax(nextParam.getMax() == -1 ? "*" : Integer.toString(nextParam.getMax())); + param.setName(nextParam.getName()); + } + } - Set inParams = new HashSet<>(); - Set outParams = new HashSet<>(); + for (ReturnType nextParam : sharedDescription.getReturnParams()) { + if (!outParams.add(nextParam.getName())) { + continue; + } + OperationDefinitionParameterComponent param = op.addParameter(); + param.setUse(OperationParameterUse.OUT); + if (nextParam.getType() != null) { + param.setType(nextParam.getType()); + } + param.setMin(nextParam.getMin()); + param.setMax(nextParam.getMax() == -1 ? 
"*" : Integer.toString(nextParam.getMax())); + param.setName(nextParam.getName()); + } + } - for (OperationMethodBinding sharedDescription : sharedDescriptions) { - if (isNotBlank(sharedDescription.getDescription())) { - op.setDescription(sharedDescription.getDescription()); - } - if (!sharedDescription.isIdempotent()) { - op.setIdempotent(sharedDescription.isIdempotent()); - } - op.setCode(sharedDescription.getName()); - if (sharedDescription.isCanOperateAtInstanceLevel()) { - op.setInstance(sharedDescription.isCanOperateAtInstanceLevel()); - } - if (sharedDescription.isCanOperateAtServerLevel()) { - op.setSystem(sharedDescription.isCanOperateAtServerLevel()); - } - if (isNotBlank(sharedDescription.getResourceName())) { - op.addTypeElement().setValue(sharedDescription.getResourceName()); - } + return op; + } - for (IParameter nextParamUntyped : sharedDescription.getParameters()) { - if (nextParamUntyped instanceof OperationParameter) { - OperationParameter nextParam = (OperationParameter) nextParamUntyped; - OperationDefinitionParameterComponent param = op.addParameter(); - if (!inParams.add(nextParam.getName())) { - continue; - } - param.setUse(OperationParameterUse.IN); - if (nextParam.getParamType() != null) { - param.setType(nextParam.getParamType()); - } - param.setMin(nextParam.getMin()); - param.setMax(nextParam.getMax() == -1 ? "*" : Integer.toString(nextParam.getMax())); - param.setName(nextParam.getName()); - } - } + /** + * Sets the cache property (default is true). If set to true, the same + * response will be returned for each invocation. + *

<p>
+ * See the class documentation for an important note if you are extending this
+ * class
+ * </p>
    + * @deprecated Since 4.0.0 this method doesn't do anything + */ + @Deprecated + public void setCache(boolean theCache) { + // nothing + } - for (ReturnType nextParam : sharedDescription.getReturnParams()) { - if (!outParams.add(nextParam.getName())) { - continue; - } - OperationDefinitionParameterComponent param = op.addParameter(); - param.setUse(OperationParameterUse.OUT); - if (nextParam.getType() != null) { - param.setType(nextParam.getType()); - } - param.setMin(nextParam.getMin()); - param.setMax(nextParam.getMax() == -1 ? "*" : Integer.toString(nextParam.getMax())); - param.setName(nextParam.getName()); - } - } + /** + * Sets the value of the "publisher" that will be placed in the generated + * conformance statement. As this is a mandatory element, the value should not + * be null (although this is not enforced). The value defaults to + * "Not provided" but may be set to null, which will cause this element to be + * omitted. + */ + public void setPublisher(String thePublisher) { + myPublisher = thePublisher; + } - return op; - } - - /** - * Sets the cache property (default is true). If set to true, the same - * response will be returned for each invocation. - *

<p>
- * See the class documentation for an important note if you are extending this
- * class
- * </p>
    - * @deprecated Since 4.0.0 this method doesn't do anything - */ - @Deprecated - public void setCache(boolean theCache) { - // nothing - } - - /** - * Sets the value of the "publisher" that will be placed in the generated - * conformance statement. As this is a mandatory element, the value should not - * be null (although this is not enforced). The value defaults to - * "Not provided" but may be set to null, which will cause this element to be - * omitted. - */ - public void setPublisher(String thePublisher) { - myPublisher = thePublisher; - } - - private void sortSearchParameters(List searchParameters) { - Collections.sort(searchParameters, new Comparator() { - @Override - public int compare(SearchParameter theO1, SearchParameter theO2) { - if (theO1.isRequired() == theO2.isRequired()) { - return theO1.getName().compareTo(theO2.getName()); - } - if (theO1.isRequired()) { - return -1; - } - return 1; - } - }); - } + private void sortSearchParameters(List searchParameters) { + Collections.sort(searchParameters, new Comparator() { + @Override + public int compare(SearchParameter theO1, SearchParameter theO2) { + if (theO1.isRequired() == theO2.isRequired()) { + return theO1.getName().compareTo(theO2.getName()); + } + if (theO1.isRequired()) { + return -1; + } + return 1; + } + }); + } } diff --git a/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/ctx/FhirR4.java b/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/ctx/FhirR4.java index 9cd17887c38..a7efad6dcfa 100644 --- a/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/ctx/FhirR4.java +++ b/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/ctx/FhirR4.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -19,23 +19,22 @@ */ package org.hl7.fhir.r4.hapi.ctx; +import ca.uhn.fhir.context.*; +import ca.uhn.fhir.fhirpath.IFhirPath; import ca.uhn.fhir.i18n.Msg; -import java.io.InputStream; -import java.util.Date; -import java.util.List; - +import ca.uhn.fhir.model.api.IFhirVersion; +import ca.uhn.fhir.model.primitive.IdDt; +import ca.uhn.fhir.rest.api.IVersionSpecificBundleFactory; +import ca.uhn.fhir.util.ReflectionUtil; import org.apache.commons.lang3.StringUtils; import org.hl7.fhir.instance.model.api.*; import org.hl7.fhir.r4.hapi.fluentpath.FhirPathR4; import org.hl7.fhir.r4.hapi.rest.server.R4BundleFactory; import org.hl7.fhir.r4.model.*; -import ca.uhn.fhir.context.*; -import ca.uhn.fhir.fhirpath.IFhirPath; -import ca.uhn.fhir.model.api.IFhirVersion; -import ca.uhn.fhir.model.primitive.IdDt; -import ca.uhn.fhir.rest.api.IVersionSpecificBundleFactory; -import ca.uhn.fhir.util.ReflectionUtil; +import java.io.InputStream; +import java.util.Date; +import java.util.List; public class FhirR4 implements IFhirVersion { @@ -119,5 +118,4 @@ public class FhirR4 implements IFhirVersion { public IIdType newIdType() { return new IdType(); } - } diff --git a/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/ctx/FhirServerR4.java b/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/ctx/FhirServerR4.java index 62b47b29dea..5b15292f69e 100644 --- a/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/ctx/FhirServerR4.java +++ b/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/ctx/FhirServerR4.java @@ -6,9 +6,8 @@ import ca.uhn.fhir.rest.server.provider.ServerCapabilityStatementProvider; public class FhirServerR4 implements IFhirVersionServer { - @Override - public ServerCapabilityStatementProvider createServerConformanceProvider(RestfulServer theServer) { - return new ServerCapabilityStatementProvider(theServer); - } - + @Override + public ServerCapabilityStatementProvider createServerConformanceProvider(RestfulServer theServer) { + return new ServerCapabilityStatementProvider(theServer); + } } diff --git a/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/ctx/HapiWorkerContext.java b/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/ctx/HapiWorkerContext.java index dd19a1b2fe9..ed09a4bf5ec 100644 --- a/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/ctx/HapiWorkerContext.java +++ b/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/ctx/HapiWorkerContext.java @@ -48,345 +48,410 @@ import java.util.Set; import static org.apache.commons.lang3.StringUtils.isNotBlank; public final class HapiWorkerContext extends I18nBase implements IWorkerContext { - private final FhirContext myCtx; - private final Cache myFetchedResourceCache; - private IValidationSupport myValidationSupport; - private Parameters myExpansionProfile; - private String myOverrideVersionNs; + private final FhirContext myCtx; + private final Cache myFetchedResourceCache; + private IValidationSupport myValidationSupport; + private Parameters myExpansionProfile; + private String myOverrideVersionNs; - public HapiWorkerContext(FhirContext theCtx, IValidationSupport theValidationSupport) { - Validate.notNull(theCtx, "theCtx must not be null"); - Validate.notNull(theValidationSupport, "theValidationSupport must not be null"); - myCtx = theCtx; - myValidationSupport = theValidationSupport; + public HapiWorkerContext(FhirContext theCtx, IValidationSupport theValidationSupport) { + Validate.notNull(theCtx, "theCtx must not be null"); + Validate.notNull(theValidationSupport, 
"theValidationSupport must not be null"); + myCtx = theCtx; + myValidationSupport = theValidationSupport; - long timeoutMillis = HapiSystemProperties.getTestValidationResourceCachesMs(); + long timeoutMillis = HapiSystemProperties.getTestValidationResourceCachesMs(); - myFetchedResourceCache = CacheFactory.build(timeoutMillis); + myFetchedResourceCache = CacheFactory.build(timeoutMillis); - // Set a default locale - setValidationMessageLanguage(getLocale()); - } + // Set a default locale + setValidationMessageLanguage(getLocale()); + } - @Override - public List allStructures() { - return myValidationSupport.fetchAllStructureDefinitions(); - } + @Override + public List allStructures() { + return myValidationSupport.fetchAllStructureDefinitions(); + } - @Override - public List getStructures() { - return allStructures(); - } + @Override + public List getStructures() { + return allStructures(); + } - @Override - public CodeSystem fetchCodeSystem(String theSystem) { - if (myValidationSupport == null) { - return null; - } else { - return (CodeSystem) myValidationSupport.fetchCodeSystem(theSystem); - } - } + @Override + public CodeSystem fetchCodeSystem(String theSystem) { + if (myValidationSupport == null) { + return null; + } else { + return (CodeSystem) myValidationSupport.fetchCodeSystem(theSystem); + } + } - @Override - public List findMapsForSource(String theUrl) { - throw new UnsupportedOperationException(Msg.code(258)); - } + @Override + public List findMapsForSource(String theUrl) { + throw new UnsupportedOperationException(Msg.code(258)); + } - @Override - public String getAbbreviation(String theName) { - throw new UnsupportedOperationException(Msg.code(259)); - } + @Override + public String getAbbreviation(String theName) { + throw new UnsupportedOperationException(Msg.code(259)); + } - @Override - public org.hl7.fhir.r4.utils.INarrativeGenerator getNarrativeGenerator(String thePrefix, String theBasePath) { - throw new UnsupportedOperationException(Msg.code(260)); - } + @Override + public org.hl7.fhir.r4.utils.INarrativeGenerator getNarrativeGenerator(String thePrefix, String theBasePath) { + throw new UnsupportedOperationException(Msg.code(260)); + } - @Override - public IParser getParser(ParserType theType) { - throw new UnsupportedOperationException(Msg.code(261)); - } + @Override + public IParser getParser(ParserType theType) { + throw new UnsupportedOperationException(Msg.code(261)); + } - @Override - public IParser getParser(String theType) { - throw new UnsupportedOperationException(Msg.code(262)); - } + @Override + public IParser getParser(String theType) { + throw new UnsupportedOperationException(Msg.code(262)); + } - @Override - public List getResourceNames() { - List result = new ArrayList<>(); - for (ResourceType next : ResourceType.values()) { - result.add(next.name()); - } - Collections.sort(result); - return result; - } + @Override + public List getResourceNames() { + List result = new ArrayList<>(); + for (ResourceType next : ResourceType.values()) { + result.add(next.name()); + } + Collections.sort(result); + return result; + } - @Override - public IParser newJsonParser() { - throw new UnsupportedOperationException(Msg.code(263)); - } + @Override + public IParser newJsonParser() { + throw new UnsupportedOperationException(Msg.code(263)); + } - @Override - public IResourceValidator newValidator() { - throw new UnsupportedOperationException(Msg.code(264)); - } + @Override + public IResourceValidator newValidator() { + throw new 
UnsupportedOperationException(Msg.code(264)); + } - @Override - public IParser newXmlParser() { - throw new UnsupportedOperationException(Msg.code(265)); - } + @Override + public IParser newXmlParser() { + throw new UnsupportedOperationException(Msg.code(265)); + } - @Override - public String oid2Uri(String theCode) { - throw new UnsupportedOperationException(Msg.code(266)); - } + @Override + public String oid2Uri(String theCode) { + throw new UnsupportedOperationException(Msg.code(266)); + } - @Override - public boolean supportsSystem(String theSystem) { - if (myValidationSupport == null) { - return false; - } else { - return myValidationSupport.isCodeSystemSupported(new ValidationSupportContext(myValidationSupport), theSystem); - } - } + @Override + public boolean supportsSystem(String theSystem) { + if (myValidationSupport == null) { + return false; + } else { + return myValidationSupport.isCodeSystemSupported( + new ValidationSupportContext(myValidationSupport), theSystem); + } + } - @Override - public Set typeTails() { - return new HashSet<>(Arrays.asList("Integer", "UnsignedInt", "PositiveInt", "Decimal", "DateTime", "Date", "Time", "Instant", "String", "Uri", "Oid", "Uuid", "Id", "Boolean", "Code", - "Markdown", "Base64Binary", "Coding", "CodeableConcept", "Attachment", "Identifier", "Quantity", "SampledData", "Range", "Period", "Ratio", "HumanName", "Address", "ContactPoint", - "Timing", "Reference", "Annotation", "Signature", "Meta")); - } + @Override + public Set typeTails() { + return new HashSet<>(Arrays.asList( + "Integer", + "UnsignedInt", + "PositiveInt", + "Decimal", + "DateTime", + "Date", + "Time", + "Instant", + "String", + "Uri", + "Oid", + "Uuid", + "Id", + "Boolean", + "Code", + "Markdown", + "Base64Binary", + "Coding", + "CodeableConcept", + "Attachment", + "Identifier", + "Quantity", + "SampledData", + "Range", + "Period", + "Ratio", + "HumanName", + "Address", + "ContactPoint", + "Timing", + "Reference", + "Annotation", + "Signature", + "Meta")); + } - @Override - public ValidationResult validateCode(ValidationOptions theOptions, CodeableConcept theCode, ValueSet theVs) { - for (Coding next : theCode.getCoding()) { - ValidationResult retVal = validateCode(theOptions, next, theVs); - if (retVal.isOk()) { - return retVal; - } - } + @Override + public ValidationResult validateCode(ValidationOptions theOptions, CodeableConcept theCode, ValueSet theVs) { + for (Coding next : theCode.getCoding()) { + ValidationResult retVal = validateCode(theOptions, next, theVs); + if (retVal.isOk()) { + return retVal; + } + } - return new ValidationResult(IssueSeverity.ERROR, null); - } + return new ValidationResult(IssueSeverity.ERROR, null); + } - @Override - public ValidationResult validateCode(ValidationOptions theOptions, Coding theCode, ValueSet theVs) { - String system = theCode.getSystem(); - String code = theCode.getCode(); - String display = theCode.getDisplay(); - return validateCode(theOptions, system, code, display, theVs); - } + @Override + public ValidationResult validateCode(ValidationOptions theOptions, Coding theCode, ValueSet theVs) { + String system = theCode.getSystem(); + String code = theCode.getCode(); + String display = theCode.getDisplay(); + return validateCode(theOptions, system, code, display, theVs); + } - @Override - public ValidationResult validateCode(ValidationOptions theOptions, String theSystem, String theCode, String theDisplay) { - IValidationSupport.CodeValidationResult result = myValidationSupport.validateCode(new 
ValidationSupportContext(myValidationSupport), convertConceptValidationOptions(theOptions), theSystem, theCode, theDisplay, null); - if (result == null) { - return null; - } + @Override + public ValidationResult validateCode( + ValidationOptions theOptions, String theSystem, String theCode, String theDisplay) { + IValidationSupport.CodeValidationResult result = myValidationSupport.validateCode( + new ValidationSupportContext(myValidationSupport), + convertConceptValidationOptions(theOptions), + theSystem, + theCode, + theDisplay, + null); + if (result == null) { + return null; + } - IssueSeverity severity = null; - if (result.getSeverity() != null) { - severity = IssueSeverity.fromCode(result.getSeverityCode()); - } + IssueSeverity severity = null; + if (result.getSeverity() != null) { + severity = IssueSeverity.fromCode(result.getSeverityCode()); + } - ConceptDefinitionComponent definition = new ConceptDefinitionComponent().setCode(result.getCode()); - return new ValidationResult(severity, result.getMessage(), definition); - } + ConceptDefinitionComponent definition = new ConceptDefinitionComponent().setCode(result.getCode()); + return new ValidationResult(severity, result.getMessage(), definition); + } - @Override - public ValidationResult validateCode(ValidationOptions theOptions, String theSystem, String theCode, String theDisplay, ConceptSetComponent theVsi) { - throw new UnsupportedOperationException(Msg.code(267)); - } + @Override + public ValidationResult validateCode( + ValidationOptions theOptions, + String theSystem, + String theCode, + String theDisplay, + ConceptSetComponent theVsi) { + throw new UnsupportedOperationException(Msg.code(267)); + } + @Override + public ValidationResult validateCode( + ValidationOptions theOptions, String theSystem, String theCode, String theDisplay, ValueSet theVs) { - @Override - public ValidationResult validateCode(ValidationOptions theOptions, String theSystem, String theCode, String theDisplay, ValueSet theVs) { + IValidationSupport.CodeValidationResult outcome; + if (isNotBlank(theVs.getUrl())) { + outcome = myValidationSupport.validateCode( + new ValidationSupportContext(myValidationSupport), + convertConceptValidationOptions(theOptions), + theSystem, + theCode, + theDisplay, + theVs.getUrl()); + } else { + outcome = myValidationSupport.validateCodeInValueSet( + new ValidationSupportContext(myValidationSupport), + convertConceptValidationOptions(theOptions), + theSystem, + theCode, + theDisplay, + theVs); + } - IValidationSupport.CodeValidationResult outcome; - if (isNotBlank(theVs.getUrl())) { - outcome = myValidationSupport.validateCode(new ValidationSupportContext(myValidationSupport), convertConceptValidationOptions(theOptions), theSystem, theCode, theDisplay, theVs.getUrl()); - } else { - outcome = myValidationSupport.validateCodeInValueSet(new ValidationSupportContext(myValidationSupport), convertConceptValidationOptions(theOptions), theSystem, theCode, theDisplay, theVs); - } + if (outcome != null && outcome.isOk()) { + ConceptDefinitionComponent definition = new ConceptDefinitionComponent(); + definition.setCode(theCode); + definition.setDisplay(outcome.getDisplay()); + return new ValidationResult(definition); + } - if (outcome != null && outcome.isOk()) { - ConceptDefinitionComponent definition = new ConceptDefinitionComponent(); - definition.setCode(theCode); - definition.setDisplay(outcome.getDisplay()); - return new ValidationResult(definition); - } + return new ValidationResult( + IssueSeverity.ERROR, + "Unknown code[" + 
theCode + "] in system[" + Constants.codeSystemWithDefaultDescription(theSystem) + + "]"); + } - return new ValidationResult(IssueSeverity.ERROR, "Unknown code[" + theCode + "] in system[" + Constants.codeSystemWithDefaultDescription(theSystem) + "]"); - } + @Override + public ValidationResult validateCode(ValidationOptions theOptions, String code, ValueSet vs) { + ValidationOptions options = theOptions.withGuessSystem(); + return validateCode(options, null, code, null, vs); + } - @Override - public ValidationResult validateCode(ValidationOptions theOptions, String code, ValueSet vs) { - ValidationOptions options = theOptions.withGuessSystem(); - return validateCode(options, null, code, null, vs); - } + @Override + @CoverageIgnore + public List allConformanceResources() { + throw new UnsupportedOperationException(Msg.code(268)); + } - @Override - @CoverageIgnore - public List allConformanceResources() { - throw new UnsupportedOperationException(Msg.code(268)); - } + @Override + public void generateSnapshot(StructureDefinition p) throws FHIRException { + throw new UnsupportedOperationException(Msg.code(269)); + } - @Override - public void generateSnapshot(StructureDefinition p) throws FHIRException { - throw new UnsupportedOperationException(Msg.code(269)); - } + @Override + public Parameters getExpansionParameters() { + return myExpansionProfile; + } - @Override - public Parameters getExpansionParameters() { - return myExpansionProfile; - } + @Override + public void setExpansionProfile(Parameters theExpParameters) { + myExpansionProfile = theExpParameters; + } - @Override - public void setExpansionProfile(Parameters theExpParameters) { - myExpansionProfile = theExpParameters; - } + @Override + @CoverageIgnore + public boolean hasCache() { + throw new UnsupportedOperationException(Msg.code(270)); + } - @Override - @CoverageIgnore - public boolean hasCache() { - throw new UnsupportedOperationException(Msg.code(270)); - } + @Override + public ValueSetExpander.ValueSetExpansionOutcome expandVS( + ValueSet theSource, boolean theCacheOk, boolean theHierarchical) { + throw new UnsupportedOperationException(Msg.code(271)); + } - @Override - public ValueSetExpander.ValueSetExpansionOutcome expandVS(ValueSet theSource, boolean theCacheOk, boolean theHierarchical) { - throw new UnsupportedOperationException(Msg.code(271)); - } + @Override + public ValueSetExpander.ValueSetExpansionOutcome expandVS(ConceptSetComponent theInc, boolean theHierarchical) + throws TerminologyServiceException { + ValueSet input = new ValueSet(); + input.getCompose().addInclude(theInc); + IValidationSupport.ValueSetExpansionOutcome output = + myValidationSupport.expandValueSet(new ValidationSupportContext(myValidationSupport), null, input); + return new ValueSetExpander.ValueSetExpansionOutcome((ValueSet) output.getValueSet(), output.getError(), null); + } - @Override - public ValueSetExpander.ValueSetExpansionOutcome expandVS(ConceptSetComponent theInc, boolean theHierarchical) throws TerminologyServiceException { - ValueSet input = new ValueSet(); - input.getCompose().addInclude(theInc); - IValidationSupport.ValueSetExpansionOutcome output = myValidationSupport.expandValueSet(new ValidationSupportContext(myValidationSupport), null, input); - return new ValueSetExpander.ValueSetExpansionOutcome((ValueSet) output.getValueSet(), output.getError(), null); - } + @Override + public ILoggingService getLogger() { + throw new UnsupportedOperationException(Msg.code(272)); + } - @Override - public ILoggingService getLogger() { - 
throw new UnsupportedOperationException(Msg.code(272)); - } + @Override + public void setLogger(ILoggingService theLogger) { + throw new UnsupportedOperationException(Msg.code(273)); + } - @Override - public void setLogger(ILoggingService theLogger) { - throw new UnsupportedOperationException(Msg.code(273)); - } + @Override + public String getVersion() { + return myCtx.getVersion().getVersion().getFhirVersionString(); + } - @Override - public String getVersion() { - return myCtx.getVersion().getVersion().getFhirVersionString(); - } + @Override + public UcumService getUcumService() { + throw new UnsupportedOperationException(Msg.code(274)); + } - @Override - public UcumService getUcumService() { - throw new UnsupportedOperationException(Msg.code(274)); - } + @Override + public void setUcumService(UcumService ucumService) { + throw new UnsupportedOperationException(Msg.code(275)); + } - @Override - public void setUcumService(UcumService ucumService) { - throw new UnsupportedOperationException(Msg.code(275)); - } + @Override + public boolean isNoTerminologyServer() { + return false; + } - @Override - public boolean isNoTerminologyServer() { - return false; - } + @Override + public TranslationServices translator() { + throw new UnsupportedOperationException(Msg.code(276)); + } - @Override - public TranslationServices translator() { - throw new UnsupportedOperationException(Msg.code(276)); - } + @Override + public List listTransforms() { + throw new UnsupportedOperationException(Msg.code(277)); + } - @Override - public List listTransforms() { - throw new UnsupportedOperationException(Msg.code(277)); - } + @Override + public StructureMap getTransform(String url) { + throw new UnsupportedOperationException(Msg.code(278)); + } - @Override - public StructureMap getTransform(String url) { - throw new UnsupportedOperationException(Msg.code(278)); - } + @Override + public String getOverrideVersionNs() { + return myOverrideVersionNs; + } - @Override - public String getOverrideVersionNs() { - return myOverrideVersionNs; - } + @Override + public void setOverrideVersionNs(String value) { + myOverrideVersionNs = value; + } - @Override - public void setOverrideVersionNs(String value) { - myOverrideVersionNs = value; - } + @Override + public StructureDefinition fetchTypeDefinition(String theTypeName) { + return fetchResource(StructureDefinition.class, "http://hl7.org/fhir/StructureDefinition/" + theTypeName); + } - @Override - public StructureDefinition fetchTypeDefinition(String theTypeName) { - return fetchResource(StructureDefinition.class, "http://hl7.org/fhir/StructureDefinition/" + theTypeName); - } + @Override + public String getLinkForUrl(String corePath, String url) { + throw new UnsupportedOperationException(Msg.code(279)); + } - @Override - public String getLinkForUrl(String corePath, String url) { - throw new UnsupportedOperationException(Msg.code(279)); - } + @Override + public List getTypeNames() { + throw new UnsupportedOperationException(Msg.code(280)); + } - @Override - public List getTypeNames() { - throw new UnsupportedOperationException(Msg.code(280)); - } + @Override + public T fetchResource(Class theClass, String theUri) { + if (myValidationSupport == null || theUri == null) { + return null; + } else { + @SuppressWarnings("unchecked") + T retVal = (T) myFetchedResourceCache.get(theUri, t -> myValidationSupport.fetchResource(theClass, theUri)); + return retVal; + } + } - @Override - public T fetchResource(Class theClass, String theUri) { - if (myValidationSupport == null || theUri == 
null) { - return null; - } else { - @SuppressWarnings("unchecked") - T retVal = (T) myFetchedResourceCache.get(theUri, t -> myValidationSupport.fetchResource(theClass, theUri)); - return retVal; - } - } + @Override + public T fetchResourceWithException(Class theClass, String theUri) + throws FHIRException { + T retVal = fetchResource(theClass, theUri); + if (retVal == null) { + throw new FHIRException(Msg.code(281) + "Could not find resource: " + theUri); + } + return retVal; + } - @Override - public T fetchResourceWithException(Class theClass, String theUri) throws FHIRException { - T retVal = fetchResource(theClass, theUri); - if (retVal == null) { - throw new FHIRException(Msg.code(281) + "Could not find resource: " + theUri); - } - return retVal; - } + @Override + public org.hl7.fhir.r4.model.Resource fetchResourceById(String theType, String theUri) { + throw new UnsupportedOperationException(Msg.code(282)); + } - @Override - public org.hl7.fhir.r4.model.Resource fetchResourceById(String theType, String theUri) { - throw new UnsupportedOperationException(Msg.code(282)); - } + @Override + public boolean hasResource(Class theClass_, String theUri) { + throw new UnsupportedOperationException(Msg.code(283)); + } - @Override - public boolean hasResource(Class theClass_, String theUri) { - throw new UnsupportedOperationException(Msg.code(283)); - } + @Override + public void cacheResource(org.hl7.fhir.r4.model.Resource theRes) throws FHIRException { + throw new UnsupportedOperationException(Msg.code(284)); + } - @Override - public void cacheResource(org.hl7.fhir.r4.model.Resource theRes) throws FHIRException { - throw new UnsupportedOperationException(Msg.code(284)); - } + @Override + public Set getResourceNamesAsSet() { + return myCtx.getResourceTypes(); + } - @Override - public Set getResourceNamesAsSet() { - return myCtx.getResourceTypes(); - } - - @Override - public ValueSetExpander.ValueSetExpansionOutcome expandVS(ElementDefinitionBindingComponent theBinding, boolean theCacheOk, boolean theHierarchical) throws FHIRException { - throw new UnsupportedOperationException(Msg.code(285)); - } - - public static ConceptValidationOptions convertConceptValidationOptions(ValidationOptions theOptions) { - ConceptValidationOptions retVal = new ConceptValidationOptions(); - if (theOptions.isGuessSystem()) { - retVal = retVal.setInferSystem(true); - } - return retVal; - } + @Override + public ValueSetExpander.ValueSetExpansionOutcome expandVS( + ElementDefinitionBindingComponent theBinding, boolean theCacheOk, boolean theHierarchical) + throws FHIRException { + throw new UnsupportedOperationException(Msg.code(285)); + } + public static ConceptValidationOptions convertConceptValidationOptions(ValidationOptions theOptions) { + ConceptValidationOptions retVal = new ConceptValidationOptions(); + if (theOptions.isGuessSystem()) { + retVal = retVal.setInferSystem(true); + } + return retVal; + } } diff --git a/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/fluentpath/FhirPathR4.java b/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/fluentpath/FhirPathR4.java index bdb3dd9dde4..72d6396638c 100644 --- a/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/fluentpath/FhirPathR4.java +++ b/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/fluentpath/FhirPathR4.java @@ -1,11 +1,11 @@ package org.hl7.fhir.r4.hapi.fluentpath; -import ca.uhn.fhir.fhirpath.IFhirPathEvaluationContext; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; import 
ca.uhn.fhir.context.support.IValidationSupport; import ca.uhn.fhir.fhirpath.FhirPathExecutionException; import ca.uhn.fhir.fhirpath.IFhirPath; +import ca.uhn.fhir.fhirpath.IFhirPathEvaluationContext; +import ca.uhn.fhir.i18n.Msg; import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.exceptions.PathEngineException; import org.hl7.fhir.instance.model.api.IBase; @@ -16,101 +16,104 @@ import org.hl7.fhir.r4.model.TypeDetails; import org.hl7.fhir.r4.model.ValueSet; import org.hl7.fhir.r4.utils.FHIRPathEngine; -import javax.annotation.Nonnull; import java.util.List; import java.util.Optional; +import javax.annotation.Nonnull; public class FhirPathR4 implements IFhirPath { - private final FHIRPathEngine myEngine; + private final FHIRPathEngine myEngine; - public FhirPathR4(FhirContext theCtx) { - IValidationSupport validationSupport = theCtx.getValidationSupport(); - myEngine = new FHIRPathEngine(new HapiWorkerContext(theCtx, validationSupport)); - // These changes are to make the FP evaluation non-strict - myEngine.setDoNotEnforceAsCaseSensitive(true); - myEngine.setDoNotEnforceAsSingletonRule(true); - } + public FhirPathR4(FhirContext theCtx) { + IValidationSupport validationSupport = theCtx.getValidationSupport(); + myEngine = new FHIRPathEngine(new HapiWorkerContext(theCtx, validationSupport)); + // These changes are to make the FP evaluation non-strict + myEngine.setDoNotEnforceAsCaseSensitive(true); + myEngine.setDoNotEnforceAsSingletonRule(true); + } - @SuppressWarnings("unchecked") - @Override - public List evaluate(IBase theInput, String thePath, Class theReturnType) { - List result; - try { - result = myEngine.evaluate((Base) theInput, thePath); - } catch (FHIRException e) { - throw new FhirPathExecutionException(Msg.code(255) + e); - } + @SuppressWarnings("unchecked") + @Override + public List evaluate(IBase theInput, String thePath, Class theReturnType) { + List result; + try { + result = myEngine.evaluate((Base) theInput, thePath); + } catch (FHIRException e) { + throw new FhirPathExecutionException(Msg.code(255) + e); + } - for (Base next : result) { - if (!theReturnType.isAssignableFrom(next.getClass())) { - throw new FhirPathExecutionException(Msg.code(256) + "FluentPath expression \"" + thePath + "\" returned unexpected type " + next.getClass().getSimpleName() + " - Expected " + theReturnType.getName()); - } - } + for (Base next : result) { + if (!theReturnType.isAssignableFrom(next.getClass())) { + throw new FhirPathExecutionException( + Msg.code(256) + "FluentPath expression \"" + thePath + "\" returned unexpected type " + + next.getClass().getSimpleName() + " - Expected " + theReturnType.getName()); + } + } - return (List) result; - } + return (List) result; + } - @Override - public Optional evaluateFirst(IBase theInput, String thePath, Class theReturnType) { - return evaluate(theInput, thePath, theReturnType).stream().findFirst(); - } + @Override + public Optional evaluateFirst(IBase theInput, String thePath, Class theReturnType) { + return evaluate(theInput, thePath, theReturnType).stream().findFirst(); + } - @Override - public void parse(String theExpression) { - myEngine.parse(theExpression); - } + @Override + public void parse(String theExpression) { + myEngine.parse(theExpression); + } - @Override - public void setEvaluationContext(@Nonnull IFhirPathEvaluationContext theEvaluationContext) { - myEngine.setHostServices(new FHIRPathEngine.IEvaluationContext(){ + @Override + public void setEvaluationContext(@Nonnull IFhirPathEvaluationContext 
theEvaluationContext) { + myEngine.setHostServices(new FHIRPathEngine.IEvaluationContext() { - @Override - public List resolveConstant(Object appContext, String name, boolean beforeContext) throws PathEngineException { - return null; - } + @Override + public List resolveConstant(Object appContext, String name, boolean beforeContext) + throws PathEngineException { + return null; + } - @Override - public TypeDetails resolveConstantType(Object appContext, String name) throws PathEngineException { - return null; - } + @Override + public TypeDetails resolveConstantType(Object appContext, String name) throws PathEngineException { + return null; + } - @Override - public boolean log(String argument, List focus) { - return false; - } + @Override + public boolean log(String argument, List focus) { + return false; + } - @Override - public FunctionDetails resolveFunction(String functionName) { - return null; - } + @Override + public FunctionDetails resolveFunction(String functionName) { + return null; + } - @Override - public TypeDetails checkFunction(Object appContext, String functionName, List parameters) throws PathEngineException { - return null; - } + @Override + public TypeDetails checkFunction(Object appContext, String functionName, List parameters) + throws PathEngineException { + return null; + } - @Override - public List executeFunction(Object appContext, List focus, String functionName, List> parameters) { - return null; - } + @Override + public List executeFunction( + Object appContext, List focus, String functionName, List> parameters) { + return null; + } - @Override - public Base resolveReference(Object appContext, String theUrl, Base theRefContext) throws FHIRException { - return (Base)theEvaluationContext.resolveReference(new IdType(theUrl), theRefContext); - } - - @Override - public boolean conformsToProfile(Object appContext, Base item, String url) throws FHIRException { - return false; - } - - @Override - public ValueSet resolveValueSet(Object appContext, String url) { - return null; - } - }); - } + @Override + public Base resolveReference(Object appContext, String theUrl, Base theRefContext) throws FHIRException { + return (Base) theEvaluationContext.resolveReference(new IdType(theUrl), theRefContext); + } + @Override + public boolean conformsToProfile(Object appContext, Base item, String url) throws FHIRException { + return false; + } + @Override + public ValueSet resolveValueSet(Object appContext, String url) { + return null; + } + }); + } } diff --git a/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/rest/server/R4BundleFactory.java b/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/rest/server/R4BundleFactory.java index 415d23553f0..58a179e211c 100644 --- a/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/rest/server/R4BundleFactory.java +++ b/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/rest/server/R4BundleFactory.java @@ -43,253 +43,263 @@ import org.hl7.fhir.r4.model.DomainResource; import org.hl7.fhir.r4.model.IdType; import org.hl7.fhir.r4.model.Resource; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.UUID; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.isNotBlank; @SuppressWarnings("Duplicates") public class R4BundleFactory implements IVersionSpecificBundleFactory { - private String myBase; - 
private Bundle myBundle; - private final FhirContext myContext; + private String myBase; + private Bundle myBundle; + private final FhirContext myContext; - public R4BundleFactory(FhirContext theContext) { - myContext = theContext; - } + public R4BundleFactory(FhirContext theContext) { + myContext = theContext; + } - @Override - public void addResourcesToBundle(List theResult, BundleTypeEnum theBundleType, String theServerBase, BundleInclusionRule theBundleInclusionRule, Set theIncludes) { - ensureBundle(); + @Override + public void addResourcesToBundle( + List theResult, + BundleTypeEnum theBundleType, + String theServerBase, + BundleInclusionRule theBundleInclusionRule, + Set theIncludes) { + ensureBundle(); - List includedResources = new ArrayList<>(); - Set addedResourceIds = new HashSet<>(); + List includedResources = new ArrayList<>(); + Set addedResourceIds = new HashSet<>(); - for (IBaseResource next : theResult) { - if (!next.getIdElement().isEmpty()) { - addedResourceIds.add(next.getIdElement()); - } - } + for (IBaseResource next : theResult) { + if (!next.getIdElement().isEmpty()) { + addedResourceIds.add(next.getIdElement()); + } + } - for (IBaseResource next : theResult) { + for (IBaseResource next : theResult) { - Set containedIds = new HashSet<>(); + Set containedIds = new HashSet<>(); - if (next instanceof DomainResource) { - for (Resource nextContained : ((DomainResource) next).getContained()) { - if (isNotBlank(nextContained.getId())) { - containedIds.add(nextContained.getId()); - } - } - } + if (next instanceof DomainResource) { + for (Resource nextContained : ((DomainResource) next).getContained()) { + if (isNotBlank(nextContained.getId())) { + containedIds.add(nextContained.getId()); + } + } + } - List references = myContext.newTerser().getAllResourceReferences(next); - do { - List addedResourcesThisPass = new ArrayList<>(); + List references = myContext.newTerser().getAllResourceReferences(next); + do { + List addedResourcesThisPass = new ArrayList<>(); - for (ResourceReferenceInfo nextRefInfo : references) { - if (theBundleInclusionRule != null && !theBundleInclusionRule.shouldIncludeReferencedResource(nextRefInfo, theIncludes)) { - continue; - } + for (ResourceReferenceInfo nextRefInfo : references) { + if (theBundleInclusionRule != null + && !theBundleInclusionRule.shouldIncludeReferencedResource(nextRefInfo, theIncludes)) { + continue; + } - IAnyResource nextRes = (IAnyResource) nextRefInfo.getResourceReference().getResource(); - if (nextRes != null) { - if (nextRes.getIdElement().hasIdPart()) { - if (containedIds.contains(nextRes.getIdElement().getValue())) { - // Don't add contained IDs as top level resources - continue; - } + IAnyResource nextRes = + (IAnyResource) nextRefInfo.getResourceReference().getResource(); + if (nextRes != null) { + if (nextRes.getIdElement().hasIdPart()) { + if (containedIds.contains(nextRes.getIdElement().getValue())) { + // Don't add contained IDs as top level resources + continue; + } - IIdType id = nextRes.getIdElement(); - if (!id.hasResourceType()) { - String resName = myContext.getResourceType(nextRes); - id = id.withResourceType(resName); - } + IIdType id = nextRes.getIdElement(); + if (!id.hasResourceType()) { + String resName = myContext.getResourceType(nextRes); + id = id.withResourceType(resName); + } - if (!addedResourceIds.contains(id)) { - addedResourceIds.add(id); - addedResourcesThisPass.add(nextRes); - } + if (!addedResourceIds.contains(id)) { + addedResourceIds.add(id); + addedResourcesThisPass.add(nextRes); + } + } 
+ } + } - } - } - } + includedResources.addAll(addedResourcesThisPass); - includedResources.addAll(addedResourcesThisPass); + // Linked resources may themselves have linked resources + references = new ArrayList<>(); + for (IAnyResource iResource : addedResourcesThisPass) { + List newReferences = + myContext.newTerser().getAllResourceReferences(iResource); + references.addAll(newReferences); + } + } while (!references.isEmpty()); - // Linked resources may themselves have linked resources - references = new ArrayList<>(); - for (IAnyResource iResource : addedResourcesThisPass) { - List newReferences = myContext.newTerser().getAllResourceReferences(iResource); - references.addAll(newReferences); - } - } while (!references.isEmpty()); + BundleEntryComponent entry = myBundle.addEntry().setResource((Resource) next); + Resource nextAsResource = (Resource) next; + IIdType id = populateBundleEntryFullUrl(next, entry); - BundleEntryComponent entry = myBundle.addEntry().setResource((Resource) next); - Resource nextAsResource = (Resource) next; - IIdType id = populateBundleEntryFullUrl(next, entry); + // Populate Request + BundleEntryTransactionMethodEnum httpVerb = + ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.get(nextAsResource); + if (httpVerb != null) { + entry.getRequest().getMethodElement().setValueAsString(httpVerb.name()); + if (id != null) { + entry.getRequest().setUrl(id.toUnqualified().getValue()); + } + } + if (BundleEntryTransactionMethodEnum.DELETE.equals(httpVerb)) { + entry.setResource(null); + } - // Populate Request - BundleEntryTransactionMethodEnum httpVerb = ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.get(nextAsResource); - if (httpVerb != null) { - entry.getRequest().getMethodElement().setValueAsString(httpVerb.name()); - if (id != null) { - entry.getRequest().setUrl(id.toUnqualified().getValue()); - } - } - if (BundleEntryTransactionMethodEnum.DELETE.equals(httpVerb)) { - entry.setResource(null); - } + // Populate Bundle.entry.response + if (theBundleType != null) { + switch (theBundleType) { + case BATCH_RESPONSE: + case TRANSACTION_RESPONSE: + case HISTORY: + if (id != null) { + String version = id.getVersionIdPart(); + if ("1".equals(version)) { + entry.getResponse().setStatus("201 Created"); + } else if (isNotBlank(version)) { + entry.getResponse().setStatus("200 OK"); + } + if (isNotBlank(version)) { + entry.getResponse().setEtag(RestfulServerUtils.createEtag(version)); + } + } + break; + } + } - // Populate Bundle.entry.response - if (theBundleType != null) { - switch (theBundleType) { - case BATCH_RESPONSE: - case TRANSACTION_RESPONSE: - case HISTORY: - if (id != null) { - String version = id.getVersionIdPart(); - if ("1".equals(version)) { - entry.getResponse().setStatus("201 Created"); - } else if (isNotBlank(version)) { - entry.getResponse().setStatus("200 OK"); - } - if (isNotBlank(version)) { - entry.getResponse().setEtag(RestfulServerUtils.createEtag(version)); - } - } - break; - } - } + // Populate Bundle.entry.search + BundleEntrySearchModeEnum searchMode = ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.get(nextAsResource); + if (searchMode != null) { + entry.getSearch().getModeElement().setValueAsString(searchMode.getCode()); + } + } - // Populate Bundle.entry.search - BundleEntrySearchModeEnum searchMode = ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.get(nextAsResource); - if (searchMode != null) { - entry.getSearch().getModeElement().setValueAsString(searchMode.getCode()); - } - } + /* + * Actually add the resources to the bundle + */ + for (IAnyResource 
next : includedResources) { + BundleEntryComponent entry = myBundle.addEntry(); + entry.setResource((Resource) next).getSearch().setMode(SearchEntryMode.INCLUDE); + populateBundleEntryFullUrl(next, entry); + } + } - /* - * Actually add the resources to the bundle - */ - for (IAnyResource next : includedResources) { - BundleEntryComponent entry = myBundle.addEntry(); - entry.setResource((Resource) next).getSearch().setMode(SearchEntryMode.INCLUDE); - populateBundleEntryFullUrl(next, entry); - } + @Override + public void addRootPropertiesToBundle( + String theId, + @Nonnull BundleLinks theBundleLinks, + Integer theTotalResults, + IPrimitiveType theLastUpdated) { + ensureBundle(); - } + myBase = theBundleLinks.serverBase; - @Override - public void addRootPropertiesToBundle(String theId, @Nonnull BundleLinks theBundleLinks, Integer theTotalResults, - IPrimitiveType theLastUpdated) { - ensureBundle(); + if (myBundle.getIdElement().isEmpty()) { + myBundle.setId(theId); + } - myBase = theBundleLinks.serverBase; + if (myBundle.getMeta().getLastUpdated() == null && theLastUpdated != null) { + myBundle.getMeta().getLastUpdatedElement().setValueAsString(theLastUpdated.getValueAsString()); + } - if (myBundle.getIdElement().isEmpty()) { - myBundle.setId(theId); - } + if (hasNoLinkOfType(Constants.LINK_SELF, myBundle) && isNotBlank(theBundleLinks.getSelf())) { + myBundle.addLink().setRelation(Constants.LINK_SELF).setUrl(theBundleLinks.getSelf()); + } + if (hasNoLinkOfType(Constants.LINK_NEXT, myBundle) && isNotBlank(theBundleLinks.getNext())) { + myBundle.addLink().setRelation(Constants.LINK_NEXT).setUrl(theBundleLinks.getNext()); + } + if (hasNoLinkOfType(Constants.LINK_PREVIOUS, myBundle) && isNotBlank(theBundleLinks.getPrev())) { + myBundle.addLink().setRelation(Constants.LINK_PREVIOUS).setUrl(theBundleLinks.getPrev()); + } - if (myBundle.getMeta().getLastUpdated() == null && theLastUpdated != null) { - myBundle.getMeta().getLastUpdatedElement().setValueAsString(theLastUpdated.getValueAsString()); - } + addTotalResultsToBundle(theTotalResults, theBundleLinks.bundleType); + } - if (hasNoLinkOfType(Constants.LINK_SELF, myBundle) && isNotBlank(theBundleLinks.getSelf())) { - myBundle.addLink().setRelation(Constants.LINK_SELF).setUrl(theBundleLinks.getSelf()); - } - if (hasNoLinkOfType(Constants.LINK_NEXT, myBundle) && isNotBlank(theBundleLinks.getNext())) { - myBundle.addLink().setRelation(Constants.LINK_NEXT).setUrl(theBundleLinks.getNext()); - } - if (hasNoLinkOfType(Constants.LINK_PREVIOUS, myBundle) && isNotBlank(theBundleLinks.getPrev())) { - myBundle.addLink().setRelation(Constants.LINK_PREVIOUS).setUrl(theBundleLinks.getPrev()); - } + @Override + public void addTotalResultsToBundle(Integer theTotalResults, BundleTypeEnum theBundleType) { + ensureBundle(); - addTotalResultsToBundle(theTotalResults, theBundleLinks.bundleType); - } + if (myBundle.getIdElement().isEmpty()) { + myBundle.setId(UUID.randomUUID().toString()); + } - @Override - public void addTotalResultsToBundle(Integer theTotalResults, BundleTypeEnum theBundleType) { - ensureBundle(); + if (myBundle.getTypeElement().isEmpty() && theBundleType != null) { + myBundle.getTypeElement().setValueAsString(theBundleType.getCode()); + } - if (myBundle.getIdElement().isEmpty()) { - myBundle.setId(UUID.randomUUID().toString()); - } + if (myBundle.getTotalElement().isEmpty() && theTotalResults != null) { + myBundle.getTotalElement().setValue(theTotalResults); + } + } - if (myBundle.getTypeElement().isEmpty() && theBundleType != null) { - 
myBundle.getTypeElement().setValueAsString(theBundleType.getCode()); - } + private void ensureBundle() { + if (myBundle == null) { + myBundle = new Bundle(); + } + } - if (myBundle.getTotalElement().isEmpty() && theTotalResults != null) { - myBundle.getTotalElement().setValue(theTotalResults); - } - } + @Override + public IBaseResource getResourceBundle() { + return myBundle; + } - private void ensureBundle() { - if (myBundle == null) { - myBundle = new Bundle(); - } - } + private boolean hasNoLinkOfType(String theLinkType, Bundle theBundle) { + for (BundleLinkComponent next : theBundle.getLink()) { + if (theLinkType.equals(next.getRelation())) { + return false; + } + } + return true; + } - @Override - public IBaseResource getResourceBundle() { - return myBundle; - } + @Override + public void initializeWithBundleResource(IBaseResource theBundle) { + myBundle = (Bundle) theBundle; + } - private boolean hasNoLinkOfType(String theLinkType, Bundle theBundle) { - for (BundleLinkComponent next : theBundle.getLink()) { - if (theLinkType.equals(next.getRelation())) { - return false; - } - } - return true; - } - - @Override - public void initializeWithBundleResource(IBaseResource theBundle) { - myBundle = (Bundle) theBundle; - } - - @Nullable - private IIdType populateBundleEntryFullUrl(IBaseResource theResource, BundleEntryComponent theEntry) { - final IIdType idElement; - if (theResource.getIdElement().hasBaseUrl()) { - idElement = theResource.getIdElement(); - theEntry.setFullUrl(idElement.toVersionless().getValue()); - } else { - if (isNotBlank(myBase) && theResource.getIdElement().hasIdPart()) { - idElement = theResource.getIdElement().withServerBase(myBase, myContext.getResourceType(theResource)); - theEntry.setFullUrl(idElement.toVersionless().getValue()); - } else { - idElement = null; - } - } - return idElement; - } - - @Override - public List toListOfResources() { - ArrayList retVal = new ArrayList<>(); - for (BundleEntryComponent next : myBundle.getEntry()) { - if (next.getResource() != null) { - retVal.add(next.getResource()); - } else if (!next.getResponse().getLocationElement().isEmpty()) { - IdType id = new IdType(next.getResponse().getLocation()); - String resourceType = id.getResourceType(); - if (isNotBlank(resourceType)) { - IAnyResource res = (IAnyResource) myContext.getResourceDefinition(resourceType).newInstance(); - res.setId(id); - retVal.add(res); - } - } - } - return retVal; - } + @Nullable + private IIdType populateBundleEntryFullUrl(IBaseResource theResource, BundleEntryComponent theEntry) { + final IIdType idElement; + if (theResource.getIdElement().hasBaseUrl()) { + idElement = theResource.getIdElement(); + theEntry.setFullUrl(idElement.toVersionless().getValue()); + } else { + if (isNotBlank(myBase) && theResource.getIdElement().hasIdPart()) { + idElement = theResource.getIdElement().withServerBase(myBase, myContext.getResourceType(theResource)); + theEntry.setFullUrl(idElement.toVersionless().getValue()); + } else { + idElement = null; + } + } + return idElement; + } + @Override + public List toListOfResources() { + ArrayList retVal = new ArrayList<>(); + for (BundleEntryComponent next : myBundle.getEntry()) { + if (next.getResource() != null) { + retVal.add(next.getResource()); + } else if (!next.getResponse().getLocationElement().isEmpty()) { + IdType id = new IdType(next.getResponse().getLocation()); + String resourceType = id.getResourceType(); + if (isNotBlank(resourceType)) { + IAnyResource res = (IAnyResource) + 
myContext.getResourceDefinition(resourceType).newInstance(); + res.setId(id); + retVal.add(res); + } + } + } + return retVal; + } } diff --git a/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/rest/server/helper/BatchHelperR4.java b/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/rest/server/helper/BatchHelperR4.java index 53dc8bf9893..ff3ce1796c3 100644 --- a/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/rest/server/helper/BatchHelperR4.java +++ b/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/rest/server/helper/BatchHelperR4.java @@ -9,15 +9,17 @@ import javax.annotation.Nonnull; public class BatchHelperR4 { - @Nonnull - public static Long jobIdFromParameters(Parameters response) { - DecimalType jobIdDecimal = (DecimalType) response.getParameterValue(ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID); - return jobIdDecimal.getValue().longValue(); - } + @Nonnull + public static Long jobIdFromParameters(Parameters response) { + DecimalType jobIdDecimal = + (DecimalType) response.getParameterValue(ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID); + return jobIdDecimal.getValue().longValue(); + } - @Nonnull - public static String jobIdFromBatch2Parameters(Parameters response) { - StringType jobIdString = (StringType) response.getParameterValue(ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID); - return jobIdString.getValue(); - } + @Nonnull + public static String jobIdFromBatch2Parameters(Parameters response) { + StringType jobIdString = + (StringType) response.getParameterValue(ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID); + return jobIdString.getValue(); + } } diff --git a/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/ctx/FhirR4B.java b/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/ctx/FhirR4B.java index 854929c4fd0..ac9131d4ba3 100644 --- a/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/ctx/FhirR4B.java +++ b/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/ctx/FhirR4B.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -129,5 +129,4 @@ public class FhirR4B implements IFhirVersion { public IIdType newIdType() { return new IdType(); } - } diff --git a/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/ctx/FhirServerR4B.java b/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/ctx/FhirServerR4B.java index ab55b485eb9..816ae46aa7a 100644 --- a/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/ctx/FhirServerR4B.java +++ b/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/ctx/FhirServerR4B.java @@ -9,5 +9,4 @@ public class FhirServerR4B implements IFhirVersionServer { public ServerCapabilityStatementProvider createServerConformanceProvider(RestfulServer theServer) { return new ServerCapabilityStatementProvider(theServer); } - } diff --git a/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/ctx/HapiWorkerContext.java b/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/ctx/HapiWorkerContext.java index 99852541a99..be36650d263 100644 --- a/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/ctx/HapiWorkerContext.java +++ b/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/ctx/HapiWorkerContext.java @@ -156,11 +156,11 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext if (myValidationSupport == null) { return false; } else { - return myValidationSupport.isCodeSystemSupported(new ValidationSupportContext(myValidationSupport), theSystem); + return myValidationSupport.isCodeSystemSupported( + new ValidationSupportContext(myValidationSupport), theSystem); } } - @Override public ValidationResult validateCode(ValidationOptions theOptions, CodeableConcept theCode, ValueSet theVs) { for (Coding next : theCode.getCoding()) { @@ -182,25 +182,33 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext } @Override - public ValidationResult validateCode(ValidationOptions options, Coding code, ValueSet vs, ValidationContextCarrier ctxt) { + public ValidationResult validateCode( + ValidationOptions options, Coding code, ValueSet vs, ValidationContextCarrier ctxt) { return validateCode(options, code, vs); } @Override - public void validateCodeBatch(ValidationOptions options, List codes, ValueSet vs) { + public void validateCodeBatch( + ValidationOptions options, List codes, ValueSet vs) { throw new UnsupportedOperationException(Msg.code(2165)); } @Override - public ValueSetExpander.ValueSetExpansionOutcome expandVS(ValueSet theValueSet, boolean cacheOk, boolean heiarchical, boolean incompleteOk) { + public ValueSetExpander.ValueSetExpansionOutcome expandVS( + ValueSet theValueSet, boolean cacheOk, boolean heiarchical, boolean incompleteOk) { return null; } @Override - public ValidationResult validateCode(ValidationOptions theOptions, String theSystem, String theVersion, - String theCode, String theDisplay) { - IValidationSupport.CodeValidationResult result = myValidationSupport.validateCode(new ValidationSupportContext(myValidationSupport), - convertConceptValidationOptions(theOptions), theSystem, theCode, theDisplay, null); + public ValidationResult validateCode( + ValidationOptions theOptions, String theSystem, String theVersion, String theCode, String theDisplay) { + IValidationSupport.CodeValidationResult result = myValidationSupport.validateCode( + new ValidationSupportContext(myValidationSupport), + convertConceptValidationOptions(theOptions), + theSystem, + theCode, + theDisplay, + null); if (result == null) { return null; } @@ -213,15 +221,30 @@ public final class 
HapiWorkerContext extends I18nBase implements IWorkerContext } @Override - public ValidationResult validateCode(ValidationOptions theOptions, String theSystem, String theVersion, - String theCode, String theDisplay, ValueSet theVs) { + public ValidationResult validateCode( + ValidationOptions theOptions, + String theSystem, + String theVersion, + String theCode, + String theDisplay, + ValueSet theVs) { IValidationSupport.CodeValidationResult outcome; if (isNotBlank(theVs.getUrl())) { - outcome = myValidationSupport.validateCode(new ValidationSupportContext(myValidationSupport), - convertConceptValidationOptions(theOptions), theSystem, theCode, theDisplay, theVs.getUrl()); + outcome = myValidationSupport.validateCode( + new ValidationSupportContext(myValidationSupport), + convertConceptValidationOptions(theOptions), + theSystem, + theCode, + theDisplay, + theVs.getUrl()); } else { - outcome = myValidationSupport.validateCodeInValueSet(new ValidationSupportContext(myValidationSupport), - convertConceptValidationOptions(theOptions), theSystem, theCode, theDisplay, theVs); + outcome = myValidationSupport.validateCodeInValueSet( + new ValidationSupportContext(myValidationSupport), + convertConceptValidationOptions(theOptions), + theSystem, + theCode, + theDisplay, + theVs); } if (outcome != null && outcome.isOk()) { @@ -231,8 +254,10 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext return new ValidationResult(theSystem, definition); } - return new ValidationResult(IssueSeverity.ERROR, "Unknown code[" + theCode + "] in system[" + - Constants.codeSystemWithDefaultDescription(theSystem) + "]"); + return new ValidationResult( + IssueSeverity.ERROR, + "Unknown code[" + theCode + "] in system[" + Constants.codeSystemWithDefaultDescription(theSystem) + + "]"); } @Override @@ -252,9 +277,7 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext } @Override - public void generateSnapshot(StructureDefinition mr, boolean ifLogical) { - - } + public void generateSnapshot(StructureDefinition mr, boolean ifLogical) {} @Override public Parameters getExpansionParameters() { @@ -273,17 +296,20 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext } @Override - public ValueSetExpander.ValueSetExpansionOutcome expandVS(ValueSet theSource, boolean theCacheOk, boolean theHierarchical) { + public ValueSetExpander.ValueSetExpansionOutcome expandVS( + ValueSet theSource, boolean theCacheOk, boolean theHierarchical) { throw new UnsupportedOperationException(Msg.code(2168)); } @Override - public ValueSetExpander.ValueSetExpansionOutcome expandVS(ConceptSetComponent theInc, boolean hierarchical) throws TerminologyServiceException { + public ValueSetExpander.ValueSetExpansionOutcome expandVS(ConceptSetComponent theInc, boolean hierarchical) + throws TerminologyServiceException { ValueSet input = new ValueSet(); input.getCompose().addInclude(theInc); ValueSetExpansionOptions options = new ValueSetExpansionOptions(); options.setIncludeHierarchy(hierarchical); - IValidationSupport.ValueSetExpansionOutcome output = myValidationSupport.expandValueSet(new ValidationSupportContext(myValidationSupport), options, input); + IValidationSupport.ValueSetExpansionOutcome output = + myValidationSupport.expandValueSet(new ValidationSupportContext(myValidationSupport), options, input); return new ValueSetExpander.ValueSetExpansionOutcome((ValueSet) output.getValueSet(), output.getError(), null); } @@ -389,7 +415,8 @@ public final class 
HapiWorkerContext extends I18nBase implements IWorkerContext } @Override - public T fetchResourceWithException(Class theClass, String theUri) throws FHIRException { + public T fetchResourceWithException(Class theClass, String theUri) + throws FHIRException { T retVal = fetchResource(theClass, theUri); if (retVal == null) { throw new FHIRException(Msg.code(2180) + "Could not find resource: " + theUri); @@ -403,7 +430,8 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext } @Override - public T fetchResource(Class theClass, String theUri, CanonicalResource canonicalForSource) { + public T fetchResource( + Class theClass, String theUri, CanonicalResource canonicalForSource) { return fetchResource(theClass, theUri); } @@ -428,9 +456,7 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext } @Override - public void cachePackage(PackageDetails packageDetails, List list) { - - } + public void cachePackage(PackageDetails packageDetails, List list) {} @Override public Set getResourceNamesAsSet() { @@ -438,11 +464,12 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext } @Override - public ValueSetExpander.ValueSetExpansionOutcome expandVS(ElementDefinitionBindingComponent theBinding, boolean theCacheOk, boolean theHierarchical) throws FHIRException { + public ValueSetExpander.ValueSetExpansionOutcome expandVS( + ElementDefinitionBindingComponent theBinding, boolean theCacheOk, boolean theHierarchical) + throws FHIRException { throw new UnsupportedOperationException(Msg.code(2186)); } - @Override public String getLinkForUrl(String corePath, String url) { throw new UnsupportedOperationException(Msg.code(2187)); @@ -464,7 +491,8 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext } @Override - public int loadFromPackageAndDependencies(NpmPackage pi, IContextResourceLoader loader, BasePackageCacheManager pcm) throws FHIRException { + public int loadFromPackageAndDependencies(NpmPackage pi, IContextResourceLoader loader, BasePackageCacheManager pcm) + throws FHIRException { throw new UnsupportedOperationException(Msg.code(2191)); } @@ -510,5 +538,4 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext } return retVal; } - } diff --git a/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/fhirpath/FhirPathR4B.java b/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/fhirpath/FhirPathR4B.java index c853fa48775..af901839eba 100644 --- a/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/fhirpath/FhirPathR4B.java +++ b/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/fhirpath/FhirPathR4B.java @@ -16,42 +16,44 @@ import org.hl7.fhir.r4b.model.TypeDetails; import org.hl7.fhir.r4b.model.ValueSet; import org.hl7.fhir.r4b.utils.FHIRPathEngine; -import javax.annotation.Nonnull; import java.util.List; import java.util.Optional; +import javax.annotation.Nonnull; public class FhirPathR4B implements IFhirPath { - private FHIRPathEngine myEngine; + private FHIRPathEngine myEngine; - public FhirPathR4B(FhirContext theCtx) { - IValidationSupport validationSupport = theCtx.getValidationSupport(); - myEngine = new FHIRPathEngine(new HapiWorkerContext(theCtx, validationSupport)); - } + public FhirPathR4B(FhirContext theCtx) { + IValidationSupport validationSupport = theCtx.getValidationSupport(); + myEngine = new FHIRPathEngine(new HapiWorkerContext(theCtx, validationSupport)); + } - @SuppressWarnings("unchecked") - @Override - public 
List evaluate(IBase theInput, String thePath, Class theReturnType) { - List result; - try { - result = myEngine.evaluate((Base) theInput, thePath); - } catch (FHIRException e) { - throw new FhirPathExecutionException(Msg.code(2154) + e); - } + @SuppressWarnings("unchecked") + @Override + public List evaluate(IBase theInput, String thePath, Class theReturnType) { + List result; + try { + result = myEngine.evaluate((Base) theInput, thePath); + } catch (FHIRException e) { + throw new FhirPathExecutionException(Msg.code(2154) + e); + } - for (Base next : result) { - if (!theReturnType.isAssignableFrom(next.getClass())) { - throw new FhirPathExecutionException(Msg.code(2155) + "FluentPath expression \"" + thePath + "\" returned unexpected type " + next.getClass().getSimpleName() + " - Expected " + theReturnType.getName()); - } - } + for (Base next : result) { + if (!theReturnType.isAssignableFrom(next.getClass())) { + throw new FhirPathExecutionException( + Msg.code(2155) + "FluentPath expression \"" + thePath + "\" returned unexpected type " + + next.getClass().getSimpleName() + " - Expected " + theReturnType.getName()); + } + } - return (List) result; - } + return (List) result; + } - @Override - public Optional evaluateFirst(IBase theInput, String thePath, Class theReturnType) { - return evaluate(theInput, thePath, theReturnType).stream().findFirst(); - } + @Override + public Optional evaluateFirst(IBase theInput, String thePath, Class theReturnType) { + return evaluate(theInput, thePath, theReturnType).stream().findFirst(); + } @Override public void parse(String theExpression) { @@ -60,10 +62,11 @@ public class FhirPathR4B implements IFhirPath { @Override public void setEvaluationContext(@Nonnull IFhirPathEvaluationContext theEvaluationContext) { - myEngine.setHostServices(new FHIRPathEngine.IEvaluationContext(){ + myEngine.setHostServices(new FHIRPathEngine.IEvaluationContext() { @Override - public List resolveConstant(Object appContext, String name, boolean beforeContext) throws PathEngineException { + public List resolveConstant(Object appContext, String name, boolean beforeContext) + throws PathEngineException { return null; } @@ -83,18 +86,20 @@ public class FhirPathR4B implements IFhirPath { } @Override - public TypeDetails checkFunction(Object appContext, String functionName, List parameters) throws PathEngineException { + public TypeDetails checkFunction(Object appContext, String functionName, List parameters) + throws PathEngineException { return null; } @Override - public List executeFunction(Object appContext, List focus, String functionName, List> parameters) { + public List executeFunction( + Object appContext, List focus, String functionName, List> parameters) { return null; } @Override public Base resolveReference(Object appContext, String theUrl, Base refContext) throws FHIRException { - return (Base)theEvaluationContext.resolveReference(new IdType(theUrl), refContext); + return (Base) theEvaluationContext.resolveReference(new IdType(theUrl), refContext); } @Override diff --git a/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/rest/server/R4BBundleFactory.java b/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/rest/server/R4BBundleFactory.java index 0271770a582..c1ac46d65b8 100644 --- a/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/rest/server/R4BBundleFactory.java +++ b/hapi-fhir-structures-r4b/src/main/java/org/hl7/fhir/r4b/hapi/rest/server/R4BBundleFactory.java @@ -43,13 +43,13 @@ import org.hl7.fhir.r4b.model.DomainResource; import 
org.hl7.fhir.r4b.model.IdType; import org.hl7.fhir.r4b.model.Resource; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.UUID; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -64,7 +64,12 @@ public class R4BBundleFactory implements IVersionSpecificBundleFactory { } @Override - public void addResourcesToBundle(List theResult, BundleTypeEnum theBundleType, String theServerBase, BundleInclusionRule theBundleInclusionRule, Set theIncludes) { + public void addResourcesToBundle( + List theResult, + BundleTypeEnum theBundleType, + String theServerBase, + BundleInclusionRule theBundleInclusionRule, + Set theIncludes) { ensureBundle(); List includedResources = new ArrayList(); @@ -93,11 +98,13 @@ public class R4BBundleFactory implements IVersionSpecificBundleFactory { List addedResourcesThisPass = new ArrayList(); for (ResourceReferenceInfo nextRefInfo : references) { - if (theBundleInclusionRule != null && !theBundleInclusionRule.shouldIncludeReferencedResource(nextRefInfo, theIncludes)) { + if (theBundleInclusionRule != null + && !theBundleInclusionRule.shouldIncludeReferencedResource(nextRefInfo, theIncludes)) { continue; } - IAnyResource nextRes = (IAnyResource) nextRefInfo.getResourceReference().getResource(); + IAnyResource nextRes = + (IAnyResource) nextRefInfo.getResourceReference().getResource(); if (nextRes != null) { if (nextRes.getIdElement().hasIdPart()) { if (containedIds.contains(nextRes.getIdElement().getValue())) { @@ -115,7 +122,6 @@ public class R4BBundleFactory implements IVersionSpecificBundleFactory { addedResourceIds.add(id); addedResourcesThisPass.add(nextRes); } - } } } @@ -125,7 +131,8 @@ public class R4BBundleFactory implements IVersionSpecificBundleFactory { // Linked resources may themselves have linked resources references = new ArrayList<>(); for (IAnyResource iResource : addedResourcesThisPass) { - List newReferences = myContext.newTerser().getAllResourceReferences(iResource); + List newReferences = + myContext.newTerser().getAllResourceReferences(iResource); references.addAll(newReferences); } } while (references.isEmpty() == false); @@ -135,7 +142,8 @@ public class R4BBundleFactory implements IVersionSpecificBundleFactory { IIdType id = populateBundleEntryFullUrl(next, entry); // Populate Request - BundleEntryTransactionMethodEnum httpVerb = ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.get(nextAsResource); + BundleEntryTransactionMethodEnum httpVerb = + ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.get(nextAsResource); if (httpVerb != null) { entry.getRequest().getMethodElement().setValueAsString(httpVerb.name()); if (id != null) { @@ -179,12 +187,14 @@ public class R4BBundleFactory implements IVersionSpecificBundleFactory { entry.setResource((Resource) next).getSearch().setMode(SearchEntryMode.INCLUDE); populateBundleEntryFullUrl(next, entry); } - } @Override - public void addRootPropertiesToBundle(String theId, @Nonnull BundleLinks theBundleLinks, Integer theTotalResults, - IPrimitiveType theLastUpdated) { + public void addRootPropertiesToBundle( + String theId, + @Nonnull BundleLinks theBundleLinks, + Integer theTotalResults, + IPrimitiveType theLastUpdated) { ensureBundle(); myBase = theBundleLinks.serverBase; @@ -247,7 +257,6 @@ public class R4BBundleFactory implements IVersionSpecificBundleFactory { return false; } - @Override public void initializeWithBundleResource(IBaseResource 
theBundle) { myBundle = (Bundle) theBundle; @@ -278,7 +287,8 @@ public class R4BBundleFactory implements IVersionSpecificBundleFactory { IdType id = new IdType(next.getResponse().getLocation()); String resourceType = id.getResourceType(); if (isNotBlank(resourceType)) { - IAnyResource res = (IAnyResource) myContext.getResourceDefinition(resourceType).newInstance(); + IAnyResource res = (IAnyResource) + myContext.getResourceDefinition(resourceType).newInstance(); res.setId(id); retVal.add(res); } @@ -286,5 +296,4 @@ public class R4BBundleFactory implements IVersionSpecificBundleFactory { } return retVal; } - } diff --git a/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/ctx/FhirR5.java b/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/ctx/FhirR5.java index 1eb973e3e38..5e4402f9441 100644 --- a/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/ctx/FhirR5.java +++ b/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/ctx/FhirR5.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -19,12 +19,12 @@ */ package org.hl7.fhir.r5.hapi.ctx; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.fhirpath.IFhirPath; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.IFhirVersion; import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.api.IVersionSpecificBundleFactory; @@ -129,5 +129,4 @@ public class FhirR5 implements IFhirVersion { public IIdType newIdType() { return new IdType(); } - } diff --git a/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/ctx/FhirServerR5.java b/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/ctx/FhirServerR5.java index 5c181fa1984..65bf813e221 100644 --- a/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/ctx/FhirServerR5.java +++ b/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/ctx/FhirServerR5.java @@ -9,5 +9,4 @@ public class FhirServerR5 implements IFhirVersionServer { public ServerCapabilityStatementProvider createServerConformanceProvider(RestfulServer theServer) { return new ServerCapabilityStatementProvider(theServer); } - } diff --git a/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/ctx/HapiWorkerContext.java b/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/ctx/HapiWorkerContext.java index 6db9bf07365..28efb60380d 100644 --- a/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/ctx/HapiWorkerContext.java +++ b/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/ctx/HapiWorkerContext.java @@ -100,7 +100,6 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext return null; } - @Override public List getResourceNames() { List result = new ArrayList<>(); @@ -111,7 +110,6 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext return result; } - @Override public IResourceValidator newValidator() { throw new UnsupportedOperationException(Msg.code(206)); @@ -127,11 +125,11 @@ public final class 
HapiWorkerContext extends I18nBase implements IWorkerContext if (myValidationSupport == null) { return false; } else { - return myValidationSupport.isCodeSystemSupported(new ValidationSupportContext(myValidationSupport), theSystem); + return myValidationSupport.isCodeSystemSupported( + new ValidationSupportContext(myValidationSupport), theSystem); } } - @Override public ValidationResult validateCode(ValidationOptions theOptions, CodeableConcept theCode, ValueSet theVs) { for (Coding next : theCode.getCoding()) { @@ -153,25 +151,33 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext } @Override - public ValidationResult validateCode(ValidationOptions options, Coding code, ValueSet vs, ValidationContextCarrier ctxt) { + public ValidationResult validateCode( + ValidationOptions options, Coding code, ValueSet vs, ValidationContextCarrier ctxt) { return validateCode(options, code, vs); } @Override - public void validateCodeBatch(ValidationOptions options, List codes, ValueSet vs) { + public void validateCodeBatch( + ValidationOptions options, List codes, ValueSet vs) { throw new UnsupportedOperationException(Msg.code(209)); } @Override - public ValueSetExpansionOutcome expandVS(ValueSet theValueSet, boolean cacheOk, boolean heiarchical, boolean incompleteOk) { + public ValueSetExpansionOutcome expandVS( + ValueSet theValueSet, boolean cacheOk, boolean heiarchical, boolean incompleteOk) { return null; } @Override - public ValidationResult validateCode(ValidationOptions theOptions, String theSystem, String theVersion, - String theCode, String theDisplay) { - IValidationSupport.CodeValidationResult result = myValidationSupport.validateCode(new ValidationSupportContext(myValidationSupport), - convertConceptValidationOptions(theOptions), theSystem, theCode, theDisplay, null); + public ValidationResult validateCode( + ValidationOptions theOptions, String theSystem, String theVersion, String theCode, String theDisplay) { + IValidationSupport.CodeValidationResult result = myValidationSupport.validateCode( + new ValidationSupportContext(myValidationSupport), + convertConceptValidationOptions(theOptions), + theSystem, + theCode, + theDisplay, + null); if (result == null) { return null; } @@ -184,15 +190,30 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext } @Override - public ValidationResult validateCode(ValidationOptions theOptions, String theSystem, String theVersion, - String theCode, String theDisplay, ValueSet theVs) { + public ValidationResult validateCode( + ValidationOptions theOptions, + String theSystem, + String theVersion, + String theCode, + String theDisplay, + ValueSet theVs) { IValidationSupport.CodeValidationResult outcome; if (isNotBlank(theVs.getUrl())) { - outcome = myValidationSupport.validateCode(new ValidationSupportContext(myValidationSupport), - convertConceptValidationOptions(theOptions), theSystem, theCode, theDisplay, theVs.getUrl()); + outcome = myValidationSupport.validateCode( + new ValidationSupportContext(myValidationSupport), + convertConceptValidationOptions(theOptions), + theSystem, + theCode, + theDisplay, + theVs.getUrl()); } else { - outcome = myValidationSupport.validateCodeInValueSet(new ValidationSupportContext(myValidationSupport), - convertConceptValidationOptions(theOptions), theSystem, theCode, theDisplay, theVs); + outcome = myValidationSupport.validateCodeInValueSet( + new ValidationSupportContext(myValidationSupport), + convertConceptValidationOptions(theOptions), + theSystem, + theCode, + 
theDisplay, + theVs); } if (outcome != null && outcome.isOk()) { @@ -202,8 +223,11 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext return new ValidationResult(theSystem, theVersion, definition, null); } - return new ValidationResult(IssueSeverity.ERROR, "Unknown code[" + theCode + "] in system[" + - Constants.codeSystemWithDefaultDescription(theSystem) + "]", null); + return new ValidationResult( + IssueSeverity.ERROR, + "Unknown code[" + theCode + "] in system[" + Constants.codeSystemWithDefaultDescription(theSystem) + + "]", + null); } @Override @@ -211,7 +235,6 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext return validateCode(theOptions, null, null, code, null, vs); } - @Override public Parameters getExpansionParameters() { return myExpansionProfile; @@ -228,11 +251,13 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext } @Override - public ValueSetExpansionOutcome expandVS(ConceptSetComponent theInc, boolean theHierarchical, boolean theNoInactive) throws TerminologyServiceException { + public ValueSetExpansionOutcome expandVS(ConceptSetComponent theInc, boolean theHierarchical, boolean theNoInactive) + throws TerminologyServiceException { ValueSet input = new ValueSet(); - input.getCompose().setInactive(!theNoInactive); //TODO GGG/DO is this valid? + input.getCompose().setInactive(!theNoInactive); // TODO GGG/DO is this valid? input.getCompose().addInclude(theInc); - IValidationSupport.ValueSetExpansionOutcome output = myValidationSupport.expandValueSet(new ValidationSupportContext(myValidationSupport), null, input); + IValidationSupport.ValueSetExpansionOutcome output = + myValidationSupport.expandValueSet(new ValidationSupportContext(myValidationSupport), null, input); return new ValueSetExpansionOutcome((ValueSet) output.getValueSet(), output.getError(), null); } @@ -261,7 +286,6 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext return myCtx.getVersion().getVersion().getFhirVersionString(); } - @Override public UcumService getUcumService() { throw new UnsupportedOperationException(Msg.code(216)); @@ -287,8 +311,6 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext throw new UnsupportedOperationException(Msg.code(219)); } - - @Override public StructureDefinition fetchTypeDefinition(String typeName) { return fetchResource(StructureDefinition.class, "http://hl7.org/fhir/StructureDefinition/" + typeName); @@ -299,7 +321,6 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext throw new UnsupportedOperationException(Msg.code(234)); } - @Override public T fetchResource(Class theClass, String theUri) { if (myValidationSupport == null || theUri == null) { @@ -312,7 +333,8 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext } @Override - public T fetchResourceWithException(Class theClass, String theUri) throws FHIRException { + public T fetchResourceWithException(Class theClass, String theUri) + throws FHIRException { T retVal = fetchResource(theClass, theUri); if (retVal == null) { throw new FHIRException(Msg.code(224) + "Could not find resource: " + theUri); @@ -321,11 +343,11 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext } @Override - public T fetchResourceWithException(Class theClass, String uri, Resource sourceOfReference) throws FHIRException { + public T fetchResourceWithException(Class theClass, String uri, Resource 
sourceOfReference) + throws FHIRException { throw new UnsupportedOperationException(Msg.code(2213)); } - @Override public T fetchResource(Class theClass, String theUri, String theVersion) { return fetchResource(theClass, theUri + "|" + theVersion); @@ -333,7 +355,7 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext @Override public T fetchResource(Class class_, String uri, Resource canonicalForSource) { - return fetchResource(class_,uri); + return fetchResource(class_, uri); } @Override @@ -357,23 +379,20 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext } @Override - public void cachePackage(PackageInformation packageInformation) { - - } + public void cachePackage(PackageInformation packageInformation) {} @Override public Set getResourceNamesAsSet() { return myCtx.getResourceTypes(); } - @Override - public ValueSetExpansionOutcome expandVS(Resource src,ElementDefinitionBindingComponent theBinding, boolean theCacheOk, boolean theHierarchical) throws FHIRException { + public ValueSetExpansionOutcome expandVS( + Resource src, ElementDefinitionBindingComponent theBinding, boolean theCacheOk, boolean theHierarchical) + throws FHIRException { throw new UnsupportedOperationException(Msg.code(230)); } - - @Override public Set getBinaryKeysAsSet() { throw new UnsupportedOperationException(Msg.code(2115)); @@ -395,12 +414,14 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext } @Override - public int loadFromPackage(NpmPackage pi, IContextResourceLoader loader, List types) throws FileNotFoundException, IOException, FHIRException { + public int loadFromPackage(NpmPackage pi, IContextResourceLoader loader, List types) + throws FileNotFoundException, IOException, FHIRException { throw new UnsupportedOperationException(Msg.code(2328)); } @Override - public int loadFromPackageAndDependencies(NpmPackage pi, IContextResourceLoader loader, BasePackageCacheManager pcm) throws FHIRException { + public int loadFromPackageAndDependencies(NpmPackage pi, IContextResourceLoader loader, BasePackageCacheManager pcm) + throws FHIRException { throw new UnsupportedOperationException(Msg.code(235)); } @@ -452,7 +473,6 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext return retVal; } - @Override public List fetchResourcesByType(Class theClass) { if (theClass.equals(StructureDefinition.class)) { @@ -473,7 +493,8 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext } @Override - public PEBuilder getProfiledElementBuilder(PEBuilder.PEElementPropertiesPolicy thePEElementPropertiesPolicy, boolean theB) { + public PEBuilder getProfiledElementBuilder( + PEBuilder.PEElementPropertiesPolicy thePEElementPropertiesPolicy, boolean theB) { throw new UnsupportedOperationException(Msg.code(2261)); } @@ -485,6 +506,5 @@ public final class HapiWorkerContext extends I18nBase implements IWorkerContext @Override public void setForPublication(boolean b) { throw new UnsupportedOperationException(Msg.code(2350)); - } } diff --git a/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/fhirpath/FhirPathR5.java b/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/fhirpath/FhirPathR5.java index d099a7c12b4..79af5cdb729 100644 --- a/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/fhirpath/FhirPathR5.java +++ b/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/fhirpath/FhirPathR5.java @@ -16,9 +16,9 @@ import org.hl7.fhir.r5.model.TypeDetails; import 
org.hl7.fhir.r5.model.ValueSet; import org.hl7.fhir.r5.utils.FHIRPathEngine; -import javax.annotation.Nonnull; import java.util.List; import java.util.Optional; +import javax.annotation.Nonnull; public class FhirPathR5 implements IFhirPath { @@ -42,7 +42,9 @@ public class FhirPathR5 implements IFhirPath { for (Base next : result) { if (!theReturnType.isAssignableFrom(next.getClass())) { - throw new FhirPathExecutionException(Msg.code(199) + "FluentPath expression \"" + thePath + "\" returned unexpected type " + next.getClass().getSimpleName() + " - Expected " + theReturnType.getName()); + throw new FhirPathExecutionException( + Msg.code(199) + "FluentPath expression \"" + thePath + "\" returned unexpected type " + + next.getClass().getSimpleName() + " - Expected " + theReturnType.getName()); } } @@ -64,7 +66,8 @@ public class FhirPathR5 implements IFhirPath { myEngine.setHostServices(new FHIRPathEngine.IEvaluationContext() { @Override - public List resolveConstant(Object appContext, String name, boolean beforeContext) throws PathEngineException { + public List resolveConstant(Object appContext, String name, boolean beforeContext) + throws PathEngineException { return null; } @@ -84,12 +87,14 @@ public class FhirPathR5 implements IFhirPath { } @Override - public TypeDetails checkFunction(Object appContext, String functionName, List parameters) throws PathEngineException { + public TypeDetails checkFunction(Object appContext, String functionName, List parameters) + throws PathEngineException { return null; } @Override - public List executeFunction(Object appContext, List focus, String functionName, List> parameters) { + public List executeFunction( + Object appContext, List focus, String functionName, List> parameters) { return null; } diff --git a/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/rest/server/R5BundleFactory.java b/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/rest/server/R5BundleFactory.java index df4385e0f08..a98bb028a5b 100644 --- a/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/rest/server/R5BundleFactory.java +++ b/hapi-fhir-structures-r5/src/main/java/org/hl7/fhir/r5/hapi/rest/server/R5BundleFactory.java @@ -43,13 +43,13 @@ import org.hl7.fhir.r5.model.DomainResource; import org.hl7.fhir.r5.model.IdType; import org.hl7.fhir.r5.model.Resource; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.UUID; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -64,7 +64,12 @@ public class R5BundleFactory implements IVersionSpecificBundleFactory { } @Override - public void addResourcesToBundle(List theResult, BundleTypeEnum theBundleType, String theServerBase, BundleInclusionRule theBundleInclusionRule, Set theIncludes) { + public void addResourcesToBundle( + List theResult, + BundleTypeEnum theBundleType, + String theServerBase, + BundleInclusionRule theBundleInclusionRule, + Set theIncludes) { ensureBundle(); List includedResources = new ArrayList(); @@ -93,11 +98,13 @@ public class R5BundleFactory implements IVersionSpecificBundleFactory { List addedResourcesThisPass = new ArrayList(); for (ResourceReferenceInfo nextRefInfo : references) { - if (theBundleInclusionRule != null && !theBundleInclusionRule.shouldIncludeReferencedResource(nextRefInfo, theIncludes)) { + if (theBundleInclusionRule != null + && !theBundleInclusionRule.shouldIncludeReferencedResource(nextRefInfo, 
theIncludes)) { continue; } - IAnyResource nextRes = (IAnyResource) nextRefInfo.getResourceReference().getResource(); + IAnyResource nextRes = + (IAnyResource) nextRefInfo.getResourceReference().getResource(); if (nextRes != null) { if (nextRes.getIdElement().hasIdPart()) { if (containedIds.contains(nextRes.getIdElement().getValue())) { @@ -115,7 +122,6 @@ public class R5BundleFactory implements IVersionSpecificBundleFactory { addedResourceIds.add(id); addedResourcesThisPass.add(nextRes); } - } } } @@ -125,7 +131,8 @@ public class R5BundleFactory implements IVersionSpecificBundleFactory { // Linked resources may themselves have linked resources references = new ArrayList<>(); for (IAnyResource iResource : addedResourcesThisPass) { - List newReferences = myContext.newTerser().getAllResourceReferences(iResource); + List newReferences = + myContext.newTerser().getAllResourceReferences(iResource); references.addAll(newReferences); } } while (references.isEmpty() == false); @@ -135,7 +142,8 @@ public class R5BundleFactory implements IVersionSpecificBundleFactory { IIdType id = populateBundleEntryFullUrl(next, entry); // Populate Request - BundleEntryTransactionMethodEnum httpVerb = ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.get(nextAsResource); + BundleEntryTransactionMethodEnum httpVerb = + ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.get(nextAsResource); if (httpVerb != null) { entry.getRequest().getMethodElement().setValueAsString(httpVerb.name()); if (id != null) { @@ -179,12 +187,14 @@ public class R5BundleFactory implements IVersionSpecificBundleFactory { entry.setResource((Resource) next).getSearch().setMode(SearchEntryMode.INCLUDE); populateBundleEntryFullUrl(next, entry); } - } @Override - public void addRootPropertiesToBundle(String theId, @Nonnull BundleLinks theBundleLinks, Integer theTotalResults, - IPrimitiveType theLastUpdated) { + public void addRootPropertiesToBundle( + String theId, + @Nonnull BundleLinks theBundleLinks, + Integer theTotalResults, + IPrimitiveType theLastUpdated) { ensureBundle(); myBase = theBundleLinks.serverBase; @@ -247,7 +257,6 @@ public class R5BundleFactory implements IVersionSpecificBundleFactory { return false; } - @Override public void initializeWithBundleResource(IBaseResource theBundle) { myBundle = (Bundle) theBundle; @@ -278,7 +287,8 @@ public class R5BundleFactory implements IVersionSpecificBundleFactory { IdType id = new IdType(next.getResponse().getLocation()); String resourceType = id.getResourceType(); if (isNotBlank(resourceType)) { - IAnyResource res = (IAnyResource) myContext.getResourceDefinition(resourceType).newInstance(); + IAnyResource res = (IAnyResource) + myContext.getResourceDefinition(resourceType).newInstance(); res.setId(id); retVal.add(res); } @@ -286,5 +296,4 @@ public class R5BundleFactory implements IVersionSpecificBundleFactory { } return retVal; } - } diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/jpa/conformance/DateSearchTestCase.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/jpa/conformance/DateSearchTestCase.java index 05b4babdf17..4bc5c03265c 100644 --- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/jpa/conformance/DateSearchTestCase.java +++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/jpa/conformance/DateSearchTestCase.java @@ -22,7 +22,6 @@ package ca.uhn.fhir.jpa.conformance; import ca.uhn.fhir.util.CollectionUtil; import org.junit.jupiter.params.provider.Arguments; -import javax.annotation.Nonnull; import java.io.IOException; import java.io.InputStream; import 
java.io.InputStreamReader; @@ -34,6 +33,7 @@ import java.util.Arrays; import java.util.List; import java.util.Set; import java.util.stream.Collectors; +import javax.annotation.Nonnull; /** * Collection of test cases for date type search. @@ -49,7 +49,12 @@ public class DateSearchTestCase { final String myFileName; final int myLineNumber; - public DateSearchTestCase(String myResourceValue, String myQueryValue, boolean expectedResult, String theFileName, int theLineNumber) { + public DateSearchTestCase( + String myResourceValue, + String myQueryValue, + boolean expectedResult, + String theFileName, + int theLineNumber) { this.myResourceValue = myResourceValue; this.myQueryValue = myQueryValue; this.expectedResult = expectedResult; @@ -65,8 +70,9 @@ public class DateSearchTestCase { * We have two sources of test cases: * - DateSearchTestCase.csv which holds one test case per line * - DateSearchTestCase-compact.csv which specifies all operators for each value pair - */ - public final static List ourCases; + */ + public static final List ourCases; + static { ourCases = new ArrayList<>(); ourCases.addAll(expandedCases()); @@ -89,11 +95,17 @@ public class DateSearchTestCase { static List parseCsvCases(Reader theSource, String theFileName) { LineNumberReader lineNumberReader = new LineNumberReader(theSource); - return lineNumberReader.lines() - .filter(l->!l.startsWith("#")) // strip comments - .map(l -> l.split(",")) - .map(fields -> new DateSearchTestCase(fields[0].trim(), fields[1].trim(), Boolean.parseBoolean(fields[2].trim()), theFileName, lineNumberReader.getLineNumber())) - .collect(Collectors.toList()); + return lineNumberReader + .lines() + .filter(l -> !l.startsWith("#")) // strip comments + .map(l -> l.split(",")) + .map(fields -> new DateSearchTestCase( + fields[0].trim(), + fields[1].trim(), + Boolean.parseBoolean(fields[2].trim()), + theFileName, + lineNumberReader.getLineNumber())) + .collect(Collectors.toList()); } public static List compactCases() { @@ -119,24 +131,31 @@ public class DateSearchTestCase { // expand these into individual tests for each prefix. 
LineNumberReader lineNumberReader = new LineNumberReader(theSource); - return lineNumberReader.lines() - .filter(l->!l.startsWith("#")) // strip comments - .map(l -> l.split(",")) - .flatMap(fields -> { - // line looks like: "eq ge le,2020, 2020" - // Matching prefixes, Query Date, Resource Date - String resourceValue = fields[0].trim(); - String truePrefixes = fields[1].trim(); - String queryValue = fields[2].trim(); - Set expectedTruePrefixes = Arrays.stream(truePrefixes.split("\\s+")).map(String::trim).collect(Collectors.toSet()); + return lineNumberReader + .lines() + .filter(l -> !l.startsWith("#")) // strip comments + .map(l -> l.split(",")) + .flatMap(fields -> { + // line looks like: "eq ge le,2020, 2020" + // Matching prefixes, Query Date, Resource Date + String resourceValue = fields[0].trim(); + String truePrefixes = fields[1].trim(); + String queryValue = fields[2].trim(); + Set expectedTruePrefixes = Arrays.stream(truePrefixes.split("\\s+")) + .map(String::trim) + .collect(Collectors.toSet()); - // expand to one test case per supportedPrefixes - return supportedPrefixes.stream() - .map(prefix -> { + // expand to one test case per supportedPrefixes + return supportedPrefixes.stream().map(prefix -> { boolean expectMatch = expectedTruePrefixes.contains(prefix); - return new DateSearchTestCase(resourceValue, prefix + queryValue, expectMatch, theFileName, lineNumberReader.getLineNumber()); + return new DateSearchTestCase( + resourceValue, + prefix + queryValue, + expectMatch, + theFileName, + lineNumberReader.getLineNumber()); }); - }) - .collect(Collectors.toList()); + }) + .collect(Collectors.toList()); } } diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/jpa/conformance/package-info.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/jpa/conformance/package-info.java index 914c38385d8..9ae5a82c82e 100644 --- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/jpa/conformance/package-info.java +++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/jpa/conformance/package-info.java @@ -23,4 +23,3 @@ * These require binding into specific contexts (JPA Spring test, full server IT, etc.) 
*/ package ca.uhn.fhir.jpa.conformance; - diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/parser/AbstractJsonParserErrorHandlerTest.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/parser/AbstractJsonParserErrorHandlerTest.java index 7f4d265a8fd..7929ca86d32 100644 --- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/parser/AbstractJsonParserErrorHandlerTest.java +++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/parser/AbstractJsonParserErrorHandlerTest.java @@ -21,10 +21,10 @@ package ca.uhn.fhir.parser; import ca.uhn.fhir.context.FhirContext; -abstract public non-sealed class AbstractJsonParserErrorHandlerTest extends AbstractParserErrorHandlerTest { +public abstract non-sealed class AbstractJsonParserErrorHandlerTest extends AbstractParserErrorHandlerTest { - private static String PATIENT_DUPLICATE_CHOICE = - """ + private static String PATIENT_DUPLICATE_CHOICE = + """ { "resourceType": "Patient", "deceasedBoolean": "true", @@ -33,7 +33,7 @@ abstract public non-sealed class AbstractJsonParserErrorHandlerTest extends Abst """; protected abstract FhirContext getFhirContext(); - + @Override protected IParser createParser() { return getFhirContext().newJsonParser(); @@ -43,5 +43,4 @@ abstract public non-sealed class AbstractJsonParserErrorHandlerTest extends Abst protected String createResourceWithRepeatingChoice() { return PATIENT_DUPLICATE_CHOICE; } - } diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/parser/AbstractParserErrorHandlerTest.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/parser/AbstractParserErrorHandlerTest.java index ae47f486a21..d052f918040 100644 --- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/parser/AbstractParserErrorHandlerTest.java +++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/parser/AbstractParserErrorHandlerTest.java @@ -19,20 +19,21 @@ */ package ca.uhn.fhir.parser; +import org.junit.jupiter.api.Test; + import static org.junit.jupiter.api.Assertions.assertThrows; -import org.junit.jupiter.api.Test; /** * Defines FHIR version independent tests for testing parser error handling. In version dependent * projects, the sub-types {@link AbstractXmlParserErrorHandlerTest}, {@link * AbstractJsonParserErrorHandlerTest} can be sub-classed to create a complete test. 
*/ public abstract sealed class AbstractParserErrorHandlerTest - permits AbstractXmlParserErrorHandlerTest, AbstractJsonParserErrorHandlerTest { + permits AbstractXmlParserErrorHandlerTest, AbstractJsonParserErrorHandlerTest { - protected abstract IParser createParser(); + protected abstract IParser createParser(); - protected abstract String createResourceWithRepeatingChoice(); + protected abstract String createResourceWithRepeatingChoice(); @Test public void testRepeatingChoiceHandled() { @@ -43,7 +44,7 @@ public abstract sealed class AbstractParserErrorHandlerTest IParserErrorHandler errorHandler = new ErrorHandlerAdapter() { @Override public void unexpectedRepeatingElement(IParseLocation theLocation, String theElementName) { - throw new RepeatingChoiceHandledException(); + throw new RepeatingChoiceHandledException(); } }; @@ -51,10 +52,8 @@ public abstract sealed class AbstractParserErrorHandlerTest parser.setParserErrorHandler(errorHandler); String resourceStr = createResourceWithRepeatingChoice(); - assertThrows( - RepeatingChoiceHandledException.class, - () -> { - parser.parseResource(resourceStr); + assertThrows(RepeatingChoiceHandledException.class, () -> { + parser.parseResource(resourceStr); }); } } diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/parser/AbstractXmlParserErrorHandlerTest.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/parser/AbstractXmlParserErrorHandlerTest.java index 8a26d5cab3c..41957c0fcc8 100644 --- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/parser/AbstractXmlParserErrorHandlerTest.java +++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/parser/AbstractXmlParserErrorHandlerTest.java @@ -23,7 +23,7 @@ import ca.uhn.fhir.context.FhirContext; public abstract non-sealed class AbstractXmlParserErrorHandlerTest extends AbstractParserErrorHandlerTest { - private static String PATIENT_DUPLICATE_CHOICE = + private static String PATIENT_DUPLICATE_CHOICE = """ @@ -41,5 +41,4 @@ public abstract non-sealed class AbstractXmlParserErrorHandlerTest extends Abstr protected String createResourceWithRepeatingChoice() { return PATIENT_DUPLICATE_CHOICE; } - } diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/OperationRuleTestUtil.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/OperationRuleTestUtil.java index 600efab4949..e9120f03efd 100644 --- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/OperationRuleTestUtil.java +++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/OperationRuleTestUtil.java @@ -28,38 +28,38 @@ public final class OperationRuleTestUtil { private OperationRuleTestUtil() {} public static String getOperationName(IAuthRule theRule) { - return ((OperationRule)theRule).getOperationName(); + return ((OperationRule) theRule).getOperationName(); } public static boolean isAppliesToServer(IAuthRule theRule) { - return ((OperationRule)theRule).isAppliesToServer(); + return ((OperationRule) theRule).isAppliesToServer(); } public static boolean isAppliesToAnyType(IAuthRule theRule) { - return ((OperationRule)theRule).isAppliesToAnyType(); + return ((OperationRule) theRule).isAppliesToAnyType(); } public static boolean isAppliesToAnyInstance(IAuthRule theRule) { - return ((OperationRule)theRule).isAppliesToAnyInstance(); + return ((OperationRule) theRule).isAppliesToAnyInstance(); } public static HashSet> getAppliesToTypes(IAuthRule theRule) { - return 
((OperationRule)theRule).getAppliesToTypes(); + return ((OperationRule) theRule).getAppliesToTypes(); } public static HashSet> getAppliesToInstancesOfType(IAuthRule theRule) { - return ((OperationRule)theRule).getAppliesToInstancesOfType(); + return ((OperationRule) theRule).getAppliesToInstancesOfType(); } public static boolean isAllowAllResponses(IAuthRule theRule) { - return ((OperationRule)theRule).isAllowAllResponses(); + return ((OperationRule) theRule).isAllowAllResponses(); } public static String getGroupId(IAuthRule theRule) { - return ((RuleBulkExportImpl)theRule).getGroupId(); + return ((RuleBulkExportImpl) theRule).getGroupId(); } public static BulkExportJobParameters.ExportStyle getWantExportStyle(IAuthRule theRule) { - return ((RuleBulkExportImpl)theRule).getWantExportStyle(); + return ((RuleBulkExportImpl) theRule).getWantExportStyle(); } } diff --git a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/subscription/SubscriptionTestDataHelper.java b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/subscription/SubscriptionTestDataHelper.java index 34c996016a4..5edbd951d3a 100644 --- a/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/subscription/SubscriptionTestDataHelper.java +++ b/hapi-fhir-test-utilities/src/main/java/ca/uhn/fhir/subscription/SubscriptionTestDataHelper.java @@ -53,12 +53,29 @@ public class SubscriptionTestDataHelper { subscription.getMeta().addProfile(SubscriptionConstants.SUBSCRIPTION_TOPIC_PROFILE_URL); subscription.setCriteria(TEST_TOPIC); - subscription.getCriteriaElement().addExtension(SubscriptionConstants.SUBSCRIPTION_TOPIC_FILTER_URL, new StringType(TEST_FILTER1)); - subscription.getCriteriaElement().addExtension(SubscriptionConstants.SUBSCRIPTION_TOPIC_FILTER_URL, new StringType(TEST_FILTER2)); - subscription.getChannel().addExtension(SubscriptionConstants.SUBSCRIPTION_TOPIC_CHANNEL_HEARTBEAT_PERIOD_URL, new UnsignedIntType(86400)); - subscription.getChannel().addExtension(SubscriptionConstants.SUBSCRIPTION_TOPIC_CHANNEL_TIMEOUT_URL, new UnsignedIntType(60)); - subscription.getChannel().addExtension(SubscriptionConstants.SUBSCRIPTION_TOPIC_CHANNEL_MAX_COUNT, new PositiveIntType(20)); - subscription.getChannel().getPayloadElement().addExtension(SubscriptionConstants.SUBSCRIPTION_TOPIC_CHANNEL_PAYLOAD_CONTENT, new CodeType("full-resource")); + subscription + .getCriteriaElement() + .addExtension(SubscriptionConstants.SUBSCRIPTION_TOPIC_FILTER_URL, new StringType(TEST_FILTER1)); + subscription + .getCriteriaElement() + .addExtension(SubscriptionConstants.SUBSCRIPTION_TOPIC_FILTER_URL, new StringType(TEST_FILTER2)); + subscription + .getChannel() + .addExtension( + SubscriptionConstants.SUBSCRIPTION_TOPIC_CHANNEL_HEARTBEAT_PERIOD_URL, + new UnsignedIntType(86400)); + subscription + .getChannel() + .addExtension(SubscriptionConstants.SUBSCRIPTION_TOPIC_CHANNEL_TIMEOUT_URL, new UnsignedIntType(60)); + subscription + .getChannel() + .addExtension(SubscriptionConstants.SUBSCRIPTION_TOPIC_CHANNEL_MAX_COUNT, new PositiveIntType(20)); + subscription + .getChannel() + .getPayloadElement() + .addExtension( + SubscriptionConstants.SUBSCRIPTION_TOPIC_CHANNEL_PAYLOAD_CONTENT, + new CodeType("full-resource")); return subscription; } diff --git a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/BaseController.java b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/BaseController.java index 27ef87b9906..6aca81d697d 100644 --- a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/BaseController.java +++ 
b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/BaseController.java @@ -33,8 +33,6 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.ui.ModelMap; import org.thymeleaf.ITemplateEngine; -import javax.servlet.ServletException; -import javax.servlet.http.HttpServletRequest; import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; @@ -44,19 +42,25 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServletRequest; import static ca.uhn.fhir.util.UrlUtil.sanitizeUrlPart; import static org.apache.commons.lang3.StringUtils.defaultString; public class BaseController { static final String PARAM_RESOURCE = "resource"; - static final String RESOURCE_COUNT_EXT_URL = "http://hl7api.sourceforge.net/hapi-fhir/res/extdefs.html#resourceCount"; + static final String RESOURCE_COUNT_EXT_URL = + "http://hl7api.sourceforge.net/hapi-fhir/res/extdefs.html#resourceCount"; private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseController.class); + @Autowired protected TesterConfig myConfig; + private final Map myContexts = new HashMap<>(); private final Map myCanonicalizers = new HashMap<>(); private List myFilterHeaders; + @Autowired private ITemplateEngine myTemplateEngine; @@ -64,7 +68,8 @@ public class BaseController { super(); } - protected CapabilityStatement addCommonParams(HttpServletRequest theServletRequest, final HomeRequest theRequest, final ModelMap theModel) { + protected CapabilityStatement addCommonParams( + HttpServletRequest theServletRequest, final HomeRequest theRequest, final ModelMap theModel) { final String serverId = theRequest.getServerIdWithDefault(myConfig); final String serverBase = theRequest.getServerBase(theServletRequest, myConfig); final String serverName = theRequest.getServerName(myConfig); @@ -118,7 +123,13 @@ public class BaseController { char nextChar6 = (i + 5) < str.length() ? str.charAt(i + 5) : ' '; if (inQuote) { b.append(nextChar); - if (prevChar != '\\' && nextChar == '&' && nextChar2 == 'q' && nextChar3 == 'u' && nextChar4 == 'o' && nextChar5 == 't' && nextChar6 == ';') { + if (prevChar != '\\' + && nextChar == '&' + && nextChar2 == 'q' + && nextChar3 == 'u' + && nextChar4 == 'o' + && nextChar5 == 't' + && nextChar6 == ';') { b.append("quot;"); i += 5; inQuote = false; @@ -141,7 +152,12 @@ public class BaseController { b.append(nextChar); b.append(""); inValue = false; - } else if (nextChar == '&' && nextChar2 == 'q' && nextChar3 == 'u' && nextChar4 == 'o' && nextChar5 == 't' && nextChar6 == ';') { + } else if (nextChar == '&' + && nextChar2 == 'q' + && nextChar3 == 'u' + && nextChar4 == 'o' + && nextChar5 == 't' + && nextChar6 == ';') { if (inValue) { b.append("""); } else { @@ -172,7 +188,12 @@ public class BaseController { char nextChar6 = (i + 5) < str.length() ? 
str.charAt(i + 5) : ' '; if (inQuote) { b.append(nextChar); - if (nextChar == '&' && nextChar2 == 'q' && nextChar3 == 'u' && nextChar4 == 'o' && nextChar5 == 't' && nextChar6 == ';') { + if (nextChar == '&' + && nextChar2 == 'q' + && nextChar3 == 'u' + && nextChar4 == 'o' + && nextChar5 == 't' + && nextChar6 == ';') { b.append("quot;"); i += 5; inQuote = false; @@ -185,7 +206,12 @@ public class BaseController { } else if (nextChar == ' ') { b.append(""); b.append(nextChar); - } else if (nextChar == '&' && nextChar2 == 'q' && nextChar3 == 'u' && nextChar4 == 'o' && nextChar5 == 't' && nextChar6 == ';') { + } else if (nextChar == '&' + && nextChar2 == 'q' + && nextChar3 == 'u' + && nextChar4 == 'o' + && nextChar5 == 't' + && nextChar6 == ';') { b.append("""); inQuote = true; i += 5; @@ -242,7 +268,8 @@ public class BaseController { b.append("&"); } else if (nextChar == '=') { b.append("="); - // }else if (nextChar=='%' && Character.isLetterOrDigit(nextChar2)&& Character.isLetterOrDigit(nextChar3)) { + // }else if (nextChar=='%' && Character.isLetterOrDigit(nextChar2)&& + // Character.isLetterOrDigit(nextChar3)) { // URLDecoder.decode(s, enc) } else { b.append(nextChar); @@ -276,7 +303,8 @@ public class BaseController { return retVal; } - protected RuntimeResourceDefinition getResourceType(HomeRequest theRequest, HttpServletRequest theReq) throws ServletException { + protected RuntimeResourceDefinition getResourceType(HomeRequest theRequest, HttpServletRequest theReq) + throws ServletException { String resourceName = sanitizeUrlPart(defaultString(theReq.getParameter(PARAM_RESOURCE))); RuntimeResourceDefinition def = getContext(theRequest).getResourceDefinition(resourceName); if (def == null) { @@ -297,7 +325,8 @@ public class BaseController { return returnsResource; } - private CapabilityStatement loadAndAddConf(HttpServletRequest theServletRequest, final HomeRequest theRequest, final ModelMap theModel) { + private CapabilityStatement loadAndAddConf( + HttpServletRequest theServletRequest, final HomeRequest theRequest, final ModelMap theModel) { CaptureInterceptor interceptor = new CaptureInterceptor(); GenericClient client = theRequest.newClient(theServletRequest, getContext(theRequest), myConfig, interceptor); @@ -308,7 +337,8 @@ public class BaseController { name = "Conformance"; } try { - Class type = (Class) ctx.getResourceDefinition(name).getImplementingClass(); + Class type = (Class) + ctx.getResourceDefinition(name).getImplementingClass(); fetchedCapabilityStatement = client.fetchConformance().ofType(type).execute(); } catch (Exception ex) { ourLog.warn("Failed to load conformance statement, error was: {}", ex.toString()); @@ -316,18 +346,24 @@ public class BaseController { fetchedCapabilityStatement = ctx.getResourceDefinition(name).newInstance(); } - theModel.put("jsonEncodedConf", getContext(theRequest).newJsonParser().encodeResourceToString(fetchedCapabilityStatement)); + theModel.put( + "jsonEncodedConf", + getContext(theRequest).newJsonParser().encodeResourceToString(fetchedCapabilityStatement)); - org.hl7.fhir.r5.model.CapabilityStatement capabilityStatement = getVersionCanonicalizer(theRequest).capabilityStatementToCanonical(fetchedCapabilityStatement); + org.hl7.fhir.r5.model.CapabilityStatement capabilityStatement = + getVersionCanonicalizer(theRequest).capabilityStatementToCanonical(fetchedCapabilityStatement); Map resourceCounts = new HashMap<>(); long total = 0; - for (org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementRestComponent nextRest : 
capabilityStatement.getRest()) { - for (org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementRestResourceComponent nextResource : nextRest.getResource()) { + for (org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementRestComponent nextRest : + capabilityStatement.getRest()) { + for (org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementRestResourceComponent nextResource : + nextRest.getResource()) { List exts = nextResource.getExtensionsByUrl(RESOURCE_COUNT_EXT_URL); if (exts != null && exts.size() > 0) { - Number nextCount = ((org.hl7.fhir.r5.model.DecimalType) (exts.get(0).getValue())).getValueAsNumber(); + Number nextCount = + ((org.hl7.fhir.r5.model.DecimalType) (exts.get(0).getValue())).getValueAsNumber(); resourceCounts.put(nextResource.getTypeElement().getValue(), nextCount); total += nextCount.longValue(); } @@ -337,21 +373,26 @@ public class BaseController { theModel.put("resourceCounts", resourceCounts); if (total > 0) { - for (org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementRestComponent nextRest : capabilityStatement.getRest()) { + for (org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementRestComponent nextRest : + capabilityStatement.getRest()) { Collections.sort(nextRest.getResource(), (theO1, theO2) -> { org.hl7.fhir.r5.model.DecimalType count1 = new org.hl7.fhir.r5.model.DecimalType(); List count1exts = theO1.getExtensionsByUrl(RESOURCE_COUNT_EXT_URL); if (count1exts != null && count1exts.size() > 0) { - count1 = (org.hl7.fhir.r5.model.DecimalType) count1exts.get(0).getValue(); + count1 = (org.hl7.fhir.r5.model.DecimalType) + count1exts.get(0).getValue(); } org.hl7.fhir.r5.model.DecimalType count2 = new org.hl7.fhir.r5.model.DecimalType(); List count2exts = theO2.getExtensionsByUrl(RESOURCE_COUNT_EXT_URL); if (count2exts != null && count2exts.size() > 0) { - count2 = (org.hl7.fhir.r5.model.DecimalType) count2exts.get(0).getValue(); + count2 = (org.hl7.fhir.r5.model.DecimalType) + count2exts.get(0).getValue(); } int retVal = count2.compareTo(count1); if (retVal == 0) { - retVal = theO1.getTypeElement().getValue().compareTo(theO2.getTypeElement().getValue()); + retVal = theO1.getTypeElement() + .getValue() + .compareTo(theO2.getTypeElement().getValue()); } return retVal; }); @@ -364,7 +405,6 @@ public class BaseController { return capabilityStatement; } - protected String logPrefix(ModelMap theModel) { return "[server=" + theModel.get("serverId") + "] - "; } @@ -397,9 +437,12 @@ public class BaseController { // If this is a document, we'll pull the narrative from the Composition IBaseBundle bundle = (IBaseBundle) theResult; if ("document".equals(BundleUtil.getBundleType(theContext, bundle))) { - IBaseResource firstResource = theContext.newTerser().getSingleValueOrNull(bundle, "Bundle.entry.resource", IBaseResource.class); + IBaseResource firstResource = theContext + .newTerser() + .getSingleValueOrNull(bundle, "Bundle.entry.resource", IBaseResource.class); if (firstResource != null && "Composition".equals(theContext.getResourceType(firstResource))) { - IBaseXhtml html = theContext.newTerser().getSingleValueOrNull(firstResource, "text.div", IBaseXhtml.class); + IBaseXhtml html = + theContext.newTerser().getSingleValueOrNull(firstResource, "text.div", IBaseXhtml.class); if (html != null) { retVal = html.getValueAsString(); } @@ -437,8 +480,14 @@ public class BaseController { return retVal; } - protected void processAndAddLastClientInvocation(GenericClient theClient, ResultType theResultType, ModelMap theModelMap, long theLatency, 
String outcomeDescription, - CaptureInterceptor theInterceptor, HomeRequest theRequest) { + protected void processAndAddLastClientInvocation( + GenericClient theClient, + ResultType theResultType, + ModelMap theModelMap, + long theLatency, + String outcomeDescription, + CaptureInterceptor theInterceptor, + HomeRequest theRequest) { try { IHttpRequest lastRequest = theInterceptor.getLastRequest(); IHttpResponse lastResponse = theInterceptor.getLastResponse(); @@ -506,8 +555,10 @@ public class BaseController { resultDescription.append(" (").append(defaultString(resultBody).length() + " bytes)"); - Header[] requestHeaders = lastRequest != null ? applyHeaderFilters(lastRequest.getAllHeaders()) : new Header[0]; - Header[] responseHeaders = lastResponse != null ? applyHeaderFilters(lastResponse.getAllHeaders()) : new Header[0]; + Header[] requestHeaders = + lastRequest != null ? applyHeaderFilters(lastRequest.getAllHeaders()) : new Header[0]; + Header[] responseHeaders = + lastResponse != null ? applyHeaderFilters(lastResponse.getAllHeaders()) : new Header[0]; theModelMap.put("resultDescription", resultDescription.toString()); theModelMap.put("action", action); @@ -537,7 +588,6 @@ public class BaseController { ourLog.error("Failure during processing", e); theModelMap.put("errorMsg", toDisplayError("Error during processing: " + e.getMessage(), e)); } - } /** @@ -553,14 +603,18 @@ public class BaseController { } protected enum ResultType { - BUNDLE, NONE, RESOURCE, TAGLIST, PARAMETERS + BUNDLE, + NONE, + RESOURCE, + TAGLIST, + PARAMETERS } public static class CaptureInterceptor implements IClientInterceptor { private IHttpRequest myLastRequest; private IHttpResponse myLastResponse; -// private String myResponseBody; + // private String myResponseBody; public IHttpRequest getLastRequest() { return myLastRequest; @@ -570,9 +624,9 @@ public class BaseController { return myLastResponse; } -// public String getLastResponseBody() { -// return myResponseBody; -// } + // public String getLastResponseBody() { + // return myResponseBody; + // } @Override public void interceptRequest(IHttpRequest theRequest) { @@ -585,42 +639,42 @@ public class BaseController { public void interceptResponse(IHttpResponse theResponse) throws IOException { assert myLastResponse == null; myLastResponse = theResponse; -// myLastResponse = ((ApacheHttpResponse) theResponse).getResponse(); -// -// HttpEntity respEntity = myLastResponse.getEntity(); -// if (respEntity != null) { -// final byte[] bytes; -// try { -// bytes = IOUtils.toByteArray(respEntity.getContent()); -// } catch (IllegalStateException e) { -// throw new InternalErrorException(Msg.code(194) + e); -// } -// -// myResponseBody = new String(bytes, "UTF-8"); -// myLastResponse.setEntity(new MyEntityWrapper(respEntity, bytes)); -// } + // myLastResponse = ((ApacheHttpResponse) theResponse).getResponse(); + // + // HttpEntity respEntity = myLastResponse.getEntity(); + // if (respEntity != null) { + // final byte[] bytes; + // try { + // bytes = IOUtils.toByteArray(respEntity.getContent()); + // } catch (IllegalStateException e) { + // throw new InternalErrorException(Msg.code(194) + e); + // } + // + // myResponseBody = new String(bytes, "UTF-8"); + // myLastResponse.setEntity(new MyEntityWrapper(respEntity, bytes)); + // } } -// private static class MyEntityWrapper extends HttpEntityWrapper { -// -// private byte[] myBytes; -// -// public MyEntityWrapper(HttpEntity theWrappedEntity, byte[] theBytes) { -// super(theWrappedEntity); -// myBytes = theBytes; -// } -// -// 
@Override -// public InputStream getContent() throws IOException { -// return new ByteArrayInputStream(myBytes); -// } -// -// @Override -// public void writeTo(OutputStream theOutstream) throws IOException { -// theOutstream.write(myBytes); -// } -// -// } + // private static class MyEntityWrapper extends HttpEntityWrapper { + // + // private byte[] myBytes; + // + // public MyEntityWrapper(HttpEntity theWrappedEntity, byte[] theBytes) { + // super(theWrappedEntity); + // myBytes = theBytes; + // } + // + // @Override + // public InputStream getContent() throws IOException { + // return new ByteArrayInputStream(myBytes); + // } + // + // @Override + // public void writeTo(OutputStream theOutstream) throws IOException { + // theOutstream.write(myBytes); + // } + // + // } } @@ -642,5 +696,4 @@ public class BaseController { } return retVal; } - } diff --git a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/Controller.java b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/Controller.java index 8050e40beaa..e0ad5f3dfdd 100644 --- a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/Controller.java +++ b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/Controller.java @@ -1,9 +1,9 @@ package ca.uhn.fhir.to; -import ca.uhn.fhir.context.FhirVersionEnum; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.context.RuntimeResourceDefinition; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.Include; import ca.uhn.fhir.model.dstu2.valueset.ResourceTypeEnum; import ca.uhn.fhir.model.primitive.BoundCodeDt; @@ -45,16 +45,15 @@ import org.springframework.ui.ModelMap; import org.springframework.validation.BindingResult; import org.springframework.web.bind.annotation.RequestMapping; -import javax.annotation.Nullable; -import javax.servlet.ServletException; -import javax.servlet.http.HttpServletRequest; import java.io.IOException; import java.io.StringWriter; import java.util.ArrayList; import java.util.Collections; import java.util.List; -import java.util.Objects; import java.util.TreeSet; +import javax.annotation.Nullable; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServletRequest; import static ca.uhn.fhir.rest.server.provider.ProviderConstants.DIFF_OPERATION_NAME; import static ca.uhn.fhir.util.UrlUtil.sanitizeUrlPart; @@ -67,8 +66,9 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; public class Controller extends BaseController { static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(Controller.class); - @RequestMapping(value = { "/about" }) - public String actionAbout(HttpServletRequest theServletRequest, final HomeRequest theRequest, final ModelMap theModel) { + @RequestMapping(value = {"/about"}) + public String actionAbout( + HttpServletRequest theServletRequest, final HomeRequest theRequest, final ModelMap theModel) { addCommonParams(theServletRequest, theRequest, theModel); theModel.put("notHome", true); @@ -79,8 +79,12 @@ public class Controller extends BaseController { return "about"; } - @RequestMapping(value = { "/conformance" }) - public String actionConformance(HttpServletRequest theServletRequest, final HomeRequest theRequest, final BindingResult theBindingResult, final ModelMap theModel) { + @RequestMapping(value = {"/conformance"}) + public String actionConformance( + HttpServletRequest theServletRequest, + final HomeRequest theRequest, + final BindingResult theBindingResult, + final ModelMap theModel) { 
addCommonParams(theServletRequest, theRequest, theModel); CaptureInterceptor interceptor = new CaptureInterceptor(); @@ -95,28 +99,38 @@ public class Controller extends BaseController { name = "Conformance"; } - Class type = (Class) context.getResourceDefinition(name).getImplementingClass(); + Class type = (Class) + context.getResourceDefinition(name).getImplementingClass(); client.fetchConformance().ofType(type).execute(); } catch (Exception e) { returnsResource = handleClientException(client, e, theModel); } long delay = System.currentTimeMillis() - start; - processAndAddLastClientInvocation(client, returnsResource, theModel, delay, "Loaded conformance", interceptor, theRequest); + processAndAddLastClientInvocation( + client, returnsResource, theModel, delay, "Loaded conformance", interceptor, theRequest); ourLog.info(logPrefix(theModel) + "Displayed conformance profile"); return "result"; } - @RequestMapping(value = { "/create" }) - public String actionCreate(final HttpServletRequest theReq, final HomeRequest theRequest, final BindingResult theBindingResult, final ModelMap theModel) { + @RequestMapping(value = {"/create"}) + public String actionCreate( + final HttpServletRequest theReq, + final HomeRequest theRequest, + final BindingResult theBindingResult, + final ModelMap theModel) { doActionCreateOrValidate(theReq, theRequest, theBindingResult, theModel, "create"); return "result"; } - @RequestMapping(value = { "/delete" }) - public String actionDelete(HttpServletRequest theServletRequest, HomeRequest theRequest, BindingResult theBindingResult, ModelMap theModel) { + @RequestMapping(value = {"/delete"}) + public String actionDelete( + HttpServletRequest theServletRequest, + HomeRequest theRequest, + BindingResult theBindingResult, + ModelMap theModel) { addCommonParams(theServletRequest, theRequest, theModel); CaptureInterceptor interceptor = new CaptureInterceptor(); @@ -153,34 +167,48 @@ public class Controller extends BaseController { returnsResource = handleClientException(client, e, theModel); } long delay = System.currentTimeMillis() - start; - processAndAddLastClientInvocation(client, returnsResource, theModel, delay, outcomeDescription, interceptor, theRequest); + processAndAddLastClientInvocation( + client, returnsResource, theModel, delay, outcomeDescription, interceptor, theRequest); ourLog.info(logPrefix(theModel) + "Deleted resource of type " + def.getName()); return "result"; } - @RequestMapping(value = { "/history-server" }) - public String actionHistoryServer(final HttpServletRequest theReq, final HomeRequest theRequest, final BindingResult theBindingResult, final ModelMap theModel) { + @RequestMapping(value = {"/history-server"}) + public String actionHistoryServer( + final HttpServletRequest theReq, + final HomeRequest theRequest, + final BindingResult theBindingResult, + final ModelMap theModel) { doActionHistory(theReq, theRequest, theBindingResult, theModel, "history-server", "Server History"); return "result"; } - @RequestMapping(value = { "/history-type" }) - public String actionHistoryType(final HttpServletRequest theReq, final HomeRequest theRequest, final BindingResult theBindingResult, final ModelMap theModel) { + @RequestMapping(value = {"/history-type"}) + public String actionHistoryType( + final HttpServletRequest theReq, + final HomeRequest theRequest, + final BindingResult theBindingResult, + final ModelMap theModel) { doActionHistory(theReq, theRequest, theBindingResult, theModel, "history-type", "History"); return "result"; } - @RequestMapping(value 
= { "/", "/home" }) - public String actionHome(HttpServletRequest theServletRequest, final HomeRequest theRequest, final BindingResult theBindingResult, final ModelMap theModel) { + @RequestMapping(value = {"/", "/home"}) + public String actionHome( + HttpServletRequest theServletRequest, + final HomeRequest theRequest, + final BindingResult theBindingResult, + final ModelMap theModel) { addCommonParams(theServletRequest, theRequest, theModel); ourLog.info(theServletRequest.toString()); return "home"; } - @RequestMapping(value = { "/page" }) - public String actionPage(HttpServletRequest theReq, HomeRequest theRequest, BindingResult theBindingResult, ModelMap theModel) { + @RequestMapping(value = {"/page"}) + public String actionPage( + HttpServletRequest theReq, HomeRequest theRequest, BindingResult theBindingResult, ModelMap theModel) { addCommonParams(theReq, theRequest, theModel); CaptureInterceptor interceptor = new CaptureInterceptor(); @@ -204,7 +232,8 @@ public class Controller extends BaseController { try { ourLog.info(logPrefix(theModel) + "Loading paging URL: {}", url); @SuppressWarnings("unchecked") - Class bundleType = (Class) context.getResourceDefinition("Bundle").getImplementingClass(); + Class bundleType = (Class) + context.getResourceDefinition("Bundle").getImplementingClass(); client.loadPage().byUrl(url).andReturnBundle(bundleType).execute(); } catch (Exception e) { returnsResource = handleClientException(client, e, theModel); @@ -213,13 +242,18 @@ public class Controller extends BaseController { String outcomeDescription = "Bundle Page"; - processAndAddLastClientInvocation(client, returnsResource, theModel, delay, outcomeDescription, interceptor, theRequest); + processAndAddLastClientInvocation( + client, returnsResource, theModel, delay, outcomeDescription, interceptor, theRequest); return "result"; } - @RequestMapping(value = { "/read" }) - public String actionRead(HttpServletRequest theServletRequest, HomeRequest theRequest, BindingResult theBindingResult, ModelMap theModel) { + @RequestMapping(value = {"/read"}) + public String actionRead( + HttpServletRequest theServletRequest, + HomeRequest theRequest, + BindingResult theBindingResult, + ModelMap theModel) { addCommonParams(theServletRequest, theRequest, theModel); CaptureInterceptor interceptor = new CaptureInterceptor(); @@ -264,13 +298,18 @@ public class Controller extends BaseController { } long delay = System.currentTimeMillis() - start; - processAndAddLastClientInvocation(client, returnsResource, theModel, delay, outcomeDescription, interceptor, theRequest); + processAndAddLastClientInvocation( + client, returnsResource, theModel, delay, outcomeDescription, interceptor, theRequest); return "result"; } - @RequestMapping({ "/resource" }) - public String actionResource(HttpServletRequest theServletRequest, final ResourceRequest theRequest, final BindingResult theBindingResult, final ModelMap theModel) { + @RequestMapping({"/resource"}) + public String actionResource( + HttpServletRequest theServletRequest, + final ResourceRequest theRequest, + final BindingResult theBindingResult, + final ModelMap theModel) { String resourceName = theRequest.getResource(); @@ -280,10 +319,15 @@ public class Controller extends BaseController { String updateId = theRequest.getUpdateId(); String updateVid = defaultIfEmpty(theRequest.getUpdateVid(), null); CaptureInterceptor interceptor = new CaptureInterceptor(); - GenericClient client = theRequest.newClient(theServletRequest, getContext(theRequest), myConfig, interceptor); + 
GenericClient client = + theRequest.newClient(theServletRequest, getContext(theRequest), myConfig, interceptor); RuntimeResourceDefinition def = getContext(theRequest).getResourceDefinition(theRequest.getResource()); - IBaseResource updateResource = client.read(def.getImplementingClass(), new IdDt(resourceName, updateId, updateVid)); - String updateResourceString = theRequest.newParser(getContext(theRequest)).setPrettyPrint(true).encodeResourceToString(updateResource); + IBaseResource updateResource = + client.read(def.getImplementingClass(), new IdDt(resourceName, updateId, updateVid)); + String updateResourceString = theRequest + .newParser(getContext(theRequest)) + .setPrettyPrint(true) + .encodeResourceToString(updateResource); theModel.put("updateResource", updateResourceString); theModel.put("updateResourceId", updateId); } @@ -293,8 +337,10 @@ public class Controller extends BaseController { return "resource"; } - private void populateModelForResource(HttpServletRequest theServletRequest, HomeRequest theRequest, ModelMap theModel) { - org.hl7.fhir.r5.model.CapabilityStatement conformance = addCommonParams(theServletRequest, theRequest, theModel); + private void populateModelForResource( + HttpServletRequest theServletRequest, HomeRequest theRequest, ModelMap theModel) { + org.hl7.fhir.r5.model.CapabilityStatement conformance = + addCommonParams(theServletRequest, theRequest, theModel); String resourceName = theRequest.getResource(); @@ -304,7 +350,8 @@ public class Controller extends BaseController { boolean haveSearchParams = false; List> queryIncludes = new ArrayList<>(); - haveSearchParams = extractSearchParamsR5CapabilityStatement(conformance, resourceName, includes, revIncludes, sortParams, haveSearchParams, queryIncludes); + haveSearchParams = extractSearchParamsR5CapabilityStatement( + conformance, resourceName, includes, revIncludes, sortParams, haveSearchParams, queryIncludes); theModel.put("includes", includes); theModel.put("revincludes", revIncludes); @@ -315,8 +362,13 @@ public class Controller extends BaseController { } @SuppressWarnings("unchecked") - @RequestMapping(value = { "/search" }) - public String actionSearch(HttpServletRequest theServletRequest, HomeRequest theRequest, BindingResult theBindingResult, ModelMap theModel) throws IOException { + @RequestMapping(value = {"/search"}) + public String actionSearch( + HttpServletRequest theServletRequest, + HomeRequest theRequest, + BindingResult theBindingResult, + ModelMap theModel) + throws IOException { addCommonParams(theServletRequest, theRequest, theModel); StringWriter clientCodeJsonStringWriter = new StringWriter(); @@ -334,7 +386,8 @@ public class Controller extends BaseController { IQuery query; if (isNotBlank(theServletRequest.getParameter("resource"))) { try { - query = search.forResource(getResourceType(theRequest, theServletRequest).getImplementingClass()); + query = search.forResource( + getResourceType(theRequest, theServletRequest).getImplementingClass()); } catch (ServletException e) { populateModelForResource(theServletRequest, theRequest, theModel); theModel.put("errorMsg", toDisplayError(e.toString(), e)); @@ -440,7 +493,8 @@ public class Controller extends BaseController { } Class bundleType; - bundleType = (Class) client.getFhirContext().getResourceDefinition("Bundle").getImplementingClass(); + bundleType = (Class) + client.getFhirContext().getResourceDefinition("Bundle").getImplementingClass(); IQuery queryTyped = query.returnBundle(bundleType); long start = System.currentTimeMillis(); @@ 
-455,7 +509,8 @@ public class Controller extends BaseController { } long delay = System.currentTimeMillis() - start; - processAndAddLastClientInvocation(client, returnsResource, theModel, delay, outcomeDescription, interceptor, theRequest); + processAndAddLastClientInvocation( + client, returnsResource, theModel, delay, outcomeDescription, interceptor, theRequest); clientCodeJsonWriter.endObject(); clientCodeJsonWriter.close(); @@ -465,8 +520,12 @@ public class Controller extends BaseController { return "result"; } - @RequestMapping(value = { "/transaction" }) - public String actionTransaction(HttpServletRequest theServletRequest, final TransactionRequest theRequest, final BindingResult theBindingResult, final ModelMap theModel) { + @RequestMapping(value = {"/transaction"}) + public String actionTransaction( + HttpServletRequest theServletRequest, + final TransactionRequest theRequest, + final BindingResult theBindingResult, + final ModelMap theModel) { addCommonParams(theServletRequest, theRequest, theModel); CaptureInterceptor interceptor = new CaptureInterceptor(); @@ -481,13 +540,18 @@ public class Controller extends BaseController { } else if (body.startsWith("<")) { // XML content } else { - theModel.put("errorMsg", - toDisplayError("Message body does not appear to be a valid FHIR resource instance document. Body should start with '<' (for XML encoding) or '{' (for JSON encoding).", null)); + theModel.put( + "errorMsg", + toDisplayError( + "Message body does not appear to be a valid FHIR resource instance document. Body should start with '<' (for XML encoding) or '{' (for JSON encoding).", + null)); return "home"; } } catch (DataFormatException e) { ourLog.warn("Failed to parse bundle", e); - theModel.put("errorMsg", toDisplayError("Failed to parse transaction bundle body. Error was: " + e.getMessage(), e)); + theModel.put( + "errorMsg", + toDisplayError("Failed to parse transaction bundle body. 
Error was: " + e.getMessage(), e)); return "home"; } @@ -501,24 +565,38 @@ public class Controller extends BaseController { } long delay = System.currentTimeMillis() - start; - processAndAddLastClientInvocation(client, returnsResource, theModel, delay, "Transaction", interceptor, theRequest); + processAndAddLastClientInvocation( + client, returnsResource, theModel, delay, "Transaction", interceptor, theRequest); return "result"; } - @RequestMapping(value = { "/update" }) - public String actionUpdate(final HttpServletRequest theReq, final HomeRequest theRequest, final BindingResult theBindingResult, final ModelMap theModel) { + @RequestMapping(value = {"/update"}) + public String actionUpdate( + final HttpServletRequest theReq, + final HomeRequest theRequest, + final BindingResult theBindingResult, + final ModelMap theModel) { doActionCreateOrValidate(theReq, theRequest, theBindingResult, theModel, "update"); return "result"; } - @RequestMapping(value = { "/validate" }) - public String actionValidate(final HttpServletRequest theReq, final HomeRequest theRequest, final BindingResult theBindingResult, final ModelMap theModel) { + @RequestMapping(value = {"/validate"}) + public String actionValidate( + final HttpServletRequest theReq, + final HomeRequest theRequest, + final BindingResult theBindingResult, + final ModelMap theModel) { doActionCreateOrValidate(theReq, theRequest, theBindingResult, theModel, "validate"); return "result"; } - private void doActionCreateOrValidate(HttpServletRequest theReq, HomeRequest theRequest, BindingResult theBindingResult, ModelMap theModel, String theMethod) { + private void doActionCreateOrValidate( + HttpServletRequest theReq, + HomeRequest theRequest, + BindingResult theBindingResult, + ModelMap theModel, + String theMethod) { boolean validate = "validate".equals(theMethod); addCommonParams(theReq, theRequest, theModel); @@ -534,7 +612,8 @@ public class Controller extends BaseController { } // Don't sanitize this param, it's a raw resource body and may well be XML - String body = validate ? theReq.getParameter("resource-validate-body") : theReq.getParameter("resource-create-body"); + String body = + validate ? theReq.getParameter("resource-validate-body") : theReq.getParameter("resource-create-body"); if (isBlank(body)) { theModel.put("errorMsg", toDisplayError("No message body specified", null)); return; @@ -551,8 +630,11 @@ public class Controller extends BaseController { resource = getContext(theRequest).newXmlParser().parseResource(type, body); client.setEncoding(EncodingEnum.XML); } else { - theModel.put("errorMsg", - toDisplayError("Message body does not appear to be a valid FHIR resource instance document. Body should start with '<' (for XML encoding) or '{' (for JSON encoding).", null)); + theModel.put( + "errorMsg", + toDisplayError( + "Message body does not appear to be a valid FHIR resource instance document. 
Body should start with '<' (for XML encoding) or '{' (for JSON encoding).", + null)); return; } } catch (DataFormatException e) { @@ -588,26 +670,32 @@ public class Controller extends BaseController { } long delay = System.currentTimeMillis() - start; - processAndAddLastClientInvocation(client, returnsResource, theModel, delay, outcomeDescription, interceptor, theRequest); + processAndAddLastClientInvocation( + client, returnsResource, theModel, delay, outcomeDescription, interceptor, theRequest); try { if (validate) { - ourLog.info(logPrefix(theModel) + "Validated resource of type " + getResourceType(theRequest, theReq).getName()); + ourLog.info(logPrefix(theModel) + "Validated resource of type " + + getResourceType(theRequest, theReq).getName()); } else if (update) { - ourLog.info(logPrefix(theModel) + "Updated resource of type " + getResourceType(theRequest, theReq).getName()); + ourLog.info(logPrefix(theModel) + "Updated resource of type " + + getResourceType(theRequest, theReq).getName()); } else { - ourLog.info(logPrefix(theModel) + "Created resource of type " + getResourceType(theRequest, theReq).getName()); + ourLog.info(logPrefix(theModel) + "Created resource of type " + + getResourceType(theRequest, theReq).getName()); } } catch (Exception e) { ourLog.warn("Failed to determine resource type from request", e); } - } - @SuppressWarnings("unchecked") - @RequestMapping(value = { "/operation" }) - public String actionOperation(final HttpServletRequest theReq, final HomeRequest theRequest, final BindingResult theBindingResult, final ModelMap theModel) { + @RequestMapping(value = {"/operation"}) + public String actionOperation( + final HttpServletRequest theReq, + final HomeRequest theRequest, + final BindingResult theBindingResult, + final ModelMap theModel) { String instanceType = theReq.getParameter("instanceType"); String instanceId = theReq.getParameter("instanceId"); @@ -621,19 +709,20 @@ public class Controller extends BaseController { GenericClient client = theRequest.newClient(theReq, getContext(theRequest), myConfig, interceptor); client.setPrettyPrint(true); - Class type = getContext(theRequest).getResourceDefinition(instanceType).getImplementingClass(); - Class parametersType = (Class) getContext(theRequest).getResourceDefinition("Parameters").getImplementingClass(); + Class type = + getContext(theRequest).getResourceDefinition(instanceType).getImplementingClass(); + Class parametersType = (Class) + getContext(theRequest).getResourceDefinition("Parameters").getImplementingClass(); StopWatch sw = new StopWatch(); ResultType returnsResource = getReturnedTypeBasedOnOperation(operationName); try { - client - .operation() - .onInstance(instanceType + "/" + instanceId) - .named(operationName) - .withNoParameters(parametersType) - .useHttpGet() - .execute(); + client.operation() + .onInstance(instanceType + "/" + instanceId) + .named(operationName) + .withNoParameters(parametersType) + .useHttpGet() + .execute(); } catch (DataFormatException e) { ourLog.warn("Failed to parse resource", e); theModel.put("errorMsg", toDisplayError("Failed to parse message body. 
Error was: " + e.getMessage(), e)); @@ -644,7 +733,8 @@ public class Controller extends BaseController { } String outcomeDescription = "Execute " + operationName + " Operation"; - processAndAddLastClientInvocation(client, returnsResource, theModel, sw.getMillis(), outcomeDescription, interceptor, theRequest); + processAndAddLastClientInvocation( + client, returnsResource, theModel, sw.getMillis(), outcomeDescription, interceptor, theRequest); return "result"; } @@ -653,8 +743,13 @@ public class Controller extends BaseController { return DIFF_OPERATION_NAME.equals(operationName) ? ResultType.PARAMETERS : ResultType.BUNDLE; } - - private void doActionHistory(HttpServletRequest theReq, HomeRequest theRequest, BindingResult theBindingResult, ModelMap theModel, String theMethod, String theMethodDescription) { + private void doActionHistory( + HttpServletRequest theReq, + HomeRequest theRequest, + BindingResult theBindingResult, + ModelMap theModel, + String theMethod, + String theMethodDescription) { addCommonParams(theReq, theRequest, theModel); CaptureInterceptor interceptor = new CaptureInterceptor(); @@ -684,7 +779,9 @@ public class Controller extends BaseController { long start = System.currentTimeMillis(); try { - ourLog.info(logPrefix(theModel) + "Retrieving history for type {} ID {} since {}", new Object[] { type, id, since }); + ourLog.info( + logPrefix(theModel) + "Retrieving history for type {} ID {} since {}", + new Object[] {type, id, since}); IHistory hist0 = client.history(); IHistoryUntyped hist1; @@ -697,7 +794,8 @@ public class Controller extends BaseController { } IHistoryTyped hist2; - hist2 = hist1.andReturnBundle(client.getFhirContext().getResourceDefinition("Bundle").getImplementingClass(IBaseBundle.class)); + hist2 = hist1.andReturnBundle( + client.getFhirContext().getResourceDefinition("Bundle").getImplementingClass(IBaseBundle.class)); if (since != null) { hist2.since(since); @@ -712,13 +810,20 @@ public class Controller extends BaseController { } long delay = System.currentTimeMillis() - start; - processAndAddLastClientInvocation(client, returnsResource, theModel, delay, theMethodDescription, interceptor, theRequest); - + processAndAddLastClientInvocation( + client, returnsResource, theModel, delay, theMethodDescription, interceptor, theRequest); } - private boolean extractSearchParamsDstu2(IBaseResource theConformance, String resourceName, TreeSet includes, TreeSet theRevIncludes, TreeSet sortParams, - boolean haveSearchParams, List> queryIncludes) { - ca.uhn.fhir.model.dstu2.resource.Conformance conformance = (ca.uhn.fhir.model.dstu2.resource.Conformance) theConformance; + private boolean extractSearchParamsDstu2( + IBaseResource theConformance, + String resourceName, + TreeSet includes, + TreeSet theRevIncludes, + TreeSet sortParams, + boolean haveSearchParams, + List> queryIncludes) { + ca.uhn.fhir.model.dstu2.resource.Conformance conformance = + (ca.uhn.fhir.model.dstu2.resource.Conformance) theConformance; for (ca.uhn.fhir.model.dstu2.resource.Conformance.Rest nextRest : conformance.getRest()) { for (ca.uhn.fhir.model.dstu2.resource.Conformance.RestResource nextRes : nextRest.getResource()) { if (nextRes.getTypeElement().getValue().equals(resourceName)) { @@ -727,8 +832,10 @@ public class Controller extends BaseController { includes.add(next.getValue()); } } - for (ca.uhn.fhir.model.dstu2.resource.Conformance.RestResourceSearchParam next : nextRes.getSearchParam()) { - if (next.getTypeElement().getValueAsEnum() != 
ca.uhn.fhir.model.dstu2.valueset.SearchParamTypeEnum.COMPOSITE) { + for (ca.uhn.fhir.model.dstu2.resource.Conformance.RestResourceSearchParam next : + nextRes.getSearchParam()) { + if (next.getTypeElement().getValueAsEnum() + != ca.uhn.fhir.model.dstu2.valueset.SearchParamTypeEnum.COMPOSITE) { sortParams.add(next.getNameElement().getValue()); } } @@ -738,8 +845,10 @@ public class Controller extends BaseController { } else { // It's a different resource from the one we're searching, so // scan for revinclude candidates - for (ca.uhn.fhir.model.dstu2.resource.Conformance.RestResourceSearchParam next : nextRes.getSearchParam()) { - if (next.getTypeElement().getValueAsEnum() == ca.uhn.fhir.model.dstu2.valueset.SearchParamTypeEnum.REFERENCE) { + for (ca.uhn.fhir.model.dstu2.resource.Conformance.RestResourceSearchParam next : + nextRes.getSearchParam()) { + if (next.getTypeElement().getValueAsEnum() + == ca.uhn.fhir.model.dstu2.valueset.SearchParamTypeEnum.REFERENCE) { for (BoundCodeDt nextTargetType : next.getTarget()) { if (nextTargetType.getValue().equals(resourceName)) { theRevIncludes.add(nextRes.getTypeElement().getValue() + ":" + next.getName()); @@ -753,8 +862,14 @@ public class Controller extends BaseController { return haveSearchParams; } - private boolean extractSearchParamsDstu3CapabilityStatement(IBaseResource theConformance, String resourceName, TreeSet includes, TreeSet theRevIncludes, TreeSet sortParams, - boolean haveSearchParams, List> queryIncludes) { + private boolean extractSearchParamsDstu3CapabilityStatement( + IBaseResource theConformance, + String resourceName, + TreeSet includes, + TreeSet theRevIncludes, + TreeSet sortParams, + boolean haveSearchParams, + List> queryIncludes) { CapabilityStatement conformance = (org.hl7.fhir.dstu3.model.CapabilityStatement) theConformance; for (CapabilityStatementRestComponent nextRest : conformance.getRest()) { for (CapabilityStatementRestResourceComponent nextRes : nextRest.getResource()) { @@ -765,7 +880,8 @@ public class Controller extends BaseController { } } for (CapabilityStatementRestResourceSearchParamComponent next : nextRes.getSearchParam()) { - if (next.getTypeElement().getValue() != org.hl7.fhir.dstu3.model.Enumerations.SearchParamType.COMPOSITE) { + if (next.getTypeElement().getValue() + != org.hl7.fhir.dstu3.model.Enumerations.SearchParamType.COMPOSITE) { sortParams.add(next.getNameElement().getValue()); } } @@ -776,8 +892,8 @@ public class Controller extends BaseController { // It's a different resource from the one we're searching, so // scan for revinclude candidates for (CapabilityStatementRestResourceSearchParamComponent next : nextRes.getSearchParam()) { - if (next.getTypeElement().getValue() == org.hl7.fhir.dstu3.model.Enumerations.SearchParamType.REFERENCE) { - } + if (next.getTypeElement().getValue() + == org.hl7.fhir.dstu3.model.Enumerations.SearchParamType.REFERENCE) {} } } } @@ -785,19 +901,30 @@ public class Controller extends BaseController { return haveSearchParams; } - private boolean extractSearchParamsR4CapabilityStatement(IBaseResource theConformance, String resourceName, TreeSet includes, TreeSet theRevIncludes, TreeSet sortParams, - boolean haveSearchParams, List> queryIncludes) { - org.hl7.fhir.r4.model.CapabilityStatement conformance = (org.hl7.fhir.r4.model.CapabilityStatement) theConformance; - for (org.hl7.fhir.r4.model.CapabilityStatement.CapabilityStatementRestComponent nextRest : conformance.getRest()) { - for (org.hl7.fhir.r4.model.CapabilityStatement.CapabilityStatementRestResourceComponent 
nextRes : nextRest.getResource()) { + private boolean extractSearchParamsR4CapabilityStatement( + IBaseResource theConformance, + String resourceName, + TreeSet includes, + TreeSet theRevIncludes, + TreeSet sortParams, + boolean haveSearchParams, + List> queryIncludes) { + org.hl7.fhir.r4.model.CapabilityStatement conformance = + (org.hl7.fhir.r4.model.CapabilityStatement) theConformance; + for (org.hl7.fhir.r4.model.CapabilityStatement.CapabilityStatementRestComponent nextRest : + conformance.getRest()) { + for (org.hl7.fhir.r4.model.CapabilityStatement.CapabilityStatementRestResourceComponent nextRes : + nextRest.getResource()) { if (nextRes.getTypeElement().getValue().equals(resourceName)) { for (org.hl7.fhir.r4.model.StringType next : nextRes.getSearchInclude()) { if (next.isEmpty() == false) { includes.add(next.getValue()); } } - for (org.hl7.fhir.r4.model.CapabilityStatement.CapabilityStatementRestResourceSearchParamComponent next : nextRes.getSearchParam()) { - if (next.getTypeElement().getValue() != org.hl7.fhir.r4.model.Enumerations.SearchParamType.COMPOSITE) { + for (org.hl7.fhir.r4.model.CapabilityStatement.CapabilityStatementRestResourceSearchParamComponent + next : nextRes.getSearchParam()) { + if (next.getTypeElement().getValue() + != org.hl7.fhir.r4.model.Enumerations.SearchParamType.COMPOSITE) { sortParams.add(next.getNameElement().getValue()); } } @@ -807,9 +934,10 @@ public class Controller extends BaseController { } else { // It's a different resource from the one we're searching, so // scan for revinclude candidates - for (org.hl7.fhir.r4.model.CapabilityStatement.CapabilityStatementRestResourceSearchParamComponent next : nextRes.getSearchParam()) { - if (next.getTypeElement().getValue() == org.hl7.fhir.r4.model.Enumerations.SearchParamType.REFERENCE) { - } + for (org.hl7.fhir.r4.model.CapabilityStatement.CapabilityStatementRestResourceSearchParamComponent + next : nextRes.getSearchParam()) { + if (next.getTypeElement().getValue() + == org.hl7.fhir.r4.model.Enumerations.SearchParamType.REFERENCE) {} } } } @@ -817,19 +945,30 @@ public class Controller extends BaseController { return haveSearchParams; } - private boolean extractSearchParamsR5CapabilityStatement(IBaseResource theConformance, String resourceName, TreeSet includes, TreeSet theRevIncludes, TreeSet sortParams, - boolean haveSearchParams, List> queryIncludes) { - org.hl7.fhir.r5.model.CapabilityStatement conformance = (org.hl7.fhir.r5.model.CapabilityStatement) theConformance; - for (org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementRestComponent nextRest : conformance.getRest()) { - for (org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementRestResourceComponent nextRes : nextRest.getResource()) { + private boolean extractSearchParamsR5CapabilityStatement( + IBaseResource theConformance, + String resourceName, + TreeSet includes, + TreeSet theRevIncludes, + TreeSet sortParams, + boolean haveSearchParams, + List> queryIncludes) { + org.hl7.fhir.r5.model.CapabilityStatement conformance = + (org.hl7.fhir.r5.model.CapabilityStatement) theConformance; + for (org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementRestComponent nextRest : + conformance.getRest()) { + for (org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementRestResourceComponent nextRes : + nextRest.getResource()) { if (nextRes.getTypeElement().getValue().equals(resourceName)) { for (org.hl7.fhir.r5.model.StringType next : nextRes.getSearchInclude()) { if (next.isEmpty() == false) { includes.add(next.getValue()); } } - 
for (org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementRestResourceSearchParamComponent next : nextRes.getSearchParam()) { - if (next.getTypeElement().getValue() != org.hl7.fhir.r5.model.Enumerations.SearchParamType.COMPOSITE) { + for (org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementRestResourceSearchParamComponent + next : nextRes.getSearchParam()) { + if (next.getTypeElement().getValue() + != org.hl7.fhir.r5.model.Enumerations.SearchParamType.COMPOSITE) { sortParams.add(next.getNameElement().getValue()); } } @@ -839,9 +978,10 @@ public class Controller extends BaseController { } else { // It's a different resource from the one we're searching, so // scan for revinclude candidates - for (org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementRestResourceSearchParamComponent next : nextRes.getSearchParam()) { - if (next.getTypeElement().getValue() == org.hl7.fhir.r5.model.Enumerations.SearchParamType.REFERENCE) { - } + for (org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementRestResourceSearchParamComponent + next : nextRes.getSearchParam()) { + if (next.getTypeElement().getValue() + == org.hl7.fhir.r5.model.Enumerations.SearchParamType.REFERENCE) {} } } } @@ -849,13 +989,16 @@ public class Controller extends BaseController { return haveSearchParams; } - private boolean handleSearchParam(String paramIdxString, HttpServletRequest theReq, IQuery theQuery, JsonWriter theClientCodeJsonWriter) throws IOException { + private boolean handleSearchParam( + String paramIdxString, HttpServletRequest theReq, IQuery theQuery, JsonWriter theClientCodeJsonWriter) + throws IOException { String nextName = sanitizeUrlPart(theReq.getParameter("param." + paramIdxString + ".name")); if (isBlank(nextName)) { return false; } - String nextQualifier = sanitizeUrlPart(defaultString(theReq.getParameter("param." + paramIdxString + ".qualifier"))); + String nextQualifier = + sanitizeUrlPart(defaultString(theReq.getParameter("param." + paramIdxString + ".qualifier"))); String nextType = sanitizeUrlPart(theReq.getParameter("param." + paramIdxString + ".type")); List parts = new ArrayList(); @@ -872,10 +1015,13 @@ public class Controller extends BaseController { addToWhere = false; if (isBlank(parts.get(0))) { values = Collections.singletonList(parts.get(1)); - theQuery.where(new TokenClientParam(nextName + nextQualifier).exactly().code(parts.get(1))); + theQuery.where( + new TokenClientParam(nextName + nextQualifier).exactly().code(parts.get(1))); } else { values = Collections.singletonList(parts.get(0) + "|" + parts.get(1)); - theQuery.where(new TokenClientParam(nextName + nextQualifier).exactly().systemAndCode(parts.get(0), parts.get(1))); + theQuery.where(new TokenClientParam(nextName + nextQualifier) + .exactly() + .systemAndCode(parts.get(0), parts.get(1))); } } else if ("date".equals(nextType)) { values = new ArrayList(); @@ -944,9 +1090,10 @@ public class Controller extends BaseController { theClientCodeJsonWriter.value(nextValue); theClientCodeJsonWriter.endObject(); if (addToWhere) { - theQuery.where(new StringClientParam(nextName + nextQualifier).matches().value(nextValue)); + theQuery.where(new StringClientParam(nextName + nextQualifier) + .matches() + .value(nextValue)); } - } if (StringUtils.isNotBlank(theReq.getParameter("param." 
+ paramIdxString + ".0.name"))) { @@ -955,5 +1102,4 @@ public class Controller extends BaseController { return true; } - } diff --git a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/FhirTesterMvcConfig.java b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/FhirTesterMvcConfig.java index 958b872bb72..7cc02fd2a19 100644 --- a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/FhirTesterMvcConfig.java +++ b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/FhirTesterMvcConfig.java @@ -57,7 +57,8 @@ public class FhirTesterMvcConfig implements WebMvcConfigurer { } @Bean - public AnnotationMethodHandlerAdapterConfigurer annotationMethodHandlerAdapterConfigurer(@Qualifier("requestMappingHandlerAdapter") RequestMappingHandlerAdapter theAdapter) { + public AnnotationMethodHandlerAdapterConfigurer annotationMethodHandlerAdapterConfigurer( + @Qualifier("requestMappingHandlerAdapter") RequestMappingHandlerAdapter theAdapter) { return new AnnotationMethodHandlerAdapterConfigurer(theAdapter); } @@ -76,5 +77,4 @@ public class FhirTesterMvcConfig implements WebMvcConfigurer { return templateEngine; } - } diff --git a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/TesterConfig.java b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/TesterConfig.java index 145dc05a140..1a7c0f4f685 100644 --- a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/TesterConfig.java +++ b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/TesterConfig.java @@ -9,13 +9,12 @@ import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IIdType; import org.springframework.beans.factory.annotation.Required; -import javax.annotation.PostConstruct; import java.util.ArrayList; import java.util.Collections; -import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import javax.annotation.PostConstruct; public class TesterConfig { public static final String SYSPROP_FORCE_SERVERS = "ca.uhn.fhir.to.TesterConfig_SYSPROP_FORCE_SERVERS"; @@ -25,8 +24,10 @@ public class TesterConfig { private final LinkedHashMap myIdToServerBase = new LinkedHashMap<>(); private final LinkedHashMap myIdToServerName = new LinkedHashMap<>(); private final List myServerBuilders = new ArrayList<>(); - private final LinkedHashMap> myServerIdToTypeToOperationNameToInclusionChecker = new LinkedHashMap<>(); - private final LinkedHashMap> myServerIdToTypeToInteractionNameToInclusionChecker = new LinkedHashMap<>(); + private final LinkedHashMap> + myServerIdToTypeToOperationNameToInclusionChecker = new LinkedHashMap<>(); + private final LinkedHashMap> + myServerIdToTypeToInteractionNameToInclusionChecker = new LinkedHashMap<>(); private ITestingUiClientFactory myClientFactory; private boolean myRefuseToFetchThirdPartyUrls = true; private boolean myDebugTemplatesMode; @@ -49,7 +50,8 @@ public class TesterConfig { myIdToServerName.put(next.myId, next.myName); myIdToAllowsApiKey.put(next.myId, next.myAllowsApiKey); myServerIdToTypeToOperationNameToInclusionChecker.put(next.myId, next.myOperationNameToInclusionChecker); - myServerIdToTypeToInteractionNameToInclusionChecker.put(next.myId, next.mySearchResultRowInteractionEnabled); + myServerIdToTypeToInteractionNameToInclusionChecker.put( + next.myId, next.mySearchResultRowInteractionEnabled); if (next.myEnableDebugTemplates) { myDebugTemplatesMode = true; } @@ -108,7 +110,8 @@ public class TesterConfig { public List getSearchResultRowOperations(String theId, IIdType theResourceId) { List retVal = new 
ArrayList<>(); - Map operationNamesToInclusionCheckers = myServerIdToTypeToOperationNameToInclusionChecker.get(theId); + Map operationNamesToInclusionCheckers = + myServerIdToTypeToOperationNameToInclusionChecker.get(theId); for (String operationName : operationNamesToInclusionCheckers.keySet()) { IInclusionChecker checker = operationNamesToInclusionCheckers.get(operationName); if (checker.shouldInclude(theResourceId)) { @@ -123,10 +126,12 @@ public class TesterConfig { * Called from Thymeleaf */ @SuppressWarnings("unused") - public boolean isSearchResultRowInteractionEnabled(String theServerId, String theInteractionName, IIdType theResourceId) { + public boolean isSearchResultRowInteractionEnabled( + String theServerId, String theInteractionName, IIdType theResourceId) { List retVal = new ArrayList<>(); - Map interactionNamesToInclusionCheckers = myServerIdToTypeToInteractionNameToInclusionChecker.get(theServerId); + Map interactionNamesToInclusionCheckers = + myServerIdToTypeToInteractionNameToInclusionChecker.get(theServerId); RestOperationTypeEnum interaction = RestOperationTypeEnum.forCode(theInteractionName); Validate.isTrue(interaction != null, "Unknown interaction: %s", theInteractionName); IInclusionChecker inclusionChecker = interactionNamesToInclusionCheckers.getOrDefault(interaction, id -> false); @@ -148,7 +153,8 @@ public class TesterConfig { String[] nextSplit = nextRaw.split(","); if (nextSplit.length < 3) { - throw new IllegalArgumentException(Msg.code(195) + "Invalid serveer line '" + nextRaw + "' - Must be comma separated"); + throw new IllegalArgumentException( + Msg.code(195) + "Invalid serveer line '" + nextRaw + "' - Must be comma separated"); } else { Validate.notBlank(nextSplit[0], "theId can not be blank"); Validate.notBlank(nextSplit[1], "theVersion can not be blank"); @@ -156,7 +162,10 @@ public class TesterConfig { Validate.notBlank(nextSplit[3], "theServerBase can not be blank"); myIdToServerName.put(nextSplit[0].trim(), nextSplit[2].trim()); myIdToServerBase.put(nextSplit[0].trim(), nextSplit[3].trim()); - myIdToFhirVersion.put(nextSplit[0].trim(), FhirVersionEnum.valueOf(nextSplit[1].trim().toUpperCase().replace('.', '_'))); + myIdToFhirVersion.put( + nextSplit[0].trim(), + FhirVersionEnum.valueOf( + nextSplit[1].trim().toUpperCase().replace('.', '_'))); } } } @@ -164,25 +173,21 @@ public class TesterConfig { public interface IServerBuilderStep1 { IServerBuilderStep2 withId(String theId); - } public interface IServerBuilderStep2 { IServerBuilderStep3 withFhirVersion(FhirVersionEnum theVersion); - } public interface IServerBuilderStep3 { IServerBuilderStep4 withBaseUrl(String theBaseUrl); - } public interface IServerBuilderStep4 { IServerBuilderStep5 withName(String theName); - } public interface IServerBuilderStep5 { @@ -210,19 +215,25 @@ public class TesterConfig { * By default {@link RestOperationTypeEnum#READ} and {@link RestOperationTypeEnum#UPDATE} are * already enabled, and they are currently the only interactions supported. 
*/ - ServerBuilder withSearchResultRowInteraction(RestOperationTypeEnum theInteraction, IInclusionChecker theEnabled); + ServerBuilder withSearchResultRowInteraction( + RestOperationTypeEnum theInteraction, IInclusionChecker theEnabled); } public interface IInclusionChecker { boolean shouldInclude(IIdType theResourceId); - } - public class ServerBuilder implements IServerBuilderStep1, IServerBuilderStep2, IServerBuilderStep3, IServerBuilderStep4, IServerBuilderStep5 { + public class ServerBuilder + implements IServerBuilderStep1, + IServerBuilderStep2, + IServerBuilderStep3, + IServerBuilderStep4, + IServerBuilderStep5 { private final Map myOperationNameToInclusionChecker = new LinkedHashMap<>(); - private final Map mySearchResultRowInteractionEnabled = new LinkedHashMap<>(); + private final Map mySearchResultRowInteractionEnabled = + new LinkedHashMap<>(); private boolean myAllowsApiKey; private String myBaseUrl; private String myId; @@ -261,7 +272,8 @@ public class TesterConfig { } @Override - public ServerBuilder withSearchResultRowInteraction(RestOperationTypeEnum theInteraction, IInclusionChecker theEnabled) { + public ServerBuilder withSearchResultRowInteraction( + RestOperationTypeEnum theInteraction, IInclusionChecker theEnabled) { mySearchResultRowInteractionEnabled.put(theInteraction, theEnabled); return this; } @@ -293,6 +305,5 @@ public class TesterConfig { myName = theName; return this; } - } } diff --git a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/client/BearerTokenClientFactory.java b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/client/BearerTokenClientFactory.java index c95b7fa01af..9ce922cd8bc 100644 --- a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/client/BearerTokenClientFactory.java +++ b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/client/BearerTokenClientFactory.java @@ -1,18 +1,19 @@ package ca.uhn.fhir.to.client; -import static org.apache.commons.lang3.StringUtils.isNotBlank; - -import javax.servlet.http.HttpServletRequest; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.rest.client.api.IGenericClient; import ca.uhn.fhir.rest.client.interceptor.BearerTokenAuthInterceptor; import ca.uhn.fhir.rest.server.util.ITestingUiClientFactory; +import javax.servlet.http.HttpServletRequest; + +import static org.apache.commons.lang3.StringUtils.isNotBlank; + public class BearerTokenClientFactory implements ITestingUiClientFactory { @Override - public IGenericClient newClient(FhirContext theFhirContext, HttpServletRequest theRequest, String theServerBaseUrl) { + public IGenericClient newClient( + FhirContext theFhirContext, HttpServletRequest theRequest, String theServerBaseUrl) { // Create a client IGenericClient client = theFhirContext.newRestfulGenericClient(theServerBaseUrl); @@ -23,5 +24,4 @@ public class BearerTokenClientFactory implements ITestingUiClientFactory { return client; } - } diff --git a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/model/BufferResponseInterceptor.java b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/model/BufferResponseInterceptor.java index e200d32b1f4..21d85ef71a5 100644 --- a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/model/BufferResponseInterceptor.java +++ b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/model/BufferResponseInterceptor.java @@ -1,9 +1,9 @@ package ca.uhn.fhir.to.model; -import java.io.IOException; - import ca.uhn.fhir.rest.client.api.*; +import java.io.IOException; + public class BufferResponseInterceptor implements 
IClientInterceptor { @Override @@ -15,5 +15,4 @@ public class BufferResponseInterceptor implements IClientInterceptor { public void interceptResponse(IHttpResponse theResponse) throws IOException { theResponse.bufferEntity(); } - } diff --git a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/model/HomeRequest.java b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/model/HomeRequest.java index 9e41a48fee0..0ab72bc77a7 100644 --- a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/model/HomeRequest.java +++ b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/model/HomeRequest.java @@ -1,11 +1,5 @@ package ca.uhn.fhir.to.model; -import static org.apache.commons.lang3.StringUtils.*; - -import javax.servlet.http.HttpServletRequest; - -import org.springframework.web.bind.annotation.ModelAttribute; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.parser.IParser; @@ -17,6 +11,11 @@ import ca.uhn.fhir.rest.server.IncomingRequestAddressStrategy; import ca.uhn.fhir.rest.server.util.ITestingUiClientFactory; import ca.uhn.fhir.to.Controller; import ca.uhn.fhir.to.TesterConfig; +import org.springframework.web.bind.annotation.ModelAttribute; + +import javax.servlet.http.HttpServletRequest; + +import static org.apache.commons.lang3.StringUtils.*; public class HomeRequest { @@ -119,20 +118,22 @@ public class HomeRequest { myServerId = theServerId; } - public GenericClient newClient(HttpServletRequest theRequest, FhirContext theContext, TesterConfig theConfig, Controller.CaptureInterceptor theInterceptor) { + public GenericClient newClient( + HttpServletRequest theRequest, + FhirContext theContext, + TesterConfig theConfig, + Controller.CaptureInterceptor theInterceptor) { theContext.getRestfulClientFactory().setServerValidationMode(ServerValidationModeEnum.NEVER); GenericClient retVal; ITestingUiClientFactory clientFactory = theConfig.getClientFactory(); if (clientFactory != null) { - retVal = (GenericClient) clientFactory.newClient( - theContext, - theRequest, - getServerBase(theRequest, theConfig)); + retVal = (GenericClient) + clientFactory.newClient(theContext, theRequest, getServerBase(theRequest, theConfig)); } else { retVal = (GenericClient) theContext.newRestfulGenericClient(getServerBase(theRequest, theConfig)); } - + retVal.registerInterceptor(new BufferResponseInterceptor()); retVal.setKeepResponses(true); @@ -155,7 +156,7 @@ public class HomeRequest { retVal.setSummary(summary); } } - + retVal.registerInterceptor(theInterceptor); final String remoteAddr = org.slf4j.MDC.get("req.remoteAddr"); @@ -187,15 +188,15 @@ public class HomeRequest { public String getApiKey(HttpServletRequest theServletRequest, TesterConfig theConfig) { Boolean allowsApiKey; if (isBlank(myServerId) && !theConfig.getIdToFhirVersion().containsKey(myServerId)) { - allowsApiKey = theConfig.getIdToAllowsApiKey().entrySet().iterator().next().getValue(); + allowsApiKey = + theConfig.getIdToAllowsApiKey().entrySet().iterator().next().getValue(); } else { allowsApiKey = theConfig.getIdToAllowsApiKey().get(myServerId); } if (!Boolean.TRUE.equals(allowsApiKey)) { return null; } - + return defaultString(theServletRequest.getParameter("apiKey")); } - } diff --git a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/model/ResourceRequest.java b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/model/ResourceRequest.java index f8b3c53f22e..a752101506b 100644 --- 
a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/model/ResourceRequest.java +++ b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/model/ResourceRequest.java @@ -24,6 +24,4 @@ public class ResourceRequest extends HomeRequest { public void setUpdateVid(String theUpdateVid) { myUpdateVid = theUpdateVid; } - - } diff --git a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/model/TransactionRequest.java b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/model/TransactionRequest.java index 759297cc6a5..d812043ed33 100644 --- a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/model/TransactionRequest.java +++ b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/model/TransactionRequest.java @@ -14,5 +14,4 @@ public class TransactionRequest extends HomeRequest { public void setTransactionBody(String theTransactionBody) { myTransactionBody = theTransactionBody; } - } diff --git a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/mvc/ToBindingInitializer.java b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/mvc/ToBindingInitializer.java index 14f69789cb4..1b64099b779 100644 --- a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/mvc/ToBindingInitializer.java +++ b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/mvc/ToBindingInitializer.java @@ -15,5 +15,4 @@ public class ToBindingInitializer implements WebBindingInitializer { public void initBinder(WebDataBinder theBinder, WebRequest theRequest) { // nothing } - } diff --git a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/util/WebUtil.java b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/util/WebUtil.java index 32019e859ca..d2c16d0fff9 100644 --- a/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/util/WebUtil.java +++ b/hapi-fhir-testpage-overlay/src/main/java/ca/uhn/fhir/to/util/WebUtil.java @@ -9,8 +9,8 @@ */ package ca.uhn.fhir.to.util; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.ConfigurationException; +import ca.uhn.fhir.i18n.Msg; import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistration; import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry; @@ -40,8 +40,11 @@ public class WebUtil { addWebjarWithVersion(theRegistry, name, version); } - public static ResourceHandlerRegistration addWebjarWithVersion(ResourceHandlerRegistry theRegistry, String name, String version) { - return theRegistry.addResourceHandler("/resources/" + name + "/**").addResourceLocations("classpath:/META-INF/resources/webjars/" + name + "/" + version + "/"); + public static ResourceHandlerRegistration addWebjarWithVersion( + ResourceHandlerRegistry theRegistry, String name, String version) { + return theRegistry + .addResourceHandler("/resources/" + name + "/**") + .addResourceLocations("classpath:/META-INF/resources/webjars/" + name + "/" + version + "/"); } public static void webJarAddAwesomeCheckbox(ResourceHandlerRegistry theRegistry) { @@ -79,5 +82,4 @@ public class WebUtil { public static void webJarAddPopperJs(ResourceHandlerRegistry theRegistry) { WebUtil.addStaticResourceWebJar(theRegistry, "org.webjars.npm", "popper.js"); } - } diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/BaseStaticResourceValidationSupport.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/BaseStaticResourceValidationSupport.java index 6ef2b5966d0..f12a6c6ae21 100644 --- 
a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/BaseStaticResourceValidationSupport.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/BaseStaticResourceValidationSupport.java @@ -23,5 +23,4 @@ public abstract class BaseStaticResourceValidationSupport extends BaseValidation ArrayList retVal = new ArrayList<>(theMap.values()); return (List) Collections.unmodifiableList(retVal); } - } diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/BaseValidationSupportWrapper.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/BaseValidationSupportWrapper.java index 6d92c53353b..e14a686e208 100644 --- a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/BaseValidationSupportWrapper.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/BaseValidationSupportWrapper.java @@ -9,9 +9,9 @@ import ca.uhn.fhir.context.support.ValueSetExpansionOptions; import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBaseResource; +import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.List; /** * This class is a wrapper for an existing {@link @IContextValidationSupport} object, intended to be @@ -73,17 +73,35 @@ public abstract class BaseValidationSupportWrapper extends BaseValidationSupport } @Override - public CodeValidationResult validateCode(@Nonnull ValidationSupportContext theValidationSupportContext, @Nonnull ConceptValidationOptions theOptions, String theCodeSystem, String theCode, String theDisplay, String theValueSetUrl) { - return myWrap.validateCode(theValidationSupportContext, theOptions, theCodeSystem, theCode, theDisplay, theValueSetUrl); + public CodeValidationResult validateCode( + @Nonnull ValidationSupportContext theValidationSupportContext, + @Nonnull ConceptValidationOptions theOptions, + String theCodeSystem, + String theCode, + String theDisplay, + String theValueSetUrl) { + return myWrap.validateCode( + theValidationSupportContext, theOptions, theCodeSystem, theCode, theDisplay, theValueSetUrl); } @Override - public IValidationSupport.CodeValidationResult validateCodeInValueSet(ValidationSupportContext theValidationSupportContext, ConceptValidationOptions theValidationOptions, String theCodeSystem, String theCode, String theDisplay, @Nonnull IBaseResource theValueSet) { - return myWrap.validateCodeInValueSet(theValidationSupportContext, theValidationOptions, theCodeSystem, theCode, theDisplay, theValueSet); + public IValidationSupport.CodeValidationResult validateCodeInValueSet( + ValidationSupportContext theValidationSupportContext, + ConceptValidationOptions theValidationOptions, + String theCodeSystem, + String theCode, + String theDisplay, + @Nonnull IBaseResource theValueSet) { + return myWrap.validateCodeInValueSet( + theValidationSupportContext, theValidationOptions, theCodeSystem, theCode, theDisplay, theValueSet); } @Override - public LookupCodeResult lookupCode(ValidationSupportContext theValidationSupportContext, String theSystem, String theCode, String theDisplayLanguage) { + public LookupCodeResult lookupCode( + ValidationSupportContext theValidationSupportContext, + String theSystem, + String theCode, + String theDisplayLanguage) { return myWrap.lookupCode(theValidationSupportContext, theSystem, theCode, theDisplayLanguage); } @@ -93,7 +111,10 @@ public abstract class BaseValidationSupportWrapper 
extends BaseValidationSupport } @Override - public IValidationSupport.ValueSetExpansionOutcome expandValueSet(ValidationSupportContext theValidationSupportContext, ValueSetExpansionOptions theExpansionOptions, @Nonnull IBaseResource theValueSetToExpand) { + public IValidationSupport.ValueSetExpansionOutcome expandValueSet( + ValidationSupportContext theValidationSupportContext, + ValueSetExpansionOptions theExpansionOptions, + @Nonnull IBaseResource theValueSetToExpand) { return myWrap.expandValueSet(theValidationSupportContext, theExpansionOptions, theValueSetToExpand); } @@ -107,14 +128,18 @@ public abstract class BaseValidationSupportWrapper extends BaseValidationSupport return myWrap.fetchValueSet(theUri); } - @Override public IBaseResource fetchStructureDefinition(String theUrl) { return myWrap.fetchStructureDefinition(theUrl); } @Override - public IBaseResource generateSnapshot(ValidationSupportContext theValidationSupportContext, IBaseResource theInput, String theUrl, String theWebUrl, String theProfileName) { + public IBaseResource generateSnapshot( + ValidationSupportContext theValidationSupportContext, + IBaseResource theInput, + String theUrl, + String theWebUrl, + String theProfileName) { return myWrap.generateSnapshot(theValidationSupportContext, theInput, theUrl, theWebUrl, theProfileName); } diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/CachingValidationSupport.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/CachingValidationSupport.java index d153cf38bc3..873a854f1f3 100644 --- a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/CachingValidationSupport.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/CachingValidationSupport.java @@ -15,8 +15,6 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -26,6 +24,8 @@ import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.function.Function; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; import static org.apache.commons.lang3.StringUtils.defaultIfBlank; @@ -70,7 +70,10 @@ public class CachingValidationSupport extends BaseValidationSupportWrapper imple * @param theWrap The validation support module to wrap * @param theCacheTimeouts The timeouts to use */ - public CachingValidationSupport(IValidationSupport theWrap, CacheTimeouts theCacheTimeouts, boolean theIsEnabledValidationForCodingsLogicalAnd) { + public CachingValidationSupport( + IValidationSupport theWrap, + CacheTimeouts theCacheTimeouts, + boolean theIsEnabledValidationForCodingsLogicalAnd) { super(theWrap.getFhirContext(), theWrap); myExpandValueSetCache = CacheFactory.build(theCacheTimeouts.getExpandValueSetMillis(), 100); myValidateCodeCache = CacheFactory.build(theCacheTimeouts.getValidateCodeMillis(), 5000); @@ -81,18 +84,12 @@ public class CachingValidationSupport extends BaseValidationSupportWrapper imple LinkedBlockingQueue executorQueue = new LinkedBlockingQueue<>(1000); BasicThreadFactory threadFactory = new BasicThreadFactory.Builder() - .namingPattern("CachingValidationSupport-%d") - .daemon(false) - 
.priority(Thread.NORM_PRIORITY) - .build(); + .namingPattern("CachingValidationSupport-%d") + .daemon(false) + .priority(Thread.NORM_PRIORITY) + .build(); myBackgroundExecutor = new ThreadPoolExecutor( - 1, - 1, - 0L, - TimeUnit.MILLISECONDS, - executorQueue, - threadFactory, - new ThreadPoolExecutor.DiscardPolicy()); + 1, 1, 0L, TimeUnit.MILLISECONDS, executorQueue, threadFactory, new ThreadPoolExecutor.DiscardPolicy()); myIsEnabledValidationForCodingsLogicalAnd = theIsEnabledValidationForCodingsLogicalAnd; } @@ -134,7 +131,8 @@ public class CachingValidationSupport extends BaseValidationSupportWrapper imple @Override public IBaseResource fetchStructureDefinition(String theUrl) { - return loadFromCache(myCache, "fetchStructureDefinition " + theUrl, t -> super.fetchStructureDefinition(theUrl)); + return loadFromCache( + myCache, "fetchStructureDefinition " + theUrl, t -> super.fetchStructureDefinition(theUrl)); } @Override @@ -144,57 +142,103 @@ public class CachingValidationSupport extends BaseValidationSupportWrapper imple @Override public T fetchResource(@Nullable Class theClass, String theUri) { - return loadFromCache(myCache, "fetchResource " + theClass + " " + theUri, - t -> super.fetchResource(theClass, theUri)); + return loadFromCache( + myCache, "fetchResource " + theClass + " " + theUri, t -> super.fetchResource(theClass, theUri)); } @Override public boolean isCodeSystemSupported(ValidationSupportContext theValidationSupportContext, String theSystem) { String key = "isCodeSystemSupported " + theSystem; - Boolean retVal = loadFromCacheReentrantSafe(myCache, key, t -> super.isCodeSystemSupported(theValidationSupportContext, theSystem)); + Boolean retVal = loadFromCacheReentrantSafe( + myCache, key, t -> super.isCodeSystemSupported(theValidationSupportContext, theSystem)); assert retVal != null; return retVal; } @Override - public ValueSetExpansionOutcome expandValueSet(ValidationSupportContext theValidationSupportContext, ValueSetExpansionOptions theExpansionOptions, @Nonnull IBaseResource theValueSetToExpand) { + public ValueSetExpansionOutcome expandValueSet( + ValidationSupportContext theValidationSupportContext, + ValueSetExpansionOptions theExpansionOptions, + @Nonnull IBaseResource theValueSetToExpand) { if (!theValueSetToExpand.getIdElement().hasIdPart()) { return super.expandValueSet(theValidationSupportContext, theExpansionOptions, theValueSetToExpand); } ValueSetExpansionOptions expansionOptions = defaultIfNull(theExpansionOptions, EMPTY_EXPANSION_OPTIONS); - String key = "expandValueSet " + - theValueSetToExpand.getIdElement().getValue() + " " + - expansionOptions.isIncludeHierarchy() + " " + - expansionOptions.getFilter() + " " + - expansionOptions.getOffset() + " " + - expansionOptions.getCount(); - return loadFromCache(myExpandValueSetCache, key, t -> super.expandValueSet(theValidationSupportContext, theExpansionOptions, theValueSetToExpand)); + String key = "expandValueSet " + theValueSetToExpand.getIdElement().getValue() + + " " + expansionOptions.isIncludeHierarchy() + + " " + expansionOptions.getFilter() + + " " + expansionOptions.getOffset() + + " " + expansionOptions.getCount(); + return loadFromCache( + myExpandValueSetCache, + key, + t -> super.expandValueSet(theValidationSupportContext, theExpansionOptions, theValueSetToExpand)); } @Override - public CodeValidationResult validateCode(@Nonnull ValidationSupportContext theValidationSupportContext, @Nonnull ConceptValidationOptions theOptions, String theCodeSystem, String theCode, String theDisplay, String 
theValueSetUrl) { - String key = "validateCode " + theCodeSystem + " " + theCode + " " + defaultString(theDisplay) + " " + defaultIfBlank(theValueSetUrl, "NO_VS"); - return loadFromCache(myValidateCodeCache, key, t -> super.validateCode(theValidationSupportContext, theOptions, theCodeSystem, theCode, theDisplay, theValueSetUrl)); + public CodeValidationResult validateCode( + @Nonnull ValidationSupportContext theValidationSupportContext, + @Nonnull ConceptValidationOptions theOptions, + String theCodeSystem, + String theCode, + String theDisplay, + String theValueSetUrl) { + String key = "validateCode " + theCodeSystem + " " + theCode + " " + defaultString(theDisplay) + " " + + defaultIfBlank(theValueSetUrl, "NO_VS"); + return loadFromCache( + myValidateCodeCache, + key, + t -> super.validateCode( + theValidationSupportContext, theOptions, theCodeSystem, theCode, theDisplay, theValueSetUrl)); } @Override - public LookupCodeResult lookupCode(ValidationSupportContext theValidationSupportContext, String theSystem, String theCode, String theDisplayLanguage) { + public LookupCodeResult lookupCode( + ValidationSupportContext theValidationSupportContext, + String theSystem, + String theCode, + String theDisplayLanguage) { String key = "lookupCode " + theSystem + " " + theCode + " " + defaultIfBlank(theDisplayLanguage, "NO_LANG"); - return loadFromCache(myLookupCodeCache, key, t -> super.lookupCode(theValidationSupportContext, theSystem, theCode, theDisplayLanguage)); + return loadFromCache( + myLookupCodeCache, + key, + t -> super.lookupCode(theValidationSupportContext, theSystem, theCode, theDisplayLanguage)); } @Override - public IValidationSupport.CodeValidationResult validateCodeInValueSet(ValidationSupportContext theValidationSupportContext, ConceptValidationOptions theValidationOptions, String theCodeSystem, String theCode, String theDisplay, @Nonnull IBaseResource theValueSet) { + public IValidationSupport.CodeValidationResult validateCodeInValueSet( + ValidationSupportContext theValidationSupportContext, + ConceptValidationOptions theValidationOptions, + String theCodeSystem, + String theCode, + String theDisplay, + @Nonnull IBaseResource theValueSet) { - BaseRuntimeChildDefinition urlChild = myCtx.getResourceDefinition(theValueSet).getChildByName("url"); - Optional valueSetUrl = urlChild.getAccessor().getValues(theValueSet).stream().map(t -> ((IPrimitiveType) t).getValueAsString()).filter(t -> isNotBlank(t)).findFirst(); + BaseRuntimeChildDefinition urlChild = + myCtx.getResourceDefinition(theValueSet).getChildByName("url"); + Optional valueSetUrl = urlChild.getAccessor().getValues(theValueSet).stream() + .map(t -> ((IPrimitiveType) t).getValueAsString()) + .filter(t -> isNotBlank(t)) + .findFirst(); if (valueSetUrl.isPresent()) { - String key = "validateCodeInValueSet " + theValidationOptions.toString() + " " + defaultString(theCodeSystem) + " " + defaultString(theCode) + " " + defaultString(theDisplay) + " " + valueSetUrl.get(); - return loadFromCache(myValidateCodeCache, key, t -> super.validateCodeInValueSet(theValidationSupportContext, theValidationOptions, theCodeSystem, theCode, theDisplay, theValueSet)); + String key = + "validateCodeInValueSet " + theValidationOptions.toString() + " " + defaultString(theCodeSystem) + + " " + defaultString(theCode) + " " + defaultString(theDisplay) + " " + valueSetUrl.get(); + return loadFromCache( + myValidateCodeCache, + key, + t -> super.validateCodeInValueSet( + theValidationSupportContext, + theValidationOptions, + theCodeSystem, + theCode, + 
theDisplay, + theValueSet)); } - return super.validateCodeInValueSet(theValidationSupportContext, theValidationOptions, theCodeSystem, theCode, theDisplay, theValueSet); + return super.validateCodeInValueSet( + theValidationSupportContext, theValidationOptions, theCodeSystem, theCode, theDisplay, theValueSet); } @Override @@ -256,7 +300,6 @@ public class CachingValidationSupport extends BaseValidationSupportWrapper imple return retVal; } - @Override public void invalidateCaches() { myExpandValueSetCache.invalidateAll(); @@ -324,11 +367,11 @@ public class CachingValidationSupport extends BaseValidationSupportWrapper imple public static CacheTimeouts defaultValues() { return new CacheTimeouts() - .setLookupCodeMillis(10 * DateUtils.MILLIS_PER_MINUTE) - .setExpandValueSetMillis(1 * DateUtils.MILLIS_PER_MINUTE) - .setTranslateCodeMillis(10 * DateUtils.MILLIS_PER_MINUTE) - .setValidateCodeMillis(10 * DateUtils.MILLIS_PER_MINUTE) - .setMiscMillis(10 * DateUtils.MILLIS_PER_MINUTE); + .setLookupCodeMillis(10 * DateUtils.MILLIS_PER_MINUTE) + .setExpandValueSetMillis(1 * DateUtils.MILLIS_PER_MINUTE) + .setTranslateCodeMillis(10 * DateUtils.MILLIS_PER_MINUTE) + .setValidateCodeMillis(10 * DateUtils.MILLIS_PER_MINUTE) + .setMiscMillis(10 * DateUtils.MILLIS_PER_MINUTE); } } diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/CommonCodeSystemsTerminologyService.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/CommonCodeSystemsTerminologyService.java index 06d580f897c..d680996f74a 100644 --- a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/CommonCodeSystemsTerminologyService.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/CommonCodeSystemsTerminologyService.java @@ -30,13 +30,13 @@ import org.hl7.fhir.r5.model.Resource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.io.InputStream; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Optional; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.defaultString; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -86,13 +86,25 @@ public class CommonCodeSystemsTerminologyService implements IValidationSupport { } @Override - public CodeValidationResult validateCodeInValueSet(ValidationSupportContext theValidationSupportContext, ConceptValidationOptions theOptions, String theCodeSystem, String theCode, String theDisplay, @Nonnull IBaseResource theValueSet) { + public CodeValidationResult validateCodeInValueSet( + ValidationSupportContext theValidationSupportContext, + ConceptValidationOptions theOptions, + String theCodeSystem, + String theCode, + String theDisplay, + @Nonnull IBaseResource theValueSet) { String url = getValueSetUrl(getFhirContext(), theValueSet); return validateCode(theValidationSupportContext, theOptions, theCodeSystem, theCode, theDisplay, url); } @Override - public CodeValidationResult validateCode(@Nonnull ValidationSupportContext theValidationSupportContext, @Nonnull ConceptValidationOptions theOptions, String theCodeSystem, String theCode, String theDisplay, String theValueSetUrl) { + public CodeValidationResult validateCode( + @Nonnull ValidationSupportContext theValidationSupportContext, + @Nonnull ConceptValidationOptions theOptions, + String theCodeSystem, + String theCode, + 
String theDisplay, + String theValueSetUrl) { /* ************************************************************************************** * NOTE: Update validation_support_modules.html if any of the support in this module * changes in any way! @@ -112,64 +124,65 @@ public class CommonCodeSystemsTerminologyService implements IValidationSupport { break; case LANGUAGES_VALUESET_URL: - if (!LANGUAGES_CODESYSTEM_URL.equals(theCodeSystem) && !(theCodeSystem == null && theOptions.isInferSystem())) { + if (!LANGUAGES_CODESYSTEM_URL.equals(theCodeSystem) + && !(theCodeSystem == null && theOptions.isInferSystem())) { return new CodeValidationResult() - .setSeverity(IssueSeverity.ERROR) - .setMessage("Inappropriate CodeSystem URL \"" + theCodeSystem + "\" for ValueSet: " + theValueSetUrl); + .setSeverity(IssueSeverity.ERROR) + .setMessage("Inappropriate CodeSystem URL \"" + theCodeSystem + "\" for ValueSet: " + + theValueSetUrl); } IBaseResource languagesVs = myLanguagesVs; if (languagesVs == null) { - languagesVs = theValidationSupportContext.getRootValidationSupport().fetchValueSet("http://hl7.org/fhir/ValueSet/languages"); + languagesVs = theValidationSupportContext + .getRootValidationSupport() + .fetchValueSet("http://hl7.org/fhir/ValueSet/languages"); myLanguagesVs = myVersionCanonicalizer.valueSetToValidatorCanonical(languagesVs); } - Optional match = myLanguagesVs - .getCompose() - .getInclude() - .stream() - .flatMap(t -> t.getConcept().stream()) - .filter(t -> theCode.equals(t.getCode())) - .findFirst(); + Optional match = + myLanguagesVs.getCompose().getInclude().stream() + .flatMap(t -> t.getConcept().stream()) + .filter(t -> theCode.equals(t.getCode())) + .findFirst(); if (match.isPresent()) { return new CodeValidationResult() - .setCode(theCode) - .setDisplay(match.get().getDisplay()); + .setCode(theCode) + .setDisplay(match.get().getDisplay()); } else { return new CodeValidationResult() - .setSeverity(IssueSeverity.ERROR) - .setMessage("Code \"" + theCode + "\" is not in valueset: " + theValueSetUrl); + .setSeverity(IssueSeverity.ERROR) + .setMessage("Code \"" + theCode + "\" is not in valueset: " + theValueSetUrl); } case ALL_LANGUAGES_VALUESET_URL: - if (!LANGUAGES_CODESYSTEM_URL.equals(theCodeSystem) && !(theCodeSystem == null && theOptions.isInferSystem())) { + if (!LANGUAGES_CODESYSTEM_URL.equals(theCodeSystem) + && !(theCodeSystem == null && theOptions.isInferSystem())) { return new CodeValidationResult() - .setSeverity(IssueSeverity.ERROR) - .setMessage("Inappropriate CodeSystem URL \"" + theCodeSystem + "\" for ValueSet: " + theValueSetUrl); + .setSeverity(IssueSeverity.ERROR) + .setMessage("Inappropriate CodeSystem URL \"" + theCodeSystem + "\" for ValueSet: " + + theValueSetUrl); } LookupCodeResult outcome = lookupLanguageCode(theCode); if (outcome.isFound()) { - return new CodeValidationResult() - .setCode(theCode) - .setDisplay(outcome.getCodeDisplay()); + return new CodeValidationResult().setCode(theCode).setDisplay(outcome.getCodeDisplay()); } else { return new CodeValidationResult() - .setSeverity(IssueSeverity.ERROR) - .setMessage("Code \"" + theCode + "\" is not in valueset: " + theValueSetUrl); + .setSeverity(IssueSeverity.ERROR) + .setMessage("Code \"" + theCode + "\" is not in valueset: " + theValueSetUrl); } case MIMETYPES_VALUESET_URL: // This is a pretty naive implementation - Should be enhanced in future - return new CodeValidationResult() - .setCode(theCode) - .setDisplay(theDisplay); + return new CodeValidationResult().setCode(theCode).setDisplay(theDisplay); 
case UCUM_VALUESET_URL: { String system = theCodeSystem; if (system == null && theOptions.isInferSystem()) { system = UCUM_CODESYSTEM_URL; } - CodeValidationResult validationResult = validateLookupCode(theValidationSupportContext, theCode, system); + CodeValidationResult validationResult = + validateLookupCode(theValidationSupportContext, theCode, system); if (validationResult != null) { return validationResult; } @@ -180,15 +193,13 @@ public class CommonCodeSystemsTerminologyService implements IValidationSupport { String display = handlerMap.get(theCode); if (display != null) { if (expectSystem.equals(theCodeSystem) || theOptions.isInferSystem()) { - return new CodeValidationResult() - .setCode(theCode) - .setDisplay(display); + return new CodeValidationResult().setCode(theCode).setDisplay(display); } } return new CodeValidationResult() - .setSeverity(IssueSeverity.ERROR) - .setMessage("Code \"" + theCode + "\" is not in system: " + USPS_CODESYSTEM_URL); + .setSeverity(IssueSeverity.ERROR) + .setMessage("Code \"" + theCode + "\" is not in system: " + USPS_CODESYSTEM_URL); } if (isBlank(theValueSetUrl)) { @@ -199,23 +210,27 @@ public class CommonCodeSystemsTerminologyService implements IValidationSupport { } @Nullable - public CodeValidationResult validateLookupCode(ValidationSupportContext theValidationSupportContext, String theCode, String theSystem) { + public CodeValidationResult validateLookupCode( + ValidationSupportContext theValidationSupportContext, String theCode, String theSystem) { LookupCodeResult lookupResult = lookupCode(theValidationSupportContext, theSystem, theCode); CodeValidationResult validationResult = null; if (lookupResult != null) { if (lookupResult.isFound()) { validationResult = new CodeValidationResult() - .setCode(lookupResult.getSearchedForCode()) - .setDisplay(lookupResult.getCodeDisplay()); + .setCode(lookupResult.getSearchedForCode()) + .setDisplay(lookupResult.getCodeDisplay()); } } return validationResult; } - @Override - public LookupCodeResult lookupCode(ValidationSupportContext theValidationSupportContext, String theSystem, String theCode, String theDisplayLanguage) { + public LookupCodeResult lookupCode( + ValidationSupportContext theValidationSupportContext, + String theSystem, + String theCode, + String theDisplayLanguage) { Map map; switch (theSystem) { case LANGUAGES_CODESYSTEM_URL: @@ -253,7 +268,6 @@ public class CommonCodeSystemsTerminologyService implements IValidationSupport { retVal.setSearchedForSystem(theSystem); retVal.setFound(false); return retVal; - } private LookupCodeResult lookupLanguageCode(String theCode) { @@ -269,19 +283,22 @@ public class CommonCodeSystemsTerminologyService implements IValidationSupport { if (hasRegionAndCodeSegments) { // we look for languages in lowercase only // this will allow case insensitivity for language portion of code - language = myLanguagesLanugageMap.get(theCode.substring(0, langRegionSeparatorIndex).toLowerCase()); - region = myLanguagesRegionMap.get(theCode.substring(langRegionSeparatorIndex + 1).toUpperCase()); + language = myLanguagesLanugageMap.get( + theCode.substring(0, langRegionSeparatorIndex).toLowerCase()); + region = myLanguagesRegionMap.get( + theCode.substring(langRegionSeparatorIndex + 1).toUpperCase()); if (language == null || region == null) { - //In case the user provides both a language and a region, they must both be valid for the lookup to succeed. + // In case the user provides both a language and a region, they must both be valid for the lookup to + // succeed. 
ourLog.warn("Couldn't find a valid bcp47 language-region combination from code: {}", theCode); return buildNotFoundLookupCodeResult(theCode); } else { return buildLookupResultForLanguageAndRegion(theCode, language, region); } } else { - //In case user has only provided a language, we build the lookup from only that. - //NB: we only use the lowercase version of the language + // In case user has only provided a language, we build the lookup from only that. + // NB: we only use the lowercase version of the language language = myLanguagesLanugageMap.get(theCode.toLowerCase()); if (language == null) { ourLog.warn("Couldn't find a valid bcp47 language from code: {}", theCode); @@ -292,14 +309,16 @@ public class CommonCodeSystemsTerminologyService implements IValidationSupport { } } - private LookupCodeResult buildLookupResultForLanguageAndRegion(@Nonnull String theOriginalCode, @Nonnull String theLanguage, @Nonnull String theRegion) { + private LookupCodeResult buildLookupResultForLanguageAndRegion( + @Nonnull String theOriginalCode, @Nonnull String theLanguage, @Nonnull String theRegion) { LookupCodeResult lookupCodeResult = buildNotFoundLookupCodeResult(theOriginalCode); lookupCodeResult.setCodeDisplay(theLanguage + " " + theRegion); lookupCodeResult.setFound(true); return lookupCodeResult; } - private LookupCodeResult buildLookupResultForLanguage(@Nonnull String theOriginalCode, @Nonnull String theLanguage) { + private LookupCodeResult buildLookupResultForLanguage( + @Nonnull String theOriginalCode, @Nonnull String theLanguage) { LookupCodeResult lookupCodeResult = buildNotFoundLookupCodeResult(theOriginalCode); lookupCodeResult.setCodeDisplay(theLanguage); lookupCodeResult.setFound(true); @@ -424,7 +443,8 @@ public class CommonCodeSystemsTerminologyService implements IValidationSupport { normalized = retVal; break; case R4B: - Resource normalized50 = VersionConvertorFactory_40_50.convertResource(retVal, new BaseAdvisor_40_50(false)); + Resource normalized50 = + VersionConvertorFactory_40_50.convertResource(retVal, new BaseAdvisor_40_50(false)); normalized = VersionConvertorFactory_43_50.convertResource(normalized50, new BaseAdvisor_43_50()); break; case R5: @@ -503,7 +523,8 @@ public class CommonCodeSystemsTerminologyService implements IValidationSupport { } case DSTU2_1: default: - throw new IllegalArgumentException(Msg.code(695) + "Can not handle version: " + structureFhirVersionEnum); + throw new IllegalArgumentException( + Msg.code(695) + "Can not handle version: " + structureFhirVersionEnum); } return url; } @@ -526,7 +547,8 @@ public class CommonCodeSystemsTerminologyService implements IValidationSupport { } case DSTU3: default: - throw new IllegalArgumentException(Msg.code(696) + "Can not handle version: " + structureFhirVersionEnum); + throw new IllegalArgumentException( + Msg.code(696) + "Can not handle version: " + structureFhirVersionEnum); } return url; } @@ -562,10 +584,12 @@ public class CommonCodeSystemsTerminologyService implements IValidationSupport { /** * N.B.: We are keeping this as a shim due to the upgrade we did to core 5.6.97+ */ - public static FhirVersionEnum getFhirVersionEnum(@Nonnull FhirContext theFhirContext, @Nonnull IBaseResource theResource) { + public static FhirVersionEnum getFhirVersionEnum( + @Nonnull FhirContext theFhirContext, @Nonnull IBaseResource theResource) { FhirVersionEnum structureFhirVersionEnum = theResource.getStructureFhirVersionEnum(); // TODO: Address this when core lib version is bumped - if (theResource.getStructureFhirVersionEnum() == 
FhirVersionEnum.R5 && theFhirContext.getVersion().getVersion() == FhirVersionEnum.R4B) { + if (theResource.getStructureFhirVersionEnum() == FhirVersionEnum.R5 + && theFhirContext.getVersion().getVersion() == FhirVersionEnum.R4B) { if (!(theResource instanceof org.hl7.fhir.r5.model.Resource)) { structureFhirVersionEnum = FhirVersionEnum.R4B; } @@ -825,7 +849,6 @@ public class CommonCodeSystemsTerminologyService implements IValidationSupport { return iso4217Codes; } - private static HashMap buildIso3166Codes() { HashMap codes = new HashMap<>(); @@ -1332,5 +1355,4 @@ public class CommonCodeSystemsTerminologyService implements IValidationSupport { codes.put("ZWE", "Zimbabwe"); return codes; } - } diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/DefaultProfileValidationSupportNpmStrategy.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/DefaultProfileValidationSupportNpmStrategy.java index 2abfaf323ea..6fd440f907e 100644 --- a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/DefaultProfileValidationSupportNpmStrategy.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/DefaultProfileValidationSupportNpmStrategy.java @@ -9,8 +9,8 @@ import org.apache.commons.lang3.Validate; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; import java.io.IOException; +import javax.annotation.Nonnull; public class DefaultProfileValidationSupportNpmStrategy extends NpmPackageValidationSupport { private static final Logger ourLog = LoggerFactory.getLogger(DefaultProfileValidationSupportNpmStrategy.class); @@ -31,7 +31,10 @@ public class DefaultProfileValidationSupportNpmStrategy extends NpmPackageValida loadPackageFromClasspath("org/hl7/fhir/r5/packages/hl7.fhir.uv.extensions.r5-1.0.0.tgz"); loadPackageFromClasspath("org/hl7/fhir/r5/packages/hl7.terminology-5.1.0.tgz"); } catch (IOException e) { - throw new ConfigurationException(Msg.code(2333) + "Failed to load required validation resources. Make sure that the appropriate hapi-fhir-validation-resources-VER JAR is on the classpath", e); + throw new ConfigurationException( + Msg.code(2333) + + "Failed to load required validation resources. 
Make sure that the appropriate hapi-fhir-validation-resources-VER JAR is on the classpath", + e); } ourLog.info("Loaded {} Core+Extension resources in {}", countAll(), sw); diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/InMemoryTerminologyServerValidationSupport.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/InMemoryTerminologyServerValidationSupport.java index 9ce650e213f..f44af8e3702 100644 --- a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/InMemoryTerminologyServerValidationSupport.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/InMemoryTerminologyServerValidationSupport.java @@ -27,8 +27,6 @@ import org.hl7.fhir.r5.model.CodeSystem; import org.hl7.fhir.r5.model.Enumerations; import org.hl7.fhir.utilities.validation.ValidationMessage; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; @@ -39,6 +37,8 @@ import java.util.Set; import java.util.function.Consumer; import java.util.function.Function; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.contains; import static org.apache.commons.lang3.StringUtils.defaultString; @@ -73,14 +73,22 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu } @Override - public ValueSetExpansionOutcome expandValueSet(ValidationSupportContext theValidationSupportContext, ValueSetExpansionOptions theExpansionOptions, @Nonnull IBaseResource theValueSetToExpand) { + public ValueSetExpansionOutcome expandValueSet( + ValidationSupportContext theValidationSupportContext, + ValueSetExpansionOptions theExpansionOptions, + @Nonnull IBaseResource theValueSetToExpand) { return expandValueSet(theValidationSupportContext, theValueSetToExpand, null, null); } - private ValueSetExpansionOutcome expandValueSet(ValidationSupportContext theValidationSupportContext, IBaseResource theValueSetToExpand, String theWantSystemAndVersion, String theWantCode) { + private ValueSetExpansionOutcome expandValueSet( + ValidationSupportContext theValidationSupportContext, + IBaseResource theValueSetToExpand, + String theWantSystemAndVersion, + String theWantCode) { org.hl7.fhir.r5.model.ValueSet expansionR5; try { - expansionR5 = expandValueSetToCanonical(theValidationSupportContext, theValueSetToExpand, theWantSystemAndVersion, theWantCode); + expansionR5 = expandValueSetToCanonical( + theValidationSupportContext, theValueSetToExpand, theWantSystemAndVersion, theWantCode); } catch (ExpansionCouldNotBeCompletedInternallyException e) { return new ValueSetExpansionOutcome(e.getMessage()); } @@ -91,7 +99,8 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu IBaseResource expansion; switch (myCtx.getVersion().getVersion()) { case DSTU2: { - org.hl7.fhir.r4.model.ValueSet expansionR4 = (org.hl7.fhir.r4.model.ValueSet) VersionConvertorFactory_40_50.convertResource(expansionR5, new BaseAdvisor_40_50(false)); + org.hl7.fhir.r4.model.ValueSet expansionR4 = (org.hl7.fhir.r4.model.ValueSet) + VersionConvertorFactory_40_50.convertResource(expansionR5, new BaseAdvisor_40_50(false)); expansion = myVersionCanonicalizer.valueSetFromCanonical(expansionR4); break; } @@ -117,58 +126,98 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu } case DSTU2_1: 
default: - throw new IllegalArgumentException(Msg.code(697) + "Can not handle version: " + myCtx.getVersion().getVersion()); + throw new IllegalArgumentException(Msg.code(697) + "Can not handle version: " + + myCtx.getVersion().getVersion()); } return new ValueSetExpansionOutcome(expansion); } - private org.hl7.fhir.r5.model.ValueSet expandValueSetToCanonical(ValidationSupportContext theValidationSupportContext, IBaseResource theValueSetToExpand, @Nullable String theWantSystemUrlAndVersion, @Nullable String theWantCode) throws ExpansionCouldNotBeCompletedInternallyException { + private org.hl7.fhir.r5.model.ValueSet expandValueSetToCanonical( + ValidationSupportContext theValidationSupportContext, + IBaseResource theValueSetToExpand, + @Nullable String theWantSystemUrlAndVersion, + @Nullable String theWantCode) + throws ExpansionCouldNotBeCompletedInternallyException { org.hl7.fhir.r5.model.ValueSet expansionR5; - switch (getFhirVersionEnum(theValidationSupportContext.getRootValidationSupport().getFhirContext(), theValueSetToExpand)) { + switch (getFhirVersionEnum( + theValidationSupportContext.getRootValidationSupport().getFhirContext(), theValueSetToExpand)) { case DSTU2: { - expansionR5 = expandValueSetDstu2(theValidationSupportContext, (ca.uhn.fhir.model.dstu2.resource.ValueSet) theValueSetToExpand, theWantSystemUrlAndVersion, theWantCode); + expansionR5 = expandValueSetDstu2( + theValidationSupportContext, + (ca.uhn.fhir.model.dstu2.resource.ValueSet) theValueSetToExpand, + theWantSystemUrlAndVersion, + theWantCode); break; } case DSTU2_HL7ORG: { - expansionR5 = expandValueSetDstu2Hl7Org(theValidationSupportContext, (ValueSet) theValueSetToExpand, theWantSystemUrlAndVersion, theWantCode); + expansionR5 = expandValueSetDstu2Hl7Org( + theValidationSupportContext, + (ValueSet) theValueSetToExpand, + theWantSystemUrlAndVersion, + theWantCode); break; } case DSTU3: { - expansionR5 = expandValueSetDstu3(theValidationSupportContext, (org.hl7.fhir.dstu3.model.ValueSet) theValueSetToExpand, theWantSystemUrlAndVersion, theWantCode); + expansionR5 = expandValueSetDstu3( + theValidationSupportContext, + (org.hl7.fhir.dstu3.model.ValueSet) theValueSetToExpand, + theWantSystemUrlAndVersion, + theWantCode); break; } case R4: { - expansionR5 = expandValueSetR4(theValidationSupportContext, (org.hl7.fhir.r4.model.ValueSet) theValueSetToExpand, theWantSystemUrlAndVersion, theWantCode); + expansionR5 = expandValueSetR4( + theValidationSupportContext, + (org.hl7.fhir.r4.model.ValueSet) theValueSetToExpand, + theWantSystemUrlAndVersion, + theWantCode); break; } case R4B: { - expansionR5 = expandValueSetR4B(theValidationSupportContext, (org.hl7.fhir.r4b.model.ValueSet) theValueSetToExpand, theWantSystemUrlAndVersion, theWantCode); + expansionR5 = expandValueSetR4B( + theValidationSupportContext, + (org.hl7.fhir.r4b.model.ValueSet) theValueSetToExpand, + theWantSystemUrlAndVersion, + theWantCode); break; } case R5: { - expansionR5 = expandValueSetR5(theValidationSupportContext, (org.hl7.fhir.r5.model.ValueSet) theValueSetToExpand, theWantSystemUrlAndVersion, theWantCode); + expansionR5 = expandValueSetR5( + theValidationSupportContext, + (org.hl7.fhir.r5.model.ValueSet) theValueSetToExpand, + theWantSystemUrlAndVersion, + theWantCode); break; } case DSTU2_1: default: - throw new IllegalArgumentException(Msg.code(698) + "Can not handle version: " + myCtx.getVersion().getVersion()); + throw new IllegalArgumentException(Msg.code(698) + "Can not handle version: " + + myCtx.getVersion().getVersion()); } return 
expansionR5; } @Override - public CodeValidationResult validateCodeInValueSet(ValidationSupportContext theValidationSupportContext, ConceptValidationOptions theOptions, String theCodeSystemUrlAndVersion, String theCode, String theDisplay, @Nonnull IBaseResource theValueSet) { + public CodeValidationResult validateCodeInValueSet( + ValidationSupportContext theValidationSupportContext, + ConceptValidationOptions theOptions, + String theCodeSystemUrlAndVersion, + String theCode, + String theDisplay, + @Nonnull IBaseResource theValueSet) { org.hl7.fhir.r5.model.ValueSet expansion; String vsUrl = CommonCodeSystemsTerminologyService.getValueSetUrl(getFhirContext(), theValueSet); try { - expansion = expandValueSetToCanonical(theValidationSupportContext, theValueSet, theCodeSystemUrlAndVersion, theCode); + expansion = expandValueSetToCanonical( + theValidationSupportContext, theValueSet, theCodeSystemUrlAndVersion, theCode); } catch (ExpansionCouldNotBeCompletedInternallyException e) { CodeValidationResult codeValidationResult = new CodeValidationResult(); codeValidationResult.setSeverityCode("error"); - String msg = "Failed to expand ValueSet '" + vsUrl + "' (in-memory). Could not validate code " + theCodeSystemUrlAndVersion + "#" + theCode; + String msg = "Failed to expand ValueSet '" + vsUrl + "' (in-memory). Could not validate code " + + theCodeSystemUrlAndVersion + "#" + theCode; if (e.getMessage() != null) { msg += ". Error was: " + e.getMessage(); } @@ -181,13 +230,25 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu return null; } - return validateCodeInExpandedValueSet(theValidationSupportContext, theOptions, theCodeSystemUrlAndVersion, theCode, theDisplay, expansion, vsUrl); + return validateCodeInExpandedValueSet( + theValidationSupportContext, + theOptions, + theCodeSystemUrlAndVersion, + theCode, + theDisplay, + expansion, + vsUrl); } - @Override @Nullable - public CodeValidationResult validateCode(@Nonnull ValidationSupportContext theValidationSupportContext, @Nonnull ConceptValidationOptions theOptions, String theCodeSystem, String theCode, String theDisplay, String theValueSetUrl) { + public CodeValidationResult validateCode( + @Nonnull ValidationSupportContext theValidationSupportContext, + @Nonnull ConceptValidationOptions theOptions, + String theCodeSystem, + String theCode, + String theDisplay, + String theValueSetUrl) { IBaseResource vs; if (isNotBlank(theValueSetUrl)) { vs = theValidationSupportContext.getRootValidationSupport().fetchValueSet(theValueSetUrl); @@ -208,146 +269,185 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu case DSTU2: case DSTU2_HL7ORG: vs = new org.hl7.fhir.dstu2.model.ValueSet() - .setCompose(new org.hl7.fhir.dstu2.model.ValueSet.ValueSetComposeComponent() - .addInclude(new org.hl7.fhir.dstu2.model.ValueSet.ConceptSetComponent().setSystem(theCodeSystem))); + .setCompose(new org.hl7.fhir.dstu2.model.ValueSet.ValueSetComposeComponent() + .addInclude(new org.hl7.fhir.dstu2.model.ValueSet.ConceptSetComponent() + .setSystem(theCodeSystem))); break; case DSTU3: if (codeSystemVersion != null) { vs = new org.hl7.fhir.dstu3.model.ValueSet() - .setCompose(new org.hl7.fhir.dstu3.model.ValueSet.ValueSetComposeComponent() - .addInclude(new org.hl7.fhir.dstu3.model.ValueSet.ConceptSetComponent().setSystem(codeSystemUrl).setVersion(codeSystemVersion))); + .setCompose(new org.hl7.fhir.dstu3.model.ValueSet.ValueSetComposeComponent() + .addInclude(new 
org.hl7.fhir.dstu3.model.ValueSet.ConceptSetComponent() + .setSystem(codeSystemUrl) + .setVersion(codeSystemVersion))); } else { vs = new org.hl7.fhir.dstu3.model.ValueSet() - .setCompose(new org.hl7.fhir.dstu3.model.ValueSet.ValueSetComposeComponent() - .addInclude(new org.hl7.fhir.dstu3.model.ValueSet.ConceptSetComponent().setSystem(theCodeSystem))); + .setCompose(new org.hl7.fhir.dstu3.model.ValueSet.ValueSetComposeComponent() + .addInclude(new org.hl7.fhir.dstu3.model.ValueSet.ConceptSetComponent() + .setSystem(theCodeSystem))); } break; case R4: if (codeSystemVersion != null) { vs = new org.hl7.fhir.r4.model.ValueSet() - .setCompose(new org.hl7.fhir.r4.model.ValueSet.ValueSetComposeComponent() - .addInclude(new org.hl7.fhir.r4.model.ValueSet.ConceptSetComponent().setSystem(codeSystemUrl).setVersion(codeSystemVersion))); + .setCompose(new org.hl7.fhir.r4.model.ValueSet.ValueSetComposeComponent() + .addInclude(new org.hl7.fhir.r4.model.ValueSet.ConceptSetComponent() + .setSystem(codeSystemUrl) + .setVersion(codeSystemVersion))); } else { vs = new org.hl7.fhir.r4.model.ValueSet() - .setCompose(new org.hl7.fhir.r4.model.ValueSet.ValueSetComposeComponent() - .addInclude(new org.hl7.fhir.r4.model.ValueSet.ConceptSetComponent().setSystem(theCodeSystem))); + .setCompose(new org.hl7.fhir.r4.model.ValueSet.ValueSetComposeComponent() + .addInclude(new org.hl7.fhir.r4.model.ValueSet.ConceptSetComponent() + .setSystem(theCodeSystem))); } break; case R4B: if (codeSystemVersion != null) { vs = new org.hl7.fhir.r4b.model.ValueSet() - .setCompose(new org.hl7.fhir.r4b.model.ValueSet.ValueSetComposeComponent() - .addInclude(new org.hl7.fhir.r4b.model.ValueSet.ConceptSetComponent().setSystem(codeSystemUrl).setVersion(codeSystemVersion))); + .setCompose(new org.hl7.fhir.r4b.model.ValueSet.ValueSetComposeComponent() + .addInclude(new org.hl7.fhir.r4b.model.ValueSet.ConceptSetComponent() + .setSystem(codeSystemUrl) + .setVersion(codeSystemVersion))); } else { vs = new org.hl7.fhir.r4b.model.ValueSet() - .setCompose(new org.hl7.fhir.r4b.model.ValueSet.ValueSetComposeComponent() - .addInclude(new org.hl7.fhir.r4b.model.ValueSet.ConceptSetComponent().setSystem(theCodeSystem))); + .setCompose(new org.hl7.fhir.r4b.model.ValueSet.ValueSetComposeComponent() + .addInclude(new org.hl7.fhir.r4b.model.ValueSet.ConceptSetComponent() + .setSystem(theCodeSystem))); } break; case R5: if (codeSystemVersion != null) { vs = new org.hl7.fhir.r5.model.ValueSet() - .setCompose(new org.hl7.fhir.r5.model.ValueSet.ValueSetComposeComponent() - .addInclude(new org.hl7.fhir.r5.model.ValueSet.ConceptSetComponent().setSystem(codeSystemUrl).setVersion(codeSystemVersion))); + .setCompose(new org.hl7.fhir.r5.model.ValueSet.ValueSetComposeComponent() + .addInclude(new org.hl7.fhir.r5.model.ValueSet.ConceptSetComponent() + .setSystem(codeSystemUrl) + .setVersion(codeSystemVersion))); } else { vs = new org.hl7.fhir.r5.model.ValueSet() - .setCompose(new org.hl7.fhir.r5.model.ValueSet.ValueSetComposeComponent() - .addInclude(new org.hl7.fhir.r5.model.ValueSet.ConceptSetComponent().setSystem(theCodeSystem))); + .setCompose(new org.hl7.fhir.r5.model.ValueSet.ValueSetComposeComponent() + .addInclude(new org.hl7.fhir.r5.model.ValueSet.ConceptSetComponent() + .setSystem(theCodeSystem))); } break; case DSTU2_1: default: - throw new IllegalArgumentException(Msg.code(699) + "Can not handle version: " + myCtx.getVersion().getVersion()); + throw new IllegalArgumentException(Msg.code(699) + "Can not handle version: " + + myCtx.getVersion().getVersion()); 
} } - ValueSetExpansionOutcome valueSetExpansionOutcome = expandValueSet(theValidationSupportContext, vs, theCodeSystem, theCode); + ValueSetExpansionOutcome valueSetExpansionOutcome = + expandValueSet(theValidationSupportContext, vs, theCodeSystem, theCode); if (valueSetExpansionOutcome == null) { return null; } if (valueSetExpansionOutcome.getError() != null) { return new CodeValidationResult() - .setSeverity(IssueSeverity.ERROR) - .setMessage(valueSetExpansionOutcome.getError()); + .setSeverity(IssueSeverity.ERROR) + .setMessage(valueSetExpansionOutcome.getError()); } IBaseResource expansion = valueSetExpansionOutcome.getValueSet(); - return validateCodeInExpandedValueSet(theValidationSupportContext, theOptions, theCodeSystem, theCode, theDisplay, expansion, theValueSetUrl); + return validateCodeInExpandedValueSet( + theValidationSupportContext, theOptions, theCodeSystem, theCode, theDisplay, expansion, theValueSetUrl); } - private CodeValidationResult validateCodeInExpandedValueSet(ValidationSupportContext theValidationSupportContext, ConceptValidationOptions theOptions, String theCodeSystemUrlAndVersionToValidate, String theCodeToValidate, String theDisplayToValidate, IBaseResource theExpansion, String theValueSetUrl) { + private CodeValidationResult validateCodeInExpandedValueSet( + ValidationSupportContext theValidationSupportContext, + ConceptValidationOptions theOptions, + String theCodeSystemUrlAndVersionToValidate, + String theCodeToValidate, + String theDisplayToValidate, + IBaseResource theExpansion, + String theValueSetUrl) { assert theExpansion != null; boolean caseSensitive = true; IBaseResource codeSystemToValidateResource = null; if (!theOptions.isInferSystem() && isNotBlank(theCodeSystemUrlAndVersionToValidate)) { - codeSystemToValidateResource = theValidationSupportContext.getRootValidationSupport().fetchCodeSystem(theCodeSystemUrlAndVersionToValidate); + codeSystemToValidateResource = theValidationSupportContext + .getRootValidationSupport() + .fetchCodeSystem(theCodeSystemUrlAndVersionToValidate); } List codes = new ArrayList<>(); - switch (getFhirVersionEnum(theValidationSupportContext.getRootValidationSupport().getFhirContext(), theExpansion)) { + switch (getFhirVersionEnum( + theValidationSupportContext.getRootValidationSupport().getFhirContext(), theExpansion)) { case DSTU2: { - ca.uhn.fhir.model.dstu2.resource.ValueSet expansionVs = (ca.uhn.fhir.model.dstu2.resource.ValueSet) theExpansion; - List contains = expansionVs.getExpansion().getContains(); + ca.uhn.fhir.model.dstu2.resource.ValueSet expansionVs = + (ca.uhn.fhir.model.dstu2.resource.ValueSet) theExpansion; + List contains = + expansionVs.getExpansion().getContains(); flattenAndConvertCodesDstu2(contains, codes); break; } case DSTU2_HL7ORG: { ValueSet expansionVs = (ValueSet) theExpansion; - List contains = expansionVs.getExpansion().getContains(); + List contains = + expansionVs.getExpansion().getContains(); flattenAndConvertCodesDstu2Hl7Org(contains, codes); break; } case DSTU3: { org.hl7.fhir.dstu3.model.ValueSet expansionVs = (org.hl7.fhir.dstu3.model.ValueSet) theExpansion; - List contains = expansionVs.getExpansion().getContains(); + List contains = + expansionVs.getExpansion().getContains(); flattenAndConvertCodesDstu3(contains, codes); break; } case R4: { org.hl7.fhir.r4.model.ValueSet expansionVs = (org.hl7.fhir.r4.model.ValueSet) theExpansion; - List contains = expansionVs.getExpansion().getContains(); + List contains = + expansionVs.getExpansion().getContains(); 
flattenAndConvertCodesR4(contains, codes); break; } case R4B: { org.hl7.fhir.r4b.model.ValueSet expansionVs = (org.hl7.fhir.r4b.model.ValueSet) theExpansion; - List contains = expansionVs.getExpansion().getContains(); + List contains = + expansionVs.getExpansion().getContains(); flattenAndConvertCodesR4B(contains, codes); break; } case R5: { org.hl7.fhir.r5.model.ValueSet expansionVs = (org.hl7.fhir.r5.model.ValueSet) theExpansion; - List contains = expansionVs.getExpansion().getContains(); + List contains = + expansionVs.getExpansion().getContains(); flattenAndConvertCodesR5(contains, codes); break; } case DSTU2_1: default: - throw new IllegalArgumentException(Msg.code(700) + "Can not handle version: " + myCtx.getVersion().getVersion()); + throw new IllegalArgumentException(Msg.code(700) + "Can not handle version: " + + myCtx.getVersion().getVersion()); } String codeSystemResourceName = null; String codeSystemResourceVersion = null; String codeSystemResourceContentMode = null; if (codeSystemToValidateResource != null) { - switch (getFhirVersionEnum(theValidationSupportContext.getRootValidationSupport().getFhirContext(), codeSystemToValidateResource)) { + switch (getFhirVersionEnum( + theValidationSupportContext.getRootValidationSupport().getFhirContext(), + codeSystemToValidateResource)) { case DSTU2: case DSTU2_HL7ORG: { caseSensitive = true; break; } case DSTU3: { - org.hl7.fhir.dstu3.model.CodeSystem systemDstu3 = (org.hl7.fhir.dstu3.model.CodeSystem) codeSystemToValidateResource; + org.hl7.fhir.dstu3.model.CodeSystem systemDstu3 = + (org.hl7.fhir.dstu3.model.CodeSystem) codeSystemToValidateResource; caseSensitive = systemDstu3.getCaseSensitive(); codeSystemResourceName = systemDstu3.getName(); codeSystemResourceVersion = systemDstu3.getVersion(); - codeSystemResourceContentMode = systemDstu3.getContentElement().getValueAsString(); + codeSystemResourceContentMode = + systemDstu3.getContentElement().getValueAsString(); break; } case R4: { - org.hl7.fhir.r4.model.CodeSystem systemR4 = (org.hl7.fhir.r4.model.CodeSystem) codeSystemToValidateResource; + org.hl7.fhir.r4.model.CodeSystem systemR4 = + (org.hl7.fhir.r4.model.CodeSystem) codeSystemToValidateResource; caseSensitive = systemR4.getCaseSensitive(); codeSystemResourceName = systemR4.getName(); codeSystemResourceVersion = systemR4.getVersion(); @@ -355,11 +455,13 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu break; } case R4B: { - org.hl7.fhir.r4b.model.CodeSystem systemR4B = (org.hl7.fhir.r4b.model.CodeSystem) codeSystemToValidateResource; + org.hl7.fhir.r4b.model.CodeSystem systemR4B = + (org.hl7.fhir.r4b.model.CodeSystem) codeSystemToValidateResource; caseSensitive = systemR4B.getCaseSensitive(); codeSystemResourceName = systemR4B.getName(); codeSystemResourceVersion = systemR4B.getVersion(); - codeSystemResourceContentMode = systemR4B.getContentElement().getValueAsString(); + codeSystemResourceContentMode = + systemR4B.getContentElement().getValueAsString(); break; } case R5: { @@ -372,7 +474,8 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu } case DSTU2_1: default: - throw new IllegalArgumentException(Msg.code(701) + "Can not handle version: " + myCtx.getVersion().getVersion()); + throw new IllegalArgumentException(Msg.code(701) + "Can not handle version: " + + myCtx.getVersion().getVersion()); } } @@ -396,32 +499,40 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu codeMatches = 
defaultString(theCodeToValidate).equalsIgnoreCase(nextExpansionCode.getCode()); } if (codeMatches) { - if (theOptions.isInferSystem() || (nextExpansionCode.getSystem().equals(codeSystemUrlToValidate) && (codeSystemVersionToValidate == null || codeSystemVersionToValidate.equals(nextExpansionCode.getSystemVersion())))) { + if (theOptions.isInferSystem() + || (nextExpansionCode.getSystem().equals(codeSystemUrlToValidate) + && (codeSystemVersionToValidate == null + || codeSystemVersionToValidate.equals(nextExpansionCode.getSystemVersion())))) { String csVersion = codeSystemResourceVersion; if (isNotBlank(nextExpansionCode.getSystemVersion())) { csVersion = nextExpansionCode.getSystemVersion(); } - if (!theOptions.isValidateDisplay() || (isBlank(nextExpansionCode.getDisplay()) || isBlank(theDisplayToValidate) || nextExpansionCode.getDisplay().equals(theDisplayToValidate))) { + if (!theOptions.isValidateDisplay() + || (isBlank(nextExpansionCode.getDisplay()) + || isBlank(theDisplayToValidate) + || nextExpansionCode.getDisplay().equals(theDisplayToValidate))) { CodeValidationResult codeValidationResult = new CodeValidationResult() - .setCode(theCodeToValidate) - .setDisplay(nextExpansionCode.getDisplay()) - .setCodeSystemName(codeSystemResourceName) - .setCodeSystemVersion(csVersion); + .setCode(theCodeToValidate) + .setDisplay(nextExpansionCode.getDisplay()) + .setCodeSystemName(codeSystemResourceName) + .setCodeSystemVersion(csVersion); if (isNotBlank(theValueSetUrl)) { - codeValidationResult.setMessage("Code was validated against in-memory expansion of ValueSet: " + theValueSetUrl); + codeValidationResult.setMessage( + "Code was validated against in-memory expansion of ValueSet: " + theValueSetUrl); } return codeValidationResult; } else { - String message = "Concept Display \"" + theDisplayToValidate + "\" does not match expected \"" + nextExpansionCode.getDisplay() + "\""; + String message = "Concept Display \"" + theDisplayToValidate + "\" does not match expected \"" + + nextExpansionCode.getDisplay() + "\""; if (isNotBlank(theValueSetUrl)) { message += " for in-memory expansion of ValueSet: " + theValueSetUrl; } return new CodeValidationResult() - .setSeverity(IssueSeverity.ERROR) - .setDisplay(nextExpansionCode.getDisplay()) - .setMessage(message) - .setCodeSystemName(codeSystemResourceName) - .setCodeSystemVersion(csVersion); + .setSeverity(IssueSeverity.ERROR) + .setDisplay(nextExpansionCode.getDisplay()) + .setMessage(message) + .setCodeSystemName(codeSystemResourceName) + .setCodeSystemVersion(csVersion); } } } @@ -431,23 +542,34 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu String message; if ("fragment".equals(codeSystemResourceContentMode)) { severity = ValidationMessage.IssueSeverity.WARNING; - message = "Unknown code in fragment CodeSystem '" + (isNotBlank(theCodeSystemUrlAndVersionToValidate) ? theCodeSystemUrlAndVersionToValidate + "#" : "") + theCodeToValidate + "'"; + message = "Unknown code in fragment CodeSystem '" + + (isNotBlank(theCodeSystemUrlAndVersionToValidate) + ? theCodeSystemUrlAndVersionToValidate + "#" + : "") + + theCodeToValidate + "'"; } else { severity = ValidationMessage.IssueSeverity.ERROR; - message = "Unknown code '" + (isNotBlank(theCodeSystemUrlAndVersionToValidate) ? theCodeSystemUrlAndVersionToValidate + "#" : "") + theCodeToValidate + "'"; + message = "Unknown code '" + + (isNotBlank(theCodeSystemUrlAndVersionToValidate) + ? 
theCodeSystemUrlAndVersionToValidate + "#" + : "") + + theCodeToValidate + "'"; } if (isNotBlank(theValueSetUrl)) { message += " for in-memory expansion of ValueSet '" + theValueSetUrl + "'"; } - return new CodeValidationResult() - .setSeverityCode(severity.toCode()) - .setMessage(message); + return new CodeValidationResult().setSeverityCode(severity.toCode()).setMessage(message); } @Override - public LookupCodeResult lookupCode(ValidationSupportContext theValidationSupportContext, String theSystem, String theCode, String theDisplayLanguage) { - CodeValidationResult codeValidationResult = validateCode(theValidationSupportContext, new ConceptValidationOptions(), theSystem, theCode, null, null); + public LookupCodeResult lookupCode( + ValidationSupportContext theValidationSupportContext, + String theSystem, + String theCode, + String theDisplayLanguage) { + CodeValidationResult codeValidationResult = validateCode( + theValidationSupportContext, new ConceptValidationOptions(), theSystem, theCode, null, null); if (codeValidationResult == null) { return null; } @@ -455,18 +577,31 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu } @Nullable - private org.hl7.fhir.r5.model.ValueSet expandValueSetDstu2Hl7Org(ValidationSupportContext theValidationSupportContext, ValueSet theInput, @Nullable String theWantSystemUrlAndVersion, @Nullable String theWantCode) throws ExpansionCouldNotBeCompletedInternallyException { - org.hl7.fhir.r5.model.ValueSet input = (org.hl7.fhir.r5.model.ValueSet) VersionConvertorFactory_10_50.convertResource(theInput, new BaseAdvisor_10_50(false)); + private org.hl7.fhir.r5.model.ValueSet expandValueSetDstu2Hl7Org( + ValidationSupportContext theValidationSupportContext, + ValueSet theInput, + @Nullable String theWantSystemUrlAndVersion, + @Nullable String theWantCode) + throws ExpansionCouldNotBeCompletedInternallyException { + org.hl7.fhir.r5.model.ValueSet input = (org.hl7.fhir.r5.model.ValueSet) + VersionConvertorFactory_10_50.convertResource(theInput, new BaseAdvisor_10_50(false)); return (expandValueSetR5(theValidationSupportContext, input, theWantSystemUrlAndVersion, theWantCode)); } @Nullable - private org.hl7.fhir.r5.model.ValueSet expandValueSetDstu2(ValidationSupportContext theValidationSupportContext, ca.uhn.fhir.model.dstu2.resource.ValueSet theInput, @Nullable String theWantSystemUrlAndVersion, @Nullable String theWantCode) throws ExpansionCouldNotBeCompletedInternallyException { + private org.hl7.fhir.r5.model.ValueSet expandValueSetDstu2( + ValidationSupportContext theValidationSupportContext, + ca.uhn.fhir.model.dstu2.resource.ValueSet theInput, + @Nullable String theWantSystemUrlAndVersion, + @Nullable String theWantCode) + throws ExpansionCouldNotBeCompletedInternallyException { IParser parserRi = FhirContext.forCached(FhirVersionEnum.DSTU2_HL7ORG).newJsonParser(); IParser parserHapi = FhirContext.forDstu2Cached().newJsonParser(); - org.hl7.fhir.dstu2.model.ValueSet valueSetRi = parserRi.parseResource(org.hl7.fhir.dstu2.model.ValueSet.class, parserHapi.encodeResourceToString(theInput)); - org.hl7.fhir.r5.model.ValueSet input = (org.hl7.fhir.r5.model.ValueSet) VersionConvertorFactory_10_50.convertResource(valueSetRi, new BaseAdvisor_10_50(false)); + org.hl7.fhir.dstu2.model.ValueSet valueSetRi = parserRi.parseResource( + org.hl7.fhir.dstu2.model.ValueSet.class, parserHapi.encodeResourceToString(theInput)); + org.hl7.fhir.r5.model.ValueSet input = (org.hl7.fhir.r5.model.ValueSet) + 
VersionConvertorFactory_10_50.convertResource(valueSetRi, new BaseAdvisor_10_50(false)); return (expandValueSetR5(theValidationSupportContext, input, theWantSystemUrlAndVersion, theWantCode)); } @@ -476,14 +611,16 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu return false; } - IBaseResource cs = theValidationSupportContext.getRootValidationSupport().fetchCodeSystem(theSystem); + IBaseResource cs = + theValidationSupportContext.getRootValidationSupport().fetchCodeSystem(theSystem); if (!myCtx.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU2_1)) { return cs != null; } if (cs != null) { - IPrimitiveType content = getFhirContext().newTerser().getSingleValueOrNull(cs, "content", IPrimitiveType.class); + IPrimitiveType content = + getFhirContext().newTerser().getSingleValueOrNull(cs, "content", IPrimitiveType.class); return !"not-present".equals(content.getValueAsString()); } @@ -492,59 +629,105 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu @Override public boolean isValueSetSupported(ValidationSupportContext theValidationSupportContext, String theValueSetUrl) { - return isNotBlank(theValueSetUrl) && theValidationSupportContext.getRootValidationSupport().fetchValueSet(theValueSetUrl) != null; + return isNotBlank(theValueSetUrl) + && theValidationSupportContext.getRootValidationSupport().fetchValueSet(theValueSetUrl) != null; } - - private void addCodesDstu2Hl7Org(List theSourceList, List theTargetList) { + private void addCodesDstu2Hl7Org( + List theSourceList, + List theTargetList) { for (ValueSet.ConceptDefinitionComponent nextSource : theSourceList) { - CodeSystem.ConceptDefinitionComponent targetConcept = new CodeSystem.ConceptDefinitionComponent().setCode(nextSource.getCode()).setDisplay(nextSource.getDisplay()); + CodeSystem.ConceptDefinitionComponent targetConcept = new CodeSystem.ConceptDefinitionComponent() + .setCode(nextSource.getCode()) + .setDisplay(nextSource.getDisplay()); theTargetList.add(targetConcept); addCodesDstu2Hl7Org(nextSource.getConcept(), targetConcept.getConcept()); } } - private void addCodesDstu2(List theSourceList, List theTargetList) { + private void addCodesDstu2( + List theSourceList, + List theTargetList) { for (ca.uhn.fhir.model.dstu2.resource.ValueSet.CodeSystemConcept nextSource : theSourceList) { - CodeSystem.ConceptDefinitionComponent targetConcept = new CodeSystem.ConceptDefinitionComponent().setCode(nextSource.getCode()).setDisplay(nextSource.getDisplay()); + CodeSystem.ConceptDefinitionComponent targetConcept = new CodeSystem.ConceptDefinitionComponent() + .setCode(nextSource.getCode()) + .setDisplay(nextSource.getDisplay()); theTargetList.add(targetConcept); addCodesDstu2(nextSource.getConcept(), targetConcept.getConcept()); } } @Nullable - private org.hl7.fhir.r5.model.ValueSet expandValueSetDstu3(ValidationSupportContext theValidationSupportContext, org.hl7.fhir.dstu3.model.ValueSet theInput, @Nullable String theWantSystemUrlAndVersion, @Nullable String theWantCode) throws ExpansionCouldNotBeCompletedInternallyException { - org.hl7.fhir.r5.model.ValueSet input = (org.hl7.fhir.r5.model.ValueSet) VersionConvertorFactory_30_50.convertResource(theInput, new BaseAdvisor_30_50(false)); + private org.hl7.fhir.r5.model.ValueSet expandValueSetDstu3( + ValidationSupportContext theValidationSupportContext, + org.hl7.fhir.dstu3.model.ValueSet theInput, + @Nullable String theWantSystemUrlAndVersion, + @Nullable String theWantCode) + throws 
ExpansionCouldNotBeCompletedInternallyException { + org.hl7.fhir.r5.model.ValueSet input = (org.hl7.fhir.r5.model.ValueSet) + VersionConvertorFactory_30_50.convertResource(theInput, new BaseAdvisor_30_50(false)); return (expandValueSetR5(theValidationSupportContext, input, theWantSystemUrlAndVersion, theWantCode)); } @Nullable - private org.hl7.fhir.r5.model.ValueSet expandValueSetR4(ValidationSupportContext theValidationSupportContext, org.hl7.fhir.r4.model.ValueSet theInput, @Nullable String theWantSystemUrlAndVersion, @Nullable String theWantCode) throws ExpansionCouldNotBeCompletedInternallyException { - org.hl7.fhir.r5.model.ValueSet input = (org.hl7.fhir.r5.model.ValueSet) VersionConvertorFactory_40_50.convertResource(theInput, new BaseAdvisor_40_50(false)); + private org.hl7.fhir.r5.model.ValueSet expandValueSetR4( + ValidationSupportContext theValidationSupportContext, + org.hl7.fhir.r4.model.ValueSet theInput, + @Nullable String theWantSystemUrlAndVersion, + @Nullable String theWantCode) + throws ExpansionCouldNotBeCompletedInternallyException { + org.hl7.fhir.r5.model.ValueSet input = (org.hl7.fhir.r5.model.ValueSet) + VersionConvertorFactory_40_50.convertResource(theInput, new BaseAdvisor_40_50(false)); return expandValueSetR5(theValidationSupportContext, input, theWantSystemUrlAndVersion, theWantCode); } @Nullable - private org.hl7.fhir.r5.model.ValueSet expandValueSetR4B(ValidationSupportContext theValidationSupportContext, org.hl7.fhir.r4b.model.ValueSet theInput, @Nullable String theWantSystemUrlAndVersion, @Nullable String theWantCode) throws ExpansionCouldNotBeCompletedInternallyException { - org.hl7.fhir.r5.model.ValueSet input = (org.hl7.fhir.r5.model.ValueSet) VersionConvertorFactory_43_50.convertResource(theInput, new BaseAdvisor_43_50(false)); + private org.hl7.fhir.r5.model.ValueSet expandValueSetR4B( + ValidationSupportContext theValidationSupportContext, + org.hl7.fhir.r4b.model.ValueSet theInput, + @Nullable String theWantSystemUrlAndVersion, + @Nullable String theWantCode) + throws ExpansionCouldNotBeCompletedInternallyException { + org.hl7.fhir.r5.model.ValueSet input = (org.hl7.fhir.r5.model.ValueSet) + VersionConvertorFactory_43_50.convertResource(theInput, new BaseAdvisor_43_50(false)); return expandValueSetR5(theValidationSupportContext, input, theWantSystemUrlAndVersion, theWantCode); } @Nullable - private org.hl7.fhir.r5.model.ValueSet expandValueSetR5(ValidationSupportContext theValidationSupportContext, org.hl7.fhir.r5.model.ValueSet theInput) throws ExpansionCouldNotBeCompletedInternallyException { + private org.hl7.fhir.r5.model.ValueSet expandValueSetR5( + ValidationSupportContext theValidationSupportContext, org.hl7.fhir.r5.model.ValueSet theInput) + throws ExpansionCouldNotBeCompletedInternallyException { return expandValueSetR5(theValidationSupportContext, theInput, null, null); } @Nullable - private org.hl7.fhir.r5.model.ValueSet expandValueSetR5(ValidationSupportContext theValidationSupportContext, org.hl7.fhir.r5.model.ValueSet theInput, @Nullable String theWantSystemUrlAndVersion, @Nullable String theWantCode) throws ExpansionCouldNotBeCompletedInternallyException { + private org.hl7.fhir.r5.model.ValueSet expandValueSetR5( + ValidationSupportContext theValidationSupportContext, + org.hl7.fhir.r5.model.ValueSet theInput, + @Nullable String theWantSystemUrlAndVersion, + @Nullable String theWantCode) + throws ExpansionCouldNotBeCompletedInternallyException { Set concepts = new HashSet<>(); - 
expandValueSetR5IncludeOrExcludes(theValidationSupportContext, concepts, theInput.getCompose().getInclude(), true, theWantSystemUrlAndVersion, theWantCode); - expandValueSetR5IncludeOrExcludes(theValidationSupportContext, concepts, theInput.getCompose().getExclude(), false, theWantSystemUrlAndVersion, theWantCode); + expandValueSetR5IncludeOrExcludes( + theValidationSupportContext, + concepts, + theInput.getCompose().getInclude(), + true, + theWantSystemUrlAndVersion, + theWantCode); + expandValueSetR5IncludeOrExcludes( + theValidationSupportContext, + concepts, + theInput.getCompose().getExclude(), + false, + theWantSystemUrlAndVersion, + theWantCode); org.hl7.fhir.r5.model.ValueSet retVal = new org.hl7.fhir.r5.model.ValueSet(); for (FhirVersionIndependentConcept next : concepts) { - org.hl7.fhir.r5.model.ValueSet.ValueSetExpansionContainsComponent contains = retVal.getExpansion().addContains(); + org.hl7.fhir.r5.model.ValueSet.ValueSetExpansionContainsComponent contains = + retVal.getExpansion().addContains(); contains.setSystem(next.getSystem()); contains.setCode(next.getCode()); contains.setDisplay(next.getDisplay()); @@ -559,12 +742,22 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu * * @since 5.6.0 */ - public void expandValueSetIncludeOrExclude(ValidationSupportContext theValidationSupportContext, Consumer theConsumer, org.hl7.fhir.r5.model.ValueSet.ConceptSetComponent theIncludeOrExclude) throws ExpansionCouldNotBeCompletedInternallyException { + public void expandValueSetIncludeOrExclude( + ValidationSupportContext theValidationSupportContext, + Consumer theConsumer, + org.hl7.fhir.r5.model.ValueSet.ConceptSetComponent theIncludeOrExclude) + throws ExpansionCouldNotBeCompletedInternallyException { expandValueSetR5IncludeOrExclude(theValidationSupportContext, theConsumer, null, null, theIncludeOrExclude); } - - private void expandValueSetR5IncludeOrExcludes(ValidationSupportContext theValidationSupportContext, Set theConcepts, List theComposeList, boolean theComposeListIsInclude, @Nullable String theWantSystemUrlAndVersion, @Nullable String theWantCode) throws ExpansionCouldNotBeCompletedInternallyException { + private void expandValueSetR5IncludeOrExcludes( + ValidationSupportContext theValidationSupportContext, + Set theConcepts, + List theComposeList, + boolean theComposeListIsInclude, + @Nullable String theWantSystemUrlAndVersion, + @Nullable String theWantCode) + throws ExpansionCouldNotBeCompletedInternallyException { Consumer consumer = c -> { if (theComposeListIsInclude) { theConcepts.add(c); @@ -572,15 +765,22 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu theConcepts.remove(c); } }; - expandValueSetR5IncludeOrExcludes(theValidationSupportContext, consumer, theComposeList, theWantSystemUrlAndVersion, theWantCode); + expandValueSetR5IncludeOrExcludes( + theValidationSupportContext, consumer, theComposeList, theWantSystemUrlAndVersion, theWantCode); } - - private void expandValueSetR5IncludeOrExcludes(ValidationSupportContext theValidationSupportContext, Consumer theConsumer, List theComposeList, @Nullable String theWantSystemUrlAndVersion, @Nullable String theWantCode) throws ExpansionCouldNotBeCompletedInternallyException { + private void expandValueSetR5IncludeOrExcludes( + ValidationSupportContext theValidationSupportContext, + Consumer theConsumer, + List theComposeList, + @Nullable String theWantSystemUrlAndVersion, + @Nullable String theWantCode) + throws 
ExpansionCouldNotBeCompletedInternallyException { ExpansionCouldNotBeCompletedInternallyException caughtException = null; for (org.hl7.fhir.r5.model.ValueSet.ConceptSetComponent nextInclude : theComposeList) { try { - boolean outcome = expandValueSetR5IncludeOrExclude(theValidationSupportContext, theConsumer, theWantSystemUrlAndVersion, theWantCode, nextInclude); + boolean outcome = expandValueSetR5IncludeOrExclude( + theValidationSupportContext, theConsumer, theWantSystemUrlAndVersion, theWantCode, nextInclude); if (isNotBlank(theWantCode)) { if (outcome) { return; @@ -602,7 +802,13 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu /** * Returns true if at least one code was added */ - private boolean expandValueSetR5IncludeOrExclude(ValidationSupportContext theValidationSupportContext, Consumer theConsumer, @Nullable String theWantSystemUrlAndVersion, @Nullable String theWantCode, org.hl7.fhir.r5.model.ValueSet.ConceptSetComponent theInclude) throws ExpansionCouldNotBeCompletedInternallyException { + private boolean expandValueSetR5IncludeOrExclude( + ValidationSupportContext theValidationSupportContext, + Consumer theConsumer, + @Nullable String theWantSystemUrlAndVersion, + @Nullable String theWantCode, + org.hl7.fhir.r5.model.ValueSet.ConceptSetComponent theInclude) + throws ExpansionCouldNotBeCompletedInternallyException { String wantSystemUrl = null; String wantSystemVersion = null; @@ -621,14 +827,16 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu String includeOrExcludeConceptSystemVersion = theInclude.getVersion(); Function codeSystemLoader = newCodeSystemLoader(theValidationSupportContext); - Function valueSetLoader = newValueSetLoader(theValidationSupportContext); + Function valueSetLoader = + newValueSetLoader(theValidationSupportContext); List nextCodeList = new ArrayList<>(); CodeSystem includeOrExcludeSystemResource = null; if (isNotBlank(includeOrExcludeConceptSystemUrl)) { - includeOrExcludeConceptSystemVersion = optionallyPopulateVersionFromUrl(includeOrExcludeConceptSystemUrl, includeOrExcludeConceptSystemVersion); + includeOrExcludeConceptSystemVersion = optionallyPopulateVersionFromUrl( + includeOrExcludeConceptSystemUrl, includeOrExcludeConceptSystemVersion); includeOrExcludeConceptSystemUrl = substringBefore(includeOrExcludeConceptSystemUrl, OUR_PIPE_CHARACTER); if (wantSystemUrl != null && !wantSystemUrl.equals(includeOrExcludeConceptSystemUrl)) { @@ -641,7 +849,8 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu String loadedCodeSystemUrl; if (includeOrExcludeConceptSystemVersion != null) { - loadedCodeSystemUrl = includeOrExcludeConceptSystemUrl + OUR_PIPE_CHARACTER + includeOrExcludeConceptSystemVersion; + loadedCodeSystemUrl = + includeOrExcludeConceptSystemUrl + OUR_PIPE_CHARACTER + includeOrExcludeConceptSystemVersion; } else { loadedCodeSystemUrl = includeOrExcludeConceptSystemUrl; } @@ -652,29 +861,44 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu if (theInclude.getConcept().isEmpty()) { wantCodes = null; } else { - wantCodes = theInclude - .getConcept() - .stream().map(t -> t.getCode()).collect(Collectors.toSet()); + wantCodes = + theInclude.getConcept().stream().map(t -> t.getCode()).collect(Collectors.toSet()); } boolean ableToHandleCode = false; String failureMessage = null; FailureType failureType = FailureType.OTHER; - if (includeOrExcludeSystemResource == null || includeOrExcludeSystemResource.getContent() == 
Enumerations.CodeSystemContentMode.NOTPRESENT) { + if (includeOrExcludeSystemResource == null + || includeOrExcludeSystemResource.getContent() == Enumerations.CodeSystemContentMode.NOTPRESENT) { if (theWantCode != null) { - if (theValidationSupportContext.getRootValidationSupport().isCodeSystemSupported(theValidationSupportContext, includeOrExcludeConceptSystemUrl)) { - LookupCodeResult lookup = theValidationSupportContext.getRootValidationSupport().lookupCode(theValidationSupportContext, includeOrExcludeConceptSystemUrl, theWantCode, null); + if (theValidationSupportContext + .getRootValidationSupport() + .isCodeSystemSupported(theValidationSupportContext, includeOrExcludeConceptSystemUrl)) { + LookupCodeResult lookup = theValidationSupportContext + .getRootValidationSupport() + .lookupCode( + theValidationSupportContext, + includeOrExcludeConceptSystemUrl, + theWantCode, + null); if (lookup != null) { ableToHandleCode = true; if (lookup.isFound()) { - CodeSystem.ConceptDefinitionComponent conceptDefinition = new CodeSystem.ConceptDefinitionComponent() - .addConcept() - .setCode(theWantCode) - .setDisplay(lookup.getCodeDisplay()); - List codesList = Collections.singletonList(conceptDefinition); - addCodes(includeOrExcludeConceptSystemUrl, includeOrExcludeConceptSystemVersion, codesList, nextCodeList, wantCodes); + CodeSystem.ConceptDefinitionComponent conceptDefinition = + new CodeSystem.ConceptDefinitionComponent() + .addConcept() + .setCode(theWantCode) + .setDisplay(lookup.getCodeDisplay()); + List codesList = + Collections.singletonList(conceptDefinition); + addCodes( + includeOrExcludeConceptSystemUrl, + includeOrExcludeConceptSystemVersion, + codesList, + nextCodeList, + wantCodes); } } } else { @@ -689,7 +913,10 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu * enumerate a set of good codes for them is a nice compromise there. 
*/ if (Objects.equals(theInclude.getSystem(), theWantSystemUrlAndVersion)) { - Optional matchingEnumeratedConcept = theInclude.getConcept().stream().filter(t -> Objects.equals(t.getCode(), theWantCode)).findFirst(); + Optional + matchingEnumeratedConcept = theInclude.getConcept().stream() + .filter(t -> Objects.equals(t.getCode(), theWantCode)) + .findFirst(); // If the ValueSet.compose.include has no individual concepts in it, and // we can't find the actual referenced CodeSystem, we have no choice @@ -697,32 +924,44 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu if (!theInclude.getConcept().isEmpty()) { ableToHandleCode = true; } else { - failureMessage = getFailureMessageForMissingOrUnusableCodeSystem(includeOrExcludeSystemResource, loadedCodeSystemUrl); + failureMessage = getFailureMessageForMissingOrUnusableCodeSystem( + includeOrExcludeSystemResource, loadedCodeSystemUrl); } if (matchingEnumeratedConcept.isPresent()) { - CodeSystem.ConceptDefinitionComponent conceptDefinition = new CodeSystem.ConceptDefinitionComponent() - .addConcept() - .setCode(theWantCode) - .setDisplay(matchingEnumeratedConcept.get().getDisplay()); - List codesList = Collections.singletonList(conceptDefinition); - addCodes(includeOrExcludeConceptSystemUrl, includeOrExcludeConceptSystemVersion, codesList, nextCodeList, wantCodes); + CodeSystem.ConceptDefinitionComponent conceptDefinition = + new CodeSystem.ConceptDefinitionComponent() + .addConcept() + .setCode(theWantCode) + .setDisplay(matchingEnumeratedConcept + .get() + .getDisplay()); + List codesList = + Collections.singletonList(conceptDefinition); + addCodes( + includeOrExcludeConceptSystemUrl, + includeOrExcludeConceptSystemVersion, + codesList, + nextCodeList, + wantCodes); } } - } } else { - if (isNotBlank(theInclude.getSystem()) && !theInclude.getConcept().isEmpty() && theInclude.getFilter().isEmpty() && theInclude.getValueSet().isEmpty()) { - theInclude - .getConcept() - .stream() - .map(t -> new FhirVersionIndependentConcept(theInclude.getSystem(), t.getCode(), t.getDisplay(), theInclude.getVersion())) - .forEach(t -> nextCodeList.add(t)); + if (isNotBlank(theInclude.getSystem()) + && !theInclude.getConcept().isEmpty() + && theInclude.getFilter().isEmpty() + && theInclude.getValueSet().isEmpty()) { + theInclude.getConcept().stream() + .map(t -> new FhirVersionIndependentConcept( + theInclude.getSystem(), t.getCode(), t.getDisplay(), theInclude.getVersion())) + .forEach(t -> nextCodeList.add(t)); ableToHandleCode = true; } if (!ableToHandleCode) { - failureMessage = getFailureMessageForMissingOrUnusableCodeSystem(includeOrExcludeSystemResource, loadedCodeSystemUrl); + failureMessage = getFailureMessageForMissingOrUnusableCodeSystem( + includeOrExcludeSystemResource, loadedCodeSystemUrl); } } @@ -732,7 +971,8 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu if (!ableToHandleCode) { if (includeOrExcludeSystemResource == null && failureMessage == null) { - failureMessage = getFailureMessageForMissingOrUnusableCodeSystem(includeOrExcludeSystemResource, loadedCodeSystemUrl); + failureMessage = getFailureMessageForMissingOrUnusableCodeSystem( + includeOrExcludeSystemResource, loadedCodeSystemUrl); } if (includeOrExcludeSystemResource == null) { @@ -742,21 +982,31 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu throw new ExpansionCouldNotBeCompletedInternallyException(Msg.code(702) + failureMessage, failureType); } - if (includeOrExcludeSystemResource 
!= null && includeOrExcludeSystemResource.getContent() != Enumerations.CodeSystemContentMode.NOTPRESENT) { - addCodes(includeOrExcludeConceptSystemUrl, includeOrExcludeConceptSystemVersion, includeOrExcludeSystemResource.getConcept(), nextCodeList, wantCodes); + if (includeOrExcludeSystemResource != null + && includeOrExcludeSystemResource.getContent() != Enumerations.CodeSystemContentMode.NOTPRESENT) { + addCodes( + includeOrExcludeConceptSystemUrl, + includeOrExcludeConceptSystemVersion, + includeOrExcludeSystemResource.getConcept(), + nextCodeList, + wantCodes); } - } for (CanonicalType nextValueSetInclude : theInclude.getValueSet()) { org.hl7.fhir.r5.model.ValueSet vs = valueSetLoader.apply(nextValueSetInclude.getValueAsString()); if (vs != null) { - org.hl7.fhir.r5.model.ValueSet subExpansion = expandValueSetR5(theValidationSupportContext, vs, theWantSystemUrlAndVersion, theWantCode); + org.hl7.fhir.r5.model.ValueSet subExpansion = + expandValueSetR5(theValidationSupportContext, vs, theWantSystemUrlAndVersion, theWantCode); if (subExpansion == null) { - throw new ExpansionCouldNotBeCompletedInternallyException(Msg.code(703) + "Failed to expand ValueSet: " + nextValueSetInclude.getValueAsString(), FailureType.OTHER); + throw new ExpansionCouldNotBeCompletedInternallyException( + Msg.code(703) + "Failed to expand ValueSet: " + nextValueSetInclude.getValueAsString(), + FailureType.OTHER); } - for (org.hl7.fhir.r5.model.ValueSet.ValueSetExpansionContainsComponent next : subExpansion.getExpansion().getContains()) { - nextCodeList.add(new FhirVersionIndependentConcept(next.getSystem(), next.getCode(), next.getDisplay(), next.getVersion())); + for (org.hl7.fhir.r5.model.ValueSet.ValueSetExpansionContainsComponent next : + subExpansion.getExpansion().getContains()) { + nextCodeList.add(new FhirVersionIndependentConcept( + next.getSystem(), next.getCode(), next.getDisplay(), next.getVersion())); } } } @@ -783,42 +1033,66 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu return retVal; } - private Function newValueSetLoader(ValidationSupportContext theValidationSupportContext) { + private Function newValueSetLoader( + ValidationSupportContext theValidationSupportContext) { switch (myCtx.getVersion().getVersion()) { case DSTU2: case DSTU2_HL7ORG: return t -> { - IBaseResource vs = theValidationSupportContext.getRootValidationSupport().fetchValueSet(t); + IBaseResource vs = theValidationSupportContext + .getRootValidationSupport() + .fetchValueSet(t); if (vs instanceof ca.uhn.fhir.model.dstu2.resource.ValueSet) { - IParser parserRi = FhirContext.forCached(FhirVersionEnum.DSTU2_HL7ORG).newJsonParser(); + IParser parserRi = FhirContext.forCached(FhirVersionEnum.DSTU2_HL7ORG) + .newJsonParser(); IParser parserHapi = FhirContext.forDstu2Cached().newJsonParser(); - ca.uhn.fhir.model.dstu2.resource.ValueSet valueSet = (ca.uhn.fhir.model.dstu2.resource.ValueSet) vs; - org.hl7.fhir.dstu2.model.ValueSet valueSetRi = parserRi.parseResource(org.hl7.fhir.dstu2.model.ValueSet.class, parserHapi.encodeResourceToString(valueSet)); - return (org.hl7.fhir.r5.model.ValueSet) VersionConvertorFactory_10_50.convertResource(valueSetRi, new BaseAdvisor_10_50(false)); + ca.uhn.fhir.model.dstu2.resource.ValueSet valueSet = + (ca.uhn.fhir.model.dstu2.resource.ValueSet) vs; + org.hl7.fhir.dstu2.model.ValueSet valueSetRi = parserRi.parseResource( + org.hl7.fhir.dstu2.model.ValueSet.class, parserHapi.encodeResourceToString(valueSet)); + return (org.hl7.fhir.r5.model.ValueSet) + 
VersionConvertorFactory_10_50.convertResource(valueSetRi, new BaseAdvisor_10_50(false)); } else { - org.hl7.fhir.dstu2.model.ValueSet valueSet = (org.hl7.fhir.dstu2.model.ValueSet) theValidationSupportContext.getRootValidationSupport().fetchValueSet(t); - return (org.hl7.fhir.r5.model.ValueSet) VersionConvertorFactory_10_50.convertResource(valueSet, new BaseAdvisor_10_50(false)); + org.hl7.fhir.dstu2.model.ValueSet valueSet = + (org.hl7.fhir.dstu2.model.ValueSet) theValidationSupportContext + .getRootValidationSupport() + .fetchValueSet(t); + return (org.hl7.fhir.r5.model.ValueSet) + VersionConvertorFactory_10_50.convertResource(valueSet, new BaseAdvisor_10_50(false)); } }; case DSTU3: return t -> { - org.hl7.fhir.dstu3.model.ValueSet valueSet = (org.hl7.fhir.dstu3.model.ValueSet) theValidationSupportContext.getRootValidationSupport().fetchValueSet(t); - return (org.hl7.fhir.r5.model.ValueSet) VersionConvertorFactory_30_50.convertResource(valueSet, new BaseAdvisor_30_50(false)); + org.hl7.fhir.dstu3.model.ValueSet valueSet = + (org.hl7.fhir.dstu3.model.ValueSet) theValidationSupportContext + .getRootValidationSupport() + .fetchValueSet(t); + return (org.hl7.fhir.r5.model.ValueSet) + VersionConvertorFactory_30_50.convertResource(valueSet, new BaseAdvisor_30_50(false)); }; case R4: return t -> { - org.hl7.fhir.r4.model.ValueSet valueSet = (org.hl7.fhir.r4.model.ValueSet) theValidationSupportContext.getRootValidationSupport().fetchValueSet(t); - return (org.hl7.fhir.r5.model.ValueSet) VersionConvertorFactory_40_50.convertResource(valueSet, new BaseAdvisor_40_50(false)); + org.hl7.fhir.r4.model.ValueSet valueSet = + (org.hl7.fhir.r4.model.ValueSet) theValidationSupportContext + .getRootValidationSupport() + .fetchValueSet(t); + return (org.hl7.fhir.r5.model.ValueSet) + VersionConvertorFactory_40_50.convertResource(valueSet, new BaseAdvisor_40_50(false)); }; case R4B: return t -> { - org.hl7.fhir.r4b.model.ValueSet valueSet = (org.hl7.fhir.r4b.model.ValueSet) theValidationSupportContext.getRootValidationSupport().fetchValueSet(t); - return (org.hl7.fhir.r5.model.ValueSet) VersionConvertorFactory_43_50.convertResource(valueSet, new BaseAdvisor_43_50(false)); + org.hl7.fhir.r4b.model.ValueSet valueSet = + (org.hl7.fhir.r4b.model.ValueSet) theValidationSupportContext + .getRootValidationSupport() + .fetchValueSet(t); + return (org.hl7.fhir.r5.model.ValueSet) + VersionConvertorFactory_43_50.convertResource(valueSet, new BaseAdvisor_43_50(false)); }; default: case DSTU2_1: case R5: - return t -> (org.hl7.fhir.r5.model.ValueSet) theValidationSupportContext.getRootValidationSupport().fetchValueSet(t); + return t -> (org.hl7.fhir.r5.model.ValueSet) + theValidationSupportContext.getRootValidationSupport().fetchValueSet(t); } } @@ -827,16 +1101,20 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu case DSTU2: case DSTU2_HL7ORG: return t -> { - IBaseResource codeSystem = theValidationSupportContext.getRootValidationSupport().fetchCodeSystem(t); + IBaseResource codeSystem = theValidationSupportContext + .getRootValidationSupport() + .fetchCodeSystem(t); CodeSystem retVal = null; if (codeSystem != null) { retVal = new CodeSystem(); if (codeSystem instanceof ca.uhn.fhir.model.dstu2.resource.ValueSet) { - ca.uhn.fhir.model.dstu2.resource.ValueSet codeSystemCasted = (ca.uhn.fhir.model.dstu2.resource.ValueSet) codeSystem; + ca.uhn.fhir.model.dstu2.resource.ValueSet codeSystemCasted = + (ca.uhn.fhir.model.dstu2.resource.ValueSet) codeSystem; 
retVal.setUrl(codeSystemCasted.getUrl()); addCodesDstu2(codeSystemCasted.getCodeSystem().getConcept(), retVal.getConcept()); } else { - org.hl7.fhir.dstu2.model.ValueSet codeSystemCasted = (org.hl7.fhir.dstu2.model.ValueSet) codeSystem; + org.hl7.fhir.dstu2.model.ValueSet codeSystemCasted = + (org.hl7.fhir.dstu2.model.ValueSet) codeSystem; retVal.setUrl(codeSystemCasted.getUrl()); addCodesDstu2Hl7Org(codeSystemCasted.getCodeSystem().getConcept(), retVal.getConcept()); } @@ -845,43 +1123,64 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu }; case DSTU3: return t -> { - org.hl7.fhir.dstu3.model.CodeSystem codeSystem = (org.hl7.fhir.dstu3.model.CodeSystem) theValidationSupportContext.getRootValidationSupport().fetchCodeSystem(t); - return (CodeSystem) VersionConvertorFactory_30_50.convertResource(codeSystem, new BaseAdvisor_30_50(false)); + org.hl7.fhir.dstu3.model.CodeSystem codeSystem = + (org.hl7.fhir.dstu3.model.CodeSystem) theValidationSupportContext + .getRootValidationSupport() + .fetchCodeSystem(t); + return (CodeSystem) + VersionConvertorFactory_30_50.convertResource(codeSystem, new BaseAdvisor_30_50(false)); }; case R4: return t -> { - org.hl7.fhir.r4.model.CodeSystem codeSystem = (org.hl7.fhir.r4.model.CodeSystem) theValidationSupportContext.getRootValidationSupport().fetchCodeSystem(t); - return (CodeSystem) VersionConvertorFactory_40_50.convertResource(codeSystem, new BaseAdvisor_40_50(false)); + org.hl7.fhir.r4.model.CodeSystem codeSystem = + (org.hl7.fhir.r4.model.CodeSystem) theValidationSupportContext + .getRootValidationSupport() + .fetchCodeSystem(t); + return (CodeSystem) + VersionConvertorFactory_40_50.convertResource(codeSystem, new BaseAdvisor_40_50(false)); }; case R4B: return t -> { - org.hl7.fhir.r4b.model.CodeSystem codeSystem = (org.hl7.fhir.r4b.model.CodeSystem) theValidationSupportContext.getRootValidationSupport().fetchCodeSystem(t); - return (CodeSystem) VersionConvertorFactory_43_50.convertResource(codeSystem, new BaseAdvisor_43_50(false)); + org.hl7.fhir.r4b.model.CodeSystem codeSystem = + (org.hl7.fhir.r4b.model.CodeSystem) theValidationSupportContext + .getRootValidationSupport() + .fetchCodeSystem(t); + return (CodeSystem) + VersionConvertorFactory_43_50.convertResource(codeSystem, new BaseAdvisor_43_50(false)); }; case DSTU2_1: case R5: default: - return t -> (org.hl7.fhir.r5.model.CodeSystem) theValidationSupportContext.getRootValidationSupport().fetchCodeSystem(t); - + return t -> (org.hl7.fhir.r5.model.CodeSystem) + theValidationSupportContext.getRootValidationSupport().fetchCodeSystem(t); } } - private String getFailureMessageForMissingOrUnusableCodeSystem(CodeSystem includeOrExcludeSystemResource, String loadedCodeSystemUrl) { + private String getFailureMessageForMissingOrUnusableCodeSystem( + CodeSystem includeOrExcludeSystemResource, String loadedCodeSystemUrl) { String failureMessage; if (includeOrExcludeSystemResource == null) { failureMessage = "Unable to expand ValueSet because CodeSystem could not be found: " + loadedCodeSystemUrl; } else { assert includeOrExcludeSystemResource.getContent() == Enumerations.CodeSystemContentMode.NOTPRESENT; - failureMessage = "Unable to expand ValueSet because CodeSystem has CodeSystem.content=not-present but contents were not found: " + loadedCodeSystemUrl; + failureMessage = + "Unable to expand ValueSet because CodeSystem has CodeSystem.content=not-present but contents were not found: " + + loadedCodeSystemUrl; } return failureMessage; } - private void addCodes(String 
theCodeSystemUrl, String theCodeSystemVersion, List theSource, List theTarget, Set theCodeFilter) { + private void addCodes( + String theCodeSystemUrl, + String theCodeSystemVersion, + List theSource, + List theTarget, + Set theCodeFilter) { for (CodeSystem.ConceptDefinitionComponent next : theSource) { if (isNotBlank(next.getCode())) { if (theCodeFilter == null || theCodeFilter.contains(next.getCode())) { - theTarget.add(new FhirVersionIndependentConcept(theCodeSystemUrl, next.getCode(), next.getDisplay(), theCodeSystemVersion)); + theTarget.add(new FhirVersionIndependentConcept( + theCodeSystemUrl, next.getCode(), next.getDisplay(), theCodeSystemVersion)); } } addCodes(theCodeSystemUrl, theCodeSystemVersion, next.getConcept(), theTarget, theCodeFilter); @@ -889,17 +1188,15 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu } private String optionallyPopulateVersionFromUrl(String theSystemUrl, String theVersion) { - if(contains(theSystemUrl, OUR_PIPE_CHARACTER) && isBlank(theVersion)){ + if (contains(theSystemUrl, OUR_PIPE_CHARACTER) && isBlank(theVersion)) { theVersion = substringAfter(theSystemUrl, OUR_PIPE_CHARACTER); } return theVersion; } public enum FailureType { - UNKNOWN_CODE_SYSTEM, OTHER - } public static class ExpansionCouldNotBeCompletedInternallyException extends Exception { @@ -917,46 +1214,63 @@ public class InMemoryTerminologyServerValidationSupport implements IValidationSu } } - private static void flattenAndConvertCodesDstu2(List theInput, List theFhirVersionIndependentConcepts) { + private static void flattenAndConvertCodesDstu2( + List theInput, + List theFhirVersionIndependentConcepts) { for (ca.uhn.fhir.model.dstu2.resource.ValueSet.ExpansionContains next : theInput) { - theFhirVersionIndependentConcepts.add(new FhirVersionIndependentConcept(next.getSystem(), next.getCode(), next.getDisplay())); + theFhirVersionIndependentConcepts.add( + new FhirVersionIndependentConcept(next.getSystem(), next.getCode(), next.getDisplay())); flattenAndConvertCodesDstu2(next.getContains(), theFhirVersionIndependentConcepts); } } - private static void flattenAndConvertCodesDstu2Hl7Org(List theInput, List theFhirVersionIndependentConcepts) { + private static void flattenAndConvertCodesDstu2Hl7Org( + List theInput, + List theFhirVersionIndependentConcepts) { for (org.hl7.fhir.dstu2.model.ValueSet.ValueSetExpansionContainsComponent next : theInput) { - theFhirVersionIndependentConcepts.add(new FhirVersionIndependentConcept(next.getSystem(), next.getCode(), next.getDisplay())); + theFhirVersionIndependentConcepts.add( + new FhirVersionIndependentConcept(next.getSystem(), next.getCode(), next.getDisplay())); flattenAndConvertCodesDstu2Hl7Org(next.getContains(), theFhirVersionIndependentConcepts); } } - private static void flattenAndConvertCodesDstu3(List theInput, List theFhirVersionIndependentConcepts) { + private static void flattenAndConvertCodesDstu3( + List theInput, + List theFhirVersionIndependentConcepts) { for (org.hl7.fhir.dstu3.model.ValueSet.ValueSetExpansionContainsComponent next : theInput) { - theFhirVersionIndependentConcepts.add(new FhirVersionIndependentConcept(next.getSystem(), next.getCode(), next.getDisplay(), next.getVersion())); + theFhirVersionIndependentConcepts.add(new FhirVersionIndependentConcept( + next.getSystem(), next.getCode(), next.getDisplay(), next.getVersion())); flattenAndConvertCodesDstu3(next.getContains(), theFhirVersionIndependentConcepts); } } - private static void flattenAndConvertCodesR4(List theInput, List 
theFhirVersionIndependentConcepts) { + private static void flattenAndConvertCodesR4( + List theInput, + List theFhirVersionIndependentConcepts) { for (org.hl7.fhir.r4.model.ValueSet.ValueSetExpansionContainsComponent next : theInput) { - theFhirVersionIndependentConcepts.add(new FhirVersionIndependentConcept(next.getSystem(), next.getCode(), next.getDisplay(), next.getVersion())); + theFhirVersionIndependentConcepts.add(new FhirVersionIndependentConcept( + next.getSystem(), next.getCode(), next.getDisplay(), next.getVersion())); flattenAndConvertCodesR4(next.getContains(), theFhirVersionIndependentConcepts); } } - private static void flattenAndConvertCodesR4B(List theInput, List theFhirVersionIndependentConcepts) { + private static void flattenAndConvertCodesR4B( + List theInput, + List theFhirVersionIndependentConcepts) { for (org.hl7.fhir.r4b.model.ValueSet.ValueSetExpansionContainsComponent next : theInput) { - theFhirVersionIndependentConcepts.add(new FhirVersionIndependentConcept(next.getSystem(), next.getCode(), next.getDisplay(), next.getVersion())); + theFhirVersionIndependentConcepts.add(new FhirVersionIndependentConcept( + next.getSystem(), next.getCode(), next.getDisplay(), next.getVersion())); flattenAndConvertCodesR4B(next.getContains(), theFhirVersionIndependentConcepts); } } - private static void flattenAndConvertCodesR5(List theInput, List theFhirVersionIndependentConcepts) { + private static void flattenAndConvertCodesR5( + List theInput, + List theFhirVersionIndependentConcepts) { for (org.hl7.fhir.r5.model.ValueSet.ValueSetExpansionContainsComponent next : theInput) { - theFhirVersionIndependentConcepts.add(new FhirVersionIndependentConcept(next.getSystem(), next.getCode(), next.getDisplay(), next.getVersion())); + theFhirVersionIndependentConcepts.add(new FhirVersionIndependentConcept( + next.getSystem(), next.getCode(), next.getDisplay(), next.getVersion())); flattenAndConvertCodesR5(next.getContains(), theFhirVersionIndependentConcepts); } } - } diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/LocalFileValidationSupport.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/LocalFileValidationSupport.java index e8c47f9bb1a..c894ff4b685 100644 --- a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/LocalFileValidationSupport.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/LocalFileValidationSupport.java @@ -40,8 +40,7 @@ public class LocalFileValidationSupport extends PrePopulatedValidationSupport { public void loadFile(String theFileName) throws IOException { String contents = IOUtils.toString(new InputStreamReader(new FileInputStream(theFileName), "UTF-8")); - IBaseResource resource = myCtx.newJsonParser().parseResource(contents); + IBaseResource resource = myCtx.newJsonParser().parseResource(contents); this.addResource(resource); } - } diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/NpmPackageValidationSupport.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/NpmPackageValidationSupport.java index 73ed8ca82ad..bc92b604896 100644 --- a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/NpmPackageValidationSupport.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/NpmPackageValidationSupport.java @@ -9,12 +9,12 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import 
org.hl7.fhir.utilities.TextFile; import org.hl7.fhir.utilities.npm.NpmPackage; -import javax.annotation.Nonnull; import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; import java.util.List; import java.util.Locale; +import javax.annotation.Nonnull; /** * This interceptor loads and parses FHIR NPM Conformance Packages, and makes the diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/PrePopulatedValidationSupport.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/PrePopulatedValidationSupport.java index 543054c53e2..dea829487ac 100644 --- a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/PrePopulatedValidationSupport.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/PrePopulatedValidationSupport.java @@ -14,8 +14,6 @@ import org.hl7.fhir.r4.model.CodeSystem; import org.hl7.fhir.r4.model.StructureDefinition; import org.hl7.fhir.r4.model.ValueSet; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; @@ -24,6 +22,8 @@ import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -31,7 +31,8 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; * This class is an implementation of {@link IValidationSupport} which may be pre-populated * with a collection of validation resources to be used by the validator. */ -public class PrePopulatedValidationSupport extends BaseStaticResourceValidationSupport implements IValidationSupport, ILockable { +public class PrePopulatedValidationSupport extends BaseStaticResourceValidationSupport + implements IValidationSupport, ILockable { private final Map myCodeSystems; private final Map myStructureDefinitions; @@ -58,10 +59,10 @@ public class PrePopulatedValidationSupport extends BaseStaticResourceValidationS * the resource itself. **/ public PrePopulatedValidationSupport( - FhirContext theFhirContext, - Map theStructureDefinitions, - Map theValueSets, - Map theCodeSystems) { + FhirContext theFhirContext, + Map theStructureDefinitions, + Map theValueSets, + Map theCodeSystems) { this(theFhirContext, theStructureDefinitions, theValueSets, theCodeSystems, new HashMap<>(), new HashMap<>()); } @@ -78,12 +79,12 @@ public class PrePopulatedValidationSupport extends BaseStaticResourceValidationS * are the contents of the file as a byte array. 
*/ public PrePopulatedValidationSupport( - FhirContext theFhirContext, - Map theStructureDefinitions, - Map theValueSets, - Map theCodeSystems, - Map theSearchParameters, - Map theBinaries) { + FhirContext theFhirContext, + Map theStructureDefinitions, + Map theValueSets, + Map theCodeSystems, + Map theSearchParameters, + Map theBinaries) { super(theFhirContext); Validate.notNull(theFhirContext, "theFhirContext must not be null"); Validate.notNull(theStructureDefinitions, "theStructureDefinitions must not be null"); @@ -133,10 +134,14 @@ public class PrePopulatedValidationSupport extends BaseStaticResourceValidationS Validate.notNull(theResource, "the" + theResourceName + " must not be null"); RuntimeResourceDefinition resourceDef = getFhirContext().getResourceDefinition(theResource); String actualResourceName = resourceDef.getName(); - Validate.isTrue(actualResourceName.equals(theResourceName), "the" + theResourceName + " must be a " + theResourceName + " - Got: " + actualResourceName); + Validate.isTrue( + actualResourceName.equals(theResourceName), + "the" + theResourceName + " must be a " + theResourceName + " - Got: " + actualResourceName); - Optional urlValue = resourceDef.getChildByName("url").getAccessor().getFirstValueOrNull(theResource); - String url = urlValue.map(t -> (((IPrimitiveType) t).getValueAsString())).orElse(null); + Optional urlValue = + resourceDef.getChildByName("url").getAccessor().getFirstValueOrNull(theResource); + String url = + urlValue.map(t -> (((IPrimitiveType) t).getValueAsString())).orElse(null); Validate.notNull(url, "the" + theResourceName + ".getUrl() must not return null"); Validate.notBlank(url, "the" + theResourceName + ".getUrl() must return a value"); @@ -151,8 +156,11 @@ public class PrePopulatedValidationSupport extends BaseStaticResourceValidationS HashSet retVal = Sets.newHashSet(url, urlWithoutVersion); - Optional versionValue = resourceDef.getChildByName("version").getAccessor().getFirstValueOrNull(theResource); - String version = versionValue.map(t -> (((IPrimitiveType) t).getValueAsString())).orElse(null); + Optional versionValue = + resourceDef.getChildByName("version").getAccessor().getFirstValueOrNull(theResource); + String version = versionValue + .map(t -> (((IPrimitiveType) t).getValueAsString())) + .orElse(null); if (isNotBlank(version)) { retVal.add(urlWithoutVersion + "|" + version); } @@ -223,7 +231,6 @@ public class PrePopulatedValidationSupport extends BaseStaticResourceValidationS addToMap(theValueSet, myValueSets, urls); } - /** * @param theResource The resource. This method delegates to the type-specific methods (e.g. {@link #addCodeSystem(IBaseResource)}) * and will do nothing if the resource type is not supported by this class. 
@@ -285,7 +292,9 @@ public class PrePopulatedValidationSupport extends BaseStaticResourceValidationS } @Override - public byte[] fetchBinary(String theBinaryKey) { return myBinaries.get(theBinaryKey); } + public byte[] fetchBinary(String theBinaryKey) { + return myBinaries.get(theBinaryKey); + } @Override public boolean isCodeSystemSupported(ValidationSupportContext theValidationSupportContext, String theSystem) { @@ -301,13 +310,9 @@ public class PrePopulatedValidationSupport extends BaseStaticResourceValidationS * Returns a count of all known resources */ public int countAll() { - return myBinaries.size() + - myCodeSystems.size() + - myStructureDefinitions.size() + - myValueSets.size(); + return myBinaries.size() + myCodeSystems.size() + myStructureDefinitions.size() + myValueSets.size(); } - @Override public synchronized void lock() { myLocked = true; diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/RemoteTerminologyServiceValidationSupport.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/RemoteTerminologyServiceValidationSupport.java index f8395ef3a3a..437ddb2358c 100644 --- a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/RemoteTerminologyServiceValidationSupport.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/RemoteTerminologyServiceValidationSupport.java @@ -21,9 +21,9 @@ import org.hl7.fhir.r4.model.ValueSet; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -55,12 +55,24 @@ public class RemoteTerminologyServiceValidationSupport extends BaseValidationSup } @Override - public CodeValidationResult validateCode(@Nonnull ValidationSupportContext theValidationSupportContext, @Nonnull ConceptValidationOptions theOptions, String theCodeSystem, String theCode, String theDisplay, String theValueSetUrl) { + public CodeValidationResult validateCode( + @Nonnull ValidationSupportContext theValidationSupportContext, + @Nonnull ConceptValidationOptions theOptions, + String theCodeSystem, + String theCode, + String theDisplay, + String theValueSetUrl) { return invokeRemoteValidateCode(theCodeSystem, theCode, theDisplay, theValueSetUrl, null); } @Override - public CodeValidationResult validateCodeInValueSet(ValidationSupportContext theValidationSupportContext, ConceptValidationOptions theOptions, String theCodeSystem, String theCode, String theDisplay, @Nonnull IBaseResource theValueSet) { + public CodeValidationResult validateCodeInValueSet( + ValidationSupportContext theValidationSupportContext, + ConceptValidationOptions theOptions, + String theCodeSystem, + String theCode, + String theDisplay, + @Nonnull IBaseResource theValueSet) { IBaseResource valueSet = theValueSet; @@ -71,8 +83,10 @@ public class RemoteTerminologyServiceValidationSupport extends BaseValidationSup codeSystem = extractCodeSystemForCode((ValueSet) theValueSet, theCode); } - // Remote terminology services shouldn't be used to validate codes with an implied system - if (isBlank(codeSystem)) { return null; } + // Remote terminology services shouldn't be used to validate codes with an implied system + if (isBlank(codeSystem)) { + return null; + } String valueSetUrl = DefaultProfileValidationSupport.getConformanceResourceUrl(myCtx, 
valueSet); if (isNotBlank(valueSetUrl)) { @@ -87,19 +101,21 @@ public class RemoteTerminologyServiceValidationSupport extends BaseValidationSup * Try to obtain the codeSystem of the received code from the received ValueSet */ private String extractCodeSystemForCode(ValueSet theValueSet, String theCode) { - if (theValueSet.getCompose() == null || theValueSet.getCompose().getInclude() == null - || theValueSet.getCompose().getInclude().isEmpty()) { + if (theValueSet.getCompose() == null + || theValueSet.getCompose().getInclude() == null + || theValueSet.getCompose().getInclude().isEmpty()) { return null; } if (theValueSet.getCompose().getInclude().size() == 1) { - ValueSet.ConceptSetComponent include = theValueSet.getCompose().getInclude().iterator().next(); + ValueSet.ConceptSetComponent include = + theValueSet.getCompose().getInclude().iterator().next(); return getVersionedCodeSystem(include); } // when component has more than one include, their codeSystem(s) could be different, so we need to make sure // that we are picking up the system for the include to which the code corresponds - for (ValueSet.ConceptSetComponent include: theValueSet.getCompose().getInclude()) { + for (ValueSet.ConceptSetComponent include : theValueSet.getCompose().getInclude()) { if (include.hasSystem()) { for (ValueSet.ConceptReferenceComponent concept : include.getConcept()) { if (concept.hasCodeElement() && concept.getCode().equals(theCode)) { @@ -116,23 +132,23 @@ public class RemoteTerminologyServiceValidationSupport extends BaseValidationSup } private String getVersionedCodeSystem(ValueSet.ConceptSetComponent theComponent) { - String codeSystem = theComponent.getSystem(); - if ( ! codeSystem.contains("|") && theComponent.hasVersion()) { - codeSystem += "|" + theComponent.getVersion(); - } - return codeSystem; + String codeSystem = theComponent.getSystem(); + if (!codeSystem.contains("|") && theComponent.hasVersion()) { + codeSystem += "|" + theComponent.getVersion(); + } + return codeSystem; } @Override public IBaseResource fetchCodeSystem(String theSystem) { IGenericClient client = provideClient(); - Class bundleType = myCtx.getResourceDefinition("Bundle").getImplementingClass(IBaseBundle.class); - IBaseBundle results = client - .search() - .forResource("CodeSystem") - .where(CodeSystem.URL.matches().value(theSystem)) - .returnBundle(bundleType) - .execute(); + Class bundleType = + myCtx.getResourceDefinition("Bundle").getImplementingClass(IBaseBundle.class); + IBaseBundle results = client.search() + .forResource("CodeSystem") + .where(CodeSystem.URL.matches().value(theSystem)) + .returnBundle(bundleType) + .execute(); List resultsList = BundleUtil.toListOfResources(myCtx, results); if (resultsList.size() > 0) { return resultsList.get(0); @@ -142,7 +158,11 @@ public class RemoteTerminologyServiceValidationSupport extends BaseValidationSup } @Override - public LookupCodeResult lookupCode(ValidationSupportContext theValidationSupportContext, String theSystem, String theCode, String theDisplayLanguage) { + public LookupCodeResult lookupCode( + ValidationSupportContext theValidationSupportContext, + String theSystem, + String theCode, + String theDisplayLanguage) { Validate.notBlank(theCode, "theCode must be provided"); IGenericClient client = provideClient(); @@ -160,45 +180,57 @@ public class RemoteTerminologyServiceValidationSupport extends BaseValidationSup if (!StringUtils.isEmpty(theDisplayLanguage)) { ParametersUtil.addParameterToParametersString(fhirContext, params, "language", theDisplayLanguage); } - 
Class codeSystemClass = myCtx.getResourceDefinition("CodeSystem").getImplementingClass(); - IBaseParameters outcome = client - .operation() - .onType((Class) codeSystemClass) - .named("$lookup") - .withParameters(params) - .useHttpGet() - .execute(); + Class codeSystemClass = + myCtx.getResourceDefinition("CodeSystem").getImplementingClass(); + IBaseParameters outcome = client.operation() + .onType((Class) codeSystemClass) + .named("$lookup") + .withParameters(params) + .useHttpGet() + .execute(); if (outcome != null && !outcome.isEmpty()) { switch (fhirVersion) { case DSTU3: - return generateLookupCodeResultDSTU3(theCode, theSystem, (org.hl7.fhir.dstu3.model.Parameters)outcome); + return generateLookupCodeResultDSTU3( + theCode, theSystem, (org.hl7.fhir.dstu3.model.Parameters) outcome); case R4: - return generateLookupCodeResultR4(theCode, theSystem, (org.hl7.fhir.r4.model.Parameters)outcome); + return generateLookupCodeResultR4( + theCode, theSystem, (org.hl7.fhir.r4.model.Parameters) outcome); } } break; default: - throw new UnsupportedOperationException(Msg.code(710) + "Unsupported FHIR version '" + fhirVersion.getFhirVersionString() + - "'. Only DSTU3 and R4 are supported."); + throw new UnsupportedOperationException(Msg.code(710) + "Unsupported FHIR version '" + + fhirVersion.getFhirVersionString() + "'. Only DSTU3 and R4 are supported."); } return null; } - private LookupCodeResult generateLookupCodeResultDSTU3(String theCode, String theSystem, org.hl7.fhir.dstu3.model.Parameters outcomeDSTU3) { + private LookupCodeResult generateLookupCodeResultDSTU3( + String theCode, String theSystem, org.hl7.fhir.dstu3.model.Parameters outcomeDSTU3) { // NOTE: I wanted to put all of this logic into the IValidationSupport Class, but it would've required adding - // several new dependencies on version-specific libraries and that is explicitly forbidden (see comment in POM). + // several new dependencies on version-specific libraries and that is explicitly forbidden (see comment in + // POM). 
LookupCodeResult result = new LookupCodeResult(); result.setSearchedForCode(theCode); result.setSearchedForSystem(theSystem); result.setFound(true); - for (org.hl7.fhir.dstu3.model.Parameters.ParametersParameterComponent parameterComponent : outcomeDSTU3.getParameter()) { + for (org.hl7.fhir.dstu3.model.Parameters.ParametersParameterComponent parameterComponent : + outcomeDSTU3.getParameter()) { switch (parameterComponent.getName()) { case "property": org.hl7.fhir.dstu3.model.Property part = parameterComponent.getChildByName("part"); - // The assumption here is that we may only have 2 elements in this part, and if so, these 2 will be saved + // The assumption here is that we may only have 2 elements in this part, and if so, these 2 will be + // saved if (part != null && part.hasValues() && part.getValues().size() >= 2) { - String key = ((org.hl7.fhir.dstu3.model.Parameters.ParametersParameterComponent) part.getValues().get(0)).getValue().toString(); - String value = ((org.hl7.fhir.dstu3.model.Parameters.ParametersParameterComponent) part.getValues().get(1)).getValue().toString(); + String key = ((org.hl7.fhir.dstu3.model.Parameters.ParametersParameterComponent) + part.getValues().get(0)) + .getValue() + .toString(); + String value = ((org.hl7.fhir.dstu3.model.Parameters.ParametersParameterComponent) + part.getValues().get(1)) + .getValue() + .toString(); if (!StringUtils.isEmpty(key) && !StringUtils.isEmpty(value)) { result.getProperties().add(new StringConceptProperty(key, value)); } @@ -206,13 +238,16 @@ public class RemoteTerminologyServiceValidationSupport extends BaseValidationSup break; case "designation": ConceptDesignation conceptDesignation = new ConceptDesignation(); - for (org.hl7.fhir.dstu3.model.Parameters.ParametersParameterComponent designationComponent : parameterComponent.getPart()) { - switch(designationComponent.getName()) { + for (org.hl7.fhir.dstu3.model.Parameters.ParametersParameterComponent designationComponent : + parameterComponent.getPart()) { + switch (designationComponent.getName()) { case "language": - conceptDesignation.setLanguage(designationComponent.getValue().toString()); + conceptDesignation.setLanguage( + designationComponent.getValue().toString()); break; case "use": - org.hl7.fhir.dstu3.model.Coding coding = (org.hl7.fhir.dstu3.model.Coding)designationComponent.getValue(); + org.hl7.fhir.dstu3.model.Coding coding = + (org.hl7.fhir.dstu3.model.Coding) designationComponent.getValue(); if (coding != null) { conceptDesignation.setUseSystem(coding.getSystem()); conceptDesignation.setUseCode(coding.getCode()); @@ -220,44 +255,72 @@ public class RemoteTerminologyServiceValidationSupport extends BaseValidationSup } break; case "value": - conceptDesignation.setValue(((designationComponent.getValue() == null)?null:designationComponent.getValue().toString())); + conceptDesignation.setValue( + ((designationComponent.getValue() == null) + ? null + : designationComponent + .getValue() + .toString())); break; } } result.getDesignations().add(conceptDesignation); break; case "name": - result.setCodeSystemDisplayName(((parameterComponent.getValue() == null)?null:parameterComponent.getValue().toString())); + result.setCodeSystemDisplayName( + ((parameterComponent.getValue() == null) + ? null + : parameterComponent.getValue().toString())); break; case "version": - result.setCodeSystemVersion(((parameterComponent.getValue() == null)?null:parameterComponent.getValue().toString())); + result.setCodeSystemVersion( + ((parameterComponent.getValue() == null) + ? 
null + : parameterComponent.getValue().toString())); break; case "display": - result.setCodeDisplay(((parameterComponent.getValue() == null)?null:parameterComponent.getValue().toString())); + result.setCodeDisplay( + ((parameterComponent.getValue() == null) + ? null + : parameterComponent.getValue().toString())); break; case "abstract": - result.setCodeIsAbstract(((parameterComponent.getValue() == null)?false:Boolean.parseBoolean(parameterComponent.getValue().toString()))); + result.setCodeIsAbstract( + ((parameterComponent.getValue() == null) + ? false + : Boolean.parseBoolean( + parameterComponent.getValue().toString()))); break; } } return result; } - private LookupCodeResult generateLookupCodeResultR4(String theCode, String theSystem, org.hl7.fhir.r4.model.Parameters outcomeR4) { + private LookupCodeResult generateLookupCodeResultR4( + String theCode, String theSystem, org.hl7.fhir.r4.model.Parameters outcomeR4) { // NOTE: I wanted to put all of this logic into the IValidationSupport Class, but it would've required adding - // several new dependencies on version-specific libraries and that is explicitly forbidden (see comment in POM). + // several new dependencies on version-specific libraries and that is explicitly forbidden (see comment in + // POM). LookupCodeResult result = new LookupCodeResult(); result.setSearchedForCode(theCode); result.setSearchedForSystem(theSystem); result.setFound(true); - for (org.hl7.fhir.r4.model.Parameters.ParametersParameterComponent parameterComponent : outcomeR4.getParameter()) { + for (org.hl7.fhir.r4.model.Parameters.ParametersParameterComponent parameterComponent : + outcomeR4.getParameter()) { switch (parameterComponent.getName()) { case "property": org.hl7.fhir.r4.model.Property part = parameterComponent.getChildByName("part"); - // The assumption here is that we may only have 2 elements in this part, and if so, these 2 will be saved + // The assumption here is that we may only have 2 elements in this part, and if so, these 2 will be + // saved if (part != null && part.hasValues() && part.getValues().size() >= 2) { - String key = ((org.hl7.fhir.r4.model.Parameters.ParametersParameterComponent) part.getValues().get(0)).getValue().toString(); - String value = ((org.hl7.fhir.r4.model.Parameters.ParametersParameterComponent) part.getValues().get(1)).getValue().toString(); + String key = ((org.hl7.fhir.r4.model.Parameters.ParametersParameterComponent) + part.getValues().get(0)) + .getValue() + .toString(); + String value = ((org.hl7.fhir.r4.model.Parameters.ParametersParameterComponent) + part.getValues().get(1)) + .getValue() + .toString(); if (!StringUtils.isEmpty(key) && !StringUtils.isEmpty(value)) { result.getProperties().add(new StringConceptProperty(key, value)); } @@ -265,13 +328,16 @@ public class RemoteTerminologyServiceValidationSupport extends BaseValidationSup break; case "designation": ConceptDesignation conceptDesignation = new ConceptDesignation(); - for (org.hl7.fhir.r4.model.Parameters.ParametersParameterComponent designationComponent : parameterComponent.getPart()) { - switch(designationComponent.getName()) { + for (org.hl7.fhir.r4.model.Parameters.ParametersParameterComponent designationComponent : + parameterComponent.getPart()) { + switch (designationComponent.getName()) { case "language": - conceptDesignation.setLanguage(designationComponent.getValue().toString()); + conceptDesignation.setLanguage( + designationComponent.getValue().toString()); break; case "use": - org.hl7.fhir.r4.model.Coding coding = 
(org.hl7.fhir.r4.model.Coding)designationComponent.getValue(); + org.hl7.fhir.r4.model.Coding coding = + (org.hl7.fhir.r4.model.Coding) designationComponent.getValue(); if (coding != null) { conceptDesignation.setUseSystem(coding.getSystem()); conceptDesignation.setUseCode(coding.getCode()); @@ -279,23 +345,41 @@ public class RemoteTerminologyServiceValidationSupport extends BaseValidationSup } break; case "value": - conceptDesignation.setValue(((designationComponent.getValue() == null)?null:designationComponent.getValue().toString())); + conceptDesignation.setValue( + ((designationComponent.getValue() == null) + ? null + : designationComponent + .getValue() + .toString())); break; } } result.getDesignations().add(conceptDesignation); break; case "name": - result.setCodeSystemDisplayName(((parameterComponent.getValue() == null)?null:parameterComponent.getValue().toString())); + result.setCodeSystemDisplayName( + ((parameterComponent.getValue() == null) + ? null + : parameterComponent.getValue().toString())); break; case "version": - result.setCodeSystemVersion(((parameterComponent.getValue() == null)?null:parameterComponent.getValue().toString())); + result.setCodeSystemVersion( + ((parameterComponent.getValue() == null) + ? null + : parameterComponent.getValue().toString())); break; case "display": - result.setCodeDisplay(((parameterComponent.getValue() == null)?null:parameterComponent.getValue().toString())); + result.setCodeDisplay( + ((parameterComponent.getValue() == null) + ? null + : parameterComponent.getValue().toString())); break; case "abstract": - result.setCodeIsAbstract(((parameterComponent.getValue() == null)?false:Boolean.parseBoolean(parameterComponent.getValue().toString()))); + result.setCodeIsAbstract( + ((parameterComponent.getValue() == null) + ? 
false + : Boolean.parseBoolean( + parameterComponent.getValue().toString()))); break; } } @@ -305,13 +389,13 @@ public class RemoteTerminologyServiceValidationSupport extends BaseValidationSup @Override public IBaseResource fetchValueSet(String theValueSetUrl) { IGenericClient client = provideClient(); - Class bundleType = myCtx.getResourceDefinition("Bundle").getImplementingClass(IBaseBundle.class); - IBaseBundle results = client - .search() - .forResource("ValueSet") - .where(CodeSystem.URL.matches().value(theValueSetUrl)) - .returnBundle(bundleType) - .execute(); + Class bundleType = + myCtx.getResourceDefinition("Bundle").getImplementingClass(IBaseBundle.class); + IBaseBundle results = client.search() + .forResource("ValueSet") + .where(CodeSystem.URL.matches().value(theValueSetUrl)) + .returnBundle(bundleType) + .execute(); List resultsList = BundleUtil.toListOfResources(myCtx, results); if (resultsList.size() > 0) { return resultsList.get(0); @@ -337,12 +421,11 @@ public class RemoteTerminologyServiceValidationSupport extends BaseValidationSup IBaseParameters params = RemoteTerminologyUtil.buildTranslateInputParameters(fhirContext, theRequest); - IBaseParameters outcome = client - .operation() - .onType("ConceptMap") - .named("$translate") - .withParameters(params) - .execute(); + IBaseParameters outcome = client.operation() + .onType("ConceptMap") + .named("$translate") + .withParameters(params) + .execute(); return RemoteTerminologyUtil.translateOutcomeToResults(fhirContext, outcome); } @@ -355,26 +438,27 @@ public class RemoteTerminologyServiceValidationSupport extends BaseValidationSup return retVal; } - protected CodeValidationResult invokeRemoteValidateCode(String theCodeSystem, String theCode, String theDisplay, String theValueSetUrl, IBaseResource theValueSet) { + protected CodeValidationResult invokeRemoteValidateCode( + String theCodeSystem, String theCode, String theDisplay, String theValueSetUrl, IBaseResource theValueSet) { if (isBlank(theCode)) { return null; } IGenericClient client = provideClient(); - IBaseParameters input = buildValidateCodeInputParameters(theCodeSystem, theCode, theDisplay, theValueSetUrl, theValueSet); + IBaseParameters input = + buildValidateCodeInputParameters(theCodeSystem, theCode, theDisplay, theValueSetUrl, theValueSet); String resourceType = "ValueSet"; if (theValueSet == null && theValueSetUrl == null) { resourceType = "CodeSystem"; } - IBaseParameters output = client - .operation() - .onType(resourceType) - .named("validate-code") - .withParameters(input) - .execute(); + IBaseParameters output = client.operation() + .onType(resourceType) + .named("validate-code") + .withParameters(input) + .execute(); List resultValues = ParametersUtil.getNamedParameterValuesAsString(getFhirContext(), output, "result"); if (resultValues.size() < 1 || isBlank(resultValues.get(0))) { @@ -388,7 +472,8 @@ public class RemoteTerminologyServiceValidationSupport extends BaseValidationSup if (success) { retVal.setCode(theCode); - List displayValues = ParametersUtil.getNamedParameterValuesAsString(getFhirContext(), output, "display"); + List displayValues = + ParametersUtil.getNamedParameterValuesAsString(getFhirContext(), output, "display"); if (displayValues.size() > 0) { retVal.setDisplay(displayValues.get(0)); } @@ -396,16 +481,17 @@ public class RemoteTerminologyServiceValidationSupport extends BaseValidationSup } else { retVal.setSeverity(IssueSeverity.ERROR); - List messageValues = ParametersUtil.getNamedParameterValuesAsString(getFhirContext(), output, 
"message"); + List messageValues = + ParametersUtil.getNamedParameterValuesAsString(getFhirContext(), output, "message"); if (messageValues.size() > 0) { retVal.setMessage(messageValues.get(0)); } - } return retVal; } - protected IBaseParameters buildValidateCodeInputParameters(String theCodeSystem, String theCode, String theDisplay, String theValueSetUrl, IBaseResource theValueSet) { + protected IBaseParameters buildValidateCodeInputParameters( + String theCodeSystem, String theCode, String theDisplay, String theValueSetUrl, IBaseResource theValueSet) { IBaseParameters params = ParametersUtil.newInstance(getFhirContext()); if (theValueSet == null && theValueSetUrl == null) { diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/RemoteTerminologyUtil.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/RemoteTerminologyUtil.java index 9010deb2c37..02e140adc61 100644 --- a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/RemoteTerminologyUtil.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/RemoteTerminologyUtil.java @@ -23,25 +23,30 @@ import java.util.Optional; public final class RemoteTerminologyUtil { private RemoteTerminologyUtil() {} - public static IBaseParameters buildTranslateInputParameters(FhirContext fhirContext, IValidationSupport.TranslateCodeRequest theRequest) { + public static IBaseParameters buildTranslateInputParameters( + FhirContext fhirContext, IValidationSupport.TranslateCodeRequest theRequest) { IBaseParameters params = ParametersUtil.newInstance(fhirContext); if (!StringUtils.isEmpty(theRequest.getConceptMapUrl())) { ParametersUtil.addParameterToParametersUri(fhirContext, params, "url", theRequest.getConceptMapUrl()); } if (!StringUtils.isEmpty(theRequest.getConceptMapVersion())) { - ParametersUtil.addParameterToParametersString(fhirContext, params, "conceptMapVersion", theRequest.getConceptMapVersion()); + ParametersUtil.addParameterToParametersString( + fhirContext, params, "conceptMapVersion", theRequest.getConceptMapVersion()); } if (theRequest.getCodings() != null) { addCodingsToTranslateParameters(fhirContext, theRequest.getCodings(), params); } if (!StringUtils.isEmpty(theRequest.getSourceValueSetUrl())) { - ParametersUtil.addParameterToParametersUri(fhirContext, params, "source", theRequest.getSourceValueSetUrl()); + ParametersUtil.addParameterToParametersUri( + fhirContext, params, "source", theRequest.getSourceValueSetUrl()); } if (!StringUtils.isEmpty(theRequest.getTargetValueSetUrl())) { - ParametersUtil.addParameterToParametersUri(fhirContext, params, "target", theRequest.getTargetValueSetUrl()); + ParametersUtil.addParameterToParametersUri( + fhirContext, params, "target", theRequest.getTargetValueSetUrl()); } if (!StringUtils.isEmpty(theRequest.getTargetSystemUrl())) { - ParametersUtil.addParameterToParametersUri(fhirContext, params, "targetsystem", theRequest.getTargetSystemUrl()); + ParametersUtil.addParameterToParametersUri( + fhirContext, params, "targetsystem", theRequest.getTargetSystemUrl()); } if (theRequest.isReverse()) { ParametersUtil.addParameterToParametersBoolean(fhirContext, params, "reverse", theRequest.isReverse()); @@ -50,14 +55,19 @@ public final class RemoteTerminologyUtil { return params; } - public static void addCodingsToTranslateParameters(FhirContext fhirContext, List theCodings, IBaseParameters theParams) { - BaseRuntimeElementCompositeDefinition codeableConceptDef = 
(BaseRuntimeElementCompositeDefinition) Objects.requireNonNull(fhirContext.getElementDefinition("CodeableConcept")); + public static void addCodingsToTranslateParameters( + FhirContext fhirContext, List theCodings, IBaseParameters theParams) { + BaseRuntimeElementCompositeDefinition codeableConceptDef = (BaseRuntimeElementCompositeDefinition) + Objects.requireNonNull(fhirContext.getElementDefinition("CodeableConcept")); BaseRuntimeChildDefinition codings = codeableConceptDef.getChildByName("coding"); - BaseRuntimeElementCompositeDefinition codingDef = (BaseRuntimeElementCompositeDefinition) Objects.requireNonNull(fhirContext.getElementDefinition("Coding")); + BaseRuntimeElementCompositeDefinition codingDef = (BaseRuntimeElementCompositeDefinition) + Objects.requireNonNull(fhirContext.getElementDefinition("Coding")); BaseRuntimeChildDefinition codingSystemChild = codingDef.getChildByName("system"); BaseRuntimeChildDefinition codingCodeChild = codingDef.getChildByName("code"); - BaseRuntimeElementDefinition> systemDef = (RuntimePrimitiveDatatypeDefinition) fhirContext.getElementDefinition("uri"); - BaseRuntimeElementDefinition> codeDef = (RuntimePrimitiveDatatypeDefinition) fhirContext.getElementDefinition("code"); + BaseRuntimeElementDefinition> systemDef = + (RuntimePrimitiveDatatypeDefinition) fhirContext.getElementDefinition("uri"); + BaseRuntimeElementDefinition> codeDef = + (RuntimePrimitiveDatatypeDefinition) fhirContext.getElementDefinition("code"); IBase codeableConcept = codeableConceptDef.newInstance(); @@ -94,7 +104,8 @@ public final class RemoteTerminologyUtil { return retVal; } - private static List matchesToTranslateConceptResults(FhirContext fhirContext, List theMatches) { + private static List matchesToTranslateConceptResults( + FhirContext fhirContext, List theMatches) { List resultList = new ArrayList(); for (IBase m : theMatches) { TranslateConceptResult match = new TranslateConceptResult(); diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/SnapshotGeneratingValidationSupport.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/SnapshotGeneratingValidationSupport.java index 17745a30122..f98c6f8b53b 100644 --- a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/SnapshotGeneratingValidationSupport.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/SnapshotGeneratingValidationSupport.java @@ -51,13 +51,19 @@ public class SnapshotGeneratingValidationSupport implements IValidationSupport { @SuppressWarnings("EnhancedSwitchMigration") @Override - public IBaseResource generateSnapshot(ValidationSupportContext theValidationSupportContext, IBaseResource theInput, String theUrl, String theWebUrl, String theProfileName) { + public IBaseResource generateSnapshot( + ValidationSupportContext theValidationSupportContext, + IBaseResource theInput, + String theUrl, + String theWebUrl, + String theProfileName) { String inputUrl = null; try { FhirVersionEnum version = theInput.getStructureFhirVersionEnum(); - org.hl7.fhir.r5.model.StructureDefinition inputCanonical = myVersionCanonicalizer.structureDefinitionToCanonical(theInput); + org.hl7.fhir.r5.model.StructureDefinition inputCanonical = + myVersionCanonicalizer.structureDefinitionToCanonical(theInput); inputUrl = inputCanonical.getUrl(); if (theValidationSupportContext.getCurrentlyGeneratingSnapshots().contains(inputUrl)) { @@ -68,54 +74,92 @@ public class SnapshotGeneratingValidationSupport 
implements IValidationSupport { String baseDefinition = inputCanonical.getBaseDefinition(); if (isBlank(baseDefinition)) { - throw new PreconditionFailedException(Msg.code(704) + "StructureDefinition[id=" + inputCanonical.getIdElement().getId() + ", url=" + inputCanonical.getUrl() + "] has no base"); + throw new PreconditionFailedException(Msg.code(704) + "StructureDefinition[id=" + + inputCanonical.getIdElement().getId() + ", url=" + inputCanonical.getUrl() + "] has no base"); } - IBaseResource base = theValidationSupportContext.getRootValidationSupport().fetchStructureDefinition(baseDefinition); + IBaseResource base = + theValidationSupportContext.getRootValidationSupport().fetchStructureDefinition(baseDefinition); if (base == null) { throw new PreconditionFailedException(Msg.code(705) + "Unknown base definition: " + baseDefinition); } - org.hl7.fhir.r5.model.StructureDefinition baseCanonical = myVersionCanonicalizer.structureDefinitionToCanonical(base); + org.hl7.fhir.r5.model.StructureDefinition baseCanonical = + myVersionCanonicalizer.structureDefinitionToCanonical(base); if (baseCanonical.getSnapshot().getElement().isEmpty()) { // If the base definition also doesn't have a snapshot, generate that first - theValidationSupportContext.getRootValidationSupport().generateSnapshot(theValidationSupportContext, base, null, null, null); + theValidationSupportContext + .getRootValidationSupport() + .generateSnapshot(theValidationSupportContext, base, null, null, null); baseCanonical = myVersionCanonicalizer.structureDefinitionToCanonical(base); } ArrayList messages = new ArrayList<>(); ProfileKnowledgeProvider profileKnowledgeProvider = new ProfileKnowledgeWorkerR5(myCtx); - IWorkerContext context = new VersionSpecificWorkerContextWrapper(theValidationSupportContext, myVersionCanonicalizer); + IWorkerContext context = + new VersionSpecificWorkerContextWrapper(theValidationSupportContext, myVersionCanonicalizer); ProfileUtilities profileUtilities = new ProfileUtilities(context, messages, profileKnowledgeProvider); profileUtilities.generateSnapshot(baseCanonical, inputCanonical, theUrl, theWebUrl, theProfileName); - switch (getFhirVersionEnum(theValidationSupportContext.getRootValidationSupport().getFhirContext(), theInput)) { + switch (getFhirVersionEnum( + theValidationSupportContext.getRootValidationSupport().getFhirContext(), theInput)) { case DSTU3: - org.hl7.fhir.dstu3.model.StructureDefinition generatedDstu3 = (org.hl7.fhir.dstu3.model.StructureDefinition) myVersionCanonicalizer.structureDefinitionFromCanonical(inputCanonical); - ((org.hl7.fhir.dstu3.model.StructureDefinition) theInput).getSnapshot().getElement().clear(); - ((org.hl7.fhir.dstu3.model.StructureDefinition) theInput).getSnapshot().getElement().addAll(generatedDstu3.getSnapshot().getElement()); + org.hl7.fhir.dstu3.model.StructureDefinition generatedDstu3 = + (org.hl7.fhir.dstu3.model.StructureDefinition) + myVersionCanonicalizer.structureDefinitionFromCanonical(inputCanonical); + ((org.hl7.fhir.dstu3.model.StructureDefinition) theInput) + .getSnapshot() + .getElement() + .clear(); + ((org.hl7.fhir.dstu3.model.StructureDefinition) theInput) + .getSnapshot() + .getElement() + .addAll(generatedDstu3.getSnapshot().getElement()); break; case R4: - org.hl7.fhir.r4.model.StructureDefinition generatedR4 = (org.hl7.fhir.r4.model.StructureDefinition) myVersionCanonicalizer.structureDefinitionFromCanonical(inputCanonical); - ((org.hl7.fhir.r4.model.StructureDefinition) theInput).getSnapshot().getElement().clear(); - 
((org.hl7.fhir.r4.model.StructureDefinition) theInput).getSnapshot().getElement().addAll(generatedR4.getSnapshot().getElement()); + org.hl7.fhir.r4.model.StructureDefinition generatedR4 = (org.hl7.fhir.r4.model.StructureDefinition) + myVersionCanonicalizer.structureDefinitionFromCanonical(inputCanonical); + ((org.hl7.fhir.r4.model.StructureDefinition) theInput) + .getSnapshot() + .getElement() + .clear(); + ((org.hl7.fhir.r4.model.StructureDefinition) theInput) + .getSnapshot() + .getElement() + .addAll(generatedR4.getSnapshot().getElement()); break; case R4B: - org.hl7.fhir.r4b.model.StructureDefinition generatedR4b = (org.hl7.fhir.r4b.model.StructureDefinition) myVersionCanonicalizer.structureDefinitionFromCanonical(inputCanonical); - ((org.hl7.fhir.r4b.model.StructureDefinition) theInput).getSnapshot().getElement().clear(); - ((org.hl7.fhir.r4b.model.StructureDefinition) theInput).getSnapshot().getElement().addAll(generatedR4b.getSnapshot().getElement()); + org.hl7.fhir.r4b.model.StructureDefinition generatedR4b = + (org.hl7.fhir.r4b.model.StructureDefinition) + myVersionCanonicalizer.structureDefinitionFromCanonical(inputCanonical); + ((org.hl7.fhir.r4b.model.StructureDefinition) theInput) + .getSnapshot() + .getElement() + .clear(); + ((org.hl7.fhir.r4b.model.StructureDefinition) theInput) + .getSnapshot() + .getElement() + .addAll(generatedR4b.getSnapshot().getElement()); break; case R5: - org.hl7.fhir.r5.model.StructureDefinition generatedR5 = (org.hl7.fhir.r5.model.StructureDefinition) myVersionCanonicalizer.structureDefinitionFromCanonical(inputCanonical); - ((org.hl7.fhir.r5.model.StructureDefinition) theInput).getSnapshot().getElement().clear(); - ((org.hl7.fhir.r5.model.StructureDefinition) theInput).getSnapshot().getElement().addAll(generatedR5.getSnapshot().getElement()); + org.hl7.fhir.r5.model.StructureDefinition generatedR5 = (org.hl7.fhir.r5.model.StructureDefinition) + myVersionCanonicalizer.structureDefinitionFromCanonical(inputCanonical); + ((org.hl7.fhir.r5.model.StructureDefinition) theInput) + .getSnapshot() + .getElement() + .clear(); + ((org.hl7.fhir.r5.model.StructureDefinition) theInput) + .getSnapshot() + .getElement() + .addAll(generatedR5.getSnapshot().getElement()); break; case DSTU2: case DSTU2_HL7ORG: case DSTU2_1: default: - throw new IllegalStateException(Msg.code(706) + "Can not generate snapshot for version: " + version); + throw new IllegalStateException( + Msg.code(706) + "Can not generate snapshot for version: " + version); } return theInput; @@ -135,5 +179,4 @@ public class SnapshotGeneratingValidationSupport implements IValidationSupport { public FhirContext getFhirContext() { return myCtx; } - } diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/UnknownCodeSystemWarningValidationSupport.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/UnknownCodeSystemWarningValidationSupport.java index 18392bd55a5..26617914305 100644 --- a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/UnknownCodeSystemWarningValidationSupport.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/UnknownCodeSystemWarningValidationSupport.java @@ -45,18 +45,27 @@ public class UnknownCodeSystemWarningValidationSupport extends BaseValidationSup @Nullable @Override - public LookupCodeResult lookupCode(ValidationSupportContext theValidationSupportContext, String theSystem, String theCode, String theDisplayLanguage) { + public 
LookupCodeResult lookupCode( + ValidationSupportContext theValidationSupportContext, + String theSystem, + String theCode, + String theDisplayLanguage) { // filters out error/fatal if (canValidateCodeSystem(theValidationSupportContext, theSystem)) { - return new LookupCodeResult() - .setFound(true); + return new LookupCodeResult().setFound(true); } return null; } @Override - public CodeValidationResult validateCode(@Nonnull ValidationSupportContext theValidationSupportContext, @Nonnull ConceptValidationOptions theOptions, String theCodeSystem, String theCode, String theDisplay, String theValueSetUrl) { + public CodeValidationResult validateCode( + @Nonnull ValidationSupportContext theValidationSupportContext, + @Nonnull ConceptValidationOptions theOptions, + String theCodeSystem, + String theCode, + String theDisplay, + String theValueSetUrl) { // filters out error/fatal if (!canValidateCodeSystem(theValidationSupportContext, theCodeSystem)) { return null; @@ -79,15 +88,22 @@ public class UnknownCodeSystemWarningValidationSupport extends BaseValidationSup @Nullable @Override - public CodeValidationResult validateCodeInValueSet(ValidationSupportContext theValidationSupportContext, ConceptValidationOptions theOptions, String theCodeSystem, String theCode, String theDisplay, @Nonnull IBaseResource theValueSet) { + public CodeValidationResult validateCodeInValueSet( + ValidationSupportContext theValidationSupportContext, + ConceptValidationOptions theOptions, + String theCodeSystem, + String theCode, + String theDisplay, + @Nonnull IBaseResource theValueSet) { if (!canValidateCodeSystem(theValidationSupportContext, theCodeSystem)) { return null; } return new CodeValidationResult() - .setCode(theCode) - .setSeverity(IssueSeverity.INFORMATION) - .setMessage("Code " + theCodeSystem + "#" + theCode + " was not checked because the CodeSystem is not available"); + .setCode(theCode) + .setSeverity(IssueSeverity.INFORMATION) + .setMessage("Code " + theCodeSystem + "#" + theCode + + " was not checked because the CodeSystem is not available"); } /** @@ -105,7 +121,7 @@ public class UnknownCodeSystemWarningValidationSupport extends BaseValidationSup return true; default: ourLog.info("Unknown issue severity " + myNonExistentCodeSystemSeverity.name() - + ". Treating as INFO/WARNING"); + + ". 
Treating as INFO/WARNING"); return true; } } @@ -116,15 +132,15 @@ public class UnknownCodeSystemWarningValidationSupport extends BaseValidationSup * @param theCodeSystem * @return */ - private boolean canValidateCodeSystem(ValidationSupportContext theValidationSupportContext, - String theCodeSystem) { + private boolean canValidateCodeSystem(ValidationSupportContext theValidationSupportContext, String theCodeSystem) { if (!allowNonExistentCodeSystems()) { return false; } if (theCodeSystem == null) { return false; } - IBaseResource codeSystem = theValidationSupportContext.getRootValidationSupport().fetchCodeSystem(theCodeSystem); + IBaseResource codeSystem = + theValidationSupportContext.getRootValidationSupport().fetchCodeSystem(theCodeSystem); if (codeSystem != null) { return false; } diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/ValidationConstants.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/ValidationConstants.java index 505d5e720f1..ea4c99bf429 100644 --- a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/ValidationConstants.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/ValidationConstants.java @@ -28,5 +28,5 @@ public class ValidationConstants { public static final String LOINC_GENERIC_VALUESET_URL_PLUS_SLASH = LOINC_GENERIC_VALUESET_URL + "/"; // not to be instantiated - private ValidationConstants() { } + private ValidationConstants() {} } diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/ValidationSupportChain.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/ValidationSupportChain.java index 13051ad308d..70e7ed9d3bf 100644 --- a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/ValidationSupportChain.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/ValidationSupportChain.java @@ -13,13 +13,13 @@ import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.function.Function; +import javax.annotation.Nonnull; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -91,9 +91,15 @@ public class ValidationSupportChain implements IValidationSupport { } @Override - public IBaseResource generateSnapshot(ValidationSupportContext theValidationSupportContext, IBaseResource theInput, String theUrl, String theWebUrl, String theProfileName) { + public IBaseResource generateSnapshot( + ValidationSupportContext theValidationSupportContext, + IBaseResource theInput, + String theUrl, + String theWebUrl, + String theProfileName) { for (IValidationSupport next : myChain) { - IBaseResource retVal = next.generateSnapshot(theValidationSupportContext, theInput, theUrl, theWebUrl, theProfileName); + IBaseResource retVal = + next.generateSnapshot(theValidationSupportContext, theInput, theUrl, theWebUrl, theProfileName); if (retVal != null) { return retVal; } @@ -141,10 +147,12 @@ public class ValidationSupportChain implements IValidationSupport { FhirContext existingFhirContext = getFhirContext(); if (existingFhirContext != null) { - FhirVersionEnum newVersion = 
theValidationSupport.getFhirContext().getVersion().getVersion(); + FhirVersionEnum newVersion = + theValidationSupport.getFhirContext().getVersion().getVersion(); FhirVersionEnum existingVersion = existingFhirContext.getVersion().getVersion(); if (!existingVersion.equals(newVersion)) { - String message = "Trying to add validation support of version " + newVersion + " to chain with " + myChain.size() + " entries of version " + existingVersion; + String message = "Trying to add validation support of version " + newVersion + " to chain with " + + myChain.size() + " entries of version " + existingVersion; throw new ConfigurationException(Msg.code(709) + message); } } @@ -161,10 +169,14 @@ public class ValidationSupportChain implements IValidationSupport { } @Override - public ValueSetExpansionOutcome expandValueSet(ValidationSupportContext theValidationSupportContext, ValueSetExpansionOptions theExpansionOptions, @Nonnull IBaseResource theValueSetToExpand) { + public ValueSetExpansionOutcome expandValueSet( + ValidationSupportContext theValidationSupportContext, + ValueSetExpansionOptions theExpansionOptions, + @Nonnull IBaseResource theValueSetToExpand) { for (IValidationSupport next : myChain) { // TODO: test if code system is supported? - ValueSetExpansionOutcome expanded = next.expandValueSet(theValidationSupportContext, theExpansionOptions, theValueSetToExpand); + ValueSetExpansionOutcome expanded = + next.expandValueSet(theValidationSupportContext, theExpansionOptions, theValueSetToExpand); if (expanded != null) { return expanded; } @@ -175,7 +187,9 @@ public class ValidationSupportChain implements IValidationSupport { @Override public boolean isRemoteTerminologyServiceConfigured() { if (myChain != null) { - Optional remoteTerminologyService = myChain.stream().filter(RemoteTerminologyServiceValidationSupport.class::isInstance).findFirst(); + Optional remoteTerminologyService = myChain.stream() + .filter(RemoteTerminologyServiceValidationSupport.class::isInstance) + .findFirst(); if (remoteTerminologyService.isPresent()) { return true; } @@ -197,15 +211,16 @@ public class ValidationSupportChain implements IValidationSupport { @Override public List fetchAllStructureDefinitions() { - return doFetchStructureDefinitions(t->t.fetchAllStructureDefinitions()); + return doFetchStructureDefinitions(t -> t.fetchAllStructureDefinitions()); } @Override public List fetchAllNonBaseStructureDefinitions() { - return doFetchStructureDefinitions(t->t.fetchAllNonBaseStructureDefinitions()); + return doFetchStructureDefinitions(t -> t.fetchAllNonBaseStructureDefinitions()); } - private List doFetchStructureDefinitions(Function> theFunction) { + private List doFetchStructureDefinitions( + Function> theFunction) { ArrayList retVal = new ArrayList<>(); Set urls = new HashSet<>(); for (IValidationSupport nextSupport : myChain) { @@ -213,8 +228,11 @@ public class ValidationSupportChain implements IValidationSupport { if (allStructureDefinitions != null) { for (IBaseResource next : allStructureDefinitions) { - IPrimitiveType urlType = getFhirContext().newTerser().getSingleValueOrNull(next, "url", IPrimitiveType.class); - if (urlType == null || isBlank(urlType.getValueAsString()) || urls.add(urlType.getValueAsString())) { + IPrimitiveType urlType = + getFhirContext().newTerser().getSingleValueOrNull(next, "url", IPrimitiveType.class); + if (urlType == null + || isBlank(urlType.getValueAsString()) + || urls.add(urlType.getValueAsString())) { retVal.add(next); } } @@ -245,7 +263,6 @@ public class 
ValidationSupportChain implements IValidationSupport { return null; } - @Override public T fetchResource(Class theClass, String theUri) { for (IValidationSupport next : myChain) { @@ -290,10 +307,19 @@ public class ValidationSupportChain implements IValidationSupport { } @Override - public CodeValidationResult validateCode(@Nonnull ValidationSupportContext theValidationSupportContext, @Nonnull ConceptValidationOptions theOptions, String theCodeSystem, String theCode, String theDisplay, String theValueSetUrl) { + public CodeValidationResult validateCode( + @Nonnull ValidationSupportContext theValidationSupportContext, + @Nonnull ConceptValidationOptions theOptions, + String theCodeSystem, + String theCode, + String theDisplay, + String theValueSetUrl) { for (IValidationSupport next : myChain) { - if ((isBlank(theValueSetUrl) && next.isCodeSystemSupported(theValidationSupportContext, theCodeSystem)) || (isNotBlank(theValueSetUrl) && next.isValueSetSupported(theValidationSupportContext, theValueSetUrl))) { - CodeValidationResult retVal = next.validateCode(theValidationSupportContext, theOptions, theCodeSystem, theCode, theDisplay, theValueSetUrl); + if ((isBlank(theValueSetUrl) && next.isCodeSystemSupported(theValidationSupportContext, theCodeSystem)) + || (isNotBlank(theValueSetUrl) + && next.isValueSetSupported(theValidationSupportContext, theValueSetUrl))) { + CodeValidationResult retVal = next.validateCode( + theValidationSupportContext, theOptions, theCodeSystem, theCode, theDisplay, theValueSetUrl); if (retVal != null) { return retVal; } @@ -303,11 +329,18 @@ public class ValidationSupportChain implements IValidationSupport { } @Override - public CodeValidationResult validateCodeInValueSet(ValidationSupportContext theValidationSupportContext, ConceptValidationOptions theOptions, String theCodeSystem, String theCode, String theDisplay, @Nonnull IBaseResource theValueSet) { + public CodeValidationResult validateCodeInValueSet( + ValidationSupportContext theValidationSupportContext, + ConceptValidationOptions theOptions, + String theCodeSystem, + String theCode, + String theDisplay, + @Nonnull IBaseResource theValueSet) { for (IValidationSupport next : myChain) { String url = CommonCodeSystemsTerminologyService.getValueSetUrl(getFhirContext(), theValueSet); if (isBlank(url) || next.isValueSetSupported(theValidationSupportContext, url)) { - CodeValidationResult retVal = next.validateCodeInValueSet(theValidationSupportContext, theOptions, theCodeSystem, theCode, theDisplay, theValueSet); + CodeValidationResult retVal = next.validateCodeInValueSet( + theValidationSupportContext, theOptions, theCodeSystem, theCode, theDisplay, theValueSet); if (retVal != null) { return retVal; } @@ -317,7 +350,11 @@ public class ValidationSupportChain implements IValidationSupport { } @Override - public LookupCodeResult lookupCode(ValidationSupportContext theValidationSupportContext, String theSystem, String theCode, String theDisplayLanguage) { + public LookupCodeResult lookupCode( + ValidationSupportContext theValidationSupportContext, + String theSystem, + String theCode, + String theDisplayLanguage) { for (IValidationSupport next : myChain) { if (next.isCodeSystemSupported(theValidationSupportContext, theSystem)) { return next.lookupCode(theValidationSupportContext, theSystem, theCode, theDisplayLanguage); diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/BaseValidatorBridge.java 
b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/BaseValidatorBridge.java index fbaff74248b..0985a37cbf9 100644 --- a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/BaseValidatorBridge.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/BaseValidatorBridge.java @@ -32,7 +32,8 @@ abstract class BaseValidatorBridge implements IValidatorModule { hapiMessage.setLocationString(riMessage.getLocation()); hapiMessage.setMessage(riMessage.getMessage()); if (riMessage.getLevel() != null) { - hapiMessage.setSeverity(ResultSeverityEnum.fromCode(riMessage.getLevel().toCode())); + hapiMessage.setSeverity( + ResultSeverityEnum.fromCode(riMessage.getLevel().toCode())); } if (riMessage.getMessageId() != null) { hapiMessage.setMessageId(riMessage.getMessageId()); @@ -47,5 +48,4 @@ abstract class BaseValidatorBridge implements IValidatorModule { public void validateResource(IValidationContext theCtx) { doValidate(theCtx); } - } diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/FHIRPathResourceGeneratorR4.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/FHIRPathResourceGeneratorR4.java index 2b030fb56ff..c092dd751f9 100644 --- a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/FHIRPathResourceGeneratorR4.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/FHIRPathResourceGeneratorR4.java @@ -1,24 +1,23 @@ package org.hl7.fhir.common.hapi.validation.validator; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Stack; - -import org.hl7.fhir.r4.utils.FHIRPathEngine; -import org.hl7.fhir.instance.model.api.IBase; -import org.hl7.fhir.instance.model.api.ICompositeType; -import org.hl7.fhir.instance.model.api.IPrimitiveType; -import org.hl7.fhir.r4.hapi.ctx.HapiWorkerContext; -import org.hl7.fhir.r4.model.ExpressionNode; -import org.hl7.fhir.r4.model.Resource; - import ca.uhn.fhir.context.BaseRuntimeChildDefinition; import ca.uhn.fhir.context.BaseRuntimeElementDefinition; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.RuntimeCompositeDatatypeDefinition; import ca.uhn.fhir.context.RuntimePrimitiveDatatypeDefinition; +import org.hl7.fhir.instance.model.api.IBase; +import org.hl7.fhir.instance.model.api.ICompositeType; +import org.hl7.fhir.instance.model.api.IPrimitiveType; +import org.hl7.fhir.r4.hapi.ctx.HapiWorkerContext; +import org.hl7.fhir.r4.model.ExpressionNode; +import org.hl7.fhir.r4.model.Resource; +import org.hl7.fhir.r4.utils.FHIRPathEngine; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Stack; /** * This class can be used to generate resources using FHIRPath expressions. @@ -31,617 +30,622 @@ import ca.uhn.fhir.context.RuntimePrimitiveDatatypeDefinition; */ public class FHIRPathResourceGeneratorR4 { - private FhirContext ctx; - private FHIRPathEngine engine; - private Map pathMapping; - private T resource = null; + private FhirContext ctx; + private FHIRPathEngine engine; + private Map pathMapping; + private T resource = null; - private String valueToSet = null; - private Stack nodeStack = null; + private String valueToSet = null; + private Stack nodeStack = null; - /** - * The GenerationTier summarizes some variables that are needed to create FHIR - * elements later on. 
- */ - class GenerationTier { - // The RuntimeDefinition of nodes - public BaseRuntimeElementDefinition nodeDefinition = null; - // The actual nodes, i.e. the instances that hold the values - public List nodes = new ArrayList<>(); - // The ChildDefinition applied to the parent (i.e. one of the nodes from a lower - // GenerationTier) to create nodes - public BaseRuntimeChildDefinition childDefinition = null; - // The path segment name of nodes - public String fhirPathName = null; + /** + * The GenerationTier summarizes some variables that are needed to create FHIR + * elements later on. + */ + class GenerationTier { + // The RuntimeDefinition of nodes + public BaseRuntimeElementDefinition nodeDefinition = null; + // The actual nodes, i.e. the instances that hold the values + public List nodes = new ArrayList<>(); + // The ChildDefinition applied to the parent (i.e. one of the nodes from a lower + // GenerationTier) to create nodes + public BaseRuntimeChildDefinition childDefinition = null; + // The path segment name of nodes + public String fhirPathName = null; - public GenerationTier() { - } + public GenerationTier() {} - public GenerationTier(BaseRuntimeElementDefinition nodeDef, IBase firstNode) { - this.nodeDefinition = nodeDef; - this.nodes.add(firstNode); - } - } + public GenerationTier(BaseRuntimeElementDefinition nodeDef, IBase firstNode) { + this.nodeDefinition = nodeDef; + this.nodes.add(firstNode); + } + } - /** - * Constructor without parameters, needs a call to `setMapping` later on in - * order to generate any Resources. - */ - public FHIRPathResourceGeneratorR4() { - this.pathMapping = new HashMap(); - this.ctx = FhirContext.forR4(); - this.engine = new FHIRPathEngine(new HapiWorkerContext(ctx, ctx.getValidationSupport())); - } + /** + * Constructor without parameters, needs a call to `setMapping` later on in + * order to generate any Resources. + */ + public FHIRPathResourceGeneratorR4() { + this.pathMapping = new HashMap(); + this.ctx = FhirContext.forR4(); + this.engine = new FHIRPathEngine(new HapiWorkerContext(ctx, ctx.getValidationSupport())); + } - /** - * Constructor that allows to provide a mapping right away. - * - * @param mapping Map a mapping of FHIRPath to value Strings - * that will be used to create a Resource. - */ - public FHIRPathResourceGeneratorR4(Map mapping) { - this(); - this.setMapping(mapping); - } + /** + * Constructor that allows to provide a mapping right away. + * + * @param mapping Map a mapping of FHIRPath to value Strings + * that will be used to create a Resource. + */ + public FHIRPathResourceGeneratorR4(Map mapping) { + this(); + this.setMapping(mapping); + } - /** - * Setter for the FHIRPath mapping Map instance. - * - * @param mapping Map a mapping of FHIRPath to value Strings - * that will be used to create a Resource. - */ - public void setMapping(Map mapping) { - this.pathMapping = mapping; - } + /** + * Setter for the FHIRPath mapping Map instance. + * + * @param mapping Map a mapping of FHIRPath to value Strings + * that will be used to create a Resource. + */ + public void setMapping(Map mapping) { + this.pathMapping = mapping; + } - /** - * Getter for a generated Resource. null if no Resource has been generated yet. - * - * @return T the generated Resource or null. - */ - public T getResource() { - return this.resource; - } + /** + * Getter for a generated Resource. null if no Resource has been generated yet. + * + * @return T the generated Resource or null. 
+ */ + public T getResource() { + return this.resource; + } - /** - * Prepares the internal state prior to generating a FHIR Resource. Called once - * upon generation at the start. - * - * @param resourceClass Class The class of the Resource that shall be created - * (an empty Resource will be created in this method). - */ - @SuppressWarnings("unchecked") - private void prepareInternalState(Class resourceClass) { - this.resource = (T) this.ctx.getResourceDefinition(resourceClass).newInstance(); - } + /** + * Prepares the internal state prior to generating a FHIR Resource. Called once + * upon generation at the start. + * + * @param resourceClass Class The class of the Resource that shall be created + * (an empty Resource will be created in this method). + */ + @SuppressWarnings("unchecked") + private void prepareInternalState(Class resourceClass) { + this.resource = (T) this.ctx.getResourceDefinition(resourceClass).newInstance(); + } - /** - * The generation method that yields a new instance of class `resourceClass` - * with every value set in the FHIRPath mapping. - * - * @param resourceClass Class The class of the Resource that shall be - * created. - * @return T a new FHIR Resource instance of class `resourceClass`. - */ - public T generateResource(Class resourceClass) { - this.prepareInternalState(resourceClass); + /** + * The generation method that yields a new instance of class `resourceClass` + * with every value set in the FHIRPath mapping. + * + * @param resourceClass Class The class of the Resource that shall be + * created. + * @return T a new FHIR Resource instance of class `resourceClass`. + */ + public T generateResource(Class resourceClass) { + this.prepareInternalState(resourceClass); - for (String fhirPath : this.sortedPaths()) { - // prepare the next fhirPath iteration: create a new nodeStack and set the value - this.nodeStack = new Stack<>(); - this.nodeStack.push(new GenerationTier(this.ctx.getResourceDefinition(this.resource), this.resource)); - this.valueToSet = this.pathMapping.get(fhirPath); + for (String fhirPath : this.sortedPaths()) { + // prepare the next fhirPath iteration: create a new nodeStack and set the value + this.nodeStack = new Stack<>(); + this.nodeStack.push(new GenerationTier(this.ctx.getResourceDefinition(this.resource), this.resource)); + this.valueToSet = this.pathMapping.get(fhirPath); - // pathNode is the part of the FHIRPath we are processing - ExpressionNode pathNode = this.engine.parse(fhirPath); - while (pathNode != null) { - switch (pathNode.getKind()) { - case Name: - this.handleNameNode(pathNode); - break; - case Function: - this.handleFunctionNode(pathNode); - break; - case Constant: - case Group: - case Unary: - // TODO: unimplmemented, what to do? - break; - } - pathNode = pathNode.getInner(); - } - } + // pathNode is the part of the FHIRPath we are processing + ExpressionNode pathNode = this.engine.parse(fhirPath); + while (pathNode != null) { + switch (pathNode.getKind()) { + case Name: + this.handleNameNode(pathNode); + break; + case Function: + this.handleFunctionNode(pathNode); + break; + case Constant: + case Group: + case Unary: + // TODO: unimplmemented, what to do? 
+ break; + } + pathNode = pathNode.getInner(); + } + } - this.nodeStack = null; - return this.resource; - } + this.nodeStack = null; + return this.resource; + } - /* - * Handling Named nodes - */ + /* + * Handling Named nodes + */ - /** - * Handles a named node, either adding a new layer to the `nodeStack` when - * reaching a Composite Node or adding the value for Primitive Nodes. - * - * @param fhirPath String the FHIRPath section for the next GenerationTier. - */ - private void handleNameNode(ExpressionNode fhirPath) { - BaseRuntimeChildDefinition childDef = this.nodeStack.peek().nodeDefinition.getChildByName(fhirPath.getName()); - if (childDef == null) { - // nothing to do - return; - } + /** + * Handles a named node, either adding a new layer to the `nodeStack` when + * reaching a Composite Node or adding the value for Primitive Nodes. + * + * @param fhirPath String the FHIRPath section for the next GenerationTier. + */ + private void handleNameNode(ExpressionNode fhirPath) { + BaseRuntimeChildDefinition childDef = + this.nodeStack.peek().nodeDefinition.getChildByName(fhirPath.getName()); + if (childDef == null) { + // nothing to do + return; + } - // identify the type of named node we need to handle here by getting the runtime - // definition type - switch (childDef.getChildByName(fhirPath.getName()).getChildType()) { - case COMPOSITE_DATATYPE: - handleCompositeNode(fhirPath); - break; + // identify the type of named node we need to handle here by getting the runtime + // definition type + switch (childDef.getChildByName(fhirPath.getName()).getChildType()) { + case COMPOSITE_DATATYPE: + handleCompositeNode(fhirPath); + break; - case PRIMITIVE_DATATYPE: - handlePrimitiveNode(fhirPath); - break; + case PRIMITIVE_DATATYPE: + handlePrimitiveNode(fhirPath); + break; - case ID_DATATYPE: - case RESOURCE: - case CONTAINED_RESOURCE_LIST: - case CONTAINED_RESOURCES: - case EXTENSION_DECLARED: - case PRIMITIVE_XHTML: - case PRIMITIVE_XHTML_HL7ORG: - case RESOURCE_BLOCK: - case UNDECL_EXT: - // TODO: not implemented. What to do? - } - } + case ID_DATATYPE: + case RESOURCE: + case CONTAINED_RESOURCE_LIST: + case CONTAINED_RESOURCES: + case EXTENSION_DECLARED: + case PRIMITIVE_XHTML: + case PRIMITIVE_XHTML_HL7ORG: + case RESOURCE_BLOCK: + case UNDECL_EXT: + // TODO: not implemented. What to do? + } + } - /** - * Handles primitive nodes with regards to the current latest tier of the - * nodeStack. Sets a primitive value to all nodes. - * - * @param fhirPath ExpressionNode segment of the fhirPath that specifies the - * primitive value to set. - */ - private void handlePrimitiveNode(ExpressionNode fhirPath) { - // Get the child definition from the parent - BaseRuntimeChildDefinition childDefinition = this.nodeStack.peek().nodeDefinition - .getChildByName(fhirPath.getName()); - // Get the primitive type definition from the childDeftinion - RuntimePrimitiveDatatypeDefinition primitiveTarget = (RuntimePrimitiveDatatypeDefinition) childDefinition - .getChildByName(fhirPath.getName()); - for (IBase nodeElement : this.nodeStack.peek().nodes) { - // add the primitive value to each parent node - IPrimitiveType primitive = primitiveTarget - .newInstance(childDefinition.getInstanceConstructorArguments()); - primitive.setValueAsString(this.valueToSet); - childDefinition.getMutator().addValue(nodeElement, primitive); - } - } + /** + * Handles primitive nodes with regards to the current latest tier of the + * nodeStack. Sets a primitive value to all nodes. 
+ * + * @param fhirPath ExpressionNode segment of the fhirPath that specifies the + * primitive value to set. + */ + private void handlePrimitiveNode(ExpressionNode fhirPath) { + // Get the child definition from the parent + BaseRuntimeChildDefinition childDefinition = + this.nodeStack.peek().nodeDefinition.getChildByName(fhirPath.getName()); + // Get the primitive type definition from the childDeftinion + RuntimePrimitiveDatatypeDefinition primitiveTarget = + (RuntimePrimitiveDatatypeDefinition) childDefinition.getChildByName(fhirPath.getName()); + for (IBase nodeElement : this.nodeStack.peek().nodes) { + // add the primitive value to each parent node + IPrimitiveType primitive = + primitiveTarget.newInstance(childDefinition.getInstanceConstructorArguments()); + primitive.setValueAsString(this.valueToSet); + childDefinition.getMutator().addValue(nodeElement, primitive); + } + } - /** - * Handles a composite node with regards to the current latest tier of the - * nodeStack. Creates a new node based on fhirPath if none are available. - * - * @param fhirPath ExpressionNode the segment of the FHIRPath that is being - * handled right now. - */ - private void handleCompositeNode(ExpressionNode fhirPath) { - GenerationTier nextTier = new GenerationTier(); - // get the name of the FHIRPath for the next tier - nextTier.fhirPathName = fhirPath.getName(); - // get the child definition from the parent nodePefinition - nextTier.childDefinition = this.nodeStack.peek().nodeDefinition.getChildByName(fhirPath.getName()); - // create a nodeDefinition for the next tier - nextTier.nodeDefinition = nextTier.childDefinition.getChildByName(nextTier.fhirPathName); + /** + * Handles a composite node with regards to the current latest tier of the + * nodeStack. Creates a new node based on fhirPath if none are available. + * + * @param fhirPath ExpressionNode the segment of the FHIRPath that is being + * handled right now. 
+ */ + private void handleCompositeNode(ExpressionNode fhirPath) { + GenerationTier nextTier = new GenerationTier(); + // get the name of the FHIRPath for the next tier + nextTier.fhirPathName = fhirPath.getName(); + // get the child definition from the parent nodePefinition + nextTier.childDefinition = this.nodeStack.peek().nodeDefinition.getChildByName(fhirPath.getName()); + // create a nodeDefinition for the next tier + nextTier.nodeDefinition = nextTier.childDefinition.getChildByName(nextTier.fhirPathName); - RuntimeCompositeDatatypeDefinition compositeTarget = (RuntimeCompositeDatatypeDefinition) nextTier.nodeDefinition; - // iterate through all parent nodes - for (IBase nodeElement : this.nodeStack.peek().nodes) { - List containedNodes = nextTier.childDefinition.getAccessor().getValues(nodeElement); - if (containedNodes.size() > 0) { - // check if sister nodes are already available - nextTier.nodes.addAll(containedNodes); - } else { - // if not nodes are available, create a new node - ICompositeType compositeNode = compositeTarget - .newInstance(nextTier.childDefinition.getInstanceConstructorArguments()); - nextTier.childDefinition.getMutator().addValue(nodeElement, compositeNode); - nextTier.nodes.add(compositeNode); - } - } - // push the created nextTier to the nodeStack - this.nodeStack.push(nextTier); - } + RuntimeCompositeDatatypeDefinition compositeTarget = + (RuntimeCompositeDatatypeDefinition) nextTier.nodeDefinition; + // iterate through all parent nodes + for (IBase nodeElement : this.nodeStack.peek().nodes) { + List containedNodes = nextTier.childDefinition.getAccessor().getValues(nodeElement); + if (containedNodes.size() > 0) { + // check if sister nodes are already available + nextTier.nodes.addAll(containedNodes); + } else { + // if not nodes are available, create a new node + ICompositeType compositeNode = + compositeTarget.newInstance(nextTier.childDefinition.getInstanceConstructorArguments()); + nextTier.childDefinition.getMutator().addValue(nodeElement, compositeNode); + nextTier.nodes.add(compositeNode); + } + } + // push the created nextTier to the nodeStack + this.nodeStack.push(nextTier); + } - /* - * Handling Function Nodes - */ + /* + * Handling Function Nodes + */ - /** - * Handles a function node of a FHIRPath. - * - * @param fhirPath ExpressionNode the segment of the FHIRPath that is being - * handled right now. 
- */ - private void handleFunctionNode(ExpressionNode fhirPath) { - switch(fhirPath.getFunction()) { - case Where: - this.handleWhereFunctionNode(fhirPath); - break; - case MatchesFull: - case Aggregate: - case Alias: - case AliasAs: - case All: - case AllFalse: - case AllTrue: - case AnyFalse: - case AnyTrue: - case As: - case Check: - case Children: - case Combine: - case ConformsTo: - case Contains: - case ConvertsToBoolean: - case ConvertsToDateTime: - case ConvertsToDecimal: - case ConvertsToInteger: - case ConvertsToQuantity: - case ConvertsToString: - case ConvertsToTime: - case Count: - case Custom: - case Descendants: - case Distinct: - case Empty: - case EndsWith: - case Exclude: - case Exists: - case Extension: - case First: - case HasValue: - case Iif: - case IndexOf: - case Intersect: - case Is: - case IsDistinct: - case Item: - case Last: - case Length: - case Lower: - case Matches: - case MemberOf: - case Not: - case Now: - case OfType: - case Repeat: - case Replace: - case ReplaceMatches: - case Resolve: - case Select: - case Single: - case Skip: - case StartsWith: - case SubsetOf: - case Substring: - case SupersetOf: - case Tail: - case Take: - case ToBoolean: - case ToChars: - case ToDateTime: - case ToDecimal: - case ToInteger: - case ToQuantity: - case ToString: - case ToTime: - case Today: - case Trace: - case Type: - case Union: - case Upper: - // TODO: unimplemented, what to do? - case ConvertsToDate: - break; - case Round: - break; - case Sqrt: - break; - case Abs: - break; - case Ceiling: - break; - case Exp: - break; - case Floor: - break; - case Ln: - break; - case Log: - break; - case Power: - break; - case Truncate: - break; - case Encode: - break; - case Decode: - break; - case Escape: - break; - case Unescape: - break; - case Trim: - break; - case Split: - break; - case Join: - break; - case LowBoundary: - break; - case HighBoundary: - break; - case Precision: - break; - case HtmlChecks1: - break; - case HtmlChecks2: - break; - } - } + /** + * Handles a function node of a FHIRPath. + * + * @param fhirPath ExpressionNode the segment of the FHIRPath that is being + * handled right now. 
+ */ + private void handleFunctionNode(ExpressionNode fhirPath) { + switch (fhirPath.getFunction()) { + case Where: + this.handleWhereFunctionNode(fhirPath); + break; + case MatchesFull: + case Aggregate: + case Alias: + case AliasAs: + case All: + case AllFalse: + case AllTrue: + case AnyFalse: + case AnyTrue: + case As: + case Check: + case Children: + case Combine: + case ConformsTo: + case Contains: + case ConvertsToBoolean: + case ConvertsToDateTime: + case ConvertsToDecimal: + case ConvertsToInteger: + case ConvertsToQuantity: + case ConvertsToString: + case ConvertsToTime: + case Count: + case Custom: + case Descendants: + case Distinct: + case Empty: + case EndsWith: + case Exclude: + case Exists: + case Extension: + case First: + case HasValue: + case Iif: + case IndexOf: + case Intersect: + case Is: + case IsDistinct: + case Item: + case Last: + case Length: + case Lower: + case Matches: + case MemberOf: + case Not: + case Now: + case OfType: + case Repeat: + case Replace: + case ReplaceMatches: + case Resolve: + case Select: + case Single: + case Skip: + case StartsWith: + case SubsetOf: + case Substring: + case SupersetOf: + case Tail: + case Take: + case ToBoolean: + case ToChars: + case ToDateTime: + case ToDecimal: + case ToInteger: + case ToQuantity: + case ToString: + case ToTime: + case Today: + case Trace: + case Type: + case Union: + case Upper: + // TODO: unimplemented, what to do? + case ConvertsToDate: + break; + case Round: + break; + case Sqrt: + break; + case Abs: + break; + case Ceiling: + break; + case Exp: + break; + case Floor: + break; + case Ln: + break; + case Log: + break; + case Power: + break; + case Truncate: + break; + case Encode: + break; + case Decode: + break; + case Escape: + break; + case Unescape: + break; + case Trim: + break; + case Split: + break; + case Join: + break; + case LowBoundary: + break; + case HighBoundary: + break; + case Precision: + break; + case HtmlChecks1: + break; + case HtmlChecks2: + break; + } + } - /** - * Handles a function node of a `where`-function. Iterates through all params - * and handle where functions for primitive datatypes (others are not - * implemented and yield errors.) - * - * @param fhirPath ExpressionNode the segment of the FHIRPath that contains the - * where function - */ - private void handleWhereFunctionNode(ExpressionNode fhirPath) { - // iterate through all where parameters - for (ExpressionNode param : fhirPath.getParameters()) { - BaseRuntimeChildDefinition wherePropertyChild = this.nodeStack.peek().nodeDefinition - .getChildByName(param.getName()); - BaseRuntimeElementDefinition wherePropertyDefinition = wherePropertyChild - .getChildByName(param.getName()); + /** + * Handles a function node of a `where`-function. Iterates through all params + * and handle where functions for primitive datatypes (others are not + * implemented and yield errors.) 
+ * + * @param fhirPath ExpressionNode the segment of the FHIRPath that contains the + * where function + */ + private void handleWhereFunctionNode(ExpressionNode fhirPath) { + // iterate through all where parameters + for (ExpressionNode param : fhirPath.getParameters()) { + BaseRuntimeChildDefinition wherePropertyChild = + this.nodeStack.peek().nodeDefinition.getChildByName(param.getName()); + BaseRuntimeElementDefinition wherePropertyDefinition = + wherePropertyChild.getChildByName(param.getName()); - // only primitive nodes can be checked using the where function - switch(wherePropertyDefinition.getChildType()) { - case PRIMITIVE_DATATYPE: - this.handleWhereFunctionParam(param); - break; - case COMPOSITE_DATATYPE: - case CONTAINED_RESOURCES: - case CONTAINED_RESOURCE_LIST: - case EXTENSION_DECLARED: - case ID_DATATYPE: - case PRIMITIVE_XHTML: - case PRIMITIVE_XHTML_HL7ORG: - case RESOURCE: - case RESOURCE_BLOCK: - case UNDECL_EXT: - // TODO: unimplemented. What to do? - } - } - } + // only primitive nodes can be checked using the where function + switch (wherePropertyDefinition.getChildType()) { + case PRIMITIVE_DATATYPE: + this.handleWhereFunctionParam(param); + break; + case COMPOSITE_DATATYPE: + case CONTAINED_RESOURCES: + case CONTAINED_RESOURCE_LIST: + case EXTENSION_DECLARED: + case ID_DATATYPE: + case PRIMITIVE_XHTML: + case PRIMITIVE_XHTML_HL7ORG: + case RESOURCE: + case RESOURCE_BLOCK: + case UNDECL_EXT: + // TODO: unimplemented. What to do? + } + } + } - /** - * Filter the latest nodeStack tier using `param`. - * - * @param param ExpressionNode parameter type ExpressionNode that provides the - * where clause that is used to filter nodes from the nodeStack. - */ - private void handleWhereFunctionParam(ExpressionNode param) { - BaseRuntimeChildDefinition wherePropertyChild = this.nodeStack.peek().nodeDefinition - .getChildByName(param.getName()); - BaseRuntimeElementDefinition wherePropertyDefinition = wherePropertyChild.getChildByName(param.getName()); + /** + * Filter the latest nodeStack tier using `param`. + * + * @param param ExpressionNode parameter type ExpressionNode that provides the + * where clause that is used to filter nodes from the nodeStack. 
+ */ + private void handleWhereFunctionParam(ExpressionNode param) { + BaseRuntimeChildDefinition wherePropertyChild = + this.nodeStack.peek().nodeDefinition.getChildByName(param.getName()); + BaseRuntimeElementDefinition wherePropertyDefinition = wherePropertyChild.getChildByName(param.getName()); - String matchingValue = param.getOpNext().getConstant().toString(); - List matchingNodes = new ArrayList<>(); - List unlabeledNodes = new ArrayList<>(); - // sort all nodes from the nodeStack into matching nodes and unlabeled nodes - for (IBase node : this.nodeStack.peek().nodes) { - List operationValues = wherePropertyChild.getAccessor().getValues(node); - if (operationValues.size() == 0) { - unlabeledNodes.add(node); - } else { - for (IBase operationValue : operationValues) { - IPrimitiveType primitive = (IPrimitiveType) operationValue; - switch (param.getOperation()) { - case Equals: - if (primitive.getValueAsString().equals(matchingValue)) { - matchingNodes.add(node); - } - break; - case NotEquals: - if (!primitive.getValueAsString().equals(matchingValue)) { - matchingNodes.add(node); - } - break; - case And: - case As: - case Concatenate: - case Contains: - case Div: - case DivideBy: - case Equivalent: - case Greater: - case GreaterOrEqual: - case Implies: - case In: - case Is: - case LessOrEqual: - case LessThan: - case MemberOf: - case Minus: - case Mod: - case NotEquivalent: - case Or: - case Plus: - case Times: - case Union: - case Xor: - // TODO: unimplemented, what to do? - } - } - } - } + String matchingValue = param.getOpNext().getConstant().toString(); + List matchingNodes = new ArrayList<>(); + List unlabeledNodes = new ArrayList<>(); + // sort all nodes from the nodeStack into matching nodes and unlabeled nodes + for (IBase node : this.nodeStack.peek().nodes) { + List operationValues = wherePropertyChild.getAccessor().getValues(node); + if (operationValues.size() == 0) { + unlabeledNodes.add(node); + } else { + for (IBase operationValue : operationValues) { + IPrimitiveType primitive = (IPrimitiveType) operationValue; + switch (param.getOperation()) { + case Equals: + if (primitive.getValueAsString().equals(matchingValue)) { + matchingNodes.add(node); + } + break; + case NotEquals: + if (!primitive.getValueAsString().equals(matchingValue)) { + matchingNodes.add(node); + } + break; + case And: + case As: + case Concatenate: + case Contains: + case Div: + case DivideBy: + case Equivalent: + case Greater: + case GreaterOrEqual: + case Implies: + case In: + case Is: + case LessOrEqual: + case LessThan: + case MemberOf: + case Minus: + case Mod: + case NotEquivalent: + case Or: + case Plus: + case Times: + case Union: + case Xor: + // TODO: unimplemented, what to do? + } + } + } + } - if (matchingNodes.size() == 0) { - if (unlabeledNodes.size() == 0) { - // no nodes were matched and no unlabeled nodes are available. We need to add a - // sister node to the nodeStack - GenerationTier latestTier = this.nodeStack.pop(); - GenerationTier previousTier = this.nodeStack.peek(); - this.nodeStack.push(latestTier); + if (matchingNodes.size() == 0) { + if (unlabeledNodes.size() == 0) { + // no nodes were matched and no unlabeled nodes are available. 
We need to add a + // sister node to the nodeStack + GenerationTier latestTier = this.nodeStack.pop(); + GenerationTier previousTier = this.nodeStack.peek(); + this.nodeStack.push(latestTier); - RuntimeCompositeDatatypeDefinition compositeTarget = (RuntimeCompositeDatatypeDefinition) latestTier.nodeDefinition; - ICompositeType compositeNode = compositeTarget - .newInstance(latestTier.childDefinition.getInstanceConstructorArguments()); - latestTier.childDefinition.getMutator().addValue(previousTier.nodes.get(0), compositeNode); - unlabeledNodes.add(compositeNode); - } + RuntimeCompositeDatatypeDefinition compositeTarget = + (RuntimeCompositeDatatypeDefinition) latestTier.nodeDefinition; + ICompositeType compositeNode = + compositeTarget.newInstance(latestTier.childDefinition.getInstanceConstructorArguments()); + latestTier.childDefinition.getMutator().addValue(previousTier.nodes.get(0), compositeNode); + unlabeledNodes.add(compositeNode); + } - switch(param.getOperation()) { - case Equals: - // if we are checking for equality, we need to set the property we looked for on - // the unlabeled node(s) - RuntimePrimitiveDatatypeDefinition equalsPrimitive = (RuntimePrimitiveDatatypeDefinition) wherePropertyDefinition; - IPrimitiveType primitive = equalsPrimitive - .newInstance(wherePropertyChild.getInstanceConstructorArguments()); - primitive.setValueAsString(param.getOpNext().getConstant().toString()); - for (IBase node : unlabeledNodes) { - wherePropertyChild.getMutator().addValue(node, primitive); - matchingNodes.add(node); - } - break; - case NotEquals: - // if we are checking for inequality, we need to pass all unlabeled (or created - // if none were available) - matchingNodes.addAll(unlabeledNodes); - break; - case And: - case As: - case Concatenate: - case Contains: - case Div: - case DivideBy: - case Equivalent: - case Greater: - case GreaterOrEqual: - case Implies: - case In: - case Is: - case LessOrEqual: - case LessThan: - case MemberOf: - case Minus: - case Mod: - case NotEquivalent: - case Or: - case Plus: - case Times: - case Union: - case Xor: - // TODO: need to implement above first - } - } + switch (param.getOperation()) { + case Equals: + // if we are checking for equality, we need to set the property we looked for on + // the unlabeled node(s) + RuntimePrimitiveDatatypeDefinition equalsPrimitive = + (RuntimePrimitiveDatatypeDefinition) wherePropertyDefinition; + IPrimitiveType primitive = + equalsPrimitive.newInstance(wherePropertyChild.getInstanceConstructorArguments()); + primitive.setValueAsString(param.getOpNext().getConstant().toString()); + for (IBase node : unlabeledNodes) { + wherePropertyChild.getMutator().addValue(node, primitive); + matchingNodes.add(node); + } + break; + case NotEquals: + // if we are checking for inequality, we need to pass all unlabeled (or created + // if none were available) + matchingNodes.addAll(unlabeledNodes); + break; + case And: + case As: + case Concatenate: + case Contains: + case Div: + case DivideBy: + case Equivalent: + case Greater: + case GreaterOrEqual: + case Implies: + case In: + case Is: + case LessOrEqual: + case LessThan: + case MemberOf: + case Minus: + case Mod: + case NotEquivalent: + case Or: + case Plus: + case Times: + case Union: + case Xor: + // TODO: need to implement above first + } + } - // set the nodes to the filtered ones - this.nodeStack.peek().nodes = matchingNodes; - } + // set the nodes to the filtered ones + this.nodeStack.peek().nodes = matchingNodes; + } - /** - * Creates a list all FHIRPaths from the 
mapping ordered by paths with where - * equals, where unequals and the rest. - * - * @return List a List of FHIRPaths ordered by the type. - */ - private List sortedPaths() { - List whereEquals = new ArrayList(); - List whereUnequals = new ArrayList(); - List withoutWhere = new ArrayList(); + /** + * Creates a list all FHIRPaths from the mapping ordered by paths with where + * equals, where unequals and the rest. + * + * @return List a List of FHIRPaths ordered by the type. + */ + private List sortedPaths() { + List whereEquals = new ArrayList(); + List whereUnequals = new ArrayList(); + List withoutWhere = new ArrayList(); - for (String fhirPath : this.pathMapping.keySet()) { - switch (this.getTypeOfFhirPath(fhirPath)) { - case WHERE_EQUALS: - whereEquals.add(fhirPath); - break; - case WHERE_UNEQUALS: - whereUnequals.add(fhirPath); - break; - case WITHOUT_WHERE: - withoutWhere.add(fhirPath); - break; - } - } + for (String fhirPath : this.pathMapping.keySet()) { + switch (this.getTypeOfFhirPath(fhirPath)) { + case WHERE_EQUALS: + whereEquals.add(fhirPath); + break; + case WHERE_UNEQUALS: + whereUnequals.add(fhirPath); + break; + case WITHOUT_WHERE: + withoutWhere.add(fhirPath); + break; + } + } - List ret = new ArrayList(); - ret.addAll(whereEquals); - ret.addAll(whereUnequals); - ret.addAll(withoutWhere); - return ret; - } + List ret = new ArrayList(); + ret.addAll(whereEquals); + ret.addAll(whereUnequals); + ret.addAll(withoutWhere); + return ret; + } - /** - * Returns the type of path based on the FHIRPath String. - * - * @param fhirPath String representation of a FHIRPath. - * @return PathType the type of path supplied as `fhirPath`. - */ - private PathType getTypeOfFhirPath(String fhirPath) { - ExpressionNode fhirPathExpression = this.engine.parse(fhirPath); - while (fhirPathExpression != null) { - if (fhirPathExpression.getKind() == ExpressionNode.Kind.Function) { - if (fhirPathExpression.getFunction() == ExpressionNode.Function.Where) { - for (ExpressionNode params : fhirPathExpression.getParameters()) { - switch (params.getOperation()) { - case Equals: - return PathType.WHERE_EQUALS; - case NotEquals: - return PathType.WHERE_UNEQUALS; - case And: - case As: - case Concatenate: - case Contains: - case Div: - case DivideBy: - case Equivalent: - case Greater: - case GreaterOrEqual: - case Implies: - case In: - case Is: - case LessOrEqual: - case LessThan: - case MemberOf: - case Minus: - case Mod: - case NotEquivalent: - case Or: - case Plus: - case Times: - case Union: - case Xor: - // TODO: need to implement above first - } - } - } - } - fhirPathExpression = fhirPathExpression.getInner(); - } - return PathType.WITHOUT_WHERE; - } + /** + * Returns the type of path based on the FHIRPath String. + * + * @param fhirPath String representation of a FHIRPath. + * @return PathType the type of path supplied as `fhirPath`. 
+ */ + private PathType getTypeOfFhirPath(String fhirPath) { + ExpressionNode fhirPathExpression = this.engine.parse(fhirPath); + while (fhirPathExpression != null) { + if (fhirPathExpression.getKind() == ExpressionNode.Kind.Function) { + if (fhirPathExpression.getFunction() == ExpressionNode.Function.Where) { + for (ExpressionNode params : fhirPathExpression.getParameters()) { + switch (params.getOperation()) { + case Equals: + return PathType.WHERE_EQUALS; + case NotEquals: + return PathType.WHERE_UNEQUALS; + case And: + case As: + case Concatenate: + case Contains: + case Div: + case DivideBy: + case Equivalent: + case Greater: + case GreaterOrEqual: + case Implies: + case In: + case Is: + case LessOrEqual: + case LessThan: + case MemberOf: + case Minus: + case Mod: + case NotEquivalent: + case Or: + case Plus: + case Times: + case Union: + case Xor: + // TODO: need to implement above first + } + } + } + } + fhirPathExpression = fhirPathExpression.getInner(); + } + return PathType.WITHOUT_WHERE; + } - /** - * A simple enum to diffirentiate between types of FHIRPaths in the special use - * case of generating FHIR Resources. - */ - public enum PathType { - WHERE_EQUALS, WHERE_UNEQUALS, WITHOUT_WHERE - } + /** + * A simple enum to diffirentiate between types of FHIRPaths in the special use + * case of generating FHIR Resources. + */ + public enum PathType { + WHERE_EQUALS, + WHERE_UNEQUALS, + WITHOUT_WHERE + } } diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/FhirDefaultPolicyAdvisor.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/FhirDefaultPolicyAdvisor.java index 036f3553111..295a14b449a 100644 --- a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/FhirDefaultPolicyAdvisor.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/FhirDefaultPolicyAdvisor.java @@ -20,21 +20,33 @@ import java.util.List; public class FhirDefaultPolicyAdvisor implements IValidationPolicyAdvisor { @Override - public ReferenceValidationPolicy policyForReference(IResourceValidator validator, Object appContext, String path, - String url) { + public ReferenceValidationPolicy policyForReference( + IResourceValidator validator, Object appContext, String path, String url) { return ReferenceValidationPolicy.IGNORE; } @Override - public ContainedReferenceValidationPolicy policyForContained(IResourceValidator validator, Object appContext, String containerType, String containerId, - Element.SpecialElement containingResourceType, String path, String url) { + public ContainedReferenceValidationPolicy policyForContained( + IResourceValidator validator, + Object appContext, + String containerType, + String containerId, + Element.SpecialElement containingResourceType, + String path, + String url) { return ContainedReferenceValidationPolicy.CHECK_VALID; } @Override - public CodedContentValidationPolicy policyForCodedContent(IResourceValidator validator, Object appContext, String stackPath, - ElementDefinition definition, StructureDefinition structure, BindingKind kind, - ValueSet valueSet, List systems) { + public CodedContentValidationPolicy policyForCodedContent( + IResourceValidator validator, + Object appContext, + String stackPath, + ElementDefinition definition, + StructureDefinition structure, + BindingKind kind, + ValueSet valueSet, + List systems) { return CodedContentValidationPolicy.CODE; } } diff --git 
a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/FhirInstanceValidator.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/FhirInstanceValidator.java index 9ad1b826fab..b1e76206df0 100644 --- a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/FhirInstanceValidator.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/FhirInstanceValidator.java @@ -18,10 +18,10 @@ import org.hl7.fhir.r5.utils.validation.IValidatorResourceFetcher; import org.hl7.fhir.r5.utils.validation.constants.BestPracticeWarningLevel; import org.hl7.fhir.utilities.validation.ValidationMessage; -import javax.annotation.Nonnull; import java.util.Arrays; import java.util.Collections; import java.util.List; +import javax.annotation.Nonnull; @SuppressWarnings({"PackageAccessibility", "Duplicates"}) public class FhirInstanceValidator extends BaseValidatorBridge implements IInstanceValidatorModule { @@ -225,24 +225,25 @@ public class FhirInstanceValidator extends BaseValidatorBridge implements IInsta VersionSpecificWorkerContextWrapper wrappedWorkerContext = provideWorkerContext(); return new ValidatorWrapper() - .setAnyExtensionsAllowed(isAnyExtensionsAllowed()) - .setBestPracticeWarningLevel(getBestPracticeWarningLevel()) - .setErrorForUnknownProfiles(isErrorForUnknownProfiles()) - .setExtensionDomains(getExtensionDomains()) - .setValidationPolicyAdvisor(validatorPolicyAdvisor) - .setNoTerminologyChecks(isNoTerminologyChecks()) - .setNoExtensibleWarnings(isNoExtensibleWarnings()) - .setNoBindingMsgSuppressed(isNoBindingMsgSuppressed()) - .setValidatorResourceFetcher(getValidatorResourceFetcher()) - .setAssumeValidRestReferences(isAssumeValidRestReferences()) - .validate(wrappedWorkerContext, theValidationCtx); + .setAnyExtensionsAllowed(isAnyExtensionsAllowed()) + .setBestPracticeWarningLevel(getBestPracticeWarningLevel()) + .setErrorForUnknownProfiles(isErrorForUnknownProfiles()) + .setExtensionDomains(getExtensionDomains()) + .setValidationPolicyAdvisor(validatorPolicyAdvisor) + .setNoTerminologyChecks(isNoTerminologyChecks()) + .setNoExtensibleWarnings(isNoExtensibleWarnings()) + .setNoBindingMsgSuppressed(isNoBindingMsgSuppressed()) + .setValidatorResourceFetcher(getValidatorResourceFetcher()) + .setAssumeValidRestReferences(isAssumeValidRestReferences()) + .validate(wrappedWorkerContext, theValidationCtx); } @Nonnull protected VersionSpecificWorkerContextWrapper provideWorkerContext() { VersionSpecificWorkerContextWrapper wrappedWorkerContext = myWrappedWorkerContext; if (wrappedWorkerContext == null) { - wrappedWorkerContext = VersionSpecificWorkerContextWrapper.newVersionSpecificWorkerContextWrapper(myValidationSupport); + wrappedWorkerContext = + VersionSpecificWorkerContextWrapper.newVersionSpecificWorkerContextWrapper(myValidationSupport); } myWrappedWorkerContext = wrappedWorkerContext; return wrappedWorkerContext; @@ -283,11 +284,11 @@ public class FhirInstanceValidator extends BaseValidatorBridge implements IInsta } } - public static class NullEvaluationContext implements FHIRPathEngine.IEvaluationContext { @Override - public List resolveConstant(Object appContext, String name, boolean beforeContext) throws PathEngineException { + public List resolveConstant(Object appContext, String name, boolean beforeContext) + throws PathEngineException { return Collections.emptyList(); } @@ -307,12 +308,14 @@ public class FhirInstanceValidator extends BaseValidatorBridge implements 
IInsta } @Override - public TypeDetails checkFunction(Object appContext, String functionName, List parameters) throws PathEngineException { + public TypeDetails checkFunction(Object appContext, String functionName, List parameters) + throws PathEngineException { return null; } @Override - public List executeFunction(Object appContext, List focus, String functionName, List> parameters) { + public List executeFunction( + Object appContext, List focus, String functionName, List> parameters) { return null; } @@ -331,6 +334,4 @@ public class FhirInstanceValidator extends BaseValidatorBridge implements IInsta return null; } } - - } diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/HapiToHl7OrgDstu2ValidatingSupportWrapper.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/HapiToHl7OrgDstu2ValidatingSupportWrapper.java index 922f1f4c328..bd45f0c6335 100644 --- a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/HapiToHl7OrgDstu2ValidatingSupportWrapper.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/HapiToHl7OrgDstu2ValidatingSupportWrapper.java @@ -10,7 +10,8 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import java.util.List; import java.util.stream.Collectors; -public class HapiToHl7OrgDstu2ValidatingSupportWrapper extends BaseValidationSupportWrapper implements IValidationSupport { +public class HapiToHl7OrgDstu2ValidatingSupportWrapper extends BaseValidationSupportWrapper + implements IValidationSupport { private final FhirContext myHapiCtx; /** @@ -30,11 +31,9 @@ public class HapiToHl7OrgDstu2ValidatingSupportWrapper extends BaseValidationSup @Override public List fetchAllStructureDefinitions() { - return super - .fetchAllStructureDefinitions() - .stream() - .map(t -> translate(t)) - .collect(Collectors.toList()); + return super.fetchAllStructureDefinitions().stream() + .map(t -> translate(t)) + .collect(Collectors.toList()); } @Override diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/ProfileKnowledgeWorkerR5.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/ProfileKnowledgeWorkerR5.java index 4af92ae7e0e..9a0f70b1516 100644 --- a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/ProfileKnowledgeWorkerR5.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/ProfileKnowledgeWorkerR5.java @@ -1,11 +1,11 @@ package org.hl7.fhir.common.hapi.validation.validator; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.BaseRuntimeElementDefinition; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.RuntimeCompositeDatatypeDefinition; import ca.uhn.fhir.context.RuntimePrimitiveDatatypeDefinition; import ca.uhn.fhir.context.RuntimeResourceDefinition; +import ca.uhn.fhir.i18n.Msg; import org.apache.commons.lang3.Validate; import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.r5.conformance.profile.BindingResolution; @@ -14,18 +14,19 @@ import org.hl7.fhir.r5.model.ElementDefinition; import org.hl7.fhir.r5.model.StructureDefinition; public class ProfileKnowledgeWorkerR5 implements ProfileKnowledgeProvider { - private final FhirContext myCtx; + private final FhirContext myCtx; - public ProfileKnowledgeWorkerR5(FhirContext theCtx) { - myCtx = theCtx; - } + public ProfileKnowledgeWorkerR5(FhirContext theCtx) { + myCtx = theCtx; + } - @Override - public boolean 
isDatatype(String typeSimple) { - BaseRuntimeElementDefinition def = myCtx.getElementDefinition(typeSimple); - Validate.notNull(typeSimple); - return (def instanceof RuntimePrimitiveDatatypeDefinition) || (def instanceof RuntimeCompositeDatatypeDefinition); - } + @Override + public boolean isDatatype(String typeSimple) { + BaseRuntimeElementDefinition def = myCtx.getElementDefinition(typeSimple); + Validate.notNull(typeSimple); + return (def instanceof RuntimePrimitiveDatatypeDefinition) + || (def instanceof RuntimeCompositeDatatypeDefinition); + } @Override public boolean isPrimitiveType(String typeSimple) { @@ -35,45 +36,49 @@ public class ProfileKnowledgeWorkerR5 implements ProfileKnowledgeProvider { } @Override - public boolean isResource(String typeSimple) { - BaseRuntimeElementDefinition def = myCtx.getElementDefinition(typeSimple); - Validate.notNull(typeSimple); - return def instanceof RuntimeResourceDefinition; - } + public boolean isResource(String typeSimple) { + BaseRuntimeElementDefinition def = myCtx.getElementDefinition(typeSimple); + Validate.notNull(typeSimple); + return def instanceof RuntimeResourceDefinition; + } - @Override - public boolean hasLinkFor(String typeSimple) { - return false; - } + @Override + public boolean hasLinkFor(String typeSimple) { + return false; + } - @Override - public String getLinkFor(String corePath, String typeSimple) { - return null; - } + @Override + public String getLinkFor(String corePath, String typeSimple) { + return null; + } - @Override - public BindingResolution resolveBinding(StructureDefinition theStructureDefinition, ElementDefinition.ElementDefinitionBindingComponent theElementDefinitionBindingComponent, String theS) throws FHIRException { - return null; - } + @Override + public BindingResolution resolveBinding( + StructureDefinition theStructureDefinition, + ElementDefinition.ElementDefinitionBindingComponent theElementDefinitionBindingComponent, + String theS) + throws FHIRException { + return null; + } - @Override - public BindingResolution resolveBinding(StructureDefinition theStructureDefinition, String theS, String theS1) throws FHIRException { - return null; - } + @Override + public BindingResolution resolveBinding(StructureDefinition theStructureDefinition, String theS, String theS1) + throws FHIRException { + return null; + } - @Override - public String getLinkForProfile(StructureDefinition theStructureDefinition, String theS) { - return null; - } + @Override + public String getLinkForProfile(StructureDefinition theStructureDefinition, String theS) { + return null; + } - @Override - public boolean prependLinks() { - return false; - } - - @Override - public String getLinkForUrl(String corePath, String url) { - throw new UnsupportedOperationException(Msg.code(693)); - } + @Override + public boolean prependLinks() { + return false; + } + @Override + public String getLinkForUrl(String corePath, String url) { + throw new UnsupportedOperationException(Msg.code(693)); + } } diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/ValidatorWrapper.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/ValidatorWrapper.java index 29d28d372e1..f8e0c9bda89 100644 --- a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/ValidatorWrapper.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/ValidatorWrapper.java @@ -112,7 +112,8 @@ class ValidatorWrapper { return this; } - public List 
validate(IWorkerContext theWorkerContext, IValidationContext theValidationContext) { + public List validate( + IWorkerContext theWorkerContext, IValidationContext theValidationContext) { InstanceValidator v; FHIRPathEngine.IEvaluationContext evaluationCtx = new FhirInstanceValidator.NullEvaluationContext(); XVerExtensionManager xverManager = new XVerExtensionManager(theWorkerContext); @@ -197,21 +198,30 @@ class ValidatorWrapper { } // TODO: are these still needed? messages = messages.stream() - .filter(m -> m.getMessageId() == null - || !(m.getMessageId().equals(I18nConstants.TERMINOLOGY_TX_BINDING_NOSOURCE) - || m.getMessageId().equals(I18nConstants.TERMINOLOGY_TX_BINDING_NOSOURCE2) - || (m.getMessageId().equals(I18nConstants.TERMINOLOGY_TX_VALUESET_NOTFOUND) && m.getMessage().contains("http://hl7.org/fhir/ValueSet/mimetypes")))) - .collect(Collectors.toList()); + .filter(m -> m.getMessageId() == null + || !(m.getMessageId().equals(I18nConstants.TERMINOLOGY_TX_BINDING_NOSOURCE) + || m.getMessageId().equals(I18nConstants.TERMINOLOGY_TX_BINDING_NOSOURCE2) + || (m.getMessageId().equals(I18nConstants.TERMINOLOGY_TX_VALUESET_NOTFOUND) + && m.getMessage().contains("http://hl7.org/fhir/ValueSet/mimetypes")))) + .collect(Collectors.toList()); if (myErrorForUnknownProfiles) { - messages.stream().filter(m -> m.getMessageId() != null && (m.getMessageId().equals(I18nConstants.VALIDATION_VAL_PROFILE_UNKNOWN) || m.getMessageId().equals(I18nConstants.VALIDATION_VAL_PROFILE_UNKNOWN_NOT_POLICY))) - .filter(m -> m.getLevel() == ValidationMessage.IssueSeverity.WARNING) - .forEach(m -> m.setLevel(ValidationMessage.IssueSeverity.ERROR)); + messages.stream() + .filter(m -> m.getMessageId() != null + && (m.getMessageId().equals(I18nConstants.VALIDATION_VAL_PROFILE_UNKNOWN) + || m.getMessageId() + .equals(I18nConstants.VALIDATION_VAL_PROFILE_UNKNOWN_NOT_POLICY))) + .filter(m -> m.getLevel() == ValidationMessage.IssueSeverity.WARNING) + .forEach(m -> m.setLevel(ValidationMessage.IssueSeverity.ERROR)); } return messages; } - private void fetchAndAddProfile(IWorkerContext theWorkerContext, List theProfileStructureDefinitions, String theUrl, List theMessages) { + private void fetchAndAddProfile( + IWorkerContext theWorkerContext, + List theProfileStructureDefinitions, + String theUrl, + List theMessages) { try { StructureDefinition structureDefinition = theWorkerContext.fetchResource(StructureDefinition.class, theUrl); if (structureDefinition != null) { @@ -222,7 +232,6 @@ class ValidatorWrapper { } } - private ArrayList determineIfProfilesSpecified(Document theDocument) { ArrayList profileNames = new ArrayList<>(); NodeList list = theDocument.getChildNodes().item(0).getChildNodes(); @@ -231,7 +240,8 @@ class ValidatorWrapper { NodeList metaList = list.item(i).getChildNodes(); for (int j = 0; j < metaList.getLength(); j++) { if (metaList.item(j).getNodeName().compareToIgnoreCase("profile") == 0) { - profileNames.add(metaList.item(j).getAttributes().item(0).getNodeValue()); + profileNames.add( + metaList.item(j).getAttributes().item(0).getNodeValue()); } } break; @@ -239,5 +249,4 @@ class ValidatorWrapper { } return profileNames; } - } diff --git a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/VersionSpecificWorkerContextWrapper.java b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/VersionSpecificWorkerContextWrapper.java index 76685eb2ea6..374dc4852b9 100644 --- 
a/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/VersionSpecificWorkerContextWrapper.java +++ b/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/validator/VersionSpecificWorkerContextWrapper.java @@ -27,7 +27,6 @@ import org.hl7.fhir.r5.model.Resource; import org.hl7.fhir.r5.model.StructureDefinition; import org.hl7.fhir.r5.model.ValueSet; import org.hl7.fhir.r5.profilemodel.PEBuilder; - import org.hl7.fhir.r5.terminologies.expansion.ValueSetExpansionOutcome; import org.hl7.fhir.r5.terminologies.utilities.TerminologyServiceErrorClass; import org.hl7.fhir.r5.utils.validation.IResourceValidator; @@ -42,8 +41,6 @@ import org.hl7.fhir.utilities.validation.ValidationOptions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.io.FileNotFoundException; import java.io.IOException; import java.util.ArrayList; @@ -51,6 +48,8 @@ import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -63,16 +62,21 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo private volatile List myAllStructures; private org.hl7.fhir.r5.model.Parameters myExpansionProfile; - public VersionSpecificWorkerContextWrapper(ValidationSupportContext theValidationSupportContext, VersionCanonicalizer theVersionCanonicalizer) { + public VersionSpecificWorkerContextWrapper( + ValidationSupportContext theValidationSupportContext, VersionCanonicalizer theVersionCanonicalizer) { myValidationSupportContext = theValidationSupportContext; myVersionCanonicalizer = theVersionCanonicalizer; long timeoutMillis = HapiSystemProperties.getTestValidationResourceCachesMs(); myFetchResourceCache = CacheFactory.build(timeoutMillis, 10000, key -> { - String fetchResourceName = key.getResourceName(); - if (myValidationSupportContext.getRootValidationSupport().getFhirContext().getVersion().getVersion() == FhirVersionEnum.DSTU2) { + if (myValidationSupportContext + .getRootValidationSupport() + .getFhirContext() + .getVersion() + .getVersion() + == FhirVersionEnum.DSTU2) { if ("CodeSystem".equals(fetchResourceName)) { fetchResourceName = "ValueSet"; } @@ -82,10 +86,16 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo if (fetchResourceName.equals("Resource")) { fetchResourceType = null; } else { - fetchResourceType = myValidationSupportContext.getRootValidationSupport().getFhirContext().getResourceDefinition(fetchResourceName).getImplementingClass(); + fetchResourceType = myValidationSupportContext + .getRootValidationSupport() + .getFhirContext() + .getResourceDefinition(fetchResourceName) + .getImplementingClass(); } - IBaseResource fetched = myValidationSupportContext.getRootValidationSupport().fetchResource(fetchResourceType, key.getUri()); + IBaseResource fetched = myValidationSupportContext + .getRootValidationSupport() + .fetchResource(fetchResourceType, key.getUri()); Resource canonical = myVersionCanonicalizer.resourceToValidatorCanonical(fetched); @@ -93,8 +103,13 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo StructureDefinition canonicalSd = (StructureDefinition) canonical; if (canonicalSd.getSnapshot().isEmpty()) { ourLog.info("Generating snapshot for StructureDefinition: {}", 
canonicalSd.getUrl()); - fetched = myValidationSupportContext.getRootValidationSupport().generateSnapshot(theValidationSupportContext, fetched, "", null, ""); - Validate.isTrue(fetched != null, "StructureDefinition %s has no snapshot, and no snapshot generator is configured", key.getUri()); + fetched = myValidationSupportContext + .getRootValidationSupport() + .generateSnapshot(theValidationSupportContext, fetched, "", null, ""); + Validate.isTrue( + fetched != null, + "StructureDefinition %s has no snapshot, and no snapshot generator is configured", + key.getUri()); canonical = myVersionCanonicalizer.resourceToValidatorCanonical(fetched); } } @@ -126,12 +141,14 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo } @Override - public int loadFromPackage(NpmPackage pi, IContextResourceLoader loader, List types) throws FileNotFoundException, IOException, FHIRException { + public int loadFromPackage(NpmPackage pi, IContextResourceLoader loader, List types) + throws FileNotFoundException, IOException, FHIRException { throw new UnsupportedOperationException(Msg.code(653)); } @Override - public int loadFromPackageAndDependencies(NpmPackage pi, IContextResourceLoader loader, BasePackageCacheManager pcm) throws FHIRException { + public int loadFromPackageAndDependencies(NpmPackage pi, IContextResourceLoader loader, BasePackageCacheManager pcm) + throws FHIRException { throw new UnsupportedOperationException(Msg.code(654)); } @@ -171,18 +188,18 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo } @Override - public IWorkerContext setPackageTracker( - IWorkerContextManager.IPackageLoadingTracker packageTracker) { + public IWorkerContext setPackageTracker(IWorkerContextManager.IPackageLoadingTracker packageTracker) { throw new UnsupportedOperationException(Msg.code(2266)); } @Override public String getSpecUrl() { - return ""; + return ""; } @Override - public PEBuilder getProfiledElementBuilder(PEBuilder.PEElementPropertiesPolicy thePEElementPropertiesPolicy, boolean theB) { + public PEBuilder getProfiledElementBuilder( + PEBuilder.PEElementPropertiesPolicy thePEElementPropertiesPolicy, boolean theB) { throw new UnsupportedOperationException(Msg.code(2264)); } @@ -206,7 +223,8 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo List retVal = myAllStructures; if (retVal == null) { retVal = new ArrayList<>(); - for (IBaseResource next : myValidationSupportContext.getRootValidationSupport().fetchAllStructureDefinitions()) { + for (IBaseResource next : + myValidationSupportContext.getRootValidationSupport().fetchAllStructureDefinitions()) { try { StructureDefinition converted = myVersionCanonicalizer.structureDefinitionToCanonical(next); retVal.add(converted); @@ -221,38 +239,39 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo } @Override - public void cacheResource(Resource res) { - - } + public void cacheResource(Resource res) {} @Override - public void cacheResourceFromPackage(Resource res, PackageInformation packageDetails) throws FHIRException { - - } + public void cacheResourceFromPackage(Resource res, PackageInformation packageDetails) throws FHIRException {} @Override - public void cachePackage(PackageInformation packageInformation) { - - } + public void cachePackage(PackageInformation packageInformation) {} @Nonnull - private ValidationResult convertValidationResult(String theSystem, @Nullable IValidationSupport.CodeValidationResult theResult) { + private 
ValidationResult convertValidationResult( + String theSystem, @Nullable IValidationSupport.CodeValidationResult theResult) { ValidationResult retVal = null; if (theResult != null) { String code = theResult.getCode(); String display = theResult.getDisplay(); - + String issueSeverity = theResult.getSeverityCode(); String message = theResult.getMessage(); if (isNotBlank(code)) { - retVal = new ValidationResult(theSystem, null, new org.hl7.fhir.r5.model.CodeSystem.ConceptDefinitionComponent() - .setCode(code) - .setDisplay(display), - null); + retVal = new ValidationResult( + theSystem, + null, + new org.hl7.fhir.r5.model.CodeSystem.ConceptDefinitionComponent() + .setCode(code) + .setDisplay(display), + null); } else if (isNotBlank(issueSeverity)) { - retVal = new ValidationResult(ValidationMessage.IssueSeverity.fromCode(issueSeverity), message, TerminologyServiceErrorClass.UNKNOWN, null); + retVal = new ValidationResult( + ValidationMessage.IssueSeverity.fromCode(issueSeverity), + message, + TerminologyServiceErrorClass.UNKNOWN, + null); } - } if (retVal == null) { @@ -263,14 +282,17 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo } @Override - public ValueSetExpansionOutcome expandVS(org.hl7.fhir.r5.model.ValueSet source, boolean cacheOk, boolean Hierarchical) { + public ValueSetExpansionOutcome expandVS( + org.hl7.fhir.r5.model.ValueSet source, boolean cacheOk, boolean Hierarchical) { IBaseResource convertedSource; try { convertedSource = myVersionCanonicalizer.valueSetFromValidatorCanonical(source); } catch (FHIRException e) { throw new InternalErrorException(Msg.code(661) + e); } - IValidationSupport.ValueSetExpansionOutcome expanded = myValidationSupportContext.getRootValidationSupport().expandValueSet(myValidationSupportContext, null, convertedSource); + IValidationSupport.ValueSetExpansionOutcome expanded = myValidationSupportContext + .getRootValidationSupport() + .expandValueSet(myValidationSupportContext, null, convertedSource); org.hl7.fhir.r5.model.ValueSet convertedResult = null; if (expanded.getValueSet() != null) { @@ -288,18 +310,27 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo } @Override - public ValueSetExpansionOutcome expandVS(Resource src, org.hl7.fhir.r5.model.ElementDefinition.ElementDefinitionBindingComponent binding, boolean cacheOk, boolean Hierarchical) { + public ValueSetExpansionOutcome expandVS( + Resource src, + org.hl7.fhir.r5.model.ElementDefinition.ElementDefinitionBindingComponent binding, + boolean cacheOk, + boolean Hierarchical) { throw new UnsupportedOperationException(Msg.code(663)); } @Override - public ValueSetExpansionOutcome expandVS(ValueSet.ConceptSetComponent inc, boolean hierarchical, boolean noInactive) throws TerminologyServiceException { + public ValueSetExpansionOutcome expandVS(ValueSet.ConceptSetComponent inc, boolean hierarchical, boolean noInactive) + throws TerminologyServiceException { throw new UnsupportedOperationException(Msg.code(664)); } @Override public Locale getLocale() { - return myValidationSupportContext.getRootValidationSupport().getFhirContext().getLocalizer().getLocale(); + return myValidationSupportContext + .getRootValidationSupport() + .getFhirContext() + .getLocalizer() + .getLocale(); } @Override @@ -309,7 +340,8 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo @Override public org.hl7.fhir.r5.model.CodeSystem fetchCodeSystem(String system) { - IBaseResource fetched = 
myValidationSupportContext.getRootValidationSupport().fetchCodeSystem(system); + IBaseResource fetched = + myValidationSupportContext.getRootValidationSupport().fetchCodeSystem(system); if (fetched == null) { return null; } @@ -322,7 +354,8 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo @Override public CodeSystem fetchCodeSystem(String system, String verison) { - IBaseResource fetched = myValidationSupportContext.getRootValidationSupport().fetchCodeSystem(system); + IBaseResource fetched = + myValidationSupportContext.getRootValidationSupport().fetchCodeSystem(system); if (fetched == null) { return null; } @@ -366,7 +399,8 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo public T fetchResourceWithException(Class class_, String uri) throws FHIRException { T retVal = fetchResource(class_, uri); if (retVal == null) { - throw new FHIRException(Msg.code(667) + "Can not find resource of type " + class_.getSimpleName() + " with uri " + uri); + throw new FHIRException( + Msg.code(667) + "Can not find resource of type " + class_.getSimpleName() + " with uri " + uri); } return retVal; } @@ -382,18 +416,25 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo } @Override - public T fetchResourceWithException(Class class_, String uri, Resource sourceOfReference) throws FHIRException { + public T fetchResourceWithException(Class class_, String uri, Resource sourceOfReference) + throws FHIRException { throw new UnsupportedOperationException(Msg.code(2214)); } @Override public List getResourceNames() { - return new ArrayList<>(myValidationSupportContext.getRootValidationSupport().getFhirContext().getResourceTypes()); + return new ArrayList<>(myValidationSupportContext + .getRootValidationSupport() + .getFhirContext() + .getResourceTypes()); } @Override public Set getResourceNamesAsSet() { - return myValidationSupportContext.getRootValidationSupport().getFhirContext().getResourceTypes(); + return myValidationSupportContext + .getRootValidationSupport() + .getFhirContext() + .getResourceTypes(); } @Override @@ -418,7 +459,12 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo @Override public String getVersion() { - return myValidationSupportContext.getRootValidationSupport().getFhirContext().getVersion().getVersion().getFhirVersionString(); + return myValidationSupportContext + .getRootValidationSupport() + .getFhirContext() + .getVersion() + .getVersion() + .getFhirVersionString(); } @Override @@ -458,7 +504,9 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo @Override public boolean supportsSystem(String system) { - return myValidationSupportContext.getRootValidationSupport().isCodeSystemSupported(myValidationSupportContext, system); + return myValidationSupportContext + .getRootValidationSupport() + .isCodeSystemSupported(myValidationSupportContext, system); } @Override @@ -467,18 +515,26 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo } @Override - public ValueSetExpansionOutcome expandVS(ValueSet source, boolean cacheOk, boolean heiarchical, boolean incompleteOk) { + public ValueSetExpansionOutcome expandVS( + ValueSet source, boolean cacheOk, boolean heiarchical, boolean incompleteOk) { return null; } @Override - public ValidationResult validateCode(ValidationOptions theOptions, String system, String version, String code, String display) { + public ValidationResult validateCode( + ValidationOptions 
theOptions, String system, String version, String code, String display) { ConceptValidationOptions validationOptions = convertConceptValidationOptions(theOptions); return doValidation(null, validationOptions, system, code, display); } @Override - public ValidationResult validateCode(ValidationOptions theOptions, String theSystem, String version, String theCode, String display, ValueSet theValueSet) { + public ValidationResult validateCode( + ValidationOptions theOptions, + String theSystem, + String version, + String theCode, + String display, + ValueSet theValueSet) { IBaseResource convertedVs = null; try { @@ -495,7 +551,8 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo } @Override - public ValidationResult validateCode(ValidationOptions theOptions, String code, org.hl7.fhir.r5.model.ValueSet theValueSet) { + public ValidationResult validateCode( + ValidationOptions theOptions, String code, org.hl7.fhir.r5.model.ValueSet theValueSet) { IBaseResource convertedVs = null; try { if (theValueSet != null) { @@ -505,13 +562,17 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo throw new InternalErrorException(Msg.code(690) + e); } - ConceptValidationOptions validationOptions = convertConceptValidationOptions(theOptions).setInferSystem(true); + ConceptValidationOptions validationOptions = + convertConceptValidationOptions(theOptions).setInferSystem(true); return doValidation(convertedVs, validationOptions, null, code, null); } @Override - public ValidationResult validateCode(ValidationOptions theOptions, org.hl7.fhir.r5.model.Coding theCoding, org.hl7.fhir.r5.model.ValueSet theValueSet) { + public ValidationResult validateCode( + ValidationOptions theOptions, + org.hl7.fhir.r5.model.Coding theCoding, + org.hl7.fhir.r5.model.ValueSet theValueSet) { IBaseResource convertedVs = null; try { @@ -531,12 +592,14 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo } @Override - public ValidationResult validateCode(ValidationOptions options, Coding code, ValueSet vs, ValidationContextCarrier ctxt) { + public ValidationResult validateCode( + ValidationOptions options, Coding code, ValueSet vs, ValidationContextCarrier ctxt) { return validateCode(options, code, vs); } @Override - public void validateCodeBatch(ValidationOptions options, List codes, ValueSet vs) { + public void validateCodeBatch( + ValidationOptions options, List codes, ValueSet vs) { for (CodingValidationRequest next : codes) { ValidationResult outcome = validateCode(options, next.getCoding(), vs); next.setResult(outcome); @@ -544,18 +607,37 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo } @Nonnull - private ValidationResult doValidation(IBaseResource theValueSet, ConceptValidationOptions theValidationOptions, String theSystem, String theCode, String theDisplay) { + private ValidationResult doValidation( + IBaseResource theValueSet, + ConceptValidationOptions theValidationOptions, + String theSystem, + String theCode, + String theDisplay) { IValidationSupport.CodeValidationResult result; if (theValueSet != null) { - result = myValidationSupportContext.getRootValidationSupport().validateCodeInValueSet(myValidationSupportContext, theValidationOptions, theSystem, theCode, theDisplay, theValueSet); + result = myValidationSupportContext + .getRootValidationSupport() + .validateCodeInValueSet( + myValidationSupportContext, + theValidationOptions, + theSystem, + theCode, + theDisplay, + theValueSet); } else { - 
result = myValidationSupportContext.getRootValidationSupport().validateCode(myValidationSupportContext, theValidationOptions, theSystem, theCode, theDisplay, null); + result = myValidationSupportContext + .getRootValidationSupport() + .validateCode( + myValidationSupportContext, theValidationOptions, theSystem, theCode, theDisplay, null); } return convertValidationResult(theSystem, result); } @Override - public ValidationResult validateCode(ValidationOptions theOptions, org.hl7.fhir.r5.model.CodeableConcept code, org.hl7.fhir.r5.model.ValueSet theVs) { + public ValidationResult validateCode( + ValidationOptions theOptions, + org.hl7.fhir.r5.model.CodeableConcept code, + org.hl7.fhir.r5.model.ValueSet theVs) { List validationResultsOk = new ArrayList<>(); for (Coding next : code.getCoding()) { ValidationResult retVal = validateCode(theOptions, next, theVs); @@ -568,7 +650,8 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo } } - if (code.getCoding().size() > 0 && validationResultsOk.size() == code.getCoding().size()) { + if (code.getCoding().size() > 0 + && validationResultsOk.size() == code.getCoding().size()) { return validationResultsOk.get(0); } @@ -596,9 +679,9 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo myResourceName = theResourceName; myUri = theUri; myHashCode = new HashCodeBuilder(17, 37) - .append(myResourceName) - .append(myUri) - .toHashCode(); + .append(myResourceName) + .append(myUri) + .toHashCode(); } @Override @@ -614,9 +697,9 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo ResourceKey that = (ResourceKey) theO; return new EqualsBuilder() - .append(myResourceName, that.myResourceName) - .append(myUri, that.myUri) - .isEquals(); + .append(myResourceName, that.myResourceName) + .append(myUri, that.myUri) + .isEquals(); } public String getResourceName() { @@ -642,9 +725,11 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo } @Nonnull - public static VersionSpecificWorkerContextWrapper newVersionSpecificWorkerContextWrapper(IValidationSupport theValidationSupport) { + public static VersionSpecificWorkerContextWrapper newVersionSpecificWorkerContextWrapper( + IValidationSupport theValidationSupport) { VersionCanonicalizer versionCanonicalizer = new VersionCanonicalizer(theValidationSupport.getFhirContext()); - return new VersionSpecificWorkerContextWrapper(new ValidationSupportContext(theValidationSupport), versionCanonicalizer); + return new VersionSpecificWorkerContextWrapper( + new ValidationSupportContext(theValidationSupport), versionCanonicalizer); } @Override @@ -655,9 +740,5 @@ public class VersionSpecificWorkerContextWrapper extends I18nBase implements IWo @Override public void setForPublication(boolean b) { throw new UnsupportedOperationException(Msg.code(2351)); - } } - - - diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/FhirDstu2.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/FhirDstu2.java index f862b0d17c5..e2eb8bd9858 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/FhirDstu2.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/FhirDstu2.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -19,15 +19,9 @@ */ package ca.uhn.fhir.model.dstu2; -import ca.uhn.fhir.i18n.Msg; -import java.io.InputStream; -import java.util.Date; - -import ca.uhn.fhir.fhirpath.IFhirPath; -import org.apache.commons.lang3.StringUtils; -import org.hl7.fhir.instance.model.api.*; - import ca.uhn.fhir.context.*; +import ca.uhn.fhir.fhirpath.IFhirPath; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.*; import ca.uhn.fhir.model.base.composite.*; import ca.uhn.fhir.model.dstu2.composite.*; @@ -35,6 +29,11 @@ import ca.uhn.fhir.model.dstu2.resource.StructureDefinition; import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.api.IVersionSpecificBundleFactory; import ca.uhn.fhir.util.ReflectionUtil; +import org.apache.commons.lang3.StringUtils; +import org.hl7.fhir.instance.model.api.*; + +import java.io.InputStream; +import java.util.Date; public class FhirDstu2 implements IFhirVersion { @@ -45,7 +44,6 @@ public class FhirDstu2 implements IFhirVersion { throw new UnsupportedOperationException(Msg.code(74) + "FluentPath is not supported in DSTU2 contexts"); } - @Override public IResource generateProfile(RuntimeResourceDefinition theRuntimeResourceDefinition, String theServerBase) { StructureDefinition retVal = new StructureDefinition(); @@ -73,7 +71,8 @@ public class FhirDstu2 implements IFhirVersion { str = FhirDstu2.class.getResourceAsStream("ca/uhn/fhir/model/dstu2/fhirversion.properties"); } if (str == null) { - throw new ConfigurationException(Msg.code(75) + "Can not find model property file on classpath: " + "/ca/uhn/fhir/model/dstu2/fhirversion.properties"); + throw new ConfigurationException(Msg.code(75) + "Can not find model property file on classpath: " + + "/ca/uhn/fhir/model/dstu2/fhirversion.properties"); } return str; } @@ -113,13 +112,8 @@ public class FhirDstu2 implements IFhirVersion { return new IdDt(); } - - - @Override public Object getServerVersion() { return ReflectionUtil.newInstanceOfFhirServerType("ca.uhn.fhir.model.dstu2.FhirServerDstu2"); } - - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/AgeDt.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/AgeDt.java index 903815d2520..0690cba6a32 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/AgeDt.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/AgeDt.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,10 +20,6 @@ package ca.uhn.fhir.model.dstu2.composite; import ca.uhn.fhir.model.api.annotation.DatatypeDef; -import ca.uhn.fhir.model.dstu2.composite.QuantityDt; -import ca.uhn.fhir.model.primitive.IntegerDt; -@DatatypeDef(name="AgeDt", profileOf=QuantityDt.class) -public class AgeDt extends QuantityDt { - -} +@DatatypeDef(name = "AgeDt", profileOf = QuantityDt.class) +public class AgeDt extends QuantityDt {} diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/CountDt.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/CountDt.java index 47e0595b9a5..ab2c0c6ea17 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/CountDt.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/CountDt.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,9 +20,6 @@ package ca.uhn.fhir.model.dstu2.composite; import ca.uhn.fhir.model.api.annotation.DatatypeDef; -import ca.uhn.fhir.model.dstu2.composite.QuantityDt; -@DatatypeDef(name="CountDt", profileOf=QuantityDt.class) -public class CountDt extends QuantityDt { - -} +@DatatypeDef(name = "CountDt", profileOf = QuantityDt.class) +public class CountDt extends QuantityDt {} diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/DistanceDt.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/DistanceDt.java index bd82c4e5dc3..41fd7550a0d 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/DistanceDt.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/DistanceDt.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,10 +20,6 @@ package ca.uhn.fhir.model.dstu2.composite; import ca.uhn.fhir.model.api.annotation.DatatypeDef; -import ca.uhn.fhir.model.dstu2.composite.QuantityDt; -import ca.uhn.fhir.model.primitive.IntegerDt; -@DatatypeDef(name="DistanceDt", profileOf=QuantityDt.class) -public class DistanceDt extends QuantityDt { - -} +@DatatypeDef(name = "DistanceDt", profileOf = QuantityDt.class) +public class DistanceDt extends QuantityDt {} diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/DurationDt.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/DurationDt.java index 84cb62d7815..3aa07cb6e4e 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/DurationDt.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/DurationDt.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,9 +20,6 @@ package ca.uhn.fhir.model.dstu2.composite; import ca.uhn.fhir.model.api.annotation.DatatypeDef; -import ca.uhn.fhir.model.dstu2.composite.QuantityDt; -@DatatypeDef(name="DurationDt", profileOf=QuantityDt.class) -public class DurationDt extends QuantityDt { - -} +@DatatypeDef(name = "DurationDt", profileOf = QuantityDt.class) +public class DurationDt extends QuantityDt {} diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/ElementDefinitionDt.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/ElementDefinitionDt.java index 6d76f1b8be2..9427be887e0 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/ElementDefinitionDt.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/ElementDefinitionDt.java @@ -1,24 +1,6 @@ - - - - - - - - - - - - - - - - package ca.uhn.fhir.model.dstu2.composite; import ca.uhn.fhir.i18n.Msg; -import java.util.List; - import ca.uhn.fhir.model.api.BaseIdentifiableElement; import ca.uhn.fhir.model.api.ICompositeDatatype; import ca.uhn.fhir.model.api.IDatatype; @@ -43,6 +25,8 @@ import ca.uhn.fhir.model.primitive.MarkdownDt; import ca.uhn.fhir.model.primitive.StringDt; import ca.uhn.fhir.model.primitive.UriDt; +import java.util.List; + /** * HAPI/FHIR ElementDefinitionDt Datatype * () @@ -50,16 +34,15 @@ import ca.uhn.fhir.model.primitive.UriDt; *

 * <b>Definition:</b>
 * Captures constraints on each element within the resource, profile, or extension
- * </p> 
+ * </p>
 *
 * <p>
 * <b>Requirements:</b>
- * 
- * </p> 
+ *
+ * </p>

    */ -@DatatypeDef(name="ElementDefinitionDt") -public class ElementDefinitionDt - extends BaseIdentifiableElement implements ICompositeDatatype{ +@DatatypeDef(name = "ElementDefinitionDt") +public class ElementDefinitionDt extends BaseIdentifiableElement implements ICompositeDatatype { /** * Constructor @@ -68,233 +51,346 @@ public class ElementDefinitionDt // nothing } - - @Child(name="path", type=StringDt.class, order=0, min=1, max=1, summary=true, modifier=false) + @Child(name = "path", type = StringDt.class, order = 0, min = 1, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="The path identifies the element and is expressed as a \".\"-separated list of ancestor elements, beginning with the name of the resource or extension" - ) + shortDefinition = "", + formalDefinition = + "The path identifies the element and is expressed as a \".\"-separated list of ancestor elements, beginning with the name of the resource or extension") private StringDt myPath; - - @Child(name="representation", type=CodeDt.class, order=1, min=0, max=Child.MAX_UNLIMITED, summary=true, modifier=false) + + @Child( + name = "representation", + type = CodeDt.class, + order = 1, + min = 0, + max = Child.MAX_UNLIMITED, + summary = true, + modifier = false) @Description( - shortDefinition="", - formalDefinition="Codes that define how this element is represented in instances, when the deviation varies from the normal case" - ) + shortDefinition = "", + formalDefinition = + "Codes that define how this element is represented in instances, when the deviation varies from the normal case") private java.util.List> myRepresentation; - - @Child(name="name", type=StringDt.class, order=2, min=0, max=1, summary=true, modifier=false) + + @Child(name = "name", type = StringDt.class, order = 2, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="The name of this element definition (to refer to it from other element definitions using ElementDefinition.nameReference). This is a unique name referring to a specific set of constraints applied to this element. One use of this is to provide a name to different slices of the same element" - ) + shortDefinition = "", + formalDefinition = + "The name of this element definition (to refer to it from other element definitions using ElementDefinition.nameReference). This is a unique name referring to a specific set of constraints applied to this element. One use of this is to provide a name to different slices of the same element") private StringDt myName; - - @Child(name="label", type=StringDt.class, order=3, min=0, max=1, summary=true, modifier=false) + + @Child(name = "label", type = StringDt.class, order = 3, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="The text to display beside the element indicating its meaning or to use to prompt for the element in a user display or form." 
- ) + shortDefinition = "", + formalDefinition = + "The text to display beside the element indicating its meaning or to use to prompt for the element in a user display or form.") private StringDt myLabel; - - @Child(name="code", type=CodingDt.class, order=4, min=0, max=Child.MAX_UNLIMITED, summary=true, modifier=false) + + @Child( + name = "code", + type = CodingDt.class, + order = 4, + min = 0, + max = Child.MAX_UNLIMITED, + summary = true, + modifier = false) @Description( - shortDefinition="", - formalDefinition="A code that provides the meaning for the element according to a particular terminology." - ) + shortDefinition = "", + formalDefinition = + "A code that provides the meaning for the element according to a particular terminology.") private java.util.List myCode; - - @Child(name="slicing", order=5, min=0, max=1, summary=true, modifier=false) + + @Child(name = "slicing", order = 5, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="Indicates that the element is sliced into a set of alternative definitions (i.e. in a structure definition, there are multiple different constraints on a single element in the base resource). Slicing can be used in any resource that has cardinality ..* on the base resource, or any resource with a choice of types. The set of slices is any elements that come after this in the element sequence that have the same path, until a shorter path occurs (the shorter path terminates the set)" - ) + shortDefinition = "", + formalDefinition = + "Indicates that the element is sliced into a set of alternative definitions (i.e. in a structure definition, there are multiple different constraints on a single element in the base resource). Slicing can be used in any resource that has cardinality ..* on the base resource, or any resource with a choice of types. The set of slices is any elements that come after this in the element sequence that have the same path, until a shorter path occurs (the shorter path terminates the set)") private Slicing mySlicing; - - @Child(name="short", type=StringDt.class, order=6, min=0, max=1, summary=true, modifier=false) + + @Child(name = "short", type = StringDt.class, order = 6, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="A concise description of what this element means (e.g. for use in auto-generated summaries)" - ) + shortDefinition = "", + formalDefinition = + "A concise description of what this element means (e.g. for use in auto-generated summaries)") private StringDt myShort; - - @Child(name="definition", type=MarkdownDt.class, order=7, min=0, max=1, summary=true, modifier=false) + + @Child(name = "definition", type = MarkdownDt.class, order = 7, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="Provides a complete explanation of the meaning of the data element for human readability. For the case of elements derived from existing elements (e.g. constraints), the definition SHALL be consistent with the base definition, but convey the meaning of the element in the particular context of use of the resource" - ) + shortDefinition = "", + formalDefinition = + "Provides a complete explanation of the meaning of the data element for human readability. For the case of elements derived from existing elements (e.g. 
constraints), the definition SHALL be consistent with the base definition, but convey the meaning of the element in the particular context of use of the resource") private MarkdownDt myDefinition; - - @Child(name="comments", type=MarkdownDt.class, order=8, min=0, max=1, summary=true, modifier=false) + + @Child(name = "comments", type = MarkdownDt.class, order = 8, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="Explanatory notes and implementation guidance about the data element, including notes about how to use the data properly, exceptions to proper use, etc." - ) + shortDefinition = "", + formalDefinition = + "Explanatory notes and implementation guidance about the data element, including notes about how to use the data properly, exceptions to proper use, etc.") private MarkdownDt myComments; - - @Child(name="requirements", type=MarkdownDt.class, order=9, min=0, max=1, summary=true, modifier=false) + + @Child( + name = "requirements", + type = MarkdownDt.class, + order = 9, + min = 0, + max = 1, + summary = true, + modifier = false) @Description( - shortDefinition="", - formalDefinition="This element is for traceability of why the element was created and why the constraints exist as they do. This may be used to point to source materials or specifications that drove the structure of this element." - ) + shortDefinition = "", + formalDefinition = + "This element is for traceability of why the element was created and why the constraints exist as they do. This may be used to point to source materials or specifications that drove the structure of this element.") private MarkdownDt myRequirements; - - @Child(name="alias", type=StringDt.class, order=10, min=0, max=Child.MAX_UNLIMITED, summary=true, modifier=false) + + @Child( + name = "alias", + type = StringDt.class, + order = 10, + min = 0, + max = Child.MAX_UNLIMITED, + summary = true, + modifier = false) @Description( - shortDefinition="", - formalDefinition="Identifies additional names by which this element might also be known" - ) + shortDefinition = "", + formalDefinition = "Identifies additional names by which this element might also be known") private java.util.List myAlias; - - @Child(name="min", type=IntegerDt.class, order=11, min=0, max=1, summary=true, modifier=false) + + @Child(name = "min", type = IntegerDt.class, order = 11, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="The minimum number of times this element SHALL appear in the instance" - ) + shortDefinition = "", + formalDefinition = "The minimum number of times this element SHALL appear in the instance") private IntegerDt myMin; - - @Child(name="max", type=StringDt.class, order=12, min=0, max=1, summary=true, modifier=false) + + @Child(name = "max", type = StringDt.class, order = 12, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="The maximum number of times this element is permitted to appear in the instance" - ) + shortDefinition = "", + formalDefinition = "The maximum number of times this element is permitted to appear in the instance") private StringDt myMax; - - @Child(name="base", order=13, min=0, max=1, summary=true, modifier=false) + + @Child(name = "base", order = 13, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="Information about the base definition of the element, provided to make it unncessary for tools to trace the 
derviation of the element through the derived and related profiles. This information is only provided where the element definition represents a constraint on another element definition, and must be present if there is a base element definition" - ) + shortDefinition = "", + formalDefinition = + "Information about the base definition of the element, provided to make it unncessary for tools to trace the derviation of the element through the derived and related profiles. This information is only provided where the element definition represents a constraint on another element definition, and must be present if there is a base element definition") private Base myBase; - - @Child(name="type", order=14, min=0, max=Child.MAX_UNLIMITED, summary=true, modifier=false) + + @Child(name = "type", order = 14, min = 0, max = Child.MAX_UNLIMITED, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="The data type or resource that the value of this element is permitted to be" - ) + shortDefinition = "", + formalDefinition = "The data type or resource that the value of this element is permitted to be") private java.util.List myType; - - @Child(name="nameReference", type=StringDt.class, order=15, min=0, max=1, summary=true, modifier=false) + + @Child( + name = "nameReference", + type = StringDt.class, + order = 15, + min = 0, + max = 1, + summary = true, + modifier = false) @Description( - shortDefinition="", - formalDefinition="Identifies the name of a slice defined elsewhere in the profile whose constraints should be applied to the current element" - ) + shortDefinition = "", + formalDefinition = + "Identifies the name of a slice defined elsewhere in the profile whose constraints should be applied to the current element") private StringDt myNameReference; - - @Child(name="defaultValue", type=IDatatype.class, order=16, min=0, max=1, summary=true, modifier=false) + + @Child( + name = "defaultValue", + type = IDatatype.class, + order = 16, + min = 0, + max = 1, + summary = true, + modifier = false) @Description( - shortDefinition="", - formalDefinition="The value that should be used if there is no value stated in the instance (e.g. 'if not otherwise specified, the abstract is false')" - ) + shortDefinition = "", + formalDefinition = + "The value that should be used if there is no value stated in the instance (e.g. 'if not otherwise specified, the abstract is false')") private IDatatype myDefaultValue; - - @Child(name="meaningWhenMissing", type=MarkdownDt.class, order=17, min=0, max=1, summary=true, modifier=false) + + @Child( + name = "meaningWhenMissing", + type = MarkdownDt.class, + order = 17, + min = 0, + max = 1, + summary = true, + modifier = false) @Description( - shortDefinition="", - formalDefinition="The Implicit meaning that is to be understood when this element is missing (e.g. 'when this element is missing, the period is ongoing'" - ) + shortDefinition = "", + formalDefinition = + "The Implicit meaning that is to be understood when this element is missing (e.g. 'when this element is missing, the period is ongoing'") private MarkdownDt myMeaningWhenMissing; - - @Child(name="fixed", type=IDatatype.class, order=18, min=0, max=1, summary=true, modifier=false) + + @Child(name = "fixed", type = IDatatype.class, order = 18, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="Specifies a value that SHALL be exactly the value for this element in the instance. 
For purposes of comparison, non-signficant whitespace is ignored, and all values must be an exact match (case and accent sensitive). Missing elements/attributes must also be missing" - ) + shortDefinition = "", + formalDefinition = + "Specifies a value that SHALL be exactly the value for this element in the instance. For purposes of comparison, non-signficant whitespace is ignored, and all values must be an exact match (case and accent sensitive). Missing elements/attributes must also be missing") private IDatatype myFixed; - - @Child(name="pattern", type=IDatatype.class, order=19, min=0, max=1, summary=true, modifier=false) + + @Child(name = "pattern", type = IDatatype.class, order = 19, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="Specifies a value that the value in the instance SHALL follow - that is, any value in the pattern must be found in the instance. Other additional values may be found too. This is effectively constraint by example. The values of elements present in the pattern must match exactly (case-senstive, accent-sensitive, etc.)" - ) + shortDefinition = "", + formalDefinition = + "Specifies a value that the value in the instance SHALL follow - that is, any value in the pattern must be found in the instance. Other additional values may be found too. This is effectively constraint by example. The values of elements present in the pattern must match exactly (case-senstive, accent-sensitive, etc.)") private IDatatype myPattern; - - @Child(name="example", type=IDatatype.class, order=20, min=0, max=1, summary=true, modifier=false) + + @Child(name = "example", type = IDatatype.class, order = 20, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="A sample value for this element demonstrating the type of information that would typically be captured." - ) + shortDefinition = "", + formalDefinition = + "A sample value for this element demonstrating the type of information that would typically be captured.") private IDatatype myExample; - - @Child(name="minValue", type=IDatatype.class, order=21, min=0, max=1, summary=true, modifier=false) + + @Child(name = "minValue", type = IDatatype.class, order = 21, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="The minimum allowed value for the element. The value is inclusive. This is allowed for the types date, dateTime, instant, time, decimal, integer, and Quantity" - ) + shortDefinition = "", + formalDefinition = + "The minimum allowed value for the element. The value is inclusive. This is allowed for the types date, dateTime, instant, time, decimal, integer, and Quantity") private IDatatype myMinValue; - - @Child(name="maxValue", type=IDatatype.class, order=22, min=0, max=1, summary=true, modifier=false) + + @Child(name = "maxValue", type = IDatatype.class, order = 22, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="The maximum allowed value for the element. The value is inclusive. This is allowed for the types date, dateTime, instant, time, decimal, integer, and Quantity" - ) + shortDefinition = "", + formalDefinition = + "The maximum allowed value for the element. The value is inclusive. 
This is allowed for the types date, dateTime, instant, time, decimal, integer, and Quantity") private IDatatype myMaxValue; - - @Child(name="maxLength", type=IntegerDt.class, order=23, min=0, max=1, summary=true, modifier=false) + + @Child(name = "maxLength", type = IntegerDt.class, order = 23, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="Indicates the maximum length in characters that is permitted to be present in conformant instances and which is expected to be supported by conformant consumers that support the element" - ) + shortDefinition = "", + formalDefinition = + "Indicates the maximum length in characters that is permitted to be present in conformant instances and which is expected to be supported by conformant consumers that support the element") private IntegerDt myMaxLength; - - @Child(name="condition", type=IdDt.class, order=24, min=0, max=Child.MAX_UNLIMITED, summary=true, modifier=false) + + @Child( + name = "condition", + type = IdDt.class, + order = 24, + min = 0, + max = Child.MAX_UNLIMITED, + summary = true, + modifier = false) @Description( - shortDefinition="", - formalDefinition="A reference to an invariant that may make additional statements about the cardinality or value in the instance" - ) + shortDefinition = "", + formalDefinition = + "A reference to an invariant that may make additional statements about the cardinality or value in the instance") private java.util.List myCondition; - - @Child(name="constraint", order=25, min=0, max=Child.MAX_UNLIMITED, summary=true, modifier=false) + + @Child(name = "constraint", order = 25, min = 0, max = Child.MAX_UNLIMITED, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="Formal constraints such as co-occurrence and other constraints that can be computationally evaluated within the context of the instance" - ) + shortDefinition = "", + formalDefinition = + "Formal constraints such as co-occurrence and other constraints that can be computationally evaluated within the context of the instance") private java.util.List myConstraint; - - @Child(name="mustSupport", type=BooleanDt.class, order=26, min=0, max=1, summary=true, modifier=false) + + @Child(name = "mustSupport", type = BooleanDt.class, order = 26, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="If true, implementations that produce or consume resources SHALL provide \"support\" for the element in some meaningful way. If false, the element may be ignored and not supported" - ) + shortDefinition = "", + formalDefinition = + "If true, implementations that produce or consume resources SHALL provide \"support\" for the element in some meaningful way. If false, the element may be ignored and not supported") private BooleanDt myMustSupport; - - @Child(name="isModifier", type=BooleanDt.class, order=27, min=0, max=1, summary=true, modifier=false) + + @Child(name = "isModifier", type = BooleanDt.class, order = 27, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="If true, the value of this element affects the interpretation of the element or resource that contains it, and the value of the element cannot be ignored. Typically, this is used for status, negation and qualification codes. 
The effect of this is that the element cannot be ignored by systems: they SHALL either recognize the element and process it, and/or a pre-determination has been made that it is not relevant to their particular system." - ) + shortDefinition = "", + formalDefinition = + "If true, the value of this element affects the interpretation of the element or resource that contains it, and the value of the element cannot be ignored. Typically, this is used for status, negation and qualification codes. The effect of this is that the element cannot be ignored by systems: they SHALL either recognize the element and process it, and/or a pre-determination has been made that it is not relevant to their particular system.") private BooleanDt myIsModifier; - - @Child(name="isSummary", type=BooleanDt.class, order=28, min=0, max=1, summary=true, modifier=false) + + @Child(name = "isSummary", type = BooleanDt.class, order = 28, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="Whether the element should be included if a client requests a search with the parameter _summary=true" - ) + shortDefinition = "", + formalDefinition = + "Whether the element should be included if a client requests a search with the parameter _summary=true") private BooleanDt myIsSummary; - - @Child(name="binding", order=29, min=0, max=1, summary=true, modifier=false) + + @Child(name = "binding", order = 29, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="Binds to a value set if this element is coded (code, Coding, CodeableConcept)" - ) + shortDefinition = "", + formalDefinition = "Binds to a value set if this element is coded (code, Coding, CodeableConcept)") private Binding myBinding; - - @Child(name="mapping", order=30, min=0, max=Child.MAX_UNLIMITED, summary=true, modifier=false) + + @Child(name = "mapping", order = 30, min = 0, max = Child.MAX_UNLIMITED, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="Identifies a concept from an external specification that roughly corresponds to this element" - ) + shortDefinition = "", + formalDefinition = + "Identifies a concept from an external specification that roughly corresponds to this element") private java.util.List myMapping; - @Override public boolean isEmpty() { - return super.isBaseEmpty() && ca.uhn.fhir.util.ElementUtil.isEmpty( myPath, myRepresentation, myName, myLabel, myCode, mySlicing, myShort, myDefinition, myComments, myRequirements, myAlias, myMin, myMax, myBase, myType, myNameReference, myDefaultValue, myMeaningWhenMissing, myFixed, myPattern, myExample, myMinValue, myMaxValue, myMaxLength, myCondition, myConstraint, myMustSupport, myIsModifier, myIsSummary, myBinding, myMapping); + return super.isBaseEmpty() + && ca.uhn.fhir.util.ElementUtil.isEmpty( + myPath, + myRepresentation, + myName, + myLabel, + myCode, + mySlicing, + myShort, + myDefinition, + myComments, + myRequirements, + myAlias, + myMin, + myMax, + myBase, + myType, + myNameReference, + myDefaultValue, + myMeaningWhenMissing, + myFixed, + myPattern, + myExample, + myMinValue, + myMaxValue, + myMaxLength, + myCondition, + myConstraint, + myMustSupport, + myIsModifier, + myIsSummary, + myBinding, + myMapping); } - + @Override public List getAllPopulatedChildElementsOfType(Class theType) { - return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements(theType, myPath, myRepresentation, myName, myLabel, myCode, mySlicing, myShort, myDefinition, myComments, 
myRequirements, myAlias, myMin, myMax, myBase, myType, myNameReference, myDefaultValue, myMeaningWhenMissing, myFixed, myPattern, myExample, myMinValue, myMaxValue, myMaxLength, myCondition, myConstraint, myMustSupport, myIsModifier, myIsSummary, myBinding, myMapping); + return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements( + theType, + myPath, + myRepresentation, + myName, + myLabel, + myCode, + mySlicing, + myShort, + myDefinition, + myComments, + myRequirements, + myAlias, + myMin, + myMax, + myBase, + myType, + myNameReference, + myDefaultValue, + myMeaningWhenMissing, + myFixed, + myPattern, + myExample, + myMinValue, + myMaxValue, + myMaxLength, + myCondition, + myConstraint, + myMustSupport, + myIsModifier, + myIsSummary, + myBinding, + myMapping); } /** @@ -302,73 +398,69 @@ public class ElementDefinitionDt * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The path identifies the element and is expressed as a \".\"-separated list of ancestor elements, beginning with the name of the resource or extension - *

    + *

    + * Definition: + * The path identifies the element and is expressed as a \".\"-separated list of ancestor elements, beginning with the name of the resource or extension + *

    */ - public StringDt getPathElement() { + public StringDt getPathElement() { if (myPath == null) { myPath = new StringDt(); } return myPath; } - /** * Gets the value(s) for path (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The path identifies the element and is expressed as a \".\"-separated list of ancestor elements, beginning with the name of the resource or extension - *

    + *

    + * Definition: + * The path identifies the element and is expressed as a \".\"-separated list of ancestor elements, beginning with the name of the resource or extension + *

    */ - public String getPath() { + public String getPath() { return getPathElement().getValue(); } /** * Sets the value(s) for path () * - *

    - * Definition: - * The path identifies the element and is expressed as a \".\"-separated list of ancestor elements, beginning with the name of the resource or extension - *

    + *

    + * Definition: + * The path identifies the element and is expressed as a \".\"-separated list of ancestor elements, beginning with the name of the resource or extension + *

    */ public ElementDefinitionDt setPath(StringDt theValue) { myPath = theValue; return this; } - - - /** + /** * Sets the value for path () * - *

    - * Definition: - * The path identifies the element and is expressed as a \".\"-separated list of ancestor elements, beginning with the name of the resource or extension - *

    + *

    + * Definition: + * The path identifies the element and is expressed as a \".\"-separated list of ancestor elements, beginning with the name of the resource or extension + *

    */ - public ElementDefinitionDt setPath( String theString) { - myPath = new StringDt(theString); - return this; + public ElementDefinitionDt setPath(String theString) { + myPath = new StringDt(theString); + return this; } - /** * Gets the value(s) for representation (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * Codes that define how this element is represented in instances, when the deviation varies from the normal case - *

    + *

    + * Definition: + * Codes that define how this element is represented in instances, when the deviation varies from the normal case + *

    */ - public java.util.List> getRepresentation() { + public java.util.List> getRepresentation() { if (myRepresentation == null) { myRepresentation = new java.util.ArrayList>(); } @@ -378,31 +470,30 @@ public class ElementDefinitionDt /** * Sets the value(s) for representation () * - *

    - * Definition: - * Codes that define how this element is represented in instances, when the deviation varies from the normal case - *

    + *

    + * Definition: + * Codes that define how this element is represented in instances, when the deviation varies from the normal case + *

    */ public ElementDefinitionDt setRepresentation(java.util.List> theValue) { myRepresentation = theValue; return this; } - - /** * Add a value for representation () using an enumerated type. This * is intended as a convenience method for situations where the FHIR defined ValueSets are mandatory - * or contain the desirable codes. If you wish to use codes other than those which are built-in, + * or contain the desirable codes. If you wish to use codes other than those which are built-in, * you may also use the {@link #addRepresentation()} method. * - *

    - * Definition: - * Codes that define how this element is represented in instances, when the deviation varies from the normal case - *

    + *

    + * Definition: + * Codes that define how this element is represented in instances, when the deviation varies from the normal case + *

    */ public BoundCodeDt addRepresentation(PropertyRepresentationEnum theValue) { - BoundCodeDt retVal = new BoundCodeDt(PropertyRepresentationEnum.VALUESET_BINDER, theValue); + BoundCodeDt retVal = + new BoundCodeDt(PropertyRepresentationEnum.VALUESET_BINDER, theValue); getRepresentation().add(retVal); return retVal; } @@ -411,10 +502,10 @@ public class ElementDefinitionDt * Gets the first repetition for representation (), * creating it if it does not already exist. * - *

    - * Definition: - * Codes that define how this element is represented in instances, when the deviation varies from the normal case - *

    + *

    + * Definition: + * Codes that define how this element is represented in instances, when the deviation varies from the normal case + *

    */ public BoundCodeDt getRepresentationFirstRep() { if (getRepresentation().size() == 0) { @@ -426,13 +517,14 @@ public class ElementDefinitionDt /** * Add a value for representation () * - *

    - * Definition: - * Codes that define how this element is represented in instances, when the deviation varies from the normal case - *

    + *

    + * Definition: + * Codes that define how this element is represented in instances, when the deviation varies from the normal case + *

    */ public BoundCodeDt addRepresentation() { - BoundCodeDt retVal = new BoundCodeDt(PropertyRepresentationEnum.VALUESET_BINDER); + BoundCodeDt retVal = + new BoundCodeDt(PropertyRepresentationEnum.VALUESET_BINDER); getRepresentation().add(retVal); return retVal; } @@ -440,10 +532,10 @@ public class ElementDefinitionDt /** * Sets the value(s), and clears any existing value(s) for representation () * - *

    - * Definition: - * Codes that define how this element is represented in instances, when the deviation varies from the normal case - *

    + *

    + * Definition: + * Codes that define how this element is represented in instances, when the deviation varies from the normal case + *

    */ public ElementDefinitionDt setRepresentation(PropertyRepresentationEnum theValue) { getRepresentation().clear(); @@ -451,140 +543,131 @@ public class ElementDefinitionDt return this; } - /** * Gets the value(s) for name (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The name of this element definition (to refer to it from other element definitions using ElementDefinition.nameReference). This is a unique name referring to a specific set of constraints applied to this element. One use of this is to provide a name to different slices of the same element - *

    + *

    + * Definition: + * The name of this element definition (to refer to it from other element definitions using ElementDefinition.nameReference). This is a unique name referring to a specific set of constraints applied to this element. One use of this is to provide a name to different slices of the same element + *

    */ - public StringDt getNameElement() { + public StringDt getNameElement() { if (myName == null) { myName = new StringDt(); } return myName; } - /** * Gets the value(s) for name (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The name of this element definition (to refer to it from other element definitions using ElementDefinition.nameReference). This is a unique name referring to a specific set of constraints applied to this element. One use of this is to provide a name to different slices of the same element - *

    + *

    + * Definition: + * The name of this element definition (to refer to it from other element definitions using ElementDefinition.nameReference). This is a unique name referring to a specific set of constraints applied to this element. One use of this is to provide a name to different slices of the same element + *

    */ - public String getName() { + public String getName() { return getNameElement().getValue(); } /** * Sets the value(s) for name () * - *

    - * Definition: - * The name of this element definition (to refer to it from other element definitions using ElementDefinition.nameReference). This is a unique name referring to a specific set of constraints applied to this element. One use of this is to provide a name to different slices of the same element - *

    + *

    + * Definition: + * The name of this element definition (to refer to it from other element definitions using ElementDefinition.nameReference). This is a unique name referring to a specific set of constraints applied to this element. One use of this is to provide a name to different slices of the same element + *

    */ public ElementDefinitionDt setName(StringDt theValue) { myName = theValue; return this; } - - - /** + /** * Sets the value for name () * - *

    - * Definition: - * The name of this element definition (to refer to it from other element definitions using ElementDefinition.nameReference). This is a unique name referring to a specific set of constraints applied to this element. One use of this is to provide a name to different slices of the same element - *

    + *

    + * Definition: + * The name of this element definition (to refer to it from other element definitions using ElementDefinition.nameReference). This is a unique name referring to a specific set of constraints applied to this element. One use of this is to provide a name to different slices of the same element + *

    */ - public ElementDefinitionDt setName( String theString) { - myName = new StringDt(theString); - return this; + public ElementDefinitionDt setName(String theString) { + myName = new StringDt(theString); + return this; } - /** * Gets the value(s) for label (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The text to display beside the element indicating its meaning or to use to prompt for the element in a user display or form. - *

    + *

    + * Definition: + * The text to display beside the element indicating its meaning or to use to prompt for the element in a user display or form. + *

    */ - public StringDt getLabelElement() { + public StringDt getLabelElement() { if (myLabel == null) { myLabel = new StringDt(); } return myLabel; } - /** * Gets the value(s) for label (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The text to display beside the element indicating its meaning or to use to prompt for the element in a user display or form. - *

    + *

    + * Definition: + * The text to display beside the element indicating its meaning or to use to prompt for the element in a user display or form. + *

    */ - public String getLabel() { + public String getLabel() { return getLabelElement().getValue(); } /** * Sets the value(s) for label () * - *

    - * Definition: - * The text to display beside the element indicating its meaning or to use to prompt for the element in a user display or form. - *

    + *

    + * Definition: + * The text to display beside the element indicating its meaning or to use to prompt for the element in a user display or form. + *

    */ public ElementDefinitionDt setLabel(StringDt theValue) { myLabel = theValue; return this; } - - - /** + /** * Sets the value for label () * - *

    - * Definition: - * The text to display beside the element indicating its meaning or to use to prompt for the element in a user display or form. - *

    + *

    + * Definition: + * The text to display beside the element indicating its meaning or to use to prompt for the element in a user display or form. + *

    */ - public ElementDefinitionDt setLabel( String theString) { - myLabel = new StringDt(theString); - return this; + public ElementDefinitionDt setLabel(String theString) { + myLabel = new StringDt(theString); + return this; } - /** * Gets the value(s) for code (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * A code that provides the meaning for the element according to a particular terminology. - *

    + *

    + * Definition: + * A code that provides the meaning for the element according to a particular terminology. + *

    */ - public java.util.List getCode() { + public java.util.List getCode() { if (myCode == null) { myCode = new java.util.ArrayList(); } @@ -594,30 +677,28 @@ public class ElementDefinitionDt /** * Sets the value(s) for code () * - *

    - * Definition: - * A code that provides the meaning for the element according to a particular terminology. - *

    + *

    + * Definition: + * A code that provides the meaning for the element according to a particular terminology. + *

    */ public ElementDefinitionDt setCode(java.util.List theValue) { myCode = theValue; return this; } - - /** * Adds and returns a new value for code () * - *

    - * Definition: - * A code that provides the meaning for the element according to a particular terminology. - *

    + *

    + * Definition: + * A code that provides the meaning for the element according to a particular terminology. + *

    */ public CodingDt addCode() { CodingDt newType = new CodingDt(); getCode().add(newType); - return newType; + return newType; } /** @@ -641,29 +722,29 @@ public class ElementDefinitionDt * Gets the first repetition for code (), * creating it if it does not already exist. * - *

    - * Definition: - * A code that provides the meaning for the element according to a particular terminology. - *

    + *

    + * Definition: + * A code that provides the meaning for the element according to a particular terminology. + *

    */ public CodingDt getCodeFirstRep() { if (getCode().isEmpty()) { return addCode(); } - return getCode().get(0); + return getCode().get(0); } - + /** * Gets the value(s) for slicing (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * Indicates that the element is sliced into a set of alternative definitions (i.e. in a structure definition, there are multiple different constraints on a single element in the base resource). Slicing can be used in any resource that has cardinality ..* on the base resource, or any resource with a choice of types. The set of slices is any elements that come after this in the element sequence that have the same path, until a shorter path occurs (the shorter path terminates the set) - *

    + *

    + * Definition: + * Indicates that the element is sliced into a set of alternative definitions (i.e. in a structure definition, there are multiple different constraints on a single element in the base resource). Slicing can be used in any resource that has cardinality ..* on the base resource, or any resource with a choice of types. The set of slices is any elements that come after this in the element sequence that have the same path, until a shorter path occurs (the shorter path terminates the set) + *

    */ - public Slicing getSlicing() { + public Slicing getSlicing() { if (mySlicing == null) { mySlicing = new Slicing(); } @@ -673,235 +754,216 @@ public class ElementDefinitionDt /** * Sets the value(s) for slicing () * - *

    - * Definition: - * Indicates that the element is sliced into a set of alternative definitions (i.e. in a structure definition, there are multiple different constraints on a single element in the base resource). Slicing can be used in any resource that has cardinality ..* on the base resource, or any resource with a choice of types. The set of slices is any elements that come after this in the element sequence that have the same path, until a shorter path occurs (the shorter path terminates the set) - *

    + *

    + * Definition: + * Indicates that the element is sliced into a set of alternative definitions (i.e. in a structure definition, there are multiple different constraints on a single element in the base resource). Slicing can be used in any resource that has cardinality ..* on the base resource, or any resource with a choice of types. The set of slices is any elements that come after this in the element sequence that have the same path, until a shorter path occurs (the shorter path terminates the set) + *

    */ public ElementDefinitionDt setSlicing(Slicing theValue) { mySlicing = theValue; return this; } - - - /** * Gets the value(s) for short (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * A concise description of what this element means (e.g. for use in auto-generated summaries) - *

    + *

    + * Definition: + * A concise description of what this element means (e.g. for use in auto-generated summaries) + *

    */ - public StringDt getShortElement() { + public StringDt getShortElement() { if (myShort == null) { myShort = new StringDt(); } return myShort; } - /** * Gets the value(s) for short (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * A concise description of what this element means (e.g. for use in auto-generated summaries) - *

    + *

    + * Definition: + * A concise description of what this element means (e.g. for use in auto-generated summaries) + *

    */ - public String getShort() { + public String getShort() { return getShortElement().getValue(); } /** * Sets the value(s) for short () * - *

    - * Definition: - * A concise description of what this element means (e.g. for use in auto-generated summaries) - *

    + *

    + * Definition: + * A concise description of what this element means (e.g. for use in auto-generated summaries) + *

    */ public ElementDefinitionDt setShort(StringDt theValue) { myShort = theValue; return this; } - - - /** + /** * Sets the value for short () * - *

    - * Definition: - * A concise description of what this element means (e.g. for use in auto-generated summaries) - *

    + *

    + * Definition: + * A concise description of what this element means (e.g. for use in auto-generated summaries) + *

    */ - public ElementDefinitionDt setShort( String theString) { - myShort = new StringDt(theString); - return this; + public ElementDefinitionDt setShort(String theString) { + myShort = new StringDt(theString); + return this; } - /** * Gets the value(s) for definition (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * Provides a complete explanation of the meaning of the data element for human readability. For the case of elements derived from existing elements (e.g. constraints), the definition SHALL be consistent with the base definition, but convey the meaning of the element in the particular context of use of the resource - *

    + *

    + * Definition: + * Provides a complete explanation of the meaning of the data element for human readability. For the case of elements derived from existing elements (e.g. constraints), the definition SHALL be consistent with the base definition, but convey the meaning of the element in the particular context of use of the resource + *

    */ - public MarkdownDt getDefinitionElement() { + public MarkdownDt getDefinitionElement() { if (myDefinition == null) { myDefinition = new MarkdownDt(); } return myDefinition; } - /** * Gets the value(s) for definition (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * Provides a complete explanation of the meaning of the data element for human readability. For the case of elements derived from existing elements (e.g. constraints), the definition SHALL be consistent with the base definition, but convey the meaning of the element in the particular context of use of the resource - *

    + *

    + * Definition: + * Provides a complete explanation of the meaning of the data element for human readability. For the case of elements derived from existing elements (e.g. constraints), the definition SHALL be consistent with the base definition, but convey the meaning of the element in the particular context of use of the resource + *

    */ - public String getDefinition() { + public String getDefinition() { return getDefinitionElement().getValue(); } /** * Sets the value(s) for definition () * - *

    - * Definition: - * Provides a complete explanation of the meaning of the data element for human readability. For the case of elements derived from existing elements (e.g. constraints), the definition SHALL be consistent with the base definition, but convey the meaning of the element in the particular context of use of the resource - *

    + *

    + * Definition: + * Provides a complete explanation of the meaning of the data element for human readability. For the case of elements derived from existing elements (e.g. constraints), the definition SHALL be consistent with the base definition, but convey the meaning of the element in the particular context of use of the resource + *

    */ public ElementDefinitionDt setDefinition(MarkdownDt theValue) { myDefinition = theValue; return this; } - - - /** * Gets the value(s) for comments (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * Explanatory notes and implementation guidance about the data element, including notes about how to use the data properly, exceptions to proper use, etc. - *

    + *

    + * Definition: + * Explanatory notes and implementation guidance about the data element, including notes about how to use the data properly, exceptions to proper use, etc. + *

    */ - public MarkdownDt getCommentsElement() { + public MarkdownDt getCommentsElement() { if (myComments == null) { myComments = new MarkdownDt(); } return myComments; } - /** * Gets the value(s) for comments (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * Explanatory notes and implementation guidance about the data element, including notes about how to use the data properly, exceptions to proper use, etc. - *

    + *

    + * Definition: + * Explanatory notes and implementation guidance about the data element, including notes about how to use the data properly, exceptions to proper use, etc. + *

    */ - public String getComments() { + public String getComments() { return getCommentsElement().getValue(); } /** * Sets the value(s) for comments () * - *

    - * Definition: - * Explanatory notes and implementation guidance about the data element, including notes about how to use the data properly, exceptions to proper use, etc. - *

    + *

    + * Definition: + * Explanatory notes and implementation guidance about the data element, including notes about how to use the data properly, exceptions to proper use, etc. + *

    */ public ElementDefinitionDt setComments(MarkdownDt theValue) { myComments = theValue; return this; } - - - /** * Gets the value(s) for requirements (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * This element is for traceability of why the element was created and why the constraints exist as they do. This may be used to point to source materials or specifications that drove the structure of this element. - *

    + *

    + * Definition: + * This element is for traceability of why the element was created and why the constraints exist as they do. This may be used to point to source materials or specifications that drove the structure of this element. + *

    */ - public MarkdownDt getRequirementsElement() { + public MarkdownDt getRequirementsElement() { if (myRequirements == null) { myRequirements = new MarkdownDt(); } return myRequirements; } - /** * Gets the value(s) for requirements (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * This element is for traceability of why the element was created and why the constraints exist as they do. This may be used to point to source materials or specifications that drove the structure of this element. - *

    + *

    + * Definition: + * This element is for traceability of why the element was created and why the constraints exist as they do. This may be used to point to source materials or specifications that drove the structure of this element. + *

    */ - public String getRequirements() { + public String getRequirements() { return getRequirementsElement().getValue(); } /** * Sets the value(s) for requirements () * - *

    - * Definition: - * This element is for traceability of why the element was created and why the constraints exist as they do. This may be used to point to source materials or specifications that drove the structure of this element. - *

    + *

    + * Definition: + * This element is for traceability of why the element was created and why the constraints exist as they do. This may be used to point to source materials or specifications that drove the structure of this element. + *

    */ public ElementDefinitionDt setRequirements(MarkdownDt theValue) { myRequirements = theValue; return this; } - - - /** * Gets the value(s) for alias (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * Identifies additional names by which this element might also be known - *

    + *

    + * Definition: + * Identifies additional names by which this element might also be known + *

    */ - public java.util.List getAlias() { + public java.util.List getAlias() { if (myAlias == null) { myAlias = new java.util.ArrayList(); } @@ -911,30 +973,28 @@ public class ElementDefinitionDt /** * Sets the value(s) for alias () * - *

    - * Definition: - * Identifies additional names by which this element might also be known - *

    + *

    + * Definition: + * Identifies additional names by which this element might also be known + *

    */ public ElementDefinitionDt setAlias(java.util.List theValue) { myAlias = theValue; return this; } - - /** * Adds and returns a new value for alias () * - *

    - * Definition: - * Identifies additional names by which this element might also be known - *

    + *

    + * Definition: + * Identifies additional names by which this element might also be known + *

    */ public StringDt addAlias() { StringDt newType = new StringDt(); getAlias().add(newType); - return newType; + return newType; } /** @@ -958,169 +1018,160 @@ public class ElementDefinitionDt * Gets the first repetition for alias (), * creating it if it does not already exist. * - *

    - * Definition: - * Identifies additional names by which this element might also be known - *

    + *

    + * Definition: + * Identifies additional names by which this element might also be known + *

    */ public StringDt getAliasFirstRep() { if (getAlias().isEmpty()) { return addAlias(); } - return getAlias().get(0); + return getAlias().get(0); } - /** + /** * Adds a new value for alias () * - *

    - * Definition: - * Identifies additional names by which this element might also be known - *

    - * - * @return Returns a reference to this object, to allow for simple chaining. + *

    + * Definition: + * Identifies additional names by which this element might also be known + *

    + * + * @return Returns a reference to this object, to allow for simple chaining. */ - public ElementDefinitionDt addAlias( String theString) { + public ElementDefinitionDt addAlias(String theString) { if (myAlias == null) { myAlias = new java.util.ArrayList(); } myAlias.add(new StringDt(theString)); - return this; + return this; } - /** * Gets the value(s) for min (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The minimum number of times this element SHALL appear in the instance - *

    + *

    + * Definition: + * The minimum number of times this element SHALL appear in the instance + *

    */ - public IntegerDt getMinElement() { + public IntegerDt getMinElement() { if (myMin == null) { myMin = new IntegerDt(); } return myMin; } - /** * Gets the value(s) for min (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The minimum number of times this element SHALL appear in the instance - *

    + *

    + * Definition: + * The minimum number of times this element SHALL appear in the instance + *

    */ - public Integer getMin() { + public Integer getMin() { return getMinElement().getValue(); } /** * Sets the value(s) for min () * - *

    - * Definition: - * The minimum number of times this element SHALL appear in the instance - *

    + *

    + * Definition: + * The minimum number of times this element SHALL appear in the instance + *

    */ public ElementDefinitionDt setMin(IntegerDt theValue) { myMin = theValue; return this; } - - - /** + /** * Sets the value for min () * - *

    - * Definition: - * The minimum number of times this element SHALL appear in the instance - *

    + *

    + * Definition: + * The minimum number of times this element SHALL appear in the instance + *

    */ - public ElementDefinitionDt setMin( int theInteger) { - myMin = new IntegerDt(theInteger); - return this; + public ElementDefinitionDt setMin(int theInteger) { + myMin = new IntegerDt(theInteger); + return this; } - /** * Gets the value(s) for max (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The maximum number of times this element is permitted to appear in the instance - *

    + *

    + * Definition: + * The maximum number of times this element is permitted to appear in the instance + *

    */ - public StringDt getMaxElement() { + public StringDt getMaxElement() { if (myMax == null) { myMax = new StringDt(); } return myMax; } - /** * Gets the value(s) for max (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The maximum number of times this element is permitted to appear in the instance - *

    + *

    + * Definition: + * The maximum number of times this element is permitted to appear in the instance + *

    */ - public String getMax() { + public String getMax() { return getMaxElement().getValue(); } /** * Sets the value(s) for max () * - *

    - * Definition: - * The maximum number of times this element is permitted to appear in the instance - *

    + *

    + * Definition: + * The maximum number of times this element is permitted to appear in the instance + *

    */ public ElementDefinitionDt setMax(StringDt theValue) { myMax = theValue; return this; } - - - /** + /** * Sets the value for max () * - *

    - * Definition: - * The maximum number of times this element is permitted to appear in the instance - *

    + *

    + * Definition: + * The maximum number of times this element is permitted to appear in the instance + *

    */ - public ElementDefinitionDt setMax( String theString) { - myMax = new StringDt(theString); - return this; + public ElementDefinitionDt setMax(String theString) { + myMax = new StringDt(theString); + return this; } - /** * Gets the value(s) for base (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * Information about the base definition of the element, provided to make it unncessary for tools to trace the derviation of the element through the derived and related profiles. This information is only provided where the element definition represents a constraint on another element definition, and must be present if there is a base element definition - *

    + *

    + * Definition: + * Information about the base definition of the element, provided to make it unncessary for tools to trace the derviation of the element through the derived and related profiles. This information is only provided where the element definition represents a constraint on another element definition, and must be present if there is a base element definition + *

    */ - public Base getBase() { + public Base getBase() { if (myBase == null) { myBase = new Base(); } @@ -1130,30 +1181,27 @@ public class ElementDefinitionDt /** * Sets the value(s) for base () * - *

    - * Definition: - * Information about the base definition of the element, provided to make it unncessary for tools to trace the derviation of the element through the derived and related profiles. This information is only provided where the element definition represents a constraint on another element definition, and must be present if there is a base element definition - *

    + *

    + * Definition: + * Information about the base definition of the element, provided to make it unncessary for tools to trace the derviation of the element through the derived and related profiles. This information is only provided where the element definition represents a constraint on another element definition, and must be present if there is a base element definition + *

    */ public ElementDefinitionDt setBase(Base theValue) { myBase = theValue; return this; } - - - /** * Gets the value(s) for type (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The data type or resource that the value of this element is permitted to be - *

    + *

    + * Definition: + * The data type or resource that the value of this element is permitted to be + *

    */ - public java.util.List getType() { + public java.util.List getType() { if (myType == null) { myType = new java.util.ArrayList(); } @@ -1163,30 +1211,28 @@ public class ElementDefinitionDt /** * Sets the value(s) for type () * - *

    - * Definition: - * The data type or resource that the value of this element is permitted to be - *

    + *

    + * Definition: + * The data type or resource that the value of this element is permitted to be + *

    */ public ElementDefinitionDt setType(java.util.List theValue) { myType = theValue; return this; } - - /** * Adds and returns a new value for type () * - *

    - * Definition: - * The data type or resource that the value of this element is permitted to be - *

    + *

    + * Definition: + * The data type or resource that the value of this element is permitted to be + *

    */ public Type addType() { Type newType = new Type(); getType().add(newType); - return newType; + return newType; } /** @@ -1210,379 +1256,349 @@ public class ElementDefinitionDt * Gets the first repetition for type (), * creating it if it does not already exist. * - *

    - * Definition: - * The data type or resource that the value of this element is permitted to be - *

    + *

    + * Definition: + * The data type or resource that the value of this element is permitted to be + *

    */ public Type getTypeFirstRep() { if (getType().isEmpty()) { return addType(); } - return getType().get(0); + return getType().get(0); } - + /** * Gets the value(s) for nameReference (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * Identifies the name of a slice defined elsewhere in the profile whose constraints should be applied to the current element - *

    + *

    + * Definition: + * Identifies the name of a slice defined elsewhere in the profile whose constraints should be applied to the current element + *

    */ - public StringDt getNameReferenceElement() { + public StringDt getNameReferenceElement() { if (myNameReference == null) { myNameReference = new StringDt(); } return myNameReference; } - /** * Gets the value(s) for nameReference (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * Identifies the name of a slice defined elsewhere in the profile whose constraints should be applied to the current element - *

    + *

    + * Definition: + * Identifies the name of a slice defined elsewhere in the profile whose constraints should be applied to the current element + *

    */ - public String getNameReference() { + public String getNameReference() { return getNameReferenceElement().getValue(); } /** * Sets the value(s) for nameReference () * - *

    - * Definition: - * Identifies the name of a slice defined elsewhere in the profile whose constraints should be applied to the current element - *

    + *

    + * Definition: + * Identifies the name of a slice defined elsewhere in the profile whose constraints should be applied to the current element + *

    */ public ElementDefinitionDt setNameReference(StringDt theValue) { myNameReference = theValue; return this; } - - - /** + /** * Sets the value for nameReference () * - *

    - * Definition: - * Identifies the name of a slice defined elsewhere in the profile whose constraints should be applied to the current element - *

    + *

    + * Definition: + * Identifies the name of a slice defined elsewhere in the profile whose constraints should be applied to the current element + *

    */ - public ElementDefinitionDt setNameReference( String theString) { - myNameReference = new StringDt(theString); - return this; + public ElementDefinitionDt setNameReference(String theString) { + myNameReference = new StringDt(theString); + return this; } - /** * Gets the value(s) for defaultValue[x] (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The value that should be used if there is no value stated in the instance (e.g. 'if not otherwise specified, the abstract is false') - *

    + *

    + * Definition: + * The value that should be used if there is no value stated in the instance (e.g. 'if not otherwise specified, the abstract is false') + *

    */ - public IDatatype getDefaultValue() { + public IDatatype getDefaultValue() { return myDefaultValue; } /** * Sets the value(s) for defaultValue[x] () * - *

    - * Definition: - * The value that should be used if there is no value stated in the instance (e.g. 'if not otherwise specified, the abstract is false') - *

    + *

    + * Definition: + * The value that should be used if there is no value stated in the instance (e.g. 'if not otherwise specified, the abstract is false') + *

    */ public ElementDefinitionDt setDefaultValue(IDatatype theValue) { myDefaultValue = theValue; return this; } - - - /** * Gets the value(s) for meaningWhenMissing (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The Implicit meaning that is to be understood when this element is missing (e.g. 'when this element is missing, the period is ongoing' - *

    + *

    + * Definition: + * The Implicit meaning that is to be understood when this element is missing (e.g. 'when this element is missing, the period is ongoing' + *

    */ - public MarkdownDt getMeaningWhenMissingElement() { + public MarkdownDt getMeaningWhenMissingElement() { if (myMeaningWhenMissing == null) { myMeaningWhenMissing = new MarkdownDt(); } return myMeaningWhenMissing; } - /** * Gets the value(s) for meaningWhenMissing (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The Implicit meaning that is to be understood when this element is missing (e.g. 'when this element is missing, the period is ongoing' - *

    + *

    + * Definition: + * The Implicit meaning that is to be understood when this element is missing (e.g. 'when this element is missing, the period is ongoing' + *

    */ - public String getMeaningWhenMissing() { + public String getMeaningWhenMissing() { return getMeaningWhenMissingElement().getValue(); } /** * Sets the value(s) for meaningWhenMissing () * - *

    - * Definition: - * The Implicit meaning that is to be understood when this element is missing (e.g. 'when this element is missing, the period is ongoing' - *

    + *

    + * Definition: + * The Implicit meaning that is to be understood when this element is missing (e.g. 'when this element is missing, the period is ongoing' + *

    */ public ElementDefinitionDt setMeaningWhenMissing(MarkdownDt theValue) { myMeaningWhenMissing = theValue; return this; } - - - /** * Gets the value(s) for fixed[x] (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * Specifies a value that SHALL be exactly the value for this element in the instance. For purposes of comparison, non-signficant whitespace is ignored, and all values must be an exact match (case and accent sensitive). Missing elements/attributes must also be missing - *

    + *

    + * Definition: + * Specifies a value that SHALL be exactly the value for this element in the instance. For purposes of comparison, non-signficant whitespace is ignored, and all values must be an exact match (case and accent sensitive). Missing elements/attributes must also be missing + *

    */ - public IDatatype getFixed() { + public IDatatype getFixed() { return myFixed; } /** * Sets the value(s) for fixed[x] () * - *

    - * Definition: - * Specifies a value that SHALL be exactly the value for this element in the instance. For purposes of comparison, non-signficant whitespace is ignored, and all values must be an exact match (case and accent sensitive). Missing elements/attributes must also be missing - *

    + *

    + * Definition: + * Specifies a value that SHALL be exactly the value for this element in the instance. For purposes of comparison, non-signficant whitespace is ignored, and all values must be an exact match (case and accent sensitive). Missing elements/attributes must also be missing + *

    */ public ElementDefinitionDt setFixed(IDatatype theValue) { myFixed = theValue; return this; } - - - /** * Gets the value(s) for pattern[x] (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * Specifies a value that the value in the instance SHALL follow - that is, any value in the pattern must be found in the instance. Other additional values may be found too. This is effectively constraint by example. The values of elements present in the pattern must match exactly (case-senstive, accent-sensitive, etc.) - *

    + *

    + * Definition: + * Specifies a value that the value in the instance SHALL follow - that is, any value in the pattern must be found in the instance. Other additional values may be found too. This is effectively constraint by example. The values of elements present in the pattern must match exactly (case-senstive, accent-sensitive, etc.) + *

    */ - public IDatatype getPattern() { + public IDatatype getPattern() { return myPattern; } /** * Sets the value(s) for pattern[x] () * - *

    - * Definition: - * Specifies a value that the value in the instance SHALL follow - that is, any value in the pattern must be found in the instance. Other additional values may be found too. This is effectively constraint by example. The values of elements present in the pattern must match exactly (case-senstive, accent-sensitive, etc.) - *

    + *

    + * Definition: + * Specifies a value that the value in the instance SHALL follow - that is, any value in the pattern must be found in the instance. Other additional values may be found too. This is effectively constraint by example. The values of elements present in the pattern must match exactly (case-senstive, accent-sensitive, etc.) + *

    */ public ElementDefinitionDt setPattern(IDatatype theValue) { myPattern = theValue; return this; } - - - /** * Gets the value(s) for example[x] (). * creating it if it does * not exist. Will not return null. * - *
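Unlike the primitive accessors, fixed[x] and pattern[x] are stored as a bare IDatatype with no lazy creation, so their getters can return null. A short sketch under the same assumptions as the earlier one (imports omitted, values invented):

// fixed[x]: the instance value must match this exactly.
ElementDefinitionDt genderElement = new ElementDefinitionDt();
genderElement.setFixed(new StringDt("male"));

// pattern[x]: the values listed here must all be present in the instance.
ElementDefinitionDt codedElement = new ElementDefinitionDt();
codedElement.setPattern(new StringDt("male"));

// No lazy initialisation for the [x] accessors: getFixed()/getPattern() return the stored
// field directly, so they stay null until one of the setters above has been called.
IDatatype fixedValue = genderElement.getFixed();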

    - * Definition: - * A sample value for this element demonstrating the type of information that would typically be captured. - *

    + *

    + * Definition: + * A sample value for this element demonstrating the type of information that would typically be captured. + *

    */ - public IDatatype getExample() { + public IDatatype getExample() { return myExample; } /** * Sets the value(s) for example[x] () * - *

    - * Definition: - * A sample value for this element demonstrating the type of information that would typically be captured. - *

    + *

    + * Definition: + * A sample value for this element demonstrating the type of information that would typically be captured. + *

    */ public ElementDefinitionDt setExample(IDatatype theValue) { myExample = theValue; return this; } - - - /** * Gets the value(s) for minValue[x] (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The minimum allowed value for the element. The value is inclusive. This is allowed for the types date, dateTime, instant, time, decimal, integer, and Quantity - *

    + *

    + * Definition: + * The minimum allowed value for the element. The value is inclusive. This is allowed for the types date, dateTime, instant, time, decimal, integer, and Quantity + *

    */ - public IDatatype getMinValue() { + public IDatatype getMinValue() { return myMinValue; } /** * Sets the value(s) for minValue[x] () * - *

    - * Definition: - * The minimum allowed value for the element. The value is inclusive. This is allowed for the types date, dateTime, instant, time, decimal, integer, and Quantity - *

    + *

    + * Definition: + * The minimum allowed value for the element. The value is inclusive. This is allowed for the types date, dateTime, instant, time, decimal, integer, and Quantity + *

    */ public ElementDefinitionDt setMinValue(IDatatype theValue) { myMinValue = theValue; return this; } - - - /** * Gets the value(s) for maxValue[x] (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The maximum allowed value for the element. The value is inclusive. This is allowed for the types date, dateTime, instant, time, decimal, integer, and Quantity - *

    + *

    + * Definition: + * The maximum allowed value for the element. The value is inclusive. This is allowed for the types date, dateTime, instant, time, decimal, integer, and Quantity + *

    */ - public IDatatype getMaxValue() { + public IDatatype getMaxValue() { return myMaxValue; } /** * Sets the value(s) for maxValue[x] () * - *

    - * Definition: - * The maximum allowed value for the element. The value is inclusive. This is allowed for the types date, dateTime, instant, time, decimal, integer, and Quantity - *

    + *

    + * Definition: + * The maximum allowed value for the element. The value is inclusive. This is allowed for the types date, dateTime, instant, time, decimal, integer, and Quantity + *

    */ public ElementDefinitionDt setMaxValue(IDatatype theValue) { myMaxValue = theValue; return this; } - - - /** * Gets the value(s) for maxLength (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * Indicates the maximum length in characters that is permitted to be present in conformant instances and which is expected to be supported by conformant consumers that support the element - *

    + *

    + * Definition: + * Indicates the maximum length in characters that is permitted to be present in conformant instances and which is expected to be supported by conformant consumers that support the element + *

    */ - public IntegerDt getMaxLengthElement() { + public IntegerDt getMaxLengthElement() { if (myMaxLength == null) { myMaxLength = new IntegerDt(); } return myMaxLength; } - /** * Gets the value(s) for maxLength (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * Indicates the maximum length in characters that is permitted to be present in conformant instances and which is expected to be supported by conformant consumers that support the element - *

    + *

    + * Definition: + * Indicates the maximum length in characters that is permitted to be present in conformant instances and which is expected to be supported by conformant consumers that support the element + *

    */ - public Integer getMaxLength() { + public Integer getMaxLength() { return getMaxLengthElement().getValue(); } /** * Sets the value(s) for maxLength () * - *

    - * Definition: - * Indicates the maximum length in characters that is permitted to be present in conformant instances and which is expected to be supported by conformant consumers that support the element - *

    + *

    + * Definition: + * Indicates the maximum length in characters that is permitted to be present in conformant instances and which is expected to be supported by conformant consumers that support the element + *

    */ public ElementDefinitionDt setMaxLength(IntegerDt theValue) { myMaxLength = theValue; return this; } - - - /** + /** * Sets the value for maxLength () * - *

    - * Definition: - * Indicates the maximum length in characters that is permitted to be present in conformant instances and which is expected to be supported by conformant consumers that support the element - *

    + *

    + * Definition: + * Indicates the maximum length in characters that is permitted to be present in conformant instances and which is expected to be supported by conformant consumers that support the element + *

    */ - public ElementDefinitionDt setMaxLength( int theInteger) { - myMaxLength = new IntegerDt(theInteger); - return this; + public ElementDefinitionDt setMaxLength(int theInteger) { + myMaxLength = new IntegerDt(theInteger); + return this; } - /** * Gets the value(s) for condition (). * creating it if it does * not exist. Will not return null. * - *
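maxLength follows the primitive pattern (an int convenience overload plus an IntegerDt element accessor), while minValue[x]/maxValue[x] take any IDatatype. A minimal sketch with invented limits, imports assumed as before:

// The int overload boxes the value into an IntegerDt and returns "this" for chaining.
ElementDefinitionDt ageElement = new ElementDefinitionDt()
        .setMaxLength(3)
        .setMinValue(new IntegerDt(0))
        .setMaxValue(new IntegerDt(120));

// getMaxLengthElement() creates an empty IntegerDt on demand, so it never returns null;
// getMaxLength() unwraps it and is null only if no value was ever set.
Integer maxLength = ageElement.getMaxLength();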

    - * Definition: - * A reference to an invariant that may make additional statements about the cardinality or value in the instance - *

    + *

    + * Definition: + * A reference to an invariant that may make additional statements about the cardinality or value in the instance + *

    */ - public java.util.List getCondition() { + public java.util.List getCondition() { if (myCondition == null) { myCondition = new java.util.ArrayList(); } @@ -1592,30 +1608,28 @@ public class ElementDefinitionDt /** * Sets the value(s) for condition () * - *

    - * Definition: - * A reference to an invariant that may make additional statements about the cardinality or value in the instance - *

    + *

    + * Definition: + * A reference to an invariant that may make additional statements about the cardinality or value in the instance + *

    */ public ElementDefinitionDt setCondition(java.util.List theValue) { myCondition = theValue; return this; } - - /** * Adds and returns a new value for condition () * - *

    - * Definition: - * A reference to an invariant that may make additional statements about the cardinality or value in the instance - *

    + *

    + * Definition: + * A reference to an invariant that may make additional statements about the cardinality or value in the instance + *

    */ public IdDt addCondition() { IdDt newType = new IdDt(); getCondition().add(newType); - return newType; + return newType; } /** @@ -1639,47 +1653,46 @@ public class ElementDefinitionDt * Gets the first repetition for condition (), * creating it if it does not already exist. * - *

    - * Definition: - * A reference to an invariant that may make additional statements about the cardinality or value in the instance - *

    + *

    + * Definition: + * A reference to an invariant that may make additional statements about the cardinality or value in the instance + *

    */ public IdDt getConditionFirstRep() { if (getCondition().isEmpty()) { return addCondition(); } - return getCondition().get(0); + return getCondition().get(0); } - /** + /** * Adds a new value for condition () * - *

    - * Definition: - * A reference to an invariant that may make additional statements about the cardinality or value in the instance - *

    - * - * @return Returns a reference to this object, to allow for simple chaining. + *

    + * Definition: + * A reference to an invariant that may make additional statements about the cardinality or value in the instance + *

    + * + * @return Returns a reference to this object, to allow for simple chaining. */ - public ElementDefinitionDt addCondition( String theId) { + public ElementDefinitionDt addCondition(String theId) { if (myCondition == null) { myCondition = new java.util.ArrayList(); } myCondition.add(new IdDt(theId)); - return this; + return this; } - /** * Gets the value(s) for constraint (). * creating it if it does * not exist. Will not return null. * - *
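condition shows the generated repeating-element pattern: getCondition() lazily creates the backing list, addCondition() appends an empty IdDt and returns it, the String overload returns this for chaining, and getConditionFirstRep() never returns null. A sketch with invented invariant keys; IdDt#setValue is assumed from the primitive API and does not appear in this hunk:

ElementDefinitionDt constrainedElement = new ElementDefinitionDt();

// String overload: wraps the key in an IdDt, adds it, and returns "this" for chaining.
constrainedElement.addCondition("ele-1");

// No-arg overload: adds an empty IdDt and returns it so it can be populated in place.
IdDt second = constrainedElement.addCondition();
second.setValue("inv-2"); // assumed IdDt API, not shown in this hunk

// First-rep accessor: adds a repetition if the list is still empty, otherwise returns element 0.
IdDt first = constrainedElement.getConditionFirstRep(); // here, the "ele-1" entry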

    - * Definition: - * Formal constraints such as co-occurrence and other constraints that can be computationally evaluated within the context of the instance - *

    + *

    + * Definition: + * Formal constraints such as co-occurrence and other constraints that can be computationally evaluated within the context of the instance + *

    */ - public java.util.List getConstraint() { + public java.util.List getConstraint() { if (myConstraint == null) { myConstraint = new java.util.ArrayList(); } @@ -1689,30 +1702,28 @@ public class ElementDefinitionDt /** * Sets the value(s) for constraint () * - *

    - * Definition: - * Formal constraints such as co-occurrence and other constraints that can be computationally evaluated within the context of the instance - *

    + *

    + * Definition: + * Formal constraints such as co-occurrence and other constraints that can be computationally evaluated within the context of the instance + *

    */ public ElementDefinitionDt setConstraint(java.util.List theValue) { myConstraint = theValue; return this; } - - /** * Adds and returns a new value for constraint () * - *

    - * Definition: - * Formal constraints such as co-occurrence and other constraints that can be computationally evaluated within the context of the instance - *

    + *

    + * Definition: + * Formal constraints such as co-occurrence and other constraints that can be computationally evaluated within the context of the instance + *

    */ public Constraint addConstraint() { Constraint newType = new Constraint(); getConstraint().add(newType); - return newType; + return newType; } /** @@ -1736,212 +1747,200 @@ public class ElementDefinitionDt * Gets the first repetition for constraint (), * creating it if it does not already exist. * - *

    - * Definition: - * Formal constraints such as co-occurrence and other constraints that can be computationally evaluated within the context of the instance - *

    + *

    + * Definition: + * Formal constraints such as co-occurrence and other constraints that can be computationally evaluated within the context of the instance + *

    */ public Constraint getConstraintFirstRep() { if (getConstraint().isEmpty()) { return addConstraint(); } - return getConstraint().get(0); + return getConstraint().get(0); } - + /** * Gets the value(s) for mustSupport (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * If true, implementations that produce or consume resources SHALL provide \"support\" for the element in some meaningful way. If false, the element may be ignored and not supported - *

    + *

    + * Definition: + * If true, implementations that produce or consume resources SHALL provide \"support\" for the element in some meaningful way. If false, the element may be ignored and not supported + *

    */ - public BooleanDt getMustSupportElement() { + public BooleanDt getMustSupportElement() { if (myMustSupport == null) { myMustSupport = new BooleanDt(); } return myMustSupport; } - /** * Gets the value(s) for mustSupport (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * If true, implementations that produce or consume resources SHALL provide \"support\" for the element in some meaningful way. If false, the element may be ignored and not supported - *

    + *

    + * Definition: + * If true, implementations that produce or consume resources SHALL provide \"support\" for the element in some meaningful way. If false, the element may be ignored and not supported + *

    */ - public Boolean getMustSupport() { + public Boolean getMustSupport() { return getMustSupportElement().getValue(); } /** * Sets the value(s) for mustSupport () * - *

    - * Definition: - * If true, implementations that produce or consume resources SHALL provide \"support\" for the element in some meaningful way. If false, the element may be ignored and not supported - *

    + *

    + * Definition: + * If true, implementations that produce or consume resources SHALL provide \"support\" for the element in some meaningful way. If false, the element may be ignored and not supported + *

    */ public ElementDefinitionDt setMustSupport(BooleanDt theValue) { myMustSupport = theValue; return this; } - - - /** + /** * Sets the value for mustSupport () * - *

    - * Definition: - * If true, implementations that produce or consume resources SHALL provide \"support\" for the element in some meaningful way. If false, the element may be ignored and not supported - *

    + *

    + * Definition: + * If true, implementations that produce or consume resources SHALL provide \"support\" for the element in some meaningful way. If false, the element may be ignored and not supported + *

    */ - public ElementDefinitionDt setMustSupport( boolean theBoolean) { - myMustSupport = new BooleanDt(theBoolean); - return this; + public ElementDefinitionDt setMustSupport(boolean theBoolean) { + myMustSupport = new BooleanDt(theBoolean); + return this; } - /** * Gets the value(s) for isModifier (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * If true, the value of this element affects the interpretation of the element or resource that contains it, and the value of the element cannot be ignored. Typically, this is used for status, negation and qualification codes. The effect of this is that the element cannot be ignored by systems: they SHALL either recognize the element and process it, and/or a pre-determination has been made that it is not relevant to their particular system. - *

    + *

    + * Definition: + * If true, the value of this element affects the interpretation of the element or resource that contains it, and the value of the element cannot be ignored. Typically, this is used for status, negation and qualification codes. The effect of this is that the element cannot be ignored by systems: they SHALL either recognize the element and process it, and/or a pre-determination has been made that it is not relevant to their particular system. + *

    */ - public BooleanDt getIsModifierElement() { + public BooleanDt getIsModifierElement() { if (myIsModifier == null) { myIsModifier = new BooleanDt(); } return myIsModifier; } - /** * Gets the value(s) for isModifier (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * If true, the value of this element affects the interpretation of the element or resource that contains it, and the value of the element cannot be ignored. Typically, this is used for status, negation and qualification codes. The effect of this is that the element cannot be ignored by systems: they SHALL either recognize the element and process it, and/or a pre-determination has been made that it is not relevant to their particular system. - *

    + *

    + * Definition: + * If true, the value of this element affects the interpretation of the element or resource that contains it, and the value of the element cannot be ignored. Typically, this is used for status, negation and qualification codes. The effect of this is that the element cannot be ignored by systems: they SHALL either recognize the element and process it, and/or a pre-determination has been made that it is not relevant to their particular system. + *

    */ - public Boolean getIsModifier() { + public Boolean getIsModifier() { return getIsModifierElement().getValue(); } /** * Sets the value(s) for isModifier () * - *

    - * Definition: - * If true, the value of this element affects the interpretation of the element or resource that contains it, and the value of the element cannot be ignored. Typically, this is used for status, negation and qualification codes. The effect of this is that the element cannot be ignored by systems: they SHALL either recognize the element and process it, and/or a pre-determination has been made that it is not relevant to their particular system. - *

    + *

    + * Definition: + * If true, the value of this element affects the interpretation of the element or resource that contains it, and the value of the element cannot be ignored. Typically, this is used for status, negation and qualification codes. The effect of this is that the element cannot be ignored by systems: they SHALL either recognize the element and process it, and/or a pre-determination has been made that it is not relevant to their particular system. + *

    */ public ElementDefinitionDt setIsModifier(BooleanDt theValue) { myIsModifier = theValue; return this; } - - - /** + /** * Sets the value for isModifier () * - *

    - * Definition: - * If true, the value of this element affects the interpretation of the element or resource that contains it, and the value of the element cannot be ignored. Typically, this is used for status, negation and qualification codes. The effect of this is that the element cannot be ignored by systems: they SHALL either recognize the element and process it, and/or a pre-determination has been made that it is not relevant to their particular system. - *

    + *

    + * Definition: + * If true, the value of this element affects the interpretation of the element or resource that contains it, and the value of the element cannot be ignored. Typically, this is used for status, negation and qualification codes. The effect of this is that the element cannot be ignored by systems: they SHALL either recognize the element and process it, and/or a pre-determination has been made that it is not relevant to their particular system. + *

    */ - public ElementDefinitionDt setIsModifier( boolean theBoolean) { - myIsModifier = new BooleanDt(theBoolean); - return this; + public ElementDefinitionDt setIsModifier(boolean theBoolean) { + myIsModifier = new BooleanDt(theBoolean); + return this; } - /** * Gets the value(s) for isSummary (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * Whether the element should be included if a client requests a search with the parameter _summary=true - *

    + *

    + * Definition: + * Whether the element should be included if a client requests a search with the parameter _summary=true + *

    */ - public BooleanDt getIsSummaryElement() { + public BooleanDt getIsSummaryElement() { if (myIsSummary == null) { myIsSummary = new BooleanDt(); } return myIsSummary; } - /** * Gets the value(s) for isSummary (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * Whether the element should be included if a client requests a search with the parameter _summary=true - *

    + *

    + * Definition: + * Whether the element should be included if a client requests a search with the parameter _summary=true + *

    */ - public Boolean getIsSummary() { + public Boolean getIsSummary() { return getIsSummaryElement().getValue(); } /** * Sets the value(s) for isSummary () * - *

    - * Definition: - * Whether the element should be included if a client requests a search with the parameter _summary=true - *

    + *

    + * Definition: + * Whether the element should be included if a client requests a search with the parameter _summary=true + *

    */ public ElementDefinitionDt setIsSummary(BooleanDt theValue) { myIsSummary = theValue; return this; } - - - /** + /** * Sets the value for isSummary () * - *

    - * Definition: - * Whether the element should be included if a client requests a search with the parameter _summary=true - *

    + *

    + * Definition: + * Whether the element should be included if a client requests a search with the parameter _summary=true + *

    */ - public ElementDefinitionDt setIsSummary( boolean theBoolean) { - myIsSummary = new BooleanDt(theBoolean); - return this; + public ElementDefinitionDt setIsSummary(boolean theBoolean) { + myIsSummary = new BooleanDt(theBoolean); + return this; } - /** * Gets the value(s) for binding (). * creating it if it does * not exist. Will not return null. * - *
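mustSupport, isModifier and isSummary all get the same treatment: a BooleanDt element accessor that is created on demand, plus a primitive boolean overload that returns this. A minimal chained sketch, same assumptions as the earlier examples:

ElementDefinitionDt statusElement = new ElementDefinitionDt()
        .setMustSupport(true)
        .setIsModifier(true)   // consumers cannot safely ignore this element
        .setIsSummary(false);

// getMustSupportElement() never returns null; getMustSupport() returns the wrapped Boolean,
// which is null only when the flag was never set.
Boolean mustSupport = statusElement.getMustSupport();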

    - * Definition: - * Binds to a value set if this element is coded (code, Coding, CodeableConcept) - *

    + *

    + * Definition: + * Binds to a value set if this element is coded (code, Coding, CodeableConcept) + *

    */ - public Binding getBinding() { + public Binding getBinding() { if (myBinding == null) { myBinding = new Binding(); } @@ -1951,30 +1950,27 @@ public class ElementDefinitionDt /** * Sets the value(s) for binding () * - *

    - * Definition: - * Binds to a value set if this element is coded (code, Coding, CodeableConcept) - *

    + *

    + * Definition: + * Binds to a value set if this element is coded (code, Coding, CodeableConcept) + *

    */ public ElementDefinitionDt setBinding(Binding theValue) { myBinding = theValue; return this; } - - - /** * Gets the value(s) for mapping (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * Identifies a concept from an external specification that roughly corresponds to this element - *

    + *

    + * Definition: + * Identifies a concept from an external specification that roughly corresponds to this element + *

    */ - public java.util.List getMapping() { + public java.util.List getMapping() { if (myMapping == null) { myMapping = new java.util.ArrayList(); } @@ -1984,30 +1980,28 @@ public class ElementDefinitionDt /** * Sets the value(s) for mapping () * - *

    - * Definition: - * Identifies a concept from an external specification that roughly corresponds to this element - *

    + *

    + * Definition: + * Identifies a concept from an external specification that roughly corresponds to this element + *

    */ public ElementDefinitionDt setMapping(java.util.List theValue) { myMapping = theValue; return this; } - - /** * Adds and returns a new value for mapping () * - *

    - * Definition: - * Identifies a concept from an external specification that roughly corresponds to this element - *

    + *

    + * Definition: + * Identifies a concept from an external specification that roughly corresponds to this element + *

    */ public Mapping addMapping() { Mapping newType = new Mapping(); getMapping().add(newType); - return newType; + return newType; } /** @@ -2031,1689 +2025,1613 @@ public class ElementDefinitionDt * Gets the first repetition for mapping (), * creating it if it does not already exist. * - *

    - * Definition: - * Identifies a concept from an external specification that roughly corresponds to this element - *

    + *

    + * Definition: + * Identifies a concept from an external specification that roughly corresponds to this element + *

    */ public Mapping getMappingFirstRep() { if (getMapping().isEmpty()) { return addMapping(); } - return getMapping().get(0); + return getMapping().get(0); } - + /** * Block class for child element: ElementDefinition.slicing () * - *
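binding and mapping are block-valued children and follow the same conventions: getBinding() creates the nested Binding block on first access, addMapping() appends a new Mapping block and returns it, and getMappingFirstRep() falls back to addMapping() when the list is empty. Binding and Mapping are assumed here to be nested classes of ElementDefinitionDt; only their simple names appear in this hunk:

ElementDefinitionDt codedElement = new ElementDefinitionDt();

// Created on first access, so this never returns null.
ElementDefinitionDt.Binding binding = codedElement.getBinding();

// Adds a new repetition to the mapping list and returns it for further population.
ElementDefinitionDt.Mapping mapping = codedElement.addMapping();

// With exactly one repetition present, the first-rep accessor returns that same instance.
boolean sameInstance = (mapping == codedElement.getMappingFirstRep());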

    - * Definition: - * Indicates that the element is sliced into a set of alternative definitions (i.e. in a structure definition, there are multiple different constraints on a single element in the base resource). Slicing can be used in any resource that has cardinality ..* on the base resource, or any resource with a choice of types. The set of slices is any elements that come after this in the element sequence that have the same path, until a shorter path occurs (the shorter path terminates the set) - *

    - */ - @Block() - public static class Slicing - extends BaseIdentifiableElement implements IResourceBlock { - - @Child(name="discriminator", type=StringDt.class, order=0, min=0, max=Child.MAX_UNLIMITED, summary=true, modifier=false) - @Description( - shortDefinition="", - formalDefinition="Designates which child elements are used to discriminate between the slices when processing an instance. If one or more discriminators are provided, the value of the child elements in the instance data SHALL completely distinguish which slice the element in the resource matches based on the allowed values for those elements in each of the slices" - ) - private java.util.List myDiscriminator; - - @Child(name="description", type=StringDt.class, order=1, min=0, max=1, summary=true, modifier=false) - @Description( - shortDefinition="", - formalDefinition="A human-readable text description of how the slicing works. If there is no discriminator, this is required to be present to provide whatever information is possible about how the slices can be differentiated" - ) - private StringDt myDescription; - - @Child(name="ordered", type=BooleanDt.class, order=2, min=0, max=1, summary=true, modifier=false) - @Description( - shortDefinition="", - formalDefinition="If the matching elements have to occur in the same order as defined in the profile" - ) - private BooleanDt myOrdered; - - @Child(name="rules", type=CodeDt.class, order=3, min=1, max=1, summary=true, modifier=false) - @Description( - shortDefinition="", - formalDefinition="Whether additional slices are allowed or not. When the slices are ordered, profile authors can also say that additional slices are only allowed at the end" - ) - private BoundCodeDt myRules; - - - @Override - public boolean isEmpty() { - return super.isBaseEmpty() && ca.uhn.fhir.util.ElementUtil.isEmpty( myDiscriminator, myDescription, myOrdered, myRules); - } - - @Override - public List getAllPopulatedChildElementsOfType(Class theType) { - return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements(theType, myDiscriminator, myDescription, myOrdered, myRules); - } - - /** - * Gets the value(s) for discriminator (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Designates which child elements are used to discriminate between the slices when processing an instance. If one or more discriminators are provided, the value of the child elements in the instance data SHALL completely distinguish which slice the element in the resource matches based on the allowed values for those elements in each of the slices - *

    - */ - public java.util.List getDiscriminator() { - if (myDiscriminator == null) { - myDiscriminator = new java.util.ArrayList(); - } - return myDiscriminator; - } - - /** - * Sets the value(s) for discriminator () - * - *

    - * Definition: - * Designates which child elements are used to discriminate between the slices when processing an instance. If one or more discriminators are provided, the value of the child elements in the instance data SHALL completely distinguish which slice the element in the resource matches based on the allowed values for those elements in each of the slices - *

    - */ - public Slicing setDiscriminator(java.util.List theValue) { - myDiscriminator = theValue; - return this; - } - - - - /** - * Adds and returns a new value for discriminator () - * - *

    - * Definition: - * Designates which child elements are used to discriminate between the slices when processing an instance. If one or more discriminators are provided, the value of the child elements in the instance data SHALL completely distinguish which slice the element in the resource matches based on the allowed values for those elements in each of the slices - *

    - */ - public StringDt addDiscriminator() { - StringDt newType = new StringDt(); - getDiscriminator().add(newType); - return newType; - } - - /** - * Adds a given new value for discriminator () - * *

    * Definition: - * Designates which child elements are used to discriminate between the slices when processing an instance. If one or more discriminators are provided, the value of the child elements in the instance data SHALL completely distinguish which slice the element in the resource matches based on the allowed values for those elements in each of the slices + * Indicates that the element is sliced into a set of alternative definitions (i.e. in a structure definition, there are multiple different constraints on a single element in the base resource). Slicing can be used in any resource that has cardinality ..* on the base resource, or any resource with a choice of types. The set of slices is any elements that come after this in the element sequence that have the same path, until a shorter path occurs (the shorter path terminates the set) *

    - * @param theValue The discriminator to add (must not be null) */ - public Slicing addDiscriminator(StringDt theValue) { - if (theValue == null) { - throw new NullPointerException(Msg.code(83) + "theValue must not be null"); + @Block() + public static class Slicing extends BaseIdentifiableElement implements IResourceBlock { + + @Child( + name = "discriminator", + type = StringDt.class, + order = 0, + min = 0, + max = Child.MAX_UNLIMITED, + summary = true, + modifier = false) + @Description( + shortDefinition = "", + formalDefinition = + "Designates which child elements are used to discriminate between the slices when processing an instance. If one or more discriminators are provided, the value of the child elements in the instance data SHALL completely distinguish which slice the element in the resource matches based on the allowed values for those elements in each of the slices") + private java.util.List myDiscriminator; + + @Child( + name = "description", + type = StringDt.class, + order = 1, + min = 0, + max = 1, + summary = true, + modifier = false) + @Description( + shortDefinition = "", + formalDefinition = + "A human-readable text description of how the slicing works. If there is no discriminator, this is required to be present to provide whatever information is possible about how the slices can be differentiated") + private StringDt myDescription; + + @Child(name = "ordered", type = BooleanDt.class, order = 2, min = 0, max = 1, summary = true, modifier = false) + @Description( + shortDefinition = "", + formalDefinition = "If the matching elements have to occur in the same order as defined in the profile") + private BooleanDt myOrdered; + + @Child(name = "rules", type = CodeDt.class, order = 3, min = 1, max = 1, summary = true, modifier = false) + @Description( + shortDefinition = "", + formalDefinition = + "Whether additional slices are allowed or not. When the slices are ordered, profile authors can also say that additional slices are only allowed at the end") + private BoundCodeDt myRules; + + @Override + public boolean isEmpty() { + return super.isBaseEmpty() + && ca.uhn.fhir.util.ElementUtil.isEmpty(myDiscriminator, myDescription, myOrdered, myRules); } - getDiscriminator().add(theValue); - return this; - } - /** - * Gets the first repetition for discriminator (), - * creating it if it does not already exist. - * - *
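The Slicing block declared here carries its own fluent accessors, shown in the hunks that follow. A hedged sketch of populating it on its own: the discriminator paths are invented, and SlicingRulesEnum.OPEN is assumed from the slicing-rules value set (the enum constants are not part of this diff):

ElementDefinitionDt.Slicing slicing = new ElementDefinitionDt.Slicing()
        .setDescription("Sliced by identifier system") // String overload wraps a StringDt
        .setOrdered(false)
        .setRules(SlicingRulesEnum.OPEN);               // assumed enum constant

// The String overload wraps the path in a StringDt; the StringDt overload rejects null
// with a NullPointerException carrying Msg.code(83).
slicing.addDiscriminator("identifier.system");
slicing.addDiscriminator(new StringDt("identifier.value"));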

    - * Definition: - * Designates which child elements are used to discriminate between the slices when processing an instance. If one or more discriminators are provided, the value of the child elements in the instance data SHALL completely distinguish which slice the element in the resource matches based on the allowed values for those elements in each of the slices - *

    - */ - public StringDt getDiscriminatorFirstRep() { - if (getDiscriminator().isEmpty()) { - return addDiscriminator(); + @Override + public List getAllPopulatedChildElementsOfType(Class theType) { + return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements( + theType, myDiscriminator, myDescription, myOrdered, myRules); } - return getDiscriminator().get(0); - } - /** - * Adds a new value for discriminator () - * - *

    - * Definition: - * Designates which child elements are used to discriminate between the slices when processing an instance. If one or more discriminators are provided, the value of the child elements in the instance data SHALL completely distinguish which slice the element in the resource matches based on the allowed values for those elements in each of the slices - *

    - * - * @return Returns a reference to this object, to allow for simple chaining. - */ - public Slicing addDiscriminator( String theString) { - if (myDiscriminator == null) { - myDiscriminator = new java.util.ArrayList(); + + /** + * Gets the value(s) for discriminator (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * Designates which child elements are used to discriminate between the slices when processing an instance. If one or more discriminators are provided, the value of the child elements in the instance data SHALL completely distinguish which slice the element in the resource matches based on the allowed values for those elements in each of the slices + *

    + */ + public java.util.List getDiscriminator() { + if (myDiscriminator == null) { + myDiscriminator = new java.util.ArrayList(); + } + return myDiscriminator; } - myDiscriminator.add(new StringDt(theString)); - return this; - } - - /** - * Gets the value(s) for description (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * A human-readable text description of how the slicing works. If there is no discriminator, this is required to be present to provide whatever information is possible about how the slices can be differentiated - *

    - */ - public StringDt getDescriptionElement() { - if (myDescription == null) { - myDescription = new StringDt(); + /** + * Sets the value(s) for discriminator () + * + *

    + * Definition: + * Designates which child elements are used to discriminate between the slices when processing an instance. If one or more discriminators are provided, the value of the child elements in the instance data SHALL completely distinguish which slice the element in the resource matches based on the allowed values for those elements in each of the slices + *

    + */ + public Slicing setDiscriminator(java.util.List theValue) { + myDiscriminator = theValue; + return this; } - return myDescription; - } - - /** - * Gets the value(s) for description (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * A human-readable text description of how the slicing works. If there is no discriminator, this is required to be present to provide whatever information is possible about how the slices can be differentiated - *

    - */ - public String getDescription() { - return getDescriptionElement().getValue(); - } - - /** - * Sets the value(s) for description () - * - *

    - * Definition: - * A human-readable text description of how the slicing works. If there is no discriminator, this is required to be present to provide whatever information is possible about how the slices can be differentiated - *

    - */ - public Slicing setDescription(StringDt theValue) { - myDescription = theValue; - return this; - } - - - - /** - * Sets the value for description () - * - *

    - * Definition: - * A human-readable text description of how the slicing works. If there is no discriminator, this is required to be present to provide whatever information is possible about how the slices can be differentiated - *

    - */ - public Slicing setDescription( String theString) { - myDescription = new StringDt(theString); - return this; - } - - - /** - * Gets the value(s) for ordered (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * If the matching elements have to occur in the same order as defined in the profile - *

    - */ - public BooleanDt getOrderedElement() { - if (myOrdered == null) { - myOrdered = new BooleanDt(); + /** + * Adds and returns a new value for discriminator () + * + *

    + * Definition: + * Designates which child elements are used to discriminate between the slices when processing an instance. If one or more discriminators are provided, the value of the child elements in the instance data SHALL completely distinguish which slice the element in the resource matches based on the allowed values for those elements in each of the slices + *

    + */ + public StringDt addDiscriminator() { + StringDt newType = new StringDt(); + getDiscriminator().add(newType); + return newType; } - return myOrdered; - } - - /** - * Gets the value(s) for ordered (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * If the matching elements have to occur in the same order as defined in the profile - *

    - */ - public Boolean getOrdered() { - return getOrderedElement().getValue(); - } - - /** - * Sets the value(s) for ordered () - * - *

    - * Definition: - * If the matching elements have to occur in the same order as defined in the profile - *

    - */ - public Slicing setOrdered(BooleanDt theValue) { - myOrdered = theValue; - return this; - } - - - - /** - * Sets the value for ordered () - * - *

    - * Definition: - * If the matching elements have to occur in the same order as defined in the profile - *

    - */ - public Slicing setOrdered( boolean theBoolean) { - myOrdered = new BooleanDt(theBoolean); - return this; - } - - - /** - * Gets the value(s) for rules (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Whether additional slices are allowed or not. When the slices are ordered, profile authors can also say that additional slices are only allowed at the end - *

    - */ - public BoundCodeDt getRulesElement() { - if (myRules == null) { - myRules = new BoundCodeDt(SlicingRulesEnum.VALUESET_BINDER); + /** + * Adds a given new value for discriminator () + * + *

    + * Definition: + * Designates which child elements are used to discriminate between the slices when processing an instance. If one or more discriminators are provided, the value of the child elements in the instance data SHALL completely distinguish which slice the element in the resource matches based on the allowed values for those elements in each of the slices + *

    + * @param theValue The discriminator to add (must not be null) + */ + public Slicing addDiscriminator(StringDt theValue) { + if (theValue == null) { + throw new NullPointerException(Msg.code(83) + "theValue must not be null"); + } + getDiscriminator().add(theValue); + return this; + } + + /** + * Gets the first repetition for discriminator (), + * creating it if it does not already exist. + * + *

    + * Definition: + * Designates which child elements are used to discriminate between the slices when processing an instance. If one or more discriminators are provided, the value of the child elements in the instance data SHALL completely distinguish which slice the element in the resource matches based on the allowed values for those elements in each of the slices + *

    + */ + public StringDt getDiscriminatorFirstRep() { + if (getDiscriminator().isEmpty()) { + return addDiscriminator(); + } + return getDiscriminator().get(0); + } + /** + * Adds a new value for discriminator () + * + *

    + * Definition: + * Designates which child elements are used to discriminate between the slices when processing an instance. If one or more discriminators are provided, the value of the child elements in the instance data SHALL completely distinguish which slice the element in the resource matches based on the allowed values for those elements in each of the slices + *

    + * + * @return Returns a reference to this object, to allow for simple chaining. + */ + public Slicing addDiscriminator(String theString) { + if (myDiscriminator == null) { + myDiscriminator = new java.util.ArrayList(); + } + myDiscriminator.add(new StringDt(theString)); + return this; + } + + /** + * Gets the value(s) for description (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * A human-readable text description of how the slicing works. If there is no discriminator, this is required to be present to provide whatever information is possible about how the slices can be differentiated + *

    + */ + public StringDt getDescriptionElement() { + if (myDescription == null) { + myDescription = new StringDt(); + } + return myDescription; + } + + /** + * Gets the value(s) for description (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * A human-readable text description of how the slicing works. If there is no discriminator, this is required to be present to provide whatever information is possible about how the slices can be differentiated + *

    + */ + public String getDescription() { + return getDescriptionElement().getValue(); + } + + /** + * Sets the value(s) for description () + * + *

    + * Definition: + * A human-readable text description of how the slicing works. If there is no discriminator, this is required to be present to provide whatever information is possible about how the slices can be differentiated + *

    + */ + public Slicing setDescription(StringDt theValue) { + myDescription = theValue; + return this; + } + + /** + * Sets the value for description () + * + *

    + * Definition: + * A human-readable text description of how the slicing works. If there is no discriminator, this is required to be present to provide whatever information is possible about how the slices can be differentiated + *

    + */ + public Slicing setDescription(String theString) { + myDescription = new StringDt(theString); + return this; + } + + /** + * Gets the value(s) for ordered (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * If the matching elements have to occur in the same order as defined in the profile + *

    + */ + public BooleanDt getOrderedElement() { + if (myOrdered == null) { + myOrdered = new BooleanDt(); + } + return myOrdered; + } + + /** + * Gets the value(s) for ordered (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * If the matching elements have to occur in the same order as defined in the profile + *

    + */ + public Boolean getOrdered() { + return getOrderedElement().getValue(); + } + + /** + * Sets the value(s) for ordered () + * + *

    + * Definition: + * If the matching elements have to occur in the same order as defined in the profile + *

    + */ + public Slicing setOrdered(BooleanDt theValue) { + myOrdered = theValue; + return this; + } + + /** + * Sets the value for ordered () + * + *

    + * Definition: + * If the matching elements have to occur in the same order as defined in the profile + *

    + */ + public Slicing setOrdered(boolean theBoolean) { + myOrdered = new BooleanDt(theBoolean); + return this; + } + + /** + * Gets the value(s) for rules (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * Whether additional slices are allowed or not. When the slices are ordered, profile authors can also say that additional slices are only allowed at the end + *

    + */ + public BoundCodeDt getRulesElement() { + if (myRules == null) { + myRules = new BoundCodeDt(SlicingRulesEnum.VALUESET_BINDER); + } + return myRules; + } + + /** + * Gets the value(s) for rules (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * Whether additional slices are allowed or not. When the slices are ordered, profile authors can also say that additional slices are only allowed at the end + *

    + */ + public String getRules() { + return getRulesElement().getValue(); + } + + /** + * Sets the value(s) for rules () + * + *

    + * Definition: + * Whether additional slices are allowed or not. When the slices are ordered, profile authors can also say that additional slices are only allowed at the end + *

    + */ + public Slicing setRules(BoundCodeDt theValue) { + myRules = theValue; + return this; + } + + /** + * Sets the value(s) for rules () + * + *

    + * Definition: + * Whether additional slices are allowed or not. When the slices are ordered, profile authors can also say that additional slices are only allowed at the end + *

    + */ + public Slicing setRules(SlicingRulesEnum theValue) { + getRulesElement().setValueAsEnum(theValue); + return this; } - return myRules; } - - /** - * Gets the value(s) for rules (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Whether additional slices are allowed or not. When the slices are ordered, profile authors can also say that additional slices are only allowed at the end - *

    - */ - public String getRules() { - return getRulesElement().getValue(); - } - - /** - * Sets the value(s) for rules () - * - *

    - * Definition: - * Whether additional slices are allowed or not. When the slices are ordered, profile authors can also say that additional slices are only allowed at the end - *

    - */ - public Slicing setRules(BoundCodeDt theValue) { - myRules = theValue; - return this; - } - - - - /** - * Sets the value(s) for rules () - * - *

    - * Definition: - * Whether additional slices are allowed or not. When the slices are ordered, profile authors can also say that additional slices are only allowed at the end - *

    - */ - public Slicing setRules(SlicingRulesEnum theValue) { - getRulesElement().setValueAsEnum(theValue); - return this; - } - - - - - } - - /** * Block class for child element: ElementDefinition.base () * - *

    - * Definition: - * Information about the base definition of the element, provided to make it unncessary for tools to trace the derviation of the element through the derived and related profiles. This information is only provided where the element definition represents a constraint on another element definition, and must be present if there is a base element definition - *

    + *

    + * Definition: + * Information about the base definition of the element, provided to make it unncessary for tools to trace the derviation of the element through the derived and related profiles. This information is only provided where the element definition represents a constraint on another element definition, and must be present if there is a base element definition + *

    */ - @Block() - public static class Base - extends BaseIdentifiableElement implements IResourceBlock { - - @Child(name="path", type=StringDt.class, order=0, min=1, max=1, summary=true, modifier=false) - @Description( - shortDefinition="", - formalDefinition="The Path that identifies the base element - this matches the ElementDefinition.path for that element. Across FHIR, there is only one base definition of any element - that is, an element definition on a [[[StructureDefinition]]] without a StructureDefinition.base" - ) - private StringDt myPath; - - @Child(name="min", type=IntegerDt.class, order=1, min=1, max=1, summary=true, modifier=false) - @Description( - shortDefinition="", - formalDefinition="Minimum cardinality of the base element identified by the path" - ) - private IntegerDt myMin; - - @Child(name="max", type=StringDt.class, order=2, min=1, max=1, summary=true, modifier=false) - @Description( - shortDefinition="", - formalDefinition="Maximum cardinality of the base element identified by the path" - ) - private StringDt myMax; - + @Block() + public static class Base extends BaseIdentifiableElement implements IResourceBlock { - @Override - public boolean isEmpty() { - return super.isBaseEmpty() && ca.uhn.fhir.util.ElementUtil.isEmpty( myPath, myMin, myMax); - } - - @Override - public List getAllPopulatedChildElementsOfType(Class theType) { - return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements(theType, myPath, myMin, myMax); - } + @Child(name = "path", type = StringDt.class, order = 0, min = 1, max = 1, summary = true, modifier = false) + @Description( + shortDefinition = "", + formalDefinition = + "The Path that identifies the base element - this matches the ElementDefinition.path for that element. Across FHIR, there is only one base definition of any element - that is, an element definition on a [[[StructureDefinition]]] without a StructureDefinition.base") + private StringDt myPath; - /** - * Gets the value(s) for path (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * The Path that identifies the base element - this matches the ElementDefinition.path for that element. Across FHIR, there is only one base definition of any element - that is, an element definition on a [[[StructureDefinition]]] without a StructureDefinition.base - *

    - */ - public StringDt getPathElement() { - if (myPath == null) { - myPath = new StringDt(); + @Child(name = "min", type = IntegerDt.class, order = 1, min = 1, max = 1, summary = true, modifier = false) + @Description( + shortDefinition = "", + formalDefinition = "Minimum cardinality of the base element identified by the path") + private IntegerDt myMin; + + @Child(name = "max", type = StringDt.class, order = 2, min = 1, max = 1, summary = true, modifier = false) + @Description( + shortDefinition = "", + formalDefinition = "Maximum cardinality of the base element identified by the path") + private StringDt myMax; + + @Override + public boolean isEmpty() { + return super.isBaseEmpty() && ca.uhn.fhir.util.ElementUtil.isEmpty(myPath, myMin, myMax); } - return myPath; - } - - /** - * Gets the value(s) for path (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * The Path that identifies the base element - this matches the ElementDefinition.path for that element. Across FHIR, there is only one base definition of any element - that is, an element definition on a [[[StructureDefinition]]] without a StructureDefinition.base - *

    - */ - public String getPath() { - return getPathElement().getValue(); - } - - /** - * Sets the value(s) for path () - * - *

    - * Definition: - * The Path that identifies the base element - this matches the ElementDefinition.path for that element. Across FHIR, there is only one base definition of any element - that is, an element definition on a [[[StructureDefinition]]] without a StructureDefinition.base - *

    - */ - public Base setPath(StringDt theValue) { - myPath = theValue; - return this; - } - - - - /** - * Sets the value for path () - * - *

    - * Definition: - * The Path that identifies the base element - this matches the ElementDefinition.path for that element. Across FHIR, there is only one base definition of any element - that is, an element definition on a [[[StructureDefinition]]] without a StructureDefinition.base - *

    - */ - public Base setPath( String theString) { - myPath = new StringDt(theString); - return this; - } - - - /** - * Gets the value(s) for min (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Minimum cardinality of the base element identified by the path - *

    - */ - public IntegerDt getMinElement() { - if (myMin == null) { - myMin = new IntegerDt(); + @Override + public List getAllPopulatedChildElementsOfType(Class theType) { + return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements(theType, myPath, myMin, myMax); } - return myMin; - } - - /** - * Gets the value(s) for min (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Minimum cardinality of the base element identified by the path - *

    - */ - public Integer getMin() { - return getMinElement().getValue(); - } - - /** - * Sets the value(s) for min () - * - *

    - * Definition: - * Minimum cardinality of the base element identified by the path - *

    - */ - public Base setMin(IntegerDt theValue) { - myMin = theValue; - return this; - } - - - - /** - * Sets the value for min () - * - *

    - * Definition: - * Minimum cardinality of the base element identified by the path - *

    - */ - public Base setMin( int theInteger) { - myMin = new IntegerDt(theInteger); - return this; - } - - - /** - * Gets the value(s) for max (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Maximum cardinality of the base element identified by the path - *

    - */ - public StringDt getMaxElement() { - if (myMax == null) { - myMax = new StringDt(); + /** + * Gets the value(s) for path (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * The Path that identifies the base element - this matches the ElementDefinition.path for that element. Across FHIR, there is only one base definition of any element - that is, an element definition on a [[[StructureDefinition]]] without a StructureDefinition.base + *

    + */ + public StringDt getPathElement() { + if (myPath == null) { + myPath = new StringDt(); + } + return myPath; + } + + /** + * Gets the value(s) for path (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * The Path that identifies the base element - this matches the ElementDefinition.path for that element. Across FHIR, there is only one base definition of any element - that is, an element definition on a [[[StructureDefinition]]] without a StructureDefinition.base + *

    + */ + public String getPath() { + return getPathElement().getValue(); + } + + /** + * Sets the value(s) for path () + * + *

    + * Definition: + * The Path that identifies the base element - this matches the ElementDefinition.path for that element. Across FHIR, there is only one base definition of any element - that is, an element definition on a [[[StructureDefinition]]] without a StructureDefinition.base + *

    + */ + public Base setPath(StringDt theValue) { + myPath = theValue; + return this; + } + + /** + * Sets the value for path () + * + *

    + * Definition: + * The Path that identifies the base element - this matches the ElementDefinition.path for that element. Across FHIR, there is only one base definition of any element - that is, an element definition on a [[[StructureDefinition]]] without a StructureDefinition.base + *

    + */ + public Base setPath(String theString) { + myPath = new StringDt(theString); + return this; + } + + /** + * Gets the value(s) for min (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * Minimum cardinality of the base element identified by the path + *

    + */ + public IntegerDt getMinElement() { + if (myMin == null) { + myMin = new IntegerDt(); + } + return myMin; + } + + /** + * Gets the value(s) for min (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * Minimum cardinality of the base element identified by the path + *

    + */ + public Integer getMin() { + return getMinElement().getValue(); + } + + /** + * Sets the value(s) for min () + * + *

    + * Definition: + * Minimum cardinality of the base element identified by the path + *

    + */ + public Base setMin(IntegerDt theValue) { + myMin = theValue; + return this; + } + + /** + * Sets the value for min () + * + *

    + * Definition: + * Minimum cardinality of the base element identified by the path + *

    + */ + public Base setMin(int theInteger) { + myMin = new IntegerDt(theInteger); + return this; + } + + /** + * Gets the value(s) for max (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * Maximum cardinality of the base element identified by the path + *

    + */ + public StringDt getMaxElement() { + if (myMax == null) { + myMax = new StringDt(); + } + return myMax; + } + + /** + * Gets the value(s) for max (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * Maximum cardinality of the base element identified by the path + *

    + */ + public String getMax() { + return getMaxElement().getValue(); + } + + /** + * Sets the value(s) for max () + * + *

    + * Definition: + * Maximum cardinality of the base element identified by the path + *

    + */ + public Base setMax(StringDt theValue) { + myMax = theValue; + return this; + } + + /** + * Sets the value for max () + * + *

    + * Definition: + * Maximum cardinality of the base element identified by the path + *

    + */ + public Base setMax(String theString) { + myMax = new StringDt(theString); + return this; } - return myMax; } - - /** - * Gets the value(s) for max (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Maximum cardinality of the base element identified by the path - *

    - */ - public String getMax() { - return getMaxElement().getValue(); - } - - /** - * Sets the value(s) for max () - * - *

    - * Definition: - * Maximum cardinality of the base element identified by the path - *

    - */ - public Base setMax(StringDt theValue) { - myMax = theValue; - return this; - } - - - - /** - * Sets the value for max () - * - *

    - * Definition: - * Maximum cardinality of the base element identified by the path - *

    - */ - public Base setMax( String theString) { - myMax = new StringDt(theString); - return this; - } - - - - - } - - /** * Block class for child element: ElementDefinition.type () * - *

    - * Definition: - * The data type or resource that the value of this element is permitted to be - *

    - */ - @Block() - public static class Type - extends BaseIdentifiableElement implements IResourceBlock { - - @Child(name="code", type=CodeDt.class, order=0, min=1, max=1, summary=true, modifier=false) - @Description( - shortDefinition="", - formalDefinition="Name of Data type or Resource that is a(or the) type used for this element" - ) - private CodeDt myCode; - - @Child(name="profile", type=UriDt.class, order=1, min=0, max=Child.MAX_UNLIMITED, summary=true, modifier=false) - @Description( - shortDefinition="", - formalDefinition="Identifies a profile structure or implementation Guide that SHALL hold for resources or datatypes referenced as the type of this element. Can be a local reference - to another structure in this profile, or a reference to a structure in another profile. When more than one profile is specified, the content must conform to all of them. When an implementation guide is specified, the resource SHALL conform to at least one profile defined in the implementation guide" - ) - private java.util.List myProfile; - - @Child(name="aggregation", type=CodeDt.class, order=2, min=0, max=Child.MAX_UNLIMITED, summary=true, modifier=false) - @Description( - shortDefinition="", - formalDefinition="If the type is a reference to another resource, how the resource is or can be aggreated - is it a contained resource, or a reference, and if the context is a bundle, is it included in the bundle" - ) - private java.util.List> myAggregation; - - - @Override - public boolean isEmpty() { - return super.isBaseEmpty() && ca.uhn.fhir.util.ElementUtil.isEmpty( myCode, myProfile, myAggregation); - } - - @Override - public List getAllPopulatedChildElementsOfType(Class theType) { - return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements(theType, myCode, myProfile, myAggregation); - } - - /** - * Gets the value(s) for code (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Name of Data type or Resource that is a(or the) type used for this element - *

    - */ - public CodeDt getCodeElement() { - if (myCode == null) { - myCode = new CodeDt(); - } - return myCode; - } - - - /** - * Gets the value(s) for code (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Name of Data type or Resource that is a(or the) type used for this element - *

    - */ - public String getCode() { - return getCodeElement().getValue(); - } - - /** - * Sets the value(s) for code () - * - *

    - * Definition: - * Name of Data type or Resource that is a(or the) type used for this element - *

    - */ - public Type setCode(CodeDt theValue) { - myCode = theValue; - return this; - } - - - - /** - * Sets the value for code () - * - *

    - * Definition: - * Name of Data type or Resource that is a(or the) type used for this element - *

    - */ - public Type setCode( String theCode) { - myCode = new CodeDt(theCode); - return this; - } - - - /** - * Gets the value(s) for profile (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Identifies a profile structure or implementation Guide that SHALL hold for resources or datatypes referenced as the type of this element. Can be a local reference - to another structure in this profile, or a reference to a structure in another profile. When more than one profile is specified, the content must conform to all of them. When an implementation guide is specified, the resource SHALL conform to at least one profile defined in the implementation guide - *

    - */ - public java.util.List getProfile() { - if (myProfile == null) { - myProfile = new java.util.ArrayList(); - } - return myProfile; - } - - /** - * Sets the value(s) for profile () - * - *

    - * Definition: - * Identifies a profile structure or implementation Guide that SHALL hold for resources or datatypes referenced as the type of this element. Can be a local reference - to another structure in this profile, or a reference to a structure in another profile. When more than one profile is specified, the content must conform to all of them. When an implementation guide is specified, the resource SHALL conform to at least one profile defined in the implementation guide - *

    - */ - public Type setProfile(java.util.List theValue) { - myProfile = theValue; - return this; - } - - - - /** - * Adds and returns a new value for profile () - * - *

    - * Definition: - * Identifies a profile structure or implementation Guide that SHALL hold for resources or datatypes referenced as the type of this element. Can be a local reference - to another structure in this profile, or a reference to a structure in another profile. When more than one profile is specified, the content must conform to all of them. When an implementation guide is specified, the resource SHALL conform to at least one profile defined in the implementation guide - *

    - */ - public UriDt addProfile() { - UriDt newType = new UriDt(); - getProfile().add(newType); - return newType; - } - - /** - * Adds a given new value for profile () - * *

    * Definition: - * Identifies a profile structure or implementation Guide that SHALL hold for resources or datatypes referenced as the type of this element. Can be a local reference - to another structure in this profile, or a reference to a structure in another profile. When more than one profile is specified, the content must conform to all of them. When an implementation guide is specified, the resource SHALL conform to at least one profile defined in the implementation guide + * The data type or resource that the value of this element is permitted to be *

    - * @param theValue The profile to add (must not be null) */ - public Type addProfile(UriDt theValue) { - if (theValue == null) { - throw new NullPointerException(Msg.code(84) + "theValue must not be null"); + @Block() + public static class Type extends BaseIdentifiableElement implements IResourceBlock { + + @Child(name = "code", type = CodeDt.class, order = 0, min = 1, max = 1, summary = true, modifier = false) + @Description( + shortDefinition = "", + formalDefinition = "Name of Data type or Resource that is a(or the) type used for this element") + private CodeDt myCode; + + @Child( + name = "profile", + type = UriDt.class, + order = 1, + min = 0, + max = Child.MAX_UNLIMITED, + summary = true, + modifier = false) + @Description( + shortDefinition = "", + formalDefinition = + "Identifies a profile structure or implementation Guide that SHALL hold for resources or datatypes referenced as the type of this element. Can be a local reference - to another structure in this profile, or a reference to a structure in another profile. When more than one profile is specified, the content must conform to all of them. When an implementation guide is specified, the resource SHALL conform to at least one profile defined in the implementation guide") + private java.util.List myProfile; + + @Child( + name = "aggregation", + type = CodeDt.class, + order = 2, + min = 0, + max = Child.MAX_UNLIMITED, + summary = true, + modifier = false) + @Description( + shortDefinition = "", + formalDefinition = + "If the type is a reference to another resource, how the resource is or can be aggreated - is it a contained resource, or a reference, and if the context is a bundle, is it included in the bundle") + private java.util.List> myAggregation; + + @Override + public boolean isEmpty() { + return super.isBaseEmpty() && ca.uhn.fhir.util.ElementUtil.isEmpty(myCode, myProfile, myAggregation); } - getProfile().add(theValue); - return this; - } - /** - * Gets the first repetition for profile (), - * creating it if it does not already exist. - * - *

    - * Definition: - * Identifies a profile structure or implementation Guide that SHALL hold for resources or datatypes referenced as the type of this element. Can be a local reference - to another structure in this profile, or a reference to a structure in another profile. When more than one profile is specified, the content must conform to all of them. When an implementation guide is specified, the resource SHALL conform to at least one profile defined in the implementation guide - *

    - */ - public UriDt getProfileFirstRep() { - if (getProfile().isEmpty()) { - return addProfile(); + @Override + public List getAllPopulatedChildElementsOfType(Class theType) { + return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements(theType, myCode, myProfile, myAggregation); } - return getProfile().get(0); - } - /** - * Adds a new value for profile () - * - *

    - * Definition: - * Identifies a profile structure or implementation Guide that SHALL hold for resources or datatypes referenced as the type of this element. Can be a local reference - to another structure in this profile, or a reference to a structure in another profile. When more than one profile is specified, the content must conform to all of them. When an implementation guide is specified, the resource SHALL conform to at least one profile defined in the implementation guide - *

    - * - * @return Returns a reference to this object, to allow for simple chaining. - */ - public Type addProfile( String theUri) { - if (myProfile == null) { - myProfile = new java.util.ArrayList(); + + /** + * Gets the value(s) for code (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * Name of Data type or Resource that is a(or the) type used for this element + *

    + */ + public CodeDt getCodeElement() { + if (myCode == null) { + myCode = new CodeDt(); + } + return myCode; } - myProfile.add(new UriDt(theUri)); - return this; - } - - /** - * Gets the value(s) for aggregation (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * If the type is a reference to another resource, how the resource is or can be aggreated - is it a contained resource, or a reference, and if the context is a bundle, is it included in the bundle - *

    - */ - public java.util.List> getAggregation() { - if (myAggregation == null) { - myAggregation = new java.util.ArrayList>(); + /** + * Gets the value(s) for code (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * Name of Data type or Resource that is a(or the) type used for this element + *

    + */ + public String getCode() { + return getCodeElement().getValue(); } - return myAggregation; - } - /** - * Sets the value(s) for aggregation () - * - *

    - * Definition: - * If the type is a reference to another resource, how the resource is or can be aggreated - is it a contained resource, or a reference, and if the context is a bundle, is it included in the bundle - *

    - */ - public Type setAggregation(java.util.List> theValue) { - myAggregation = theValue; - return this; - } - - - - /** - * Add a value for aggregation () using an enumerated type. This - * is intended as a convenience method for situations where the FHIR defined ValueSets are mandatory - * or contain the desirable codes. If you wish to use codes other than those which are built-in, - * you may also use the {@link #addAggregation()} method. - * - *

    - * Definition: - * If the type is a reference to another resource, how the resource is or can be aggreated - is it a contained resource, or a reference, and if the context is a bundle, is it included in the bundle - *

    - */ - public BoundCodeDt addAggregation(AggregationModeEnum theValue) { - BoundCodeDt retVal = new BoundCodeDt(AggregationModeEnum.VALUESET_BINDER, theValue); - getAggregation().add(retVal); - return retVal; - } - - /** - * Gets the first repetition for aggregation (), - * creating it if it does not already exist. - * - *

    - * Definition: - * If the type is a reference to another resource, how the resource is or can be aggreated - is it a contained resource, or a reference, and if the context is a bundle, is it included in the bundle - *

    - */ - public BoundCodeDt getAggregationFirstRep() { - if (getAggregation().size() == 0) { - addAggregation(); + /** + * Sets the value(s) for code () + * + *

    + * Definition: + * Name of Data type or Resource that is a(or the) type used for this element + *

    + */ + public Type setCode(CodeDt theValue) { + myCode = theValue; + return this; + } + + /** + * Sets the value for code () + * + *

    + * Definition: + * Name of Data type or Resource that is a(or the) type used for this element + *

    + */ + public Type setCode(String theCode) { + myCode = new CodeDt(theCode); + return this; + } + + /** + * Gets the value(s) for profile (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * Identifies a profile structure or implementation Guide that SHALL hold for resources or datatypes referenced as the type of this element. Can be a local reference - to another structure in this profile, or a reference to a structure in another profile. When more than one profile is specified, the content must conform to all of them. When an implementation guide is specified, the resource SHALL conform to at least one profile defined in the implementation guide + *

    + */ + public java.util.List getProfile() { + if (myProfile == null) { + myProfile = new java.util.ArrayList(); + } + return myProfile; + } + + /** + * Sets the value(s) for profile () + * + *

    + * Definition: + * Identifies a profile structure or implementation Guide that SHALL hold for resources or datatypes referenced as the type of this element. Can be a local reference - to another structure in this profile, or a reference to a structure in another profile. When more than one profile is specified, the content must conform to all of them. When an implementation guide is specified, the resource SHALL conform to at least one profile defined in the implementation guide + *

    + */ + public Type setProfile(java.util.List theValue) { + myProfile = theValue; + return this; + } + + /** + * Adds and returns a new value for profile () + * + *

    + * Definition: + * Identifies a profile structure or implementation Guide that SHALL hold for resources or datatypes referenced as the type of this element. Can be a local reference - to another structure in this profile, or a reference to a structure in another profile. When more than one profile is specified, the content must conform to all of them. When an implementation guide is specified, the resource SHALL conform to at least one profile defined in the implementation guide + *

    + */ + public UriDt addProfile() { + UriDt newType = new UriDt(); + getProfile().add(newType); + return newType; + } + + /** + * Adds a given new value for profile () + * + *

    + * Definition: + * Identifies a profile structure or implementation Guide that SHALL hold for resources or datatypes referenced as the type of this element. Can be a local reference - to another structure in this profile, or a reference to a structure in another profile. When more than one profile is specified, the content must conform to all of them. When an implementation guide is specified, the resource SHALL conform to at least one profile defined in the implementation guide + *

    + * @param theValue The profile to add (must not be null) + */ + public Type addProfile(UriDt theValue) { + if (theValue == null) { + throw new NullPointerException(Msg.code(84) + "theValue must not be null"); + } + getProfile().add(theValue); + return this; + } + + /** + * Gets the first repetition for profile (), + * creating it if it does not already exist. + * + *

    + * Definition: + * Identifies a profile structure or implementation Guide that SHALL hold for resources or datatypes referenced as the type of this element. Can be a local reference - to another structure in this profile, or a reference to a structure in another profile. When more than one profile is specified, the content must conform to all of them. When an implementation guide is specified, the resource SHALL conform to at least one profile defined in the implementation guide + *

+ */
+ public UriDt getProfileFirstRep() {
+ if (getProfile().isEmpty()) {
+ return addProfile();
+ }
+ return getProfile().get(0);
+ }
+ /**
+ * Adds a new value for profile ()
+ *
+ *
+ * Definition:
+ * Identifies a profile structure or implementation Guide that SHALL hold for resources or datatypes referenced as the type of this element. Can be a local reference - to another structure in this profile, or a reference to a structure in another profile. When more than one profile is specified, the content must conform to all of them. When an implementation guide is specified, the resource SHALL conform to at least one profile defined in the implementation guide
+ *
    + * + * @return Returns a reference to this object, to allow for simple chaining. + */ + public Type addProfile(String theUri) { + if (myProfile == null) { + myProfile = new java.util.ArrayList(); + } + myProfile.add(new UriDt(theUri)); + return this; + } + + /** + * Gets the value(s) for aggregation (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * If the type is a reference to another resource, how the resource is or can be aggreated - is it a contained resource, or a reference, and if the context is a bundle, is it included in the bundle + *

    + */ + public java.util.List> getAggregation() { + if (myAggregation == null) { + myAggregation = new java.util.ArrayList>(); + } + return myAggregation; + } + + /** + * Sets the value(s) for aggregation () + * + *

    + * Definition: + * If the type is a reference to another resource, how the resource is or can be aggreated - is it a contained resource, or a reference, and if the context is a bundle, is it included in the bundle + *

    + */ + public Type setAggregation(java.util.List> theValue) { + myAggregation = theValue; + return this; + } + + /** + * Add a value for aggregation () using an enumerated type. This + * is intended as a convenience method for situations where the FHIR defined ValueSets are mandatory + * or contain the desirable codes. If you wish to use codes other than those which are built-in, + * you may also use the {@link #addAggregation()} method. + * + *

    + * Definition: + * If the type is a reference to another resource, how the resource is or can be aggreated - is it a contained resource, or a reference, and if the context is a bundle, is it included in the bundle + *

+ */
+ public BoundCodeDt<AggregationModeEnum> addAggregation(AggregationModeEnum theValue) {
+ BoundCodeDt<AggregationModeEnum> retVal =
+ new BoundCodeDt<AggregationModeEnum>(AggregationModeEnum.VALUESET_BINDER, theValue);
+ getAggregation().add(retVal);
+ return retVal;
+ }
+
+ /**
+ * Gets the first repetition for aggregation (),
+ * creating it if it does not already exist.
+ *
+ *
+ * Definition:
+ * If the type is a reference to another resource, how the resource is or can be aggreated - is it a contained resource, or a reference, and if the context is a bundle, is it included in the bundle
+ *
    + */ + public BoundCodeDt getAggregationFirstRep() { + if (getAggregation().size() == 0) { + addAggregation(); + } + return getAggregation().get(0); + } + + /** + * Add a value for aggregation () + * + *

    + * Definition: + * If the type is a reference to another resource, how the resource is or can be aggreated - is it a contained resource, or a reference, and if the context is a bundle, is it included in the bundle + *

    + */ + public BoundCodeDt addAggregation() { + BoundCodeDt retVal = + new BoundCodeDt(AggregationModeEnum.VALUESET_BINDER); + getAggregation().add(retVal); + return retVal; + } + + /** + * Sets the value(s), and clears any existing value(s) for aggregation () + * + *

    + * Definition: + * If the type is a reference to another resource, how the resource is or can be aggreated - is it a contained resource, or a reference, and if the context is a bundle, is it included in the bundle + *

    + */ + public Type setAggregation(AggregationModeEnum theValue) { + getAggregation().clear(); + addAggregation(theValue); + return this; } - return getAggregation().get(0); } - /** - * Add a value for aggregation () - * - *

    - * Definition: - * If the type is a reference to another resource, how the resource is or can be aggreated - is it a contained resource, or a reference, and if the context is a bundle, is it included in the bundle - *

    - */ - public BoundCodeDt addAggregation() { - BoundCodeDt retVal = new BoundCodeDt(AggregationModeEnum.VALUESET_BINDER); - getAggregation().add(retVal); - return retVal; - } - - /** - * Sets the value(s), and clears any existing value(s) for aggregation () - * - *

    - * Definition: - * If the type is a reference to another resource, how the resource is or can be aggreated - is it a contained resource, or a reference, and if the context is a bundle, is it included in the bundle - *

    - */ - public Type setAggregation(AggregationModeEnum theValue) { - getAggregation().clear(); - addAggregation(theValue); - return this; - } - - - - - } - - /** * Block class for child element: ElementDefinition.constraint () * - *

    - * Definition: - * Formal constraints such as co-occurrence and other constraints that can be computationally evaluated within the context of the instance - *

    + *

    + * Definition: + * Formal constraints such as co-occurrence and other constraints that can be computationally evaluated within the context of the instance + *

    */ - @Block() - public static class Constraint - extends BaseIdentifiableElement implements IResourceBlock { - - @Child(name="key", type=IdDt.class, order=0, min=1, max=1, summary=true, modifier=false) - @Description( - shortDefinition="", - formalDefinition="Allows identification of which elements have their cardinalities impacted by the constraint. Will not be referenced for constraints that do not affect cardinality" - ) - private IdDt myKey; - - @Child(name="requirements", type=StringDt.class, order=1, min=0, max=1, summary=true, modifier=false) - @Description( - shortDefinition="", - formalDefinition="Description of why this constraint is necessary or appropriate" - ) - private StringDt myRequirements; - - @Child(name="severity", type=CodeDt.class, order=2, min=1, max=1, summary=true, modifier=false) - @Description( - shortDefinition="", - formalDefinition="Identifies the impact constraint violation has on the conformance of the instance" - ) - private BoundCodeDt mySeverity; - - @Child(name="human", type=StringDt.class, order=3, min=1, max=1, summary=true, modifier=false) - @Description( - shortDefinition="", - formalDefinition="Text that can be used to describe the constraint in messages identifying that the constraint has been violated" - ) - private StringDt myHuman; - - @Child(name="xpath", type=StringDt.class, order=4, min=1, max=1, summary=true, modifier=false) - @Description( - shortDefinition="", - formalDefinition="An XPath expression of constraint that can be executed to see if this constraint is met" - ) - private StringDt myXpath; - + @Block() + public static class Constraint extends BaseIdentifiableElement implements IResourceBlock { - @Override - public boolean isEmpty() { - return super.isBaseEmpty() && ca.uhn.fhir.util.ElementUtil.isEmpty( myKey, myRequirements, mySeverity, myHuman, myXpath); - } - - @Override - public List getAllPopulatedChildElementsOfType(Class theType) { - return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements(theType, myKey, myRequirements, mySeverity, myHuman, myXpath); - } + @Child(name = "key", type = IdDt.class, order = 0, min = 1, max = 1, summary = true, modifier = false) + @Description( + shortDefinition = "", + formalDefinition = + "Allows identification of which elements have their cardinalities impacted by the constraint. Will not be referenced for constraints that do not affect cardinality") + private IdDt myKey; - /** - * Gets the value(s) for key (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Allows identification of which elements have their cardinalities impacted by the constraint. Will not be referenced for constraints that do not affect cardinality - *

    - */ - public IdDt getKeyElement() { - if (myKey == null) { - myKey = new IdDt(); + @Child( + name = "requirements", + type = StringDt.class, + order = 1, + min = 0, + max = 1, + summary = true, + modifier = false) + @Description( + shortDefinition = "", + formalDefinition = "Description of why this constraint is necessary or appropriate") + private StringDt myRequirements; + + @Child(name = "severity", type = CodeDt.class, order = 2, min = 1, max = 1, summary = true, modifier = false) + @Description( + shortDefinition = "", + formalDefinition = "Identifies the impact constraint violation has on the conformance of the instance") + private BoundCodeDt mySeverity; + + @Child(name = "human", type = StringDt.class, order = 3, min = 1, max = 1, summary = true, modifier = false) + @Description( + shortDefinition = "", + formalDefinition = + "Text that can be used to describe the constraint in messages identifying that the constraint has been violated") + private StringDt myHuman; + + @Child(name = "xpath", type = StringDt.class, order = 4, min = 1, max = 1, summary = true, modifier = false) + @Description( + shortDefinition = "", + formalDefinition = + "An XPath expression of constraint that can be executed to see if this constraint is met") + private StringDt myXpath; + + @Override + public boolean isEmpty() { + return super.isBaseEmpty() + && ca.uhn.fhir.util.ElementUtil.isEmpty(myKey, myRequirements, mySeverity, myHuman, myXpath); } - return myKey; - } - - /** - * Gets the value(s) for key (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Allows identification of which elements have their cardinalities impacted by the constraint. Will not be referenced for constraints that do not affect cardinality - *

    - */ - public String getKey() { - return getKeyElement().getValue(); - } - - /** - * Sets the value(s) for key () - * - *

    - * Definition: - * Allows identification of which elements have their cardinalities impacted by the constraint. Will not be referenced for constraints that do not affect cardinality - *

    - */ - public Constraint setKey(IdDt theValue) { - myKey = theValue; - return this; - } - - - - /** - * Sets the value for key () - * - *

    - * Definition: - * Allows identification of which elements have their cardinalities impacted by the constraint. Will not be referenced for constraints that do not affect cardinality - *

    - */ - public Constraint setKey( String theId) { - myKey = new IdDt(theId); - return this; - } - - - /** - * Gets the value(s) for requirements (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Description of why this constraint is necessary or appropriate - *

    - */ - public StringDt getRequirementsElement() { - if (myRequirements == null) { - myRequirements = new StringDt(); + @Override + public List getAllPopulatedChildElementsOfType(Class theType) { + return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements( + theType, myKey, myRequirements, mySeverity, myHuman, myXpath); } - return myRequirements; - } - - /** - * Gets the value(s) for requirements (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Description of why this constraint is necessary or appropriate - *

    - */ - public String getRequirements() { - return getRequirementsElement().getValue(); - } - - /** - * Sets the value(s) for requirements () - * - *

    - * Definition: - * Description of why this constraint is necessary or appropriate - *

    - */ - public Constraint setRequirements(StringDt theValue) { - myRequirements = theValue; - return this; - } - - - - /** - * Sets the value for requirements () - * - *

    - * Definition: - * Description of why this constraint is necessary or appropriate - *

    - */ - public Constraint setRequirements( String theString) { - myRequirements = new StringDt(theString); - return this; - } - - - /** - * Gets the value(s) for severity (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Identifies the impact constraint violation has on the conformance of the instance - *

- */
- public BoundCodeDt<ConstraintSeverityEnum> getSeverityElement() {
- if (mySeverity == null) {
- mySeverity = new BoundCodeDt<ConstraintSeverityEnum>(ConstraintSeverityEnum.VALUESET_BINDER);
+ /**
+ * Gets the value(s) for key ().
+ * creating it if it does
+ * not exist. Will not return null.
+ *
+ *
+ * Definition:
+ * Allows identification of which elements have their cardinalities impacted by the constraint. Will not be referenced for constraints that do not affect cardinality
+ *
    + */ + public IdDt getKeyElement() { + if (myKey == null) { + myKey = new IdDt(); + } + return myKey; } - return mySeverity; - } - - /** - * Gets the value(s) for severity (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Identifies the impact constraint violation has on the conformance of the instance - *

    - */ - public String getSeverity() { - return getSeverityElement().getValue(); - } - - /** - * Sets the value(s) for severity () - * - *

    - * Definition: - * Identifies the impact constraint violation has on the conformance of the instance - *

    - */ - public Constraint setSeverity(BoundCodeDt theValue) { - mySeverity = theValue; - return this; - } - - - - /** - * Sets the value(s) for severity () - * - *

    - * Definition: - * Identifies the impact constraint violation has on the conformance of the instance - *

    - */ - public Constraint setSeverity(ConstraintSeverityEnum theValue) { - getSeverityElement().setValueAsEnum(theValue); - return this; - } - - - /** - * Gets the value(s) for human (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Text that can be used to describe the constraint in messages identifying that the constraint has been violated - *

    - */ - public StringDt getHumanElement() { - if (myHuman == null) { - myHuman = new StringDt(); + /** + * Gets the value(s) for key (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * Allows identification of which elements have their cardinalities impacted by the constraint. Will not be referenced for constraints that do not affect cardinality + *

    + */ + public String getKey() { + return getKeyElement().getValue(); } - return myHuman; - } - - /** - * Gets the value(s) for human (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Text that can be used to describe the constraint in messages identifying that the constraint has been violated - *

    - */ - public String getHuman() { - return getHumanElement().getValue(); - } - - /** - * Sets the value(s) for human () - * - *

    - * Definition: - * Text that can be used to describe the constraint in messages identifying that the constraint has been violated - *

    - */ - public Constraint setHuman(StringDt theValue) { - myHuman = theValue; - return this; - } - - - - /** - * Sets the value for human () - * - *

    - * Definition: - * Text that can be used to describe the constraint in messages identifying that the constraint has been violated - *

    - */ - public Constraint setHuman( String theString) { - myHuman = new StringDt(theString); - return this; - } - - - /** - * Gets the value(s) for xpath (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * An XPath expression of constraint that can be executed to see if this constraint is met - *

    - */ - public StringDt getXpathElement() { - if (myXpath == null) { - myXpath = new StringDt(); + /** + * Sets the value(s) for key () + * + *

    + * Definition: + * Allows identification of which elements have their cardinalities impacted by the constraint. Will not be referenced for constraints that do not affect cardinality + *

    + */ + public Constraint setKey(IdDt theValue) { + myKey = theValue; + return this; + } + + /** + * Sets the value for key () + * + *

    + * Definition: + * Allows identification of which elements have their cardinalities impacted by the constraint. Will not be referenced for constraints that do not affect cardinality + *

    + */ + public Constraint setKey(String theId) { + myKey = new IdDt(theId); + return this; + } + + /** + * Gets the value(s) for requirements (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * Description of why this constraint is necessary or appropriate + *

    + */ + public StringDt getRequirementsElement() { + if (myRequirements == null) { + myRequirements = new StringDt(); + } + return myRequirements; + } + + /** + * Gets the value(s) for requirements (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * Description of why this constraint is necessary or appropriate + *

    + */ + public String getRequirements() { + return getRequirementsElement().getValue(); + } + + /** + * Sets the value(s) for requirements () + * + *

    + * Definition: + * Description of why this constraint is necessary or appropriate + *

    + */ + public Constraint setRequirements(StringDt theValue) { + myRequirements = theValue; + return this; + } + + /** + * Sets the value for requirements () + * + *

    + * Definition: + * Description of why this constraint is necessary or appropriate + *

    + */ + public Constraint setRequirements(String theString) { + myRequirements = new StringDt(theString); + return this; + } + + /** + * Gets the value(s) for severity (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * Identifies the impact constraint violation has on the conformance of the instance + *

    + */ + public BoundCodeDt getSeverityElement() { + if (mySeverity == null) { + mySeverity = new BoundCodeDt(ConstraintSeverityEnum.VALUESET_BINDER); + } + return mySeverity; + } + + /** + * Gets the value(s) for severity (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * Identifies the impact constraint violation has on the conformance of the instance + *

    + */ + public String getSeverity() { + return getSeverityElement().getValue(); + } + + /** + * Sets the value(s) for severity () + * + *

    + * Definition: + * Identifies the impact constraint violation has on the conformance of the instance + *

    + */ + public Constraint setSeverity(BoundCodeDt theValue) { + mySeverity = theValue; + return this; + } + + /** + * Sets the value(s) for severity () + * + *

    + * Definition: + * Identifies the impact constraint violation has on the conformance of the instance + *

    + */ + public Constraint setSeverity(ConstraintSeverityEnum theValue) { + getSeverityElement().setValueAsEnum(theValue); + return this; + } + + /** + * Gets the value(s) for human (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * Text that can be used to describe the constraint in messages identifying that the constraint has been violated + *

    + */ + public StringDt getHumanElement() { + if (myHuman == null) { + myHuman = new StringDt(); + } + return myHuman; + } + + /** + * Gets the value(s) for human (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * Text that can be used to describe the constraint in messages identifying that the constraint has been violated + *

    + */ + public String getHuman() { + return getHumanElement().getValue(); + } + + /** + * Sets the value(s) for human () + * + *

    + * Definition: + * Text that can be used to describe the constraint in messages identifying that the constraint has been violated + *

    + */ + public Constraint setHuman(StringDt theValue) { + myHuman = theValue; + return this; + } + + /** + * Sets the value for human () + * + *

    + * Definition: + * Text that can be used to describe the constraint in messages identifying that the constraint has been violated + *

    + */ + public Constraint setHuman(String theString) { + myHuman = new StringDt(theString); + return this; + } + + /** + * Gets the value(s) for xpath (). + * creating it if it does + * not exist. Will not return null. + * + *

+ * Definition:
+ * An XPath expression of constraint that can be executed to see if this constraint is met
+ *
+ */
+ public StringDt getXpathElement() {
+ if (myXpath == null) {
+ myXpath = new StringDt();
+ }
+ return myXpath;
+ }
+
+ /**
+ * Gets the value(s) for xpath ().
+ * creating it if it does
+ * not exist. Will not return null.
+ *
+ *
    + * Definition: + * An XPath expression of constraint that can be executed to see if this constraint is met + *

    + */ + public String getXpath() { + return getXpathElement().getValue(); + } + + /** + * Sets the value(s) for xpath () + * + *

    + * Definition: + * An XPath expression of constraint that can be executed to see if this constraint is met + *

    + */ + public Constraint setXpath(StringDt theValue) { + myXpath = theValue; + return this; + } + + /** + * Sets the value for xpath () + * + *

    + * Definition: + * An XPath expression of constraint that can be executed to see if this constraint is met + *

    + */ + public Constraint setXpath(String theString) { + myXpath = new StringDt(theString); + return this; } - return myXpath; } - - /** - * Gets the value(s) for xpath (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * An XPath expression of constraint that can be executed to see if this constraint is met - *

    - */ - public String getXpath() { - return getXpathElement().getValue(); - } - - /** - * Sets the value(s) for xpath () - * - *

    - * Definition: - * An XPath expression of constraint that can be executed to see if this constraint is met - *

    - */ - public Constraint setXpath(StringDt theValue) { - myXpath = theValue; - return this; - } - - - - /** - * Sets the value for xpath () - * - *

    - * Definition: - * An XPath expression of constraint that can be executed to see if this constraint is met - *

    - */ - public Constraint setXpath( String theString) { - myXpath = new StringDt(theString); - return this; - } - - - - - } - - /** * Block class for child element: ElementDefinition.binding () * - *

    - * Definition: - * Binds to a value set if this element is coded (code, Coding, CodeableConcept) - *

    + *

    + * Definition: + * Binds to a value set if this element is coded (code, Coding, CodeableConcept) + *

    */ - @Block() - public static class Binding - extends BaseIdentifiableElement implements IResourceBlock { - - @Child(name="strength", type=CodeDt.class, order=0, min=1, max=1, summary=true, modifier=false) - @Description( - shortDefinition="", - formalDefinition="Indicates the degree of conformance expectations associated with this binding - that is, the degree to which the provided value set must be adhered to in the instances" - ) - private BoundCodeDt myStrength; - - @Child(name="description", type=StringDt.class, order=1, min=0, max=1, summary=true, modifier=false) - @Description( - shortDefinition="", - formalDefinition="Describes the intended use of this particular set of codes" - ) - private StringDt myDescription; - - @Child(name="valueSet", order=2, min=0, max=1, summary=true, modifier=false, type={ - UriDt.class, ValueSet.class }) - @Description( - shortDefinition="", - formalDefinition="Points to the value set or external definition (e.g. implicit value set) that identifies the set of codes to be used" - ) - private IDatatype myValueSet; - + @Block() + public static class Binding extends BaseIdentifiableElement implements IResourceBlock { - @Override - public boolean isEmpty() { - return super.isBaseEmpty() && ca.uhn.fhir.util.ElementUtil.isEmpty( myStrength, myDescription, myValueSet); - } - - @Override - public List getAllPopulatedChildElementsOfType(Class theType) { - return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements(theType, myStrength, myDescription, myValueSet); - } + @Child(name = "strength", type = CodeDt.class, order = 0, min = 1, max = 1, summary = true, modifier = false) + @Description( + shortDefinition = "", + formalDefinition = + "Indicates the degree of conformance expectations associated with this binding - that is, the degree to which the provided value set must be adhered to in the instances") + private BoundCodeDt myStrength; - /** - * Gets the value(s) for strength (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Indicates the degree of conformance expectations associated with this binding - that is, the degree to which the provided value set must be adhered to in the instances - *

    - */ - public BoundCodeDt getStrengthElement() { - if (myStrength == null) { - myStrength = new BoundCodeDt(BindingStrengthEnum.VALUESET_BINDER); + @Child( + name = "description", + type = StringDt.class, + order = 1, + min = 0, + max = 1, + summary = true, + modifier = false) + @Description( + shortDefinition = "", + formalDefinition = "Describes the intended use of this particular set of codes") + private StringDt myDescription; + + @Child( + name = "valueSet", + order = 2, + min = 0, + max = 1, + summary = true, + modifier = false, + type = {UriDt.class, ValueSet.class}) + @Description( + shortDefinition = "", + formalDefinition = + "Points to the value set or external definition (e.g. implicit value set) that identifies the set of codes to be used") + private IDatatype myValueSet; + + @Override + public boolean isEmpty() { + return super.isBaseEmpty() && ca.uhn.fhir.util.ElementUtil.isEmpty(myStrength, myDescription, myValueSet); } - return myStrength; - } - - /** - * Gets the value(s) for strength (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Indicates the degree of conformance expectations associated with this binding - that is, the degree to which the provided value set must be adhered to in the instances - *

    - */ - public String getStrength() { - return getStrengthElement().getValue(); - } - - /** - * Sets the value(s) for strength () - * - *

    - * Definition: - * Indicates the degree of conformance expectations associated with this binding - that is, the degree to which the provided value set must be adhered to in the instances - *

    - */ - public Binding setStrength(BoundCodeDt theValue) { - myStrength = theValue; - return this; - } - - - - /** - * Sets the value(s) for strength () - * - *

    - * Definition: - * Indicates the degree of conformance expectations associated with this binding - that is, the degree to which the provided value set must be adhered to in the instances - *

    - */ - public Binding setStrength(BindingStrengthEnum theValue) { - getStrengthElement().setValueAsEnum(theValue); - return this; - } - - - /** - * Gets the value(s) for description (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Describes the intended use of this particular set of codes - *

    - */ - public StringDt getDescriptionElement() { - if (myDescription == null) { - myDescription = new StringDt(); + @Override + public List getAllPopulatedChildElementsOfType(Class theType) { + return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements( + theType, myStrength, myDescription, myValueSet); + } + + /** + * Gets the value(s) for strength (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * Indicates the degree of conformance expectations associated with this binding - that is, the degree to which the provided value set must be adhered to in the instances + *

    + */ + public BoundCodeDt getStrengthElement() { + if (myStrength == null) { + myStrength = new BoundCodeDt(BindingStrengthEnum.VALUESET_BINDER); + } + return myStrength; + } + + /** + * Gets the value(s) for strength (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * Indicates the degree of conformance expectations associated with this binding - that is, the degree to which the provided value set must be adhered to in the instances + *

    + */ + public String getStrength() { + return getStrengthElement().getValue(); + } + + /** + * Sets the value(s) for strength () + * + *

    + * Definition: + * Indicates the degree of conformance expectations associated with this binding - that is, the degree to which the provided value set must be adhered to in the instances + *

    + */ + public Binding setStrength(BoundCodeDt theValue) { + myStrength = theValue; + return this; + } + + /** + * Sets the value(s) for strength () + * + *

    + * Definition: + * Indicates the degree of conformance expectations associated with this binding - that is, the degree to which the provided value set must be adhered to in the instances + *

    + */ + public Binding setStrength(BindingStrengthEnum theValue) { + getStrengthElement().setValueAsEnum(theValue); + return this; + } + + /** + * Gets the value(s) for description (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * Describes the intended use of this particular set of codes + *

    + */ + public StringDt getDescriptionElement() { + if (myDescription == null) { + myDescription = new StringDt(); + } + return myDescription; + } + + /** + * Gets the value(s) for description (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * Describes the intended use of this particular set of codes + *

    + */ + public String getDescription() { + return getDescriptionElement().getValue(); + } + + /** + * Sets the value(s) for description () + * + *

    + * Definition: + * Describes the intended use of this particular set of codes + *

    + */ + public Binding setDescription(StringDt theValue) { + myDescription = theValue; + return this; + } + + /** + * Sets the value for description () + * + *

    + * Definition: + * Describes the intended use of this particular set of codes + *

    + */ + public Binding setDescription(String theString) { + myDescription = new StringDt(theString); + return this; + } + + /** + * Gets the value(s) for valueSet[x] (). + * creating it if it does + * not exist. Will not return null. + * + *

+ * Definition:
+ * Points to the value set or external definition (e.g. implicit value set) that identifies the set of codes to be used
+ *
+ */
+ public IDatatype getValueSet() {
+ return myValueSet;
+ }
+
+ /**
+ * Sets the value(s) for valueSet[x] ()
+ *
+ *
    + * Definition: + * Points to the value set or external definition (e.g. implicit value set) that identifies the set of codes to be used + *

    + */ + public Binding setValueSet(IDatatype theValue) { + myValueSet = theValue; + return this; } - return myDescription; } - - /** - * Gets the value(s) for description (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Describes the intended use of this particular set of codes - *

    - */ - public String getDescription() { - return getDescriptionElement().getValue(); - } - - /** - * Sets the value(s) for description () - * - *

    - * Definition: - * Describes the intended use of this particular set of codes - *

    - */ - public Binding setDescription(StringDt theValue) { - myDescription = theValue; - return this; - } - - - - /** - * Sets the value for description () - * - *

    - * Definition: - * Describes the intended use of this particular set of codes - *

    - */ - public Binding setDescription( String theString) { - myDescription = new StringDt(theString); - return this; - } - - - /** - * Gets the value(s) for valueSet[x] (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Points to the value set or external definition (e.g. implicit value set) that identifies the set of codes to be used - *

    - */ - public IDatatype getValueSet() { - return myValueSet; - } - - /** - * Sets the value(s) for valueSet[x] () - * - *

    - * Definition: - * Points to the value set or external definition (e.g. implicit value set) that identifies the set of codes to be used - *

    - */ - public Binding setValueSet(IDatatype theValue) { - myValueSet = theValue; - return this; - } - - - - - - - } - - /** * Block class for child element: ElementDefinition.mapping () * - *

    - * Definition: - * Identifies a concept from an external specification that roughly corresponds to this element - *

    + *

    + * Definition: + * Identifies a concept from an external specification that roughly corresponds to this element + *

    */ - @Block() - public static class Mapping - extends BaseIdentifiableElement implements IResourceBlock { - - @Child(name="identity", type=IdDt.class, order=0, min=1, max=1, summary=true, modifier=false) - @Description( - shortDefinition="", - formalDefinition="An internal reference to the definition of a mapping" - ) - private IdDt myIdentity; - - @Child(name="language", type=CodeDt.class, order=1, min=0, max=1, summary=true, modifier=false) - @Description( - shortDefinition="", - formalDefinition="Identifies the computable language in which mapping.map is expressed." - ) - private CodeDt myLanguage; - - @Child(name="map", type=StringDt.class, order=2, min=1, max=1, summary=true, modifier=false) - @Description( - shortDefinition="", - formalDefinition="Expresses what part of the target specification corresponds to this element" - ) - private StringDt myMap; - + @Block() + public static class Mapping extends BaseIdentifiableElement implements IResourceBlock { - @Override - public boolean isEmpty() { - return super.isBaseEmpty() && ca.uhn.fhir.util.ElementUtil.isEmpty( myIdentity, myLanguage, myMap); - } - - @Override - public List getAllPopulatedChildElementsOfType(Class theType) { - return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements(theType, myIdentity, myLanguage, myMap); - } + @Child(name = "identity", type = IdDt.class, order = 0, min = 1, max = 1, summary = true, modifier = false) + @Description(shortDefinition = "", formalDefinition = "An internal reference to the definition of a mapping") + private IdDt myIdentity; - /** - * Gets the value(s) for identity (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * An internal reference to the definition of a mapping - *

    - */ - public IdDt getIdentityElement() { - if (myIdentity == null) { - myIdentity = new IdDt(); + @Child(name = "language", type = CodeDt.class, order = 1, min = 0, max = 1, summary = true, modifier = false) + @Description( + shortDefinition = "", + formalDefinition = "Identifies the computable language in which mapping.map is expressed.") + private CodeDt myLanguage; + + @Child(name = "map", type = StringDt.class, order = 2, min = 1, max = 1, summary = true, modifier = false) + @Description( + shortDefinition = "", + formalDefinition = "Expresses what part of the target specification corresponds to this element") + private StringDt myMap; + + @Override + public boolean isEmpty() { + return super.isBaseEmpty() && ca.uhn.fhir.util.ElementUtil.isEmpty(myIdentity, myLanguage, myMap); } - return myIdentity; - } - - /** - * Gets the value(s) for identity (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * An internal reference to the definition of a mapping - *

    - */ - public String getIdentity() { - return getIdentityElement().getValue(); - } - - /** - * Sets the value(s) for identity () - * - *

    - * Definition: - * An internal reference to the definition of a mapping - *

    - */ - public Mapping setIdentity(IdDt theValue) { - myIdentity = theValue; - return this; - } - - - - /** - * Sets the value for identity () - * - *

    - * Definition: - * An internal reference to the definition of a mapping - *

    - */ - public Mapping setIdentity( String theId) { - myIdentity = new IdDt(theId); - return this; - } - - - /** - * Gets the value(s) for language (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Identifies the computable language in which mapping.map is expressed. - *

    - */ - public CodeDt getLanguageElement() { - if (myLanguage == null) { - myLanguage = new CodeDt(); + @Override + public List getAllPopulatedChildElementsOfType(Class theType) { + return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements(theType, myIdentity, myLanguage, myMap); } - return myLanguage; - } - - /** - * Gets the value(s) for language (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Identifies the computable language in which mapping.map is expressed. - *

    - */ - public String getLanguage() { - return getLanguageElement().getValue(); - } - - /** - * Sets the value(s) for language () - * - *

    - * Definition: - * Identifies the computable language in which mapping.map is expressed. - *

    - */ - public Mapping setLanguage(CodeDt theValue) { - myLanguage = theValue; - return this; - } - - - - /** - * Sets the value for language () - * - *

    - * Definition: - * Identifies the computable language in which mapping.map is expressed. - *

    - */ - public Mapping setLanguage( String theCode) { - myLanguage = new CodeDt(theCode); - return this; - } - - - /** - * Gets the value(s) for map (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Expresses what part of the target specification corresponds to this element - *

    - */ - public StringDt getMapElement() { - if (myMap == null) { - myMap = new StringDt(); + /** + * Gets the value(s) for identity (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * An internal reference to the definition of a mapping + *

    + */ + public IdDt getIdentityElement() { + if (myIdentity == null) { + myIdentity = new IdDt(); + } + return myIdentity; + } + + /** + * Gets the value(s) for identity (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * An internal reference to the definition of a mapping + *

    + */ + public String getIdentity() { + return getIdentityElement().getValue(); + } + + /** + * Sets the value(s) for identity () + * + *

    + * Definition: + * An internal reference to the definition of a mapping + *

    + */ + public Mapping setIdentity(IdDt theValue) { + myIdentity = theValue; + return this; + } + + /** + * Sets the value for identity () + * + *

    + * Definition: + * An internal reference to the definition of a mapping + *

    + */ + public Mapping setIdentity(String theId) { + myIdentity = new IdDt(theId); + return this; + } + + /** + * Gets the value(s) for language (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * Identifies the computable language in which mapping.map is expressed. + *

    + */ + public CodeDt getLanguageElement() { + if (myLanguage == null) { + myLanguage = new CodeDt(); + } + return myLanguage; + } + + /** + * Gets the value(s) for language (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * Identifies the computable language in which mapping.map is expressed. + *

    + */ + public String getLanguage() { + return getLanguageElement().getValue(); + } + + /** + * Sets the value(s) for language () + * + *

    + * Definition: + * Identifies the computable language in which mapping.map is expressed. + *

    + */ + public Mapping setLanguage(CodeDt theValue) { + myLanguage = theValue; + return this; + } + + /** + * Sets the value for language () + * + *

    + * Definition: + * Identifies the computable language in which mapping.map is expressed. + *

    + */ + public Mapping setLanguage(String theCode) { + myLanguage = new CodeDt(theCode); + return this; + } + + /** + * Gets the value(s) for map (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * Expresses what part of the target specification corresponds to this element + *

    + */ + public StringDt getMapElement() { + if (myMap == null) { + myMap = new StringDt(); + } + return myMap; + } + + /** + * Gets the value(s) for map (). + * creating it if it does + * not exist. Will not return null. + * + *

    + * Definition: + * Expresses what part of the target specification corresponds to this element + *

    + */ + public String getMap() { + return getMapElement().getValue(); + } + + /** + * Sets the value(s) for map () + * + *

    + * Definition: + * Expresses what part of the target specification corresponds to this element + *

    + */ + public Mapping setMap(StringDt theValue) { + myMap = theValue; + return this; + } + + /** + * Sets the value for map () + * + *

    + * Definition: + * Expresses what part of the target specification corresponds to this element + *

    + */ + public Mapping setMap(String theString) { + myMap = new StringDt(theString); + return this; } - return myMap; } - - - /** - * Gets the value(s) for map (). - * creating it if it does - * not exist. Will not return null. - * - *

    - * Definition: - * Expresses what part of the target specification corresponds to this element - *

    - */ - public String getMap() { - return getMapElement().getValue(); - } - - /** - * Sets the value(s) for map () - * - *

    - * Definition: - * Expresses what part of the target specification corresponds to this element - *

    - */ - public Mapping setMap(StringDt theValue) { - myMap = theValue; - return this; - } - - - - /** - * Sets the value for map () - * - *

    - * Definition: - * Expresses what part of the target specification corresponds to this element - *

    - */ - public Mapping setMap( String theString) { - myMap = new StringDt(theString); - return this; - } - - - - - } - - - - -} \ No newline at end of file +} diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/MoneyDt.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/MoneyDt.java index eb88044a80d..cd164a2192a 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/MoneyDt.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/MoneyDt.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,9 +20,6 @@ package ca.uhn.fhir.model.dstu2.composite; import ca.uhn.fhir.model.api.annotation.DatatypeDef; -import ca.uhn.fhir.model.dstu2.composite.QuantityDt; -@DatatypeDef(name="Money", profileOf=QuantityDt.class) -public class MoneyDt extends QuantityDt { - -} +@DatatypeDef(name = "Money", profileOf = QuantityDt.class) +public class MoneyDt extends QuantityDt {} diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/SimpleQuantityDt.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/SimpleQuantityDt.java index 1a0d8567843..ea325addf2a 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/SimpleQuantityDt.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/composite/SimpleQuantityDt.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
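For orientation, a minimal usage sketch of the ElementDefinition Mapping block reformatted above. This is illustrative only: the enclosing ElementDefinitionDt class name and package are assumptions (the patch itself only reformats the block), and the identity/language/map values are made-up sample data.

    import ca.uhn.fhir.model.dstu2.composite.ElementDefinitionDt;

    public class MappingExample {
        public static void main(String[] args) {
            // Mapping is a public static block class, so it can be built directly.
            ElementDefinitionDt.Mapping mapping = new ElementDefinitionDt.Mapping();
            // The reformatted setters keep their fluent return type, so chaining still works.
            mapping.setIdentity("rim")
                    .setLanguage("application/xml")
                    .setMap("Act[classCode=OBS]");
            System.out.println(mapping.isEmpty()); // false once identity/language/map are populated
        }
    }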
@@ -21,10 +21,9 @@ package ca.uhn.fhir.model.dstu2.composite; import ca.uhn.fhir.model.api.annotation.DatatypeDef; import ca.uhn.fhir.model.api.annotation.SimpleSetter; -import ca.uhn.fhir.model.dstu2.composite.QuantityDt; import ca.uhn.fhir.model.dstu2.valueset.QuantityComparatorEnum; -@DatatypeDef(name="SimpleQuantity", profileOf=QuantityDt.class) +@DatatypeDef(name = "SimpleQuantity", profileOf = QuantityDt.class) public class SimpleQuantityDt extends QuantityDt { private static final long serialVersionUID = 1L; @@ -36,12 +35,11 @@ public class SimpleQuantityDt extends QuantityDt { // nothing } - /** * Constructor */ @SimpleSetter - public SimpleQuantityDt(@SimpleSetter.Parameter(name="theValue") double theValue) { + public SimpleQuantityDt(@SimpleSetter.Parameter(name = "theValue") double theValue) { setValue(theValue); } @@ -49,15 +47,17 @@ public class SimpleQuantityDt extends QuantityDt { * Constructor */ @SimpleSetter - public SimpleQuantityDt(@SimpleSetter.Parameter(name="theValue") long theValue) { + public SimpleQuantityDt(@SimpleSetter.Parameter(name = "theValue") long theValue) { setValue(theValue); } - + /** * Constructor */ @SimpleSetter - public SimpleQuantityDt(@SimpleSetter.Parameter(name = "theComparator") QuantityComparatorEnum theComparator, @SimpleSetter.Parameter(name = "theValue") double theValue, + public SimpleQuantityDt( + @SimpleSetter.Parameter(name = "theComparator") QuantityComparatorEnum theComparator, + @SimpleSetter.Parameter(name = "theValue") double theValue, @SimpleSetter.Parameter(name = "theUnits") String theUnits) { setValue(theValue); setComparator(theComparator); @@ -68,7 +68,9 @@ public class SimpleQuantityDt extends QuantityDt { * Constructor */ @SimpleSetter - public SimpleQuantityDt(@SimpleSetter.Parameter(name = "theComparator") QuantityComparatorEnum theComparator, @SimpleSetter.Parameter(name = "theValue") long theValue, + public SimpleQuantityDt( + @SimpleSetter.Parameter(name = "theComparator") QuantityComparatorEnum theComparator, + @SimpleSetter.Parameter(name = "theValue") long theValue, @SimpleSetter.Parameter(name = "theUnits") String theUnits) { setValue(theValue); setComparator(theComparator); @@ -79,7 +81,10 @@ public class SimpleQuantityDt extends QuantityDt { * Constructor */ @SimpleSetter - public SimpleQuantityDt(@SimpleSetter.Parameter(name="theValue") double theValue, @SimpleSetter.Parameter(name="theSystem") String theSystem, @SimpleSetter.Parameter(name="theUnits") String theUnits) { + public SimpleQuantityDt( + @SimpleSetter.Parameter(name = "theValue") double theValue, + @SimpleSetter.Parameter(name = "theSystem") String theSystem, + @SimpleSetter.Parameter(name = "theUnits") String theUnits) { setValue(theValue); setSystem(theSystem); setUnits(theUnits); @@ -89,10 +94,12 @@ public class SimpleQuantityDt extends QuantityDt { * Constructor */ @SimpleSetter - public SimpleQuantityDt(@SimpleSetter.Parameter(name="theValue") long theValue, @SimpleSetter.Parameter(name="theSystem") String theSystem, @SimpleSetter.Parameter(name="theUnits") String theUnits) { + public SimpleQuantityDt( + @SimpleSetter.Parameter(name = "theValue") long theValue, + @SimpleSetter.Parameter(name = "theSystem") String theSystem, + @SimpleSetter.Parameter(name = "theUnits") String theUnits) { setValue(theValue); setSystem(theSystem); setUnits(theUnits); } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/valueset/StructureDefinitionKindEnum.java 
b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/valueset/StructureDefinitionKindEnum.java index 827cce14421..c1f183fb537 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/valueset/StructureDefinitionKindEnum.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu2/valueset/StructureDefinitionKindEnum.java @@ -1,7 +1,7 @@ - package ca.uhn.fhir.model.dstu2.valueset; import ca.uhn.fhir.model.api.*; + import java.util.HashMap; import java.util.Map; @@ -14,7 +14,7 @@ public enum StructureDefinitionKindEnum { * A data type - either a primitive or complex structure that defines a set of data elements. These can be used throughout Resource and extension definitions */ DATA_TYPE("datatype", "http://hl7.org/fhir/structure-definition-kind"), - + /** * Display: Resource
    * Code Value: resource @@ -22,7 +22,7 @@ public enum StructureDefinitionKindEnum { * A resource defined by the FHIR specification */ RESOURCE("resource", "http://hl7.org/fhir/structure-definition-kind"), - + /** * Display: Logical Model
    * Code Value: logical @@ -30,12 +30,11 @@ public enum StructureDefinitionKindEnum { * A logical model - a conceptual package of data that will be mapped to resources for implementation */ LOGICAL_MODEL("logical", "http://hl7.org/fhir/structure-definition-kind"), - ; - + /** * Identifier for this Value Set: - * + * */ public static final String VALUESET_IDENTIFIER = ""; @@ -45,37 +44,39 @@ public enum StructureDefinitionKindEnum { */ public static final String VALUESET_NAME = "StructureDefinitionKind"; - private static Map CODE_TO_ENUM = new HashMap(); - private static Map> SYSTEM_TO_CODE_TO_ENUM = new HashMap>(); - + private static Map CODE_TO_ENUM = + new HashMap(); + private static Map> SYSTEM_TO_CODE_TO_ENUM = + new HashMap>(); + private final String myCode; private final String mySystem; - + static { for (StructureDefinitionKindEnum next : StructureDefinitionKindEnum.values()) { CODE_TO_ENUM.put(next.getCode(), next); - + if (!SYSTEM_TO_CODE_TO_ENUM.containsKey(next.getSystem())) { SYSTEM_TO_CODE_TO_ENUM.put(next.getSystem(), new HashMap()); } - SYSTEM_TO_CODE_TO_ENUM.get(next.getSystem()).put(next.getCode(), next); + SYSTEM_TO_CODE_TO_ENUM.get(next.getSystem()).put(next.getCode(), next); } } - + /** * Returns the code associated with this enumerated value */ public String getCode() { return myCode; } - + /** * Returns the code system associated with this enumerated value */ public String getSystem() { return mySystem; } - + /** * Returns the enumerated value associated with this code */ @@ -87,40 +88,38 @@ public enum StructureDefinitionKindEnum { /** * Converts codes to their respective enumerated values */ - public static final IValueSetEnumBinder VALUESET_BINDER = new IValueSetEnumBinder() { - @Override - public String toCodeString(StructureDefinitionKindEnum theEnum) { - return theEnum.getCode(); - } + public static final IValueSetEnumBinder VALUESET_BINDER = + new IValueSetEnumBinder() { + @Override + public String toCodeString(StructureDefinitionKindEnum theEnum) { + return theEnum.getCode(); + } - @Override - public String toSystemString(StructureDefinitionKindEnum theEnum) { - return theEnum.getSystem(); - } - - @Override - public StructureDefinitionKindEnum fromCodeString(String theCodeString) { - return CODE_TO_ENUM.get(theCodeString); - } - - @Override - public StructureDefinitionKindEnum fromCodeString(String theCodeString, String theSystemString) { - Map map = SYSTEM_TO_CODE_TO_ENUM.get(theSystemString); - if (map == null) { - return null; - } - return map.get(theCodeString); - } - - }; - - /** + @Override + public String toSystemString(StructureDefinitionKindEnum theEnum) { + return theEnum.getSystem(); + } + + @Override + public StructureDefinitionKindEnum fromCodeString(String theCodeString) { + return CODE_TO_ENUM.get(theCodeString); + } + + @Override + public StructureDefinitionKindEnum fromCodeString(String theCodeString, String theSystemString) { + Map map = SYSTEM_TO_CODE_TO_ENUM.get(theSystemString); + if (map == null) { + return null; + } + return map.get(theCodeString); + } + }; + + /** * Constructor */ StructureDefinitionKindEnum(String theCode, String theSystem) { myCode = theCode; mySystem = theSystem; } - - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/AgeDt.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/AgeDt.java index 31815ebb538..1887366b31c 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/AgeDt.java +++ 
b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/AgeDt.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,10 +20,6 @@ package ca.uhn.fhir.model.dstu3.composite; import ca.uhn.fhir.model.api.annotation.DatatypeDef; -import ca.uhn.fhir.model.dstu3.composite.QuantityDt; -import ca.uhn.fhir.model.primitive.IntegerDt; -@DatatypeDef(name="AgeDt") -public class AgeDt extends QuantityDt { - -} +@DatatypeDef(name = "AgeDt") +public class AgeDt extends QuantityDt {} diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/BoundCodeableConceptDt.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/BoundCodeableConceptDt.java index d50b200bd36..992397d3493 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/BoundCodeableConceptDt.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/BoundCodeableConceptDt.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -19,19 +19,16 @@ */ package ca.uhn.fhir.model.dstu3.composite; -import static org.apache.commons.lang3.StringUtils.defaultString; +import ca.uhn.fhir.model.api.IBoundCodeableConcept; +import ca.uhn.fhir.model.api.IValueSetEnumBinder; +import ca.uhn.fhir.model.api.annotation.DatatypeDef; +import org.apache.commons.lang3.Validate; import java.util.Collection; import java.util.HashSet; import java.util.Set; -import org.apache.commons.lang3.Validate; - -import ca.uhn.fhir.model.api.IBoundCodeableConcept; -import ca.uhn.fhir.model.api.IValueSetEnumBinder; -import ca.uhn.fhir.model.api.annotation.DatatypeDef; -import ca.uhn.fhir.model.dstu3.composite.CodeableConceptDt; -import ca.uhn.fhir.model.dstu3.composite.CodingDt; +import static org.apache.commons.lang3.StringUtils.defaultString; @DatatypeDef(name = "CodeableConcept", isSpecialization = true) public class BoundCodeableConceptDt> extends CodeableConceptDt implements IBoundCodeableConcept { @@ -77,12 +74,14 @@ public class BoundCodeableConceptDt> extends CodeableConceptDt * system defined by the given enumerated types, AND clearing any existing * codings first. If theValue is null, existing codings are cleared and no * codings are added. - * + * * @param theValues * The value to add, or null */ public void setValueAsEnum(Collection theValues) { - Validate.notNull(myBinder, "This object does not have a binder. Constructor BoundCodeableConceptDt() should not be called!"); + Validate.notNull( + myBinder, + "This object does not have a binder. 
Constructor BoundCodeableConceptDt() should not be called!"); getCoding().clear(); if (theValues != null) { for (T next : theValues) { @@ -96,12 +95,14 @@ public class BoundCodeableConceptDt> extends CodeableConceptDt * system defined by the given enumerated type, AND clearing any existing * codings first. If theValue is null, existing codings are cleared and no * codings are added. - * + * * @param theValue * The value to add, or null */ public void setValueAsEnum(T theValue) { - Validate.notNull(myBinder, "This object does not have a binder. Constructor BoundCodeableConceptDt() should not be called!"); + Validate.notNull( + myBinder, + "This object does not have a binder. Constructor BoundCodeableConceptDt() should not be called!"); getCoding().clear(); if (theValue == null) { return; @@ -114,20 +115,24 @@ public class BoundCodeableConceptDt> extends CodeableConceptDt * and returns the first bound enumerated type that matches. Use * caution using this method, see the return description for more * information. - * + * * @return Returns the bound enumerated type, or null if none * are found. Note that a null return value doesn't neccesarily * imply that this Codeable Concept has no codes, only that it has * no codes that match the enum. */ public Set getValueAsEnum() { - Validate.notNull(myBinder, "This object does not have a binder. Constructor BoundCodeableConceptDt() should not be called!"); + Validate.notNull( + myBinder, + "This object does not have a binder. Constructor BoundCodeableConceptDt() should not be called!"); Set retVal = new HashSet(); for (CodingDt next : getCoding()) { if (next == null) { continue; } - T nextT = myBinder.fromCodeString(defaultString(next.getCodeElement().getValue()), defaultString(next.getSystemElement().getValueAsString())); + T nextT = myBinder.fromCodeString( + defaultString(next.getCodeElement().getValue()), + defaultString(next.getSystemElement().getValueAsString())); if (nextT != null) { retVal.add(nextT); } else { @@ -136,5 +141,4 @@ public class BoundCodeableConceptDt> extends CodeableConceptDt } return retVal; } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/CodeableConceptDt.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/CodeableConceptDt.java index 27008e73fa9..845a16445bb 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/CodeableConceptDt.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/CodeableConceptDt.java @@ -1,24 +1,6 @@ - - - - - - - - - - - - - - - - package ca.uhn.fhir.model.dstu3.composite; import ca.uhn.fhir.i18n.Msg; -import java.util.List; - import ca.uhn.fhir.model.api.BaseIdentifiableElement; import ca.uhn.fhir.model.api.ICompositeDatatype; import ca.uhn.fhir.model.api.IElement; @@ -27,6 +9,8 @@ import ca.uhn.fhir.model.api.annotation.DatatypeDef; import ca.uhn.fhir.model.api.annotation.Description; import ca.uhn.fhir.model.primitive.StringDt; +import java.util.List; + /** * HAPI/FHIR CodeableConceptDt Datatype * () @@ -34,16 +18,15 @@ import ca.uhn.fhir.model.primitive.StringDt; *

* Definition: * A concept that may be defined by a formal reference to a terminology or ontology or may be provided by text - *
+ *
* *
* Requirements: * This is a common pattern in healthcare - a concept that may be defined by one or more codes from formal definitions including LOINC and SNOMED CT, and/or defined by the provision of text that captures a human sense of the concept - *
+ *
    */ -@DatatypeDef(name="CodeableConceptDt") -public class CodeableConceptDt - extends BaseIdentifiableElement implements ICompositeDatatype{ +@DatatypeDef(name = "CodeableConceptDt") +public class CodeableConceptDt extends BaseIdentifiableElement implements ICompositeDatatype { /** * Constructor @@ -60,26 +43,29 @@ public class CodeableConceptDt addCoding().setSystem(theSystem).setCode(theCode); } - @Child(name="coding", type=CodingDt.class, order=0, min=0, max=Child.MAX_UNLIMITED, summary=true, modifier=false) - @Description( - shortDefinition="", - formalDefinition="A reference to a code defined by a terminology system" - ) + @Child( + name = "coding", + type = CodingDt.class, + order = 0, + min = 0, + max = Child.MAX_UNLIMITED, + summary = true, + modifier = false) + @Description(shortDefinition = "", formalDefinition = "A reference to a code defined by a terminology system") private java.util.List myCoding; - - @Child(name="text", type=StringDt.class, order=1, min=0, max=1, summary=true, modifier=false) + + @Child(name = "text", type = StringDt.class, order = 1, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="A human language representation of the concept as seen/selected/uttered by the user who entered the data and/or which represents the intended meaning of the user" - ) + shortDefinition = "", + formalDefinition = + "A human language representation of the concept as seen/selected/uttered by the user who entered the data and/or which represents the intended meaning of the user") private StringDt myText; - @Override public boolean isEmpty() { - return super.isBaseEmpty() && ca.uhn.fhir.util.ElementUtil.isEmpty( myCoding, myText); + return super.isBaseEmpty() && ca.uhn.fhir.util.ElementUtil.isEmpty(myCoding, myText); } - + @Override public List getAllPopulatedChildElementsOfType(Class theType) { return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements(theType, myCoding, myText); @@ -90,12 +76,12 @@ public class CodeableConceptDt * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * A reference to a code defined by a terminology system - *

    + *

    + * Definition: + * A reference to a code defined by a terminology system + *

    */ - public java.util.List getCoding() { + public java.util.List getCoding() { if (myCoding == null) { myCoding = new java.util.ArrayList(); } @@ -105,30 +91,28 @@ public class CodeableConceptDt /** * Sets the value(s) for coding () * - *

    - * Definition: - * A reference to a code defined by a terminology system - *

    + *

    + * Definition: + * A reference to a code defined by a terminology system + *

    */ public CodeableConceptDt setCoding(java.util.List theValue) { myCoding = theValue; return this; } - - /** * Adds and returns a new value for coding () * - *

    - * Definition: - * A reference to a code defined by a terminology system - *

    + *

    + * Definition: + * A reference to a code defined by a terminology system + *

    */ public CodingDt addCoding() { CodingDt newType = new CodingDt(); getCoding().add(newType); - return newType; + return newType; } /** @@ -152,79 +136,72 @@ public class CodeableConceptDt * Gets the first repetition for coding (), * creating it if it does not already exist. * - *

    - * Definition: - * A reference to a code defined by a terminology system - *

    + *

    + * Definition: + * A reference to a code defined by a terminology system + *

    */ public CodingDt getCodingFirstRep() { if (getCoding().isEmpty()) { return addCoding(); } - return getCoding().get(0); + return getCoding().get(0); } - + /** * Gets the value(s) for text (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * A human language representation of the concept as seen/selected/uttered by the user who entered the data and/or which represents the intended meaning of the user - *

    + *

    + * Definition: + * A human language representation of the concept as seen/selected/uttered by the user who entered the data and/or which represents the intended meaning of the user + *

    */ - public StringDt getTextElement() { + public StringDt getTextElement() { if (myText == null) { myText = new StringDt(); } return myText; } - /** * Gets the value(s) for text (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * A human language representation of the concept as seen/selected/uttered by the user who entered the data and/or which represents the intended meaning of the user - *

    + *

    + * Definition: + * A human language representation of the concept as seen/selected/uttered by the user who entered the data and/or which represents the intended meaning of the user + *

    */ - public String getText() { + public String getText() { return getTextElement().getValue(); } /** * Sets the value(s) for text () * - *

    - * Definition: - * A human language representation of the concept as seen/selected/uttered by the user who entered the data and/or which represents the intended meaning of the user - *

    + *

    + * Definition: + * A human language representation of the concept as seen/selected/uttered by the user who entered the data and/or which represents the intended meaning of the user + *

    */ public CodeableConceptDt setText(StringDt theValue) { myText = theValue; return this; } - - - /** + /** * Sets the value for text () * - *

    - * Definition: - * A human language representation of the concept as seen/selected/uttered by the user who entered the data and/or which represents the intended meaning of the user - *

    + *

    + * Definition: + * A human language representation of the concept as seen/selected/uttered by the user who entered the data and/or which represents the intended meaning of the user + *

    */ - public CodeableConceptDt setText( String theString) { - myText = new StringDt(theString); - return this; + public CodeableConceptDt setText(String theString) { + myText = new StringDt(theString); + return this; } - - - - -} \ No newline at end of file +} diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/CodingDt.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/CodingDt.java index 7d74c842ff8..e6626f34665 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/CodingDt.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/CodingDt.java @@ -1,23 +1,5 @@ - - - - - - - - - - - - - - - - package ca.uhn.fhir.model.dstu3.composite; -import java.util.List; - import ca.uhn.fhir.model.api.ICompositeDatatype; import ca.uhn.fhir.model.api.IElement; import ca.uhn.fhir.model.api.annotation.Child; @@ -29,6 +11,8 @@ import ca.uhn.fhir.model.primitive.CodeDt; import ca.uhn.fhir.model.primitive.StringDt; import ca.uhn.fhir.model.primitive.UriDt; +import java.util.List; + /** * HAPI/FHIR CodingDt Datatype * () @@ -36,16 +20,15 @@ import ca.uhn.fhir.model.primitive.UriDt; *
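As a quick sanity check on the reformatted CodeableConceptDt whose diff ends just above, a hedged usage sketch; every method used here (the two-argument constructor, addCoding, setText, getCodingFirstRep) appears in this patch, but the LOINC system/code values are made-up sample data.

    import ca.uhn.fhir.model.dstu3.composite.CodeableConceptDt;
    import ca.uhn.fhir.model.dstu3.composite.CodingDt;

    public class CodeableConceptExample {
        public static void main(String[] args) {
            // The two-argument constructor delegates to addCoding().setSystem(...).setCode(...).
            CodeableConceptDt concept = new CodeableConceptDt("http://loinc.org", "8867-4");
            concept.setText("Heart rate");

            // getCodingFirstRep() returns the first repetition, adding one if the list is empty.
            CodingDt first = concept.getCodingFirstRep();
            System.out.println(first.getSystem() + "|" + first.getCode() + " - " + concept.getText());
        }
    }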

* Definition: * A reference to a code defined by a terminology system - *
+ *
* *
* Requirements: * References to codes are very common in healthcare models - *
+ *
    */ -@DatatypeDef(name="CodingDt") -public class CodingDt - extends BaseCodingDt implements ICompositeDatatype, org.hl7.fhir.instance.model.api.IBaseCoding { +@DatatypeDef(name = "CodingDt") +public class CodingDt extends BaseCodingDt implements ICompositeDatatype, org.hl7.fhir.instance.model.api.IBaseCoding { /** * Constructor @@ -61,59 +44,61 @@ public class CodingDt setSystem(theSystem); setCode(theCode); } - + /** * Copy constructor: Creates a new Coding with the system and code copied out of the given coding */ public CodingDt(BaseCodingDt theCoding) { - this(theCoding.getSystemElement().getValueAsString(), theCoding.getCodeElement().getValue()); + this( + theCoding.getSystemElement().getValueAsString(), + theCoding.getCodeElement().getValue()); } - - @Child(name="system", type=UriDt.class, order=0, min=0, max=1, summary=true, modifier=false) + @Child(name = "system", type = UriDt.class, order = 0, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="The identification of the code system that defines the meaning of the symbol in the code." - ) + shortDefinition = "", + formalDefinition = + "The identification of the code system that defines the meaning of the symbol in the code.") private UriDt mySystem; - - @Child(name="version", type=StringDt.class, order=1, min=0, max=1, summary=true, modifier=false) + + @Child(name = "version", type = StringDt.class, order = 1, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="The version of the code system which was used when choosing this code. Note that a well-maintained code system does not need the version reported, because the meaning of codes is consistent across versions. However this cannot consistently be assured. and when the meaning is not guaranteed to be consistent, the version SHOULD be exchanged" - ) + shortDefinition = "", + formalDefinition = + "The version of the code system which was used when choosing this code. Note that a well-maintained code system does not need the version reported, because the meaning of codes is consistent across versions. However this cannot consistently be assured. and when the meaning is not guaranteed to be consistent, the version SHOULD be exchanged") private StringDt myVersion; - - @Child(name="code", type=CodeDt.class, order=2, min=0, max=1, summary=true, modifier=false) + + @Child(name = "code", type = CodeDt.class, order = 2, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="A symbol in syntax defined by the system. The symbol may be a predefined code or an expression in a syntax defined by the coding system (e.g. post-coordination)" - ) + shortDefinition = "", + formalDefinition = + "A symbol in syntax defined by the system. The symbol may be a predefined code or an expression in a syntax defined by the coding system (e.g. 
post-coordination)") private CodeDt myCode; - - @Child(name="display", type=StringDt.class, order=3, min=0, max=1, summary=true, modifier=false) + + @Child(name = "display", type = StringDt.class, order = 3, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="A representation of the meaning of the code in the system, following the rules of the system" - ) + shortDefinition = "", + formalDefinition = + "A representation of the meaning of the code in the system, following the rules of the system") private StringDt myDisplay; - - @Child(name="userSelected", type=BooleanDt.class, order=4, min=0, max=1, summary=true, modifier=false) + + @Child(name = "userSelected", type = BooleanDt.class, order = 4, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="Indicates that this coding was chosen by a user directly - i.e. off a pick list of available items (codes or displays)" - ) + shortDefinition = "", + formalDefinition = + "Indicates that this coding was chosen by a user directly - i.e. off a pick list of available items (codes or displays)") private BooleanDt myUserSelected; - @Override public boolean isEmpty() { - return super.isBaseEmpty() && ca.uhn.fhir.util.ElementUtil.isEmpty( mySystem, myVersion, myCode, myDisplay, myUserSelected); + return super.isBaseEmpty() + && ca.uhn.fhir.util.ElementUtil.isEmpty(mySystem, myVersion, myCode, myDisplay, myUserSelected); } - + @Override public List getAllPopulatedChildElementsOfType(Class theType) { - return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements(theType, mySystem, myVersion, myCode, myDisplay, myUserSelected); + return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements( + theType, mySystem, myVersion, myCode, myDisplay, myUserSelected); } /** @@ -121,10 +106,10 @@ public class CodingDt * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The identification of the code system that defines the meaning of the symbol in the code. - *

    + *

    + * Definition: + * The identification of the code system that defines the meaning of the symbol in the code. + *

    */ @Override public UriDt getSystemElement() { @@ -134,16 +119,15 @@ public class CodingDt return mySystem; } - /** * Gets the value(s) for system (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The identification of the code system that defines the meaning of the symbol in the code. - *

    + *

    + * Definition: + * The identification of the code system that defines the meaning of the symbol in the code. + *

    */ @Override public String getSystem() { @@ -153,103 +137,96 @@ public class CodingDt /** * Sets the value(s) for system () * - *

    - * Definition: - * The identification of the code system that defines the meaning of the symbol in the code. - *

    + *

    + * Definition: + * The identification of the code system that defines the meaning of the symbol in the code. + *

    */ public CodingDt setSystem(UriDt theValue) { mySystem = theValue; return this; } - - - /** + /** * Sets the value for system () * - *

    - * Definition: - * The identification of the code system that defines the meaning of the symbol in the code. - *

    + *

    + * Definition: + * The identification of the code system that defines the meaning of the symbol in the code. + *

    */ @Override public CodingDt setSystem(String theUri) { - mySystem = new UriDt(theUri); - return this; + mySystem = new UriDt(theUri); + return this; } - /** * Gets the value(s) for version (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The version of the code system which was used when choosing this code. Note that a well-maintained code system does not need the version reported, because the meaning of codes is consistent across versions. However this cannot consistently be assured. and when the meaning is not guaranteed to be consistent, the version SHOULD be exchanged - *

    + *

    + * Definition: + * The version of the code system which was used when choosing this code. Note that a well-maintained code system does not need the version reported, because the meaning of codes is consistent across versions. However this cannot consistently be assured. and when the meaning is not guaranteed to be consistent, the version SHOULD be exchanged + *

    */ - public StringDt getVersionElement() { + public StringDt getVersionElement() { if (myVersion == null) { myVersion = new StringDt(); } return myVersion; } - /** * Gets the value(s) for version (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The version of the code system which was used when choosing this code. Note that a well-maintained code system does not need the version reported, because the meaning of codes is consistent across versions. However this cannot consistently be assured. and when the meaning is not guaranteed to be consistent, the version SHOULD be exchanged - *

    + *

    + * Definition: + * The version of the code system which was used when choosing this code. Note that a well-maintained code system does not need the version reported, because the meaning of codes is consistent across versions. However this cannot consistently be assured. and when the meaning is not guaranteed to be consistent, the version SHOULD be exchanged + *

    */ - public String getVersion() { + public String getVersion() { return getVersionElement().getValue(); } /** * Sets the value(s) for version () * - *

    - * Definition: - * The version of the code system which was used when choosing this code. Note that a well-maintained code system does not need the version reported, because the meaning of codes is consistent across versions. However this cannot consistently be assured. and when the meaning is not guaranteed to be consistent, the version SHOULD be exchanged - *

    + *

    + * Definition: + * The version of the code system which was used when choosing this code. Note that a well-maintained code system does not need the version reported, because the meaning of codes is consistent across versions. However this cannot consistently be assured. and when the meaning is not guaranteed to be consistent, the version SHOULD be exchanged + *

    */ public CodingDt setVersion(StringDt theValue) { myVersion = theValue; return this; } - - - /** + /** * Sets the value for version () * - *

    - * Definition: - * The version of the code system which was used when choosing this code. Note that a well-maintained code system does not need the version reported, because the meaning of codes is consistent across versions. However this cannot consistently be assured. and when the meaning is not guaranteed to be consistent, the version SHOULD be exchanged - *

    + *

    + * Definition: + * The version of the code system which was used when choosing this code. Note that a well-maintained code system does not need the version reported, because the meaning of codes is consistent across versions. However this cannot consistently be assured. and when the meaning is not guaranteed to be consistent, the version SHOULD be exchanged + *

    */ - public CodingDt setVersion( String theString) { - myVersion = new StringDt(theString); - return this; + public CodingDt setVersion(String theString) { + myVersion = new StringDt(theString); + return this; } - /** * Gets the value(s) for code (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * A symbol in syntax defined by the system. The symbol may be a predefined code or an expression in a syntax defined by the coding system (e.g. post-coordination) - *

    + *

    + * Definition: + * A symbol in syntax defined by the system. The symbol may be a predefined code or an expression in a syntax defined by the coding system (e.g. post-coordination) + *

    */ @Override public CodeDt getCodeElement() { @@ -259,16 +236,15 @@ public class CodingDt return myCode; } - /** * Gets the value(s) for code (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * A symbol in syntax defined by the system. The symbol may be a predefined code or an expression in a syntax defined by the coding system (e.g. post-coordination) - *

    + *

    + * Definition: + * A symbol in syntax defined by the system. The symbol may be a predefined code or an expression in a syntax defined by the coding system (e.g. post-coordination) + *

    */ @Override public String getCode() { @@ -278,42 +254,39 @@ public class CodingDt /** * Sets the value(s) for code () * - *

    - * Definition: - * A symbol in syntax defined by the system. The symbol may be a predefined code or an expression in a syntax defined by the coding system (e.g. post-coordination) - *

    + *

    + * Definition: + * A symbol in syntax defined by the system. The symbol may be a predefined code or an expression in a syntax defined by the coding system (e.g. post-coordination) + *

    */ public CodingDt setCode(CodeDt theValue) { myCode = theValue; return this; } - - - /** + /** * Sets the value for code () * - *

    - * Definition: - * A symbol in syntax defined by the system. The symbol may be a predefined code or an expression in a syntax defined by the coding system (e.g. post-coordination) - *

    + *

    + * Definition: + * A symbol in syntax defined by the system. The symbol may be a predefined code or an expression in a syntax defined by the coding system (e.g. post-coordination) + *

    */ @Override public CodingDt setCode(String theCode) { - myCode = new CodeDt(theCode); - return this; + myCode = new CodeDt(theCode); + return this; } - /** * Gets the value(s) for display (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * A representation of the meaning of the code in the system, following the rules of the system - *

    + *

    + * Definition: + * A representation of the meaning of the code in the system, following the rules of the system + *

    */ @Override public StringDt getDisplayElement() { @@ -323,16 +296,15 @@ public class CodingDt return myDisplay; } - /** * Gets the value(s) for display (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * A representation of the meaning of the code in the system, following the rules of the system - *

    + *

    + * Definition: + * A representation of the meaning of the code in the system, following the rules of the system + *

    */ @Override public String getDisplay() { @@ -342,60 +314,56 @@ public class CodingDt /** * Sets the value(s) for display () * - *

    - * Definition: - * A representation of the meaning of the code in the system, following the rules of the system - *

    + *

    + * Definition: + * A representation of the meaning of the code in the system, following the rules of the system + *

    */ public CodingDt setDisplay(StringDt theValue) { myDisplay = theValue; return this; } - - - /** + /** * Sets the value for display () * - *

    - * Definition: - * A representation of the meaning of the code in the system, following the rules of the system - *

    + *

    + * Definition: + * A representation of the meaning of the code in the system, following the rules of the system + *

    */ @Override public CodingDt setDisplay(String theString) { - myDisplay = new StringDt(theString); - return this; + myDisplay = new StringDt(theString); + return this; } - /** * Gets the value(s) for userSelected (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * Indicates that this coding was chosen by a user directly - i.e. off a pick list of available items (codes or displays) - *

    + *

    + * Definition: + * Indicates that this coding was chosen by a user directly - i.e. off a pick list of available items (codes or displays) + *

    */ - public BooleanDt getUserSelectedElement() { + public BooleanDt getUserSelectedElement() { if (myUserSelected == null) { myUserSelected = new BooleanDt(); } return myUserSelected; } - /** * Gets the value(s) for userSelected (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * Indicates that this coding was chosen by a user directly - i.e. off a pick list of available items (codes or displays) - *

    + *

    + * Definition: + * Indicates that this coding was chosen by a user directly - i.e. off a pick list of available items (codes or displays) + *

    */ public boolean getUserSelected() { return getUserSelectedElement().getValue(); @@ -404,32 +372,26 @@ public class CodingDt /** * Sets the value(s) for userSelected () * - *

    - * Definition: - * Indicates that this coding was chosen by a user directly - i.e. off a pick list of available items (codes or displays) - *

    + *

    + * Definition: + * Indicates that this coding was chosen by a user directly - i.e. off a pick list of available items (codes or displays) + *

    */ public CodingDt setUserSelected(BooleanDt theValue) { myUserSelected = theValue; return this; } - - - /** + /** * Sets the value for userSelected () * - *

    - * Definition: - * Indicates that this coding was chosen by a user directly - i.e. off a pick list of available items (codes or displays) - *

    + *

    + * Definition: + * Indicates that this coding was chosen by a user directly - i.e. off a pick list of available items (codes or displays) + *

    */ - public CodingDt setUserSelected( boolean theBoolean) { - myUserSelected = new BooleanDt(theBoolean); - return this; + public CodingDt setUserSelected(boolean theBoolean) { + myUserSelected = new BooleanDt(theBoolean); + return this; } - - - - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/ContainedDt.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/ContainedDt.java index aee63781637..5364cc9773f 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/ContainedDt.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/ContainedDt.java @@ -60,5 +60,4 @@ public class ContainedDt extends BaseContainedDt { public void setUserData(String theName, Object theValue) { throw new UnsupportedOperationException(Msg.code(86)); } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/CountDt.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/CountDt.java index ffc5c318b89..c2e21e41634 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/CountDt.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/CountDt.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,9 +20,6 @@ package ca.uhn.fhir.model.dstu3.composite; import ca.uhn.fhir.model.api.annotation.DatatypeDef; -import ca.uhn.fhir.model.dstu3.composite.QuantityDt; -@DatatypeDef(name="CountDt") -public class CountDt extends QuantityDt { - -} +@DatatypeDef(name = "CountDt") +public class CountDt extends QuantityDt {} diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/DistanceDt.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/DistanceDt.java index 865abad63f9..365fde0d3bd 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/DistanceDt.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/DistanceDt.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
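The Quantity-derived convenience types in this region (CountDt just above, and DistanceDt, DurationDt, MoneyDt that follow) are reduced to empty subclasses of QuantityDt, so they rely entirely on inherited mutators. A small sketch, assuming the setValue/setSystem/setUnits methods that QuantityDt's own constructors call in this patch are the intended public API:

    import ca.uhn.fhir.model.dstu3.composite.CountDt;

    public class CountExample {
        public static void main(String[] args) {
            CountDt count = new CountDt();
            count.setValue(3L);
            count.setSystem("http://unitsofmeasure.org");
            // setUnits(...) is what the QuantityDt constructors in this patch use;
            // it may be superseded by setUnit(...) in newer model classes.
            count.setUnits("1");
            System.out.println(count.isEmpty()); // false once a value is set
        }
    }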
@@ -20,10 +20,6 @@ package ca.uhn.fhir.model.dstu3.composite; import ca.uhn.fhir.model.api.annotation.DatatypeDef; -import ca.uhn.fhir.model.dstu3.composite.QuantityDt; -import ca.uhn.fhir.model.primitive.IntegerDt; -@DatatypeDef(name="DistanceDt") -public class DistanceDt extends QuantityDt { - -} +@DatatypeDef(name = "DistanceDt") +public class DistanceDt extends QuantityDt {} diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/DurationDt.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/DurationDt.java index e59cd07f3d6..0ebc950e7e0 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/DurationDt.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/DurationDt.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,9 +20,6 @@ package ca.uhn.fhir.model.dstu3.composite; import ca.uhn.fhir.model.api.annotation.DatatypeDef; -import ca.uhn.fhir.model.dstu3.composite.QuantityDt; -@DatatypeDef(name="DurationDt") -public class DurationDt extends QuantityDt { - -} +@DatatypeDef(name = "DurationDt") +public class DurationDt extends QuantityDt {} diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/MoneyDt.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/MoneyDt.java index b4decdac793..6f31a642ffd 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/MoneyDt.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/MoneyDt.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,9 +20,6 @@ package ca.uhn.fhir.model.dstu3.composite; import ca.uhn.fhir.model.api.annotation.DatatypeDef; -import ca.uhn.fhir.model.dstu3.composite.QuantityDt; -@DatatypeDef(name="Money") -public class MoneyDt extends QuantityDt { - -} +@DatatypeDef(name = "Money") +public class MoneyDt extends QuantityDt {} diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/NarrativeDt.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/NarrativeDt.java index eb931400674..a26f5ca0eb9 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/NarrativeDt.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/NarrativeDt.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -18,25 +18,8 @@ * #L% */ - - - - - - - - - - - - - - - package ca.uhn.fhir.model.dstu3.composite; -import java.util.List; - import ca.uhn.fhir.model.api.IElement; import ca.uhn.fhir.model.api.annotation.Child; import ca.uhn.fhir.model.api.annotation.DatatypeDef; @@ -44,6 +27,8 @@ import ca.uhn.fhir.model.base.composite.BaseNarrativeDt; import ca.uhn.fhir.model.primitive.BoundCodeDt; import ca.uhn.fhir.model.primitive.XhtmlDt; +import java.util.List; + /** * HAPI/FHIR Narrative Datatype * (A human-readable formatted text, including images) @@ -51,32 +36,31 @@ import ca.uhn.fhir.model.primitive.XhtmlDt; *

* Definition: * A human-readable formatted text, including images - *
+ *
* *
* Requirements: - * - *
+ * + *
    */ -@DatatypeDef(name="Narrative") +@DatatypeDef(name = "Narrative") public class NarrativeDt extends BaseNarrativeDt { - @Child(name="div", type=XhtmlDt.class, order=1, min=1, max=1) + @Child(name = "div", type = XhtmlDt.class, order = 1, min = 1, max = 1) private XhtmlDt myDiv; - + public NarrativeDt() { // nothing } - + @Override public boolean isEmpty() { - return ca.uhn.fhir.util.ElementUtil.isEmpty( myDiv ); + return ca.uhn.fhir.util.ElementUtil.isEmpty(myDiv); } - @Override public List getAllPopulatedChildElementsOfType(Class theType) { - return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements( theType, myDiv ); + return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements(theType, myDiv); } /** @@ -84,24 +68,24 @@ public class NarrativeDt extends BaseNarrativeDt { * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The actual narrative content, a stripped down version of XHTML - *

    + *

    + * Definition: + * The actual narrative content, a stripped down version of XHTML + *

    */ public XhtmlDt getDivElement() { return getDiv(); } - + /** * Gets the value(s) for div (Limited xhtml content). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The actual narrative content, a stripped down version of XHTML - *

    + *

    + * Definition: + * The actual narrative content, a stripped down version of XHTML + *

    */ @Override public XhtmlDt getDiv() { @@ -114,10 +98,10 @@ public class NarrativeDt extends BaseNarrativeDt { /** * Sets the value(s) for div (Limited xhtml content) * - *

    - * Definition: - * The actual narrative content, a stripped down version of XHTML - *

    + *

    + * Definition: + * The actual narrative content, a stripped down version of XHTML + *

    */ public void setDiv(XhtmlDt theValue) { myDiv = theValue; @@ -135,8 +119,4 @@ public class NarrativeDt extends BaseNarrativeDt { public BoundCodeDt getStatus() { return null; } - - - - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/QuantityDt.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/QuantityDt.java index b9222b542a9..fd032f0ccc7 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/QuantityDt.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/QuantityDt.java @@ -1,24 +1,5 @@ - - - - - - - - - - - - - - - - package ca.uhn.fhir.model.dstu3.composite; -import java.math.BigDecimal; -import java.util.List; - import ca.uhn.fhir.model.api.ICompositeDatatype; import ca.uhn.fhir.model.api.IElement; import ca.uhn.fhir.model.api.annotation.Child; @@ -33,6 +14,9 @@ import ca.uhn.fhir.model.primitive.DecimalDt; import ca.uhn.fhir.model.primitive.StringDt; import ca.uhn.fhir.model.primitive.UriDt; +import java.math.BigDecimal; +import java.util.List; + /** * HAPI/FHIR QuantityDt Datatype * () @@ -40,16 +24,15 @@ import ca.uhn.fhir.model.primitive.UriDt; *
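A brief, hedged sketch of the NarrativeDt accessors shown above: getDiv() lazily creates the XhtmlDt and never returns null, while setDiv(XhtmlDt) replaces it. The setValueAsString/getValueAsString calls are the generic HAPI primitive-datatype API and are an assumption here, not something this patch touches.

    import ca.uhn.fhir.model.dstu3.composite.NarrativeDt;
    import ca.uhn.fhir.model.primitive.XhtmlDt;

    public class NarrativeExample {
        public static void main(String[] args) {
            NarrativeDt narrative = new NarrativeDt();

            XhtmlDt div = new XhtmlDt();
            div.setValueAsString("<div>Patient is stable.</div>");
            narrative.setDiv(div);

            // getDiv() creates an empty XhtmlDt on demand, so this never throws a NullPointerException.
            System.out.println(narrative.getDiv().getValueAsString());
        }
    }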

    * Definition: * A measured amount (or an amount that can potentially be measured). Note that measured amounts include amounts that are not precisely quantified, including amounts involving arbitrary units and floating currencies - *

    + *

    * *

    * Requirements: * Need to able to capture all sorts of measured values, even if the measured value are not precisely quantified. Values include exact measures such as 3.51g, customary units such as 3 tablets, and currencies such as $100.32USD - *

    + *

    */ -@DatatypeDef(name="QuantityDt") -public class QuantityDt - extends BaseQuantityDt implements ICompositeDatatype{ +@DatatypeDef(name = "QuantityDt") +public class QuantityDt extends BaseQuantityDt implements ICompositeDatatype { /** * Constructor @@ -58,12 +41,11 @@ public class QuantityDt // nothing } - /** * Constructor */ @SimpleSetter - public QuantityDt(@SimpleSetter.Parameter(name="theValue") double theValue) { + public QuantityDt(@SimpleSetter.Parameter(name = "theValue") double theValue) { setValue(theValue); } @@ -71,15 +53,17 @@ public class QuantityDt * Constructor */ @SimpleSetter - public QuantityDt(@SimpleSetter.Parameter(name="theValue") long theValue) { + public QuantityDt(@SimpleSetter.Parameter(name = "theValue") long theValue) { setValue(theValue); } - + /** * Constructor */ @SimpleSetter - public QuantityDt(@SimpleSetter.Parameter(name = "theComparator") QuantityComparatorEnum theComparator, @SimpleSetter.Parameter(name = "theValue") double theValue, + public QuantityDt( + @SimpleSetter.Parameter(name = "theComparator") QuantityComparatorEnum theComparator, + @SimpleSetter.Parameter(name = "theValue") double theValue, @SimpleSetter.Parameter(name = "theUnits") String theUnits) { setValue(theValue); setComparator(theComparator); @@ -90,7 +74,9 @@ public class QuantityDt * Constructor */ @SimpleSetter - public QuantityDt(@SimpleSetter.Parameter(name = "theComparator") QuantityComparatorEnum theComparator, @SimpleSetter.Parameter(name = "theValue") long theValue, + public QuantityDt( + @SimpleSetter.Parameter(name = "theComparator") QuantityComparatorEnum theComparator, + @SimpleSetter.Parameter(name = "theValue") long theValue, @SimpleSetter.Parameter(name = "theUnits") String theUnits) { setValue(theValue); setComparator(theComparator); @@ -101,7 +87,11 @@ public class QuantityDt * Constructor */ @SimpleSetter - public QuantityDt(@SimpleSetter.Parameter(name="theComparator") QuantityComparatorEnum theComparator, @SimpleSetter.Parameter(name="theValue") double theValue, @SimpleSetter.Parameter(name="theSystem") String theSystem, @SimpleSetter.Parameter(name="theUnits") String theUnits) { + public QuantityDt( + @SimpleSetter.Parameter(name = "theComparator") QuantityComparatorEnum theComparator, + @SimpleSetter.Parameter(name = "theValue") double theValue, + @SimpleSetter.Parameter(name = "theSystem") String theSystem, + @SimpleSetter.Parameter(name = "theUnits") String theUnits) { setValue(theValue); setComparator(theComparator); setSystem(theSystem); @@ -112,7 +102,11 @@ public class QuantityDt * Constructor */ @SimpleSetter - public QuantityDt(@SimpleSetter.Parameter(name="theComparator") QuantityComparatorEnum theComparator, @SimpleSetter.Parameter(name="theValue") long theValue, @SimpleSetter.Parameter(name="theSystem") String theSystem, @SimpleSetter.Parameter(name="theUnits") String theUnits) { + public QuantityDt( + @SimpleSetter.Parameter(name = "theComparator") QuantityComparatorEnum theComparator, + @SimpleSetter.Parameter(name = "theValue") long theValue, + @SimpleSetter.Parameter(name = "theSystem") String theSystem, + @SimpleSetter.Parameter(name = "theUnits") String theUnits) { setValue(theValue); setComparator(theComparator); setSystem(theSystem); @@ -137,50 +131,46 @@ public class QuantityDt return getUnitElement(); } - @Child(name="value", type=DecimalDt.class, order=0, min=0, max=1, summary=true, modifier=false) + @Child(name = "value", type = DecimalDt.class, order = 0, min = 0, max = 1, summary = true, modifier = false) @Description( - 
shortDefinition="", - formalDefinition="The value of the measured amount. The value includes an implicit precision in the presentation of the value" - ) + shortDefinition = "", + formalDefinition = + "The value of the measured amount. The value includes an implicit precision in the presentation of the value") private DecimalDt myValue; - - @Child(name="comparator", type=CodeDt.class, order=1, min=0, max=1, summary=true, modifier=true) + + @Child(name = "comparator", type = CodeDt.class, order = 1, min = 0, max = 1, summary = true, modifier = true) @Description( - shortDefinition="", - formalDefinition="How the value should be understood and represented - whether the actual value is greater or less than the stated value due to measurement issues; e.g. if the comparator is \"<\" , then the real value is < stated value" - ) + shortDefinition = "", + formalDefinition = + "How the value should be understood and represented - whether the actual value is greater or less than the stated value due to measurement issues; e.g. if the comparator is \"<\" , then the real value is < stated value") private BoundCodeDt myComparator; - - @Child(name="unit", type=StringDt.class, order=2, min=0, max=1, summary=true, modifier=false) - @Description( - shortDefinition="", - formalDefinition="A human-readable form of the unit" - ) + + @Child(name = "unit", type = StringDt.class, order = 2, min = 0, max = 1, summary = true, modifier = false) + @Description(shortDefinition = "", formalDefinition = "A human-readable form of the unit") private StringDt myUnit; - - @Child(name="system", type=UriDt.class, order=3, min=0, max=1, summary=true, modifier=false) + + @Child(name = "system", type = UriDt.class, order = 3, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="The identification of the system that provides the coded form of the unit" - ) + shortDefinition = "", + formalDefinition = "The identification of the system that provides the coded form of the unit") private UriDt mySystem; - - @Child(name="code", type=CodeDt.class, order=4, min=0, max=1, summary=true, modifier=false) + + @Child(name = "code", type = CodeDt.class, order = 4, min = 0, max = 1, summary = true, modifier = false) @Description( - shortDefinition="", - formalDefinition="A computer processable form of the unit in some unit representation system" - ) + shortDefinition = "", + formalDefinition = "A computer processable form of the unit in some unit representation system") private CodeDt myCode; - @Override public boolean isEmpty() { - return super.isBaseEmpty() && ca.uhn.fhir.util.ElementUtil.isEmpty( myValue, myComparator, myUnit, mySystem, myCode); + return super.isBaseEmpty() + && ca.uhn.fhir.util.ElementUtil.isEmpty(myValue, myComparator, myUnit, mySystem, myCode); } - + @Override public List getAllPopulatedChildElementsOfType(Class theType) { - return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements(theType, myValue, myComparator, myUnit, mySystem, myCode); + return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements( + theType, myValue, myComparator, myUnit, mySystem, myCode); } /** @@ -188,10 +178,10 @@ public class QuantityDt * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The value of the measured amount. The value includes an implicit precision in the presentation of the value - *

    + *

    + * Definition: + * The value of the measured amount. The value includes an implicit precision in the presentation of the value + *

    */ @Override public DecimalDt getValueElement() { @@ -201,86 +191,82 @@ public class QuantityDt return myValue; } - /** * Gets the value(s) for value (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The value of the measured amount. The value includes an implicit precision in the presentation of the value - *

    + *

    + * Definition: + * The value of the measured amount. The value includes an implicit precision in the presentation of the value + *

    */ - public BigDecimal getValue() { + public BigDecimal getValue() { return getValueElement().getValue(); } /** * Sets the value(s) for value () * - *

    - * Definition: - * The value of the measured amount. The value includes an implicit precision in the presentation of the value - *

    + *

    + * Definition: + * The value of the measured amount. The value includes an implicit precision in the presentation of the value + *

    */ public QuantityDt setValue(DecimalDt theValue) { myValue = theValue; return this; } - - - /** + /** * Sets the value for value () * - *

    - * Definition: - * The value of the measured amount. The value includes an implicit precision in the presentation of the value - *

    + *

    + * Definition: + * The value of the measured amount. The value includes an implicit precision in the presentation of the value + *

    */ - public QuantityDt setValue( long theValue) { - myValue = new DecimalDt(theValue); - return this; + public QuantityDt setValue(long theValue) { + myValue = new DecimalDt(theValue); + return this; } /** * Sets the value for value () * - *

    - * Definition: - * The value of the measured amount. The value includes an implicit precision in the presentation of the value - *

    + *

    + * Definition: + * The value of the measured amount. The value includes an implicit precision in the presentation of the value + *

    */ - public QuantityDt setValue( double theValue) { - myValue = new DecimalDt(theValue); - return this; + public QuantityDt setValue(double theValue) { + myValue = new DecimalDt(theValue); + return this; } /** * Sets the value for value () * - *

    - * Definition: - * The value of the measured amount. The value includes an implicit precision in the presentation of the value - *

    + *

    + * Definition: + * The value of the measured amount. The value includes an implicit precision in the presentation of the value + *

    */ @Override public QuantityDt setValue(java.math.BigDecimal theValue) { - myValue = new DecimalDt(theValue); - return this; + myValue = new DecimalDt(theValue); + return this; } - /** * Gets the value(s) for comparator (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * How the value should be understood and represented - whether the actual value is greater or less than the stated value due to measurement issues; e.g. if the comparator is \"<\" , then the real value is < stated value - *

    + *

    + * Definition: + * How the value should be understood and represented - whether the actual value is greater or less than the stated value due to measurement issues; e.g. if the comparator is \"<\" , then the real value is < stated value + *

    */ @Override public BoundCodeDt getComparatorElement() { @@ -290,124 +276,116 @@ public class QuantityDt return myComparator; } - /** * Gets the value(s) for comparator (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * How the value should be understood and represented - whether the actual value is greater or less than the stated value due to measurement issues; e.g. if the comparator is \"<\" , then the real value is < stated value - *

    + *

    + * Definition: + * How the value should be understood and represented - whether the actual value is greater or less than the stated value due to measurement issues; e.g. if the comparator is \"<\" , then the real value is < stated value + *

    */ - public String getComparator() { + public String getComparator() { return getComparatorElement().getValue(); } /** * Sets the value(s) for comparator () * - *

    - * Definition: - * How the value should be understood and represented - whether the actual value is greater or less than the stated value due to measurement issues; e.g. if the comparator is \"<\" , then the real value is < stated value - *

    + *

    + * Definition: + * How the value should be understood and represented - whether the actual value is greater or less than the stated value due to measurement issues; e.g. if the comparator is \"<\" , then the real value is < stated value + *

    */ public QuantityDt setComparator(BoundCodeDt theValue) { myComparator = theValue; return this; } - - /** * Sets the value(s) for comparator () * - *

    - * Definition: - * How the value should be understood and represented - whether the actual value is greater or less than the stated value due to measurement issues; e.g. if the comparator is \"<\" , then the real value is < stated value - *

    + *

    + * Definition: + * How the value should be understood and represented - whether the actual value is greater or less than the stated value due to measurement issues; e.g. if the comparator is \"<\" , then the real value is < stated value + *

    */ public QuantityDt setComparator(QuantityComparatorEnum theValue) { setComparator(new BoundCodeDt(QuantityComparatorEnum.VALUESET_BINDER, theValue)); - -/* - getComparatorElement().setValueAsEnum(theValue); -*/ + + /* + getComparatorElement().setValueAsEnum(theValue); + */ return this; } - /** * Gets the value(s) for unit (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * A human-readable form of the unit - *

    + *

    + * Definition: + * A human-readable form of the unit + *

    */ - public StringDt getUnitElement() { + public StringDt getUnitElement() { if (myUnit == null) { myUnit = new StringDt(); } return myUnit; } - /** * Gets the value(s) for unit (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * A human-readable form of the unit - *

    + *

    + * Definition: + * A human-readable form of the unit + *

    */ - public String getUnit() { + public String getUnit() { return getUnitElement().getValue(); } /** * Sets the value(s) for unit () * - *

    - * Definition: - * A human-readable form of the unit - *

    + *

    + * Definition: + * A human-readable form of the unit + *

    */ public QuantityDt setUnit(StringDt theValue) { myUnit = theValue; return this; } - - - /** + /** * Sets the value for unit () * - *

    - * Definition: - * A human-readable form of the unit - *

    + *

    + * Definition: + * A human-readable form of the unit + *

    */ - public QuantityDt setUnit( String theString) { - myUnit = new StringDt(theString); - return this; + public QuantityDt setUnit(String theString) { + myUnit = new StringDt(theString); + return this; } - /** * Gets the value(s) for system (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The identification of the system that provides the coded form of the unit - *

    + *

    + * Definition: + * The identification of the system that provides the coded form of the unit + *

    */ @Override public UriDt getSystemElement() { @@ -417,60 +395,56 @@ public class QuantityDt return mySystem; } - /** * Gets the value(s) for system (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * The identification of the system that provides the coded form of the unit - *

    + *

    + * Definition: + * The identification of the system that provides the coded form of the unit + *

    */ - public String getSystem() { + public String getSystem() { return getSystemElement().getValue(); } /** * Sets the value(s) for system () * - *

    - * Definition: - * The identification of the system that provides the coded form of the unit - *

    + *

    + * Definition: + * The identification of the system that provides the coded form of the unit + *

    */ public QuantityDt setSystem(UriDt theValue) { mySystem = theValue; return this; } - - - /** + /** * Sets the value for system () * - *

    - * Definition: - * The identification of the system that provides the coded form of the unit - *

    + *

    + * Definition: + * The identification of the system that provides the coded form of the unit + *

    */ @Override public QuantityDt setSystem(String theUri) { - mySystem = new UriDt(theUri); - return this; + mySystem = new UriDt(theUri); + return this; } - /** * Gets the value(s) for code (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * A computer processable form of the unit in some unit representation system - *

    + *

    + * Definition: + * A computer processable form of the unit in some unit representation system + *

    */ @Override public CodeDt getCodeElement() { @@ -480,51 +454,44 @@ public class QuantityDt return myCode; } - /** * Gets the value(s) for code (). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * A computer processable form of the unit in some unit representation system - *

    + *

    + * Definition: + * A computer processable form of the unit in some unit representation system + *

    */ - public String getCode() { + public String getCode() { return getCodeElement().getValue(); } /** * Sets the value(s) for code () * - *

    - * Definition: - * A computer processable form of the unit in some unit representation system - *

    + *

    + * Definition: + * A computer processable form of the unit in some unit representation system + *

    */ public QuantityDt setCode(CodeDt theValue) { myCode = theValue; return this; } - - - /** + /** * Sets the value for code () * - *

    - * Definition: - * A computer processable form of the unit in some unit representation system - *

    + *

    + * Definition: + * A computer processable form of the unit in some unit representation system + *

    */ @Override public QuantityDt setCode(String theCode) { - myCode = new CodeDt(theCode); - return this; + myCode = new CodeDt(theCode); + return this; } - - - - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/ResourceReferenceDt.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/ResourceReferenceDt.java index a95ca5fefed..eac930ad482 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/ResourceReferenceDt.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/ResourceReferenceDt.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -18,27 +18,8 @@ * #L% */ - - - - - - - - - - - - - - - package ca.uhn.fhir.model.dstu3.composite; -import java.util.List; - -import org.hl7.fhir.instance.model.api.IIdType; - import ca.uhn.fhir.model.api.ICompositeDatatype; import ca.uhn.fhir.model.api.IElement; import ca.uhn.fhir.model.api.IResource; @@ -49,6 +30,9 @@ import ca.uhn.fhir.model.api.annotation.SimpleSetter; import ca.uhn.fhir.model.base.composite.BaseResourceReferenceDt; import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.model.primitive.StringDt; +import org.hl7.fhir.instance.model.api.IIdType; + +import java.util.List; /** * HAPI/FHIR ResourceReferenceDt Datatype @@ -57,17 +41,15 @@ import ca.uhn.fhir.model.primitive.StringDt; *

    * Definition: * A reference from one resource to another - *

    + *

    * *

    * Requirements: - * - *

    + * + *

    */ -@DatatypeDef(name="ResourceReferenceDt") -public class ResourceReferenceDt - extends BaseResourceReferenceDt implements ICompositeDatatype -{ +@DatatypeDef(name = "ResourceReferenceDt") +public class ResourceReferenceDt extends BaseResourceReferenceDt implements ICompositeDatatype { /** * Constructor @@ -84,7 +66,7 @@ public class ResourceReferenceDt * a hard-and-fast rule however, as the server can be configured to not serialized this resource, or to load an ID * and contain even if this constructor is not used. *

    - * + * * @param theResource * The resource instance */ @@ -96,7 +78,7 @@ public class ResourceReferenceDt /** * Constructor which accepts a reference directly (this can be an ID, a partial/relative URL or a complete/absolute * URL) - * + * * @param theId * The reference itself */ @@ -107,7 +89,7 @@ public class ResourceReferenceDt /** * Constructor which accepts a reference directly (this can be an ID, a partial/relative URL or a complete/absolute * URL) - * + * * @param theResourceId * The reference itself */ @@ -118,7 +100,7 @@ public class ResourceReferenceDt /** * Constructor which accepts a reference directly (this can be an ID, a partial/relative URL or a complete/absolute * URL) - * + * * @param theResourceId * The reference itself */ @@ -126,26 +108,25 @@ public class ResourceReferenceDt setReference(theResourceId); } - @Child(name="reference", type=IdDt.class, order=0, min=0, max=1) + @Child(name = "reference", type = IdDt.class, order = 0, min = 0, max = 1) @Description( - shortDefinition="Relative, internal or absolute URL reference", - formalDefinition="A reference to a location at which the other resource is found. The reference may a relative reference, in which case it is relative to the service base URL, or an absolute URL that resolves to the location where the resource is found. The reference may be version specific or not. If the reference is not to a FHIR RESTful server, then it should be assumed to be version specific. Internal fragment references (start with '#') refer to contained resources" - ) + shortDefinition = "Relative, internal or absolute URL reference", + formalDefinition = + "A reference to a location at which the other resource is found. The reference may a relative reference, in which case it is relative to the service base URL, or an absolute URL that resolves to the location where the resource is found. The reference may be version specific or not. If the reference is not to a FHIR RESTful server, then it should be assumed to be version specific. Internal fragment references (start with '#') refer to contained resources") private IdDt myReference; - - @Child(name="display", type=StringDt.class, order=1, min=0, max=1) + + @Child(name = "display", type = StringDt.class, order = 1, min = 0, max = 1) @Description( - shortDefinition="Text alternative for the resource", - formalDefinition="Plain text narrative that identifies the resource in addition to the resource reference" - ) + shortDefinition = "Text alternative for the resource", + formalDefinition = + "Plain text narrative that identifies the resource in addition to the resource reference") private StringDt myDisplay; - @Override public boolean isEmpty() { - return super.isBaseEmpty() && ca.uhn.fhir.util.ElementUtil.isEmpty( myReference, myDisplay); + return super.isBaseEmpty() && ca.uhn.fhir.util.ElementUtil.isEmpty(myReference, myDisplay); } - + @Override public List getAllPopulatedChildElementsOfType(Class theType) { return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements(theType, myReference, myDisplay); @@ -156,10 +137,10 @@ public class ResourceReferenceDt * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * A reference to a location at which the other resource is found. The reference may a relative reference, in which case it is relative to the service base URL, or an absolute URL that resolves to the location where the resource is found. The reference may be version specific or not. If the reference is not to a FHIR RESTful server, then it should be assumed to be version specific. Internal fragment references (start with '#') refer to contained resources - *

    + *

    + * Definition: + * A reference to a location at which the other resource is found. The reference may a relative reference, in which case it is relative to the service base URL, or an absolute URL that resolves to the location where the resource is found. The reference may be version specific or not. If the reference is not to a FHIR RESTful server, then it should be assumed to be version specific. Internal fragment references (start with '#') refer to contained resources + *

    */ @Override public IdDt getReference() { @@ -174,14 +155,13 @@ public class ResourceReferenceDt return getReference(); } - /** * Sets the value(s) for reference (Relative, internal or absolute URL reference) * - *

    - * Definition: - * A reference to a location at which the other resource is found. The reference may a relative reference, in which case it is relative to the service base URL, or an absolute URL that resolves to the location where the resource is found. The reference may be version specific or not. If the reference is not to a FHIR RESTful server, then it should be assumed to be version specific. Internal fragment references (start with '#') refer to contained resources - *

    + *

    + * Definition: + * A reference to a location at which the other resource is found. The reference may a relative reference, in which case it is relative to the service base URL, or an absolute URL that resolves to the location where the resource is found. The reference may be version specific or not. If the reference is not to a FHIR RESTful server, then it should be assumed to be version specific. Internal fragment references (start with '#') refer to contained resources + *

    */ @Override public ResourceReferenceDt setReference(IdDt theValue) { @@ -189,32 +169,31 @@ public class ResourceReferenceDt return this; } - /** + /** * Sets the value for reference (Relative, internal or absolute URL reference) * - *

    - * Definition: - * A reference to a location at which the other resource is found. The reference may a relative reference, in which case it is relative to the service base URL, or an absolute URL that resolves to the location where the resource is found. The reference may be version specific or not. If the reference is not to a FHIR RESTful server, then it should be assumed to be version specific. Internal fragment references (start with '#') refer to contained resources - *

    + *

    + * Definition: + * A reference to a location at which the other resource is found. The reference may a relative reference, in which case it is relative to the service base URL, or an absolute URL that resolves to the location where the resource is found. The reference may be version specific or not. If the reference is not to a FHIR RESTful server, then it should be assumed to be version specific. Internal fragment references (start with '#') refer to contained resources + *

    */ @Override public ResourceReferenceDt setReference(String theId) { - myReference = new IdDt(theId); - return this; + myReference = new IdDt(theId); + return this; } - /** * Gets the value(s) for display (Text alternative for the resource). * creating it if it does * not exist. Will not return null. * - *

    - * Definition: - * Plain text narrative that identifies the resource in addition to the resource reference - *

    + *

    + * Definition: + * Plain text narrative that identifies the resource in addition to the resource reference + *

    */ - public StringDt getDisplay() { + public StringDt getDisplay() { if (myDisplay == null) { myDisplay = new StringDt(); } @@ -224,36 +203,32 @@ public class ResourceReferenceDt /** * Sets the value(s) for display (Text alternative for the resource) * - *

    - * Definition: - * Plain text narrative that identifies the resource in addition to the resource reference - *

    + *

    + * Definition: + * Plain text narrative that identifies the resource in addition to the resource reference + *

    */ public ResourceReferenceDt setDisplay(StringDt theValue) { myDisplay = theValue; return this; } - /** + /** * Sets the value for display (Text alternative for the resource) * - *

    - * Definition: - * Plain text narrative that identifies the resource in addition to the resource reference - *

    + *

    + * Definition: + * Plain text narrative that identifies the resource in addition to the resource reference + *

    */ @Override public ResourceReferenceDt setDisplay(String theString) { - myDisplay = new StringDt(theString); - return this; + myDisplay = new StringDt(theString); + return this; } @Override public StringDt getDisplayElement() { return getDisplay(); } - - - - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/SimpleQuantityDt.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/SimpleQuantityDt.java index a40aae9768f..052b8b34781 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/SimpleQuantityDt.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/model/dstu3/composite/SimpleQuantityDt.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -23,7 +23,7 @@ import ca.uhn.fhir.model.api.annotation.DatatypeDef; import ca.uhn.fhir.model.api.annotation.SimpleSetter; import ca.uhn.fhir.model.dstu2.valueset.QuantityComparatorEnum; -@DatatypeDef(name="SimpleQuantity") +@DatatypeDef(name = "SimpleQuantity") public class SimpleQuantityDt extends QuantityDt { private static final long serialVersionUID = 1L; @@ -35,12 +35,11 @@ public class SimpleQuantityDt extends QuantityDt { // nothing } - /** * Constructor */ @SimpleSetter - public SimpleQuantityDt(@SimpleSetter.Parameter(name="theValue") double theValue) { + public SimpleQuantityDt(@SimpleSetter.Parameter(name = "theValue") double theValue) { setValue(theValue); } @@ -48,15 +47,17 @@ public class SimpleQuantityDt extends QuantityDt { * Constructor */ @SimpleSetter - public SimpleQuantityDt(@SimpleSetter.Parameter(name="theValue") long theValue) { + public SimpleQuantityDt(@SimpleSetter.Parameter(name = "theValue") long theValue) { setValue(theValue); } - + /** * Constructor */ @SimpleSetter - public SimpleQuantityDt(@SimpleSetter.Parameter(name = "theComparator") QuantityComparatorEnum theComparator, @SimpleSetter.Parameter(name = "theValue") double theValue, + public SimpleQuantityDt( + @SimpleSetter.Parameter(name = "theComparator") QuantityComparatorEnum theComparator, + @SimpleSetter.Parameter(name = "theValue") double theValue, @SimpleSetter.Parameter(name = "theUnits") String theUnits) { setValue(theValue); setComparator(theComparator); @@ -67,7 +68,9 @@ public class SimpleQuantityDt extends QuantityDt { * Constructor */ @SimpleSetter - public SimpleQuantityDt(@SimpleSetter.Parameter(name = "theComparator") QuantityComparatorEnum theComparator, @SimpleSetter.Parameter(name = "theValue") long theValue, + public SimpleQuantityDt( + @SimpleSetter.Parameter(name = "theComparator") QuantityComparatorEnum theComparator, + @SimpleSetter.Parameter(name = "theValue") long theValue, @SimpleSetter.Parameter(name = "theUnits") String theUnits) { setValue(theValue); setComparator(theComparator); @@ -78,7 +81,10 @@ public class SimpleQuantityDt extends QuantityDt { * Constructor */ @SimpleSetter - public SimpleQuantityDt(@SimpleSetter.Parameter(name="theValue") double theValue, @SimpleSetter.Parameter(name="theSystem") String theSystem, @SimpleSetter.Parameter(name="theUnits") String theUnits) { + public SimpleQuantityDt( + @SimpleSetter.Parameter(name = 
"theValue") double theValue, + @SimpleSetter.Parameter(name = "theSystem") String theSystem, + @SimpleSetter.Parameter(name = "theUnits") String theUnits) { setValue(theValue); setSystem(theSystem); setUnit(theUnits); @@ -88,10 +94,12 @@ public class SimpleQuantityDt extends QuantityDt { * Constructor */ @SimpleSetter - public SimpleQuantityDt(@SimpleSetter.Parameter(name="theValue") long theValue, @SimpleSetter.Parameter(name="theSystem") String theSystem, @SimpleSetter.Parameter(name="theUnits") String theUnits) { + public SimpleQuantityDt( + @SimpleSetter.Parameter(name = "theValue") long theValue, + @SimpleSetter.Parameter(name = "theSystem") String theSystem, + @SimpleSetter.Parameter(name = "theUnits") String theUnits) { setValue(theValue); setSystem(theSystem); setUnit(theUnits); } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/AbstractGenerator.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/AbstractGenerator.java index c7ab26b2c80..019d7031abf 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/AbstractGenerator.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/AbstractGenerator.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -18,9 +18,10 @@ * #L% */ package ca.uhn.fhir.tinder; -import ca.uhn.fhir.i18n.Msg; + import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.tinder.GeneratorContext.ResourceSource; import ca.uhn.fhir.tinder.parser.DatatypeGeneratorUsingSpreadsheet; import ca.uhn.fhir.tinder.parser.ResourceGeneratorUsingModel; @@ -32,11 +33,11 @@ import java.util.*; public abstract class AbstractGenerator { - protected abstract void logDebug (String message); + protected abstract void logDebug(String message); - protected abstract void logInfo (String message); - - public void prepare (GeneratorContext context) throws FailureException, MojoFailureException { + protected abstract void logInfo(String message); + + public void prepare(GeneratorContext context) throws FailureException, MojoFailureException { /* * Deal with the FHIR spec version @@ -58,18 +59,19 @@ public abstract class AbstractGenerator { throw new FailureException(Msg.code(95) + "Unknown version configured: " + context.getVersion()); } context.setPackageSuffix(packageSuffix); - + /* * Deal with which resources to process */ List includeResources = context.getIncludeResources(); List excludeResources = context.getExcludeResources(); - + if (includeResources == null || includeResources.isEmpty()) { includeResources = new ArrayList<>(); - - logInfo("No resource names supplied, going to use all resources from version: "+fhirContext.getVersion().getVersion()); - + + logInfo("No resource names supplied, going to use all resources from version: " + + fhirContext.getVersion().getVersion()); + Properties p = new Properties(); try { p.load(fhirContext.getVersion().getFhirVersionPropertiesFile()); @@ -77,10 +79,10 @@ public abstract class AbstractGenerator { throw new FailureException(Msg.code(96) + "Failed to load version property file", e); } - logDebug("Property file 
contains: "+p); + logDebug("Property file contains: " + p); TreeSet keys = new TreeSet<>(); - for(Object next : p.keySet()) { + for (Object next : p.keySet()) { keys.add((String) next); } for (String next : keys) { @@ -88,7 +90,7 @@ public abstract class AbstractGenerator { includeResources.add(next.substring("resource.".length()).toLowerCase()); } } - + if (fhirContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) { includeResources.remove("conformance"); } @@ -105,9 +107,8 @@ public abstract class AbstractGenerator { includeResources.removeAll(excludeResources); } context.setIncludeResources(includeResources); - - logInfo("Including the following elements: "+includeResources); - + + logInfo("Including the following elements: " + includeResources); /* * Fill in ValueSet and DataTypes used by the resources @@ -125,7 +126,7 @@ public abstract class AbstractGenerator { } catch (Exception e) { throw new FailureException(Msg.code(97) + "Failed to load valuesets", e); } - + /* * A few enums are not found by default because none of the generated classes * refer to them, but we still want them. @@ -133,7 +134,7 @@ public abstract class AbstractGenerator { vsp.getClassForValueSetIdAndMarkAsNeeded("NarrativeStatus"); logInfo("Loading Datatypes..."); - + dtp = new DatatypeGeneratorUsingSpreadsheet(context.getVersion(), context.getBaseDir()); context.setDatatypeGenerator(dtp); try { @@ -143,20 +144,21 @@ public abstract class AbstractGenerator { throw new FailureException(Msg.code(98) + "Failed to load datatypes", e); } dtp.bindValueSets(vsp); - + datatypeLocalImports = dtp.getLocalImports(); } /* * Load the requested resources */ - + logInfo("Loading Resources..."); try { switch (context.getResourceSource()) { case SPREADSHEET: { logInfo("... resource definitions from spreadsheets"); - ResourceGeneratorUsingSpreadsheet rp = new ResourceGeneratorUsingSpreadsheet(context.getVersion(), context.getBaseDir()); + ResourceGeneratorUsingSpreadsheet rp = + new ResourceGeneratorUsingSpreadsheet(context.getVersion(), context.getBaseDir()); context.setResourceGenerator(rp); rp.setBaseResourceNames(includeResources); @@ -167,16 +169,17 @@ public abstract class AbstractGenerator { rp.getLocalImports().putAll(datatypeLocalImports); datatypeLocalImports.putAll(rp.getLocalImports()); - + rp.combineContentMaps(dtp); dtp.combineContentMaps(rp); break; } case MODEL: { logInfo("... 
resource definitions from model structures"); - ResourceGeneratorUsingModel rp = new ResourceGeneratorUsingModel(context.getVersion(), context.getBaseDir()); + ResourceGeneratorUsingModel rp = + new ResourceGeneratorUsingModel(context.getVersion(), context.getBaseDir()); context.setResourceGenerator(rp); - + rp.setBaseResourceNames(includeResources); rp.parse(); rp.markResourcesForImports(); @@ -186,7 +189,6 @@ public abstract class AbstractGenerator { } catch (Exception e) { throw new FailureException(Msg.code(99) + "Failed to load resources", e); } - } public static class FailureException extends Exception { @@ -198,7 +200,6 @@ public abstract class AbstractGenerator { FailureException(String message) { super(message); } - } public static class ExecutionException extends Exception { @@ -206,6 +207,5 @@ public abstract class AbstractGenerator { public ExecutionException(String message) { super(message); } - } } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/AbstractGeneratorMojo.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/AbstractGeneratorMojo.java index 113b8eaaad2..67f72ea95fd 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/AbstractGeneratorMojo.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/AbstractGeneratorMojo.java @@ -40,11 +40,17 @@ public abstract class AbstractGeneratorMojo extends AbstractMojo { @Override public final void execute() throws MojoExecutionException, MojoFailureException { - doExecute(new Configuration(this.version, baseDir, getTargetDirectory(), this.packageBase, this.baseResourceNames, this.excludeResourceNames)); + doExecute(new Configuration( + this.version, + baseDir, + getTargetDirectory(), + this.packageBase, + this.baseResourceNames, + this.excludeResourceNames)); } - protected abstract void doExecute(Configuration mavenGeneratorConfiguration) throws MojoExecutionException, MojoFailureException; + protected abstract void doExecute(Configuration mavenGeneratorConfiguration) + throws MojoExecutionException, MojoFailureException; protected abstract File getTargetDirectory(); - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/Configuration.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/Configuration.java index 820286a65b1..3fd0a49aa5e 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/Configuration.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/Configuration.java @@ -1,8 +1,8 @@ package ca.uhn.fhir.tinder; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.tinder.parser.BaseStructureSpreadsheetParser; import org.apache.commons.lang.WordUtils; @@ -28,7 +28,13 @@ public class Configuration { private final List resourceNames = new ArrayList<>(); private String baseDir; - public Configuration(String version, String baseDir, File targetDirectory, String packageBase, List baseResourceNames, List excludeResourceNames) { + public Configuration( + String version, + String baseDir, + File targetDirectory, + String packageBase, + List baseResourceNames, + List excludeResourceNames) { this.targetDirectory = targetDirectory; this.packageBase = packageBase; this.packageDirectoryBase = new File(targetDirectory, packageBase.replace(".", File.separatorChar + "")); @@ -55,7 +61,9 @@ public class Configuration { this.version = version; if (baseResourceNames == null || baseResourceNames.isEmpty()) { - ourLog.info("No resource names supplied, going 
to use all resources from version: {}", fhirContext.getVersion().getVersion()); + ourLog.info( + "No resource names supplied, going to use all resources from version: {}", + fhirContext.getVersion().getVersion()); Properties p = new Properties(); try { diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/ExamineTestTrace.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/ExamineTestTrace.java index cbc3a5772ab..53c3633e039 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/ExamineTestTrace.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/ExamineTestTrace.java @@ -14,8 +14,8 @@ public class ExamineTestTrace { private static final Logger ourLog = LoggerFactory.getLogger(ExamineTestTrace.class); public static void main(String[] aaa) { - String input = "[INFO] Running ca.uhn.fhir.rest.client.RestfulClientFactoryDstu2Test\n" + - "[INFO] Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.982 s - in ca.uhn.fhir.validation.ResourceValidatorDstu2Test"; + String input = "[INFO] Running ca.uhn.fhir.rest.client.RestfulClientFactoryDstu2Test\n" + + "[INFO] Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.982 s - in ca.uhn.fhir.validation.ResourceValidatorDstu2Test"; Set started = new HashSet<>(); Set finished = new HashSet<>(); @@ -39,8 +39,5 @@ public class ExamineTestTrace { ourLog.info("Started {}", started.size()); ourLog.info("Finished {}", finished.size()); - } - - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/GeneratorContext.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/GeneratorContext.java index 21afe9fb9ee..0b2858c91c6 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/GeneratorContext.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/GeneratorContext.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -18,33 +18,35 @@ * #L% */ package ca.uhn.fhir.tinder; + import ca.uhn.fhir.i18n.Msg; -import java.util.List; - -import javax.security.auth.login.FailedLoginException; - -import org.apache.commons.lang3.StringUtils; -import org.apache.maven.plugins.annotations.Parameter; - import ca.uhn.fhir.tinder.AbstractGenerator.FailureException; import ca.uhn.fhir.tinder.TinderStructuresMojo.ValueSetFileDefinition; import ca.uhn.fhir.tinder.parser.BaseStructureParser; import ca.uhn.fhir.tinder.parser.DatatypeGeneratorUsingSpreadsheet; +import org.apache.commons.lang3.StringUtils; +import org.apache.maven.plugins.annotations.Parameter; + +import java.util.List; /** * @author Bill.Denton * */ public class GeneratorContext { - public enum ResourceSource {SPREADSHEET, MODEL}; + public enum ResourceSource { + SPREADSHEET, + MODEL + }; + public static final ResourceSource DEFAULT_RESOURCE_SOURCE = ResourceSource.SPREADSHEET; - + private String version; private String packageSuffix; private String baseDir; private List includeResources; private List excludeResources; - private ResourceSource resourceSource = DEFAULT_RESOURCE_SOURCE; + private ResourceSource resourceSource = DEFAULT_RESOURCE_SOURCE; private List valueSetFiles; private BaseStructureParser resourceGenerator = null; private ValueSetGenerator valueSetGenerator = null; @@ -77,6 +79,7 @@ public class GeneratorContext { public List getIncludeResources() { return includeResources; } + public void setIncludeResources(List includeResources) { this.includeResources = includeResources; } @@ -91,13 +94,11 @@ public class GeneratorContext { public void setResourceSource(String resourceSource) throws FailureException { resourceSource = StringUtils.stripToNull(resourceSource); - if (null == resourceSource) { + if (null == resourceSource) { this.resourceSource = DEFAULT_RESOURCE_SOURCE; - } else - if (ResourceSource.SPREADSHEET.name().equalsIgnoreCase(resourceSource)) { + } else if (ResourceSource.SPREADSHEET.name().equalsIgnoreCase(resourceSource)) { this.resourceSource = ResourceSource.SPREADSHEET; - } else - if (ResourceSource.MODEL.name().equalsIgnoreCase(resourceSource)) { + } else if (ResourceSource.MODEL.name().equalsIgnoreCase(resourceSource)) { this.resourceSource = ResourceSource.MODEL; } else { throw new FailureException(Msg.code(112) + "Unknown resource-source option: " + resourceSource); @@ -131,6 +132,7 @@ public class GeneratorContext { public ValueSetGenerator getValueSetGenerator() { return valueSetGenerator; } + public void setValueSetGenerator(ValueSetGenerator valueSetGenerator) { this.valueSetGenerator = valueSetGenerator; } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/ResourceMinimizerMojo.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/ResourceMinimizerMojo.java index 6d2ab137c6e..57ce394f3d4 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/ResourceMinimizerMojo.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/ResourceMinimizerMojo.java @@ -1,12 +1,15 @@ package ca.uhn.fhir.tinder; +import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.i18n.Msg; -import java.io.*; -import java.net.URL; -import java.nio.charset.StandardCharsets; -import java.util.Collection; - +import ca.uhn.fhir.model.api.IResource; +import ca.uhn.fhir.model.dstu2.resource.Bundle; +import ca.uhn.fhir.model.dstu2.resource.Bundle.Entry; +import ca.uhn.fhir.parser.IParser; +import ca.uhn.fhir.rest.api.EncodingEnum; import ca.uhn.fhir.util.ResourceUtil; +import ch.qos.logback.classic.LoggerContext; +import 
ch.qos.logback.core.joran.util.ConfigurationWatchListUtil; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; @@ -16,14 +19,10 @@ import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; import org.hl7.fhir.instance.model.api.IBaseResource; -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.model.api.IResource; -import ca.uhn.fhir.model.dstu2.resource.Bundle; -import ca.uhn.fhir.model.dstu2.resource.Bundle.Entry; -import ca.uhn.fhir.parser.IParser; -import ca.uhn.fhir.rest.api.EncodingEnum; -import ch.qos.logback.classic.LoggerContext; -import ch.qos.logback.core.joran.util.ConfigurationWatchListUtil; +import java.io.*; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.util.Collection; @Mojo(name = "minimize-resources", defaultPhase = LifecyclePhase.GENERATE_SOURCES) public class ResourceMinimizerMojo extends AbstractMojo { @@ -65,8 +64,8 @@ public class ResourceMinimizerMojo extends AbstractMojo { } ourLog.info("Looking for files in directory: {}", targetDirectory.getAbsolutePath()); - - Collection files = FileUtils.listFiles(targetDirectory, new String[] { "xml", "json" }, true); + + Collection files = FileUtils.listFiles(targetDirectory, new String[] {"xml", "json"}, true); for (File nextFile : files) { ourLog.debug("Checking file: {}", nextFile); @@ -80,7 +79,7 @@ public class ResourceMinimizerMojo extends AbstractMojo { IParser parser = EncodingEnum.detectEncoding(inputString).newParser(myCtx); IBaseResource input = parser.parseResource(inputString); - if (input instanceof IResource) { + if (input instanceof IResource) { ((IResource) input).getText().getDiv().setValueAsString(null); ((IResource) input).getText().getStatus().setValueAsString(null); if (input instanceof Bundle) { @@ -112,7 +111,11 @@ public class ResourceMinimizerMojo extends AbstractMojo { outputString = b.toString(); if (!inputString.equals(outputString)) { - ourLog.info("Trimming contents of resource: {} - From {} to {}", nextFile, FileUtils.byteCountToDisplaySize(inputString.length()), FileUtils.byteCountToDisplaySize(outputString.length())); + ourLog.info( + "Trimming contents of resource: {} - From {} to {}", + nextFile, + FileUtils.byteCountToDisplaySize(inputString.length()), + FileUtils.byteCountToDisplaySize(outputString.length())); myByteCount += (inputString.length() - outputString.length()); myFileCount++; try { @@ -124,9 +127,7 @@ public class ResourceMinimizerMojo extends AbstractMojo { } catch (IOException e) { throw new MojoFailureException(Msg.code(119) + "Failed to write " + nextFile, e); } - } - } } @@ -146,7 +147,7 @@ public class ResourceMinimizerMojo extends AbstractMojo { FhirContext ctxR4B; FhirContext ctxR5; ctxDstu2 = FhirContext.forDstu2(); -// ctxDstu2_1 = FhirContext.forDstu2_1(); + // ctxDstu2_1 = FhirContext.forDstu2_1(); ctxDstu3 = FhirContext.forDstu3(); ctxR4 = FhirContext.forR4(); ctxR4B = FhirContext.forR4B(); @@ -160,90 +161,100 @@ public class ResourceMinimizerMojo extends AbstractMojo { int fileCount = 0; long byteCount = 0; - + ResourceMinimizerMojo m = new ResourceMinimizerMojo(); -// m.myCtx = ctxDstu2; -// m.targetDirectory = new File("./hapi-tinder-plugin/src/main/resources/vs/dstu2"); -// m.fhirVersion = "DSTU2"; -// m.execute(); -// byteCount += m.getByteCount(); -// fileCount += m.getFileCount(); -// -// m = new ResourceMinimizerMojo(); -// m.myCtx = ctxDstu2; -// m.targetDirectory = new 
File("./hapi-fhir-validation-resources-dstu2/src/main/resources/org/hl7/fhir/instance/model/valueset"); -// m.fhirVersion = "DSTU2"; -// m.execute(); -// byteCount += m.getByteCount(); -// fileCount += m.getFileCount(); -// -// m = new ResourceMinimizerMojo(); -// m.myCtx = ctxDstu2; -// m.targetDirectory = new File("./hapi-fhir-validation-resources-dstu2/src/main/resources/org/hl7/fhir/instance/model/profile"); -// m.fhirVersion = "DSTU2"; -// m.execute(); -// byteCount += m.getByteCount(); -// fileCount += m.getFileCount(); + // m.myCtx = ctxDstu2; + // m.targetDirectory = new File("./hapi-tinder-plugin/src/main/resources/vs/dstu2"); + // m.fhirVersion = "DSTU2"; + // m.execute(); + // byteCount += m.getByteCount(); + // fileCount += m.getFileCount(); + // + // m = new ResourceMinimizerMojo(); + // m.myCtx = ctxDstu2; + // m.targetDirectory = new + // File("./hapi-fhir-validation-resources-dstu2/src/main/resources/org/hl7/fhir/instance/model/valueset"); + // m.fhirVersion = "DSTU2"; + // m.execute(); + // byteCount += m.getByteCount(); + // fileCount += m.getFileCount(); + // + // m = new ResourceMinimizerMojo(); + // m.myCtx = ctxDstu2; + // m.targetDirectory = new + // File("./hapi-fhir-validation-resources-dstu2/src/main/resources/org/hl7/fhir/instance/model/profile"); + // m.fhirVersion = "DSTU2"; + // m.execute(); + // byteCount += m.getByteCount(); + // fileCount += m.getFileCount(); -// m = new ResourceMinimizerMojo(); -// m.myCtx = ctxDstu3; -// m.targetDirectory = new File("./hapi-fhir-validation-resources-dstu3/src/main/resources/org/hl7/fhir/dstu3/model/profile"); -// m.fhirVersion = "DSTU3"; -// m.execute(); -// byteCount += m.getByteCount(); -// fileCount += m.getFileCount(); -// -// m = new ResourceMinimizerMojo(); -// m.myCtx = ctxDstu3; -// m.targetDirectory = new File("./hapi-fhir-validation-resources-dstu3/src/main/resources/org/hl7/fhir/dstu3/model/valueset"); -// m.fhirVersion = "DSTU3"; -// m.execute(); -// byteCount += m.getByteCount(); + // m = new ResourceMinimizerMojo(); + // m.myCtx = ctxDstu3; + // m.targetDirectory = new + // File("./hapi-fhir-validation-resources-dstu3/src/main/resources/org/hl7/fhir/dstu3/model/profile"); + // m.fhirVersion = "DSTU3"; + // m.execute(); + // byteCount += m.getByteCount(); + // fileCount += m.getFileCount(); + // + // m = new ResourceMinimizerMojo(); + // m.myCtx = ctxDstu3; + // m.targetDirectory = new + // File("./hapi-fhir-validation-resources-dstu3/src/main/resources/org/hl7/fhir/dstu3/model/valueset"); + // m.fhirVersion = "DSTU3"; + // m.execute(); + // byteCount += m.getByteCount(); -// m = new ResourceMinimizerMojo(); -// m.myCtx = ctxDstu2_1; -// m.targetDirectory = new File("./hapi-fhir-validation-resources-dstu2.1/src/main/resources/org/hl7/fhir/dstu2016may/model/profile"); -// m.fhirVersion = "DSTU2_1"; -// m.execute(); -// byteCount += m.getByteCount(); -// fileCount += m.getFileCount(); + // m = new ResourceMinimizerMojo(); + // m.myCtx = ctxDstu2_1; + // m.targetDirectory = new + // File("./hapi-fhir-validation-resources-dstu2.1/src/main/resources/org/hl7/fhir/dstu2016may/model/profile"); + // m.fhirVersion = "DSTU2_1"; + // m.execute(); + // byteCount += m.getByteCount(); + // fileCount += m.getFileCount(); -// m = new ResourceMinimizerMojo(); -// m.myCtx = ctxDstu2_1; -// m.targetDirectory = new File("./hapi-fhir-validation-resources-dstu2.1/src/main/resources/org/hl7/fhir/dstu2016may/model/valueset"); -// m.fhirVersion = "DSTU2_1"; -// m.execute(); -// byteCount += m.getByteCount(); -// fileCount += 
m.getFileCount(); + // m = new ResourceMinimizerMojo(); + // m.myCtx = ctxDstu2_1; + // m.targetDirectory = new + // File("./hapi-fhir-validation-resources-dstu2.1/src/main/resources/org/hl7/fhir/dstu2016may/model/valueset"); + // m.fhirVersion = "DSTU2_1"; + // m.execute(); + // byteCount += m.getByteCount(); + // fileCount += m.getFileCount(); -// m = new ResourceMinimizerMojo(); -// m.myCtx = ctxR4; -// m.targetDirectory = new File("./hapi-fhir-validation-resources-r4/src/main/resources/org/hl7/fhir/r4/model/profile"); -// m.fhirVersion = "R4"; -// m.execute(); -// byteCount += m.getByteCount(); -// fileCount += m.getFileCount(); -// -// m = new ResourceMinimizerMojo(); -// m.myCtx = ctxR4; -// m.targetDirectory = new File("./hapi-fhir-validation-resources-r4/src/main/resources/org/hl7/fhir/r4/model/valueset"); -// m.fhirVersion = "R4"; -// m.execute(); -// byteCount += m.getByteCount(); -// fileCount += m.getFileCount(); + // m = new ResourceMinimizerMojo(); + // m.myCtx = ctxR4; + // m.targetDirectory = new + // File("./hapi-fhir-validation-resources-r4/src/main/resources/org/hl7/fhir/r4/model/profile"); + // m.fhirVersion = "R4"; + // m.execute(); + // byteCount += m.getByteCount(); + // fileCount += m.getFileCount(); + // + // m = new ResourceMinimizerMojo(); + // m.myCtx = ctxR4; + // m.targetDirectory = new + // File("./hapi-fhir-validation-resources-r4/src/main/resources/org/hl7/fhir/r4/model/valueset"); + // m.fhirVersion = "R4"; + // m.execute(); + // byteCount += m.getByteCount(); + // fileCount += m.getFileCount(); -// m = new ResourceMinimizerMojo(); -// m.myCtx = ctxR4; -// m.targetDirectory = new File("./hapi-fhir-validation-resources-r4/src/main/resources/org/hl7/fhir/r4/model/sp"); -// m.fhirVersion = "R4"; -// m.execute(); -// byteCount += m.getByteCount(); -// fileCount += m.getFileCount(); + // m = new ResourceMinimizerMojo(); + // m.myCtx = ctxR4; + // m.targetDirectory = new + // File("./hapi-fhir-validation-resources-r4/src/main/resources/org/hl7/fhir/r4/model/sp"); + // m.fhirVersion = "R4"; + // m.execute(); + // byteCount += m.getByteCount(); + // fileCount += m.getFileCount(); m = new ResourceMinimizerMojo(); m.myCtx = ctxR4B; - m.targetDirectory = new File("./hapi-fhir-validation-resources-r4b/src/main/resources/org/hl7/fhir/r4b/model/profile"); + m.targetDirectory = + new File("./hapi-fhir-validation-resources-r4b/src/main/resources/org/hl7/fhir/r4b/model/profile"); m.fhirVersion = "R4B"; m.execute(); byteCount += m.getByteCount(); @@ -251,7 +262,8 @@ public class ResourceMinimizerMojo extends AbstractMojo { m = new ResourceMinimizerMojo(); m.myCtx = ctxR4B; - m.targetDirectory = new File("./hapi-fhir-validation-resources-r4b/src/main/resources/org/hl7/fhir/r4b/model/valueset"); + m.targetDirectory = + new File("./hapi-fhir-validation-resources-r4b/src/main/resources/org/hl7/fhir/r4b/model/valueset"); m.fhirVersion = "R4B"; m.execute(); byteCount += m.getByteCount(); @@ -259,7 +271,8 @@ public class ResourceMinimizerMojo extends AbstractMojo { m = new ResourceMinimizerMojo(); m.myCtx = ctxR4B; - m.targetDirectory = new File("./hapi-fhir-validation-resources-r4b/src/main/resources/org/hl7/fhir/r4b/model/extension"); + m.targetDirectory = + new File("./hapi-fhir-validation-resources-r4b/src/main/resources/org/hl7/fhir/r4b/model/extension"); m.fhirVersion = "R4B"; m.execute(); byteCount += m.getByteCount(); @@ -267,19 +280,17 @@ public class ResourceMinimizerMojo extends AbstractMojo { m = new ResourceMinimizerMojo(); m.myCtx = ctxR4B; - m.targetDirectory = new 
File("./hapi-fhir-validation-resources-r4b/src/main/resources/org/hl7/fhir/r4b/model/sp"); + m.targetDirectory = + new File("./hapi-fhir-validation-resources-r4b/src/main/resources/org/hl7/fhir/r4b/model/sp"); m.fhirVersion = "R4B"; m.execute(); byteCount += m.getByteCount(); fileCount += m.getFileCount(); - - - - m = new ResourceMinimizerMojo(); m.myCtx = ctxR5; - m.targetDirectory = new File("./hapi-fhir-validation-resources-r5/src/main/resources/org/hl7/fhir/r5/model/profile"); + m.targetDirectory = + new File("./hapi-fhir-validation-resources-r5/src/main/resources/org/hl7/fhir/r5/model/profile"); m.fhirVersion = "R5"; m.execute(); byteCount += m.getByteCount(); @@ -287,7 +298,8 @@ public class ResourceMinimizerMojo extends AbstractMojo { m = new ResourceMinimizerMojo(); m.myCtx = ctxR5; - m.targetDirectory = new File("./hapi-fhir-validation-resources-r5/src/main/resources/org/hl7/fhir/r5/model/valueset"); + m.targetDirectory = + new File("./hapi-fhir-validation-resources-r5/src/main/resources/org/hl7/fhir/r5/model/valueset"); m.fhirVersion = "R5"; m.execute(); byteCount += m.getByteCount(); @@ -295,7 +307,8 @@ public class ResourceMinimizerMojo extends AbstractMojo { m = new ResourceMinimizerMojo(); m.myCtx = ctxR5; - m.targetDirectory = new File("./hapi-fhir-validation-resources-r5/src/main/resources/org/hl7/fhir/r5/model/extension"); + m.targetDirectory = + new File("./hapi-fhir-validation-resources-r5/src/main/resources/org/hl7/fhir/r5/model/extension"); m.fhirVersion = "R5"; m.execute(); byteCount += m.getByteCount(); @@ -312,5 +325,4 @@ public class ResourceMinimizerMojo extends AbstractMojo { ourLog.info("Trimmed {} files", fileCount); ourLog.info("Trimmed {} bytes", FileUtils.byteCountToDisplaySize(byteCount)); } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderGenericMultiFileMojo.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderGenericMultiFileMojo.java index d38ba89ab3f..4b8ef6ac7f7 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderGenericMultiFileMojo.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderGenericMultiFileMojo.java @@ -1,10 +1,12 @@ package ca.uhn.fhir.tinder; import ca.uhn.fhir.i18n.Msg; -import java.io.File; -import java.io.IOException; -import java.util.List; - +import ca.uhn.fhir.tinder.AbstractGenerator.FailureException; +import ca.uhn.fhir.tinder.GeneratorContext.ResourceSource; +import ca.uhn.fhir.tinder.TinderStructuresMojo.ValueSetFileDefinition; +import ca.uhn.fhir.tinder.parser.BaseStructureParser; +import ca.uhn.fhir.tinder.parser.DatatypeGeneratorUsingSpreadsheet; +import ca.uhn.fhir.tinder.parser.TargetType; import org.apache.maven.model.Resource; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; @@ -15,13 +17,9 @@ import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; import org.apache.maven.project.MavenProject; -import ca.uhn.fhir.tinder.AbstractGenerator.ExecutionException; -import ca.uhn.fhir.tinder.AbstractGenerator.FailureException; -import ca.uhn.fhir.tinder.GeneratorContext.ResourceSource; -import ca.uhn.fhir.tinder.TinderStructuresMojo.ValueSetFileDefinition; -import ca.uhn.fhir.tinder.parser.BaseStructureParser; -import ca.uhn.fhir.tinder.parser.DatatypeGeneratorUsingSpreadsheet; -import ca.uhn.fhir.tinder.parser.TargetType; +import java.io.File; +import java.io.IOException; +import java.util.List; /** * Generate files from FHIR resource/composite metadata 
using Velocity templates. @@ -31,7 +29,7 @@ import ca.uhn.fhir.tinder.parser.TargetType; * files are generated using a Velocity template that can be taken from * inside the hapi-timder-plugin project or can be located in other projects *

    - * The following Maven plug-in configuration properties are used with this plug-in
    + * The following Maven plug-in configuration properties are used with this plug-in
    *
 @@ -48,38 +46,38 @@ import ca.uhn.fhir.tinder.parser.TargetType;
    *
    * Configuration properties (attribute: description; required):
    *
    * baseDir: The Maven project's base directory. This is used to possibly locate other assets
    *   within the project used in file generation.
    *   Required: No. Defaults to: ${project.build.directory}/..
    * generateResources: Should files be generated from FHIR resource metadata?
    *   Valid values: true | false
    *   Required: One of these four options must be specified as true
    * generateDataTypes: Should files be generated from FHIR composite data type metadata?
    *   Valid values: true | false
    * generateValueSets: Should files be generated from FHIR value set metadata?
    *   Valid values: true | false
    * generateProfiles: Should files be generated from FHIR profile metadata?
    *   Valid values: true | false
    * resourceSource: Which source of resource definitions should be processed? Valid values are:
    *   spreadsheet, to cause resources to be generated based on the FHIR spreadsheets
    *   model, to cause resources to be generated based on the model structure classes. Note that
    *     generateResources is the only one of the above options that can be used when model is specified.
    *   Required: No. Defaults to: spreadsheet
    * filenamePrefix: The prefix string that is to be added onto the beginning of the resource or
    *   composite data type name to become the Java class name or resource file name.
    *   Required: No
    * filenameSuffix: Suffix that will be added onto the end of the resource or composite data type
    *   name to become the Java class name or resource file name.
    *   Required: No.
    * templateFile: The full path to the Velocity template that is to be used to generate the files.
    * velocityPath
    * includeResources: A list of the names of the resources or composite data types that should
    *   be used in the file generation.
    *   Required: No. Defaults to all defined resources except for DSTU2, the Binary resource is
    *   excluded and for DSTU3, the Conformance resource is excluded.
    * valueSetFiles: A list of files containing value-set resource definitions to be used.
    *   Required: No. Defaults to all defined value-sets that are referenced from the selected resources.
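For orientation, here is a minimal sketch of how these switches fit together, in the same spirit as the main() harness that appears further down in this patch. It assumes it is written inside the mojo class itself (the @Parameter fields are private and are normally injected by Maven from the plugin configuration), and the template path and package name are made-up values, not anything this patch defines:

    import java.io.File;
    import org.apache.maven.project.MavenProject;

    // Inside TinderGenericMultiFileMojo (e.g. its own main()), where the private fields are visible:
    TinderGenericMultiFileMojo mojo = new TinderGenericMultiFileMojo();
    mojo.myProject = new MavenProject();
    mojo.template = "/vm/fhirresource.vm";             // assumption: some Velocity template on the classpath
    mojo.targetSourceDirectory = new File("target/generated-sources/tinder");
    mojo.targetPackage = "ca.uhn.fhir.example";        // required whenever Java source is generated
    mojo.generateResources = true;                     // the only generate* switch allowed with resourceSource=model
    mojo.resourceSource = "model";
    // A FHIR version parameter (e.g. "r4") is also required in a real run; its field is not shown in this hunk.
    mojo.execute();                                    // throws MojoExecutionException / MojoFailureException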
    - * - * - * + * + * + * * @author Bill.Denton * */ @@ -215,7 +213,7 @@ public class TinderGenericMultiFileMojo extends AbstractMojo { @Parameter(required = true, defaultValue = "${project.build.directory}/..") private String baseDir; - @Parameter(required = false, defaultValue="false") + @Parameter(required = false, defaultValue = "false") private boolean generateResources; @Parameter(required = false, defaultValue = "false") @@ -232,23 +230,26 @@ public class TinderGenericMultiFileMojo extends AbstractMojo { @Parameter(required = false) private String filenamePrefix; - + @Parameter(required = false) private String filenameSuffix; - + @Parameter(required = false) private File targetResourceDirectory; @Parameter(required = false) private String targetFolder; - + // one of these two is required @Parameter(required = false) private String template; + @Parameter(required = false) private File templateFile; + @Parameter(required = false) private String velocityPath; + @Parameter(required = false) private String velocityProperties; @@ -257,7 +258,7 @@ public class TinderGenericMultiFileMojo extends AbstractMojo { @Parameter(required = false) private List excludeResources; - + @Parameter(required = false) private String resourceSource; @@ -269,7 +270,7 @@ public class TinderGenericMultiFileMojo extends AbstractMojo { @Override public void execute() throws MojoExecutionException, MojoFailureException { - + GeneratorContext context = new GeneratorContext(); Generator generator = new Generator(); try { @@ -281,10 +282,12 @@ public class TinderGenericMultiFileMojo extends AbstractMojo { context.setValueSetFiles(valueSetFiles); if (ResourceSource.MODEL.equals(context.getResourceSource())) { if (generateDatatypes) { - throw new MojoFailureException(Msg.code(128) + "Cannot use \"generateDatatypes\" when resourceSource=model"); + throw new MojoFailureException( + Msg.code(128) + "Cannot use \"generateDatatypes\" when resourceSource=model"); } if (generateValueSets) { - throw new MojoFailureException(Msg.code(129) + "Cannot use \"generateValueSets\" when resourceSource=model"); + throw new MojoFailureException( + Msg.code(129) + "Cannot use \"generateValueSets\" when resourceSource=model"); } } @@ -292,7 +295,7 @@ public class TinderGenericMultiFileMojo extends AbstractMojo { } catch (FailureException e) { throw new MojoFailureException(Msg.code(130) + e.getMessage(), e.getCause()); } - + /* * Deal with the generation target */ @@ -300,17 +303,21 @@ public class TinderGenericMultiFileMojo extends AbstractMojo { File targetDirectory = null; if (targetSourceDirectory != null) { if (targetResourceDirectory != null) { - throw new MojoFailureException(Msg.code(131) + "Both [targetSourceDirectory] and [targetResourceDirectory] are specified. Please choose just one."); + throw new MojoFailureException( + Msg.code(131) + + "Both [targetSourceDirectory] and [targetResourceDirectory] are specified. 
Please choose just one."); } targetType = TargetType.SOURCE; if (null == targetPackage) { - throw new MojoFailureException(Msg.code(132) + "The [targetPackage] property must be specified when generating Java source code."); + throw new MojoFailureException(Msg.code(132) + + "The [targetPackage] property must be specified when generating Java source code."); } targetDirectory = new File(targetSourceDirectory, targetPackage.replace('.', File.separatorChar)); - } else - if (targetResourceDirectory != null) { + } else if (targetResourceDirectory != null) { if (targetSourceDirectory != null) { - throw new MojoFailureException(Msg.code(133) + "Both [targetSourceDirectory] and [targetResourceDirectory] are specified. Please choose just one."); + throw new MojoFailureException( + Msg.code(133) + + "Both [targetSourceDirectory] and [targetResourceDirectory] are specified. Please choose just one."); } targetType = TargetType.RESOURCE; if (targetFolder != null) { @@ -322,11 +329,12 @@ public class TinderGenericMultiFileMojo extends AbstractMojo { targetPackage = ""; } } else { - throw new MojoFailureException(Msg.code(134) + "Either [targetSourceDirectory] or [targetResourceDirectory] must be specified."); + throw new MojoFailureException( + Msg.code(134) + "Either [targetSourceDirectory] or [targetResourceDirectory] must be specified."); } targetDirectory.mkdirs(); - ourLog.info(" * Output ["+targetType.toString()+"] Directory: " + targetDirectory.getAbsolutePath()); - + ourLog.info(" * Output [" + targetType.toString() + "] Directory: " + targetDirectory.getAbsolutePath()); + /* * Write resources if selected */ @@ -371,7 +379,7 @@ public class TinderGenericMultiFileMojo extends AbstractMojo { vsp.setVelocityProperties(velocityProperties); vsp.writeMarkedValueSets(targetType, targetDirectory, targetPackage); } - + switch (targetType) { case SOURCE: { myProject.addCompileSourceRoot(targetSourceDirectory.getAbsolutePath()); @@ -381,7 +389,7 @@ public class TinderGenericMultiFileMojo extends AbstractMojo { Resource resource = new Resource(); resource.setDirectory(targetResourceDirectory.getAbsolutePath()); if (targetFolder != null) { - resource.addInclude(targetFolder+"/*"); + resource.addInclude(targetFolder + "/*"); } else { resource.addInclude("*"); } @@ -390,12 +398,12 @@ public class TinderGenericMultiFileMojo extends AbstractMojo { } default: } - } public static void main(String[] args) throws IOException, MojoFailureException, MojoExecutionException { - // PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS); + // PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, + // TimeUnit.MILLISECONDS); // HttpClientBuilder builder = HttpClientBuilder.create(); // builder.setConnectionManager(connectionManager); // CloseableHttpClient client = builder.build(); @@ -407,8 +415,10 @@ public class TinderGenericMultiFileMojo extends AbstractMojo { // // ourLog.info("Metadata String: {}", metadataString); - // String metadataString = IOUtils.toString(new FileInputStream("src/test/resources/healthintersections-metadata.xml")); - // Conformance conformance = new FhirContext(Conformance.class).newXmlParser().parseResource(Conformance.class, metadataString); + // String metadataString = IOUtils.toString(new + // FileInputStream("src/test/resources/healthintersections-metadata.xml")); + // Conformance conformance = new FhirContext(Conformance.class).newXmlParser().parseResource(Conformance.class, + 
// metadataString); TinderGenericMultiFileMojo mojo = new TinderGenericMultiFileMojo(); mojo.myProject = new MavenProject(); diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderGenericSingleFileMojo.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderGenericSingleFileMojo.java index d0ece584cce..e5a2816340f 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderGenericSingleFileMojo.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderGenericSingleFileMojo.java @@ -1,16 +1,12 @@ package ca.uhn.fhir.tinder; import ca.uhn.fhir.i18n.Msg; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.OutputStreamWriter; -import java.nio.charset.StandardCharsets; -import java.util.List; - +import ca.uhn.fhir.tinder.AbstractGenerator.FailureException; +import ca.uhn.fhir.tinder.GeneratorContext.ResourceSource; +import ca.uhn.fhir.tinder.TinderStructuresMojo.ValueSetFileDefinition; +import ca.uhn.fhir.tinder.parser.BaseStructureParser; +import ca.uhn.fhir.tinder.parser.DatatypeGeneratorUsingSpreadsheet; +import ca.uhn.fhir.tinder.parser.TargetType; import org.apache.commons.lang.WordUtils; import org.apache.maven.model.Resource; import org.apache.maven.plugin.AbstractMojo; @@ -24,14 +20,15 @@ import org.apache.maven.project.MavenProject; import org.apache.velocity.VelocityContext; import org.apache.velocity.app.VelocityEngine; -import ca.uhn.fhir.tinder.AbstractGenerator.ExecutionException; -import ca.uhn.fhir.tinder.AbstractGenerator.FailureException; -import ca.uhn.fhir.tinder.GeneratorContext.ResourceSource; -import ca.uhn.fhir.tinder.TinderStructuresMojo.ValueSetFileDefinition; -import ca.uhn.fhir.tinder.parser.BaseStructureParser; -import ca.uhn.fhir.tinder.parser.BaseStructureSpreadsheetParser; -import ca.uhn.fhir.tinder.parser.DatatypeGeneratorUsingSpreadsheet; -import ca.uhn.fhir.tinder.parser.TargetType; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.OutputStreamWriter; +import java.nio.charset.StandardCharsets; +import java.util.List; /** * Generate a single file based on resource or composite type metadata. @@ -41,7 +38,7 @@ import ca.uhn.fhir.tinder.parser.TargetType; * generated using a Velocity template that can be taken from * inside the hapi-timder-plugin project or can be located in other projects *

    - * The following Maven plug-in configuration properties are used with this plug-in
    + * The following Maven plug-in configuration properties are used with this plug-in
    *
 @@ -58,28 +55,28 @@ import ca.uhn.fhir.tinder.parser.TargetType;
    *
    * Configuration properties (attribute: description; required):
    *
    * baseDir: The Maven project's base directory. This is used to possibly locate other assets
    *   within the project used in file generation.
    *   Required: No. Defaults to: ${project.build.directory}/..
    * generateResources: Should files be generated from FHIR resource metadata?
    *   Valid values: true | false
    *   Required: One of these two options must be specified as true
    * generateDataTypes: Should files be generated from FHIR composite data type metadata?
    *   Valid values: true | false
    * resourceSource: Which source of resource definitions should be processed? Valid values are:
    *   spreadsheet, to cause resources to be generated based on the FHIR spreadsheets
    *   model, to cause resources to be generated based on the model structure classes. Note that
    *     generateResources is the only one of the above options that can be used when model is specified.
    *   Required: No. Defaults to: spreadsheet
    * templateFile: The full path to the Velocity template that is to be used to generate the files.
    * velocityPath
    * includeResources: A list of the names of the resources or composite data types that should
    *   be used in the file generation.
    *   Required: No. Defaults to all defined resources except for DSTU2, the Binary resource is
    *   excluded and for DSTU3, the Conformance resource is excluded.
    * valueSetFiles: A list of files containing value-set resource definitions to be used.
    *   Required: No. Defaults to all defined value-sets that are referenced from the selected resources.
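Conceptually, the single-file flow that the execute() method below implements reduces to building a VelocityContext, putting the packageBase and version values into it, and evaluating one template into one output writer. The following stripped-down, self-contained sketch illustrates that shape only; the template path, package name, and output file are placeholders, not the plugin's real configuration:

    import java.io.*;
    import java.nio.charset.StandardCharsets;
    import org.apache.velocity.VelocityContext;
    import org.apache.velocity.app.VelocityEngine;

    public class SingleFileSketch {
        public static void main(String[] args) throws IOException {
            VelocityEngine engine = new VelocityEngine();      // the real mojo configures loaders via VelocityHelper
            engine.init();

            VelocityContext ctx = new VelocityContext();
            ctx.put("packageBase", "ca.uhn.fhir.example");     // taken from packageBase or derived from targetPackage
            ctx.put("versionCapitalized", "R4B");              // note the special-case mapping of "R4b" to "R4B"

            try (Reader template = new InputStreamReader(
                            new FileInputStream("my-template.vm"), StandardCharsets.UTF_8);
                    Writer out = new OutputStreamWriter(
                            new FileOutputStream("target/Example.java"), StandardCharsets.UTF_8)) {
                engine.evaluate(ctx, out, "sketch", template); // one template evaluated into one target file
            }
        }
    }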
    - * - * - * + * + * + * * @author Bill.Denton * */ @@ -211,12 +208,12 @@ public class TinderGenericSingleFileMojo extends AbstractMojo { @Parameter(required = true, defaultValue = "${project.build.directory}/..") private String baseDir; - @Parameter(required = false, defaultValue="false") + @Parameter(required = false, defaultValue = "false") private boolean generateResources; @Parameter(required = false, defaultValue = "false") private boolean generateDatatypes; - + @Parameter(required = false) private File targetSourceDirectory; @@ -225,7 +222,7 @@ public class TinderGenericSingleFileMojo extends AbstractMojo { @Parameter(required = false) private String packageBase; - + @Parameter(required = false) private File targetResourceDirectory; @@ -234,14 +231,17 @@ public class TinderGenericSingleFileMojo extends AbstractMojo { @Parameter(required = false) private String targetFile; - + // one of these two is required @Parameter(required = false) private String template; + @Parameter(required = false) private File templateFile; + @Parameter(required = false) private String velocityPath; + @Parameter(required = false) private String velocityProperties; @@ -250,7 +250,7 @@ public class TinderGenericSingleFileMojo extends AbstractMojo { @Parameter(required = false) private List excludeResources; - + @Parameter(required = false) private String resourceSource; @@ -262,7 +262,7 @@ public class TinderGenericSingleFileMojo extends AbstractMojo { @Override public void execute() throws MojoExecutionException, MojoFailureException { - + GeneratorContext context = new GeneratorContext(); Generator generator = new Generator(); try { @@ -274,7 +274,8 @@ public class TinderGenericSingleFileMojo extends AbstractMojo { context.setValueSetFiles(valueSetFiles); if (ResourceSource.MODEL.equals(context.getResourceSource())) { if (generateDatatypes) { - throw new MojoFailureException(Msg.code(120) + "Cannot use \"generateDatatypes\" when resourceSource=model"); + throw new MojoFailureException( + Msg.code(120) + "Cannot use \"generateDatatypes\" when resourceSource=model"); } } @@ -282,7 +283,7 @@ public class TinderGenericSingleFileMojo extends AbstractMojo { } catch (FailureException e) { throw new MojoFailureException(Msg.code(121) + e.getMessage(), e.getCause()); } - + try { /* * Deal with the generation target @@ -294,20 +295,24 @@ public class TinderGenericSingleFileMojo extends AbstractMojo { } if (targetSourceDirectory != null) { if (targetResourceDirectory != null) { - throw new MojoFailureException(Msg.code(123) + "Both [targetSourceDirectory] and [targetResourceDirectory] are specified. Please choose just one."); + throw new MojoFailureException( + Msg.code(123) + + "Both [targetSourceDirectory] and [targetResourceDirectory] are specified. 
Please choose just one."); } targetType = TargetType.SOURCE; if (null == targetPackage) { - throw new MojoFailureException(Msg.code(124) + "The [targetPackage] property must be specified when generating Java source code."); + throw new MojoFailureException(Msg.code(124) + + "The [targetPackage] property must be specified when generating Java source code."); } targetDirectory = new File(targetSourceDirectory, targetPackage.replace('.', File.separatorChar)); if (!targetFile.endsWith(".java")) { targetFile += ".java"; } - } else - if (targetResourceDirectory != null) { + } else if (targetResourceDirectory != null) { if (targetSourceDirectory != null) { - throw new MojoFailureException(Msg.code(125) + "Both [targetSourceDirectory] and [targetResourceDirectory] are specified. Please choose just one."); + throw new MojoFailureException( + Msg.code(125) + + "Both [targetSourceDirectory] and [targetResourceDirectory] are specified. Please choose just one."); } targetType = TargetType.RESOURCE; if (targetFolder != null) { @@ -321,13 +326,16 @@ public class TinderGenericSingleFileMojo extends AbstractMojo { targetPackage = ""; } } else { - throw new MojoFailureException(Msg.code(126) + "Either [targetSourceDirectory] or [targetResourceDirectory] must be specified."); + throw new MojoFailureException(Msg.code(126) + + "Either [targetSourceDirectory] or [targetResourceDirectory] must be specified."); } - ourLog.info(" * Output ["+targetType.toString()+"] file ["+targetFile+"] in directory: " + targetDirectory.getAbsolutePath()); + ourLog.info(" * Output [" + targetType.toString() + "] file [" + targetFile + "] in directory: " + + targetDirectory.getAbsolutePath()); targetDirectory.mkdirs(); File target = new File(targetDirectory, targetFile); - OutputStreamWriter targetWriter = new OutputStreamWriter(new FileOutputStream(target, false), StandardCharsets.UTF_8); - + OutputStreamWriter targetWriter = + new OutputStreamWriter(new FileOutputStream(target, false), StandardCharsets.UTF_8); + /* * Next, deal with the template and initialize velocity */ @@ -339,15 +347,14 @@ public class TinderGenericSingleFileMojo extends AbstractMojo { templateIs = this.getClass().getResourceAsStream(template); } InputStreamReader templateReader = new InputStreamReader(templateIs); - + /* * build new Velocity Context */ VelocityContext ctx = new VelocityContext(); if (packageBase != null) { ctx.put("packageBase", packageBase); - } else - if (targetPackage != null) { + } else if (targetPackage != null) { int ix = targetPackage.lastIndexOf('.'); if (ix > 0) { ctx.put("packageBase", targetPackage.subSequence(0, ix)); @@ -366,13 +373,13 @@ public class TinderGenericSingleFileMojo extends AbstractMojo { } else { ctx.put("resourcePackage", "ca.uhn.fhir.model." 
+ version + ".resource"); } - + String capitalize = WordUtils.capitalize(version); if ("R4b".equals(capitalize)) { - capitalize="R4B"; + capitalize = "R4B"; } ctx.put("versionCapitalized", capitalize); - + /* * Write resources if selected */ @@ -391,7 +398,7 @@ public class TinderGenericSingleFileMojo extends AbstractMojo { targetWriter.close(); } } - + switch (targetType) { case SOURCE: { myProject.addCompileSourceRoot(targetSourceDirectory.getAbsolutePath()); @@ -402,7 +409,7 @@ public class TinderGenericSingleFileMojo extends AbstractMojo { resource.setDirectory(targetResourceDirectory.getAbsolutePath()); String resName = targetFile; if (targetFolder != null) { - resName = targetFolder+File.separator+targetFile; + resName = targetFolder + File.separator + targetFile; } resource.addInclude(resName); myProject.addResource(resource); @@ -418,7 +425,8 @@ public class TinderGenericSingleFileMojo extends AbstractMojo { public static void main(String[] args) throws IOException, MojoFailureException, MojoExecutionException { - // PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS); + // PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, + // TimeUnit.MILLISECONDS); // HttpClientBuilder builder = HttpClientBuilder.create(); // builder.setConnectionManager(connectionManager); // CloseableHttpClient client = builder.build(); @@ -430,8 +438,10 @@ public class TinderGenericSingleFileMojo extends AbstractMojo { // // ourLog.info("Metadata String: {}", metadataString); - // String metadataString = IOUtils.toString(new FileInputStream("src/test/resources/healthintersections-metadata.xml")); - // Conformance conformance = new FhirContext(Conformance.class).newXmlParser().parseResource(Conformance.class, metadataString); + // String metadataString = IOUtils.toString(new + // FileInputStream("src/test/resources/healthintersections-metadata.xml")); + // Conformance conformance = new FhirContext(Conformance.class).newXmlParser().parseResource(Conformance.class, + // metadataString); TinderGenericSingleFileMojo mojo = new TinderGenericSingleFileMojo(); mojo.myProject = new MavenProject(); diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderJpaRestServerMojo.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderJpaRestServerMojo.java index 563756f199d..4503b47061f 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderJpaRestServerMojo.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderJpaRestServerMojo.java @@ -91,7 +91,9 @@ public class TinderJpaRestServerMojo extends AbstractMojo { if (baseResourceNames == null || baseResourceNames.isEmpty()) { baseResourceNames = new ArrayList<>(); - ourLog.info("No resource names supplied, going to use all resources from version: {}", fhirContext.getVersion().getVersion()); + ourLog.info( + "No resource names supplied, going to use all resources from version: {}", + fhirContext.getVersion().getVersion()); Properties p = new Properties(); try { @@ -130,7 +132,8 @@ public class TinderJpaRestServerMojo extends AbstractMojo { ourLog.info("Including the following resources: {}", baseResourceNames); - File configPackageDirectoryBase = new File(targetDirectory, configPackageBase.replace(".", File.separatorChar + "")); + File configPackageDirectoryBase = + new File(targetDirectory, configPackageBase.replace(".", File.separatorChar + "")); configPackageDirectoryBase.mkdirs(); File 
packageDirectoryBase = new File(targetDirectory, packageBase.replace(".", File.separatorChar + "")); packageDirectoryBase.mkdirs(); @@ -173,14 +176,16 @@ public class TinderJpaRestServerMojo extends AbstractMojo { VelocityEngine v = new VelocityEngine(); v.setProperty(RuntimeConstants.RESOURCE_LOADERS, "cp"); - v.setProperty("resource.loader.cp.class", "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader"); + v.setProperty( + "resource.loader.cp.class", "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader"); v.setProperty("runtime.strict_mode.enable", Boolean.TRUE); /* * Spring Java */ Reader templateReader = ClasspathUtil.loadResourceAsReader("/vm/jpa_spring_beans_java.vm"); - File f = new File(configPackageDirectoryBase, "GeneratedDaoAndResourceProviderConfig" + capitalize + ".java"); + File f = new File( + configPackageDirectoryBase, "GeneratedDaoAndResourceProviderConfig" + capitalize + ".java"); OutputStreamWriter w = new OutputStreamWriter(new FileOutputStream(f, false), "UTF-8"); v.evaluate(ctx, w, "", templateReader); w.close(); @@ -192,7 +197,8 @@ public class TinderJpaRestServerMojo extends AbstractMojo { public static void main(String[] args) throws IOException, MojoFailureException, MojoExecutionException { - // PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS); + // PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, + // TimeUnit.MILLISECONDS); // HttpClientBuilder builder = HttpClientBuilder.create(); // builder.setConnectionManager(connectionManager); // CloseableHttpClient client = builder.build(); @@ -204,8 +210,10 @@ public class TinderJpaRestServerMojo extends AbstractMojo { // // ourLog.info("Metadata String: {}", metadataString); - // String metadataString = IOUtils.toString(new FileInputStream("src/test/resources/healthintersections-metadata.xml")); - // Conformance conformance = new FhirContext(Conformance.class).newXmlParser().parseResource(Conformance.class, metadataString); + // String metadataString = IOUtils.toString(new + // FileInputStream("src/test/resources/healthintersections-metadata.xml")); + // Conformance conformance = new FhirContext(Conformance.class).newXmlParser().parseResource(Conformance.class, + // metadataString); TinderJpaRestServerMojo mojo = new TinderJpaRestServerMojo(); mojo.myProject = new MavenProject(); @@ -213,16 +221,14 @@ public class TinderJpaRestServerMojo extends AbstractMojo { mojo.packageBase = "ca.uhn.fhir.jpa.rp.r4"; mojo.configPackageBase = "ca.uhn.fhir.jpa.config"; mojo.baseResourceNames = new ArrayList(Arrays.asList( - "bundle", - "observation", -// "communicationrequest" - "binary", - "structuredefinition" - )); + "bundle", + "observation", + // "communicationrequest" + "binary", + "structuredefinition")); mojo.targetDirectory = new File("target/generated/valuesets"); mojo.targetResourceDirectory = new File("target/generated/valuesets"); mojo.targetResourceSpringBeansFile = "tmp_beans.xml"; mojo.execute(); } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderResourceGeneratorMojo.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderResourceGeneratorMojo.java index 3b411c0146f..a48e2fca181 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderResourceGeneratorMojo.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderResourceGeneratorMojo.java @@ -34,7 +34,8 @@ public class TinderResourceGeneratorMojo 
extends AbstractGeneratorMojo { File packageDirectoryBase = configuration.getPackageDirectoryBase(); packageDirectoryBase.mkdirs(); - ResourceGeneratorUsingModel gen = new ResourceGeneratorUsingModel(configuration.getVersion(), configuration.getBaseDir()); + ResourceGeneratorUsingModel gen = + new ResourceGeneratorUsingModel(configuration.getVersion(), configuration.getBaseDir()); gen.setBaseResourceNames(configuration.getResourceNames()); try { @@ -52,7 +53,8 @@ public class TinderResourceGeneratorMojo extends AbstractGeneratorMojo { VelocityEngine v = new VelocityEngine(); v.setProperty(RuntimeConstants.RESOURCE_LOADERS, "cp"); - v.setProperty("resource.loader.cp.class", "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader"); + v.setProperty( + "resource.loader.cp.class", "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader"); v.setProperty("runtime.strict_mode.enable", Boolean.TRUE); InputStream templateIs = ResourceGeneratorUsingSpreadsheet.class.getResourceAsStream(templateName); @@ -65,8 +67,8 @@ public class TinderResourceGeneratorMojo extends AbstractGeneratorMojo { Resource resource = new Resource(); resource.setDirectory(packageDirectoryBase.getAbsolutePath()); - //resource.setDirectory(targetDirectory.getAbsolutePath()); - //resource.addInclude(packageBase); + // resource.setDirectory(targetDirectory.getAbsolutePath()); + // resource.addInclude(packageBase); myProject.addResource(resource); } catch (Exception e) { @@ -84,7 +86,5 @@ public class TinderResourceGeneratorMojo extends AbstractGeneratorMojo { public String html(String theHtml) { return StringEscapeUtils.escapeHtml4(theHtml); } - - } } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderSourcesGeneratorMojo.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderSourcesGeneratorMojo.java index 239966b04e4..7789ba39f75 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderSourcesGeneratorMojo.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderSourcesGeneratorMojo.java @@ -27,7 +27,8 @@ public class TinderSourcesGeneratorMojo extends AbstractGeneratorMojo { File packageDirectoryBase = configuration.getPackageDirectoryBase(); packageDirectoryBase.mkdirs(); - ResourceGeneratorUsingModel gen = new ResourceGeneratorUsingModel(configuration.getVersion(), configuration.getBaseDir()); + ResourceGeneratorUsingModel gen = + new ResourceGeneratorUsingModel(configuration.getVersion(), configuration.getBaseDir()); gen.setBaseResourceNames(configuration.getResourceNames()); try { diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderStructuresMojo.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderStructuresMojo.java index f69109f5b60..a9f1c79c334 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderStructuresMojo.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/TinderStructuresMojo.java @@ -36,7 +36,7 @@ public class TinderStructuresMojo extends AbstractMojo { @Parameter(alias = "package", required = true) private String packageName; - @Parameter(alias = "version", required = true, defaultValue="dstu") + @Parameter(alias = "version", required = true, defaultValue = "dstu") private String version = "dstu"; @Parameter(required = false) @@ -50,9 +50,9 @@ public class TinderStructuresMojo extends AbstractMojo { @Parameter(required = true, defaultValue = "${project.build.directory}/..") private String baseDir; - + @Override - public void execute() throws MojoExecutionException, 
MojoFailureException { + public void execute() throws MojoExecutionException, MojoFailureException { if (StringUtils.isBlank(packageName)) { throw new MojoFailureException(Msg.code(101) + "Package not specified"); } @@ -64,7 +64,8 @@ public class TinderStructuresMojo extends AbstractMojo { ourLog.info(" * Output Package: " + packageName); - File resDirectoryBase = new File(new File(targetResourceDirectory), packageName.replace('.', File.separatorChar)); + File resDirectoryBase = + new File(new File(targetResourceDirectory), packageName.replace('.', File.separatorChar)); resDirectoryBase.mkdirs(); ourLog.info(" * Output Resource Directory: " + resDirectoryBase.getAbsolutePath()); @@ -119,14 +120,14 @@ public class TinderStructuresMojo extends AbstractMojo { File resSubDirectoryBase = new File(directoryBase, "resource"); ourLog.info("Writing Resources to directory: {}", resSubDirectoryBase.getAbsolutePath()); - + rp.combineContentMaps(dtp); rp.writeAll(resSubDirectoryBase, resDirectoryBase, packageName); } if (dtp != null) { ourLog.info("Writing Composite Datatypes..."); - + dtp.combineContentMaps(rp); dtp.writeAll(new File(directoryBase, "composite"), resDirectoryBase, packageName); } @@ -179,12 +180,11 @@ public class TinderStructuresMojo extends AbstractMojo { public static void main(String[] args) throws Exception { - -// ProfileParser pp = new ProfileParser(); -// pp.parseSingleProfile(new File("../hapi-tinder-test/src/test/resources/profile/patient.xml"), "http://foo"); + // ProfileParser pp = new ProfileParser(); + // pp.parseSingleProfile(new File("../hapi-tinder-test/src/test/resources/profile/patient.xml"), "http://foo"); ValueSetGenerator vsp = new ValueSetGenerator("dstu2"); -// vsp.setResourceValueSetFiles(theResourceValueSetFiles);Directory("src/main/resources/vs/"); + // vsp.setResourceValueSetFiles(theResourceValueSetFiles);Directory("src/main/resources/vs/"); vsp.parse(); DatatypeGeneratorUsingSpreadsheet dtp = new DatatypeGeneratorUsingSpreadsheet("dstu2", "."); @@ -196,20 +196,21 @@ public class TinderStructuresMojo extends AbstractMojo { String dtOutputDir = "target/generated-sources/tinder/ca/uhn/fhir/model/dev/composite"; ResourceGeneratorUsingSpreadsheet rp = new ResourceGeneratorUsingSpreadsheet("dstu2", "."); - rp.setBaseResourceNames(Arrays.asList( "appointment"//, "auditevent" , "observation" -// //, "contract" -// "valueset", "organization", "location" -// , "observation", "conformance" -// //, "referralrequest" -// , "patient","practitioner","encounter", -// "organization","location","relatedperson","appointment","slot","order" -// //,"availability" -// ,"device", "valueset" + rp.setBaseResourceNames(Arrays.asList( + "appointment" // , "auditevent" , "observation" + // //, "contract" + // "valueset", "organization", "location" + // , "observation", "conformance" + // //, "referralrequest" + // , "patient","practitioner","encounter", + // "organization","location","relatedperson","appointment","slot","order" + // //,"availability" + // ,"device", "valueset" )); rp.parse(); rp.bindValueSets(vsp); rp.markResourcesForImports(); - + rp.bindValueSets(vsp); String rpOutputDir = "target/generated-sources/tinder/ca/uhn/fhir/model/dev/resource"; @@ -219,13 +220,12 @@ public class TinderStructuresMojo extends AbstractMojo { rp.combineContentMaps(dtp); rp.getLocalImports().putAll(datatypeLocalImports); datatypeLocalImports.putAll(rp.getLocalImports()); - + String vsOutputDir = "target/generated-sources/tinder/ca/uhn/fhir/model/dev/valueset"; vsp.writeMarkedValueSets(new 
File(vsOutputDir), "ca.uhn.fhir.model.dev"); - + dtp.writeAll(new File(dtOutputDir), null, "ca.uhn.fhir.model.dev"); rp.writeAll(new File(rpOutputDir), new File(rpSOutputDir), "ca.uhn.fhir.model.dev"); - } public static class ProfileFileDefinition { @@ -248,5 +248,4 @@ public class TinderStructuresMojo extends AbstractMojo { valueSetFile = theValueSetFile; } } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/ValueSetGenerator.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/ValueSetGenerator.java index 39ac7c63814..4d431c7a552 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/ValueSetGenerator.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/ValueSetGenerator.java @@ -1,7 +1,7 @@ package ca.uhn.fhir.tinder; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.dstu2.resource.Bundle.Entry; import ca.uhn.fhir.model.dstu2.resource.ValueSet; import ca.uhn.fhir.model.dstu2.resource.ValueSet.CodeSystem; @@ -53,7 +53,6 @@ public class ValueSetGenerator { } } - public String getClassForValueSetIdAndMarkAsNeeded(String theId) { ValueSetTm vs = myValueSets.get(theId); if (vs == null) { @@ -94,9 +93,11 @@ public class ValueSetGenerator { } String vs = IOUtils.toString(is, Charset.defaultCharset()); if ("dstu2".equals(myVersion)) { - ca.uhn.fhir.model.dstu2.resource.Bundle bundle = newXmlParser.parseResource(ca.uhn.fhir.model.dstu2.resource.Bundle.class, vs); + ca.uhn.fhir.model.dstu2.resource.Bundle bundle = + newXmlParser.parseResource(ca.uhn.fhir.model.dstu2.resource.Bundle.class, vs); for (Entry nextEntry : bundle.getEntry()) { - ca.uhn.fhir.model.dstu2.resource.ValueSet nextVs = (ca.uhn.fhir.model.dstu2.resource.ValueSet) nextEntry.getResource(); + ca.uhn.fhir.model.dstu2.resource.ValueSet nextVs = + (ca.uhn.fhir.model.dstu2.resource.ValueSet) nextEntry.getResource(); parseValueSet(nextVs); } } else { @@ -113,7 +114,8 @@ public class ValueSetGenerator { ValueSet nextVs = (ValueSet) newXmlParser.parseResource(ValueSet.class, vs); tm = parseValueSet(nextVs); } else { - ca.uhn.fhir.model.dstu2.resource.ValueSet nextVs = (ca.uhn.fhir.model.dstu2.resource.ValueSet) newXmlParser.parseResource(ca.uhn.fhir.model.dstu2.resource.ValueSet.class, vs); + ca.uhn.fhir.model.dstu2.resource.ValueSet nextVs = (ca.uhn.fhir.model.dstu2.resource.ValueSet) + newXmlParser.parseResource(ca.uhn.fhir.model.dstu2.resource.ValueSet.class, vs); tm = parseValueSet(nextVs); } if (tm != null) { @@ -121,18 +123,19 @@ public class ValueSetGenerator { } } } - + /* - * Purge empty valuesets + * Purge empty valuesets */ - for (Iterator> iter = myValueSets.entrySet().iterator(); iter.hasNext(); ) { + for (Iterator> iter = + myValueSets.entrySet().iterator(); + iter.hasNext(); ) { java.util.Map.Entry next = iter.next(); if (next.getValue().getCodes().isEmpty()) { iter.remove(); continue; } } - // File[] files = new // File(myResourceValueSetFiles).listFiles((FilenameFilter) new @@ -148,7 +151,8 @@ public class ValueSetGenerator { private ValueSetTm parseValueSet(ca.uhn.fhir.model.dstu2.resource.ValueSet nextVs) { myConceptCount += nextVs.getCodeSystem().getConcept().size(); - ourLog.debug("Parsing ValueSetTm #{} - {} - {} concepts total", myValueSetCount++, nextVs.getName(), myConceptCount); + ourLog.debug( + "Parsing ValueSetTm #{} - {} - {} concepts total", myValueSetCount++, nextVs.getName(), myConceptCount); // output.addConcept(next.getCode().getValue(), // next.getDisplay().getValue(), 
next.getDefinition()); @@ -167,7 +171,8 @@ public class ValueSetGenerator { } } - for (ca.uhn.fhir.model.dstu2.resource.ValueSet.ComposeInclude nextInclude : nextVs.getCompose().getInclude()) { + for (ca.uhn.fhir.model.dstu2.resource.ValueSet.ComposeInclude nextInclude : + nextVs.getCompose().getInclude()) { String system = nextInclude.getSystemElement().getValueAsString(); for (ComposeIncludeConcept nextConcept : nextInclude.getConcept()) { String nextCodeValue = nextConcept.getCode(); @@ -175,11 +180,11 @@ public class ValueSetGenerator { } } -// if (vs.getCodes().isEmpty()) { -// ourLog.info("ValueSet " + nextVs.getName() + " has no codes, not going to generate any code for it"); -// return null; -// } - + // if (vs.getCodes().isEmpty()) { + // ourLog.info("ValueSet " + nextVs.getName() + " has no codes, not going to generate any code for it"); + // return null; + // } + if (myValueSets.containsKey(vs.getName())) { ourLog.warn("Duplicate Name: " + vs.getName()); } else { @@ -211,7 +216,7 @@ public class ValueSetGenerator { myTemplate = theTemplate; } - public void setTemplateFile (File theTemplateFile) { + public void setTemplateFile(File theTemplateFile) { myTemplateFile = theTemplateFile; } @@ -247,11 +252,14 @@ public class ValueSetGenerator { return b.toString(); } - public void write(Collection theValueSets, File theOutputDirectory, String thePackageBase) throws IOException { + public void write(Collection theValueSets, File theOutputDirectory, String thePackageBase) + throws IOException { write(TargetType.SOURCE, theValueSets, theOutputDirectory, thePackageBase); } - public void write(TargetType theTarget, Collection theValueSets, File theOutputDirectory, String thePackageBase) throws IOException { + public void write( + TargetType theTarget, Collection theValueSets, File theOutputDirectory, String thePackageBase) + throws IOException { for (ValueSetTm nextValueSetTm : theValueSets) { write(theTarget, nextValueSetTm, theOutputDirectory, thePackageBase); } @@ -261,7 +269,8 @@ public class ValueSetGenerator { // myValueSetName = theString; // } - private void write(TargetType theTarget, ValueSetTm theValueSetTm, File theOutputDirectory, String thePackageBase) throws IOException { + private void write(TargetType theTarget, ValueSetTm theValueSetTm, File theOutputDirectory, String thePackageBase) + throws IOException { if (!theOutputDirectory.exists()) { theOutputDirectory.mkdirs(); } @@ -310,7 +319,8 @@ public class ValueSetGenerator { writeMarkedValueSets(TargetType.SOURCE, theOutputDirectory, thePackageBase); } - public void writeMarkedValueSets(TargetType theTarget, File theOutputDirectory, String thePackageBase) throws MojoFailureException { + public void writeMarkedValueSets(TargetType theTarget, File theOutputDirectory, String thePackageBase) + throws MojoFailureException { try { write(theTarget, myMarkedValueSets, theOutputDirectory, thePackageBase); } catch (IOException e) { @@ -322,7 +332,5 @@ public class ValueSetGenerator { ValueSetGenerator p = new ValueSetGenerator("dstu1"); p.parse(); - } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/VelocityHelper.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/VelocityHelper.java index 6cc45dff959..a572f9cc987 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/VelocityHelper.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/VelocityHelper.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance 
with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,6 +20,9 @@ package ca.uhn.fhir.tinder; import ca.uhn.fhir.i18n.Msg; +import org.apache.velocity.app.VelocityEngine; +import org.apache.velocity.runtime.RuntimeConstants; + import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; @@ -28,16 +31,14 @@ import java.io.InputStream; import java.util.Map.Entry; import java.util.Properties; -import org.apache.velocity.app.VelocityEngine; -import org.apache.velocity.runtime.RuntimeConstants; - public class VelocityHelper { - public static VelocityEngine configureVelocityEngine (File templateFile, String velocityPath, String propertyFile) throws IOException { + public static VelocityEngine configureVelocityEngine(File templateFile, String velocityPath, String propertyFile) + throws IOException { VelocityEngine result = new VelocityEngine(); boolean haveResourceLoader = false; boolean haveRuntimeReferences = false; - + if (propertyFile != null) { File propFile = new File(propertyFile); if (propFile.exists() && propFile.isFile() && propFile.canRead()) { @@ -45,25 +46,26 @@ public class VelocityHelper { Properties props = new Properties(); props.load(propsIn); propsIn.close(); - for (Entry entry : props.entrySet()) { - String key = (String)entry.getKey(); + for (Entry entry : props.entrySet()) { + String key = (String) entry.getKey(); result.setProperty(key, entry.getValue()); if (RuntimeConstants.RESOURCE_LOADER.equals(key)) { haveResourceLoader = true; - } else - if (RuntimeConstants.RUNTIME_REFERENCES_STRICT.equals(key)) { + } else if (RuntimeConstants.RUNTIME_REFERENCES_STRICT.equals(key)) { haveRuntimeReferences = true; } } } else { - throw new FileNotFoundException(Msg.code(94) + "Velocity property file ["+propertyFile+"] does not exist or is not readable."); + throw new FileNotFoundException(Msg.code(94) + "Velocity property file [" + propertyFile + + "] does not exist or is not readable."); } } - + if (!haveResourceLoader) { if (templateFile != null) { result.setProperty(RuntimeConstants.RESOURCE_LOADER, "file"); - result.setProperty("file.resource.loader.class", "org.apache.velocity.runtime.resource.loader.FileResourceLoader"); + result.setProperty( + "file.resource.loader.class", "org.apache.velocity.runtime.resource.loader.FileResourceLoader"); if (velocityPath != null) { result.setProperty(RuntimeConstants.FILE_RESOURCE_LOADER_PATH, velocityPath); } else { @@ -75,14 +77,16 @@ public class VelocityHelper { } } else { result.setProperty(RuntimeConstants.RESOURCE_LOADERS, "cp"); - result.setProperty("resource.loader.cp.class", "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader"); + result.setProperty( + "resource.loader.cp.class", + "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader"); } } - + if (!haveRuntimeReferences) { result.setProperty(RuntimeConstants.RUNTIME_REFERENCES_STRICT, Boolean.TRUE); } - + return result; } } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/VersionPropertyFileGeneratorMojo.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/VersionPropertyFileGeneratorMojo.java index ee92495a844..1d50326841f 100644 --- 
a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/VersionPropertyFileGeneratorMojo.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/VersionPropertyFileGeneratorMojo.java @@ -1,6 +1,17 @@ package ca.uhn.fhir.tinder; import ca.uhn.fhir.i18n.Msg; +import ca.uhn.fhir.model.api.annotation.DatatypeDef; +import ca.uhn.fhir.model.api.annotation.ResourceDef; +import com.google.common.reflect.ClassPath; +import org.apache.commons.io.IOUtils; +import org.apache.maven.plugin.AbstractMojo; +import org.apache.maven.plugin.MojoFailureException; +import org.hl7.fhir.instance.model.api.IBaseDatatype; +import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.r4.model.InstantType; +import org.springframework.util.Assert; + import java.io.File; import java.io.FileWriter; import java.io.IOException; @@ -9,25 +20,11 @@ import java.util.Map.Entry; import java.util.TreeMap; import java.util.stream.Collectors; -import com.google.common.reflect.ClassPath; -import org.apache.commons.io.IOUtils; -import org.apache.maven.plugin.AbstractMojo; -import org.apache.maven.plugin.MojoExecutionException; -import org.apache.maven.plugin.MojoFailureException; -import org.hl7.fhir.instance.model.api.IBaseDatatype; -import org.hl7.fhir.instance.model.api.IBaseResource; -import org.hl7.fhir.r4.model.InstantType; -import org.springframework.core.io.Resource; -import org.springframework.core.io.support.PathMatchingResourcePatternResolver; - -import ca.uhn.fhir.model.api.annotation.DatatypeDef; -import ca.uhn.fhir.model.api.annotation.ResourceDef; -import org.springframework.util.Assert; - -//@Mojo(name = "generate-version-propertyfile", defaultPhase = LifecyclePhase.GENERATE_SOURCES) +// @Mojo(name = "generate-version-propertyfile", defaultPhase = LifecyclePhase.GENERATE_SOURCES) public class VersionPropertyFileGeneratorMojo extends AbstractMojo { - private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(VersionPropertyFileGeneratorMojo.class); + private static final org.slf4j.Logger ourLog = + org.slf4j.LoggerFactory.getLogger(VersionPropertyFileGeneratorMojo.class); // @Parameter(alias = "packageName", required = true) private String packageName; @@ -42,14 +39,14 @@ public class VersionPropertyFileGeneratorMojo extends AbstractMojo { List components = null; try { - components = ClassPath - .from(VersionPropertyFileGeneratorMojo.class.getClassLoader()) - .getTopLevelClasses() - .stream() - .filter(t -> { - return t.getPackageName().equals(packageName); - }) - .collect(Collectors.toList()); + components = + ClassPath.from(VersionPropertyFileGeneratorMojo.class.getClassLoader()) + .getTopLevelClasses() + .stream() + .filter(t -> { + return t.getPackageName().equals(packageName); + }) + .collect(Collectors.toList()); } catch (IOException e) { throw new MojoFailureException(Msg.code(108) + e.getMessage(), e); } @@ -82,9 +79,8 @@ public class VersionPropertyFileGeneratorMojo extends AbstractMojo { } datatypeTypes.put(name, clazz); } - } - + try { Class clazz = Class.forName("org.hl7.fhir.utilities.xhtml.XhtmlNode"); DatatypeDef annotation = clazz.getAnnotation(DatatypeDef.class); @@ -93,7 +89,7 @@ public class VersionPropertyFileGeneratorMojo extends AbstractMojo { } catch (ClassNotFoundException e1) { throw new MojoFailureException(Msg.code(110) + "Unknown", e1); } - + ourLog.info("Found {} resources and {} datatypes", resourceTypes.size(), datatypeTypes.size()); ourLog.info("Writing propertyfile: {}", targetFile.getAbsolutePath()); @@ -128,28 +124,31 @@ public class 
VersionPropertyFileGeneratorMojo extends AbstractMojo { public static void main(String[] theArgs) throws MojoFailureException { VersionPropertyFileGeneratorMojo m; -// VersionPropertyFileGeneratorMojo m = new VersionPropertyFileGeneratorMojo(); -// m.packageName = "org.hl7.fhir.r4.model"; -// m.targetFile = new File("hapi-fhir-structures-r4/src/main/resources/org/hl7/fhir/r4/model/fhirversion.properties"); -// m.execute(); + // VersionPropertyFileGeneratorMojo m = new VersionPropertyFileGeneratorMojo(); + // m.packageName = "org.hl7.fhir.r4.model"; + // m.targetFile = new + // File("hapi-fhir-structures-r4/src/main/resources/org/hl7/fhir/r4/model/fhirversion.properties"); + // m.execute(); -// m = new VersionPropertyFileGeneratorMojo(); -// m.packageName = "org.hl7.fhir.r4b.model"; -// m.targetFile = new File("hapi-fhir-structures-r4b/src/main/resources/org/hl7/fhir/r4b/model/fhirversion.properties"); -// m.execute(); + // m = new VersionPropertyFileGeneratorMojo(); + // m.packageName = "org.hl7.fhir.r4b.model"; + // m.targetFile = new + // File("hapi-fhir-structures-r4b/src/main/resources/org/hl7/fhir/r4b/model/fhirversion.properties"); + // m.execute(); m = new VersionPropertyFileGeneratorMojo(); m.packageName = "org.hl7.fhir.r5.model"; - m.targetFile = new File("hapi-fhir-structures-r5/src/main/resources/org/hl7/fhir/r5/model/fhirversion.properties"); + m.targetFile = + new File("hapi-fhir-structures-r5/src/main/resources/org/hl7/fhir/r5/model/fhirversion.properties"); m.execute(); -// m.packageName = "org.hl7.fhir.dstu3.model"; -// m.targetFile = new File("../hapi-fhir-structures-dstu3/src/main/resources/org/hl7/fhir/dstu3/model/fhirversion.properties"); - -// m.packageName = "org.hl7.fhir.dstu2016may.model"; -// m.targetFile = new File("../hapi-fhir-structures-dstu2.1/src/main/resources/org/hl7/fhir/dstu2016may/model/fhirversion.properties"); + // m.packageName = "org.hl7.fhir.dstu3.model"; + // m.targetFile = new + // File("../hapi-fhir-structures-dstu3/src/main/resources/org/hl7/fhir/dstu3/model/fhirversion.properties"); + // m.packageName = "org.hl7.fhir.dstu2016may.model"; + // m.targetFile = new + // File("../hapi-fhir-structures-dstu2.1/src/main/resources/org/hl7/fhir/dstu2016may/model/fhirversion.properties"); } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/ant/TinderGeneratorTask.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/ant/TinderGeneratorTask.java index da6c9c6ada0..9819edcc795 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/ant/TinderGeneratorTask.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/ant/TinderGeneratorTask.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -18,7 +18,28 @@ * #L% */ package ca.uhn.fhir.tinder.ant; + +import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.i18n.Msg; +import ca.uhn.fhir.tinder.AbstractGenerator; +import ca.uhn.fhir.tinder.AbstractGenerator.FailureException; +import ca.uhn.fhir.tinder.GeneratorContext; +import ca.uhn.fhir.tinder.GeneratorContext.ResourceSource; +import ca.uhn.fhir.tinder.TinderResourceGeneratorMojo; +import ca.uhn.fhir.tinder.TinderStructuresMojo.ValueSetFileDefinition; +import ca.uhn.fhir.tinder.ValueSetGenerator; +import ca.uhn.fhir.tinder.VelocityHelper; +import ca.uhn.fhir.tinder.parser.BaseStructureParser; +import ca.uhn.fhir.tinder.parser.BaseStructureSpreadsheetParser; +import ca.uhn.fhir.tinder.parser.DatatypeGeneratorUsingSpreadsheet; +import ca.uhn.fhir.tinder.parser.TargetType; +import org.apache.commons.lang.WordUtils; +import org.apache.maven.plugin.MojoFailureException; +import org.apache.tools.ant.BuildException; +import org.apache.tools.ant.Task; +import org.apache.velocity.VelocityContext; +import org.apache.velocity.app.VelocityEngine; + import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; @@ -29,30 +50,8 @@ import java.util.ArrayList; import java.util.List; import java.util.StringTokenizer; -import ca.uhn.fhir.tinder.TinderResourceGeneratorMojo; -import org.apache.commons.lang.WordUtils; -import org.apache.maven.plugin.MojoFailureException; -import org.apache.tools.ant.BuildException; -import org.apache.tools.ant.Task; -import org.apache.velocity.VelocityContext; -import org.apache.velocity.app.VelocityEngine; - -import ca.uhn.fhir.context.FhirContext; -import ca.uhn.fhir.tinder.AbstractGenerator; -import ca.uhn.fhir.tinder.AbstractGenerator.ExecutionException; -import ca.uhn.fhir.tinder.AbstractGenerator.FailureException; -import ca.uhn.fhir.tinder.GeneratorContext; -import ca.uhn.fhir.tinder.GeneratorContext.ResourceSource; -import ca.uhn.fhir.tinder.TinderStructuresMojo.ValueSetFileDefinition; -import ca.uhn.fhir.tinder.ValueSetGenerator; -import ca.uhn.fhir.tinder.VelocityHelper; -import ca.uhn.fhir.tinder.parser.BaseStructureParser; -import ca.uhn.fhir.tinder.parser.BaseStructureSpreadsheetParser; -import ca.uhn.fhir.tinder.parser.DatatypeGeneratorUsingSpreadsheet; -import ca.uhn.fhir.tinder.parser.TargetType; - -/** /** + * /** * Generate files from FHIR resource/composite metadata using Velocity templates. *

    * Generates either source or resource files for each selected resource or @@ -60,7 +59,7 @@ import ca.uhn.fhir.tinder.parser.TargetType; * files are generated using a Velocity template that can be taken from * inside the hapi-timder-plugin project or can be located in other projects *

    - * The following Ant task properties are used + * The following Ant task properties are used *

 * The Javadoc table in this hunk documents the Ant task attributes; the changes here are
 * whitespace and line-wrapping only. The attributes described are:
 *
 *   projectHome         The project's base directory. This is used to possibly locate other
 *                       assets within the project used in file generation.
 *                       Optional; defaults to ${basedir}/..
 *   generateResources   Should files be generated from FHIR resource metadata?
 *                       Valid values: true | false. At least one of the four generate*
 *                       options must be specified as true.
 *   generateDataTypes   Should files be generated from FHIR composite data type metadata?
 *                       Valid values: true | false
 *   generateValueSets   Should files be generated from FHIR value set metadata? This option
 *                       can only be used if generating multiple files (one file per
 *                       value-set). Valid values: true | false
 *   generateProfiles    Should files be generated from FHIR profile metadata? This option
 *                       can only be used if generating multiple files (one file per
 *                       profile). Valid values: true | false
 *   resourceSource      Which source of resource definitions should be processed. Valid
 *                       values: spreadsheet (generate resources from the FHIR spreadsheets)
 *                       or model (generate resources from the model structure classes; note
 *                       that generateResources is the only one of the above options that can
 *                       be used when model is specified). Optional; defaults to spreadsheet.
 *
 * Java source files can be generated for FHIR resources or composite data types, either one
 * source file per selected entity or a single source file containing all of them. Resource
 * (non-Java) files can also be generated the same way. The following attributes control the
 * naming and templating of the generated files:
 *
 *   filenamePrefix      The prefix string that is to be added onto the beginning of the
 *                       resource or composite data type name to become the Java class name
 *                       or resource file name. Optional.
 *   filenameSuffix      Suffix that will be added onto the end of the resource or composite
 *                       data type name to become the Java class name or resource file name.
 *                       Optional.
 *   templateFile        The full path to the Velocity template that is to be used to
 *                       generate the files.
 *   velocityPath
 *   includeResources    A list of the names of the resources or composite data types that
 *                       should be used in the file generation. Optional; defaults to all
 *                       defined resources, except that Binary is excluded for DSTU2 and
 *                       Conformance is excluded for DSTU3.
 *   valueSetFiles       A list of files containing value-set resource definitions to be
 *                       used. Optional; defaults to all defined value-sets that are
 *                       referenced from the selected resources.
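 *
 * A minimal usage sketch (illustrative only, not part of this change: the taskdef name,
 * classpath reference, template path, and attribute values below are assumptions; the
 * attribute names follow the properties documented above, and the task class is
 * ca.uhn.fhir.tinder.ant.TinderGeneratorTask from this file):
 *
 *   <taskdef name="tinderGenerator"
 *            classname="ca.uhn.fhir.tinder.ant.TinderGeneratorTask"
 *            classpathref="hapi.generator.classpath"/>
 *
 *   <tinderGenerator version="dstu2"
 *                    template="/vm/fhir_resource.vm"
 *                    projectHome="${basedir}/.."
 *                    targetSourceDirectory="${basedir}/target/generated-sources/tinder"
 *                    targetPackage="com.example.fhir.generated"
 *                    generateResources="true"/>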
    - * - * - * + * + * + * * @author Bill Denton - * + * */ public class TinderGeneratorTask extends Task { @@ -269,7 +268,7 @@ public class TinderGeneratorTask extends Task { private boolean generateValueSets; private boolean generateProfiles; - + private File targetSourceDirectory; private String targetPackage; @@ -279,13 +278,13 @@ public class TinderGeneratorTask extends Task { private String targetFile; private String filenamePrefix; - + private String filenameSuffix; - + private File targetResourceDirectory; private String targetFolder; - + // one of these two is required private String template; private File templateFile; @@ -305,22 +304,20 @@ public class TinderGeneratorTask extends Task { private boolean verbose; private FhirContext fhirContext; // set from version in validateAttributes - + /** - * + * */ - public TinderGeneratorTask () { + public TinderGeneratorTask() { super(); } - - protected void cleanup () { - } + + protected void cleanup() {} @Override - public void execute () throws BuildException { + public void execute() throws BuildException { validateAttributes(); - GeneratorContext context = new GeneratorContext(); Generator generator = new Generator(); try { @@ -332,10 +329,12 @@ public class TinderGeneratorTask extends Task { context.setValueSetFiles(valueSetFiles); if (ResourceSource.MODEL.equals(context.getResourceSource())) { if (generateDatatypes) { - throw new BuildException(Msg.code(135) + "Cannot use \"generateDatatypes\" when resourceSource=model"); + throw new BuildException( + Msg.code(135) + "Cannot use \"generateDatatypes\" when resourceSource=model"); } if (generateValueSets) { - throw new BuildException(Msg.code(136) + "Cannot use \"generateValueSets\" when resourceSource=model"); + throw new BuildException( + Msg.code(136) + "Cannot use \"generateValueSets\" when resourceSource=model"); } } @@ -351,17 +350,21 @@ public class TinderGeneratorTask extends Task { File targetDirectory = null; if (targetSourceDirectory != null) { if (targetResourceDirectory != null) { - throw new BuildException(Msg.code(138) + "Both [targetSourceDirectory] and [targetResourceDirectory] are specified. Please choose just one."); + throw new BuildException( + Msg.code(138) + + "Both [targetSourceDirectory] and [targetResourceDirectory] are specified. Please choose just one."); } targetType = TargetType.SOURCE; if (null == targetPackage) { - throw new BuildException(Msg.code(139) + "The [targetPackage] property must be specified when generating Java source code."); + throw new BuildException(Msg.code(139) + + "The [targetPackage] property must be specified when generating Java source code."); } targetDirectory = new File(targetSourceDirectory, targetPackage.replace('.', File.separatorChar)); - } else - if (targetResourceDirectory != null) { + } else if (targetResourceDirectory != null) { if (targetSourceDirectory != null) { - throw new BuildException(Msg.code(140) + "Both [targetSourceDirectory] and [targetResourceDirectory] are specified. Please choose just one."); + throw new BuildException( + Msg.code(140) + + "Both [targetSourceDirectory] and [targetResourceDirectory] are specified. 
Please choose just one."); } targetType = TargetType.RESOURCE; if (targetFolder != null) { @@ -373,10 +376,11 @@ public class TinderGeneratorTask extends Task { targetPackage = ""; } } else { - throw new BuildException(Msg.code(141) + "Either [targetSourceDirectory] or [targetResourceDirectory] must be specified."); + throw new BuildException( + Msg.code(141) + "Either [targetSourceDirectory] or [targetResourceDirectory] must be specified."); } targetDirectory.mkdirs(); - log(" * Output ["+targetType.toString()+"] Directory: " + targetDirectory.getAbsolutePath()); + log(" * Output [" + targetType.toString() + "] Directory: " + targetDirectory.getAbsolutePath()); try { /* @@ -394,7 +398,8 @@ public class TinderGeneratorTask extends Task { /* * Next, deal with the template and initialize velocity */ - VelocityEngine v = VelocityHelper.configureVelocityEngine(templateFile, velocityPath, velocityProperties); + VelocityEngine v = + VelocityHelper.configureVelocityEngine(templateFile, velocityPath, velocityProperties); InputStream templateIs = null; if (templateFile != null) { templateIs = new FileInputStream(templateFile); @@ -409,8 +414,7 @@ public class TinderGeneratorTask extends Task { VelocityContext ctx = new VelocityContext(); if (packageBase != null) { ctx.put("packageBase", packageBase); - } else - if (targetPackage != null) { + } else if (targetPackage != null) { int ix = targetPackage.lastIndexOf('.'); if (ix > 0) { ctx.put("packageBase", targetPackage.subSequence(0, ix)); @@ -421,7 +425,10 @@ public class TinderGeneratorTask extends Task { ctx.put("targetPackage", targetPackage); ctx.put("targetFolder", targetFolder); ctx.put("version", version); - ctx.put("isRi", BaseStructureSpreadsheetParser.determineVersionEnum(version).isRi()); + ctx.put( + "isRi", + BaseStructureSpreadsheetParser.determineVersionEnum(version) + .isRi()); ctx.put("hash", "#"); ctx.put("esc", new TinderResourceGeneratorMojo.EscapeTool()); if (BaseStructureSpreadsheetParser.determineVersionEnum(version).isRi()) { @@ -432,7 +439,7 @@ public class TinderGeneratorTask extends Task { String capitalize = WordUtils.capitalize(version); if ("R4b".equals(capitalize)) { - capitalize="R4B"; + capitalize = "R4B"; } ctx.put("versionCapitalized", capitalize); @@ -455,9 +462,9 @@ public class TinderGeneratorTask extends Task { } } - /* - * Multiple files.. one for each element - */ + /* + * Multiple files.. 
one for each element + */ } else { /* * Write resources if selected @@ -503,16 +510,15 @@ public class TinderGeneratorTask extends Task { vsp.setVelocityProperties(velocityProperties); vsp.writeMarkedValueSets(targetType, targetDirectory, targetPackage); } - } } catch (Exception e) { if (e instanceof BuildException) { - throw (BuildException)e; + throw (BuildException) e; } - log("Caught exception: "+e.getClass().getName()+" ["+e.getMessage()+"]", 1); + log("Caught exception: " + e.getClass().getName() + " [" + e.getMessage() + "]", 1); e.printStackTrace(); - throw new BuildException(Msg.code(142) + "Error processing "+getTaskName()+" task.", e); + throw new BuildException(Msg.code(142) + "Error processing " + getTaskName() + " task.", e); } finally { cleanup(); } @@ -638,32 +644,38 @@ public class TinderGeneratorTask extends Task { public void setVerbose(boolean verbose) { this.verbose = verbose; } - + public void setVersion(String version) { this.version = version; } - protected void validateAttributes () throws BuildException { + protected void validateAttributes() throws BuildException { if (null == version) { - throw new BuildException(Msg.code(143) + "The "+this.getTaskName()+" task requires \"version\" attribute."); + throw new BuildException( + Msg.code(143) + "The " + this.getTaskName() + " task requires \"version\" attribute."); } if (null == template) { if (null == templateFile) { - throw new BuildException(Msg.code(144) + "The "+this.getTaskName()+" task requires \"template\" or \"templateFile\" attribute."); + throw new BuildException(Msg.code(144) + "The " + this.getTaskName() + + " task requires \"template\" or \"templateFile\" attribute."); } if (!templateFile.exists()) { - throw new BuildException(Msg.code(145) + "The Velocity template file ["+templateFile.getAbsolutePath()+"] does not exist."); + throw new BuildException(Msg.code(145) + "The Velocity template file [" + + templateFile.getAbsolutePath() + "] does not exist."); } if (!templateFile.canRead()) { - throw new BuildException(Msg.code(146) + "The Velocity template file ["+templateFile.getAbsolutePath()+"] cannot be read."); + throw new BuildException(Msg.code(146) + "The Velocity template file [" + templateFile.getAbsolutePath() + + "] cannot be read."); } if (!templateFile.isFile()) { - throw new BuildException(Msg.code(147) + "The Velocity template file ["+templateFile.getAbsolutePath()+"] is not a file."); + throw new BuildException(Msg.code(147) + "The Velocity template file [" + templateFile.getAbsolutePath() + + "] is not a file."); } } if (null == projectHome) { - throw new BuildException(Msg.code(148) + "The "+this.getTaskName()+" task requires \"projectHome\" attribute."); + throw new BuildException( + Msg.code(148) + "The " + this.getTaskName() + " task requires \"projectHome\" attribute."); } } @@ -680,5 +692,4 @@ public class TinderGeneratorTask extends Task { log(message); } } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/AnyChild.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/AnyChild.java index 0f903a8b780..a1c47b96a81 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/AnyChild.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/AnyChild.java @@ -16,6 +16,4 @@ public class AnyChild extends Child { public boolean isSingleChildInstantiable() { return false; } - - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/BaseElement.java 
b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/BaseElement.java index 5023c4d1059..e3ca7aae456 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/BaseElement.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/BaseElement.java @@ -1,6 +1,6 @@ package ca.uhn.fhir.tinder.model; -import static org.apache.commons.lang.StringUtils.*; +import org.apache.commons.lang3.StringUtils; import java.util.ArrayList; import java.util.Collections; @@ -8,7 +8,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.apache.commons.lang3.StringUtils; +import static org.apache.commons.lang.StringUtils.*; public abstract class BaseElement { @@ -288,7 +288,9 @@ public abstract class BaseElement { int idx = typeString.indexOf("Reference("); if (idx != -1) { int endIdx = typeString.indexOf(")"); - typeString = typeString.substring(0, idx) + typeString.substring(idx, endIdx).replace("|", ",") + typeString.substring(endIdx); + typeString = typeString.substring(0, idx) + + typeString.substring(idx, endIdx).replace("|", ",") + + typeString.substring(endIdx); } if (idx == 0 && typeString.endsWith(")")) { @@ -325,7 +327,6 @@ public abstract class BaseElement { } } } - } public void setV2Mapping(String theV2Mapping) { @@ -365,5 +366,4 @@ public abstract class BaseElement { public String getBindingUrl() { return myBindingUrl; } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/BaseRootType.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/BaseRootType.java index c368b27ccc9..a7afd8c9f8c 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/BaseRootType.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/BaseRootType.java @@ -1,11 +1,11 @@ package ca.uhn.fhir.tinder.model; -import java.util.ArrayList; -import java.util.List; - import ca.uhn.fhir.tinder.model.SearchParameter.Include; import edu.emory.mathcs.backport.java.util.Collections; +import java.util.ArrayList; +import java.util.List; + public abstract class BaseRootType extends BaseElement { private String myId; @@ -59,10 +59,10 @@ public abstract class BaseRootType extends BaseElement { public void addSearchParameter(SearchParameter theParam) { getSearchParameters(); mySearchParameters.add(theParam); - + List includes = theParam.getPaths(); for (Include include : includes) { - if (myIncludes.contains(include)==false) { + if (myIncludes.contains(include) == false) { myIncludes.add(include); } } @@ -72,5 +72,4 @@ public abstract class BaseRootType extends BaseElement { Collections.sort(myIncludes); return myIncludes; } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/Child.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/Child.java index 86ce96a2a3b..abe8c222338 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/Child.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/Child.java @@ -83,7 +83,8 @@ public abstract class Child extends BaseElement { retVal = getSingleType(); } } else { - if (this instanceof Extension && ((Extension) this).getChildExtensions().size() > 0) { + if (this instanceof Extension + && ((Extension) this).getChildExtensions().size() > 0) { retVal = ((Extension) this).getNameType(); } else { retVal = IDatatype.class.getSimpleName(); @@ -131,11 +132,11 @@ public abstract class Child extends BaseElement { // } else { // retVal = (elemName + getTypeSuffix()); // } - + if (retVal.equals("ResourceDt")) { retVal = 
"IResource"; } - + return retVal; } @@ -152,19 +153,19 @@ public abstract class Child extends BaseElement { return false; } - public boolean isPrimitive (String theType) { + public boolean isPrimitive(String theType) { return isPrimitiveInternal(theType); } public boolean isPrimitive() { - + if (IDatatype.class.getSimpleName().equals(getReferenceType())) { return false; } return isPrimitiveInternal(getSingleType()); } - protected boolean isPrimitiveInternal (String theType) { + protected boolean isPrimitiveInternal(String theType) { try { String name = "ca.uhn.fhir.model.primitive." + theType; Class.forName(name); @@ -174,7 +175,7 @@ public abstract class Child extends BaseElement { } } - public String getPrimitiveType (String theType) throws ClassNotFoundException { + public String getPrimitiveType(String theType) throws ClassNotFoundException { return getPrimitiveTypeInternal(theType); } @@ -182,13 +183,13 @@ public abstract class Child extends BaseElement { return getPrimitiveTypeInternal(getSingleType()); } - protected String getPrimitiveTypeInternal (String theType) throws ClassNotFoundException { + protected String getPrimitiveTypeInternal(String theType) throws ClassNotFoundException { String name = "ca.uhn.fhir.model.primitive." + theType; Class clazz = Class.forName(name); if (clazz.equals(IdDt.class)) { return String.class.getSimpleName(); } - + while (!clazz.getSuperclass().equals(BasePrimitive.class)) { clazz = clazz.getSuperclass(); if (clazz.equals(Object.class)) { @@ -199,7 +200,8 @@ public abstract class Child extends BaseElement { ParameterizedType type = (ParameterizedType) clazz.getGenericSuperclass(); Type type2 = type.getActualTypeArguments()[0]; if (type2 instanceof GenericArrayType) { - String arrayType = ((GenericArrayType) type2).getGenericComponentType().toString(); + String arrayType = + ((GenericArrayType) type2).getGenericComponentType().toString(); return arrayType + "[]"; } Class rawType = (Class) type2; @@ -228,5 +230,4 @@ public abstract class Child extends BaseElement { public boolean isSingleChildInstantiable() { return true; } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/CompartmentDef.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/CompartmentDef.java index 7cf26611d78..f63a0b5bd77 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/CompartmentDef.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/CompartmentDef.java @@ -51,5 +51,4 @@ public class CompartmentDef { myResourceName = theResourceName; } } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/Composite.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/Composite.java index 416888fe839..61e6b3f42d9 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/Composite.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/Composite.java @@ -5,6 +5,6 @@ public class Composite extends BaseRootType { @Override public void setElementName(String theName) { super.setElementName(theName); - setDeclaringClassNameComplete(theName+"Dt"); + setDeclaringClassNameComplete(theName + "Dt"); } } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/Extension.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/Extension.java index 2ce951ac93e..5d24a297497 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/Extension.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/Extension.java @@ -1,12 +1,12 @@ 
package ca.uhn.fhir.tinder.model; import ca.uhn.fhir.i18n.Msg; +import org.apache.commons.lang3.StringUtils; + import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import org.apache.commons.lang3.StringUtils; - public class Extension extends Child { private List myChildExtensions; @@ -26,7 +26,7 @@ public class Extension extends Child { } return retVal; } - + public Extension(String theName, String theUrl, Extension... theChildExtensions) { setName(theName); setUrl(theUrl); @@ -65,8 +65,11 @@ public class Extension extends Child { } public void setChildExtensions(List theChildExtensions) { - if (theChildExtensions != null && theChildExtensions.size() > 0 && getType().size() > 0) { - throw new IllegalArgumentException(Msg.code(186) + "Extension may not have a datatype AND child extensions"); + if (theChildExtensions != null + && theChildExtensions.size() > 0 + && getType().size() > 0) { + throw new IllegalArgumentException( + Msg.code(186) + "Extension may not have a datatype AND child extensions"); } myChildExtensions = theChildExtensions; } @@ -90,7 +93,8 @@ public class Extension extends Child { @Override public void setTypeFromString(String theType) { if (myChildExtensions != null && myChildExtensions.size() > 0 && StringUtils.isNotBlank(theType)) { - throw new IllegalArgumentException(Msg.code(187) + "Extension may not have a datatype AND child extensions"); + throw new IllegalArgumentException( + Msg.code(187) + "Extension may not have a datatype AND child extensions"); } super.setTypeFromString(theType); } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/Resource.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/Resource.java index 3b9e60a3f80..1c4129f4c74 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/Resource.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/Resource.java @@ -15,7 +15,7 @@ public class Resource extends BaseRootType { public static String correctName(String theName) { String name = theName; if ("List".equals(name)) { - name="ListResource"; + name = "ListResource"; } if (name.endsWith(".List")) { name = name + "Resource"; @@ -31,6 +31,7 @@ public class Resource extends BaseRootType { } return null; } + public List getSearchParameterNames() { ArrayList retVal = new ArrayList(); for (SearchParameter next : getSearchParameters()) { @@ -38,5 +39,4 @@ public class Resource extends BaseRootType { } return retVal; } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/ResourceBlock.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/ResourceBlock.java index 857a6c37444..c3713d641c0 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/ResourceBlock.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/ResourceBlock.java @@ -7,7 +7,7 @@ public class ResourceBlock extends Child { public ResourceBlock() { super(); } - + private String myForcedClassName; @Override @@ -23,32 +23,31 @@ public class ResourceBlock extends Child { setForcedClassName("ObjectElement"); } } - + public String getClassName() { if (myForcedClassName != null) { return myForcedClassName; } - -// return getElementName().substring(0, 1).toUpperCase() + getElementName().substring(1); + + // return getElementName().substring(0, 1).toUpperCase() + getElementName().substring(1); String name = getName(); return convertFhirPathNameToClassName(name); } public static String convertFhirPathNameToClassName(String name) { StringBuilder b = new 
StringBuilder(); - boolean first=true; + boolean first = true; for (String next : name.split("\\.")) { if (first) { - first=false; + first = false; continue; } b.append(next.substring(0, 1).toUpperCase() + next.substring(1)); } - + return b.toString(); } - @Override public String getDeclaringClassNameCompleteForChildren() { return getClassName(); @@ -65,7 +64,6 @@ public class ResourceBlock extends Child { } public void setForcedClassName(String theClassName) { - myForcedClassName =theClassName; + myForcedClassName = theClassName; } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/ResourceBlockCopy.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/ResourceBlockCopy.java index c1fd2b4dc93..e1251be6cb4 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/ResourceBlockCopy.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/ResourceBlockCopy.java @@ -3,16 +3,17 @@ package ca.uhn.fhir.tinder.model; public class ResourceBlockCopy extends Child { private ResourceBlock referencedBlock = null; - + @Override public boolean isBlockRef() { return referencedBlock != null; } - public ResourceBlock getReferencedBlock () { + public ResourceBlock getReferencedBlock() { return this.referencedBlock; } - public void setReferencedBlock (ResourceBlock referencedBlock) { + + public void setReferencedBlock(ResourceBlock referencedBlock) { this.referencedBlock = referencedBlock; } } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/SearchParameter.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/SearchParameter.java index fa651198092..7ac33c81693 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/SearchParameter.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/SearchParameter.java @@ -1,13 +1,13 @@ package ca.uhn.fhir.tinder.model; -import static org.apache.commons.lang3.StringUtils.isNotBlank; +import org.apache.commons.lang.WordUtils; +import org.apache.commons.lang3.StringUtils; import java.util.ArrayList; import java.util.Collections; import java.util.List; -import org.apache.commons.lang.WordUtils; -import org.apache.commons.lang3.StringUtils; +import static org.apache.commons.lang3.StringUtils.isNotBlank; public class SearchParameter { @@ -51,7 +51,12 @@ public class SearchParameter { } public String getConstantName() { - return "SP_" + myName.toUpperCase().replace("_[X]", "_X").replace("-[X]", "_X").replace('-', '_').replace("!", ""); + return "SP_" + + myName.toUpperCase() + .replace("_[X]", "_X") + .replace("-[X]", "_X") + .replace('-', '_') + .replace("!", ""); } public String getDescription() { @@ -63,7 +68,11 @@ public class SearchParameter { // return myName.toUpperCase(); // } // return myPath.toUpperCase().replace("_[X]", "_X").replace("-[X]", "_X").replace('-', '_').replace("!", ""); - return myName.toUpperCase().replace("_[X]", "_X").replace("-[X]", "_X").replace('-', '_').replace("!", ""); + return myName.toUpperCase() + .replace("_[X]", "_X") + .replace("-[X]", "_X") + .replace('-', '_') + .replace("!", ""); } public String getName() { @@ -71,7 +80,13 @@ public class SearchParameter { } public String getNameCapitalized() { - return WordUtils.capitalize(myName).replace("_[x]", "").replace("-[x]", "").replace("_[X]", "").replace("-[X]", "").replace('-', '_').replace("!", ""); + return WordUtils.capitalize(myName) + .replace("_[x]", "") + .replace("-[x]", "") + .replace("_[X]", "") + .replace("-[X]", "") + .replace('-', '_') + .replace("!", ""); } 
public String getPath() { @@ -165,18 +180,13 @@ public class SearchParameter { @Override public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; Include other = (Include) obj; if (myPath == null) { - if (other.myPath != null) - return false; - } else if (!myPath.equals(other.myPath)) - return false; + if (other.myPath != null) return false; + } else if (!myPath.equals(other.myPath)) return false; return true; } @@ -201,7 +211,5 @@ public class SearchParameter { result = prime * result + ((myPath == null) ? 0 : myPath.hashCode()); return result; } - } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/SimpleChild.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/SimpleChild.java index c3f163b9e8e..3df07737321 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/SimpleChild.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/SimpleChild.java @@ -1,5 +1,3 @@ package ca.uhn.fhir.tinder.model; -public class SimpleChild extends Child { - -} +public class SimpleChild extends Child {} diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/SimpleSetter.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/SimpleSetter.java index c42ffe889e8..1ceabc3227a 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/SimpleSetter.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/SimpleSetter.java @@ -26,7 +26,7 @@ public class SimpleSetter { } public void setSuffix(String theSuffix) { - mySuffix=theSuffix; + mySuffix = theSuffix; } public static class Parameter { @@ -49,7 +49,5 @@ public class SimpleSetter { public void setParameter(String theParameter) { myParameter = theParameter; } - } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/Slicing.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/Slicing.java index 7c79b60cdd8..f76af2b14fd 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/Slicing.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/Slicing.java @@ -11,5 +11,4 @@ public class Slicing { public void setDiscriminator(String theDiscriminator) { myDiscriminator = theDiscriminator; } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/UndeclaredExtensionChild.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/UndeclaredExtensionChild.java index 223608552cf..77e84562039 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/UndeclaredExtensionChild.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/UndeclaredExtensionChild.java @@ -1,15 +1,15 @@ package ca.uhn.fhir.tinder.model; -import java.util.ArrayList; - import ca.uhn.fhir.model.api.ExtensionDt; +import java.util.ArrayList; + public class UndeclaredExtensionChild extends Child { @Override public String getReferenceType() { if (isRepeatable()) { - return ArrayList.class.getCanonicalName() + "<" + ExtensionDt.class.getSimpleName()+">"; + return ArrayList.class.getCanonicalName() + "<" + ExtensionDt.class.getSimpleName() + ">"; } return ExtensionDt.class.getSimpleName(); } @@ -28,6 +28,4 @@ public class UndeclaredExtensionChild extends Child { public boolean isSingleChildInstantiable() { return true; } - - } diff --git 
a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/ValueSetTm.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/ValueSetTm.java index 5e9060d11b6..abb1792964c 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/ValueSetTm.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/model/ValueSetTm.java @@ -1,14 +1,13 @@ package ca.uhn.fhir.tinder.model; +import org.codehaus.plexus.util.StringUtils; + import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; -import org.apache.commons.lang3.ObjectUtils; -import org.codehaus.plexus.util.StringUtils; - public class ValueSetTm { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ValueSetTm.class); @@ -31,12 +30,9 @@ public class ValueSetTm { @Override public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; ValueSetTm other = (ValueSetTm) obj; String id1 = myId != null && myId.isEmpty() == false ? myId : myName; @@ -134,7 +130,9 @@ public class ValueSetTm { while (!Character.isLetterOrDigit(newValue.charAt(newValue.length() - 1))) { newValue = newValue.substring(0, newValue.length() - 1); } - ourLog.info("[{}] Replacing numeric code {} with description: {}", new Object[] { myName, retVal, newValue }); + ourLog.info( + "[{}] Replacing numeric code {} with description: {}", + new Object[] {myName, retVal, newValue}); retVal = newValue; } } @@ -158,7 +156,11 @@ public class ValueSetTm { } StringBuilder b = new StringBuilder(); - for (char next : retVal.toUpperCase().replace("'", "").replace("(", "").replace(")", "").toCharArray()) { + for (char next : retVal.toUpperCase() + .replace("'", "") + .replace("(", "") + .replace(")", "") + .toCharArray()) { if (Character.isJavaIdentifierPart(next)) { b.append(next); } else { @@ -189,7 +191,5 @@ public class ValueSetTm { public boolean isHasDisplay() { return StringUtils.isNotBlank(myDisplay); } - } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/BaseStructureParser.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/BaseStructureParser.java index f476b82322d..68b28b0b96b 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/BaseStructureParser.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/BaseStructureParser.java @@ -1,8 +1,8 @@ package ca.uhn.fhir.tinder.parser; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.api.ExtensionDt; import ca.uhn.fhir.model.api.IResource; import ca.uhn.fhir.model.api.annotation.SimpleSetter; @@ -120,7 +120,9 @@ public abstract class BaseStructureParser { myLocallyDefinedClassNames.put(bindingClass, "valueset"); } else { ourLog.debug("No binding found for: {}", theElement.getBinding()); - ourLog.debug(" * Valid: {}", new TreeSet(theVsp.getValueSets().keySet())); + ourLog.debug( + " * Valid: {}", + new TreeSet(theVsp.getValueSets().keySet())); } } for (BaseElement next : theElement.getChildren()) { @@ -153,14 +155,16 @@ public abstract class BaseStructureParser { if (theNextType.endsWith("Dt")) { unqualifiedTypeName = theNextType.substring(0, theNextType.length() - 2); try { - return 
Class.forName("org.hl7.fhir.dstu3.model." + unqualifiedTypeName + "Type").getName(); + return Class.forName("org.hl7.fhir.dstu3.model." + unqualifiedTypeName + "Type") + .getName(); } catch (ClassNotFoundException e1) { // not found } } try { - return Class.forName("org.hl7.fhir.dstu3.model." + unqualifiedTypeName).getName(); + return Class.forName("org.hl7.fhir.dstu3.model." + unqualifiedTypeName) + .getName(); } catch (ClassNotFoundException e) { // not found } @@ -228,17 +232,21 @@ public abstract class BaseStructureParser { Class.forName(type); return (type); } catch (ClassNotFoundException e6) { - String fileName = myBaseDir + "/src/main/java/" + myPackageBase.replace('.', '/') + "/composite/" + nextType + ".java"; + String fileName = myBaseDir + "/src/main/java/" + + myPackageBase.replace('.', '/') + "/composite/" + nextType + ".java"; File file = new File(fileName); if (file.exists()) { return myPackageBase + ".composite." + nextType; } - fileName = myBaseDir + "/src/main/java/ca/uhn/fhir/model/primitive/" + nextType + ".java"; + fileName = myBaseDir + "/src/main/java/ca/uhn/fhir/model/primitive/" + nextType + + ".java"; file = new File(fileName); if (file.exists()) { return "ca.uhn.fhir.model.primitive." + nextType; } - throw new MojoFailureException(Msg.code(152) + "Unknown type: " + nextType + " - Have locally defined names: " + new TreeSet(myLocallyDefinedClassNames.keySet())); + throw new MojoFailureException(Msg.code(152) + "Unknown type: " + nextType + + " - Have locally defined names: " + + new TreeSet(myLocallyDefinedClassNames.keySet())); } } } @@ -249,13 +257,17 @@ public abstract class BaseStructureParser { } } - private ca.uhn.fhir.model.api.annotation.SimpleSetter.Parameter findAnnotation(Class theBase, Annotation[] theAnnotations, Class theClass) { + private ca.uhn.fhir.model.api.annotation.SimpleSetter.Parameter findAnnotation( + Class theBase, + Annotation[] theAnnotations, + Class theClass) { for (Annotation next : theAnnotations) { if (theClass.equals(next.annotationType())) { return (ca.uhn.fhir.model.api.annotation.SimpleSetter.Parameter) next; } } - throw new IllegalArgumentException(Msg.code(153) + theBase.getCanonicalName() + " has @" + SimpleSetter.class.getCanonicalName() + " constructor with no/invalid parameter annotation"); + throw new IllegalArgumentException(Msg.code(153) + theBase.getCanonicalName() + " has @" + + SimpleSetter.class.getCanonicalName() + " constructor with no/invalid parameter annotation"); } /** @@ -417,11 +429,13 @@ public abstract class BaseStructureParser { Class childDt; if (theElem.getReferenceTypesForMultiple().size() == 1) { try { - childDt = Class.forName("ca.uhn.fhir.model.primitive." + theElem.getReferenceTypesForMultiple().get(0)); + childDt = Class.forName("ca.uhn.fhir.model.primitive." + + theElem.getReferenceTypesForMultiple().get(0)); } catch (ClassNotFoundException e) { if (myVersion.equals("dstu")) { try { - childDt = Class.forName("ca.uhn.fhir.model.dstu.composite." + theElem.getReferenceTypesForMultiple().get(0)); + childDt = Class.forName("ca.uhn.fhir.model.dstu.composite." 
+ + theElem.getReferenceTypesForMultiple().get(0)); } catch (ClassNotFoundException e2) { return; } @@ -456,7 +470,8 @@ public abstract class BaseStructureParser { } p.setDatatype(paramTypes[i].getSimpleName()); } - p.setParameter(findAnnotation(childDt, paramAnn[i], SimpleSetter.Parameter.class).name()); + p.setParameter(findAnnotation(childDt, paramAnn[i], SimpleSetter.Parameter.class) + .name()); ss.getParameters().add(p); } } @@ -497,7 +512,8 @@ public abstract class BaseStructureParser { myVelocityProperties = theVelocityProperties; } - private void write(BaseRootType theResource, File theFile, String thePackageBase) throws IOException, MojoFailureException { + private void write(BaseRootType theResource, File theFile, String thePackageBase) + throws IOException, MojoFailureException { ArrayList imports = new ArrayList<>(); for (String next : myImports) { next = Resource.correctName(next); @@ -555,7 +571,8 @@ public abstract class BaseStructureParser { ctx.put("versionCapitalized", capitalize); ctx.put("this", theResource); - VelocityEngine v = VelocityHelper.configureVelocityEngine(getTemplateFile(), getVelocityPath(), myVelocityProperties); + VelocityEngine v = + VelocityHelper.configureVelocityEngine(getTemplateFile(), getVelocityPath(), myVelocityProperties); InputStream templateIs = null; if (getTemplateFile() != null) { templateIs = new FileInputStream(getTemplateFile()); @@ -596,11 +613,14 @@ public abstract class BaseStructureParser { } } - public void writeAll(File theOutputDirectory, File theResourceOutputDirectory, String thePackageBase) throws MojoFailureException { + public void writeAll(File theOutputDirectory, File theResourceOutputDirectory, String thePackageBase) + throws MojoFailureException { writeAll(TargetType.SOURCE, theOutputDirectory, theResourceOutputDirectory, thePackageBase); } - public void writeAll(TargetType theTarget, File theOutputDirectory, File theResourceOutputDirectory, String thePackageBase) throws MojoFailureException { + public void writeAll( + TargetType theTarget, File theOutputDirectory, File theResourceOutputDirectory, String thePackageBase) + throws MojoFailureException { myPackageBase = thePackageBase; if (!theOutputDirectory.exists()) { @@ -671,7 +691,9 @@ public abstract class BaseStructureParser { if (determineVersionEnum() == FhirVersionEnum.DSTU2) { myNameToDatatypeClass.put("boundCode", BoundCodeDt.class.getName()); - myNameToDatatypeClass.put("boundCodeableConcept", ca.uhn.fhir.model.dstu2.composite.BoundCodeableConceptDt.class.getName()); + myNameToDatatypeClass.put( + "boundCodeableConcept", + ca.uhn.fhir.model.dstu2.composite.BoundCodeableConceptDt.class.getName()); } try { @@ -701,10 +723,13 @@ public abstract class BaseStructureParser { VelocityEngine v = new VelocityEngine(); v.setProperty(RuntimeConstants.RESOURCE_LOADERS, "cp"); - v.setProperty("resource.loader.cp.class", "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader"); + v.setProperty( + "resource.loader.cp.class", + "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader"); v.setProperty("runtime.strict_mode.enable", Boolean.TRUE); - InputStream templateIs = ResourceGeneratorUsingSpreadsheet.class.getResourceAsStream("/vm/fhirversion_properties.vm"); + InputStream templateIs = + ResourceGeneratorUsingSpreadsheet.class.getResourceAsStream("/vm/fhirversion_properties.vm"); InputStreamReader templateReader = new InputStreamReader(templateIs); v.evaluate(ctx, w, "", templateReader); @@ -778,5 +803,4 @@ public abstract class 
BaseStructureParser { m.setBuildDatatypes(true); m.execute(); } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/BaseStructureSpreadsheetParser.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/BaseStructureSpreadsheetParser.java index 6dcc5fbfe6c..42c3a3dcc48 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/BaseStructureSpreadsheetParser.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/BaseStructureSpreadsheetParser.java @@ -1,7 +1,7 @@ package ca.uhn.fhir.tinder.parser; -import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.model.dstu2.resource.Bundle; import ca.uhn.fhir.model.dstu2.resource.Bundle.Entry; import ca.uhn.fhir.model.dstu2.resource.ValueSet; @@ -31,7 +31,8 @@ public abstract class BaseStructureSpreadsheetParser extends BaseStructureParser myBindingRefs = new HashMap<>(); } - private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseStructureSpreadsheetParser.class); + private static final org.slf4j.Logger ourLog = + org.slf4j.LoggerFactory.getLogger(BaseStructureSpreadsheetParser.class); private int myColBinding = -1; private int myColModifier = -1; private int myColSummary = -1; @@ -52,7 +53,8 @@ public abstract class BaseStructureSpreadsheetParser extends BaseStructureParser if (getVersion().equals("dstu2")) { ourLog.info("Loading ValueSets..."); FhirContext ctx = FhirContext.forDstu2(); - String path = ctx.getVersion().getPathToSchemaDefinitions().replace("/schema", "/valueset") + "/valuesets.xml"; + String path = + ctx.getVersion().getPathToSchemaDefinitions().replace("/schema", "/valueset") + "/valuesets.xml"; InputStream valuesetText = BaseStructureSpreadsheetParser.class.getResourceAsStream(path); Bundle bundle = ctx.newXmlParser().parseResource(Bundle.class, new InputStreamReader(valuesetText)); @@ -150,7 +152,8 @@ public abstract class BaseStructureSpreadsheetParser extends BaseStructureParser elements.put(elem.getName(), elem); BaseElement parent = elements.get(elem.getElementParentName()); if (parent == null) { - throw new Exception(Msg.code(165) + "Can't find element " + elem.getElementParentName() + " - Valid values are: " + elements.keySet()); + throw new Exception(Msg.code(165) + "Can't find element " + elem.getElementParentName() + + " - Valid values are: " + elements.keySet()); } parent.addChild(elem); @@ -160,7 +163,6 @@ public abstract class BaseStructureSpreadsheetParser extends BaseStructureParser if (elem instanceof Child) { scanForSimpleSetters(elem); } - } postProcess(resource); @@ -192,24 +194,25 @@ public abstract class BaseStructureSpreadsheetParser extends BaseStructureParser BaseElement parent = elements.get(element.getElementParentName()); List children = parent.getChildren(); for (BaseElement child : children) { - if (!child.equals(blockCopy) && child instanceof ResourceBlock - && child.getElementName().equals(blockCopy.getElementName())) { - ((ResourceBlockCopy)blockCopy).setReferencedBlock((ResourceBlock)child); + if (!child.equals(blockCopy) + && child instanceof ResourceBlock + && child.getElementName().equals(blockCopy.getElementName())) { + ((ResourceBlockCopy) blockCopy).setReferencedBlock((ResourceBlock) child); break refLoop; } } element = parent; } } - + index++; } ourLog.info("Parsed {} spreadsheet structures", getResources().size()); - } - private Element findSheetByName(String spreadsheetName, String wantedName, Document file, boolean theFailIfNotFound) 
throws Exception { + private Element findSheetByName(String spreadsheetName, String wantedName, Document file, boolean theFailIfNotFound) + throws Exception { Element retVal = null; for (int i = 0; i < file.getElementsByTagName("Worksheet").getLength() && retVal == null; i++) { retVal = (Element) file.getElementsByTagName("Worksheet").item(i); @@ -219,7 +222,8 @@ public abstract class BaseStructureSpreadsheetParser extends BaseStructureParser } if (retVal == null && theFailIfNotFound) { - throw new Exception(Msg.code(166) + "Failed to find worksheet with name '" + wantedName + "' in spreadsheet: " + spreadsheetName); + throw new Exception(Msg.code(166) + "Failed to find worksheet with name '" + wantedName + + "' in spreadsheet: " + spreadsheetName); } return retVal; } @@ -346,11 +350,14 @@ public abstract class BaseStructureSpreadsheetParser extends BaseStructureParser // if(true)continue; if (isBlank(nextCompositeParam.getPath())) { - throw new MojoExecutionException(Msg.code(167) + "Composite param " + nextCompositeParam.getName() + " has no path"); + throw new MojoExecutionException( + Msg.code(167) + "Composite param " + nextCompositeParam.getName() + " has no path"); } if (nextCompositeParam.getPath().indexOf('&') == -1) { - throw new MojoExecutionException(Msg.code(168) + "Composite param " + nextCompositeParam.getName() + " has path with no '&': " + nextCompositeParam.getPath()); + throw new MojoExecutionException( + Msg.code(168) + "Composite param " + nextCompositeParam.getName() + + " has path with no '&': " + nextCompositeParam.getPath()); } String[] parts = nextCompositeParam.getPath().split("\\&"); @@ -374,7 +381,8 @@ public abstract class BaseStructureSpreadsheetParser extends BaseStructureParser possibleMatches.add(theResource.getName() + "." + nextPart.replace("-[x]", "[x]")); for (SearchParameter nextParam : theResource.getSearchParameters()) { - if (possibleMatches.contains(nextParam.getPath()) || possibleMatches.contains(nextParam.getName())) { + if (possibleMatches.contains(nextParam.getPath()) + || possibleMatches.contains(nextParam.getName())) { part.add(nextParam); } } @@ -383,23 +391,27 @@ public abstract class BaseStructureSpreadsheetParser extends BaseStructureParser * Paths have changed in DSTU2 */ for (SearchParameter nextParam : theResource.getSearchParameters()) { - if (nextPart.equals("value[x]") && (nextParam.getName().startsWith("value-"))) { + if (nextPart.equals("value[x]") + && (nextParam.getName().startsWith("value-"))) { part.add(nextParam); } - if (nextPart.equals("component-value[x]") && (nextParam.getName().startsWith("component-value-"))) { + if (nextPart.equals("component-value[x]") + && (nextParam.getName().startsWith("component-value-"))) { part.add(nextParam); } } if (part.isEmpty()) { - throw new MojoExecutionException(Msg.code(169) + "Composite param " + nextCompositeParam.getName() + " has path that doesn't seem to correspond to any other params: " + nextPart); + throw new MojoExecutionException(Msg.code(169) + "Composite param " + + nextCompositeParam.getName() + + " has path that doesn't seem to correspond to any other params: " + nextPart); } - } if (compositeOf.size() > 2) { // TODO: change back to exception maybe? Grahame says these aren't allowed.. 
- ourLog.warn("Composite param " + nextCompositeParam.getName() + " has >2 parts, this isn't supported yet"); + ourLog.warn("Composite param " + nextCompositeParam.getName() + + " has >2 parts, this isn't supported yet"); continue; } @@ -412,10 +424,10 @@ public abstract class BaseStructureSpreadsheetParser extends BaseStructureParser composite.setPath(nextCompositeParam.getPath()); composite.setType("composite"); composite.setCompositeOf(Arrays.asList(part1.getName(), part2.getName())); - composite.setCompositeTypes(Arrays.asList(WordUtils.capitalize(part1.getType()), WordUtils.capitalize(part2.getType()))); + composite.setCompositeTypes(Arrays.asList( + WordUtils.capitalize(part1.getType()), WordUtils.capitalize(part2.getType()))); } } - } } } @@ -520,7 +532,6 @@ public abstract class BaseStructureSpreadsheetParser extends BaseStructureParser } } } - } /** @@ -529,5 +540,4 @@ public abstract class BaseStructureSpreadsheetParser extends BaseStructureParser protected void postProcess(BaseElement theTarget) throws MojoFailureException { // nothing } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/CompartmentParser.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/CompartmentParser.java index 244ce912445..d89a38e9f56 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/CompartmentParser.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/CompartmentParser.java @@ -1,22 +1,19 @@ package ca.uhn.fhir.tinder.parser; import ca.uhn.fhir.i18n.Msg; -import static org.apache.commons.lang3.StringUtils.isBlank; +import ca.uhn.fhir.tinder.model.Resource; +import ca.uhn.fhir.tinder.model.SearchParameter; +import ca.uhn.fhir.tinder.util.XMLUtils; +import org.apache.maven.plugin.MojoFailureException; +import org.w3c.dom.Document; +import org.w3c.dom.Element; +import org.w3c.dom.NodeList; import java.io.InputStream; import java.util.HashMap; import java.util.Map; -import org.apache.maven.plugin.MojoFailureException; -import org.w3c.dom.Document; -import org.w3c.dom.Element; -import org.w3c.dom.Node; -import org.w3c.dom.NodeList; - -import ca.uhn.fhir.tinder.model.BaseElement; -import ca.uhn.fhir.tinder.model.Resource; -import ca.uhn.fhir.tinder.model.SearchParameter; -import ca.uhn.fhir.tinder.util.XMLUtils; +import static org.apache.commons.lang3.StringUtils.isBlank; public class CompartmentParser { @@ -28,7 +25,7 @@ public class CompartmentParser { myVersion = theVersion; myResourceDef = theResourceDef; } - + public void parse() throws Exception { String resName = "/compartment/" + myVersion + "/compartments.xml"; InputStream nextRes = getClass().getResourceAsStream(resName); @@ -48,44 +45,49 @@ public class CompartmentParser { Element resourcesSheet = null; for (int i = 0; i < file.getElementsByTagName("Worksheet").getLength() && resourcesSheet == null; i++) { resourcesSheet = (Element) file.getElementsByTagName("Worksheet").item(i); - if (!"resources".equals(resourcesSheet.getAttributeNS("urn:schemas-microsoft-com:office:spreadsheet", "Name"))) { + if (!"resources" + .equals(resourcesSheet.getAttributeNS("urn:schemas-microsoft-com:office:spreadsheet", "Name"))) { resourcesSheet = null; } } if (resourcesSheet == null) { - throw new Exception(Msg.code(180) + "Failed to find worksheet with name 'Data Elements' in spreadsheet: " + resName); + throw new Exception( + Msg.code(180) + "Failed to find worksheet with name 'Data Elements' in spreadsheet: " + resName); } Element table = (Element) 
resourcesSheet.getElementsByTagName("Table").item(0); NodeList rows = table.getElementsByTagName("Row"); - + Map col2compartment = new HashMap(); Element headerRow = (Element) rows.item(0); for (int i = 1; i < headerRow.getElementsByTagName("Cell").getLength(); i++) { - Element cellElement = (Element) headerRow.getElementsByTagName("Cell").item(i); - Element dataElement = (Element) cellElement.getElementsByTagName("Data").item(0); + Element cellElement = + (Element) headerRow.getElementsByTagName("Cell").item(i); + Element dataElement = + (Element) cellElement.getElementsByTagName("Data").item(0); col2compartment.put(i, dataElement.getTextContent()); } - + Element row = null; for (int i = 1; i < rows.getLength(); i++) { Element nextRow = (Element) rows.item(i); - + NodeList cells = nextRow.getElementsByTagName("Cell"); Element cellElement = (Element) cells.item(0); - Element dataElement = (Element) cellElement.getElementsByTagName("Data").item(0); + Element dataElement = + (Element) cellElement.getElementsByTagName("Data").item(0); if (dataElement.getTextContent().equals(myResourceDef.getName())) { row = nextRow; break; } } - + if (row == null) { ourLog.debug("No compartments for resource {}", myResourceDef.getName()); return; } - + NodeList cells = row.getElementsByTagName("Cell"); for (int i = 1; i < cells.getLength(); i++) { Element cellElement = (Element) cells.item(i); @@ -93,10 +95,11 @@ public class CompartmentParser { if (cellElement.hasAttribute("Index")) { index = Integer.parseInt(cellElement.getAttribute("Index")); } - + String compartment = col2compartment.get(index); - - Element dataElement = (Element) cellElement.getElementsByTagName("Data").item(0); + + Element dataElement = + (Element) cellElement.getElementsByTagName("Data").item(0); String namesUnsplit = dataElement.getTextContent(); String[] namesSplit = namesUnsplit.split("\\|"); for (String nextName : namesSplit) { @@ -104,7 +107,7 @@ public class CompartmentParser { if (isBlank(nextName)) { continue; } - + String[] parts = nextName.split("\\."); if (parts[0].equals("{def}")) { continue; @@ -112,12 +115,12 @@ public class CompartmentParser { Resource element = myResourceDef; SearchParameter sp = element.getSearchParameterByName(parts[0]); if (sp == null) { - throw new MojoFailureException(Msg.code(181) + "Can't find child named " + parts[0] + " - Valid names: " + element.getSearchParameterNames()); + throw new MojoFailureException(Msg.code(181) + "Can't find child named " + parts[0] + + " - Valid names: " + element.getSearchParameterNames()); } - + sp.addCompartment(compartment); } } } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/DatatypeGeneratorUsingSpreadsheet.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/DatatypeGeneratorUsingSpreadsheet.java index 35c8bf8a296..37e6938d81a 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/DatatypeGeneratorUsingSpreadsheet.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/DatatypeGeneratorUsingSpreadsheet.java @@ -1,6 +1,16 @@ package ca.uhn.fhir.tinder.parser; import ca.uhn.fhir.i18n.Msg; +import ca.uhn.fhir.model.api.annotation.DatatypeDef; +import ca.uhn.fhir.model.primitive.StringDt; +import ca.uhn.fhir.tinder.model.BaseElement; +import ca.uhn.fhir.tinder.model.BaseRootType; +import ca.uhn.fhir.tinder.model.Composite; +import com.google.common.collect.ImmutableSet; +import com.google.common.reflect.ClassPath; +import com.google.common.reflect.ClassPath.ClassInfo; +import 
org.apache.maven.plugin.MojoFailureException; + import java.io.File; import java.io.IOException; import java.io.InputStream; @@ -8,18 +18,6 @@ import java.util.ArrayList; import java.util.Collection; import java.util.List; -import org.apache.maven.plugin.MojoFailureException; - -import ca.uhn.fhir.model.api.annotation.DatatypeDef; -import ca.uhn.fhir.model.primitive.StringDt; -import ca.uhn.fhir.tinder.model.BaseElement; -import ca.uhn.fhir.tinder.model.BaseRootType; -import ca.uhn.fhir.tinder.model.Composite; - -import com.google.common.collect.ImmutableSet; -import com.google.common.reflect.ClassPath; -import com.google.common.reflect.ClassPath.ClassInfo; - public class DatatypeGeneratorUsingSpreadsheet extends BaseStructureSpreadsheetParser { @Override @@ -59,10 +57,12 @@ public class DatatypeGeneratorUsingSpreadsheet extends BaseStructureSpreadsheetP } @Override - public void writeAll(File theOutputDirectory, File theResourceOutputDirectory, String thePackageBase) throws MojoFailureException { + public void writeAll(File theOutputDirectory, File theResourceOutputDirectory, String thePackageBase) + throws MojoFailureException { try { - ImmutableSet tlc = ClassPath.from(getClass().getClassLoader()).getTopLevelClasses(StringDt.class.getPackage().getName()); + ImmutableSet tlc = ClassPath.from(getClass().getClassLoader()) + .getTopLevelClasses(StringDt.class.getPackage().getName()); for (ClassInfo classInfo : tlc) { DatatypeDef def = Class.forName(classInfo.getName()).getAnnotation(DatatypeDef.class); if (def != null) { @@ -78,7 +78,8 @@ public class DatatypeGeneratorUsingSpreadsheet extends BaseStructureSpreadsheetP } try { - ImmutableSet tlc = ClassPath.from(getClass().getClassLoader()).getTopLevelClasses(thePackageBase + ".composite"); + ImmutableSet tlc = + ClassPath.from(getClass().getClassLoader()).getTopLevelClasses(thePackageBase + ".composite"); for (ClassInfo classInfo : tlc) { DatatypeDef def = Class.forName(classInfo.getName()).getAnnotation(DatatypeDef.class); if (def != null) { @@ -140,5 +141,4 @@ public class DatatypeGeneratorUsingSpreadsheet extends BaseStructureSpreadsheetP return retVal; } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/ResourceGeneratorUsingModel.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/ResourceGeneratorUsingModel.java index 5513746fa73..7a77d4740c6 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/ResourceGeneratorUsingModel.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/ResourceGeneratorUsingModel.java @@ -1,19 +1,18 @@ package ca.uhn.fhir.tinder.parser; -import ca.uhn.fhir.i18n.Msg; -import java.io.File; -import java.util.*; -import java.util.stream.Collectors; - -import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; -import org.apache.commons.lang.WordUtils; - import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.context.RuntimeSearchParam; +import ca.uhn.fhir.i18n.Msg; +import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; import ca.uhn.fhir.tinder.model.Resource; import ca.uhn.fhir.tinder.model.SearchParameter; +import org.apache.commons.lang.WordUtils; import org.apache.maven.plugin.MojoFailureException; +import java.io.File; +import java.util.stream.Collectors; +import java.util.*; + public class ResourceGeneratorUsingModel extends BaseStructureParser { private String myFilenameSuffix; @@ -62,22 +61,26 @@ public class ResourceGeneratorUsingModel extends BaseStructureParser { public void parse() { for (String nextResourceName 
: myResourceNames) { RuntimeResourceDefinition def = getCtx().getResourceDefinition(nextResourceName); - + Resource resource = new Resource(); resource.setName(def.getName()); resource.setElementName(def.getName()); addResource(resource); - + for (RuntimeSearchParam nextSearchParam : def.getSearchParams()) { SearchParameter param = new SearchParameter(getVersion(), def.getName()); - List compositeOfParams = nextSearchParam - .getComponents() - .stream() - .map(t -> def.getSearchParams().stream().filter(y -> y.getUri().equals(t.getReference())).findFirst().orElseThrow(() -> new IllegalStateException())) - .collect(Collectors.toList()); - if (nextSearchParam.getParamType() == RestSearchParameterTypeEnum.COMPOSITE && compositeOfParams.size() != 2) { - throw new IllegalStateException(Msg.code(163) + "Search param " + nextSearchParam.getName() + " on base " + nextSearchParam.getBase() + " has components: " + nextSearchParam.getComponents()); + List compositeOfParams = nextSearchParam.getComponents().stream() + .map(t -> def.getSearchParams().stream() + .filter(y -> y.getUri().equals(t.getReference())) + .findFirst() + .orElseThrow(() -> new IllegalStateException())) + .collect(Collectors.toList()); + if (nextSearchParam.getParamType() == RestSearchParameterTypeEnum.COMPOSITE + && compositeOfParams.size() != 2) { + throw new IllegalStateException(Msg.code(163) + "Search param " + nextSearchParam.getName() + + " on base " + nextSearchParam.getBase() + " has components: " + + nextSearchParam.getComponents()); } param.setName(nextSearchParam.getName()); @@ -86,7 +89,7 @@ public class ResourceGeneratorUsingModel extends BaseStructureParser { param.setCompositeTypes(toCompositeOfTypes(compositeOfParams)); param.setPath(nextSearchParam.getPath()); param.setType(nextSearchParam.getParamType().getCode()); - + resource.addSearchParameter(param); } } @@ -115,5 +118,4 @@ public class ResourceGeneratorUsingModel extends BaseStructureParser { } return retVal; } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/ResourceGeneratorUsingSpreadsheet.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/ResourceGeneratorUsingSpreadsheet.java index dc791ca0ae0..94316997385 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/ResourceGeneratorUsingSpreadsheet.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/ResourceGeneratorUsingSpreadsheet.java @@ -1,17 +1,16 @@ package ca.uhn.fhir.tinder.parser; import ca.uhn.fhir.i18n.Msg; +import ca.uhn.fhir.tinder.model.BaseElement; +import ca.uhn.fhir.tinder.model.BaseRootType; +import ca.uhn.fhir.tinder.model.Resource; +import org.apache.maven.plugin.MojoFailureException; + import java.io.InputStream; import java.util.ArrayList; import java.util.Collection; import java.util.List; -import org.apache.maven.plugin.MojoFailureException; - -import ca.uhn.fhir.tinder.model.BaseElement; -import ca.uhn.fhir.tinder.model.BaseRootType; -import ca.uhn.fhir.tinder.model.Resource; - public class ResourceGeneratorUsingSpreadsheet extends BaseStructureSpreadsheetParser { private List myInputStreamNames; private ArrayList myInputStreams; @@ -29,11 +28,11 @@ public class ResourceGeneratorUsingSpreadsheet extends BaseStructureSpreadsheetP @Override protected void postProcess(BaseElement theTarget) throws MojoFailureException { super.postProcess(theTarget); - + if ("Bundle".equals(theTarget.getName())) { addEverythingToSummary(theTarget); } - + if (getVersion().equals("dstu2") && theTarget instanceof Resource) { try { 
new CompartmentParser(getVersion(), (Resource) theTarget).parse(); @@ -41,7 +40,6 @@ public class ResourceGeneratorUsingSpreadsheet extends BaseStructureSpreadsheetP throw new MojoFailureException(Msg.code(161) + e.toString(), e); } } - } private void addEverythingToSummary(BaseElement theTarget) { @@ -58,7 +56,7 @@ public class ResourceGeneratorUsingSpreadsheet extends BaseStructureSpreadsheetP for (String next : theBaseResourceNames) { String resName = "/res/" + getVersion() + "/" + next.toLowerCase() + "-spreadsheet.xml"; resName = resName.replace("/dev/", "/dstu2/"); - + InputStream nextRes = getClass().getResourceAsStream(resName); myInputStreams.add(nextRes); if (nextRes == null) { @@ -93,5 +91,4 @@ public class ResourceGeneratorUsingSpreadsheet extends BaseStructureSpreadsheetP protected boolean isSpreadsheet(String theFileName) { return theFileName.endsWith("spreadsheet.xml"); } - } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/TargetType.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/TargetType.java index 737b5a9e617..5d3898d578e 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/TargetType.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/parser/TargetType.java @@ -1,5 +1,5 @@ /** - * + * */ package ca.uhn.fhir.tinder.parser; @@ -13,7 +13,7 @@ public enum TargetType { * but others might also be generated. */ SOURCE, - + /* * The generator will primarilly produce non-source * files that should be added to Maven Resources diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/util/SyncUtil.java b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/util/SyncUtil.java index 950ce7c1b82..f47e3d96626 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/util/SyncUtil.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/util/SyncUtil.java @@ -1,21 +1,22 @@ package ca.uhn.fhir.tinder.util; -import java.io.File; -import java.io.FileOutputStream; -import java.io.FileReader; -import java.io.OutputStreamWriter; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.model.dstu2.composite.NarrativeDt; import ca.uhn.fhir.model.dstu2.resource.BaseResource; import ca.uhn.fhir.model.dstu2.resource.Bundle; import ca.uhn.fhir.model.dstu2.resource.Bundle.Entry; +import java.io.File; +import java.io.FileOutputStream; +import java.io.FileReader; +import java.io.OutputStreamWriter; + public class SyncUtil { -private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SyncUtil.class); + private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SyncUtil.class); + public static void main(String[] args) throws Exception { FhirContext ctx = FhirContext.forDstu2(); - + String fileName = "src/main/resources/vs/dstu2/all-valuesets-bundle.xml"; FileReader fr = new FileReader(fileName); Bundle b = ctx.newXmlParser().parseResource(Bundle.class, fr); @@ -23,15 +24,12 @@ private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger BaseResource nextRes = (BaseResource) nextEntry.getResource(); nextRes.setText(new NarrativeDt()); } - + File f = new File(fileName); OutputStreamWriter fw = new OutputStreamWriter(new FileOutputStream(f, false), "UTF-8"); ctx.newXmlParser().encodeResourceToWriter(b, fw); fw.close(); - - ourLog.info("Fixed {} valuesets", b.getEntry().size()); - - - } + ourLog.info("Fixed {} valuesets", b.getEntry().size()); + } } diff --git a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/util/XMLUtils.java 
b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/util/XMLUtils.java index ceb6b264c90..e16b511c88e 100644 --- a/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/util/XMLUtils.java +++ b/hapi-tinder-plugin/src/main/java/ca/uhn/fhir/tinder/util/XMLUtils.java @@ -1,11 +1,6 @@ - package ca.uhn.fhir.tinder.util; import ca.uhn.fhir.i18n.Msg; -import java.io.InputStream; -import java.io.StringWriter; -import java.io.Writer; - import org.w3c.dom.DOMConfiguration; import org.w3c.dom.DOMErrorHandler; import org.w3c.dom.DOMImplementation; @@ -18,115 +13,117 @@ import org.w3c.dom.ls.LSParser; import org.w3c.dom.ls.LSResourceResolver; import org.w3c.dom.ls.LSSerializer; +import java.io.InputStream; +import java.io.StringWriter; +import java.io.Writer; + public class XMLUtils { - private static DOMImplementation IMPL; + private static DOMImplementation IMPL; - @SuppressWarnings("unchecked") - public synchronized static T getDOMImpl() { - if (IMPL == null) { - try { - DOMImplementationRegistry registry = DOMImplementationRegistry.newInstance(); - IMPL = registry.getDOMImplementation("LS 3.0"); - } catch (Exception e) { - throw new RuntimeException(Msg.code(149) + e); - } - } - return (T) IMPL; - } + @SuppressWarnings("unchecked") + public static synchronized T getDOMImpl() { + if (IMPL == null) { + try { + DOMImplementationRegistry registry = DOMImplementationRegistry.newInstance(); + IMPL = registry.getDOMImplementation("LS 3.0"); + } catch (Exception e) { + throw new RuntimeException(Msg.code(149) + e); + } + } + return (T) IMPL; + } - @SuppressWarnings("unchecked") - public static T getDOMImplUncached() { - try { - DOMImplementationRegistry registry = DOMImplementationRegistry.newInstance(); - return (T) registry.getDOMImplementation("LS 3.0"); - } catch (Exception e) { - throw new RuntimeException(Msg.code(150) + e); - } - } + @SuppressWarnings("unchecked") + public static T getDOMImplUncached() { + try { + DOMImplementationRegistry registry = DOMImplementationRegistry.newInstance(); + return (T) registry.getDOMImplementation("LS 3.0"); + } catch (Exception e) { + throw new RuntimeException(Msg.code(150) + e); + } + } - public static Document parse(String s) { - return parse(s, false); - } + public static Document parse(String s) { + return parse(s, false); + } - public static Document parse(String s, boolean validateIfSchema) { - DOMImplementationLS impl = getDOMImpl(); - LSInput input = impl.createLSInput(); - input.setStringData(s); - return parse(input, validateIfSchema); - } + public static Document parse(String s, boolean validateIfSchema) { + DOMImplementationLS impl = getDOMImpl(); + LSInput input = impl.createLSInput(); + input.setStringData(s); + return parse(input, validateIfSchema); + } - public static Document parse(InputStream s, boolean validateIfSchema) { - DOMImplementationLS impl = getDOMImpl(); - LSInput input = impl.createLSInput(); - input.setByteStream(s); - return parse(input, validateIfSchema); - } + public static Document parse(InputStream s, boolean validateIfSchema) { + DOMImplementationLS impl = getDOMImpl(); + LSInput input = impl.createLSInput(); + input.setByteStream(s); + return parse(input, validateIfSchema); + } - private static Document parse(LSInput input, boolean validateIfSchema) { - DOMImplementationLS impl = getDOMImpl(); - LSParser parser = impl.createLSParser(DOMImplementationLS.MODE_SYNCHRONOUS, null); - DOMConfiguration config = parser.getDomConfig(); - config.setParameter("element-content-whitespace", false); - config.setParameter("namespaces", 
true); - config.setParameter("validate-if-schema", validateIfSchema); - return parser.parse(input); - } + private static Document parse(LSInput input, boolean validateIfSchema) { + DOMImplementationLS impl = getDOMImpl(); + LSParser parser = impl.createLSParser(DOMImplementationLS.MODE_SYNCHRONOUS, null); + DOMConfiguration config = parser.getDomConfig(); + config.setParameter("element-content-whitespace", false); + config.setParameter("namespaces", true); + config.setParameter("validate-if-schema", validateIfSchema); + return parser.parse(input); + } - public static void validate(Document d, String schema, DOMErrorHandler handler) { - DOMConfiguration config = d.getDomConfig(); - config.setParameter("schema-type", "http://www.w3.org/2001/XMLSchema"); - config.setParameter("validate", true); - config.setParameter("schema-location", schema); - config.setParameter("resource-resolver", new ClasspathResourceResolver()); - config.setParameter("error-handler", handler); - d.normalizeDocument(); - } + public static void validate(Document d, String schema, DOMErrorHandler handler) { + DOMConfiguration config = d.getDomConfig(); + config.setParameter("schema-type", "http://www.w3.org/2001/XMLSchema"); + config.setParameter("validate", true); + config.setParameter("schema-location", schema); + config.setParameter("resource-resolver", new ClasspathResourceResolver()); + config.setParameter("error-handler", handler); + d.normalizeDocument(); + } - public static String serialize(Document document, boolean prettyPrint) { - DOMImplementationLS impl = getDOMImpl(); - LSSerializer serializer = impl.createLSSerializer(); - // document.normalizeDocument(); - DOMConfiguration config = serializer.getDomConfig(); - if (prettyPrint && config.canSetParameter("format-pretty-print", Boolean.TRUE)) { - config.setParameter("format-pretty-print", true); - } - config.setParameter("xml-declaration", true); - LSOutput output = impl.createLSOutput(); - output.setEncoding("UTF-8"); - Writer writer = new StringWriter(); - output.setCharacterStream(writer); - serializer.write(document, output); - return writer.toString(); - } + public static String serialize(Document document, boolean prettyPrint) { + DOMImplementationLS impl = getDOMImpl(); + LSSerializer serializer = impl.createLSSerializer(); + // document.normalizeDocument(); + DOMConfiguration config = serializer.getDomConfig(); + if (prettyPrint && config.canSetParameter("format-pretty-print", Boolean.TRUE)) { + config.setParameter("format-pretty-print", true); + } + config.setParameter("xml-declaration", true); + LSOutput output = impl.createLSOutput(); + output.setEncoding("UTF-8"); + Writer writer = new StringWriter(); + output.setCharacterStream(writer); + serializer.write(document, output); + return writer.toString(); + } - public static Document emptyDocument(String title) { - DOMImplementation impl = getDOMImpl(); - Document doc = impl.createDocument("urn:hl7-org:v2xml", title, null); - return doc; - } + public static Document emptyDocument(String title) { + DOMImplementation impl = getDOMImpl(); + Document doc = impl.createDocument("urn:hl7-org:v2xml", title, null); + return doc; + } - /** - * This is an implementation of LSResourceResolver that can resolve XML schemas from the - * classpath - */ - private static class ClasspathResourceResolver implements LSResourceResolver { - private DOMImplementationLS impl; + /** + * This is an implementation of LSResourceResolver that can resolve XML schemas from the + * classpath + */ + private static class 
ClasspathResourceResolver implements LSResourceResolver { + private DOMImplementationLS impl; - ClasspathResourceResolver() { - impl = getDOMImpl(); - } - - @Override - public LSInput resolveResource(String type, String namespaceURI, String publicId, String systemId, - String baseURI) { - LSInput lsInput = impl.createLSInput(); - InputStream is = getClass().getResourceAsStream("/" + systemId); - if (is == null) - return null; - lsInput.setByteStream(is); - return lsInput; - } - } + ClasspathResourceResolver() { + impl = getDOMImpl(); + } + @Override + public LSInput resolveResource( + String type, String namespaceURI, String publicId, String systemId, String baseURI) { + LSInput lsInput = impl.createLSInput(); + InputStream is = getClass().getResourceAsStream("/" + systemId); + if (is == null) return null; + lsInput.setByteStream(is); + return lsInput; + } + } } diff --git a/hapi-tinder-plugin/src/main/java/org/hl7/fhir/instance/model/IBase.java b/hapi-tinder-plugin/src/main/java/org/hl7/fhir/instance/model/IBase.java index ba982b103e5..97989819402 100644 --- a/hapi-tinder-plugin/src/main/java/org/hl7/fhir/instance/model/IBase.java +++ b/hapi-tinder-plugin/src/main/java/org/hl7/fhir/instance/model/IBase.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,10 +22,9 @@ package org.hl7.fhir.dstu2.model; /** * This interface is a simple marker for anything which is an HL7 * structure of some kind. It is provided mostly to simplify convergence - * between the HL7.org structures and the HAPI ones. + * between the HL7.org structures and the HAPI ones. */ public interface IBase { boolean isEmpty(); - } diff --git a/hapi-tinder-plugin/src/main/java/org/hl7/fhir/instance/model/IBaseResource.java b/hapi-tinder-plugin/src/main/java/org/hl7/fhir/instance/model/IBaseResource.java index e3145028425..4f1c5dff580 100644 --- a/hapi-tinder-plugin/src/main/java/org/hl7/fhir/instance/model/IBaseResource.java +++ b/hapi-tinder-plugin/src/main/java/org/hl7/fhir/instance/model/IBaseResource.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -19,21 +19,19 @@ */ package org.hl7.fhir.dstu2.model; - /** - * For now, this is a simple marker interface indicating that a class is a resource type. + * For now, this is a simple marker interface indicating that a class is a resource type. * There are two concrete types of implementations of this interrface. The first are - * HL7.org's Resource structures (e.g. + * HL7.org's Resource structures (e.g. * org.hl7.fhir.instance.model.Patient) and - * the second are HAPI's Resource structures, e.g. + * the second are HAPI's Resource structures, e.g. 
* ca.uhn.fhir.model.dstu.resource.Patient) */ public interface IBaseResource extends IBase { IIdType getIdElement(); - + IBaseResource setId(String theId); IBaseResource setId(IIdType theId); - } diff --git a/hapi-tinder-plugin/src/main/java/org/hl7/fhir/instance/model/IIdType.java b/hapi-tinder-plugin/src/main/java/org/hl7/fhir/instance/model/IIdType.java index 7949ba6d0a7..7c3546ee072 100644 --- a/hapi-tinder-plugin/src/main/java/org/hl7/fhir/instance/model/IIdType.java +++ b/hapi-tinder-plugin/src/main/java/org/hl7/fhir/instance/model/IIdType.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -19,8 +19,6 @@ */ package org.hl7.fhir.dstu2.model; - - public interface IIdType extends IBase { @Override @@ -33,7 +31,7 @@ public interface IIdType extends IBase { /** * Returns the value of this ID. Note that this value may be a fully qualified URL, a relative/partial URL, or a simple ID. Use {@link #getIdPart()} to get just the ID portion. - * + * * @see #getIdPart() */ String getValue(); @@ -50,12 +48,12 @@ public interface IIdType extends IBase { String getBaseUrl(); /** - * Returns a copy of this ID without the base URL or the version + * Returns a copy of this ID without the base URL or the version */ IIdType toUnqualifiedVersionless(); /** - * Returns a copy of this ID without the version + * Returns a copy of this ID without the version */ IIdType toVersionless(); @@ -80,5 +78,4 @@ public interface IIdType extends IBase { boolean isIdPartValidLong(); Long getIdPartAsLong(); - } diff --git a/pom.xml b/pom.xml index 627d11a1a4b..c8e986c246a 100644 --- a/pom.xml +++ b/pom.xml @@ -2,6 +2,8 @@ + + 4.0.0 ca.uhn.hapi.fhir @@ -894,6 +896,7 @@ 6.0.15 + 2.37.0 1.0.3 -Dfile.encoding=UTF-8 -Xmx2048m @@ -2209,6 +2212,55 @@ + + com.diffplug.spotless + spotless-maven-plugin + ${spotless_version} + + + + apply + check + + compile + + + + + + **/test/**/*.java + + + + + + true + 4 + + + + + + true + ,java|javax,\# + + + + + + + + *.md + .gitignore + *.java + + + + + + + + org.apache.maven.plugins maven-resources-plugin

    * Note that a complete list of RESTful exceptions is available in the Package * Summary. @@ -53,5 +53,4 @@ public class PayloadTooLargeException extends BaseServerResponseException { public PayloadTooLargeException(String theMessage, IBaseOperationOutcome theOperationOutcome) { super(STATUS_CODE, theMessage, theOperationOutcome); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/PreconditionFailedException.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/PreconditionFailedException.java index f240a101e4f..e01a685094a 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/PreconditionFailedException.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/PreconditionFailedException.java @@ -19,15 +19,14 @@ */ package ca.uhn.fhir.rest.server.exceptions; -import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; - import ca.uhn.fhir.rest.annotation.Update; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.util.CoverageIgnore; +import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; /** * Represents an HTTP 412 Precondition Failed response. This exception - * should be thrown for an {@link Update} operation if that operation requires a version to + * should be thrown for an {@link Update} operation if that operation requires a version to * be specified in an HTTP header, and none was. */ @SuppressWarnings("deprecation") @@ -35,15 +34,16 @@ import ca.uhn.fhir.util.CoverageIgnore; public class PreconditionFailedException extends ResourceVersionNotSpecifiedException { @SuppressWarnings("hiding") public static final int STATUS_CODE = Constants.STATUS_HTTP_412_PRECONDITION_FAILED; + private static final long serialVersionUID = 1L; public PreconditionFailedException(String error) { super(STATUS_CODE, error); } - + /** * Constructor - * + * * @param theMessage * The message * @param theOperationOutcome The OperationOutcome resource to return to the client @@ -51,5 +51,4 @@ public class PreconditionFailedException extends ResourceVersionNotSpecifiedExce public PreconditionFailedException(String theMessage, IBaseOperationOutcome theOperationOutcome) { super(STATUS_CODE, theMessage, theOperationOutcome); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/ResourceGoneException.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/ResourceGoneException.java index 1e484a9de60..a90edb17c2f 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/ResourceGoneException.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/ResourceGoneException.java @@ -53,7 +53,9 @@ public class ResourceGoneException extends BaseServerResponseException { */ @Deprecated public ResourceGoneException(Class theClass, BaseIdentifierDt thePatientId) { - super(STATUS_CODE, "Resource of type " + theClass.getSimpleName() + " with ID " + thePatientId + " is gone/deleted"); + super( + STATUS_CODE, + "Resource of type " + theClass.getSimpleName() + " with ID " + thePatientId + " is gone/deleted"); myResourceId = null; } @@ -64,7 +66,9 @@ public class ResourceGoneException extends BaseServerResponseException { * @param theResourceId The ID of the resource that could not be found */ public ResourceGoneException(Class theClass, IIdType theResourceId) { - super(STATUS_CODE, "Resource of type " + theClass.getSimpleName() + " with ID " + theResourceId + " is gone/deleted"); + super( + STATUS_CODE, + "Resource of type " + 
theClass.getSimpleName() + " with ID " + theResourceId + " is gone/deleted"); myResourceId = theResourceId; } @@ -94,5 +98,4 @@ public class ResourceGoneException extends BaseServerResponseException { public void setResourceId(IIdType theResourceId) { myResourceId = theResourceId; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/ResourceNotFoundException.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/ResourceNotFoundException.java index 999aec68926..74487ed43b7 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/ResourceNotFoundException.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/ResourceNotFoundException.java @@ -20,14 +20,13 @@ package ca.uhn.fhir.rest.server.exceptions; import ca.uhn.fhir.i18n.Msg; -import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; -import org.hl7.fhir.instance.model.api.IIdType; - import ca.uhn.fhir.model.api.IResource; import ca.uhn.fhir.model.base.composite.BaseIdentifierDt; import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.util.CoverageIgnore; +import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; +import org.hl7.fhir.instance.model.api.IIdType; /** * Represents an HTTP 404 Resource Not Found response, which means that the request is pointing to a resource that does not exist. @@ -43,7 +42,8 @@ public class ResourceNotFoundException extends BaseServerResponseException { super(STATUS_CODE, createErrorMessage(theClass, theId)); } - public ResourceNotFoundException(Class theClass, IdDt theId, IBaseOperationOutcome theOperationOutcome) { + public ResourceNotFoundException( + Class theClass, IdDt theId, IBaseOperationOutcome theOperationOutcome) { super(STATUS_CODE, createErrorMessage(theClass, theId), theOperationOutcome); } @@ -51,13 +51,14 @@ public class ResourceNotFoundException extends BaseServerResponseException { super(STATUS_CODE, createErrorMessage(theClass, theId)); } - public ResourceNotFoundException(Class theClass, IIdType theId, IBaseOperationOutcome theOperationOutcome) { + public ResourceNotFoundException( + Class theClass, IIdType theId, IBaseOperationOutcome theOperationOutcome) { super(STATUS_CODE, createErrorMessage(theClass, theId), theOperationOutcome); } /** * Constructor - * + * * @param theMessage * The message * @param theOperationOutcome The OperationOutcome resource to return to the client @@ -90,12 +91,11 @@ public class ResourceNotFoundException extends BaseServerResponseException { super(STATUS_CODE, theMessage); } - private static String createErrorMessage(Class theClass, IIdType theId) { + private static String createErrorMessage(Class theClass, IIdType theId) { return Msg.code(970) + "Resource of type " + theClass.getSimpleName() + " with ID " + theId + " is not known"; } private static String createErrorMessage(IIdType theId) { return Msg.code(971) + "Resource " + (theId != null ? 
theId.getValue() : "") + " is not known"; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/ResourceVersionConflictException.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/ResourceVersionConflictException.java index 3661524d186..d2eeeb8068d 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/ResourceVersionConflictException.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/ResourceVersionConflictException.java @@ -19,17 +19,16 @@ */ package ca.uhn.fhir.rest.server.exceptions; -import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; - import ca.uhn.fhir.rest.annotation.Delete; import ca.uhn.fhir.rest.annotation.Update; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.util.CoverageIgnore; +import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; /** - * Represents an HTTP 409 Conflict response. This exception should be - * thrown in methods which accept a version (e.g. {@link Update}, {@link Delete}) - * when the operation fails because of a version conflict as specified in the FHIR specification. + * Represents an HTTP 409 Conflict response. This exception should be + * thrown in methods which accept a version (e.g. {@link Update}, {@link Delete}) + * when the operation fails because of a version conflict as specified in the FHIR specification. */ @CoverageIgnore public class ResourceVersionConflictException extends BaseServerResponseException { @@ -42,10 +41,10 @@ public class ResourceVersionConflictException extends BaseServerResponseExceptio public ResourceVersionConflictException(String error) { super(STATUS_CODE, error); } - + /** * Constructor - * + * * @param theMessage * The message * @param theOperationOutcome The OperationOutcome resource to return to the client @@ -61,8 +60,8 @@ public class ResourceVersionConflictException extends BaseServerResponseExceptio * The message * @param theOperationOutcome The OperationOutcome resource to return to the client */ - public ResourceVersionConflictException(String theMessage, Throwable theCause, IBaseOperationOutcome theOperationOutcome) { + public ResourceVersionConflictException( + String theMessage, Throwable theCause, IBaseOperationOutcome theOperationOutcome) { super(STATUS_CODE, theMessage, theCause, theOperationOutcome); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/ResourceVersionNotSpecifiedException.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/ResourceVersionNotSpecifiedException.java index 42ae113e8a7..6a5840c50bf 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/ResourceVersionNotSpecifiedException.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/ResourceVersionNotSpecifiedException.java @@ -19,10 +19,9 @@ */ package ca.uhn.fhir.rest.server.exceptions; -import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; - import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.util.CoverageIgnore; +import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; /** * @deprecated Use {@link PreconditionFailedException} instead - This exception is @@ -37,10 +36,10 @@ public class ResourceVersionNotSpecifiedException extends BaseServerResponseExce public ResourceVersionNotSpecifiedException(String error) { super(STATUS_CODE, error); } - + /** * Constructor - * + * * @param theMessage * The message * @param theOperationOutcome The OperationOutcome resource to return to the client @@ -52,16 +51,16 @@ 
public class ResourceVersionNotSpecifiedException extends BaseServerResponseExce public ResourceVersionNotSpecifiedException(int theStatusCode, String error) { super(theStatusCode, error); } - + /** * Constructor - * + * * @param theMessage * The message * @param theOperationOutcome The OperationOutcome resource to return to the client */ - public ResourceVersionNotSpecifiedException(int theStatusCode, String theMessage, IBaseOperationOutcome theOperationOutcome) { + public ResourceVersionNotSpecifiedException( + int theStatusCode, String theMessage, IBaseOperationOutcome theOperationOutcome) { super(theStatusCode, theMessage, theOperationOutcome); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/UnclassifiedServerFailureException.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/UnclassifiedServerFailureException.java index 4229e169350..65e2936141a 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/UnclassifiedServerFailureException.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/UnclassifiedServerFailureException.java @@ -19,9 +19,8 @@ */ package ca.uhn.fhir.rest.server.exceptions; -import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; - import ca.uhn.fhir.util.CoverageIgnore; +import org.hl7.fhir.instance.model.api.IBaseOperationOutcome; /** * Exception for use when a response is received or being sent that does not correspond to any other exception type. An HTTP status code must be provided, and will be provided to the caller in the @@ -32,7 +31,7 @@ public class UnclassifiedServerFailureException extends BaseServerResponseExcept /** * Constructor - * + * * @param theStatusCode * The HTTP status code to return (e.g. 404 if you wish to return an HTTP 404 status) * @param theMessage @@ -44,17 +43,17 @@ public class UnclassifiedServerFailureException extends BaseServerResponseExcept /** * Constructor - * + * * @param theStatusCode * The HTTP status code to return (e.g. 404 if you wish to return an HTTP 404 status) * @param theMessage * The message to add to the status line * @param theOperationOutcome The OperationOutcome resource to return to the client */ - public UnclassifiedServerFailureException(int theStatusCode, String theMessage, IBaseOperationOutcome theOperationOutcome) { + public UnclassifiedServerFailureException( + int theStatusCode, String theMessage, IBaseOperationOutcome theOperationOutcome) { super(theStatusCode, theMessage, theOperationOutcome); } private static final long serialVersionUID = 1L; - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/UnprocessableEntityException.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/UnprocessableEntityException.java index b11f7f5f159..814b68b80da 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/UnprocessableEntityException.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/server/exceptions/UnprocessableEntityException.java @@ -51,7 +51,6 @@ public class UnprocessableEntityException extends BaseServerResponseException { super(STATUS_CODE, theMessage, theOperationOutcome); } - /** * Constructor which accepts an {@link IBaseOperationOutcome} resource which will be supplied in the response * @@ -89,5 +88,4 @@ public class UnprocessableEntityException extends BaseServerResponseException { public UnprocessableEntityException(String... 
theMessage) { super(STATUS_CODE, theMessage); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/store/IAuditDataStore.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/store/IAuditDataStore.java index 378b3ac5ffc..fe7722d276e 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/store/IAuditDataStore.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/store/IAuditDataStore.java @@ -21,17 +21,15 @@ package ca.uhn.fhir.store; import ca.uhn.fhir.model.base.resource.BaseSecurityEvent; - /** * This interface provides a way to persist FHIR SecurityEvents to any kind of data store */ public interface IAuditDataStore { - + /** * Take in a SecurityEvent object and handle storing it to a persistent data store (database, JMS, file, etc). * @param auditEvent a FHIR SecurityEvent to be persisted * @throws Exception if there is an error while persisting the data */ public void store(BaseSecurityEvent auditEvent) throws Exception; - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/system/HapiSystemProperties.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/system/HapiSystemProperties.java index 547963963e4..4f3711bb0cf 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/system/HapiSystemProperties.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/system/HapiSystemProperties.java @@ -27,7 +27,9 @@ public final class HapiSystemProperties { /** * This is provided for testing only! Use with caution as this property may change. */ - static final String TEST_SYSTEM_PROP_VALIDATION_RESOURCE_CACHES_MS = "TEST_SYSTEM_PROP_VALIDATION_RESOURCE_CACHES_MS"; + static final String TEST_SYSTEM_PROP_VALIDATION_RESOURCE_CACHES_MS = + "TEST_SYSTEM_PROP_VALIDATION_RESOURCE_CACHES_MS"; + static final String UNIT_TEST_CAPTURE_STACK = "unit_test_capture_stack"; static final String STACKFILTER_PATTERN_PROP = "log.stackfilter.pattern"; static final String HAPI_CLIENT_KEEPRESPONSES = "hapi.client.keepresponses"; @@ -35,8 +37,7 @@ public final class HapiSystemProperties { static final String UNIT_TEST_MODE = "unit_test_mode"; static final long DEFAULT_TEST_SYSTEM_PROP_VALIDATION_RESOURCE_CACHES_MS = 10 * DateUtils.MILLIS_PER_SECOND; - private HapiSystemProperties() { - } + private HapiSystemProperties() {} /** * This property is used by unit tests - do not rely on it in production code @@ -65,7 +66,6 @@ public final class HapiSystemProperties { * It causes logged stack traces to skip a number of packages that are * just noise. */ - public static void setStackFilterPattern(String thePattern) { System.setProperty(STACKFILTER_PATTERN_PROP, thePattern); } @@ -83,7 +83,6 @@ public final class HapiSystemProperties { * Get the validation resource cache expireAfterWrite timeout in milliseconds. If it has not been set, the default * value is 10 seconds. 
*/ - public static long getTestValidationResourceCachesMs() { String property = System.getProperty(TEST_SYSTEM_PROP_VALIDATION_RESOURCE_CACHES_MS); if (property == null) { @@ -109,6 +108,7 @@ public final class HapiSystemProperties { public static void enableUnitTestMode() { System.setProperty(UNIT_TEST_MODE, Boolean.TRUE.toString()); } + public static void disableUnitTestMode() { System.setProperty(UNIT_TEST_MODE, Boolean.FALSE.toString()); } @@ -158,5 +158,4 @@ public final class HapiSystemProperties { public static boolean isSuppressHapiFhirVersionLogEnabled() { return Boolean.parseBoolean(System.getProperty(SUPPRESS_HAPI_FHIR_VERSION_LOG)); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/tls/BaseStoreInfo.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/tls/BaseStoreInfo.java index 51e9befebde..e27c5a08cd2 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/tls/BaseStoreInfo.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/tls/BaseStoreInfo.java @@ -33,16 +33,14 @@ public abstract class BaseStoreInfo { private final KeyStoreType myType; public BaseStoreInfo(String theFilePath, String theStorePass, String theAlias) { - if(theFilePath.startsWith(PathType.RESOURCE.getPrefix())){ + if (theFilePath.startsWith(PathType.RESOURCE.getPrefix())) { myFilePath = theFilePath.substring(PathType.RESOURCE.getPrefix().length()); myPathType = PathType.RESOURCE; - } - else if(theFilePath.startsWith(PathType.FILE.getPrefix())){ + } else if (theFilePath.startsWith(PathType.FILE.getPrefix())) { myFilePath = theFilePath.substring(PathType.FILE.getPrefix().length()); myPathType = PathType.FILE; - } - else { - throw new StoreInfoException(Msg.code(2117)+"Invalid path prefix"); + } else { + throw new StoreInfoException(Msg.code(2117) + "Invalid path prefix"); } myStorePass = toCharArray(theStorePass); @@ -72,12 +70,13 @@ public abstract class BaseStoreInfo { return myPathType; } - protected char[] toCharArray(String theString){ + protected char[] toCharArray(String theString) { return isBlank(theString) ? "".toCharArray() : theString.toCharArray(); } public static class StoreInfoException extends RuntimeException { private static final long serialVersionUID = 1l; + public StoreInfoException(String theMessage) { super(theMessage); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/tls/KeyStoreType.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/tls/KeyStoreType.java index 98260c1fefb..01d8aada5fc 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/tls/KeyStoreType.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/tls/KeyStoreType.java @@ -25,13 +25,12 @@ import java.util.Arrays; import java.util.List; public enum KeyStoreType { - PKCS12("p12", "pfx"), JKS("jks"); private List myFileExtensions; - KeyStoreType(String... theFileExtensions){ + KeyStoreType(String... 
theFileExtensions) { myFileExtensions = Arrays.asList(theFileExtensions); } @@ -40,11 +39,11 @@ public enum KeyStoreType { } public static KeyStoreType fromFileExtension(String theFileExtension) { - for(KeyStoreType type : KeyStoreType.values()){ - if(type.getFileExtensions().contains(theFileExtension.toLowerCase())){ + for (KeyStoreType type : KeyStoreType.values()) { + if (type.getFileExtensions().contains(theFileExtension.toLowerCase())) { return type; } } - throw new IllegalArgumentException(Msg.code(2121)+"Invalid KeyStore Type"); + throw new IllegalArgumentException(Msg.code(2121) + "Invalid KeyStore Type"); } } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/tls/PathType.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/tls/PathType.java index b8c1c2bd347..54d7f63cdb7 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/tls/PathType.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/tls/PathType.java @@ -20,7 +20,6 @@ package ca.uhn.fhir.tls; public enum PathType { - FILE("file://"), RESOURCE("classpath:"); @@ -30,7 +29,7 @@ public enum PathType { myPrefix = thePrefix; } - public String getPrefix(){ + public String getPrefix() { return myPrefix; } } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/tls/TrustStoreInfo.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/tls/TrustStoreInfo.java index 002de3c8b67..c7452c1bbf8 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/tls/TrustStoreInfo.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/tls/TrustStoreInfo.java @@ -24,5 +24,4 @@ public class TrustStoreInfo extends BaseStoreInfo { public TrustStoreInfo(String theFilePath, String theStorePass, String theAlias) { super(theFilePath, theStorePass, theAlias); } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ArrayUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ArrayUtil.java index 648f324045d..ceb2fd0257e 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ArrayUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ArrayUtil.java @@ -38,10 +38,9 @@ public class ArrayUtil { public static Set commaSeparatedListToCleanSet(String theValueAsString) { Set resourceTypes; resourceTypes = Arrays.stream(split(theValueAsString, ",")) - .map(t->trim(t)) - .filter(t->isNotBlank(t)) - .collect(Collectors.toSet()); + .map(t -> trim(t)) + .filter(t -> isNotBlank(t)) + .collect(Collectors.toSet()); return resourceTypes; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/AsyncUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/AsyncUtil.java index 72514dbcb38..b7deb55ade3 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/AsyncUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/AsyncUtil.java @@ -33,8 +33,7 @@ public class AsyncUtil { /** * Non instantiable */ - private AsyncUtil() { - } + private AsyncUtil() {} /** * Calls Thread.sleep and if an InterruptedException occurs, logs a warning but otherwise continues @@ -53,7 +52,8 @@ public class AsyncUtil { } } - public static boolean awaitLatchAndThrowInternalErrorExceptionOnInterrupt(CountDownLatch theInitialCollectionLatch, long theTime, TimeUnit theTimeUnit) { + public static boolean awaitLatchAndThrowInternalErrorExceptionOnInterrupt( + CountDownLatch theInitialCollectionLatch, long theTime, TimeUnit theTimeUnit) { try { return theInitialCollectionLatch.await(theTime, theTimeUnit); } catch (InterruptedException e) { @@ -62,7 +62,8 @@ public class AsyncUtil { } } - public static boolean awaitLatchAndIgnoreInterrupt(CountDownLatch theInitialCollectionLatch, long 
theTime, TimeUnit theTimeUnit) { + public static boolean awaitLatchAndIgnoreInterrupt( + CountDownLatch theInitialCollectionLatch, long theTime, TimeUnit theTimeUnit) { try { return theInitialCollectionLatch.await(theTime, theTimeUnit); } catch (InterruptedException e) { diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/AttachmentUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/AttachmentUtil.java index c0c7f013499..9e7d82dd51d 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/AttachmentUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/AttachmentUtil.java @@ -46,18 +46,15 @@ public class AttachmentUtil { } @SuppressWarnings("unchecked") - private static IPrimitiveType getOrCreateChild(FhirContext theContext, ICompositeType theAttachment, String theChildName, String theChildDatatype) { + private static IPrimitiveType getOrCreateChild( + FhirContext theContext, ICompositeType theAttachment, String theChildName, String theChildDatatype) { BaseRuntimeChildDefinition entryChild = getChild(theContext, theAttachment, theChildName); List entries = entryChild.getAccessor().getValues(theAttachment); - return entries - .stream() - .map(t -> (IPrimitiveType) t) - .findFirst() - .orElseGet(() -> { - IPrimitiveType string = newPrimitive(theContext, theChildDatatype, null); - entryChild.getMutator().setValue(theAttachment, string); - return (IPrimitiveType) string; - }); + return entries.stream().map(t -> (IPrimitiveType) t).findFirst().orElseGet(() -> { + IPrimitiveType string = newPrimitive(theContext, theChildDatatype, null); + entryChild.getMutator().setValue(theAttachment, string); + return (IPrimitiveType) string; + }); } public static void setUrl(FhirContext theContext, ICompositeType theAttachment, String theUrl) { @@ -84,8 +81,8 @@ public class AttachmentUtil { BaseRuntimeChildDefinition entryChild = getChild(theContext, theAttachment, "size"); if (theLength == null) { entryChild.getMutator().setValue(theAttachment, null); - } else if (theContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.R5)){ - entryChild.getMutator().setValue(theAttachment, newPrimitive(theContext, "integer64", (long)theLength)); + } else if (theContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.R5)) { + entryChild.getMutator().setValue(theAttachment, newPrimitive(theContext, "integer64", (long) theLength)); } else { entryChild.getMutator().setValue(theAttachment, newPrimitive(theContext, "unsignedInt", theLength)); } @@ -107,7 +104,8 @@ public class AttachmentUtil { * This is internal API- Use with caution as it may change */ static BaseRuntimeChildDefinition getChild(FhirContext theContext, IBase theElement, String theName) { - BaseRuntimeElementCompositeDefinition def = (BaseRuntimeElementCompositeDefinition) theContext.getElementDefinition(theElement.getClass()); + BaseRuntimeElementCompositeDefinition def = + (BaseRuntimeElementCompositeDefinition) theContext.getElementDefinition(theElement.getClass()); return def.getChildByName(theName); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BinaryUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BinaryUtil.java index 408260f06ff..fd14783e95f 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BinaryUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BinaryUtil.java @@ -50,18 +50,13 @@ public class BinaryUtil { BaseRuntimeChildDefinition entryChild = AttachmentUtil.getChild(theContext, theBinary, elementName); List entries = 
entryChild.getAccessor().getValues(theBinary); - return entries - .stream() - .map(t -> (IPrimitiveType) t) - .findFirst() - .orElseGet(() -> { - IPrimitiveType binary = AttachmentUtil.newPrimitive(theContext, "base64Binary", null); - entryChild.getMutator().setValue(theBinary, binary); - return binary; - }); + return entries.stream().map(t -> (IPrimitiveType) t).findFirst().orElseGet(() -> { + IPrimitiveType binary = AttachmentUtil.newPrimitive(theContext, "base64Binary", null); + entryChild.getMutator().setValue(theBinary, binary); + return binary; + }); } - public static IBaseReference getSecurityContext(FhirContext theCtx, IBaseBinary theBinary) { RuntimeResourceDefinition def = theCtx.getResourceDefinition("Binary"); BaseRuntimeChildDefinition child = def.getChildByName("securityContext"); @@ -96,16 +91,14 @@ public class BinaryUtil { String elementName = "contentType"; BaseRuntimeChildDefinition entryChild = AttachmentUtil.getChild(theCtx, theBinary, elementName); List entries = entryChild.getAccessor().getValues(theBinary); - IPrimitiveType contentTypeElement = entries - .stream() - .map(t -> (IPrimitiveType) t) - .findFirst() - .orElseGet(() -> { - IPrimitiveType stringType = AttachmentUtil.newPrimitive(theCtx, "code", null); - entryChild.getMutator().setValue(theBinary, stringType); - return stringType; - }); + IPrimitiveType contentTypeElement = entries.stream() + .map(t -> (IPrimitiveType) t) + .findFirst() + .orElseGet(() -> { + IPrimitiveType stringType = AttachmentUtil.newPrimitive(theCtx, "code", null); + entryChild.getMutator().setValue(theBinary, stringType); + return stringType; + }); contentTypeElement.setValue(theContentType); - } } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleBuilder.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleBuilder.java index 049194a9661..4c2bd44fb72 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleBuilder.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleBuilder.java @@ -34,10 +34,10 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.Date; import java.util.Objects; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * This class can be used to build a Bundle resource to be used as a FHIR transaction. 
Convenience methods provide @@ -118,7 +118,8 @@ public class BundleBuilder { BaseRuntimeChildDefinition typeChild = myBundleDef.getChildByName(theFieldName); Validate.notNull(typeChild, "Unable to find field %s", theFieldName); - IPrimitiveType type = (IPrimitiveType) typeChild.getChildByName(theFieldName).newInstance(typeChild.getInstanceConstructorArguments()); + IPrimitiveType type = (IPrimitiveType) + typeChild.getChildByName(theFieldName).newInstance(typeChild.getInstanceConstructorArguments()); type.setValueAsString(theFieldValue); typeChild.getMutator().setValue(myBundle, type); return this; @@ -135,7 +136,8 @@ public class BundleBuilder { BaseRuntimeChildDefinition typeChild = mySearchDef.getChildByName(theFieldName); Validate.notNull(typeChild, "Unable to find field %s", theFieldName); - IPrimitiveType type = (IPrimitiveType) typeChild.getChildByName(theFieldName).newInstance(typeChild.getInstanceConstructorArguments()); + IPrimitiveType type = (IPrimitiveType) + typeChild.getChildByName(theFieldName).newInstance(typeChild.getInstanceConstructorArguments()); type.setValueAsString(theFieldValue); typeChild.getMutator().setValue(theSearch, type); return this; @@ -161,7 +163,11 @@ public class BundleBuilder { Validate.notBlank(theTarget.getResourceType(), "theTarget must contain a resource type"); Validate.notBlank(theTarget.getIdPart(), "theTarget must contain an ID"); - IPrimitiveType url = addAndPopulateTransactionBundleEntryRequest(thePatch, theTarget.getValue(), theTarget.toUnqualifiedVersionless().getValue(), "PATCH"); + IPrimitiveType url = addAndPopulateTransactionBundleEntryRequest( + thePatch, + theTarget.getValue(), + theTarget.toUnqualifiedVersionless().getValue(), + "PATCH"); return new PatchBuilder(url); } @@ -206,18 +212,21 @@ public class BundleBuilder { } @Nonnull - private IPrimitiveType addAndPopulateTransactionBundleEntryRequest(IBaseResource theResource, String theFullUrl, String theRequestUrl, String theHttpVerb) { + private IPrimitiveType addAndPopulateTransactionBundleEntryRequest( + IBaseResource theResource, String theFullUrl, String theRequestUrl, String theHttpVerb) { setBundleField("type", "transaction"); IBase request = addEntryAndReturnRequest(theResource, theFullUrl); // Bundle.entry.request.url - IPrimitiveType url = (IPrimitiveType) myContext.getElementDefinition("uri").newInstance(); + IPrimitiveType url = + (IPrimitiveType) myContext.getElementDefinition("uri").newInstance(); url.setValueAsString(theRequestUrl); myEntryRequestUrlChild.getMutator().setValue(request, url); // Bundle.entry.request.method - IPrimitiveType method = (IPrimitiveType) myEntryRequestMethodDef.newInstance(myEntryRequestMethodChild.getInstanceConstructorArguments()); + IPrimitiveType method = (IPrimitiveType) + myEntryRequestMethodDef.newInstance(myEntryRequestMethodChild.getInstanceConstructorArguments()); method.setValueAsString(theHttpVerb); myEntryRequestMethodChild.getMutator().setValue(request, method); return url; @@ -232,17 +241,20 @@ public class BundleBuilder { public CreateBuilder addTransactionCreateEntry(IBaseResource theResource) { setBundleField("type", "transaction"); - IBase request = addEntryAndReturnRequest(theResource, theResource.getIdElement().getValue()); + IBase request = + addEntryAndReturnRequest(theResource, theResource.getIdElement().getValue()); String resourceType = myContext.getResourceType(theResource); // Bundle.entry.request.url - IPrimitiveType url = (IPrimitiveType) myContext.getElementDefinition("uri").newInstance(); + IPrimitiveType url = 
+ (IPrimitiveType) myContext.getElementDefinition("uri").newInstance(); url.setValueAsString(resourceType); myEntryRequestUrlChild.getMutator().setValue(request, url); // Bundle.entry.request.url - IPrimitiveType method = (IPrimitiveType) myEntryRequestMethodDef.newInstance(myEntryRequestMethodChild.getInstanceConstructorArguments()); + IPrimitiveType method = (IPrimitiveType) + myEntryRequestMethodDef.newInstance(myEntryRequestMethodChild.getInstanceConstructorArguments()); method.setValueAsString("POST"); myEntryRequestMethodChild.getMutator().setValue(request, method); @@ -305,7 +317,9 @@ public class BundleBuilder { setBundleField("type", "transaction"); IdDt idDt = new IdDt(theIdPart); - String deleteUrl = idDt.toUnqualifiedVersionless().withResourceType(theResourceType).getValue(); + String deleteUrl = idDt.toUnqualifiedVersionless() + .withResourceType(theResourceType) + .getValue(); return addDeleteEntry(deleteUrl); } @@ -327,19 +341,20 @@ public class BundleBuilder { IBase request = addEntryAndReturnRequest(); // Bundle.entry.request.url - IPrimitiveType url = (IPrimitiveType) myContext.getElementDefinition("uri").newInstance(); + IPrimitiveType url = + (IPrimitiveType) myContext.getElementDefinition("uri").newInstance(); url.setValueAsString(theDeleteUrl); myEntryRequestUrlChild.getMutator().setValue(request, url); // Bundle.entry.request.method - IPrimitiveType method = (IPrimitiveType) myEntryRequestMethodDef.newInstance(myEntryRequestMethodChild.getInstanceConstructorArguments()); + IPrimitiveType method = (IPrimitiveType) + myEntryRequestMethodDef.newInstance(myEntryRequestMethodChild.getInstanceConstructorArguments()); method.setValueAsString("DELETE"); myEntryRequestMethodChild.getMutator().setValue(request, method); return new DeleteBuilder(); } - /** * Adds an entry for a Collection bundle type */ @@ -376,7 +391,9 @@ public class BundleBuilder { * @return Returns the search instance */ public IBaseBackboneElement addSearch(IBase entry) { - Validate.isTrue(myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3), "This method may only be called for FHIR version DSTU3 and above"); + Validate.isTrue( + myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3), + "This method may only be called for FHIR version DSTU3 and above"); IBase searchInstance = mySearchDef.newInstance(); mySearchChild.getMutator().setValue(entry, searchInstance); @@ -389,7 +406,8 @@ public class BundleBuilder { IBase entry = addEntry(); // Bundle.entry.fullUrl - IPrimitiveType fullUrl = (IPrimitiveType) myContext.getElementDefinition("uri").newInstance(); + IPrimitiveType fullUrl = + (IPrimitiveType) myContext.getElementDefinition("uri").newInstance(); fullUrl.setValueAsString(theFullUrl); myEntryFullUrlChild.getMutator().setValue(entry, fullUrl); @@ -409,10 +427,8 @@ public class BundleBuilder { IBase request = myEntryRequestDef.newInstance(); myEntryRequestChild.getMutator().setValue(entry, request); return request; - } - public IBaseBundle getBundle() { return myBundle; } @@ -431,9 +447,12 @@ public class BundleBuilder { * on DSTU3+. 
*/ public BundleBuilder setMetaField(String theFieldName, IBase theFieldValue) { - Validate.isTrue(myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3), "This method may only be called for FHIR version DSTU3 and above"); + Validate.isTrue( + myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3), + "This method may only be called for FHIR version DSTU3 and above"); - BaseRuntimeChildDefinition.IMutator mutator = myMetaDef.getChildByName(theFieldName).getMutator(); + BaseRuntimeChildDefinition.IMutator mutator = + myMetaDef.getChildByName(theFieldName).getMutator(); mutator.setValue(myBundle.getMeta(), theFieldValue); return this; } @@ -460,7 +479,8 @@ public class BundleBuilder { addToBase(theSearch, theSearchFieldName, theSearchFieldValue, mySearchDef); } - private void addToBase(IBase theBase, String theSearchChildName, IBase theValue, BaseRuntimeElementDefinition mySearchDef) { + private void addToBase( + IBase theBase, String theSearchChildName, IBase theValue, BaseRuntimeElementDefinition mySearchDef) { BaseRuntimeChildDefinition defn = mySearchDef.getChildByName(theSearchChildName); Validate.notNull(defn, "Unable to get child definition %s from %s", theSearchChildName, theBase); defn.getMutator().addValue(theBase, theValue); @@ -526,27 +546,23 @@ public class BundleBuilder { terser.setElement(myBundle, "Bundle.timestamp", theTimestamp.getValueAsString()); } - public class DeleteBuilder extends BaseOperationBuilder { // nothing yet } - public class PatchBuilder extends BaseOperationBuilderWithConditionalUrl { PatchBuilder(IPrimitiveType theUrl) { super(theUrl); } - } public class UpdateBuilder extends BaseOperationBuilderWithConditionalUrl { UpdateBuilder(IPrimitiveType theUrl) { super(theUrl); } - } public class CreateBuilder extends BaseOperationBuilder { @@ -560,7 +576,8 @@ public class BundleBuilder { * Make this create a Conditional Create */ public CreateBuilder conditional(String theConditionalUrl) { - BaseRuntimeElementDefinition stringDefinition = Objects.requireNonNull(myContext.getElementDefinition("string")); + BaseRuntimeElementDefinition stringDefinition = + Objects.requireNonNull(myContext.getElementDefinition("string")); IPrimitiveType ifNoneExist = (IPrimitiveType) stringDefinition.newInstance(); ifNoneExist.setValueAsString(theConditionalUrl); @@ -568,7 +585,6 @@ public class BundleBuilder { return this; } - } public abstract class BaseOperationBuilder { @@ -585,11 +601,10 @@ public class BundleBuilder { public BundleBuilder andThen() { return BundleBuilder.this; } - - } - public abstract class BaseOperationBuilderWithConditionalUrl extends BaseOperationBuilder { + public abstract class BaseOperationBuilderWithConditionalUrl + extends BaseOperationBuilder { private final IPrimitiveType myUrl; @@ -605,6 +620,5 @@ public class BundleBuilder { myUrl.setValueAsString(theConditionalUrl); return (T) this; } - } } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleUtil.java index b27c1155818..2ed1a3b47af 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/BundleUtil.java @@ -67,8 +67,8 @@ public class BundleUtil { private static final String PREV = "prev"; private static final Set previousOrPrev = Sets.newHashSet(PREVIOUS, PREV); - public static final String DIFFERENT_LINK_ERROR_MSG = "Mismatching 'previous' and 'prev' links exist. 
'previous' " + - "is: '$PREVIOUS' and 'prev' is: '$PREV'."; + public static final String DIFFERENT_LINK_ERROR_MSG = + "Mismatching 'previous' and 'prev' links exist. 'previous' " + "is: '$PREVIOUS' and 'prev' is: '$PREV'."; /** * @return Returns null if the link isn't found or has no value @@ -77,19 +77,22 @@ public class BundleUtil { return getLinkUrlOfType(theContext, theBundle, theLinkRelation, true); } - private static String getLinkUrlOfType(FhirContext theContext, IBaseBundle theBundle, String theLinkRelation, boolean isPreviousCheck) { + private static String getLinkUrlOfType( + FhirContext theContext, IBaseBundle theBundle, String theLinkRelation, boolean isPreviousCheck) { RuntimeResourceDefinition def = theContext.getResourceDefinition(theBundle); BaseRuntimeChildDefinition entryChild = def.getChildByName("link"); List links = entryChild.getAccessor().getValues(theBundle); for (IBase nextLink : links) { boolean isRightRel = false; - BaseRuntimeElementCompositeDefinition relDef = (BaseRuntimeElementCompositeDefinition) theContext.getElementDefinition(nextLink.getClass()); + BaseRuntimeElementCompositeDefinition relDef = + (BaseRuntimeElementCompositeDefinition) theContext.getElementDefinition(nextLink.getClass()); BaseRuntimeChildDefinition relChild = relDef.getChildByName("relation"); List relValues = relChild.getAccessor().getValues(nextLink); for (IBase next : relValues) { IPrimitiveType nextValue = (IPrimitiveType) next; - if (isRelationMatch(theContext, theBundle,theLinkRelation, nextValue.getValueAsString(), isPreviousCheck)) { + if (isRelationMatch( + theContext, theBundle, theLinkRelation, nextValue.getValueAsString(), isPreviousCheck)) { isRightRel = true; } } @@ -98,7 +101,8 @@ public class BundleUtil { continue; } - BaseRuntimeElementCompositeDefinition linkDef = (BaseRuntimeElementCompositeDefinition) theContext.getElementDefinition(nextLink.getClass()); + BaseRuntimeElementCompositeDefinition linkDef = + (BaseRuntimeElementCompositeDefinition) theContext.getElementDefinition(nextLink.getClass()); BaseRuntimeChildDefinition urlChild = linkDef.getChildByName("url"); List values = urlChild.getAccessor().getValues(nextLink); for (IBase nextUrl : values) { @@ -112,14 +116,15 @@ public class BundleUtil { return null; } - private static boolean isRelationMatch(FhirContext theContext, IBaseBundle theBundle, String value, String matching, boolean theIsPreviousCheck) { - if ( ! theIsPreviousCheck) { + private static boolean isRelationMatch( + FhirContext theContext, IBaseBundle theBundle, String value, String matching, boolean theIsPreviousCheck) { + if (!theIsPreviousCheck) { return value.equals(matching); } - if ( previousOrPrev.contains(value) ) { + if (previousOrPrev.contains(value)) { validateUniqueOrMatchingPreviousValues(theContext, theBundle); - if ( previousOrPrev.contains(matching) ) { + if (previousOrPrev.contains(matching)) { return true; } } @@ -130,8 +135,10 @@ public class BundleUtil { String previousLink = getLinkNoCheck(theContext, theBundle, PREVIOUS); String prevLink = getLinkNoCheck(theContext, theBundle, PREV); if (prevLink != null && previousLink != null) { - if ( ! 
previousLink.equals(prevLink)) { - String msg = DIFFERENT_LINK_ERROR_MSG.replace("$PREVIOUS", previousLink).replace("$PREV", prevLink); + if (!previousLink.equals(prevLink)) { + String msg = DIFFERENT_LINK_ERROR_MSG + .replace("$PREVIOUS", previousLink) + .replace("$PREV", prevLink); throw new InternalErrorException(Msg.code(2368) + msg); } } @@ -142,16 +149,19 @@ public class BundleUtil { } @SuppressWarnings("unchecked") - public static List> getBundleEntryUrlsAndResources(FhirContext theContext, IBaseBundle theBundle) { + public static List> getBundleEntryUrlsAndResources( + FhirContext theContext, IBaseBundle theBundle) { RuntimeResourceDefinition def = theContext.getResourceDefinition(theBundle); BaseRuntimeChildDefinition entryChild = def.getChildByName("entry"); List entries = entryChild.getAccessor().getValues(theBundle); - BaseRuntimeElementCompositeDefinition entryChildElem = (BaseRuntimeElementCompositeDefinition) entryChild.getChildByName("entry"); + BaseRuntimeElementCompositeDefinition entryChildElem = + (BaseRuntimeElementCompositeDefinition) entryChild.getChildByName("entry"); BaseRuntimeChildDefinition resourceChild = entryChildElem.getChildByName("resource"); BaseRuntimeChildDefinition requestChild = entryChildElem.getChildByName("request"); - BaseRuntimeElementCompositeDefinition requestDef = (BaseRuntimeElementCompositeDefinition) requestChild.getChildByName("request"); + BaseRuntimeElementCompositeDefinition requestDef = + (BaseRuntimeElementCompositeDefinition) requestChild.getChildByName("request"); BaseRuntimeChildDefinition urlChild = requestDef.getChildByName("url"); @@ -193,7 +203,8 @@ public class BundleUtil { RuntimeResourceDefinition def = theContext.getResourceDefinition(theBundle); BaseRuntimeChildDefinition entryChild = def.getChildByName("type"); BaseRuntimeElementDefinition element = entryChild.getChildByName("type"); - IPrimitiveType typeInstance = (IPrimitiveType) element.newInstance(entryChild.getInstanceConstructorArguments()); + IPrimitiveType typeInstance = + (IPrimitiveType) element.newInstance(entryChild.getInstanceConstructorArguments()); typeInstance.setValueAsString(theType); entryChild.getMutator().setValue(theBundle, typeInstance); @@ -217,7 +228,8 @@ public class BundleUtil { RuntimeResourceDefinition def = theContext.getResourceDefinition(theBundle); BaseRuntimeChildDefinition entryChild = def.getChildByName("total"); @SuppressWarnings("unchecked") - IPrimitiveType value = (IPrimitiveType) entryChild.getChildByName("total").newInstance(); + IPrimitiveType value = + (IPrimitiveType) entryChild.getChildByName("total").newInstance(); value.setValue(theTotal); entryChild.getMutator().setValue(theBundle, value); } @@ -251,45 +263,55 @@ public class BundleUtil { * @param theContext The FhirContext. * @param theBundle The {@link IBaseBundle} which contains the entries you would like sorted into processing order. */ - public static void sortEntriesIntoProcessingOrder(FhirContext theContext, IBaseBundle theBundle) throws IllegalStateException { + public static void sortEntriesIntoProcessingOrder(FhirContext theContext, IBaseBundle theBundle) + throws IllegalStateException { Map partsToIBaseMap = getPartsToIBaseMap(theContext, theBundle); - //Get all deletions. - LinkedHashSet deleteParts = sortEntriesOfTypeIntoProcessingOrder(theContext, RequestTypeEnum.DELETE, partsToIBaseMap); + // Get all deletions. 
+ LinkedHashSet deleteParts = + sortEntriesOfTypeIntoProcessingOrder(theContext, RequestTypeEnum.DELETE, partsToIBaseMap); validatePartsNotNull(deleteParts); LinkedHashSet retVal = new LinkedHashSet<>(deleteParts); - //Get all Creations - LinkedHashSet createParts= sortEntriesOfTypeIntoProcessingOrder(theContext, RequestTypeEnum.POST, partsToIBaseMap); + // Get all Creations + LinkedHashSet createParts = + sortEntriesOfTypeIntoProcessingOrder(theContext, RequestTypeEnum.POST, partsToIBaseMap); validatePartsNotNull(createParts); retVal.addAll(createParts); // Get all Updates - LinkedHashSet updateParts= sortEntriesOfTypeIntoProcessingOrder(theContext, RequestTypeEnum.PUT, partsToIBaseMap); + LinkedHashSet updateParts = + sortEntriesOfTypeIntoProcessingOrder(theContext, RequestTypeEnum.PUT, partsToIBaseMap); validatePartsNotNull(updateParts); retVal.addAll(updateParts); - //Once we are done adding all DELETE, POST, PUT operations, add everything else. - //Since this is a set, it will just fail to add already-added operations. + // Once we are done adding all DELETE, POST, PUT operations, add everything else. + // Since this is a set, it will just fail to add already-added operations. retVal.addAll(partsToIBaseMap.values()); - //Blow away the entries and reset them in the right order. + // Blow away the entries and reset them in the right order. TerserUtil.clearField(theContext, theBundle, "entry"); TerserUtil.setField(theContext, "entry", theBundle, retVal.toArray(new IBase[0])); } private static void validatePartsNotNull(LinkedHashSet theDeleteParts) { if (theDeleteParts == null) { - throw new IllegalStateException(Msg.code(1745) + "This transaction contains a cycle, so it cannot be sorted."); + throw new IllegalStateException( + Msg.code(1745) + "This transaction contains a cycle, so it cannot be sorted."); } } - private static LinkedHashSet sortEntriesOfTypeIntoProcessingOrder(FhirContext theContext, RequestTypeEnum theRequestTypeEnum, Map thePartsToIBaseMap) { + private static LinkedHashSet sortEntriesOfTypeIntoProcessingOrder( + FhirContext theContext, + RequestTypeEnum theRequestTypeEnum, + Map thePartsToIBaseMap) { SortLegality legality = new SortLegality(); HashMap color = new HashMap<>(); HashMap> adjList = new HashMap<>(); List topologicalOrder = new ArrayList<>(); - Set bundleEntryParts = thePartsToIBaseMap.keySet().stream().filter(part -> part.getRequestType().equals(theRequestTypeEnum)).collect(Collectors.toSet()); + Set bundleEntryParts = thePartsToIBaseMap.keySet().stream() + .filter(part -> part.getRequestType().equals(theRequestTypeEnum)) + .collect(Collectors.toSet()); HashMap resourceIdToBundleEntryMap = new HashMap<>(); for (BundleEntryParts bundleEntryPart : bundleEntryParts) { @@ -317,22 +339,25 @@ public class BundleUtil { resourceId = bundleEntryPart.getFullUrl(); } } - List allResourceReferences = theContext.newTerser().getAllResourceReferences(resource); + List allResourceReferences = + theContext.newTerser().getAllResourceReferences(resource); String finalResourceId = resourceId; - allResourceReferences - .forEach(refInfo -> { - String referencedResourceId = refInfo.getResourceReference().getReferenceElement().toVersionless().getValue(); - if (color.containsKey(referencedResourceId)) { - if (!adjList.containsKey(finalResourceId)) { - adjList.put(finalResourceId, new ArrayList<>()); - } - adjList.get(finalResourceId).add(referencedResourceId); + allResourceReferences.forEach(refInfo -> { + String referencedResourceId = refInfo.getResourceReference() + 
.getReferenceElement() + .toVersionless() + .getValue(); + if (color.containsKey(referencedResourceId)) { + if (!adjList.containsKey(finalResourceId)) { + adjList.put(finalResourceId, new ArrayList<>()); } - }); + adjList.get(finalResourceId).add(referencedResourceId); + } + }); } } - for (Map.Entry entry:color.entrySet()) { + for (Map.Entry entry : color.entrySet()) { if (entry.getValue() == WHITE) { depthFirstSearch(entry.getKey(), color, adjList, topologicalOrder, legality); } @@ -363,25 +388,31 @@ public class BundleUtil { } } - private static void depthFirstSearch(String theResourceId, HashMap theResourceIdToColor, HashMap> theAdjList, List theTopologicalOrder, SortLegality theLegality) { + private static void depthFirstSearch( + String theResourceId, + HashMap theResourceIdToColor, + HashMap> theAdjList, + List theTopologicalOrder, + SortLegality theLegality) { if (!theLegality.isLegal()) { ourLog.debug("Found a cycle while trying to sort bundle entries. This bundle is not sortable."); return; } - //We are currently recursing over this node (gray) + // We are currently recursing over this node (gray) theResourceIdToColor.put(theResourceId, GRAY); - for (String neighbourResourceId: theAdjList.getOrDefault(theResourceId, new ArrayList<>())) { + for (String neighbourResourceId : theAdjList.getOrDefault(theResourceId, new ArrayList<>())) { if (theResourceIdToColor.get(neighbourResourceId) == WHITE) { - depthFirstSearch(neighbourResourceId, theResourceIdToColor, theAdjList, theTopologicalOrder, theLegality); + depthFirstSearch( + neighbourResourceId, theResourceIdToColor, theAdjList, theTopologicalOrder, theLegality); } else if (theResourceIdToColor.get(neighbourResourceId) == GRAY) { theLegality.setLegal(false); return; } } - //Mark the node as black + // Mark the node as black theResourceIdToColor.put(theResourceId, BLACK); theTopologicalOrder.add(theResourceId); } @@ -391,17 +422,26 @@ public class BundleUtil { BaseRuntimeChildDefinition entryChildDef = bundleDef.getChildByName("entry"); List entries = entryChildDef.getAccessor().getValues(theBundle); - BaseRuntimeElementCompositeDefinition entryChildContentsDef = (BaseRuntimeElementCompositeDefinition) entryChildDef.getChildByName("entry"); + BaseRuntimeElementCompositeDefinition entryChildContentsDef = + (BaseRuntimeElementCompositeDefinition) entryChildDef.getChildByName("entry"); BaseRuntimeChildDefinition fullUrlChildDef = entryChildContentsDef.getChildByName("fullUrl"); BaseRuntimeChildDefinition resourceChildDef = entryChildContentsDef.getChildByName("resource"); BaseRuntimeChildDefinition requestChildDef = entryChildContentsDef.getChildByName("request"); - BaseRuntimeElementCompositeDefinition requestChildContentsDef = (BaseRuntimeElementCompositeDefinition) requestChildDef.getChildByName("request"); + BaseRuntimeElementCompositeDefinition requestChildContentsDef = + (BaseRuntimeElementCompositeDefinition) requestChildDef.getChildByName("request"); BaseRuntimeChildDefinition requestUrlChildDef = requestChildContentsDef.getChildByName("url"); BaseRuntimeChildDefinition requestIfNoneExistChildDef = requestChildContentsDef.getChildByName("ifNoneExist"); BaseRuntimeChildDefinition methodChildDef = requestChildContentsDef.getChildByName("method"); Map map = new HashMap<>(); for (IBase nextEntry : entries) { - BundleEntryParts parts = getBundleEntryParts(fullUrlChildDef, resourceChildDef, requestChildDef, requestUrlChildDef, requestIfNoneExistChildDef, methodChildDef, nextEntry); + BundleEntryParts parts = getBundleEntryParts( + 
fullUrlChildDef, + resourceChildDef, + requestChildDef, + requestUrlChildDef, + requestIfNoneExistChildDef, + methodChildDef, + nextEntry); /* * All 3 might be null - That's ok because we still want to know the * order in the original bundle. @@ -411,37 +451,44 @@ public class BundleUtil { return map; } - - public static List getSearchBundleEntryParts(FhirContext theContext, IBaseBundle theBundle) { + public static List getSearchBundleEntryParts( + FhirContext theContext, IBaseBundle theBundle) { RuntimeResourceDefinition bundleDef = theContext.getResourceDefinition(theBundle); BaseRuntimeChildDefinition entryChildDef = bundleDef.getChildByName("entry"); List entries = entryChildDef.getAccessor().getValues(theBundle); - BaseRuntimeElementCompositeDefinition entryChildContentsDef = (BaseRuntimeElementCompositeDefinition) entryChildDef.getChildByName("entry"); + BaseRuntimeElementCompositeDefinition entryChildContentsDef = + (BaseRuntimeElementCompositeDefinition) entryChildDef.getChildByName("entry"); BaseRuntimeChildDefinition fullUrlChildDef = entryChildContentsDef.getChildByName("fullUrl"); BaseRuntimeChildDefinition resourceChildDef = entryChildContentsDef.getChildByName("resource"); BaseRuntimeChildDefinition searchChildDef = entryChildContentsDef.getChildByName("search"); - BaseRuntimeElementCompositeDefinition searchChildContentsDef = (BaseRuntimeElementCompositeDefinition) searchChildDef.getChildByName("search"); + BaseRuntimeElementCompositeDefinition searchChildContentsDef = + (BaseRuntimeElementCompositeDefinition) searchChildDef.getChildByName("search"); BaseRuntimeChildDefinition searchModeChildDef = searchChildContentsDef.getChildByName("mode"); List retVal = new ArrayList<>(); for (IBase nextEntry : entries) { - SearchBundleEntryParts parts = getSearchBundleEntryParts(fullUrlChildDef, resourceChildDef, searchChildDef, searchModeChildDef, nextEntry); + SearchBundleEntryParts parts = getSearchBundleEntryParts( + fullUrlChildDef, resourceChildDef, searchChildDef, searchModeChildDef, nextEntry); retVal.add(parts); } return retVal; - } - private static SearchBundleEntryParts getSearchBundleEntryParts( BaseRuntimeChildDefinition fullUrlChildDef, BaseRuntimeChildDefinition resourceChildDef, BaseRuntimeChildDefinition searchChildDef, BaseRuntimeChildDefinition searchModeChildDef, IBase entry) { + private static SearchBundleEntryParts getSearchBundleEntryParts( + BaseRuntimeChildDefinition fullUrlChildDef, + BaseRuntimeChildDefinition resourceChildDef, + BaseRuntimeChildDefinition searchChildDef, + BaseRuntimeChildDefinition searchModeChildDef, + IBase entry) { IBaseResource resource = null; String matchMode = null; String fullUrl = fullUrlChildDef - .getAccessor() - .getFirstValueOrNull(entry) - .map(t->((IPrimitiveType)t).getValueAsString()) - .orElse(null); + .getAccessor() + .getFirstValueOrNull(entry) + .map(t -> ((IPrimitiveType) t).getValueAsString()) + .orElse(null); for (IBase nextResource : resourceChildDef.getAccessor().getValues(entry)) { resource = (IBaseResource) nextResource; @@ -462,42 +509,60 @@ public class BundleUtil { * @param theBundle The bundle to have its entries processed. * @param theProcessor a {@link Consumer} which will operate on all the entries of a bundle. 
*/ - public static void processEntries(FhirContext theContext, IBaseBundle theBundle, Consumer theProcessor) { + public static void processEntries( + FhirContext theContext, IBaseBundle theBundle, Consumer theProcessor) { RuntimeResourceDefinition bundleDef = theContext.getResourceDefinition(theBundle); BaseRuntimeChildDefinition entryChildDef = bundleDef.getChildByName("entry"); List entries = entryChildDef.getAccessor().getValues(theBundle); - BaseRuntimeElementCompositeDefinition entryChildContentsDef = (BaseRuntimeElementCompositeDefinition) entryChildDef.getChildByName("entry"); + BaseRuntimeElementCompositeDefinition entryChildContentsDef = + (BaseRuntimeElementCompositeDefinition) entryChildDef.getChildByName("entry"); BaseRuntimeChildDefinition fullUrlChildDef = entryChildContentsDef.getChildByName("fullUrl"); BaseRuntimeChildDefinition resourceChildDef = entryChildContentsDef.getChildByName("resource"); BaseRuntimeChildDefinition requestChildDef = entryChildContentsDef.getChildByName("request"); - BaseRuntimeElementCompositeDefinition requestChildContentsDef = (BaseRuntimeElementCompositeDefinition) requestChildDef.getChildByName("request"); + BaseRuntimeElementCompositeDefinition requestChildContentsDef = + (BaseRuntimeElementCompositeDefinition) requestChildDef.getChildByName("request"); BaseRuntimeChildDefinition requestUrlChildDef = requestChildContentsDef.getChildByName("url"); BaseRuntimeChildDefinition requestIfNoneExistChildDef = requestChildContentsDef.getChildByName("ifNoneExist"); BaseRuntimeChildDefinition methodChildDef = requestChildContentsDef.getChildByName("method"); for (IBase nextEntry : entries) { - BundleEntryParts parts = getBundleEntryParts(fullUrlChildDef, resourceChildDef, requestChildDef, requestUrlChildDef, requestIfNoneExistChildDef, methodChildDef, nextEntry); + BundleEntryParts parts = getBundleEntryParts( + fullUrlChildDef, + resourceChildDef, + requestChildDef, + requestUrlChildDef, + requestIfNoneExistChildDef, + methodChildDef, + nextEntry); /* * All 3 might be null - That's ok because we still want to know the * order in the original bundle. 
*/ - BundleEntryMutator mutator = new BundleEntryMutator(theContext, nextEntry, requestChildDef, requestChildContentsDef, entryChildContentsDef); + BundleEntryMutator mutator = new BundleEntryMutator( + theContext, nextEntry, requestChildDef, requestChildContentsDef, entryChildContentsDef); ModifiableBundleEntry entry = new ModifiableBundleEntry(parts, mutator); theProcessor.accept(entry); } } - private static BundleEntryParts getBundleEntryParts(BaseRuntimeChildDefinition fullUrlChildDef, BaseRuntimeChildDefinition resourceChildDef, BaseRuntimeChildDefinition requestChildDef, BaseRuntimeChildDefinition requestUrlChildDef, BaseRuntimeChildDefinition requestIfNoneExistChildDef, BaseRuntimeChildDefinition methodChildDef, IBase nextEntry) { + private static BundleEntryParts getBundleEntryParts( + BaseRuntimeChildDefinition fullUrlChildDef, + BaseRuntimeChildDefinition resourceChildDef, + BaseRuntimeChildDefinition requestChildDef, + BaseRuntimeChildDefinition requestUrlChildDef, + BaseRuntimeChildDefinition requestIfNoneExistChildDef, + BaseRuntimeChildDefinition methodChildDef, + IBase nextEntry) { IBaseResource resource = null; String url = null; RequestTypeEnum requestType = null; String conditionalUrl = null; String fullUrl = fullUrlChildDef - .getAccessor() - .getFirstValueOrNull(nextEntry) - .map(t->((IPrimitiveType)t).getValueAsString()) - .orElse(null); + .getAccessor() + .getFirstValueOrNull(nextEntry) + .map(t -> ((IPrimitiveType) t).getValueAsString()) + .orElse(null); for (IBase nextResource : resourceChildDef.getAccessor().getValues(nextEntry)) { resource = (IBaseResource) nextResource; @@ -520,7 +585,8 @@ public class BundleUtil { conditionalUrl = url != null && url.contains("?") ? url : null; break; case POST: - List ifNoneExistReps = requestIfNoneExistChildDef.getAccessor().getValues(nextRequest); + List ifNoneExistReps = + requestIfNoneExistChildDef.getAccessor().getValues(nextRequest); if (ifNoneExistReps.size() > 0) { IPrimitiveType ifNoneExist = (IPrimitiveType) ifNoneExistReps.get(0); conditionalUrl = ifNoneExist.getValueAsString(); @@ -544,15 +610,16 @@ public class BundleUtil { */ public static List toListOfResourceIds(FhirContext theContext, IBaseBundle theBundle) { return toListOfResourcesOfType(theContext, theBundle, IBaseResource.class).stream() - .map(resource -> resource.getIdElement().getIdPart()) - .collect(Collectors.toList()); + .map(resource -> resource.getIdElement().getIdPart()) + .collect(Collectors.toList()); } /** * Extract all of the resources of a given type from a given bundle */ @SuppressWarnings("unchecked") - public static List toListOfResourcesOfType(FhirContext theContext, IBaseBundle theBundle, Class theTypeToInclude) { + public static List toListOfResourcesOfType( + FhirContext theContext, IBaseBundle theBundle, Class theTypeToInclude) { Objects.requireNonNull(theTypeToInclude, "ResourceType must not be null"); List retVal = new ArrayList<>(); @@ -560,7 +627,8 @@ public class BundleUtil { BaseRuntimeChildDefinition entryChild = def.getChildByName("entry"); List entries = entryChild.getAccessor().getValues(theBundle); - BaseRuntimeElementCompositeDefinition entryChildElem = (BaseRuntimeElementCompositeDefinition) entryChild.getChildByName("entry"); + BaseRuntimeElementCompositeDefinition entryChildElem = + (BaseRuntimeElementCompositeDefinition) entryChild.getChildByName("entry"); BaseRuntimeChildDefinition resourceChild = entryChildElem.getChildByName("resource"); for (IBase nextEntry : entries) { for (IBase next : 
resourceChild.getAccessor().getValues(nextEntry)) { @@ -593,7 +661,6 @@ public class BundleUtil { return isPatch; } - /** * create a new bundle entry and set a value for a single field * @param theContext Context holding resource definition @@ -601,18 +668,24 @@ public class BundleUtil { * @param theValues The values to set on the bundle entry child field name * @return the new bundle entry */ - public static IBase createNewBundleEntryWithSingleField(FhirContext theContext, String theFieldName, IBase... theValues) { + public static IBase createNewBundleEntryWithSingleField( + FhirContext theContext, String theFieldName, IBase... theValues) { IBaseBundle newBundle = TerserUtil.newResource(theContext, "Bundle"); - BaseRuntimeChildDefinition entryChildDef = theContext.getResourceDefinition(newBundle).getChildByName("entry"); + BaseRuntimeChildDefinition entryChildDef = + theContext.getResourceDefinition(newBundle).getChildByName("entry"); - BaseRuntimeElementCompositeDefinition entryChildElem = (BaseRuntimeElementCompositeDefinition) entryChildDef.getChildByName("entry"); + BaseRuntimeElementCompositeDefinition entryChildElem = + (BaseRuntimeElementCompositeDefinition) entryChildDef.getChildByName("entry"); BaseRuntimeChildDefinition resourceChild = entryChildElem.getChildByName(theFieldName); IBase bundleEntry = entryChildElem.newInstance(); for (IBase value : theValues) { try { resourceChild.getMutator().addValue(bundleEntry, value); } catch (UnsupportedOperationException e) { - ourLog.warn("Resource {} does not support multiple values, but an attempt to set {} was made. Setting the first item only", bundleEntry, theValues); + ourLog.warn( + "Resource {} does not support multiple values, but an attempt to set {} was made. Setting the first item only", + bundleEntry, + theValues); resourceChild.getMutator().setValue(bundleEntry, value); break; } @@ -626,6 +699,7 @@ public class BundleUtil { SortLegality() { this.myIsLegal = true; } + private void setLegal(boolean theLegal) { myIsLegal = theLegal; } @@ -634,5 +708,4 @@ public class BundleUtil { return myIsLegal; } } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ClasspathUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ClasspathUtil.java index 27682182577..f22e114484e 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ClasspathUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ClasspathUtil.java @@ -30,7 +30,6 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nonnull; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; @@ -38,6 +37,7 @@ import java.io.Reader; import java.nio.charset.StandardCharsets; import java.util.function.Function; import java.util.zip.GZIPInputStream; +import javax.annotation.Nonnull; /** * Use this API with caution, it may change! 
@@ -123,7 +123,8 @@ public class ClasspathUtil { * @since 6.4.0 */ @Nonnull - public static T loadCompressedResource(FhirContext theCtx, Class theType, String theClasspath) { + public static T loadCompressedResource( + FhirContext theCtx, Class theType, String theClasspath) { String resource = loadCompressedResource(theClasspath); return parseResource(theCtx, theType, resource); } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/CollectionUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/CollectionUtil.java index b4192603ce1..26604a4f911 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/CollectionUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/CollectionUtil.java @@ -32,5 +32,4 @@ public class CollectionUtil { } return retVal; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/CompositionBuilder.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/CompositionBuilder.java index f5b7ee2f4ec..2cd02c96e09 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/CompositionBuilder.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/CompositionBuilder.java @@ -28,8 +28,8 @@ import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nonnull; import java.util.Date; +import javax.annotation.Nonnull; /** * This class can be used to generate Composition resources in @@ -71,7 +71,6 @@ public class CompositionBuilder { myTerser.setElement(myComposition, "Composition.status", theStatusCode); } - /** * Set a value in Composition.subject */ @@ -167,7 +166,4 @@ public class CompositionBuilder { myTerser.setElement(text, "div", theDivHtml); } } - - } - diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/CountingAndLimitingInputStream.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/CountingAndLimitingInputStream.java index 216b2aeb51a..71ed1b009e0 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/CountingAndLimitingInputStream.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/CountingAndLimitingInputStream.java @@ -56,7 +56,6 @@ public class CountingAndLimitingInputStream extends InputStream { } } - /** * Wraps another input stream, counting the number of bytes read. * diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/DateRangeUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/DateRangeUtil.java index e4f717439c7..6e488c39a2d 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/DateRangeUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/DateRangeUtil.java @@ -21,9 +21,9 @@ package ca.uhn.fhir.util; import ca.uhn.fhir.rest.param.DateRangeParam; +import java.util.Date; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.Date; public class DateRangeUtil { @@ -36,11 +36,15 @@ public class DateRangeUtil { * @return a DateRange within the original range, and between theStartInclusive and theEnd */ @Nonnull - public static DateRangeParam narrowDateRange(@Nullable DateRangeParam theDateRangeParam, @Nullable Date theStartInclusive, @Nullable Date theEndExclusive) { + public static DateRangeParam narrowDateRange( + @Nullable DateRangeParam theDateRangeParam, + @Nullable Date theStartInclusive, + @Nullable Date theEndExclusive) { if (theStartInclusive == null && theEndExclusive == null) { return theDateRangeParam; } - DateRangeParam result = theDateRangeParam == null ? 
new DateRangeParam() : new DateRangeParam(theDateRangeParam); + DateRangeParam result = + theDateRangeParam == null ? new DateRangeParam() : new DateRangeParam(theDateRangeParam); Date startInclusive = theStartInclusive; if (startInclusive != null) { @@ -56,7 +60,9 @@ public class DateRangeUtil { } } if (theEndExclusive != null) { - Date inputEnd = result.getUpperBound() == null ? null : result.getUpperBound().getValue(); + Date inputEnd = result.getUpperBound() == null + ? null + : result.getUpperBound().getValue(); if (theDateRangeParam == null || inputEnd == null || inputEnd.after(theEndExclusive)) { result.setUpperBoundExclusive(theEndExclusive); } @@ -64,5 +70,4 @@ public class DateRangeUtil { return result; } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/DateUtils.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/DateUtils.java index de9d64c8132..ad4086d7ab9 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/DateUtils.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/DateUtils.java @@ -77,11 +77,7 @@ public final class DateUtils { private static final String PATTERN_INTEGER_DATE = "yyyyMMdd"; - private static final String[] DEFAULT_PATTERNS = new String[]{ - PATTERN_RFC1123, - PATTERN_RFC1036, - PATTERN_ASCTIME - }; + private static final String[] DEFAULT_PATTERNS = new String[] {PATTERN_RFC1123, PATTERN_RFC1036, PATTERN_ASCTIME}; private static final Date DEFAULT_TWO_DIGIT_YEAR_START; static { @@ -95,17 +91,17 @@ public final class DateUtils { /** * This class should not be instantiated. */ - private DateUtils() { - } + private DateUtils() {} /** * A factory for {@link SimpleDateFormat}s. The instances are stored in a * threadlocal way because SimpleDateFormat is not thread safe as noted in * {@link SimpleDateFormat its javadoc}. */ - final static class DateFormatHolder { + static final class DateFormatHolder { - private static final ThreadLocal>> THREADLOCAL_FORMATS = ThreadLocal.withInitial(() -> new SoftReference<>(new HashMap<>())); + private static final ThreadLocal>> THREADLOCAL_FORMATS = + ThreadLocal.withInitial(() -> new SoftReference<>(new HashMap<>())); /** * creates a {@link SimpleDateFormat} for the requested format string. @@ -123,8 +119,7 @@ public final class DateUtils { Map formats = ref.get(); if (formats == null) { formats = new HashMap<>(); - THREADLOCAL_FORMATS.set( - new SoftReference<>(formats)); + THREADLOCAL_FORMATS.set(new SoftReference<>(formats)); } SimpleDateFormat format = formats.get(pattern); @@ -136,7 +131,6 @@ public final class DateUtils { return format; } - } /** @@ -226,12 +220,12 @@ public final class DateUtils { } return argument; } - + /** * Convert an incomplete date e.g. 2020 or 2020-01 to a complete date with lower * bound to the first day of the year/month, and upper bound to the last day of * the year/month - * + * * e.g. 
2020 to 2020-01-01 (left), 2020-12-31 (right) * 2020-02 to 2020-02-01 (left), 2020-02-29 (right) * @@ -239,47 +233,45 @@ public final class DateUtils { * @return a pair of complete date, left is lower bound, and right is upper bound */ public static Pair getCompletedDate(String theIncompleteDateStr) { - - if (StringUtils.isBlank(theIncompleteDateStr)) - return new ImmutablePair<>(null, null); - + + if (StringUtils.isBlank(theIncompleteDateStr)) return new ImmutablePair<>(null, null); + String lbStr, upStr; // YYYY only, return the last day of the year - if (theIncompleteDateStr.length() == 4) { + if (theIncompleteDateStr.length() == 4) { lbStr = theIncompleteDateStr + "-01-01"; // first day of the year upStr = theIncompleteDateStr + "-12-31"; // last day of the year return new ImmutablePair<>(lbStr, upStr); } - + // Not YYYY-MM, no change - if (theIncompleteDateStr.length() != 7) - return new ImmutablePair<>(theIncompleteDateStr, theIncompleteDateStr); - + if (theIncompleteDateStr.length() != 7) return new ImmutablePair<>(theIncompleteDateStr, theIncompleteDateStr); + // YYYY-MM Only Date lb; try { // first day of the month - lb = new SimpleDateFormat("yyyy-MM-dd").parse(theIncompleteDateStr+"-01"); + lb = new SimpleDateFormat("yyyy-MM-dd").parse(theIncompleteDateStr + "-01"); } catch (ParseException e) { return new ImmutablePair<>(theIncompleteDateStr, theIncompleteDateStr); } - + // last day of the month - Calendar calendar = Calendar.getInstance(); - calendar.setTime(lb); + Calendar calendar = Calendar.getInstance(); + calendar.setTime(lb); - calendar.add(Calendar.MONTH, 1); - calendar.set(Calendar.DAY_OF_MONTH, 1); - calendar.add(Calendar.DATE, -1); + calendar.add(Calendar.MONTH, 1); + calendar.set(Calendar.DAY_OF_MONTH, 1); + calendar.add(Calendar.DATE, -1); - Date ub = calendar.getTime(); + Date ub = calendar.getTime(); + + lbStr = new SimpleDateFormat("yyyy-MM-dd").format(lb); + upStr = new SimpleDateFormat("yyyy-MM-dd").format(ub); - lbStr = new SimpleDateFormat("yyyy-MM-dd").format(lb); - upStr = new SimpleDateFormat("yyyy-MM-dd").format(ub); - return new ImmutablePair<>(lbStr, upStr); } - + public static Date getEndOfDay(Date theDate) { Calendar cal = Calendar.getInstance(); diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ElementUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ElementUtil.java index 5e0b162204d..6b56c42d9ca 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ElementUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ElementUtil.java @@ -40,7 +40,7 @@ public class ElementUtil { if (!isEmpty((List) next)) { return false; } - } else if (next instanceof String && (!((String)next).isEmpty())) { + } else if (next instanceof String && (!((String) next).isEmpty())) { return false; } else if (next != null && !((IBase) next).isEmpty()) { return false; @@ -85,7 +85,8 @@ public class ElementUtil { next = theElements.get(i); } catch (ClassCastException e) { List elements = theElements; - String s = "Found instance of " + elements.get(i).getClass() + " - Did you set a field value to the incorrect type? Expected " + IBase.class.getName(); + String s = "Found instance of " + elements.get(i).getClass() + + " - Did you set a field value to the incorrect type? 
Expected " + IBase.class.getName(); throw new ClassCastException(Msg.code(1748) + s); } if (next != null && !next.isEmpty()) { @@ -103,33 +104,32 @@ public class ElementUtil { for (Object next : theElements) { if (next == null) { continue; - }else if (next instanceof IElement) { + } else if (next instanceof IElement) { addElement(retVal, (IElement) next, theType); } else if (next instanceof List) { - for (Object nextElement : ((List)next)) { + for (Object nextElement : ((List) next)) { if (!(nextElement instanceof IBase)) { - throw new IllegalArgumentException(Msg.code(1749) + "Found element of "+nextElement.getClass()); + throw new IllegalArgumentException( + Msg.code(1749) + "Found element of " + nextElement.getClass()); } addElement(retVal, (IElement) nextElement, theType); } } else { - throw new IllegalArgumentException(Msg.code(1750) + "Found element of "+next.getClass()); + throw new IllegalArgumentException(Msg.code(1750) + "Found element of " + next.getClass()); } - } return retVal; } - //@SuppressWarnings("unchecked") + // @SuppressWarnings("unchecked") private static void addElement(ArrayList retVal, IElement next, Class theType) { if (theType != null && theType.isAssignableFrom(next.getClass())) { retVal.add(theType.cast(next)); } if (next instanceof ICompositeElement) { ICompositeElement iCompositeElement = (ICompositeElement) next; - //TODO: Use of a deprecated method should be resolved. + // TODO: Use of a deprecated method should be resolved. retVal.addAll(iCompositeElement.getAllPopulatedChildElementsOfType(theType)); } } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ExtensionConstants.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ExtensionConstants.java index dab73ff0ac6..a6cc83c3292 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ExtensionConstants.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ExtensionConstants.java @@ -20,30 +20,38 @@ package ca.uhn.fhir.util; public class ExtensionConstants { - + /** * Non instantiable */ private ExtensionConstants() { // nothing } - - public static final String PARAM_IS_REQUIRED = "http://hl7api.sourceforge.net/hapi-fhir/extensions.xml#paramIsRequired"; - public static final String QUERY_RETURN_TYPE = "http://hl7api.sourceforge.net/hapi-fhir/extensions.xml#queryReturnType"; + public static final String PARAM_IS_REQUIRED = + "http://hl7api.sourceforge.net/hapi-fhir/extensions.xml#paramIsRequired"; - public static final String CONF_ADDITIONAL_PARAM = "http://hl7api.sourceforge.net/hapi-fhir/extensions.xml#additionalParam"; - - public static final String CONF_ADDITIONAL_PARAM_NAME = "http://hl7api.sourceforge.net/hapi-fhir/extensions.xml#additionalParamName"; + public static final String QUERY_RETURN_TYPE = + "http://hl7api.sourceforge.net/hapi-fhir/extensions.xml#queryReturnType"; - public static final String CONF_ADDITIONAL_PARAM_DESCRIPTION = "http://hl7api.sourceforge.net/hapi-fhir/extensions.xml#additionalParamDescription"; + public static final String CONF_ADDITIONAL_PARAM = + "http://hl7api.sourceforge.net/hapi-fhir/extensions.xml#additionalParam"; - public static final String CONF_ADDITIONAL_PARAM_TYPE = "http://hl7api.sourceforge.net/hapi-fhir/extensions.xml#additionalParamType"; + public static final String CONF_ADDITIONAL_PARAM_NAME = + "http://hl7api.sourceforge.net/hapi-fhir/extensions.xml#additionalParamName"; - public static final String CONF_ADDITIONAL_PARAM_REQUIRED = "http://hl7api.sourceforge.net/hapi-fhir/extensions.xml#additionalParamRequired"; + public static final 
String CONF_ADDITIONAL_PARAM_DESCRIPTION = + "http://hl7api.sourceforge.net/hapi-fhir/extensions.xml#additionalParamDescription"; - public static final String CONF_RESOURCE_COUNT = "http://hl7api.sourceforge.net/hapi-fhir/res/extdefs.html#resourceCount"; + public static final String CONF_ADDITIONAL_PARAM_TYPE = + "http://hl7api.sourceforge.net/hapi-fhir/extensions.xml#additionalParamType"; - public static final String QUERY_ALLOWED_INCLUDE = "http://hl7api.sourceforge.net/hapi-fhir/res/extdefs.html#allowedInclude"; - + public static final String CONF_ADDITIONAL_PARAM_REQUIRED = + "http://hl7api.sourceforge.net/hapi-fhir/extensions.xml#additionalParamRequired"; + + public static final String CONF_RESOURCE_COUNT = + "http://hl7api.sourceforge.net/hapi-fhir/res/extdefs.html#resourceCount"; + + public static final String QUERY_ALLOWED_INCLUDE = + "http://hl7api.sourceforge.net/hapi-fhir/res/extdefs.html#allowedInclude"; } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ExtensionUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ExtensionUtil.java index d87bf1bb1f2..074fbac22f3 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ExtensionUtil.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ExtensionUtil.java @@ -19,7 +19,6 @@ */ package ca.uhn.fhir.util; - import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.i18n.Msg; import org.apache.commons.lang3.StringUtils; @@ -30,11 +29,11 @@ import org.hl7.fhir.instance.model.api.IBaseExtension; import org.hl7.fhir.instance.model.api.IBaseHasExtensions; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nonnull; import java.util.List; import java.util.Optional; import java.util.function.Predicate; import java.util.stream.Collectors; +import javax.annotation.Nonnull; /** * Utility for modifying with extensions in a FHIR version-independent approach. 
@@ -58,7 +57,7 @@ public class ExtensionUtil { */ public static IBaseExtension getOrCreateExtension(IBase theBase, String theUrl) { IBaseHasExtensions baseHasExtensions = validateExtensionSupport(theBase); - IBaseExtension extension = getExtensionByUrl(baseHasExtensions, theUrl); + IBaseExtension extension = getExtensionByUrl(baseHasExtensions, theUrl); if (extension == null) { extension = baseHasExtensions.addExtension(); extension.setUrl(theUrl); @@ -87,7 +86,7 @@ public class ExtensionUtil { */ public static IBaseExtension addExtension(IBase theBase, String theUrl) { IBaseHasExtensions baseHasExtensions = validateExtensionSupport(theBase); - IBaseExtension extension = baseHasExtensions.addExtension(); + IBaseExtension extension = baseHasExtensions.addExtension(); if (theUrl != null) { extension.setUrl(theUrl); } @@ -103,14 +102,16 @@ public class ExtensionUtil { * @param theValue Extension value * @param theFhirContext The context containing FHIR resource definitions */ - public static void addExtension(FhirContext theFhirContext, IBase theBase, String theUrl, String theValueType, Object theValue) { - IBaseExtension ext = addExtension(theBase, theUrl); + public static void addExtension( + FhirContext theFhirContext, IBase theBase, String theUrl, String theValueType, Object theValue) { + IBaseExtension ext = addExtension(theBase, theUrl); setExtension(theFhirContext, ext, theValueType, theValue); } private static IBaseHasExtensions validateExtensionSupport(IBase theBase) { if (!(theBase instanceof IBaseHasExtensions)) { - throw new IllegalArgumentException(Msg.code(1747) + String.format("Expected instance that supports extensions, but got %s", theBase)); + throw new IllegalArgumentException( + Msg.code(1747) + String.format("Expected instance that supports extensions, but got %s", theBase)); } return (IBaseHasExtensions) theBase; } @@ -159,17 +160,16 @@ public class ExtensionUtil { * @return Returns the first available extension with the specified URL, or null if such extension doesn't exist */ public static IBaseExtension getExtensionByUrl(IBase theBase, String theExtensionUrl) { - Predicate> filter; + Predicate> filter; if (theExtensionUrl == null) { filter = (e -> true); } else { filter = (e -> theExtensionUrl.equals(e.getUrl())); } - return getExtensionsMatchingPredicate(theBase, filter) - .stream() - .findFirst() - .orElse(null); + return getExtensionsMatchingPredicate(theBase, filter).stream() + .findFirst() + .orElse(null); } /** @@ -178,19 +178,16 @@ public class ExtensionUtil { * and returns a list of the string version of the extension values. 
*/ public static List getExtensionPrimitiveValues(IBaseHasExtensions theBase, String theExtensionUrl) { - List values = theBase - .getExtension() - .stream() - .filter(t -> theExtensionUrl.equals(t.getUrl())) - .filter(t -> t.getValue() instanceof IPrimitiveType) - .map(t->(IPrimitiveType)t.getValue()) - .map(IPrimitiveType::getValueAsString) - .filter(StringUtils::isNotBlank) - .collect(Collectors.toList()); + List values = theBase.getExtension().stream() + .filter(t -> theExtensionUrl.equals(t.getUrl())) + .filter(t -> t.getValue() instanceof IPrimitiveType) + .map(t -> (IPrimitiveType) t.getValue()) + .map(IPrimitiveType::getValueAsString) + .filter(StringUtils::isNotBlank) + .collect(Collectors.toList()); return values; } - /** * Gets all extensions that match the specified filter predicate * @@ -198,12 +195,11 @@ public class ExtensionUtil { * @param theFilter Predicate to match the extension against * @return Returns all extension with the specified URL, or an empty list if such extensions do not exist */ - public static List> getExtensionsMatchingPredicate(IBase theBase, Predicate> theFilter) { - return validateExtensionSupport(theBase) - .getExtension() - .stream() - .filter(theFilter) - .collect(Collectors.toList()); + public static List> getExtensionsMatchingPredicate( + IBase theBase, Predicate> theFilter) { + return validateExtensionSupport(theBase).getExtension().stream() + .filter(theFilter) + .collect(Collectors.toList()); } /** @@ -234,11 +230,10 @@ public class ExtensionUtil { * @param theFilter Defines which extensions should be cleared * @return Returns all extension that were removed */ - private static List> clearExtensionsMatchingPredicate(IBase theBase, Predicate> theFilter) { + private static List> clearExtensionsMatchingPredicate( + IBase theBase, Predicate> theFilter) { List> retVal = getExtensionsMatchingPredicate(theBase, theFilter); - validateExtensionSupport(theBase) - .getExtension() - .removeIf(theFilter); + validateExtensionSupport(theBase).getExtension().removeIf(theFilter); return retVal; } @@ -250,7 +245,7 @@ public class ExtensionUtil { * @return Returns all extension with the specified URL, or an empty list if such extensions do not exist */ public static List> getExtensionsByUrl(IBaseHasExtensions theBase, String theExtensionUrl) { - Predicate> urlEqualityPredicate = e -> theExtensionUrl.equals(e.getUrl()); + Predicate> urlEqualityPredicate = e -> theExtensionUrl.equals(e.getUrl()); return getExtensionsMatchingPredicate(theBase, urlEqualityPredicate); } @@ -261,7 +256,7 @@ public class ExtensionUtil { * @param theValue The value to set * @param theFhirContext The context containing FHIR resource definitions */ - public static void setExtension(FhirContext theFhirContext, IBaseExtension theExtension, String theValue) { + public static void setExtension(FhirContext theFhirContext, IBaseExtension theExtension, String theValue) { setExtension(theFhirContext, theExtension, "string", theValue); } @@ -273,7 +268,8 @@ public class ExtensionUtil { * @param theValue The value to set * @param theFhirContext The context containing FHIR resource definitions */ - public static void setExtension(FhirContext theFhirContext, IBaseExtension theExtension, String theExtensionType, Object theValue) { + public static void setExtension( + FhirContext theFhirContext, IBaseExtension theExtension, String theExtensionType, Object theValue) { theExtension.setValue(TerserUtil.newElement(theFhirContext, theExtensionType, theValue)); } @@ -286,7 +282,7 @@ public class ExtensionUtil 
{ * @param theFhirContext The context containing FHIR resource definitions */ public static void setExtensionAsString(FhirContext theFhirContext, IBase theBase, String theUrl, String theValue) { - IBaseExtension ext = getOrCreateExtension(theBase, theUrl); + IBaseExtension ext = getOrCreateExtension(theBase, theUrl); setExtension(theFhirContext, ext, theValue); } @@ -299,8 +295,9 @@ public class ExtensionUtil { * @param theValue Extension value * @param theFhirContext The context containing FHIR resource definitions */ - public static void setExtension(FhirContext theFhirContext, IBase theBase, String theUrl, String theValueType, Object theValue) { - IBaseExtension ext = getOrCreateExtension(theBase, theUrl); + public static void setExtension( + FhirContext theFhirContext, IBase theBase, String theUrl, String theValueType, Object theValue) { + IBaseExtension ext = getOrCreateExtension(theBase, theUrl); setExtension(theFhirContext, ext, theValueType, theValue); } @@ -311,7 +308,7 @@ public class ExtensionUtil { * @param theRightExtension : Extension to be evaluated #2 * @return Result of the comparison */ - public static boolean equals(IBaseExtension theLeftExtension, IBaseExtension theRightExtension) { + public static boolean equals(IBaseExtension theLeftExtension, IBaseExtension theRightExtension) { return TerserUtil.equals(theLeftExtension, theRightExtension); } @@ -326,18 +323,18 @@ public class ExtensionUtil { * @param theChildExtensionUrl The child extension URL. Must not be null or blank. * @since 6.6.0 */ - public static > String extractChildPrimitiveExtensionValue(@Nonnull IBaseExtension theExtension, @Nonnull String theChildExtensionUrl) { + public static > String extractChildPrimitiveExtensionValue( + @Nonnull IBaseExtension theExtension, @Nonnull String theChildExtensionUrl) { Validate.notNull(theExtension, "theExtension must not be null"); Validate.notBlank(theChildExtensionUrl, "theChildExtensionUrl must not be null or blank"); - Optional codeExtension = theExtension - .getExtension() - .stream() - .filter(t -> theChildExtensionUrl.equals(t.getUrl())) - .findFirst(); + Optional codeExtension = theExtension.getExtension().stream() + .filter(t -> theChildExtensionUrl.equals(t.getUrl())) + .findFirst(); String retVal = null; if (codeExtension.isPresent() && codeExtension.get().getValue() instanceof IPrimitiveType) { - IPrimitiveType codeValue = (IPrimitiveType) codeExtension.get().getValue(); + IPrimitiveType codeValue = + (IPrimitiveType) codeExtension.get().getValue(); retVal = codeValue.getValueAsString(); } return retVal; diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTerser.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTerser.java index 9bdd9a99a62..4e4319076c3 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTerser.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirTerser.java @@ -55,8 +55,6 @@ import org.hl7.fhir.instance.model.api.IDomainResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IPrimitiveType; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -73,6 +71,8 @@ import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import static org.apache.commons.lang3.StringUtils.defaultString; import static 
org.apache.commons.lang3.StringUtils.isBlank; @@ -81,8 +81,10 @@ import static org.apache.commons.lang3.StringUtils.substring; public class FhirTerser { - private static final Pattern COMPARTMENT_MATCHER_PATH = Pattern.compile("([a-zA-Z.]+)\\.where\\(resolve\\(\\) is ([a-zA-Z]+)\\)"); - private static final String USER_DATA_KEY_CONTAIN_RESOURCES_COMPLETED = FhirTerser.class.getName() + "_CONTAIN_RESOURCES_COMPLETED"; + private static final Pattern COMPARTMENT_MATCHER_PATH = + Pattern.compile("([a-zA-Z.]+)\\.where\\(resolve\\(\\) is ([a-zA-Z]+)\\)"); + private static final String USER_DATA_KEY_CONTAIN_RESOURCES_COMPLETED = + FhirTerser.class.getName() + "_CONTAIN_RESOURCES_COMPLETED"; private final FhirContext myContext; public FhirTerser(FhirContext theContext) { @@ -91,8 +93,7 @@ public class FhirTerser { } private List addNameToList(List theCurrentList, BaseRuntimeChildDefinition theChildDefinition) { - if (theChildDefinition == null) - return null; + if (theChildDefinition == null) return null; if (theCurrentList == null || theCurrentList.isEmpty()) return new ArrayList<>(Collections.singletonList(theChildDefinition.getElementName())); List newList = new ArrayList<>(theCurrentList); @@ -111,11 +112,13 @@ public class FhirTerser { return retVal; } - private ExtensionDt createEmptyExtensionDt(ISupportsUndeclaredExtensions theSupportsUndeclaredExtensions, String theUrl) { + private ExtensionDt createEmptyExtensionDt( + ISupportsUndeclaredExtensions theSupportsUndeclaredExtensions, String theUrl) { return createEmptyExtensionDt(theSupportsUndeclaredExtensions, false, theUrl); } - private ExtensionDt createEmptyExtensionDt(ISupportsUndeclaredExtensions theSupportsUndeclaredExtensions, boolean theIsModifier, String theUrl) { + private ExtensionDt createEmptyExtensionDt( + ISupportsUndeclaredExtensions theSupportsUndeclaredExtensions, boolean theIsModifier, String theUrl) { return theSupportsUndeclaredExtensions.addUndeclaredExtension(theIsModifier, theUrl); } @@ -123,11 +126,14 @@ public class FhirTerser { return (IBaseExtension) theBaseHasExtensions.addExtension().setUrl(theUrl); } - private IBaseExtension createEmptyModifierExtension(IBaseHasModifierExtensions theBaseHasModifierExtensions, String theUrl) { - return (IBaseExtension) theBaseHasModifierExtensions.addModifierExtension().setUrl(theUrl); + private IBaseExtension createEmptyModifierExtension( + IBaseHasModifierExtensions theBaseHasModifierExtensions, String theUrl) { + return (IBaseExtension) + theBaseHasModifierExtensions.addModifierExtension().setUrl(theUrl); } - private ExtensionDt createEmptyModifierExtensionDt(ISupportsUndeclaredExtensions theSupportsUndeclaredExtensions, String theUrl) { + private ExtensionDt createEmptyModifierExtensionDt( + ISupportsUndeclaredExtensions theSupportsUndeclaredExtensions, String theUrl) { return createEmptyExtensionDt(theSupportsUndeclaredExtensions, true, theUrl); } @@ -183,11 +189,15 @@ public class FhirTerser { if (theIgnoreMissingFields) { return theSource; } - throw new DataFormatException(Msg.code(1788) + "Can not copy value from primitive of type " + theSource.getClass().getName() + " into type " + theTarget.getClass().getName()); + throw new DataFormatException(Msg.code(1788) + "Can not copy value from primitive of type " + + theSource.getClass().getName() + " into type " + + theTarget.getClass().getName()); } - BaseRuntimeElementCompositeDefinition sourceDef = (BaseRuntimeElementCompositeDefinition) myContext.getElementDefinition(theSource.getClass()); - 
BaseRuntimeElementCompositeDefinition targetDef = (BaseRuntimeElementCompositeDefinition) myContext.getElementDefinition(theTarget.getClass()); + BaseRuntimeElementCompositeDefinition sourceDef = + (BaseRuntimeElementCompositeDefinition) myContext.getElementDefinition(theSource.getClass()); + BaseRuntimeElementCompositeDefinition targetDef = + (BaseRuntimeElementCompositeDefinition) myContext.getElementDefinition(theTarget.getClass()); List children = sourceDef.getChildren(); if (sourceDef instanceof RuntimeExtensionDtDefinition) { @@ -203,7 +213,8 @@ public class FhirTerser { if (theIgnoreMissingFields) { continue; } - throw new DataFormatException(Msg.code(1789) + "Type " + theTarget.getClass().getName() + " does not have a child with name " + elementName); + throw new DataFormatException(Msg.code(1789) + "Type " + + theTarget.getClass().getName() + " does not have a child with name " + elementName); } BaseRuntimeElementDefinition element = myContext.getElementDefinition(valueType); @@ -251,13 +262,19 @@ public class FhirTerser { * @param theType The type to search for. Must not be null. * @return Returns a list of all matching elements */ - public List getAllPopulatedChildElementsOfType(IBaseResource theResource, final Class theType) { + public List getAllPopulatedChildElementsOfType( + IBaseResource theResource, final Class theType) { final ArrayList retVal = new ArrayList<>(); BaseRuntimeElementCompositeDefinition def = myContext.getResourceDefinition(theResource); visit(newMap(), theResource, theResource, null, null, def, new IModelVisitor() { @SuppressWarnings("unchecked") @Override - public void acceptElement(IBaseResource theOuterResource, IBase theElement, List thePathToElement, BaseRuntimeChildDefinition theChildDefinition, BaseRuntimeElementDefinition theDefinition) { + public void acceptElement( + IBaseResource theOuterResource, + IBase theElement, + List thePathToElement, + BaseRuntimeChildDefinition theChildDefinition, + BaseRuntimeElementDefinition theDefinition) { if (theElement == null || theElement.isEmpty()) { return; } @@ -275,25 +292,33 @@ public class FhirTerser { BaseRuntimeElementCompositeDefinition def = myContext.getResourceDefinition(theResource); visit(newMap(), theResource, theResource, null, null, def, new IModelVisitor() { @Override - public void acceptElement(IBaseResource theOuterResource, IBase theElement, List thePathToElement, BaseRuntimeChildDefinition theChildDefinition, BaseRuntimeElementDefinition theDefinition) { + public void acceptElement( + IBaseResource theOuterResource, + IBase theElement, + List thePathToElement, + BaseRuntimeChildDefinition theChildDefinition, + BaseRuntimeElementDefinition theDefinition) { if (theElement == null || theElement.isEmpty()) { return; } if (IBaseReference.class.isAssignableFrom(theElement.getClass())) { - retVal.add(new ResourceReferenceInfo(myContext, theOuterResource, thePathToElement, (IBaseReference) theElement)); + retVal.add(new ResourceReferenceInfo( + myContext, theOuterResource, thePathToElement, (IBaseReference) theElement)); } } }); return retVal; } - private BaseRuntimeChildDefinition getDefinition(BaseRuntimeElementCompositeDefinition theCurrentDef, List theSubList) { + private BaseRuntimeChildDefinition getDefinition( + BaseRuntimeElementCompositeDefinition theCurrentDef, List theSubList) { BaseRuntimeChildDefinition nextDef = theCurrentDef.getChildByNameOrThrowDataFormatException(theSubList.get(0)); if (theSubList.size() == 1) { return nextDef; } - BaseRuntimeElementCompositeDefinition cmp = 
(BaseRuntimeElementCompositeDefinition) nextDef.getChildByName(theSubList.get(0)); + BaseRuntimeElementCompositeDefinition cmp = + (BaseRuntimeElementCompositeDefinition) nextDef.getChildByName(theSubList.get(0)); return getDefinition(cmp, theSubList.subList(1, theSubList.size())); } @@ -306,7 +331,6 @@ public class FhirTerser { throw new ConfigurationException(Msg.code(1790) + "Invalid path: " + thePath); } return getDefinition(def, subList); - } public Object getSingleValueOrNull(IBase theTarget, String thePath) { @@ -321,7 +345,8 @@ public class FhirTerser { BaseRuntimeElementDefinition def = myContext.getElementDefinition(theTarget.getClass()); if (!(def instanceof BaseRuntimeElementCompositeDefinition)) { - throw new IllegalArgumentException(Msg.code(1791) + "Target is not a composite type: " + theTarget.getClass().getName()); + throw new IllegalArgumentException(Msg.code(1791) + "Target is not a composite type: " + + theTarget.getClass().getName()); } BaseRuntimeElementCompositeDefinition currentDef = (BaseRuntimeElementCompositeDefinition) def; @@ -340,19 +365,31 @@ public class FhirTerser { } public String getSinglePrimitiveValueOrNull(IBase theTarget, String thePath) { - return getSingleValue(theTarget, thePath, IPrimitiveType.class).map(t -> t.getValueAsString()).orElse(null); + return getSingleValue(theTarget, thePath, IPrimitiveType.class) + .map(t -> t.getValueAsString()) + .orElse(null); } public Optional getSingleValue(IBase theTarget, String thePath, Class theWantedType) { return Optional.ofNullable(getSingleValueOrNull(theTarget, thePath, theWantedType)); } - private List getValues(BaseRuntimeElementCompositeDefinition theCurrentDef, IBase theCurrentObj, List theSubList, Class theWantedClass) { + private List getValues( + BaseRuntimeElementCompositeDefinition theCurrentDef, + IBase theCurrentObj, + List theSubList, + Class theWantedClass) { return getValues(theCurrentDef, theCurrentObj, theSubList, theWantedClass, false, false); } @SuppressWarnings("unchecked") - private List getValues(BaseRuntimeElementCompositeDefinition theCurrentDef, IBase theCurrentObj, List theSubList, Class theWantedClass, boolean theCreate, boolean theAddExtension) { + private List getValues( + BaseRuntimeElementCompositeDefinition theCurrentDef, + IBase theCurrentObj, + List theSubList, + Class theWantedClass, + boolean theCreate, + boolean theAddExtension) { if (theSubList.isEmpty()) { return Collections.emptyList(); } @@ -372,25 +409,27 @@ public class FhirTerser { final String extensionDtUrlForLambda = extensionUrl; List extensionDts = Collections.emptyList(); if (theCurrentObj instanceof ISupportsUndeclaredExtensions) { - extensionDts = ((ISupportsUndeclaredExtensions) theCurrentObj).getUndeclaredExtensions() - .stream() - .filter(t -> t.getUrl().equals(extensionDtUrlForLambda)) - .collect(Collectors.toList()); + extensionDts = ((ISupportsUndeclaredExtensions) theCurrentObj) + .getUndeclaredExtensions().stream() + .filter(t -> t.getUrl().equals(extensionDtUrlForLambda)) + .collect(Collectors.toList()); if (theAddExtension - && (!(theCurrentObj instanceof IBaseExtension) || (extensionDts.isEmpty() && theSubList.size() == 1))) { - extensionDts.add(createEmptyExtensionDt((ISupportsUndeclaredExtensions) theCurrentObj, extensionUrl)); + && (!(theCurrentObj instanceof IBaseExtension) + || (extensionDts.isEmpty() && theSubList.size() == 1))) { + extensionDts.add( + createEmptyExtensionDt((ISupportsUndeclaredExtensions) theCurrentObj, extensionUrl)); } if (extensionDts.isEmpty() && theCreate) { - 
extensionDts.add(createEmptyExtensionDt((ISupportsUndeclaredExtensions) theCurrentObj, extensionUrl)); + extensionDts.add( + createEmptyExtensionDt((ISupportsUndeclaredExtensions) theCurrentObj, extensionUrl)); } } else if (theCurrentObj instanceof IBaseExtension) { extensionDts = ((IBaseExtension) theCurrentObj).getExtension(); - if (theAddExtension - && (extensionDts.isEmpty() && theSubList.size() == 1)) { + if (theAddExtension && (extensionDts.isEmpty() && theSubList.size() == 1)) { extensionDts.add(createEmptyExtensionDt((IBaseExtension) theCurrentObj, extensionUrl)); } @@ -409,13 +448,14 @@ public class FhirTerser { final String extensionUrlForLambda = extensionUrl; List extensions = Collections.emptyList(); if (theCurrentObj instanceof IBaseHasExtensions) { - extensions = ((IBaseHasExtensions) theCurrentObj).getExtension() - .stream() - .filter(t -> t.getUrl().equals(extensionUrlForLambda)) - .collect(Collectors.toList()); + extensions = ((IBaseHasExtensions) theCurrentObj) + .getExtension().stream() + .filter(t -> t.getUrl().equals(extensionUrlForLambda)) + .collect(Collectors.toList()); if (theAddExtension - && (!(theCurrentObj instanceof IBaseExtension) || (extensions.isEmpty() && theSubList.size() == 1))) { + && (!(theCurrentObj instanceof IBaseExtension) + || (extensions.isEmpty() && theSubList.size() == 1))) { extensions.add(createEmptyExtension((IBaseHasExtensions) theCurrentObj, extensionUrl)); } @@ -435,8 +475,15 @@ public class FhirTerser { List values = retVal; retVal = new ArrayList<>(); for (T nextElement : values) { - BaseRuntimeElementCompositeDefinition nextChildDef = (BaseRuntimeElementCompositeDefinition) myContext.getElementDefinition(nextElement.getClass()); - List foundValues = getValues(nextChildDef, nextElement, theSubList.subList(1, theSubList.size()), theWantedClass, theCreate, theAddExtension); + BaseRuntimeElementCompositeDefinition nextChildDef = (BaseRuntimeElementCompositeDefinition) + myContext.getElementDefinition(nextElement.getClass()); + List foundValues = getValues( + nextChildDef, + nextElement, + theSubList.subList(1, theSubList.size()), + theWantedClass, + theCreate, + theAddExtension); retVal.addAll(foundValues); } } @@ -456,25 +503,27 @@ public class FhirTerser { final String extensionDtUrlForLambda = extensionUrl; List extensionDts = Collections.emptyList(); if (theCurrentObj instanceof ISupportsUndeclaredExtensions) { - extensionDts = ((ISupportsUndeclaredExtensions) theCurrentObj).getUndeclaredModifierExtensions() - .stream() - .filter(t -> t.getUrl().equals(extensionDtUrlForLambda)) - .collect(Collectors.toList()); + extensionDts = ((ISupportsUndeclaredExtensions) theCurrentObj) + .getUndeclaredModifierExtensions().stream() + .filter(t -> t.getUrl().equals(extensionDtUrlForLambda)) + .collect(Collectors.toList()); if (theAddExtension - && (!(theCurrentObj instanceof IBaseExtension) || (extensionDts.isEmpty() && theSubList.size() == 1))) { - extensionDts.add(createEmptyModifierExtensionDt((ISupportsUndeclaredExtensions) theCurrentObj, extensionUrl)); + && (!(theCurrentObj instanceof IBaseExtension) + || (extensionDts.isEmpty() && theSubList.size() == 1))) { + extensionDts.add(createEmptyModifierExtensionDt( + (ISupportsUndeclaredExtensions) theCurrentObj, extensionUrl)); } if (extensionDts.isEmpty() && theCreate) { - extensionDts.add(createEmptyModifierExtensionDt((ISupportsUndeclaredExtensions) theCurrentObj, extensionUrl)); + extensionDts.add(createEmptyModifierExtensionDt( + (ISupportsUndeclaredExtensions) theCurrentObj, extensionUrl)); 
} } else if (theCurrentObj instanceof IBaseExtension) { extensionDts = ((IBaseExtension) theCurrentObj).getExtension(); - if (theAddExtension - && (extensionDts.isEmpty() && theSubList.size() == 1)) { + if (theAddExtension && (extensionDts.isEmpty() && theSubList.size() == 1)) { extensionDts.add(createEmptyExtensionDt((IBaseExtension) theCurrentObj, extensionUrl)); } @@ -494,18 +543,21 @@ public class FhirTerser { List extensions = Collections.emptyList(); if (theCurrentObj instanceof IBaseHasModifierExtensions) { - extensions = ((IBaseHasModifierExtensions) theCurrentObj).getModifierExtension() - .stream() - .filter(t -> t.getUrl().equals(extensionUrlForLambda)) - .collect(Collectors.toList()); + extensions = ((IBaseHasModifierExtensions) theCurrentObj) + .getModifierExtension().stream() + .filter(t -> t.getUrl().equals(extensionUrlForLambda)) + .collect(Collectors.toList()); if (theAddExtension - && (!(theCurrentObj instanceof IBaseExtension) || (extensions.isEmpty() && theSubList.size() == 1))) { - extensions.add(createEmptyModifierExtension((IBaseHasModifierExtensions) theCurrentObj, extensionUrl)); + && (!(theCurrentObj instanceof IBaseExtension) + || (extensions.isEmpty() && theSubList.size() == 1))) { + extensions.add( + createEmptyModifierExtension((IBaseHasModifierExtensions) theCurrentObj, extensionUrl)); } if (extensions.isEmpty() && theCreate) { - extensions.add(createEmptyModifierExtension((IBaseHasModifierExtensions) theCurrentObj, extensionUrl)); + extensions.add( + createEmptyModifierExtension((IBaseHasModifierExtensions) theCurrentObj, extensionUrl)); } } @@ -520,8 +572,15 @@ public class FhirTerser { List values = retVal; retVal = new ArrayList<>(); for (T nextElement : values) { - BaseRuntimeElementCompositeDefinition nextChildDef = (BaseRuntimeElementCompositeDefinition) myContext.getElementDefinition(nextElement.getClass()); - List foundValues = getValues(nextChildDef, nextElement, theSubList.subList(1, theSubList.size()), theWantedClass, theCreate, theAddExtension); + BaseRuntimeElementCompositeDefinition nextChildDef = (BaseRuntimeElementCompositeDefinition) + myContext.getElementDefinition(nextElement.getClass()); + List foundValues = getValues( + nextChildDef, + nextElement, + theSubList.subList(1, theSubList.size()), + theWantedClass, + theCreate, + theAddExtension); retVal.addAll(foundValues); } } @@ -576,8 +635,15 @@ public class FhirTerser { } } else { for (IBase nextElement : values) { - BaseRuntimeElementCompositeDefinition nextChildDef = (BaseRuntimeElementCompositeDefinition) myContext.getElementDefinition(nextElement.getClass()); - List foundValues = getValues(nextChildDef, nextElement, theSubList.subList(1, theSubList.size()), theWantedClass, theCreate, theAddExtension); + BaseRuntimeElementCompositeDefinition nextChildDef = (BaseRuntimeElementCompositeDefinition) + myContext.getElementDefinition(nextElement.getClass()); + List foundValues = getValues( + nextChildDef, + nextElement, + theSubList.subList(1, theSubList.size()), + theWantedClass, + theCreate, + theAddExtension); retVal.addAll(foundValues); } } @@ -640,7 +706,8 @@ public class FhirTerser { * @return A list of values of type theWantedClass. 
*/ public List getValues(IBase theElement, String thePath, Class theWantedClass) { - BaseRuntimeElementCompositeDefinition def = (BaseRuntimeElementCompositeDefinition) myContext.getElementDefinition(theElement.getClass()); + BaseRuntimeElementCompositeDefinition def = + (BaseRuntimeElementCompositeDefinition) myContext.getElementDefinition(theElement.getClass()); List parts = parsePath(def, thePath); return getValues(def, theElement, parts, theWantedClass); } @@ -656,8 +723,10 @@ public class FhirTerser { * @param Type declared by theWantedClass * @return A list of values of type theWantedClass. */ - public List getValues(IBase theElement, String thePath, Class theWantedClass, boolean theCreate) { - BaseRuntimeElementCompositeDefinition def = (BaseRuntimeElementCompositeDefinition) myContext.getElementDefinition(theElement.getClass()); + public List getValues( + IBase theElement, String thePath, Class theWantedClass, boolean theCreate) { + BaseRuntimeElementCompositeDefinition def = + (BaseRuntimeElementCompositeDefinition) myContext.getElementDefinition(theElement.getClass()); List parts = parsePath(def, thePath); return getValues(def, theElement, parts, theWantedClass, theCreate, false); } @@ -674,8 +743,10 @@ public class FhirTerser { * @param Type declared by theWantedClass * @return A list of values of type theWantedClass. */ - public List getValues(IBase theElement, String thePath, Class theWantedClass, boolean theCreate, boolean theAddExtension) { - BaseRuntimeElementCompositeDefinition def = (BaseRuntimeElementCompositeDefinition) myContext.getElementDefinition(theElement.getClass()); + public List getValues( + IBase theElement, String thePath, Class theWantedClass, boolean theCreate, boolean theAddExtension) { + BaseRuntimeElementCompositeDefinition def = + (BaseRuntimeElementCompositeDefinition) myContext.getElementDefinition(theElement.getClass()); List parts = parsePath(def, thePath); return getValues(def, theElement, parts, theWantedClass, theCreate, theAddExtension); } @@ -729,7 +800,8 @@ public class FhirTerser { * @return true if theSource is in the compartment * @throws IllegalArgumentException If theTarget does not contain both a resource type and ID */ - public boolean isSourceInCompartmentForTarget(String theCompartmentName, IBaseResource theSource, IIdType theTarget) { + public boolean isSourceInCompartmentForTarget( + String theCompartmentName, IBaseResource theSource, IIdType theTarget) { return isSourceInCompartmentForTarget(theCompartmentName, theSource, theTarget, null); } @@ -744,18 +816,27 @@ public class FhirTerser { * @return true if theSource is in the compartment or one of the additional parameters matched. 
* @throws IllegalArgumentException If theTarget does not contain both a resource type and ID */ - public boolean isSourceInCompartmentForTarget(String theCompartmentName, IBaseResource theSource, IIdType theTarget, Set theAdditionalCompartmentParamNames) { + public boolean isSourceInCompartmentForTarget( + String theCompartmentName, + IBaseResource theSource, + IIdType theTarget, + Set theAdditionalCompartmentParamNames) { Validate.notBlank(theCompartmentName, "theCompartmentName must not be null or blank"); Validate.notNull(theSource, "theSource must not be null"); Validate.notNull(theTarget, "theTarget must not be null"); - Validate.notBlank(defaultString(theTarget.getResourceType()), "theTarget must have a populated resource type (theTarget.getResourceType() does not return a value)"); - Validate.notBlank(defaultString(theTarget.getIdPart()), "theTarget must have a populated ID (theTarget.getIdPart() does not return a value)"); + Validate.notBlank( + defaultString(theTarget.getResourceType()), + "theTarget must have a populated resource type (theTarget.getResourceType() does not return a value)"); + Validate.notBlank( + defaultString(theTarget.getIdPart()), + "theTarget must have a populated ID (theTarget.getIdPart() does not return a value)"); String wantRef = theTarget.toUnqualifiedVersionless().getValue(); RuntimeResourceDefinition sourceDef = myContext.getResourceDefinition(theSource); if (theSource.getIdElement().hasIdPart()) { - if (wantRef.equals(sourceDef.getName() + '/' + theSource.getIdElement().getIdPart())) { + if (wantRef.equals( + sourceDef.getName() + '/' + theSource.getIdElement().getIdPart())) { return true; } } @@ -776,7 +857,8 @@ public class FhirTerser { @Override public boolean consume(IIdType theCompartmentOwner) { - if (myWantRef.equals(theCompartmentOwner.toUnqualifiedVersionless().getValue())) { + if (myWantRef.equals( + theCompartmentOwner.toUnqualifiedVersionless().getValue())) { myFound = true; } return !myFound; @@ -788,7 +870,6 @@ public class FhirTerser { return consumer.isFound(); } - /** * Returns the owners of the compartment in theSource is in the compartment named theCompartmentName. * @@ -797,7 +878,8 @@ public class FhirTerser { * @param theAdditionalCompartmentParamNames If provided, search param names provided here will be considered as included in the given compartment for this comparison. */ @Nonnull - public List getCompartmentOwnersForResource(String theCompartmentName, IBaseResource theSource, Set theAdditionalCompartmentParamNames) { + public List getCompartmentOwnersForResource( + String theCompartmentName, IBaseResource theSource, Set theAdditionalCompartmentParamNames) { Validate.notBlank(theCompartmentName, "theCompartmentName must not be null or blank"); Validate.notNull(theSource, "theSource must not be null"); @@ -824,8 +906,11 @@ public class FhirTerser { return consumer.getOwners(); } - - private void visitCompartmentOwnersForResource(String theCompartmentName, IBaseResource theSource, Set theAdditionalCompartmentParamNames, ICompartmentOwnerVisitor theConsumer) { + private void visitCompartmentOwnersForResource( + String theCompartmentName, + IBaseResource theSource, + Set theAdditionalCompartmentParamNames, + ICompartmentOwnerVisitor theConsumer) { Validate.notBlank(theCompartmentName, "theCompartmentName must not be null or blank"); Validate.notNull(theSource, "theSource must not be null"); @@ -834,9 +919,10 @@ public class FhirTerser { // If passed an additional set of searchparameter names, add them for comparison purposes. 
if (theAdditionalCompartmentParamNames != null) { - List additionalParams = theAdditionalCompartmentParamNames.stream().map(sourceDef::getSearchParam) - .filter(Objects::nonNull) - .collect(Collectors.toList()); + List additionalParams = theAdditionalCompartmentParamNames.stream() + .map(sourceDef::getSearchParam) + .filter(Objects::nonNull) + .collect(Collectors.toList()); if (params == null || params.isEmpty()) { params = additionalParams; } else { @@ -897,24 +983,30 @@ public class FhirTerser { return; } } - } - } } - } - private void visit(IBase theElement, BaseRuntimeChildDefinition theChildDefinition, BaseRuntimeElementDefinition theDefinition, IModelVisitor2 theCallback, List theContainingElementPath, - List theChildDefinitionPath, List> theElementDefinitionPath) { + private void visit( + IBase theElement, + BaseRuntimeChildDefinition theChildDefinition, + BaseRuntimeElementDefinition theDefinition, + IModelVisitor2 theCallback, + List theContainingElementPath, + List theChildDefinitionPath, + List> theElementDefinitionPath) { if (theChildDefinition != null) { theChildDefinitionPath.add(theChildDefinition); } theContainingElementPath.add(theElement); theElementDefinitionPath.add(theDefinition); - boolean recurse = theCallback.acceptElement(theElement, Collections.unmodifiableList(theContainingElementPath), Collections.unmodifiableList(theChildDefinitionPath), - Collections.unmodifiableList(theElementDefinitionPath)); + boolean recurse = theCallback.acceptElement( + theElement, + Collections.unmodifiableList(theContainingElementPath), + Collections.unmodifiableList(theChildDefinitionPath), + Collections.unmodifiableList(theElementDefinitionPath)); if (recurse) { /* @@ -924,7 +1016,8 @@ public class FhirTerser { ISupportsUndeclaredExtensions containingElement = (ISupportsUndeclaredExtensions) theElement; for (ExtensionDt nextExt : containingElement.getUndeclaredExtensions()) { theContainingElementPath.add(nextExt); - theCallback.acceptUndeclaredExtension(nextExt, theContainingElementPath, theChildDefinitionPath, theElementDefinitionPath); + theCallback.acceptUndeclaredExtension( + nextExt, theContainingElementPath, theChildDefinitionPath, theElementDefinitionPath); theContainingElementPath.remove(theContainingElementPath.size() - 1); } } @@ -942,7 +1035,8 @@ public class FhirTerser { case RESOURCE: case RESOURCE_BLOCK: case COMPOSITE_DATATYPE: { - BaseRuntimeElementCompositeDefinition childDef = (BaseRuntimeElementCompositeDefinition) theDefinition; + BaseRuntimeElementCompositeDefinition childDef = + (BaseRuntimeElementCompositeDefinition) theDefinition; for (BaseRuntimeChildDefinition nextChild : childDef.getChildrenAndExtension()) { List values = nextChild.getAccessor().getValues(theElement); if (values != null) { @@ -968,9 +1062,21 @@ public class FhirTerser { childElementDef = nextChild.getChildElementDefinitionByDatatype(typeClass); } - Validate.notNull(childElementDef, "Found value of type[%s] which is not valid for field[%s] in %s", nextValue.getClass(), nextChild.getElementName(), childDef.getName()); + Validate.notNull( + childElementDef, + "Found value of type[%s] which is not valid for field[%s] in %s", + nextValue.getClass(), + nextChild.getElementName(), + childDef.getName()); - visit(nextValue, nextChild, childElementDef, theCallback, theContainingElementPath, theChildDefinitionPath, theElementDefinitionPath); + visit( + nextValue, + nextChild, + childElementDef, + theCallback, + theContainingElementPath, + theChildDefinitionPath, + theElementDefinitionPath); } } } @@ 
-980,23 +1086,37 @@ public class FhirTerser { BaseContainedDt value = (BaseContainedDt) theElement; for (IResource next : value.getContainedResources()) { BaseRuntimeElementCompositeDefinition def = myContext.getResourceDefinition(next); - visit(next, null, def, theCallback, theContainingElementPath, theChildDefinitionPath, theElementDefinitionPath); + visit( + next, + null, + def, + theCallback, + theContainingElementPath, + theChildDefinitionPath, + theElementDefinitionPath); } break; } case EXTENSION_DECLARED: case UNDECL_EXT: { - throw new IllegalStateException(Msg.code(1793) + "state should not happen: " + theDefinition.getChildType()); + throw new IllegalStateException( + Msg.code(1793) + "state should not happen: " + theDefinition.getChildType()); } case CONTAINED_RESOURCE_LIST: { if (theElement != null) { BaseRuntimeElementDefinition def = myContext.getElementDefinition(theElement.getClass()); - visit(theElement, null, def, theCallback, theContainingElementPath, theChildDefinitionPath, theElementDefinitionPath); + visit( + theElement, + null, + def, + theCallback, + theContainingElementPath, + theChildDefinitionPath, + theElementDefinitionPath); } break; } } - } if (theChildDefinition != null) { @@ -1045,14 +1165,25 @@ public class FhirTerser { BaseRuntimeElementCompositeDefinition defComposite = (BaseRuntimeElementCompositeDefinition) def; visit(theElement, null, def, theVisitor, new ArrayList<>(), new ArrayList<>(), new ArrayList<>()); } else if (theElement instanceof IBaseExtension) { - theVisitor.acceptUndeclaredExtension((IBaseExtension) theElement, Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); + theVisitor.acceptUndeclaredExtension( + (IBaseExtension) theElement, + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList()); } else { - theVisitor.acceptElement(theElement, Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); + theVisitor.acceptElement( + theElement, Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); } } - private void visit(Map theStack, IBaseResource theResource, IBase theElement, List thePathToElement, BaseRuntimeChildDefinition theChildDefinition, - BaseRuntimeElementDefinition theDefinition, IModelVisitor theCallback) { + private void visit( + Map theStack, + IBaseResource theResource, + IBase theElement, + List thePathToElement, + BaseRuntimeChildDefinition theChildDefinition, + BaseRuntimeElementDefinition theDefinition, + IModelVisitor theCallback) { List pathToElement = addNameToList(thePathToElement, theChildDefinition); if (theStack.put(theElement, theElement) != null) { @@ -1071,7 +1202,8 @@ public class FhirTerser { if (theElement instanceof IBaseReference) { IBaseResource target = ((IBaseReference) theElement).getResource(); if (target != null) { - if (target.getIdElement().hasIdPart() == false || target.getIdElement().isLocal()) { + if (target.getIdElement().hasIdPart() == false + || target.getIdElement().isLocal()) { RuntimeResourceDefinition targetDef = myContext.getResourceDefinition(target); visit(theStack, target, target, pathToElement, null, targetDef, theCallback); } @@ -1100,7 +1232,9 @@ public class FhirTerser { try { nextValue = (IBase) nextValueObject; } catch (ClassCastException e) { - String s = "Found instance of " + nextValueObject.getClass() + " - Did you set a field value to the incorrect type? 
Expected " + IBase.class.getName(); + String s = "Found instance of " + nextValueObject.getClass() + + " - Did you set a field value to the incorrect type? Expected " + + IBase.class.getName(); throw new ClassCastException(Msg.code(1794) + s); } if (nextValue == null) { @@ -1115,14 +1249,22 @@ public class FhirTerser { if (childElementDef == null) { childElementDef = myContext.getElementDefinition(clazz); - Validate.notNull(childElementDef, "Unable to find element definition for class: %s", clazz); + Validate.notNull( + childElementDef, "Unable to find element definition for class: %s", clazz); } if (nextChild instanceof RuntimeChildDirectResource) { // Don't descend into embedded resources theCallback.acceptElement(theResource, nextValue, null, nextChild, childElementDef); } else { - visit(theStack, theResource, nextValue, pathToElement, nextChild, childElementDef, theCallback); + visit( + theStack, + theResource, + nextValue, + pathToElement, + nextChild, + childElementDef, + theCallback); } } } @@ -1145,7 +1287,6 @@ public class FhirTerser { } theStack.remove(theElement); - } /** @@ -1168,7 +1309,11 @@ public class FhirTerser { visit(theResource, new IModelVisitor2() { @Override - public boolean acceptElement(IBase theElement, List theContainingElementPath, List theChildDefinitionPath, List> theElementDefinitionPath) { + public boolean acceptElement( + IBase theElement, + List theContainingElementPath, + List theChildDefinitionPath, + List> theElementDefinitionPath) { if (theElement == theResource) { return true; } @@ -1180,7 +1325,11 @@ public class FhirTerser { } @Override - public boolean acceptUndeclaredExtension(IBaseExtension theNextExt, List theContainingElementPath, List theChildDefinitionPath, List> theElementDefinitionPath) { + public boolean acceptUndeclaredExtension( + IBaseExtension theNextExt, + List theContainingElementPath, + List theChildDefinitionPath, + List> theElementDefinitionPath) { return true; } }); @@ -1194,7 +1343,11 @@ public class FhirTerser { public void clear(IBaseResource theInput) { visit(theInput, new IModelVisitor2() { @Override - public boolean acceptElement(IBase theElement, List theContainingElementPath, List theChildDefinitionPath, List> theElementDefinitionPath) { + public boolean acceptElement( + IBase theElement, + List theContainingElementPath, + List theChildDefinitionPath, + List> theElementDefinitionPath) { if (theElement instanceof IPrimitiveType) { ((IPrimitiveType) theElement).setValueAsString(null); } @@ -1202,22 +1355,28 @@ public class FhirTerser { } @Override - public boolean acceptUndeclaredExtension(IBaseExtension theNextExt, List theContainingElementPath, List theChildDefinitionPath, List> theElementDefinitionPath) { + public boolean acceptUndeclaredExtension( + IBaseExtension theNextExt, + List theContainingElementPath, + List theChildDefinitionPath, + List> theElementDefinitionPath) { theNextExt.setUrl(null); theNextExt.setValue(null); return true; } - }); } - private void containResourcesForEncoding(ContainedResources theContained, IBaseResource theResource, boolean theModifyResource) { + private void containResourcesForEncoding( + ContainedResources theContained, IBaseResource theResource, boolean theModifyResource) { List allReferences = getAllPopulatedChildElementsOfType(theResource, IBaseReference.class); for (IBaseReference next : allReferences) { IBaseResource resource = next.getResource(); if (resource == null && next.getReferenceElement().isLocal()) { if (theContained.hasExistingIdToContainedResource()) { - IBaseResource 
potentialTarget = theContained.getExistingIdToContainedResource().remove(next.getReferenceElement().getValue()); + IBaseResource potentialTarget = theContained + .getExistingIdToContainedResource() + .remove(next.getReferenceElement().getValue()); if (potentialTarget != null) { theContained.addContained(next.getReferenceElement(), potentialTarget); containResourcesForEncoding(theContained, potentialTarget, theModifyResource); @@ -1240,14 +1399,13 @@ public class FhirTerser { next.setReference(id.getValue()); } if (resource.getIdElement().isLocal() && theContained.hasExistingIdToContainedResource()) { - theContained.getExistingIdToContainedResource().remove(resource.getIdElement().getValue()); + theContained + .getExistingIdToContainedResource() + .remove(resource.getIdElement().getValue()); } } - } - } - } /** @@ -1307,7 +1465,8 @@ public class FhirTerser { private List getContainedResourceList(T theResource) { List containedResources = Collections.emptyList(); if (theResource instanceof IResource) { - containedResources = (List) ((IResource) theResource).getContained().getContainedResources(); + containedResources = + (List) ((IResource) theResource).getContained().getContainedResources(); } else if (theResource instanceof IDomainResource) { containedResources = (List) ((IDomainResource) theResource).getContained(); } @@ -1349,7 +1508,8 @@ public class FhirTerser { } IBase target = theTarget; - BaseRuntimeElementCompositeDefinition def = (BaseRuntimeElementCompositeDefinition) myContext.getElementDefinition(target.getClass()); + BaseRuntimeElementCompositeDefinition def = + (BaseRuntimeElementCompositeDefinition) myContext.getElementDefinition(target.getClass()); List parts = parsePath(def, thePath); for (int i = 0, partsSize = parts.size(); ; i++) { @@ -1358,7 +1518,12 @@ public class FhirTerser { BaseRuntimeChildDefinition nextChild = def.getChildByName(nextPart); if (nextChild == null) { - throw new DataFormatException(Msg.code(1796) + "Invalid path " + thePath + ": Element of type " + def.getName() + " has no child named " + nextPart + ". Valid names: " + def.getChildrenAndExtension().stream().map(t -> t.getElementName()).sorted().collect(Collectors.joining(", "))); + throw new DataFormatException(Msg.code(1796) + "Invalid path " + thePath + ": Element of type " + + def.getName() + " has no child named " + nextPart + ". 
Valid names: " + + def.getChildrenAndExtension().stream() + .map(t -> t.getElementName()) + .sorted() + .collect(Collectors.joining(", "))); } List childValues = nextChild.getAccessor().getValues(target); @@ -1371,8 +1536,10 @@ public class FhirTerser { if (!childValues.isEmpty()) { if (theElementsToAdd == -1) { return (List) Collections.singletonList(childValues.get(0)); - } else if (nextChild.getMax() == 1 && !childValues.get(0).isEmpty()) { - throw new DataFormatException(Msg.code(1797) + "Element at path " + thePath + " is not repeatable and not empty"); + } else if (nextChild.getMax() == 1 + && !childValues.get(0).isEmpty()) { + throw new DataFormatException( + Msg.code(1797) + "Element at path " + thePath + " is not repeatable and not empty"); } else if (nextChild.getMax() == 1 && childValues.get(0).isEmpty()) { return (List) Collections.singletonList(childValues.get(0)); } @@ -1388,7 +1555,8 @@ public class FhirTerser { return (List) Collections.singletonList(childValue); } else { if (nextChild.getMax() == 1) { - throw new DataFormatException(Msg.code(1798) + "Can not add multiple values at path " + thePath + ": Element does not repeat"); + throw new DataFormatException(Msg.code(1798) + "Can not add multiple values at path " + + thePath + ": Element does not repeat"); } List values = (List) Lists.newArrayList(childValue); @@ -1401,7 +1569,6 @@ public class FhirTerser { return values; } } - } target = childValue; @@ -1409,12 +1576,12 @@ public class FhirTerser { if (!lastPart) { BaseRuntimeElementDefinition nextDef = myContext.getElementDefinition(target.getClass()); if (!(nextDef instanceof BaseRuntimeElementCompositeDefinition)) { - throw new DataFormatException(Msg.code(1799) + "Invalid path " + thePath + ": Element of type " + def.getName() + " has no child named " + nextPart + " (this is a primitive type)"); + throw new DataFormatException(Msg.code(1799) + "Invalid path " + thePath + ": Element of type " + + def.getName() + " has no child named " + nextPart + " (this is a primitive type)"); } def = (BaseRuntimeElementCompositeDefinition) nextDef; } } - } /** @@ -1436,10 +1603,13 @@ public class FhirTerser { */ @SuppressWarnings("unchecked") @Nonnull - public T addElement(@Nonnull IBase theTarget, @Nonnull String thePath, @Nullable String theValue) { + public T addElement( + @Nonnull IBase theTarget, @Nonnull String thePath, @Nullable String theValue) { T value = (T) doAddElement(theTarget, thePath, 1).get(0); if (!(value instanceof IPrimitiveType)) { - throw new DataFormatException(Msg.code(1800) + "Element at path " + thePath + " is not a primitive datatype. Found: " + myContext.getElementDefinition(value.getClass()).getName()); + throw new DataFormatException( + Msg.code(1800) + "Element at path " + thePath + " is not a primitive datatype. Found: " + + myContext.getElementDefinition(value.getClass()).getName()); } ((IPrimitiveType) value).setValueAsString(theValue); @@ -1466,10 +1636,13 @@ public class FhirTerser { */ @SuppressWarnings("unchecked") @Nonnull - public T setElement(@Nonnull IBase theTarget, @Nonnull String thePath, @Nullable String theValue) { + public T setElement( + @Nonnull IBase theTarget, @Nonnull String thePath, @Nullable String theValue) { T value = (T) doAddElement(theTarget, thePath, -1).get(0); if (!(value instanceof IPrimitiveType)) { - throw new DataFormatException(Msg.code(1801) + "Element at path " + thePath + " is not a primitive datatype. 
Found: " + myContext.getElementDefinition(value.getClass()).getName()); + throw new DataFormatException( + Msg.code(1801) + "Element at path " + thePath + " is not a primitive datatype. Found: " + + myContext.getElementDefinition(value.getClass()).getName()); } ((IPrimitiveType) value).setValueAsString(theValue); @@ -1492,12 +1665,13 @@ public class FhirTerser { for (IBase target : targets) { if (!(target instanceof IPrimitiveType)) { - throw new DataFormatException(Msg.code(1802) + "Element at path " + thePath + " is not a primitive datatype. Found: " + myContext.getElementDefinition(target.getClass()).getName()); + throw new DataFormatException(Msg.code(1802) + "Element at path " + thePath + + " is not a primitive datatype. Found: " + + myContext.getElementDefinition(target.getClass()).getName()); } ((IPrimitiveType) target).setValueAsString(valuesIter.next()); } - } /** @@ -1536,7 +1710,6 @@ public class FhirTerser { STORE_AND_REUSE_RESULTS } - @FunctionalInterface private interface ICompartmentOwnerVisitor { @@ -1544,7 +1717,6 @@ public class FhirTerser { * @return Returns true if we should keep looking for more */ boolean consume(IIdType theCompartmentOwner); - } public static class ContainedResources { @@ -1668,12 +1840,8 @@ public class FhirTerser { getResourceToIdMap().put(nextResource, new IdDt(nextCandidate)); } - } - } - } } - } diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirVersionIndependentConcept.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirVersionIndependentConcept.java index 421468b7a56..22d6b1ca079 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirVersionIndependentConcept.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/FhirVersionIndependentConcept.java @@ -47,10 +47,7 @@ public class FhirVersionIndependentConcept implements ComparableSubscription.channel element *